summaryrefslogtreecommitdiff
path: root/chromium/tools
diff options
context:
space:
mode:
authorAllan Sandfeld Jensen <allan.jensen@theqtcompany.com>2016-05-09 14:22:11 +0200
committerAllan Sandfeld Jensen <allan.jensen@qt.io>2016-05-09 15:11:45 +0000
commit2ddb2d3e14eef3de7dbd0cef553d669b9ac2361c (patch)
treee75f511546c5fd1a173e87c1f9fb11d7ac8d1af3 /chromium/tools
parenta4f3d46271c57e8155ba912df46a05559d14726e (diff)
downloadqtwebengine-chromium-2ddb2d3e14eef3de7dbd0cef553d669b9ac2361c.tar.gz
BASELINE: Update Chromium to 51.0.2704.41
Also adds in all smaller components by reversing logic for exclusion. Change-Id: Ibf90b506e7da088ea2f65dcf23f2b0992c504422 Reviewed-by: Joerg Bornemann <joerg.bornemann@theqtcompany.com>
Diffstat (limited to 'chromium/tools')
-rw-r--r--chromium/tools/DEPS7
-rw-r--r--chromium/tools/OWNERS58
-rw-r--r--chromium/tools/accessibility/OWNERS2
-rwxr-xr-xchromium/tools/accessibility/dump_accessibility_tree_auralinux.py42
-rw-r--r--chromium/tools/accessibility/nvda/OWNERS3
-rw-r--r--chromium/tools/accessibility/nvda/README.txt58
-rwxr-xr-xchromium/tools/accessibility/nvda/nvda_chrome_tests.py229
-rwxr-xr-xchromium/tools/accessibility/rebase_dump_accessibility_tree_test.py133
-rw-r--r--chromium/tools/android/android_tools.gyp10
-rw-r--r--chromium/tools/android/push_apps_to_background/push_apps_to_background.gyp21
-rw-r--r--chromium/tools/auto_bisect/OWNERS6
-rw-r--r--chromium/tools/auto_bisect/PRESUBMIT.py100
-rw-r--r--chromium/tools/auto_bisect/README18
-rw-r--r--chromium/tools/auto_bisect/__init__.py0
-rw-r--r--chromium/tools/auto_bisect/bisect.cfg56
-rwxr-xr-xchromium/tools/auto_bisect/bisect_perf_regression.py2903
-rw-r--r--chromium/tools/auto_bisect/bisect_perf_regression_test.py759
-rw-r--r--chromium/tools/auto_bisect/bisect_printer.py342
-rw-r--r--chromium/tools/auto_bisect/bisect_results.py262
-rw-r--r--chromium/tools/auto_bisect/bisect_results_json.py88
-rw-r--r--chromium/tools/auto_bisect/bisect_results_test.py283
-rw-r--r--chromium/tools/auto_bisect/bisect_state.py99
-rw-r--r--chromium/tools/auto_bisect/bisect_state_test.py31
-rw-r--r--chromium/tools/auto_bisect/bisect_utils.py560
-rw-r--r--chromium/tools/auto_bisect/builder.py359
-rw-r--r--chromium/tools/auto_bisect/configs/android.perf_test.sunspider.cfg11
-rw-r--r--chromium/tools/auto_bisect/configs/linux.bisect.functional.cfg13
-rw-r--r--chromium/tools/auto_bisect/configs/linux.bisect.page_cycler.cfg17
-rw-r--r--chromium/tools/auto_bisect/configs/linux.perf_test.tab_switching.cfg11
-rw-r--r--chromium/tools/auto_bisect/configs/mac.bisect.blink_perf.cfg14
-rw-r--r--chromium/tools/auto_bisect/configs/mac.bisect.tab_switching.cfg14
-rwxr-xr-xchromium/tools/auto_bisect/configs/try.py150
-rw-r--r--chromium/tools/auto_bisect/configs/win.bisect.dromaeo.cfg14
-rw-r--r--chromium/tools/auto_bisect/configs/win.perf_test.kraken.cfg11
-rw-r--r--chromium/tools/auto_bisect/configs/winx64.bisect.dromaeo.cfg15
-rw-r--r--chromium/tools/auto_bisect/configs/winx64.perf_test.kraken.cfg12
-rw-r--r--chromium/tools/auto_bisect/fetch_build.py514
-rw-r--r--chromium/tools/auto_bisect/fetch_build_test.py261
-rw-r--r--chromium/tools/auto_bisect/math_utils.py138
-rw-r--r--chromium/tools/auto_bisect/math_utils_test.py115
-rw-r--r--chromium/tools/auto_bisect/query_crbug.py82
-rw-r--r--chromium/tools/auto_bisect/query_crbug_test.py82
-rw-r--r--chromium/tools/auto_bisect/request_build.py208
-rwxr-xr-xchromium/tools/auto_bisect/run_tests45
-rw-r--r--chromium/tools/auto_bisect/source_control.py232
-rw-r--r--chromium/tools/auto_bisect/source_control_test.py83
-rw-r--r--chromium/tools/auto_bisect/test_data/closed.json140
-rw-r--r--chromium/tools/auto_bisect/test_data/open.json174
-rw-r--r--chromium/tools/auto_bisect/ttest.py209
-rw-r--r--chromium/tools/auto_bisect/ttest_test.py130
-rw-r--r--chromium/tools/bash-completion25
-rw-r--r--chromium/tools/battor_agent/BUILD.gn62
-rw-r--r--chromium/tools/battor_agent/DEPS5
-rw-r--r--chromium/tools/battor_agent/OWNERS3
-rw-r--r--chromium/tools/battor_agent/README25
-rw-r--r--chromium/tools/battor_agent/battor_agent.cc570
-rw-r--r--chromium/tools/battor_agent/battor_agent.gyp32
-rw-r--r--chromium/tools/battor_agent/battor_agent.h187
-rw-r--r--chromium/tools/battor_agent/battor_agent_bin.cc311
-rw-r--r--chromium/tools/battor_agent/battor_agent_unittest.cc876
-rw-r--r--chromium/tools/battor_agent/battor_agent_unittests.isolate27
-rw-r--r--chromium/tools/battor_agent/battor_connection.cc18
-rw-r--r--chromium/tools/battor_agent/battor_connection.h79
-rw-r--r--chromium/tools/battor_agent/battor_connection_impl.cc276
-rw-r--r--chromium/tools/battor_agent/battor_connection_impl.h101
-rw-r--r--chromium/tools/battor_agent/battor_connection_impl_unittest.cc398
-rw-r--r--chromium/tools/battor_agent/battor_error.cc31
-rw-r--r--chromium/tools/battor_agent/battor_error.h26
-rw-r--r--chromium/tools/battor_agent/battor_finder.cc60
-rw-r--r--chromium/tools/battor_agent/battor_finder.h24
-rw-r--r--chromium/tools/battor_agent/battor_protocol_types.h146
-rw-r--r--chromium/tools/battor_agent/battor_protocol_types_unittest.cc66
-rw-r--r--chromium/tools/battor_agent/battor_sample_converter.cc108
-rw-r--r--chromium/tools/battor_agent/battor_sample_converter.h55
-rw-r--r--chromium/tools/battor_agent/battor_sample_converter_unittest.cc161
-rw-r--r--chromium/tools/binary_size/OWNERS3
-rw-r--r--chromium/tools/binary_size/PRESUBMIT.py32
-rw-r--r--chromium/tools/binary_size/README.txt152
-rw-r--r--chromium/tools/binary_size/binary_size_utils.py71
-rwxr-xr-xchromium/tools/binary_size/explain_binary_size_delta.py484
-rwxr-xr-xchromium/tools/binary_size/explain_binary_size_delta_unittest.py621
-rwxr-xr-xchromium/tools/binary_size/run_binary_size_analysis.py679
-rw-r--r--chromium/tools/binary_size/template/D3SymbolTreeMap.js938
-rw-r--r--chromium/tools/binary_size/template/index.html525
-rw-r--r--chromium/tools/binary_size/template/test-data-generator.html157
-rwxr-xr-xchromium/tools/bisect-builds.py1309
-rwxr-xr-xchromium/tools/bisect-manual-test.py53
-rw-r--r--chromium/tools/bisect_test.py53
-rwxr-xr-xchromium/tools/boilerplate.py101
-rw-r--r--chromium/tools/cfi/OWNERS2
-rw-r--r--chromium/tools/cfi/blacklist.txt80
-rwxr-xr-xchromium/tools/check_ecs_deps/check_ecs_deps.py205
-rwxr-xr-xchromium/tools/check_git_config.py540
-rwxr-xr-xchromium/tools/check_grd_for_unused_strings.py183
-rwxr-xr-xchromium/tools/checkbins/checkbins.py130
-rw-r--r--chromium/tools/checklicenses/OWNERS3
-rwxr-xr-xchromium/tools/checklicenses/checklicenses.py764
-rw-r--r--chromium/tools/checkperms/OWNERS1
-rw-r--r--chromium/tools/checkperms/PRESUBMIT.py27
-rwxr-xr-xchromium/tools/checkperms/checkperms.py486
-rw-r--r--chromium/tools/chrome_extensions/chromium_code_coverage/js/app.js420
-rw-r--r--chromium/tools/chrome_extensions/chromium_code_coverage/manifest.json18
-rw-r--r--chromium/tools/chrome_proxy/OWNERS8
-rw-r--r--chromium/tools/chrome_proxy/chrome_proxy_config.py20
-rw-r--r--chromium/tools/chrome_proxy/common/__init__.py0
-rw-r--r--chromium/tools/chrome_proxy/common/chrome_proxy_benchmark.py21
-rw-r--r--chromium/tools/chrome_proxy/common/chrome_proxy_measurements.py104
-rw-r--r--chromium/tools/chrome_proxy/common/chrome_proxy_metrics.py116
-rw-r--r--chromium/tools/chrome_proxy/common/chrome_proxy_metrics_unittest.py44
-rw-r--r--chromium/tools/chrome_proxy/common/inspector_network.py293
-rw-r--r--chromium/tools/chrome_proxy/common/inspector_network_unittest.py126
-rw-r--r--chromium/tools/chrome_proxy/common/network_metrics.py219
-rw-r--r--chromium/tools/chrome_proxy/common/network_metrics_unittest.py176
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/__init__.py0
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py288
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py563
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py907
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_metrics_unittest.py398
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/__init__.py27
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/block_once.py52
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/bypass.py27
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/client_type.py74
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/corsbypass.py27
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/exp_directive.py27
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/fallback_viaheader.py27
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/html5test.py27
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/http_to_direct_fallback.py26
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/https_bypass.py27
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/lo_fi.py30
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/lo_fi_preview.py30
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/pass_through.py39
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/reenable_after_bypass.py41
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/reenable_after_set_bypass.py29
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/safebrowsing.py40
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/smoke.py92
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/synthetic.py28
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/video.py73
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/video_instrumented.py25
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/youtube.py27
-rw-r--r--chromium/tools/chrome_proxy/integration_tests/videowrapper.js100
-rw-r--r--chromium/tools/chrome_proxy/live_tests/__init__.py0
-rw-r--r--chromium/tools/chrome_proxy/live_tests/chrome_proxy_benchmark.py67
-rw-r--r--chromium/tools/chrome_proxy/live_tests/chrome_proxy_measurements.py87
-rw-r--r--chromium/tools/chrome_proxy/live_tests/chrome_proxy_metrics.py136
-rw-r--r--chromium/tools/chrome_proxy/live_tests/chrome_proxy_metrics_unittest.py106
-rw-r--r--chromium/tools/chrome_proxy/live_tests/pagesets/__init__.py20
-rw-r--r--chromium/tools/chrome_proxy/live_tests/pagesets/data/chrome_proxy_top_20.json27
-rw-r--r--chromium/tools/chrome_proxy/live_tests/pagesets/data/chrome_proxy_top_20_000.wpr.sha11
-rw-r--r--chromium/tools/chrome_proxy/live_tests/pagesets/metrics.py27
-rw-r--r--chromium/tools/chrome_proxy/live_tests/pagesets/top_20.py91
-rwxr-xr-xchromium/tools/chrome_proxy/run_benchmark18
-rwxr-xr-xchromium/tools/chrome_proxy/run_livetests18
-rwxr-xr-xchromium/tools/chrome_proxy/run_tests23
-rw-r--r--chromium/tools/chrome_proxy/testserver/app.yaml14
-rw-r--r--chromium/tools/chrome_proxy/testserver/data/image1.pngbin0 -> 308776 bytes
-rw-r--r--chromium/tools/chrome_proxy/testserver/image/image1.pngbin0 -> 308776 bytes
-rw-r--r--chromium/tools/chrome_proxy/testserver/server.go163
-rw-r--r--chromium/tools/chrome_proxy/testserver/server_test.go103
-rw-r--r--chromium/tools/clang/CMakeLists.txt10
-rw-r--r--chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp227
-rw-r--r--chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.h9
-rw-r--r--chromium/tools/clang/blink_gc_plugin/CMakeLists.txt6
-rw-r--r--chromium/tools/clang/blink_gc_plugin/RecordInfo.cpp2
-rw-r--r--chromium/tools/clang/blink_gc_plugin/RecordInfo.h11
-rw-r--r--chromium/tools/clang/pass_to_move/CMakeLists.txt1
-rw-r--r--chromium/tools/clang/plugins/CMakeLists.txt11
-rw-r--r--chromium/tools/clang/plugins/CheckIPCVisitor.cpp288
-rw-r--r--chromium/tools/clang/plugins/CheckIPCVisitor.h99
-rw-r--r--chromium/tools/clang/plugins/ChromeClassTester.cpp43
-rw-r--r--chromium/tools/clang/plugins/ChromeClassTester.h15
-rw-r--r--chromium/tools/clang/plugins/FindBadConstructsAction.cpp14
-rw-r--r--chromium/tools/clang/plugins/FindBadConstructsConsumer.cpp101
-rw-r--r--chromium/tools/clang/plugins/FindBadConstructsConsumer.h12
-rw-r--r--chromium/tools/clang/plugins/Options.h26
-rw-r--r--chromium/tools/clang/pylib/__init__.py3
-rw-r--r--chromium/tools/clang/pylib/clang/__init__.py3
-rwxr-xr-xchromium/tools/clang/pylib/clang/compile_db.py33
-rwxr-xr-xchromium/tools/clang/pylib/clang/plugin_testing.py122
-rw-r--r--chromium/tools/clang/rewrite_scoped_refptr/CMakeLists.txt1
-rw-r--r--chromium/tools/clang/rewrite_scoped_refptr/RewriteScopedRefptr.cpp38
-rw-r--r--chromium/tools/clang/rewrite_to_chrome_style/CMakeLists.txt1
-rw-r--r--chromium/tools/clang/rewrite_to_chrome_style/RewriteToChromeStyle.cpp787
-rwxr-xr-xchromium/tools/clang/scripts/build_file.py87
-rwxr-xr-xchromium/tools/clang/scripts/package.py136
-rwxr-xr-xchromium/tools/clang/scripts/run_tool.py42
-rwxr-xr-xchromium/tools/clang/scripts/test_tool.py32
-rwxr-xr-xchromium/tools/clang/scripts/update.py246
-rwxr-xr-xchromium/tools/clang/scripts/upload_revision.py81
-rw-r--r--chromium/tools/clang/translation_unit/TranslationUnitGenerator.cpp75
-rw-r--r--chromium/tools/clang/translation_unit/test_files/compile_commands.json.template12
-rw-r--r--chromium/tools/clang/translation_unit/test_files/sysroot/README19
-rw-r--r--chromium/tools/clang/translation_unit/test_files/sysroot/usr/include/c++/4.6/string0
-rw-r--r--chromium/tools/clang/translation_unit/test_files/sysroot/usr/lib/gcc/x86_64-linux-gnu/4.6/crtbegin.o0
-rw-r--r--chromium/tools/clang/translation_unit/test_files/test.cc.filepaths.expected4
-rw-r--r--chromium/tools/clang/translation_unit/test_files/test_relative_sysroot.cc5
-rw-r--r--chromium/tools/clang/translation_unit/test_files/test_relative_sysroot.cc.filepaths.expected2
-rwxr-xr-xchromium/tools/clang/translation_unit/test_translation_unit.py23
-rw-r--r--chromium/tools/code_coverage/croc.css102
-rwxr-xr-xchromium/tools/code_coverage/croc.py722
-rw-r--r--chromium/tools/code_coverage/croc_html.py451
-rw-r--r--chromium/tools/code_coverage/croc_scan.py164
-rwxr-xr-xchromium/tools/code_coverage/croc_scan_test.py187
-rwxr-xr-xchromium/tools/code_coverage/croc_test.py758
-rw-r--r--chromium/tools/code_coverage/example.croc197
-rw-r--r--chromium/tools/code_coverage/third_party/README.chromium11
-rw-r--r--chromium/tools/code_coverage/third_party/sorttable.js494
-rw-r--r--chromium/tools/copyright_scanner/OWNERS3
-rw-r--r--chromium/tools/copyright_scanner/PRESUBMIT.py29
-rwxr-xr-xchromium/tools/copyright_scanner/__init__.py6
-rw-r--r--chromium/tools/copyright_scanner/copyright_scanner.py401
-rwxr-xr-xchromium/tools/copyright_scanner/copyright_scanner_unittest.py308
-rw-r--r--chromium/tools/copyright_scanner/third_party_files_whitelist.txt251
-rwxr-xr-xchromium/tools/coverity/coverity.py308
-rw-r--r--chromium/tools/cr/OWNERS3
-rw-r--r--chromium/tools/cr/README14
-rwxr-xr-xchromium/tools/cr/cr-bash-helpers.sh59
-rwxr-xr-xchromium/tools/cr/cr.sh14
-rw-r--r--chromium/tools/cr/cr/__init__.py21
-rw-r--r--chromium/tools/cr/cr/actions/__init__.py17
-rw-r--r--chromium/tools/cr/cr/actions/action.py47
-rw-r--r--chromium/tools/cr/cr/actions/adb.py150
-rw-r--r--chromium/tools/cr/cr/actions/builder.py82
-rw-r--r--chromium/tools/cr/cr/actions/debugger.py51
-rw-r--r--chromium/tools/cr/cr/actions/gdb.py39
-rw-r--r--chromium/tools/cr/cr/actions/gn.py88
-rw-r--r--chromium/tools/cr/cr/actions/gyp.py39
-rw-r--r--chromium/tools/cr/cr/actions/installer.py55
-rw-r--r--chromium/tools/cr/cr/actions/linux.py52
-rw-r--r--chromium/tools/cr/cr/actions/linuxchromeos.py30
-rw-r--r--chromium/tools/cr/cr/actions/ninja.py120
-rw-r--r--chromium/tools/cr/cr/actions/runner.py88
-rw-r--r--chromium/tools/cr/cr/auto/__init__.py10
-rw-r--r--chromium/tools/cr/cr/auto/build/__init__.py5
-rw-r--r--chromium/tools/cr/cr/auto/client/__init__.py5
-rw-r--r--chromium/tools/cr/cr/auto/user/__init__.py5
-rw-r--r--chromium/tools/cr/cr/autocomplete.py23
-rw-r--r--chromium/tools/cr/cr/base/__init__.py11
-rw-r--r--chromium/tools/cr/cr/base/android.py130
-rw-r--r--chromium/tools/cr/cr/base/arch.py81
-rw-r--r--chromium/tools/cr/cr/base/buildtype.py58
-rw-r--r--chromium/tools/cr/cr/base/client.py284
-rw-r--r--chromium/tools/cr/cr/base/context.py245
-rw-r--r--chromium/tools/cr/cr/base/host.py185
-rw-r--r--chromium/tools/cr/cr/base/linux.py44
-rw-r--r--chromium/tools/cr/cr/base/linux_chromeos.py31
-rw-r--r--chromium/tools/cr/cr/base/mac.py44
-rw-r--r--chromium/tools/cr/cr/base/platform.py70
-rw-r--r--chromium/tools/cr/cr/commands/__init__.py15
-rw-r--r--chromium/tools/cr/cr/commands/args.py32
-rw-r--r--chromium/tools/cr/cr/commands/build.py81
-rw-r--r--chromium/tools/cr/cr/commands/clobber.py35
-rw-r--r--chromium/tools/cr/cr/commands/command.py96
-rw-r--r--chromium/tools/cr/cr/commands/debug.py40
-rw-r--r--chromium/tools/cr/cr/commands/gn.py39
-rw-r--r--chromium/tools/cr/cr/commands/info.py44
-rw-r--r--chromium/tools/cr/cr/commands/init.py171
-rw-r--r--chromium/tools/cr/cr/commands/install.py36
-rw-r--r--chromium/tools/cr/cr/commands/prepare.py69
-rw-r--r--chromium/tools/cr/cr/commands/run.py53
-rw-r--r--chromium/tools/cr/cr/commands/select.py59
-rw-r--r--chromium/tools/cr/cr/commands/shell.py53
-rw-r--r--chromium/tools/cr/cr/commands/sync.py57
-rw-r--r--chromium/tools/cr/cr/config.py244
-rw-r--r--chromium/tools/cr/cr/fixups/__init__.py9
-rw-r--r--chromium/tools/cr/cr/fixups/arch.py54
-rw-r--r--chromium/tools/cr/cr/loader.py126
-rw-r--r--chromium/tools/cr/cr/plugin.py336
-rw-r--r--chromium/tools/cr/cr/targets/__init__.py13
-rw-r--r--chromium/tools/cr/cr/targets/chrome.py24
-rw-r--r--chromium/tools/cr/cr/targets/chrome_public.py27
-rw-r--r--chromium/tools/cr/cr/targets/content_shell.py26
-rw-r--r--chromium/tools/cr/cr/targets/target.py159
-rw-r--r--chromium/tools/cr/cr/visitor.py260
-rw-r--r--chromium/tools/cr/main.py95
-rw-r--r--chromium/tools/cros/OWNERS7
-rw-r--r--chromium/tools/cros/bootstrap_deps23
-rw-r--r--chromium/tools/cygprofile/BUILD.gn39
-rw-r--r--chromium/tools/cygprofile/OWNERS2
-rw-r--r--chromium/tools/cygprofile/PRESUBMIT.py34
-rwxr-xr-xchromium/tools/cygprofile/check_orderfile.py105
-rwxr-xr-xchromium/tools/cygprofile/check_orderfile_unittest.py46
-rwxr-xr-xchromium/tools/cygprofile/cyglog_to_orderfile.py294
-rwxr-xr-xchromium/tools/cygprofile/cyglog_to_orderfile_unittest.py113
-rw-r--r--chromium/tools/cygprofile/cygprofile.cc376
-rw-r--r--chromium/tools/cygprofile/cygprofile.h166
-rw-r--r--chromium/tools/cygprofile/cygprofile_unittest.cc102
-rwxr-xr-xchromium/tools/cygprofile/cygprofile_utils.py64
-rwxr-xr-xchromium/tools/cygprofile/cygprofile_utils_unittest.py22
-rwxr-xr-xchromium/tools/cygprofile/mergetraces.py254
-rw-r--r--chromium/tools/cygprofile/mergetraces_unittest.py51
-rwxr-xr-xchromium/tools/cygprofile/patch_orderfile.py407
-rwxr-xr-xchromium/tools/cygprofile/patch_orderfile_unittest.py152
-rw-r--r--chromium/tools/cygprofile/profile_android_startup.py366
-rwxr-xr-xchromium/tools/cygprofile/run_tests25
-rwxr-xr-xchromium/tools/cygprofile/symbol_extractor.py165
-rwxr-xr-xchromium/tools/cygprofile/symbol_extractor_unittest.py135
-rwxr-xr-xchromium/tools/diagnose-me.py109
-rwxr-xr-xchromium/tools/dromaeo_benchmark_runner/dromaeo_benchmark_runner.py266
-rw-r--r--chromium/tools/emacs/chrome-filetypes.el16
-rw-r--r--chromium/tools/emacs/flymake-chromium.el129
-rw-r--r--chromium/tools/emacs/trybot-linux.txt6
-rw-r--r--chromium/tools/emacs/trybot-mac.txt1985
-rw-r--r--chromium/tools/emacs/trybot-windows.txt72
-rw-r--r--chromium/tools/emacs/trybot.el176
-rwxr-xr-xchromium/tools/export_tarball/export_tarball.py197
-rwxr-xr-xchromium/tools/export_tarball/export_v8_tarball.py135
-rw-r--r--chromium/tools/find_runtime_symbols/OWNERS1
-rw-r--r--chromium/tools/find_runtime_symbols/PRESUBMIT.py45
-rw-r--r--chromium/tools/find_runtime_symbols/README24
-rwxr-xr-xchromium/tools/find_runtime_symbols/find_runtime_symbols.py214
-rwxr-xr-xchromium/tools/find_runtime_symbols/prepare_symbol_info.py252
-rwxr-xr-xchromium/tools/find_runtime_symbols/reduce_debugline.py68
-rw-r--r--chromium/tools/find_runtime_symbols/static_symbols.py277
-rw-r--r--chromium/tools/findit/OWNERS1
-rw-r--r--chromium/tools/findit/blame.py165
-rw-r--r--chromium/tools/findit/chromium_deps.py240
-rw-r--r--chromium/tools/findit/chromium_deps_unittest.py189
-rw-r--r--chromium/tools/findit/common/__init__.py4
-rw-r--r--chromium/tools/findit/common/cacert.pem2186
-rw-r--r--chromium/tools/findit/common/http_client.py31
-rw-r--r--chromium/tools/findit/common/http_client_local.py253
-rw-r--r--chromium/tools/findit/common/http_client_local_unittest.py15
-rw-r--r--chromium/tools/findit/common/utils.py68
-rw-r--r--chromium/tools/findit/component_dictionary.py122
-rw-r--r--chromium/tools/findit/config.ini26
-rw-r--r--chromium/tools/findit/crash_utils.py570
-rw-r--r--chromium/tools/findit/deps_config.json17
-rw-r--r--chromium/tools/findit/findit_for_clusterfuzz.py224
-rw-r--r--chromium/tools/findit/findit_for_crash.py664
-rw-r--r--chromium/tools/findit/git_repository_parser.py293
-rw-r--r--chromium/tools/findit/match_set.py128
-rw-r--r--chromium/tools/findit/repository_parser_interface.py58
-rw-r--r--chromium/tools/findit/result.py19
-rwxr-xr-xchromium/tools/findit/run_all_tests.py18
-rw-r--r--chromium/tools/findit/stacktrace.py321
-rw-r--r--chromium/tools/findit/svn_repository_parser.py250
-rwxr-xr-xchromium/tools/flakiness/find_flakiness.py179
-rwxr-xr-xchromium/tools/flakiness/is_flaky.py58
-rw-r--r--chromium/tools/flakiness/is_flaky_test.py72
-rw-r--r--chromium/tools/gdb/gdb_chrome.py338
-rwxr-xr-xchromium/tools/gen_keyboard_overlay_data/gen_keyboard_overlay_data.py515
-rw-r--r--chromium/tools/generate_library_loader/OWNERS2
-rw-r--r--chromium/tools/generate_library_loader/generate_library_loader.gni3
-rw-r--r--chromium/tools/git/OWNERS1
-rw-r--r--chromium/tools/git/README16
-rwxr-xr-xchromium/tools/git/for-all-touched-files.py126
-rwxr-xr-xchromium/tools/git/git-diff-ide.py93
-rwxr-xr-xchromium/tools/git/git-utils.sh17
-rwxr-xr-xchromium/tools/git/graph.sh42
-rwxr-xr-xchromium/tools/git/mass-rename.py50
-rwxr-xr-xchromium/tools/git/mass-rename.sh17
-rwxr-xr-xchromium/tools/git/mffr.py169
-rwxr-xr-xchromium/tools/git/move_source_file.bat6
-rwxr-xr-xchromium/tools/git/move_source_file.py265
-rwxr-xr-xchromium/tools/git/post-checkout22
-rwxr-xr-xchromium/tools/git/post-merge12
-rwxr-xr-xchromium/tools/git/update-copyrights.sh7
-rw-r--r--chromium/tools/gn/BUILD.gn331
-rw-r--r--chromium/tools/gn/OWNERS1
-rw-r--r--chromium/tools/gn/README.md86
-rw-r--r--chromium/tools/gn/action_target_generator.cc185
-rw-r--r--chromium/tools/gn/action_target_generator.h40
-rw-r--r--chromium/tools/gn/action_target_generator_unittest.cc42
-rw-r--r--chromium/tools/gn/action_values.cc31
-rw-r--r--chromium/tools/gn/action_values.h68
-rw-r--r--chromium/tools/gn/args.cc313
-rw-r--r--chromium/tools/gn/args.h108
-rw-r--r--chromium/tools/gn/args_unittest.cc41
-rw-r--r--chromium/tools/gn/bin/compare_test_lists.py101
-rw-r--r--chromium/tools/gn/bin/gn-format.py58
-rwxr-xr-xchromium/tools/gn/bin/gyp_flag_compare.py280
-rwxr-xr-xchromium/tools/gn/bin/help_as_html.py105
-rwxr-xr-xchromium/tools/gn/bin/roll_gn.py462
-rw-r--r--chromium/tools/gn/binary_target_generator.cc146
-rw-r--r--chromium/tools/gn/binary_target_generator.h38
-rw-r--r--chromium/tools/gn/bootstrap/OWNERS1
-rwxr-xr-xchromium/tools/gn/bootstrap/bootstrap.py521
-rw-r--r--chromium/tools/gn/bootstrap/build.ninja.template25
-rw-r--r--chromium/tools/gn/bootstrap/build_mac.ninja.template25
-rw-r--r--chromium/tools/gn/build_settings.cc66
-rw-r--r--chromium/tools/gn/build_settings.h130
-rw-r--r--chromium/tools/gn/builder.cc515
-rw-r--r--chromium/tools/gn/builder.h141
-rw-r--r--chromium/tools/gn/builder_record.cc69
-rw-r--r--chromium/tools/gn/builder_record.h112
-rw-r--r--chromium/tools/gn/builder_unittest.cc249
-rw-r--r--chromium/tools/gn/bundle_data.cc112
-rw-r--r--chromium/tools/gn/bundle_data.h104
-rw-r--r--chromium/tools/gn/bundle_data_target_generator.cc94
-rw-r--r--chromium/tools/gn/bundle_data_target_generator.h33
-rw-r--r--chromium/tools/gn/bundle_file_rule.cc59
-rw-r--r--chromium/tools/gn/bundle_file_rule.h44
-rw-r--r--chromium/tools/gn/c_include_iterator.cc176
-rw-r--r--chromium/tools/gn/c_include_iterator.h57
-rw-r--r--chromium/tools/gn/c_include_iterator_unittest.cc159
-rw-r--r--chromium/tools/gn/command_args.cc352
-rw-r--r--chromium/tools/gn/command_check.cc262
-rw-r--r--chromium/tools/gn/command_clean.cc150
-rw-r--r--chromium/tools/gn/command_desc.cc763
-rw-r--r--chromium/tools/gn/command_format.cc1077
-rw-r--r--chromium/tools/gn/command_format_unittest.cc106
-rw-r--r--chromium/tools/gn/command_gen.cc323
-rw-r--r--chromium/tools/gn/command_help.cc276
-rw-r--r--chromium/tools/gn/command_ls.cc115
-rw-r--r--chromium/tools/gn/command_path.cc320
-rw-r--r--chromium/tools/gn/command_refs.cc491
-rw-r--r--chromium/tools/gn/commands.cc502
-rw-r--r--chromium/tools/gn/commands.h184
-rw-r--r--chromium/tools/gn/config.cc51
-rw-r--r--chromium/tools/gn/config.h65
-rw-r--r--chromium/tools/gn/config_unittest.cc85
-rw-r--r--chromium/tools/gn/config_values.cc46
-rw-r--r--chromium/tools/gn/config_values.h85
-rw-r--r--chromium/tools/gn/config_values_extractors.cc35
-rw-r--r--chromium/tools/gn/config_values_extractors.h107
-rw-r--r--chromium/tools/gn/config_values_extractors_unittest.cc138
-rw-r--r--chromium/tools/gn/config_values_generator.cc120
-rw-r--r--chromium/tools/gn/config_values_generator.h46
-rw-r--r--chromium/tools/gn/copy_target_generator.cc44
-rw-r--r--chromium/tools/gn/copy_target_generator.h28
-rw-r--r--chromium/tools/gn/create_bundle_target_generator.cc69
-rw-r--r--chromium/tools/gn/create_bundle_target_generator.h31
-rw-r--r--chromium/tools/gn/deps_iterator.cc56
-rw-r--r--chromium/tools/gn/deps_iterator.h74
-rw-r--r--chromium/tools/gn/docs/check.md112
-rw-r--r--chromium/tools/gn/docs/cookbook.md680
-rw-r--r--chromium/tools/gn/docs/cross_compiles.md96
-rw-r--r--chromium/tools/gn/docs/faq.md113
-rw-r--r--chromium/tools/gn/docs/hacking.md23
-rw-r--r--chromium/tools/gn/docs/language.md810
-rw-r--r--chromium/tools/gn/docs/quick_start.md366
-rw-r--r--chromium/tools/gn/docs/reference.md5838
-rw-r--r--chromium/tools/gn/docs/standalone.md41
-rw-r--r--chromium/tools/gn/docs/style_guide.md214
-rw-r--r--chromium/tools/gn/docs/update_binaries.md4
-rw-r--r--chromium/tools/gn/eclipse_writer.cc172
-rw-r--r--chromium/tools/gn/eclipse_writer.h67
-rw-r--r--chromium/tools/gn/err.cc195
-rw-r--r--chromium/tools/gn/err.h87
-rw-r--r--chromium/tools/gn/escape.cc209
-rw-r--r--chromium/tools/gn/escape.h80
-rw-r--r--chromium/tools/gn/escape_unittest.cc60
-rw-r--r--chromium/tools/gn/example/.gn2
-rw-r--r--chromium/tools/gn/example/BUILD.gn30
-rw-r--r--chromium/tools/gn/example/README.txt4
-rw-r--r--chromium/tools/gn/example/build/BUILD.gn19
-rw-r--r--chromium/tools/gn/example/build/BUILDCONFIG.gn38
-rw-r--r--chromium/tools/gn/example/build/toolchain/BUILD.gn80
-rw-r--r--chromium/tools/gn/example/hello.cc13
-rw-r--r--chromium/tools/gn/example/hello_shared.cc9
-rw-r--r--chromium/tools/gn/example/hello_shared.h32
-rw-r--r--chromium/tools/gn/example/hello_static.cc9
-rw-r--r--chromium/tools/gn/example/hello_static.h10
-rw-r--r--chromium/tools/gn/exec_process.cc260
-rw-r--r--chromium/tools/gn/exec_process.h25
-rw-r--r--chromium/tools/gn/exec_process_unittest.cc130
-rw-r--r--chromium/tools/gn/filesystem_utils.cc937
-rw-r--r--chromium/tools/gn/filesystem_utils.h218
-rw-r--r--chromium/tools/gn/filesystem_utils_unittest.cc805
-rw-r--r--chromium/tools/gn/format_test_data/001.gn2
-rw-r--r--chromium/tools/gn/format_test_data/001.golden3
-rw-r--r--chromium/tools/gn/format_test_data/002.gn6
-rw-r--r--chromium/tools/gn/format_test_data/002.golden6
-rw-r--r--chromium/tools/gn/format_test_data/003.gn10
-rw-r--r--chromium/tools/gn/format_test_data/003.golden10
-rw-r--r--chromium/tools/gn/format_test_data/004.gn10
-rw-r--r--chromium/tools/gn/format_test_data/004.golden13
-rw-r--r--chromium/tools/gn/format_test_data/005.gn5
-rw-r--r--chromium/tools/gn/format_test_data/005.golden5
-rw-r--r--chromium/tools/gn/format_test_data/006.gn9
-rw-r--r--chromium/tools/gn/format_test_data/006.golden5
-rw-r--r--chromium/tools/gn/format_test_data/007.gn9
-rw-r--r--chromium/tools/gn/format_test_data/007.golden11
-rw-r--r--chromium/tools/gn/format_test_data/008.gn1
-rw-r--r--chromium/tools/gn/format_test_data/008.golden5
-rw-r--r--chromium/tools/gn/format_test_data/009.gn2
-rw-r--r--chromium/tools/gn/format_test_data/009.golden9
-rw-r--r--chromium/tools/gn/format_test_data/010.gn2
-rw-r--r--chromium/tools/gn/format_test_data/010.golden9
-rw-r--r--chromium/tools/gn/format_test_data/011.gn4
-rw-r--r--chromium/tools/gn/format_test_data/011.golden13
-rw-r--r--chromium/tools/gn/format_test_data/012.gn16
-rw-r--r--chromium/tools/gn/format_test_data/012.golden22
-rw-r--r--chromium/tools/gn/format_test_data/013.gn7
-rw-r--r--chromium/tools/gn/format_test_data/013.golden7
-rw-r--r--chromium/tools/gn/format_test_data/014.gn6
-rw-r--r--chromium/tools/gn/format_test_data/014.golden5
-rw-r--r--chromium/tools/gn/format_test_data/015.gn4
-rw-r--r--chromium/tools/gn/format_test_data/015.golden6
-rw-r--r--chromium/tools/gn/format_test_data/016.gn1
-rw-r--r--chromium/tools/gn/format_test_data/016.golden1
-rw-r--r--chromium/tools/gn/format_test_data/017.gn15
-rw-r--r--chromium/tools/gn/format_test_data/017.golden16
-rw-r--r--chromium/tools/gn/format_test_data/018.gn3
-rw-r--r--chromium/tools/gn/format_test_data/018.golden3
-rw-r--r--chromium/tools/gn/format_test_data/019.gn23
-rw-r--r--chromium/tools/gn/format_test_data/019.golden23
-rw-r--r--chromium/tools/gn/format_test_data/020.gn5
-rw-r--r--chromium/tools/gn/format_test_data/020.golden5
-rw-r--r--chromium/tools/gn/format_test_data/021.gn33
-rw-r--r--chromium/tools/gn/format_test_data/021.golden61
-rw-r--r--chromium/tools/gn/format_test_data/022.gn6
-rw-r--r--chromium/tools/gn/format_test_data/022.golden6
-rw-r--r--chromium/tools/gn/format_test_data/023.gn38
-rw-r--r--chromium/tools/gn/format_test_data/023.golden88
-rw-r--r--chromium/tools/gn/format_test_data/024.gn1
-rw-r--r--chromium/tools/gn/format_test_data/024.golden2
-rw-r--r--chromium/tools/gn/format_test_data/025.gn5
-rw-r--r--chromium/tools/gn/format_test_data/025.golden9
-rw-r--r--chromium/tools/gn/format_test_data/026.gn6
-rw-r--r--chromium/tools/gn/format_test_data/026.golden7
-rw-r--r--chromium/tools/gn/format_test_data/027.gn3
-rw-r--r--chromium/tools/gn/format_test_data/027.golden5
-rw-r--r--chromium/tools/gn/format_test_data/028.gn9
-rw-r--r--chromium/tools/gn/format_test_data/028.golden7
-rw-r--r--chromium/tools/gn/format_test_data/029.gn9
-rw-r--r--chromium/tools/gn/format_test_data/029.golden9
-rw-r--r--chromium/tools/gn/format_test_data/030.gn12
-rw-r--r--chromium/tools/gn/format_test_data/030.golden12
-rw-r--r--chromium/tools/gn/format_test_data/031.gn8
-rw-r--r--chromium/tools/gn/format_test_data/031.golden8
-rw-r--r--chromium/tools/gn/format_test_data/032.gn6
-rw-r--r--chromium/tools/gn/format_test_data/032.golden7
-rw-r--r--chromium/tools/gn/format_test_data/033.gn8
-rw-r--r--chromium/tools/gn/format_test_data/033.golden8
-rw-r--r--chromium/tools/gn/format_test_data/034.gn13
-rw-r--r--chromium/tools/gn/format_test_data/035.gn1
-rw-r--r--chromium/tools/gn/format_test_data/035.golden1
-rw-r--r--chromium/tools/gn/format_test_data/036.gn9
-rw-r--r--chromium/tools/gn/format_test_data/036.golden9
-rw-r--r--chromium/tools/gn/format_test_data/037.gn5
-rw-r--r--chromium/tools/gn/format_test_data/037.golden6
-rw-r--r--chromium/tools/gn/format_test_data/038.gn4
-rw-r--r--chromium/tools/gn/format_test_data/038.golden3
-rw-r--r--chromium/tools/gn/format_test_data/039.gn6
-rw-r--r--chromium/tools/gn/format_test_data/039.golden4
-rw-r--r--chromium/tools/gn/format_test_data/040.gn9
-rw-r--r--chromium/tools/gn/format_test_data/041.gn12
-rw-r--r--chromium/tools/gn/format_test_data/041.golden12
-rw-r--r--chromium/tools/gn/format_test_data/042.gn44
-rw-r--r--chromium/tools/gn/format_test_data/042.golden110
-rw-r--r--chromium/tools/gn/format_test_data/043.gn6
-rw-r--r--chromium/tools/gn/format_test_data/043.golden7
-rw-r--r--chromium/tools/gn/format_test_data/044.gn10
-rw-r--r--chromium/tools/gn/format_test_data/044.golden11
-rw-r--r--chromium/tools/gn/format_test_data/045.gn10
-rw-r--r--chromium/tools/gn/format_test_data/045.golden14
-rw-r--r--chromium/tools/gn/format_test_data/046.gn22
-rw-r--r--chromium/tools/gn/format_test_data/046.golden19
-rw-r--r--chromium/tools/gn/format_test_data/047.gn7
-rw-r--r--chromium/tools/gn/format_test_data/047.golden10
-rw-r--r--chromium/tools/gn/format_test_data/048.gn19
-rw-r--r--chromium/tools/gn/format_test_data/048.golden19
-rw-r--r--chromium/tools/gn/format_test_data/049.gn14
-rw-r--r--chromium/tools/gn/format_test_data/050.gn10
-rw-r--r--chromium/tools/gn/format_test_data/050.golden27
-rw-r--r--chromium/tools/gn/format_test_data/051.gn6
-rw-r--r--chromium/tools/gn/format_test_data/051.golden7
-rw-r--r--chromium/tools/gn/format_test_data/052.gn11
-rw-r--r--chromium/tools/gn/format_test_data/052.golden12
-rw-r--r--chromium/tools/gn/format_test_data/053.gn7
-rw-r--r--chromium/tools/gn/format_test_data/053.golden8
-rw-r--r--chromium/tools/gn/format_test_data/054.gn7
-rw-r--r--chromium/tools/gn/format_test_data/054.golden8
-rw-r--r--chromium/tools/gn/format_test_data/055.gn10
-rw-r--r--chromium/tools/gn/format_test_data/055.golden11
-rw-r--r--chromium/tools/gn/format_test_data/056.gn45
-rw-r--r--chromium/tools/gn/format_test_data/056.golden45
-rw-r--r--chromium/tools/gn/format_test_data/057.gn24
-rw-r--r--chromium/tools/gn/format_test_data/057.golden24
-rw-r--r--chromium/tools/gn/format_test_data/058.gn2
-rw-r--r--chromium/tools/gn/format_test_data/058.golden2
-rw-r--r--chromium/tools/gn/format_test_data/059.gn10
-rw-r--r--chromium/tools/gn/format_test_data/059.golden11
-rw-r--r--chromium/tools/gn/format_test_data/060.gn2
-rw-r--r--chromium/tools/gn/format_test_data/060.golden2
-rw-r--r--chromium/tools/gn/format_test_data/061.gn9
-rw-r--r--chromium/tools/gn/format_test_data/061.golden9
-rw-r--r--chromium/tools/gn/format_test_data/062.gn112
-rw-r--r--chromium/tools/gn/format_test_data/062.golden117
-rw-r--r--chromium/tools/gn/format_test_data/063.gn36
-rw-r--r--chromium/tools/gn/format_test_data/063.golden36
-rw-r--r--chromium/tools/gn/format_test_data/064.gn3
-rw-r--r--chromium/tools/gn/format_test_data/064.golden5
-rw-r--r--chromium/tools/gn/format_test_data/065.gn4
-rw-r--r--chromium/tools/gn/format_test_data/065.golden8
-rw-r--r--chromium/tools/gn/format_test_data/066.gn30
-rw-r--r--chromium/tools/gn/format_test_data/066.golden28
-rw-r--r--chromium/tools/gn/function_exec_script.cc258
-rw-r--r--chromium/tools/gn/function_foreach.cc121
-rw-r--r--chromium/tools/gn/function_foreach_unittest.cc75
-rw-r--r--chromium/tools/gn/function_forward_variables_from.cc227
-rw-r--r--chromium/tools/gn/function_forward_variables_from_unittest.cc208
-rw-r--r--chromium/tools/gn/function_get_label_info.cc162
-rw-r--r--chromium/tools/gn/function_get_label_info_unittest.cc101
-rw-r--r--chromium/tools/gn/function_get_path_info.cc253
-rw-r--r--chromium/tools/gn/function_get_path_info_unittest.cc120
-rw-r--r--chromium/tools/gn/function_get_target_outputs.cc140
-rw-r--r--chromium/tools/gn/function_get_target_outputs_unittest.cc104
-rw-r--r--chromium/tools/gn/function_process_file_template.cc107
-rw-r--r--chromium/tools/gn/function_process_file_template_unittest.cc64
-rw-r--r--chromium/tools/gn/function_read_file.cc78
-rw-r--r--chromium/tools/gn/function_rebase_path.cc289
-rw-r--r--chromium/tools/gn/function_rebase_path_unittest.cc186
-rw-r--r--chromium/tools/gn/function_set_default_toolchain.cc78
-rw-r--r--chromium/tools/gn/function_set_defaults.cc92
-rw-r--r--chromium/tools/gn/function_template.cc198
-rw-r--r--chromium/tools/gn/function_toolchain.cc1022
-rw-r--r--chromium/tools/gn/function_write_file.cc103
-rw-r--r--chromium/tools/gn/function_write_file_unittest.cc90
-rw-r--r--chromium/tools/gn/functions.cc930
-rw-r--r--chromium/tools/gn/functions.h488
-rw-r--r--chromium/tools/gn/functions_target.cc734
-rw-r--r--chromium/tools/gn/functions_target_unittest.cc38
-rw-r--r--chromium/tools/gn/functions_unittest.cc92
-rw-r--r--chromium/tools/gn/gn.gyp24
-rw-r--r--chromium/tools/gn/gn_main.cc83
-rw-r--r--chromium/tools/gn/group_target_generator.cc25
-rw-r--r--chromium/tools/gn/group_target_generator.h27
-rw-r--r--chromium/tools/gn/group_target_generator_unittest.cc46
-rw-r--r--chromium/tools/gn/header_checker.cc583
-rw-r--r--chromium/tools/gn/header_checker.h183
-rw-r--r--chromium/tools/gn/header_checker_unittest.cc290
-rw-r--r--chromium/tools/gn/import_manager.cc93
-rw-r--r--chromium/tools/gn/import_manager.h42
-rw-r--r--chromium/tools/gn/inherited_libraries.cc76
-rw-r--r--chromium/tools/gn/inherited_libraries.h71
-rw-r--r--chromium/tools/gn/inherited_libraries_unittest.cc135
-rw-r--r--chromium/tools/gn/input_conversion.cc213
-rw-r--r--chromium/tools/gn/input_conversion.h30
-rw-r--r--chromium/tools/gn/input_conversion_unittest.cc182
-rw-r--r--chromium/tools/gn/input_file.cc31
-rw-r--r--chromium/tools/gn/input_file.h65
-rw-r--r--chromium/tools/gn/input_file_manager.cc323
-rw-r--r--chromium/tools/gn/input_file_manager.h156
-rw-r--r--chromium/tools/gn/item.cc49
-rw-r--r--chromium/tools/gn/item.h63
-rw-r--r--chromium/tools/gn/label.cc279
-rw-r--r--chromium/tools/gn/label.h126
-rw-r--r--chromium/tools/gn/label_pattern.cc267
-rw-r--r--chromium/tools/gn/label_pattern.h76
-rw-r--r--chromium/tools/gn/label_pattern_unittest.cc86
-rw-r--r--chromium/tools/gn/label_ptr.h117
-rw-r--r--chromium/tools/gn/label_unittest.cc95
-rw-r--r--chromium/tools/gn/last_commit_position.py101
-rw-r--r--chromium/tools/gn/lib_file.cc30
-rw-r--r--chromium/tools/gn/lib_file.h58
-rw-r--r--chromium/tools/gn/loader.cc420
-rw-r--r--chromium/tools/gn/loader.h181
-rw-r--r--chromium/tools/gn/loader_unittest.cc186
-rw-r--r--chromium/tools/gn/location.cc78
-rw-r--r--chromium/tools/gn/location.h56
-rw-r--r--chromium/tools/gn/misc/emacs/gn-mode.el155
-rw-r--r--chromium/tools/gn/misc/tm/GN.tmLanguage102
-rw-r--r--chromium/tools/gn/misc/tm/GN.tmPreferences22
-rw-r--r--chromium/tools/gn/misc/vim/README.chromium5
-rw-r--r--chromium/tools/gn/misc/vim/ftdetect/gnfiletype.vim27
-rw-r--r--chromium/tools/gn/misc/vim/syntax/gn.vim82
-rw-r--r--chromium/tools/gn/ninja_action_target_writer.cc221
-rw-r--r--chromium/tools/gn/ninja_action_target_writer.h64
-rw-r--r--chromium/tools/gn/ninja_action_target_writer_unittest.cc364
-rw-r--r--chromium/tools/gn/ninja_binary_target_writer.cc1007
-rw-r--r--chromium/tools/gn/ninja_binary_target_writer.h148
-rw-r--r--chromium/tools/gn/ninja_binary_target_writer_unittest.cc736
-rw-r--r--chromium/tools/gn/ninja_build_writer.cc406
-rw-r--r--chromium/tools/gn/ninja_build_writer.h68
-rw-r--r--chromium/tools/gn/ninja_build_writer_unittest.cc119
-rw-r--r--chromium/tools/gn/ninja_bundle_data_target_writer.cc19
-rw-r--r--chromium/tools/gn/ninja_bundle_data_target_writer.h23
-rw-r--r--chromium/tools/gn/ninja_copy_target_writer.cc119
-rw-r--r--chromium/tools/gn/ninja_copy_target_writer.h29
-rw-r--r--chromium/tools/gn/ninja_copy_target_writer_unittest.cc98
-rw-r--r--chromium/tools/gn/ninja_create_bundle_target_writer.cc119
-rw-r--r--chromium/tools/gn/ninja_create_bundle_target_writer.h23
-rw-r--r--chromium/tools/gn/ninja_create_bundle_target_writer_unittest.cc173
-rw-r--r--chromium/tools/gn/ninja_group_target_writer.cc34
-rw-r--r--chromium/tools/gn/ninja_group_target_writer.h23
-rw-r--r--chromium/tools/gn/ninja_group_target_writer_unittest.cc52
-rw-r--r--chromium/tools/gn/ninja_target_writer.cc289
-rw-r--r--chromium/tools/gn/ninja_target_writer.h63
-rw-r--r--chromium/tools/gn/ninja_target_writer_unittest.cc142
-rw-r--r--chromium/tools/gn/ninja_toolchain_writer.cc142
-rw-r--r--chromium/tools/gn/ninja_toolchain_writer.h61
-rw-r--r--chromium/tools/gn/ninja_toolchain_writer_unittest.cc31
-rw-r--r--chromium/tools/gn/ninja_utils.cc27
-rw-r--r--chromium/tools/gn/ninja_utils.h25
-rw-r--r--chromium/tools/gn/ninja_writer.cc114
-rw-r--r--chromium/tools/gn/ninja_writer.h53
-rw-r--r--chromium/tools/gn/operators.cc587
-rw-r--r--chromium/tools/gn/operators.h25
-rw-r--r--chromium/tools/gn/operators_unittest.cc211
-rw-r--r--chromium/tools/gn/ordered_set.h71
-rw-r--r--chromium/tools/gn/output_file.cc51
-rw-r--r--chromium/tools/gn/output_file.h66
-rw-r--r--chromium/tools/gn/parse_tree.cc840
-rw-r--r--chromium/tools/gn/parse_tree.h520
-rw-r--r--chromium/tools/gn/parse_tree_unittest.cc254
-rw-r--r--chromium/tools/gn/parser.cc770
-rw-r--r--chromium/tools/gn/parser.h139
-rw-r--r--chromium/tools/gn/parser_unittest.cc711
-rw-r--r--chromium/tools/gn/path_output.cc172
-rw-r--r--chromium/tools/gn/path_output.h91
-rw-r--r--chromium/tools/gn/path_output_unittest.cc284
-rw-r--r--chromium/tools/gn/pattern.cc193
-rw-r--r--chromium/tools/gn/pattern.h92
-rw-r--r--chromium/tools/gn/pattern_unittest.cc64
-rw-r--r--chromium/tools/gn/runtime_deps.cc294
-rw-r--r--chromium/tools/gn/runtime_deps.h28
-rw-r--r--chromium/tools/gn/runtime_deps_unittest.cc284
-rw-r--r--chromium/tools/gn/scheduler.cc237
-rw-r--r--chromium/tools/gn/scheduler.h132
-rw-r--r--chromium/tools/gn/scope.cc521
-rw-r--r--chromium/tools/gn/scope.h364
-rw-r--r--chromium/tools/gn/scope_per_file_provider.cc116
-rw-r--r--chromium/tools/gn/scope_per_file_provider.h51
-rw-r--r--chromium/tools/gn/scope_per_file_provider_unittest.cc55
-rw-r--r--chromium/tools/gn/scope_unittest.cc295
-rw-r--r--chromium/tools/gn/settings.cc35
-rw-r--r--chromium/tools/gn/settings.h116
-rw-r--r--chromium/tools/gn/setup.cc731
-rw-r--r--chromium/tools/gn/setup.h167
-rw-r--r--chromium/tools/gn/source_dir.cc220
-rw-r--r--chromium/tools/gn/source_dir.h134
-rw-r--r--chromium/tools/gn/source_dir_unittest.cc187
-rw-r--r--chromium/tools/gn/source_file.cc85
-rw-r--r--chromium/tools/gn/source_file.h108
-rw-r--r--chromium/tools/gn/source_file_type.cc33
-rw-r--r--chromium/tools/gn/source_file_type.h31
-rw-r--r--chromium/tools/gn/source_file_unittest.cc19
-rw-r--r--chromium/tools/gn/standard_out.cc285
-rw-r--r--chromium/tools/gn/standard_out.h35
-rw-r--r--chromium/tools/gn/string_utils.cc345
-rw-r--r--chromium/tools/gn/string_utils.h53
-rw-r--r--chromium/tools/gn/string_utils_unittest.cc155
-rw-r--r--chromium/tools/gn/substitution_list.cc72
-rw-r--r--chromium/tools/gn/substitution_list.h47
-rw-r--r--chromium/tools/gn/substitution_pattern.cc152
-rw-r--r--chromium/tools/gn/substitution_pattern.h78
-rw-r--r--chromium/tools/gn/substitution_pattern_unittest.cc49
-rw-r--r--chromium/tools/gn/substitution_type.cc230
-rw-r--r--chromium/tools/gn/substitution_type.h129
-rw-r--r--chromium/tools/gn/substitution_writer.cc563
-rw-r--r--chromium/tools/gn/substitution_writer.h229
-rw-r--r--chromium/tools/gn/substitution_writer_unittest.cc281
-rw-r--r--chromium/tools/gn/switches.cc242
-rw-r--r--chromium/tools/gn/switches.h85
-rw-r--r--chromium/tools/gn/target.cc788
-rw-r--r--chromium/tools/gn/target.h395
-rw-r--r--chromium/tools/gn/target_generator.cc408
-rw-r--r--chromium/tools/gn/target_generator.h85
-rw-r--r--chromium/tools/gn/target_unittest.cc885
-rw-r--r--chromium/tools/gn/template.cc125
-rw-r--r--chromium/tools/gn/template.h56
-rw-r--r--chromium/tools/gn/template_unittest.cc93
-rw-r--r--chromium/tools/gn/test_with_scope.cc192
-rw-r--r--chromium/tools/gn/test_with_scope.h113
-rw-r--r--chromium/tools/gn/token.cc28
-rw-r--r--chromium/tools/gn/token.h86
-rw-r--r--chromium/tools/gn/tokenizer.cc410
-rw-r--r--chromium/tools/gn/tokenizer.h90
-rw-r--r--chromium/tools/gn/tokenizer_unittest.cc228
-rw-r--r--chromium/tools/gn/tool.cc29
-rw-r--r--chromium/tools/gn/tool.h211
-rw-r--r--chromium/tools/gn/toolchain.cc174
-rw-r--r--chromium/tools/gn/toolchain.h136
-rw-r--r--chromium/tools/gn/trace.cc328
-rw-r--r--chromium/tools/gn/trace.h101
-rw-r--r--chromium/tools/gn/tutorial/hello.cc17
-rw-r--r--chromium/tools/gn/tutorial/hello.h14
-rw-r--r--chromium/tools/gn/tutorial/hello_world.cc10
-rw-r--r--chromium/tools/gn/tutorial/say_hello.cc14
-rw-r--r--chromium/tools/gn/unique_vector.h178
-rw-r--r--chromium/tools/gn/unique_vector_unittest.cc45
-rw-r--r--chromium/tools/gn/value.cc223
-rw-r--r--chromium/tools/gn/value.h133
-rw-r--r--chromium/tools/gn/value_extractors.cc257
-rw-r--r--chromium/tools/gn/value_extractors.h89
-rw-r--r--chromium/tools/gn/value_unittest.cc43
-rw-r--r--chromium/tools/gn/variables.cc1660
-rw-r--r--chromium/tools/gn/variables.h269
-rw-r--r--chromium/tools/gn/visibility.cc110
-rw-r--r--chromium/tools/gn/visibility.h60
-rw-r--r--chromium/tools/gn/visibility_unittest.cc52
-rw-r--r--chromium/tools/gn/visual_studio_utils.cc117
-rw-r--r--chromium/tools/gn/visual_studio_utils.h37
-rw-r--r--chromium/tools/gn/visual_studio_utils_unittest.cc94
-rw-r--r--chromium/tools/gn/visual_studio_writer.cc805
-rw-r--r--chromium/tools/gn/visual_studio_writer.h137
-rw-r--r--chromium/tools/gn/visual_studio_writer_unittest.cc148
-rw-r--r--chromium/tools/gn/xml_element_writer.cc83
-rw-r--r--chromium/tools/gn/xml_element_writer.h123
-rw-r--r--chromium/tools/gn/xml_element_writer_unittest.cc86
-rw-r--r--chromium/tools/grit/OWNERS1
-rw-r--r--chromium/tools/grit/grit_rule.gni46
-rw-r--r--chromium/tools/gritsettings/resource_ids130
-rw-r--r--chromium/tools/gritsettings/translation_expectations.pyl7
-rw-r--r--chromium/tools/gyp/AUTHORS1
-rw-r--r--chromium/tools/gyp/pylib/gyp/MSVSUtil.py2
-rw-r--r--chromium/tools/gyp/pylib/gyp/MSVSVersion.py24
-rw-r--r--chromium/tools/gyp/pylib/gyp/common.py11
-rw-r--r--chromium/tools/gyp/pylib/gyp/generator/cmake.py51
-rw-r--r--chromium/tools/gyp/pylib/gyp/generator/ninja.py82
-rw-r--r--chromium/tools/gyp/pylib/gyp/input.py13
-rwxr-xr-xchromium/tools/gyp/pylib/gyp/mac_tool.py171
-rw-r--r--chromium/tools/gyp/pylib/gyp/msvs_emulation.py7
-rw-r--r--chromium/tools/gyp/pylib/gyp/xcode_emulation.py52
-rw-r--r--chromium/tools/gyp/pylib/gyp/xcode_ninja.py13
-rw-r--r--chromium/tools/gyp/test/determinism/determinism.gyp59
-rw-r--r--chromium/tools/gyp/test/determinism/empty-targets.gyp32
-rw-r--r--chromium/tools/gyp/test/determinism/needed-variables.gyp33
-rw-r--r--chromium/tools/gyp/test/determinism/solibs.gyp32
-rw-r--r--chromium/tools/gyp/test/ios/app-bundle/test-archs.gyp1
-rw-r--r--chromium/tools/gyp/test/ios/app-bundle/test-device.gyp46
-rw-r--r--chromium/tools/gyp/test/ios/app-bundle/test.gyp1
-rw-r--r--chromium/tools/gyp/test/ios/extension/extension.gyp10
-rw-r--r--chromium/tools/gyp/test/ios/framework/framework.gyp39
-rw-r--r--chromium/tools/gyp/test/linux/target-rpath/test.gyp47
-rw-r--r--chromium/tools/gyp/test/xcode-ninja/list_excluded/hello_exclude.gyp19
-rwxr-xr-xchromium/tools/gyp/tools/pretty_gyp.py27
-rw-r--r--chromium/tools/imagediff/BUILD.gn21
-rw-r--r--chromium/tools/imagediff/DEPS4
-rw-r--r--chromium/tools/imagediff/image_diff.cc453
-rw-r--r--chromium/tools/imagediff/image_diff_png.cc643
-rw-r--r--chromium/tools/imagediff/image_diff_png.h37
-rwxr-xr-xchromium/tools/include_tracer.py202
-rw-r--r--chromium/tools/ipc_fuzzer/DEPS4
-rw-r--r--chromium/tools/ipc_fuzzer/OWNERS3
-rw-r--r--chromium/tools/ipc_fuzzer/fuzzer/DEPS3
-rw-r--r--chromium/tools/ipc_fuzzer/fuzzer/fuzzer.cc2051
-rw-r--r--chromium/tools/ipc_fuzzer/fuzzer/fuzzer.h88
-rw-r--r--chromium/tools/ipc_fuzzer/fuzzer/fuzzer_main.cc248
-rw-r--r--chromium/tools/ipc_fuzzer/fuzzer/generator.cc119
-rw-r--r--chromium/tools/ipc_fuzzer/fuzzer/generator.h43
-rw-r--r--chromium/tools/ipc_fuzzer/fuzzer/mutator.cc120
-rw-r--r--chromium/tools/ipc_fuzzer/fuzzer/mutator.h47
-rw-r--r--chromium/tools/ipc_fuzzer/fuzzer/rand_util.cc18
-rw-r--r--chromium/tools/ipc_fuzzer/fuzzer/rand_util.h46
-rw-r--r--chromium/tools/ipc_fuzzer/message_dump/message_dump.cc65
-rw-r--r--chromium/tools/ipc_fuzzer/message_lib/DEPS17
-rw-r--r--chromium/tools/ipc_fuzzer/message_lib/all_message_null_macros.h9
-rw-r--r--chromium/tools/ipc_fuzzer/message_lib/all_messages.h28
-rw-r--r--chromium/tools/ipc_fuzzer/message_lib/message_cracker.h34
-rw-r--r--chromium/tools/ipc_fuzzer/message_lib/message_file.h28
-rw-r--r--chromium/tools/ipc_fuzzer/message_lib/message_file_format.h63
-rw-r--r--chromium/tools/ipc_fuzzer/message_lib/message_file_reader.cc235
-rw-r--r--chromium/tools/ipc_fuzzer/message_lib/message_file_writer.cc168
-rw-r--r--chromium/tools/ipc_fuzzer/message_lib/message_lib.gyp1
-rw-r--r--chromium/tools/ipc_fuzzer/message_lib/message_names.cc37
-rw-r--r--chromium/tools/ipc_fuzzer/message_lib/message_names.h61
-rw-r--r--chromium/tools/ipc_fuzzer/message_replay/DEPS5
-rw-r--r--chromium/tools/ipc_fuzzer/message_replay/replay.cc23
-rw-r--r--chromium/tools/ipc_fuzzer/message_replay/replay_process.cc172
-rw-r--r--chromium/tools/ipc_fuzzer/message_replay/replay_process.h65
-rw-r--r--chromium/tools/ipc_fuzzer/message_tools/DEPS5
-rw-r--r--chromium/tools/ipc_fuzzer/message_tools/message_list.cc183
-rw-r--r--chromium/tools/ipc_fuzzer/message_tools/message_util.cc179
-rwxr-xr-xchromium/tools/ipc_fuzzer/scripts/cf_package_builder.py79
-rwxr-xr-xchromium/tools/ipc_fuzzer/scripts/ipc_fuzzer_gen.py63
-rwxr-xr-xchromium/tools/ipc_fuzzer/scripts/ipc_fuzzer_mut.py94
-rwxr-xr-xchromium/tools/ipc_fuzzer/scripts/play_testcase.py120
-rwxr-xr-xchromium/tools/ipc_fuzzer/scripts/remove_close_messages.py75
-rwxr-xr-xchromium/tools/ipc_fuzzer/scripts/utils.py102
-rwxr-xr-xchromium/tools/ipc_messages_log.py168
-rwxr-xr-xchromium/tools/isolate_driver.py313
-rw-r--r--chromium/tools/json_schema_compiler/cc_generator.py131
-rw-r--r--chromium/tools/json_schema_compiler/code.py4
-rwxr-xr-xchromium/tools/json_schema_compiler/compiler.py4
-rw-r--r--chromium/tools/json_schema_compiler/cpp_type_generator.py19
-rwxr-xr-xchromium/tools/json_schema_compiler/cpp_type_generator_test.py19
-rw-r--r--chromium/tools/json_schema_compiler/h_generator.py16
-rwxr-xr-xchromium/tools/json_schema_compiler/idl_schema.py9
-rwxr-xr-xchromium/tools/json_schema_compiler/idl_schema_test.py6
-rw-r--r--chromium/tools/json_schema_compiler/js_externs_generator.py107
-rwxr-xr-xchromium/tools/json_schema_compiler/js_externs_generator_test.py17
-rw-r--r--chromium/tools/json_schema_compiler/js_interface_generator.py49
-rwxr-xr-xchromium/tools/json_schema_compiler/js_interface_generator_test.py39
-rw-r--r--chromium/tools/json_schema_compiler/js_util.py42
-rw-r--r--chromium/tools/json_schema_compiler/json_schema.py10
-rw-r--r--chromium/tools/json_schema_compiler/model.py1
-rw-r--r--chromium/tools/json_schema_compiler/test/json_schema_compiler_tests.gyp2
-rw-r--r--chromium/tools/json_schema_compiler/util.cc21
-rw-r--r--chromium/tools/json_schema_compiler/util.h107
-rw-r--r--chromium/tools/json_schema_compiler/util_cc_helper.py2
-rw-r--r--chromium/tools/json_to_struct/PRESUBMIT.py20
-rw-r--r--chromium/tools/json_to_struct/element_generator.py158
-rwxr-xr-xchromium/tools/json_to_struct/element_generator_test.py238
-rw-r--r--chromium/tools/json_to_struct/json_to_struct.gni64
-rwxr-xr-xchromium/tools/json_to_struct/json_to_struct.py243
-rw-r--r--chromium/tools/json_to_struct/struct_generator.py53
-rwxr-xr-xchromium/tools/json_to_struct/struct_generator_test.py85
-rwxr-xr-xchromium/tools/licenses.py576
-rw-r--r--chromium/tools/linux/OWNERS2
-rw-r--r--chromium/tools/linux/PRESUBMIT.py45
-rwxr-xr-xchromium/tools/linux/dump-static-initializers.py240
-rwxr-xr-xchromium/tools/linux/procfs.py747
-rw-r--r--chromium/tools/luci-go/OWNERS3
-rw-r--r--chromium/tools/luci-go/README.md10
-rw-r--r--chromium/tools/luci-go/linux64/isolate.sha11
-rw-r--r--chromium/tools/luci-go/mac64/isolate.sha11
-rw-r--r--chromium/tools/luci-go/win64/isolate.exe.sha11
-rw-r--r--chromium/tools/mac/OWNERS2
-rwxr-xr-xchromium/tools/mac/dump-static-initializers.py69
-rwxr-xr-xchromium/tools/mac/symbolicate_crash.py504
-rw-r--r--chromium/tools/mb/OWNERS4
-rw-r--r--chromium/tools/mb/PRESUBMIT.py40
-rw-r--r--chromium/tools/mb/README.md22
-rw-r--r--chromium/tools/mb/docs/README.md4
-rw-r--r--chromium/tools/mb/docs/design_spec.md439
-rw-r--r--chromium/tools/mb/docs/user_guide.md288
-rwxr-xr-xchromium/tools/mb/mb8
-rwxr-xr-xchromium/tools/mb/mb.bat6
-rwxr-xr-xchromium/tools/mb/mb.py1360
-rw-r--r--chromium/tools/mb/mb_config.pyl1842
-rwxr-xr-xchromium/tools/mb/mb_unittest.py450
-rw-r--r--chromium/tools/md_browser/OWNERS2
-rw-r--r--chromium/tools/md_browser/README.md27
-rw-r--r--chromium/tools/md_browser/__init__.py0
-rw-r--r--chromium/tools/md_browser/doc.css298
-rw-r--r--chromium/tools/md_browser/footer.html8
-rw-r--r--chromium/tools/md_browser/gitiles_ext_blocks.py84
-rw-r--r--chromium/tools/md_browser/header.html8
-rw-r--r--chromium/tools/md_browser/md_browser.py162
-rw-r--r--chromium/tools/measure_page_load_time/ff_ext/chrome.manifest2
-rw-r--r--chromium/tools/measure_page_load_time/ff_ext/content/firefoxOverlay.xul7
-rw-r--r--chromium/tools/measure_page_load_time/ff_ext/content/measure_page_load_time.js209
-rw-r--r--chromium/tools/measure_page_load_time/ff_ext/install.rdf17
-rw-r--r--chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.cpp72
-rw-r--r--chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.def9
-rw-r--r--chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.idl40
-rw-r--r--chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.rc121
-rw-r--r--chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.rgs29
-rw-r--r--chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.vcproj320
-rw-r--r--chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTimeBHO.cpp292
-rw-r--r--chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTimeBHO.h87
-rw-r--r--chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTimeBHO.rgs27
-rw-r--r--chromium/tools/measure_page_load_time/ie_bho/resource.h18
-rw-r--r--chromium/tools/memory/OWNERS1
-rwxr-xr-xchromium/tools/multi_process_rss.py143
-rwxr-xr-xchromium/tools/nocompile_driver.py486
-rwxr-xr-xchromium/tools/omahaproxy.py90
-rw-r--r--chromium/tools/oopif/OWNERS3
-rw-r--r--chromium/tools/oopif/iframe_server.py224
-rw-r--r--chromium/tools/origin_trials/OWNERS3
-rw-r--r--chromium/tools/origin_trials/PRESUBMIT.py25
-rw-r--r--chromium/tools/origin_trials/eftest.key1
-rwxr-xr-xchromium/tools/origin_trials/generate_token.py153
-rwxr-xr-xchromium/tools/origin_trials/generate_token_unittest.py68
-rw-r--r--chromium/tools/origin_trials/third_party/ed25519/LICENSE1
-rw-r--r--chromium/tools/origin_trials/third_party/ed25519/OWNERS3
-rw-r--r--chromium/tools/origin_trials/third_party/ed25519/README.chromium21
-rw-r--r--chromium/tools/origin_trials/third_party/ed25519/ed25519.py109
-rw-r--r--chromium/tools/page_cycler/acid3/LICENSE1
-rw-r--r--chromium/tools/page_cycler/acid3/README.chromium14
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.css8
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.html1
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.pngbin0 -> 260 bytes
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.txt1
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.xml6
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/favicon.ico0
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/font.svg1
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/font.ttfbin0 -> 12480 bytes
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/head.js139
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/index.html3493
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/reference.html21
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/support-b.png1
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/support-c.pngbin0 -> 2312 bytes
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/svg.svg3
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/xhtml.1.xhtml11
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/xhtml.2.xhtml11
-rw-r--r--chromium/tools/page_cycler/acid3/acid3.acidtests.org/xhtml.3.xhtml11
-rw-r--r--chromium/tools/page_cycler/acid3/pages.js31
-rw-r--r--chromium/tools/page_cycler/acid3/start.html7
-rw-r--r--chromium/tools/page_cycler/common/head.js128
-rw-r--r--chromium/tools/page_cycler/common/report.html183
-rw-r--r--chromium/tools/page_cycler/common/start.js82
-rw-r--r--chromium/tools/page_cycler/sample/page1/index.html9
-rw-r--r--chromium/tools/page_cycler/sample/page2/index.html9
-rw-r--r--chromium/tools/page_cycler/sample/page3/index.html9
-rw-r--r--chromium/tools/page_cycler/sample/page4/index.html9
-rw-r--r--chromium/tools/page_cycler/sample/pages.js10
-rw-r--r--chromium/tools/page_cycler/sample/start.html7
-rw-r--r--chromium/tools/page_cycler/startup_test_common/README4
-rw-r--r--chromium/tools/page_cycler/startup_test_common/blank.html5
-rw-r--r--chromium/tools/page_cycler/startup_test_common/head.js7
-rw-r--r--chromium/tools/perf/chrome_telemetry_build/telemetry_binary_manager.isolate10
-rw-r--r--chromium/tools/perf/chrome_telemetry_build/telemetry_chrome_test.isolate12
-rw-r--r--chromium/tools/perf/perf.isolate1
-rw-r--r--chromium/tools/polymer/OWNERS3
-rwxr-xr-xchromium/tools/polymer/generate_compiled_resources_gyp.py83
-rwxr-xr-xchromium/tools/polymer/polymer_grdp_to_txt.py34
-rwxr-xr-xchromium/tools/polymer/txt_to_polymer_grdp.py86
-rwxr-xr-xchromium/tools/prepare-bisect-perf-regression.py70
-rw-r--r--chromium/tools/python/google/__init__.py1
-rwxr-xr-xchromium/tools/python/google/gethash_timer.py149
-rw-r--r--chromium/tools/python/google/httpd_config/httpd.conf734
-rw-r--r--chromium/tools/python/google/httpd_config/httpd2.conf280
-rw-r--r--chromium/tools/python/google/httpd_config/httpd2.pem110
-rw-r--r--chromium/tools/python/google/httpd_config/httpd2_linux.conf144
-rw-r--r--chromium/tools/python/google/httpd_config/httpd2_mac.conf229
-rw-r--r--chromium/tools/python/google/httpd_config/mime.types599
-rw-r--r--chromium/tools/python/google/httpd_config/root_ca_cert.pem102
-rwxr-xr-xchromium/tools/python/google/httpd_utils.py200
-rw-r--r--chromium/tools/python/google/logging_utils.py82
-rw-r--r--chromium/tools/python/google/path_utils.py84
-rw-r--r--chromium/tools/python/google/platform_utils.py23
-rw-r--r--chromium/tools/python/google/platform_utils_linux.py148
-rw-r--r--chromium/tools/python/google/platform_utils_mac.py145
-rw-r--r--chromium/tools/python/google/platform_utils_win.py194
-rw-r--r--chromium/tools/python/google/process_utils.py221
-rw-r--r--chromium/tools/real_world_impact/nsfw_urls.py79
-rwxr-xr-xchromium/tools/real_world_impact/real_world_impact.py554
-rwxr-xr-xchromium/tools/remove_stale_pyc_files.py39
-rwxr-xr-xchromium/tools/resources/find_unused_resources.py204
-rwxr-xr-xchromium/tools/resources/find_used_resources.py89
-rw-r--r--chromium/tools/resources/ico_tools.py202
-rwxr-xr-xchromium/tools/resources/list_resources_removed_by_repack.py99
-rwxr-xr-xchromium/tools/resources/list_unused_grit_header.py233
-rwxr-xr-xchromium/tools/resources/optimize-ico-files.py67
-rwxr-xr-xchromium/tools/resources/optimize-png-files.sh549
-rwxr-xr-xchromium/tools/roll_angle.py412
-rwxr-xr-xchromium/tools/roll_webgl_conformance.py389
-rwxr-xr-xchromium/tools/roll_webrtc.py442
-rwxr-xr-xchromium/tools/run-bisect-manual-test.py173
-rwxr-xr-xchromium/tools/run-bisect-perf-regression.py886
-rw-r--r--chromium/tools/run-perf-test.cfg77
-rwxr-xr-xchromium/tools/safely-roll-deps.py164
-rw-r--r--chromium/tools/screenshot_testing/update_golden_screenshots.py99
-rw-r--r--chromium/tools/security/OWNERS2
-rwxr-xr-xchromium/tools/security/check_message_owners.py53
-rw-r--r--chromium/tools/set_default_handler/DEPS4
-rw-r--r--chromium/tools/set_default_handler/set_default_handler_main.cc65
-rwxr-xr-xchromium/tools/site_compare/command_line.py802
-rw-r--r--chromium/tools/site_compare/commands/__init__.py0
-rw-r--r--chromium/tools/site_compare/commands/compare2.py170
-rw-r--r--chromium/tools/site_compare/commands/maskmaker.py272
-rw-r--r--chromium/tools/site_compare/commands/measure.py52
-rw-r--r--chromium/tools/site_compare/commands/scrape.py59
-rw-r--r--chromium/tools/site_compare/commands/timeload.py144
-rw-r--r--chromium/tools/site_compare/drivers/__init__.py13
-rw-r--r--chromium/tools/site_compare/drivers/win32/__init__.py0
-rwxr-xr-xchromium/tools/site_compare/drivers/win32/keyboard.py201
-rwxr-xr-xchromium/tools/site_compare/drivers/win32/mouse.py222
-rwxr-xr-xchromium/tools/site_compare/drivers/win32/windowing.py366
-rw-r--r--chromium/tools/site_compare/operators/__init__.py23
-rw-r--r--chromium/tools/site_compare/operators/equals.py37
-rw-r--r--chromium/tools/site_compare/operators/equals_with_mask.py57
-rwxr-xr-xchromium/tools/site_compare/scrapers/__init__.py33
-rwxr-xr-xchromium/tools/site_compare/scrapers/chrome/__init__.py36
-rw-r--r--chromium/tools/site_compare/scrapers/chrome/chrome011010.py42
-rw-r--r--chromium/tools/site_compare/scrapers/chrome/chrome01970.py42
-rwxr-xr-xchromium/tools/site_compare/scrapers/chrome/chromebase.py199
-rwxr-xr-xchromium/tools/site_compare/scrapers/firefox/__init__.py28
-rwxr-xr-xchromium/tools/site_compare/scrapers/firefox/firefox2.py249
-rwxr-xr-xchromium/tools/site_compare/scrapers/ie/__init__.py28
-rwxr-xr-xchromium/tools/site_compare/scrapers/ie/ie7.py210
-rwxr-xr-xchromium/tools/site_compare/site_compare.py176
-rw-r--r--chromium/tools/site_compare/utils/__init__.py0
-rw-r--r--chromium/tools/site_compare/utils/browser_iterate.py199
-rwxr-xr-xchromium/tools/sort-headers.py187
-rwxr-xr-xchromium/tools/sort_sources.py187
-rw-r--r--chromium/tools/stats_viewer/OpenDialog.Designer.cs88
-rw-r--r--chromium/tools/stats_viewer/OpenDialog.cs45
-rw-r--r--chromium/tools/stats_viewer/OpenDialog.resx120
-rw-r--r--chromium/tools/stats_viewer/Properties/AssemblyInfo.cs33
-rw-r--r--chromium/tools/stats_viewer/Properties/Resources.Designer.cs71
-rw-r--r--chromium/tools/stats_viewer/Properties/Resources.resx117
-rw-r--r--chromium/tools/stats_viewer/Properties/Settings.Designer.cs30
-rw-r--r--chromium/tools/stats_viewer/Properties/Settings.settings7
-rw-r--r--chromium/tools/stats_viewer/Resources.Designer.cs77
-rw-r--r--chromium/tools/stats_viewer/Resources.resx127
-rw-r--r--chromium/tools/stats_viewer/Resources/kitten.pngbin0 -> 36163 bytes
-rw-r--r--chromium/tools/stats_viewer/Resources/kittenbackground.pngbin0 -> 1275 bytes
-rw-r--r--chromium/tools/stats_viewer/program.cs23
-rw-r--r--chromium/tools/stats_viewer/stats_table.cs546
-rw-r--r--chromium/tools/stats_viewer/stats_viewer.Designer.cs392
-rw-r--r--chromium/tools/stats_viewer/stats_viewer.cs510
-rw-r--r--chromium/tools/stats_viewer/stats_viewer.csproj107
-rw-r--r--chromium/tools/stats_viewer/stats_viewer.resx1129
-rw-r--r--chromium/tools/stats_viewer/win32.cs50
-rw-r--r--chromium/tools/strict_enum_value_checker/OWNERS3
-rw-r--r--chromium/tools/strict_enum_value_checker/changed_file_1.h30
-rw-r--r--chromium/tools/strict_enum_value_checker/changed_file_10.h23
-rw-r--r--chromium/tools/strict_enum_value_checker/changed_file_2.h28
-rw-r--r--chromium/tools/strict_enum_value_checker/changed_file_3.h29
-rw-r--r--chromium/tools/strict_enum_value_checker/changed_file_4.h26
-rw-r--r--chromium/tools/strict_enum_value_checker/changed_file_5.h27
-rw-r--r--chromium/tools/strict_enum_value_checker/changed_file_6.h29
-rw-r--r--chromium/tools/strict_enum_value_checker/changed_file_7.h33
-rw-r--r--chromium/tools/strict_enum_value_checker/changed_file_8.h30
-rw-r--r--chromium/tools/strict_enum_value_checker/changed_file_9.h32
-rw-r--r--chromium/tools/strict_enum_value_checker/mock_enum.h29
-rw-r--r--chromium/tools/strict_enum_value_checker/strict_enum_value_checker.py284
-rwxr-xr-xchromium/tools/strict_enum_value_checker/strict_enum_value_checker_test.py235
-rw-r--r--chromium/tools/symsrc/COPYING-pefile27
-rw-r--r--chromium/tools/symsrc/README.chromium14
-rwxr-xr-xchromium/tools/symsrc/img_fingerprint.py34
-rwxr-xr-xchromium/tools/symsrc/pdb_fingerprint_from_img.py64
-rw-r--r--chromium/tools/symsrc/pefile.py3729
-rwxr-xr-xchromium/tools/symsrc/source_index.py550
-rwxr-xr-xchromium/tools/tcmalloc/print-live-objects.py91
-rw-r--r--chromium/tools/telemetry/telemetry.gyp16
-rw-r--r--chromium/tools/telemetry/telemetry.isolate18
-rw-r--r--chromium/tools/trace/trace.html287
-rw-r--r--chromium/tools/trace/trace_data.js1050
-rw-r--r--chromium/tools/traceline/svgui/README12
-rw-r--r--chromium/tools/traceline/svgui/startup-release.json178
-rw-r--r--chromium/tools/traceline/svgui/traceline.css73
-rw-r--r--chromium/tools/traceline/svgui/traceline.js693
-rw-r--r--chromium/tools/traceline/svgui/traceline.xml11
-rw-r--r--chromium/tools/traceline/traceline/Makefile30
-rw-r--r--chromium/tools/traceline/traceline/README21
-rw-r--r--chromium/tools/traceline/traceline/assembler.h578
-rw-r--r--chromium/tools/traceline/traceline/assembler_unittest.cc83
-rwxr-xr-xchromium/tools/traceline/traceline/assembler_unittest.sh7
-rw-r--r--chromium/tools/traceline/traceline/assembler_unittest.sh.expected54
-rwxr-xr-xchromium/tools/traceline/traceline/dump_syscalls_idarub.rb32
-rw-r--r--chromium/tools/traceline/traceline/logging.h28
-rw-r--r--chromium/tools/traceline/traceline/main.cc1339
-rw-r--r--chromium/tools/traceline/traceline/rdtsc.h43
-rw-r--r--chromium/tools/traceline/traceline/scripts/__init__.py0
-rwxr-xr-xchromium/tools/traceline/traceline/scripts/alloc.py27
-rw-r--r--chromium/tools/traceline/traceline/scripts/crit_sec.js87
-rwxr-xr-xchromium/tools/traceline/traceline/scripts/crit_sec.py57
-rwxr-xr-xchromium/tools/traceline/traceline/scripts/filter_short.py34
-rwxr-xr-xchromium/tools/traceline/traceline/scripts/filter_split.sh14
-rw-r--r--chromium/tools/traceline/traceline/scripts/heap.js69
-rwxr-xr-xchromium/tools/traceline/traceline/scripts/scstats.py32
-rwxr-xr-xchromium/tools/traceline/traceline/scripts/split.py31
-rw-r--r--chromium/tools/traceline/traceline/scripts/syscalls.py942
-rw-r--r--chromium/tools/traceline/traceline/sidestep/ia32_modrm_map.cc92
-rw-r--r--chromium/tools/traceline/traceline/sidestep/ia32_opcode_map.cc1159
-rw-r--r--chromium/tools/traceline/traceline/sidestep/mini_disassembler.cc416
-rw-r--r--chromium/tools/traceline/traceline/sidestep/mini_disassembler.h156
-rw-r--r--chromium/tools/traceline/traceline/sidestep/mini_disassembler_types.h197
-rw-r--r--chromium/tools/traceline/traceline/stubs.asm132
-rw-r--r--chromium/tools/traceline/traceline/sym_resolver.h167
-rw-r--r--chromium/tools/traceline/traceline/syscall_map.h2116
-rwxr-xr-xchromium/tools/unused-symbols-report.py171
-rw-r--r--chromium/tools/usb_gadget/BUILD.gn39
-rw-r--r--chromium/tools/usb_gadget/OWNERS1
-rw-r--r--chromium/tools/usb_gadget/__init__.py3
-rw-r--r--chromium/tools/usb_gadget/__main__.py67
-rw-r--r--chromium/tools/usb_gadget/composite_echo_gadget.py68
-rw-r--r--chromium/tools/usb_gadget/composite_gadget.py277
-rw-r--r--chromium/tools/usb_gadget/default_gadget.py41
-rw-r--r--chromium/tools/usb_gadget/echo_gadget.py239
-rwxr-xr-xchromium/tools/usb_gadget/echo_gadget_test.py22
-rw-r--r--chromium/tools/usb_gadget/gadget.py585
-rwxr-xr-xchromium/tools/usb_gadget/gadget_test.py352
-rw-r--r--chromium/tools/usb_gadget/hid_constants.py140
-rw-r--r--chromium/tools/usb_gadget/hid_descriptors.py159
-rwxr-xr-xchromium/tools/usb_gadget/hid_descriptors_test.py51
-rw-r--r--chromium/tools/usb_gadget/hid_echo_gadget.py105
-rw-r--r--chromium/tools/usb_gadget/hid_gadget.py432
-rwxr-xr-xchromium/tools/usb_gadget/hid_gadget_test.py258
-rw-r--r--chromium/tools/usb_gadget/keyboard_gadget.py202
-rwxr-xr-xchromium/tools/usb_gadget/keyboard_gadget_test.py64
-rw-r--r--chromium/tools/usb_gadget/linux_gadgetfs.py302
-rw-r--r--chromium/tools/usb_gadget/mouse_gadget.py158
-rwxr-xr-xchromium/tools/usb_gadget/mouse_gadget_test.py50
-rw-r--r--chromium/tools/usb_gadget/msos20_descriptors.py95
-rwxr-xr-xchromium/tools/usb_gadget/package.py95
-rw-r--r--chromium/tools/usb_gadget/server.py170
-rw-r--r--chromium/tools/usb_gadget/usb_constants.py191
-rw-r--r--chromium/tools/usb_gadget/usb_descriptors.py454
-rwxr-xr-xchromium/tools/usb_gadget/usb_descriptors_test.py214
-rw-r--r--chromium/tools/usb_gadget/usb_gadget.inf64
-rw-r--r--chromium/tools/valgrind/OWNERS3
-rw-r--r--chromium/tools/valgrind/README10
-rwxr-xr-xchromium/tools/valgrind/asan/asan_symbolize.py271
-rw-r--r--chromium/tools/valgrind/asan/third_party/README.chromium7
-rw-r--r--chromium/tools/valgrind/asan/third_party/__init__.py0
-rwxr-xr-xchromium/tools/valgrind/asan/third_party/asan_symbolize.py479
-rw-r--r--chromium/tools/valgrind/browser_wrapper_win.py49
-rwxr-xr-xchromium/tools/valgrind/chrome_tests.bat53
-rwxr-xr-xchromium/tools/valgrind/chrome_tests.py798
-rwxr-xr-xchromium/tools/valgrind/chrome_tests.sh90
-rw-r--r--chromium/tools/valgrind/common.py252
-rwxr-xr-xchromium/tools/valgrind/drmemory.bat5
-rw-r--r--chromium/tools/valgrind/drmemory/OWNERS1
-rw-r--r--chromium/tools/valgrind/drmemory/PRESUBMIT.py39
-rw-r--r--chromium/tools/valgrind/drmemory/suppressions.txt812
-rw-r--r--chromium/tools/valgrind/drmemory/suppressions_full.txt2236
-rwxr-xr-xchromium/tools/valgrind/drmemory_analyze.py202
-rwxr-xr-xchromium/tools/valgrind/fixed_suppressions.sh15
-rw-r--r--chromium/tools/valgrind/gdb_helper.py87
-rw-r--r--chromium/tools/valgrind/gtest_exclude/OWNERS1
-rw-r--r--chromium/tools/valgrind/gtest_exclude/ash_unittests.gtest-memcheck.txt14
-rw-r--r--chromium/tools/valgrind/gtest_exclude/aura_unittests.gtest.txt2
-rw-r--r--chromium/tools/valgrind/gtest_exclude/base_unittests.gtest-drmemory_win32.txt45
-rw-r--r--chromium/tools/valgrind/gtest_exclude/base_unittests.gtest.txt36
-rw-r--r--chromium/tools/valgrind/gtest_exclude/base_unittests.gtest_win-8.txt2
-rw-r--r--chromium/tools/valgrind/gtest_exclude/base_unittests.gtest_win32.txt11
-rw-r--r--chromium/tools/valgrind/gtest_exclude/blink_heap_unittests.gtest-drmemory_win32.txt5
-rw-r--r--chromium/tools/valgrind/gtest_exclude/blink_platform_unittests.gtest_win32.txt4
-rw-r--r--chromium/tools/valgrind/gtest_exclude/browser_tests.gtest-drmemory.txt159
-rw-r--r--chromium/tools/valgrind/gtest_exclude/browser_tests.gtest-drmemory_win32.txt211
-rw-r--r--chromium/tools/valgrind/gtest_exclude/browser_tests.gtest-memcheck.txt58
-rw-r--r--chromium/tools/valgrind/gtest_exclude/cast_unittests.gtest-drmemory.txt2
-rw-r--r--chromium/tools/valgrind/gtest_exclude/cc_unittests.gtest-drmemory_win32.txt54
-rw-r--r--chromium/tools/valgrind/gtest_exclude/chromeos_unittests.gtest.txt3
-rw-r--r--chromium/tools/valgrind/gtest_exclude/components_unittests.gtest-drmemory_win32.txt2
-rw-r--r--chromium/tools/valgrind/gtest_exclude/components_unittests.gtest.txt11
-rw-r--r--chromium/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory.txt66
-rw-r--r--chromium/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory_win32.txt69
-rw-r--r--chromium/tools/valgrind/gtest_exclude/content_unittests.gtest-drmemory_win32.txt13
-rw-r--r--chromium/tools/valgrind/gtest_exclude/content_unittests.gtest.txt16
-rw-r--r--chromium/tools/valgrind/gtest_exclude/extensions_unittests.gtest-drmemory.txt2
-rw-r--r--chromium/tools/valgrind/gtest_exclude/extensions_unittests.gtest-memcheck.txt4
-rw-r--r--chromium/tools/valgrind/gtest_exclude/gin_unittests.gtest-drmemory.txt2
-rw-r--r--chromium/tools/valgrind/gtest_exclude/installer_util_unittests.gtest-drmemory_win32.txt6
-rw-r--r--chromium/tools/valgrind/gtest_exclude/interactive_ui_tests.gtest.txt34
-rw-r--r--chromium/tools/valgrind/gtest_exclude/ipc_tests.gtest-drmemory_win32.txt2
-rw-r--r--chromium/tools/valgrind/gtest_exclude/ipc_tests.gtest.txt6
-rw-r--r--chromium/tools/valgrind/gtest_exclude/libphonenumber_unittests.gtest-drmemory_win32.txt3
-rw-r--r--chromium/tools/valgrind/gtest_exclude/media_unittests.gtest-drmemory.txt2
-rw-r--r--chromium/tools/valgrind/gtest_exclude/media_unittests.gtest-drmemory_win32.txt4
-rw-r--r--chromium/tools/valgrind/gtest_exclude/media_unittests.gtest.txt19
-rw-r--r--chromium/tools/valgrind/gtest_exclude/message_center_unittests.gtest.txt4
-rw-r--r--chromium/tools/valgrind/gtest_exclude/mojo_system_unittests.gtest-drmemory.txt6
-rw-r--r--chromium/tools/valgrind/gtest_exclude/net_unittests.gtest-drmemory_win32.txt36
-rw-r--r--chromium/tools/valgrind/gtest_exclude/net_unittests.gtest-memcheck.txt23
-rw-r--r--chromium/tools/valgrind/gtest_exclude/net_unittests.gtest.txt7
-rw-r--r--chromium/tools/valgrind/gtest_exclude/net_unittests.gtest_linux.txt7
-rw-r--r--chromium/tools/valgrind/gtest_exclude/printing_unittests.gtest-drmemory_win32.txt3
-rw-r--r--chromium/tools/valgrind/gtest_exclude/remoting_unittests.gtest-drmemory_win32.txt18
-rw-r--r--chromium/tools/valgrind/gtest_exclude/remoting_unittests.gtest.txt5
-rw-r--r--chromium/tools/valgrind/gtest_exclude/remoting_unittests.gtest_win-8.txt2
-rw-r--r--chromium/tools/valgrind/gtest_exclude/sandbox_linux_unittests.gtest.txt5
-rw-r--r--chromium/tools/valgrind/gtest_exclude/suppressions.txt39
-rw-r--r--chromium/tools/valgrind/gtest_exclude/sync_unit_tests.gtest-asan.txt2
-rw-r--r--chromium/tools/valgrind/gtest_exclude/ui_base_unittests.gtest-memcheck.txt8
-rw-r--r--chromium/tools/valgrind/gtest_exclude/ui_unittests.gtest-memcheck_linux.txt2
-rw-r--r--chromium/tools/valgrind/gtest_exclude/unit_tests.gtest-drmemory_win-xp.txt7
-rw-r--r--chromium/tools/valgrind/gtest_exclude/unit_tests.gtest-drmemory_win32.txt86
-rw-r--r--chromium/tools/valgrind/gtest_exclude/unit_tests.gtest-memcheck.txt37
-rw-r--r--chromium/tools/valgrind/gtest_exclude/unit_tests.gtest.txt23
-rw-r--r--chromium/tools/valgrind/gtest_exclude/unit_tests.gtest_linux.txt29
-rwxr-xr-xchromium/tools/valgrind/locate_valgrind.sh69
-rw-r--r--chromium/tools/valgrind/memcheck/OWNERS1
-rw-r--r--chromium/tools/valgrind/memcheck/PRESUBMIT.py78
-rw-r--r--chromium/tools/valgrind/memcheck/suppressions.txt3158
-rw-r--r--chromium/tools/valgrind/memcheck/suppressions_linux.txt143
-rwxr-xr-xchromium/tools/valgrind/memcheck_analyze.py640
-rwxr-xr-xchromium/tools/valgrind/regrind.sh138
-rw-r--r--chromium/tools/valgrind/reliability/url_list.txt11
-rwxr-xr-xchromium/tools/valgrind/scan-build.py246
-rwxr-xr-xchromium/tools/valgrind/suppressions.py945
-rwxr-xr-xchromium/tools/valgrind/test_suppressions.py195
-rw-r--r--chromium/tools/valgrind/tsan_v2/suppressions.txt2
-rwxr-xr-xchromium/tools/valgrind/unused_suppressions.py24
-rwxr-xr-xchromium/tools/valgrind/valgrind.sh106
-rw-r--r--chromium/tools/valgrind/valgrind_test.py846
-rwxr-xr-xchromium/tools/valgrind/waterfall.sh244
-rw-r--r--chromium/tools/variations/OWNERS2
-rwxr-xr-xchromium/tools/variations/fieldtrial_to_struct.py101
-rw-r--r--chromium/tools/variations/fieldtrial_to_struct_unittest.py82
-rw-r--r--chromium/tools/variations/fieldtrial_util.py101
-rw-r--r--chromium/tools/variations/fieldtrial_util_unittest.py121
-rw-r--r--chromium/tools/variations/unittest_data/expected_output.cc68
-rw-r--r--chromium/tools/variations/unittest_data/expected_output.h40
-rw-r--r--chromium/tools/variations/unittest_data/test_config.json19
-rw-r--r--chromium/tools/vim/OWNERS3
-rw-r--r--chromium/tools/vim/PRESUBMIT.py31
-rw-r--r--chromium/tools/vim/chromium.ycm_extra_conf.py362
-rw-r--r--chromium/tools/vim/clang-format.vim19
-rw-r--r--chromium/tools/vim/filetypes.vim9
-rw-r--r--chromium/tools/vim/mojom/ftdetect/mojomfiletype.vim28
-rw-r--r--chromium/tools/vim/mojom/syntax/mojom.vim48
-rw-r--r--chromium/tools/vim/ninja-build.vim119
-rw-r--r--chromium/tools/vim/ninja_output.py72
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug.sln26
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/AttachDialog.Designer.cs201
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/AttachDialog.cs263
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/AttachDialog.resx120
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/ChromeDebug.csproj211
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/ChromeDebug.vsct125
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/ChromeDebugPackage.cs107
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/GlobalSuppressions.cs16
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/Guids.cs16
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/Key.snkbin0 -> 596 bytes
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/LICENSE27
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/PkgCmdID.cs13
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/ProcessCategory.cs37
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/ProcessDetail.cs286
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/Properties/AssemblyInfo.cs40
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/Resources.Designer.cs63
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/Resources.resx140
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/Resources/Images.pngbin0 -> 989 bytes
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/Resources/Package.icobin0 -> 2998 bytes
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/Utility.cs85
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/VSPackage.resx151
-rw-r--r--chromium/tools/win/ChromeDebug/ChromeDebug/source.extension.vsixmanifest24
-rw-r--r--chromium/tools/win/ChromeDebug/LowLevel/Key.snkbin0 -> 596 bytes
-rw-r--r--chromium/tools/win/ChromeDebug/LowLevel/LowLevel.csproj64
-rw-r--r--chromium/tools/win/ChromeDebug/LowLevel/NativeMethods.cs72
-rw-r--r--chromium/tools/win/ChromeDebug/LowLevel/Properties/AssemblyInfo.cs40
-rw-r--r--chromium/tools/win/ChromeDebug/LowLevel/Types.cs219
-rw-r--r--chromium/tools/win/ChromeDebug/README.txt7
-rw-r--r--chromium/tools/win/DebugVisualizers/chrome.natvis266
-rw-r--r--chromium/tools/win/DebugVisualizers/skia.natvis38
-rw-r--r--chromium/tools/win/DebugVisualizers/webkit.natvis211
-rw-r--r--chromium/tools/win/OWNERS2
-rw-r--r--chromium/tools/win/RetrieveSymbols/ReadMe.txt37
-rw-r--r--chromium/tools/win/RetrieveSymbols/RetrieveSymbols.cpp162
-rw-r--r--chromium/tools/win/RetrieveSymbols/RetrieveSymbols.sln22
-rw-r--r--chromium/tools/win/RetrieveSymbols/RetrieveSymbols.vcxproj88
-rw-r--r--chromium/tools/win/RetrieveSymbols/RetrieveSymbols.vcxproj.filters9
-rwxr-xr-xchromium/tools/win/copy-installer.bat124
-rwxr-xr-xchromium/tools/win/link_limiter/build_link_limiter.py99
-rw-r--r--chromium/tools/win/link_limiter/limiter.cc337
-rw-r--r--chromium/tools/win/new_analyze_warnings/README20
-rwxr-xr-xchromium/tools/win/new_analyze_warnings/retrieve_latest_warnings.bat71
-rw-r--r--chromium/tools/win/new_analyze_warnings/retrieve_warnings.py100
-rw-r--r--chromium/tools/win/new_analyze_warnings/warning_diff.py165
-rw-r--r--chromium/tools/win/new_analyze_warnings/warnings_by_type.py160
-rw-r--r--chromium/tools/win/sizeviewer/README.chromium7
-rw-r--r--chromium/tools/win/sizeviewer/clike.js489
-rw-r--r--chromium/tools/win/sizeviewer/codemirror.js7922
-rw-r--r--chromium/tools/win/sizeviewer/favicon.pngbin0 -> 918 bytes
-rw-r--r--chromium/tools/win/sizeviewer/main.js120
-rw-r--r--chromium/tools/win/sizeviewer/sizeviewer.py199
-rw-r--r--chromium/tools/win/sizeviewer/template.html380
-rw-r--r--chromium/tools/win/static_initializers/static_initializers.cc178
-rw-r--r--chromium/tools/xdisplaycheck/BUILD.gn15
-rw-r--r--chromium/tools/xdisplaycheck/xdisplaycheck.cc119
-rw-r--r--chromium/tools/yes_no.py28
1327 files changed, 202699 insertions, 1123 deletions
diff --git a/chromium/tools/DEPS b/chromium/tools/DEPS
new file mode 100644
index 00000000000..88fa2ecb736
--- /dev/null
+++ b/chromium/tools/DEPS
@@ -0,0 +1,7 @@
+# checkdeps.py shouldn't check include paths for files in these dirs:
+skip_child_includes = [
+ "clang",
+ "gyp",
+ "traceline",
+ "perf/page_sets",
+]
diff --git a/chromium/tools/OWNERS b/chromium/tools/OWNERS
new file mode 100644
index 00000000000..fc7d228261b
--- /dev/null
+++ b/chromium/tools/OWNERS
@@ -0,0 +1,58 @@
+# You can add new small tools to this directory at your desire, feel free
+# to owners-TBR new folders (assuming you have a regular review already,
+# of course). Include an OWNERS file with at least two people for your new
+# folder.
+# If you're changing existing tools, have your change reviewed by the
+# OWNERS of the existing tool.
+
+dpranke@chromium.org
+scottmg@chromium.org
+thakis@chromium.org
+
+
+per-file bisect*.py=anantha@chromium.org
+per-file bisect*.py=prasadv@chromium.org
+per-file bisect*.py=robertocn@chromium.org
+per-file run-bisect*.py=prasadv@chromium.org
+per-file run-bisect*.py=robertocn@chromium.org
+per-file prepare-bisect*.py=prasadv@chromium.org
+per-file prepare-bisect*.py=robertocn@chromium.org
+
+per-file boilerplate.py=rsesek@chromium.org
+
+per-file check_git_config.py=iannucci@chromium.org
+per-file check_git_config.py=vadimsh@chromium.org
+
+per-file check_grd_for_unused_strings.py=estade@chromium.org
+
+per-file gyp-explain.py=thakis@chromium.org
+
+per-file gypv8shy.py=jochen@chromium.org
+
+per-file include_tracer.py=thakis@chromium.org
+
+per-file ipc_messages_log.py=yfriedman@chromium.org
+
+per-file isolate_driver.py=maruel@chromium.org
+per-file isolate_driver.py=vadimsh@chromium.org
+
+per-file licenses.py=file://tools/copyright_scanner/OWNERS
+
+per-file remove_stale_pyc_files.py=dtu@chromium.org
+
+per-file roll_angle.py=kbr@chromium.org
+per-file roll_angle.py=kjellander@chromium.org
+per-file roll_angle.py=geofflang@chromium.org
+per-file roll_webgl_conformance.py=bajones@chromium.org
+per-file roll_webgl_conformance.py=kbr@chromium.org
+per-file roll_webgl_conformance.py=kjellander@chromium.org
+per-file roll_webgl_conformance.py=geofflang@chromium.org
+per-file roll_webgl_conformance.py=zmo@chromium.org
+per-file roll_webrtc.py=kjellander@chromium.org
+
+per-file safely-roll-deps.py=borenet@chromium.org
+
+per-file sort-headers.py=satorux@chromium.org
+per-file sort-sources.py=satorux@chromium.org
+per-file yes_no.py=satorux@chromium.org
+
diff --git a/chromium/tools/accessibility/OWNERS b/chromium/tools/accessibility/OWNERS
new file mode 100644
index 00000000000..11e8fd837ee
--- /dev/null
+++ b/chromium/tools/accessibility/OWNERS
@@ -0,0 +1,2 @@
+dmazzoni@chromium.org
+dtseng@chromium.org
diff --git a/chromium/tools/accessibility/dump_accessibility_tree_auralinux.py b/chromium/tools/accessibility/dump_accessibility_tree_auralinux.py
new file mode 100755
index 00000000000..70e33b0685b
--- /dev/null
+++ b/chromium/tools/accessibility/dump_accessibility_tree_auralinux.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Dump Chrome's ATK accessibility tree to the command line.
+
+Accerciser is slow and buggy. This is a quick way to check that Chrome is
+exposing its interface to ATK from the command line.
+"""
+
+import pyatspi
+
+# Helper function to check application name
+def AppNameFinder(name):
+ if (name.lower().find('chromium') !=0 and
+ name.lower().find('chrome') !=0 and
+ name.lower().find('google chrome') != 0):
+ return False
+ return True
+
+def Dump(obj, indent):
+ if not obj:
+ return
+ indent_str = ' ' * indent
+ role = obj.get_role_name()
+ name = obj.get_name()
+ bounds = obj.get_extents(pyatspi.DESKTOP_COORDS)
+ bounds_str = '(%d, %d) size (%d x %d)' % (
+ bounds.x, bounds.y, bounds.width, bounds.height)
+ print '%s%s name="%s" %s' % (indent_str, role, name, bounds_str)
+
+ # Don't recurse into applications other than Chrome
+ if role == 'application':
+ if (not AppNameFinder(name)):
+ return
+
+ for i in range(obj.get_child_count()):
+ Dump(obj.get_child_at_index(i), indent + 1)
+
+desktop = pyatspi.Registry.getDesktop(0)
+Dump(desktop, 0)
diff --git a/chromium/tools/accessibility/nvda/OWNERS b/chromium/tools/accessibility/nvda/OWNERS
new file mode 100644
index 00000000000..c50d5b87619
--- /dev/null
+++ b/chromium/tools/accessibility/nvda/OWNERS
@@ -0,0 +1,3 @@
+aboxhall@chromium.org
+dmazzoni@chromium.org
+dtseng@chromium.org
diff --git a/chromium/tools/accessibility/nvda/README.txt b/chromium/tools/accessibility/nvda/README.txt
new file mode 100644
index 00000000000..ce34d93bd23
--- /dev/null
+++ b/chromium/tools/accessibility/nvda/README.txt
@@ -0,0 +1,58 @@
+This directory contains semi-automated tests of Chrome with
+NVDA (NonVisual Desktop Access), a popular open-source screen reader for
+visually impaired users on Windows. It works by launching Chrome in a
+subprocess, then launching NVDA in a special environment that simulates
+speech rather than actually speaking, and ignores all events coming from
+processes other than a specific Chrome process ID. Each test automates
+Chrome with a series of actions and asserts that NVDA gives the expected
+feedback in response.
+
+Instructions for running these tests:
+
+1. Install Python 2.7, 32-bit: http://www.python.org/
+
+ Note - the version of Python installed by Chrome's depot_tools will not
+ work, it's 64-bit.
+
+2. Download pywinauto here:
+ https://code.google.com/p/pywinauto/downloads/list
+
+ Unzip it, then install it by running this from a cmd shell in that directory:
+ python setup.py install
+
+ If you get an error, make sure you're using the 32-bit version of Python.
+
+3. Install the latest NVDA "next" snapshot from:
+ http://community.nvda-project.org/wiki/Snapshots
+
+ In the installer, choose "Create Portable copy" rather than "Install...".
+  From the Browse dialog, create a new folder called nvdaPortable inside
+ this folder.
+
+ Note: after NVDA 2014.3 stable is released, just use the stable version
+ instead, from http://www.nvaccess.org/download/
+ - if you do this, you need to run NVDA, then from the NVDA menu, choose
+ Tools > Create Portable Copy.
+  From the Browse dialog, create a new folder called nvdaPortable inside
+ this folder.
+ You should now have something like this:
+ d:\src\nvda_chrome_tests\nvdaPortable\nvda.exe
+ You can now exit NVDA.
+
+4. Install Chrome Canary. The binary is typically installed in:
+ c:\Users\USERNAME\AppData\Local\Google\Chrome SxS\Application\chrome.exe
+ ...if not, edit nvda_chrome_tests.py to point to it.
+
+5. Clone the nvda-proctest environment into this directory:
+ git clone https://bitbucket.org/nvaccess/nvda-proctest.git
+
+6. Run the tests:
+
+ First make sure NVDA is not already running.
+
+ Open a cmd console, change to the nvda_chrome_tests directory, and run:
+ python nvda_chrome_tests.py
+
+ If you get an error, open the Windows task manager and make sure NVDA
+ isn't running, kill it if necessary.
+
diff --git a/chromium/tools/accessibility/nvda/nvda_chrome_tests.py b/chromium/tools/accessibility/nvda/nvda_chrome_tests.py
new file mode 100755
index 00000000000..6cbfd1b30ee
--- /dev/null
+++ b/chromium/tools/accessibility/nvda/nvda_chrome_tests.py
@@ -0,0 +1,229 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Semi-automated tests of Chrome with NVDA.
+
+This file performs (semi) automated tests of Chrome with NVDA
+(NonVisual Desktop Access), a popular open-source screen reader for
+visually impaired users on Windows. It works by launching Chrome in a
+subprocess, then launching NVDA in a special environment that simulates
+speech rather than actually speaking, and ignores all events coming from
+processes other than a specific Chrome process ID. Each test automates
+Chrome with a series of actions and asserts that NVDA gives the expected
+feedback in response.
+
+The tests are "semi" automated in the sense that they are not intended to be
+run from any developer machine, or on a buildbot - it requires setting up the
+environment according to the instructions in README.txt, then running the
+test script, then filing bugs for any potential failures. If the environment
+is set up correctly, the actual tests should run automatically and unattended.
+"""
+
+import os
+import pywinauto
+import re
+import shutil
+import signal
+import subprocess
+import sys
+import tempfile
+import time
+import unittest
+
+CHROME_PROFILES_PATH = os.path.join(os.getcwd(), 'chrome_profiles')
+CHROME_PATH = os.path.join(os.environ['USERPROFILE'],
+ 'AppData',
+ 'Local',
+ 'Google',
+ 'Chrome SxS',
+ 'Application',
+ 'chrome.exe')
+NVDA_PATH = os.path.join(os.getcwd(),
+ 'nvdaPortable',
+ 'nvda_noUIAccess.exe')
+NVDA_PROCTEST_PATH = os.path.join(os.getcwd(),
+ 'nvda-proctest')
+NVDA_LOGPATH = os.path.join(os.getcwd(),
+ 'nvda_log.txt')
+WAIT_FOR_SPEECH_TIMEOUT_SECS = 3.0
+
+class NvdaChromeTest(unittest.TestCase):
+ @classmethod
+ def setUpClass(cls):
+ print 'user data: %s' % CHROME_PROFILES_PATH
+ print 'chrome: %s' % CHROME_PATH
+ print 'nvda: %s' % NVDA_PATH
+ print 'nvda_proctest: %s' % NVDA_PROCTEST_PATH
+
+ print
+ print 'Clearing user data directory and log file from previous runs'
+ if os.access(NVDA_LOGPATH, os.F_OK):
+ os.remove(NVDA_LOGPATH)
+ if os.access(CHROME_PROFILES_PATH, os.F_OK):
+ shutil.rmtree(CHROME_PROFILES_PATH)
+ os.mkdir(CHROME_PROFILES_PATH, 0777)
+
+ def handler(signum, frame):
+ print 'Test interrupted, attempting to kill subprocesses.'
+ self.tearDown()
+ sys.exit()
+ signal.signal(signal.SIGINT, handler)
+
+ def setUp(self):
+ user_data_dir = tempfile.mkdtemp(dir = CHROME_PROFILES_PATH)
+ args = [CHROME_PATH,
+ '--user-data-dir=%s' % user_data_dir,
+ '--no-first-run',
+ 'about:blank']
+ print
+ print ' '.join(args)
+ self._chrome_proc = subprocess.Popen(args)
+ self._chrome_proc.poll()
+ if self._chrome_proc.returncode is None:
+ print 'Chrome is running'
+ else:
+ print 'Chrome exited with code', self._chrome_proc.returncode
+ sys.exit()
+ print 'Chrome pid: %d' % self._chrome_proc.pid
+
+ os.environ['NVDA_SPECIFIC_PROCESS'] = str(self._chrome_proc.pid)
+
+ args = [NVDA_PATH,
+ '-m',
+ '-c',
+ NVDA_PROCTEST_PATH,
+ '-f',
+ NVDA_LOGPATH]
+ self._nvda_proc = subprocess.Popen(args)
+ self._nvda_proc.poll()
+ if self._nvda_proc.returncode is None:
+ print 'NVDA is running'
+ else:
+ print 'NVDA exited with code', self._nvda_proc.returncode
+ sys.exit()
+ print 'NVDA pid: %d' % self._nvda_proc.pid
+
+ app = pywinauto.application.Application()
+ app.connect_(process = self._chrome_proc.pid)
+ self._pywinauto_window = app.top_window_()
+
+ try:
+ self._WaitForSpeech(['Address and search bar edit', 'about:blank'])
+ except:
+ self.tearDown()
+
+ def tearDown(self):
+ print
+ print 'Shutting down'
+
+ self._chrome_proc.poll()
+ if self._chrome_proc.returncode is None:
+ print 'Killing Chrome subprocess'
+ self._chrome_proc.kill()
+ else:
+ print 'Chrome already died.'
+
+ self._nvda_proc.poll()
+ if self._nvda_proc.returncode is None:
+ print 'Killing NVDA subprocess'
+ self._nvda_proc.kill()
+ else:
+ print 'NVDA already died.'
+
+ def _GetSpeechFromNvdaLogFile(self):
+ """Return everything NVDA would have spoken as a list of strings.
+
+ Parses lines like this from NVDA's log file:
+ Speaking [LangChangeCommand ('en'), u'Google Chrome', u'window']
+ Speaking character u'slash'
+
+ Returns a single list of strings like this:
+ [u'Google Chrome', u'window', u'slash']
+ """
+ if not os.access(NVDA_LOGPATH, os.F_OK):
+ return []
+ lines = open(NVDA_LOGPATH).readlines()
+ regex = re.compile(r"u'((?:[^\'\\]|\\.)*)\'")
+ result = []
+ for line in lines:
+ for m in regex.finditer(line):
+ speech_with_whitespace = m.group(1)
+ speech_stripped = re.sub(r'\s+', ' ', speech_with_whitespace).strip()
+ result.append(speech_stripped)
+ return result
+
+ def _WaitForSpeech(self, expected):
+ """Block until the last speech in NVDA's log file is the given string(s).
+
+ Repeatedly parses the log file until the last speech line(s) in the
+ log file match the given strings, or it times out.
+
+ Args:
+ expected: string or a list of string - only succeeds if these are the last
+ strings spoken, in order.
+
+ Returns when those strings are spoken, or throws an error if it times out
+ waiting for those strings.
+ """
+ if type(expected) is type(''):
+ expected = [expected]
+ start_time = time.time()
+ while True:
+ lines = self._GetSpeechFromNvdaLogFile()
+ if (lines[-len(expected):] == expected):
+ break
+
+ if time.time() - start_time >= WAIT_FOR_SPEECH_TIMEOUT_SECS:
+ print '** Speech from NVDA so far:'
+ for line in lines:
+ print '"%s"' % line
+ print '** Was waiting for:'
+ for line in expected:
+ print '"%s"' % line
+ raise Exception('Timed out')
+ time.sleep(0.1)
+
+ #
+ # Tests
+ #
+
+ def testTypingInOmnibox(self):
+ # Ctrl+A: Select all.
+ self._pywinauto_window.TypeKeys('^A')
+ self._WaitForSpeech('selecting about:blank')
+
+ # Type three characters.
+ self._pywinauto_window.TypeKeys('xyz')
+ self._WaitForSpeech(['x', 'y', 'z'])
+
+ # Arrow back over two characters.
+ self._pywinauto_window.TypeKeys('{LEFT}')
+ self._WaitForSpeech(['z', 'z', 'unselecting'])
+
+ self._pywinauto_window.TypeKeys('{LEFT}')
+ self._WaitForSpeech('y')
+
+ def testFocusToolbarButton(self):
+ # Alt+Shift+T.
+ self._pywinauto_window.TypeKeys('%+T')
+ self._WaitForSpeech('Reload button Reload this page')
+
+ def testReadAllOnPageLoad(self):
+ # Ctrl+A: Select all
+ self._pywinauto_window.TypeKeys('^A')
+ self._WaitForSpeech('selecting about:blank')
+
+ # Load data url.
+ self._pywinauto_window.TypeKeys('data:text/html,Hello<p>World.')
+ self._WaitForSpeech('dot')
+ self._pywinauto_window.TypeKeys('{ENTER}')
+ self._WaitForSpeech(
+ ['document',
+ 'Hello',
+ 'World.'])
+
+if __name__ == '__main__':
+ unittest.main()
+
diff --git a/chromium/tools/accessibility/rebase_dump_accessibility_tree_test.py b/chromium/tools/accessibility/rebase_dump_accessibility_tree_test.py
new file mode 100755
index 00000000000..560114458e9
--- /dev/null
+++ b/chromium/tools/accessibility/rebase_dump_accessibility_tree_test.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Rebase DumpAccessibilityTree Tests.
+
+This script is intended to be run when you make a change that could affect the
+expected results of tests in:
+
+ content/test/data/accessibility
+
+It assumes that you've already uploaded a change and the try jobs have finished.
+It collects all of the results from try jobs on all platforms and updates the
+expectation files locally. From there you can run 'git diff' to make sure all
+of the changes look reasonable, then upload the change for code review.
+"""
+
+import os
+import re
+import sys
+import time
+import urllib
+
+# Load BeautifulSoup. It's checked into two places in the Chromium tree.
+sys.path.append(
+ 'third_party/trace-viewer/third_party/tvcm/third_party/beautifulsoup')
+from BeautifulSoup import BeautifulSoup
+
+# The location of the DumpAccessibilityTree html test files and expectations.
+TEST_DATA_PATH = os.path.join(os.getcwd(), 'content/test/data/accessibility')
+
+# A global that keeps track of files we've already updated, so we don't
+# bother to update the same file twice.
+completed_files = set()
+
+def GitClIssue():
+ '''Retrieve the current issue number as a string.'''
+ result = os.popen('git cl issue').read()
+ # Returns string like: 'Issue number: 12345 (https://...)'
+ return result.split()[2]
+
+def ParseFailure(name, url):
+ '''Parse given the name of a failing trybot and the url of its build log.'''
+
+ # Figure out the platform.
+ if name.find('android') >= 0:
+ platform_suffix = '-expected-android.txt'
+ elif name.find('mac') >= 0:
+ platform_suffix = '-expected-mac.txt'
+ elif name.find('win') >= 0:
+ platform_suffix = '-expected-win.txt'
+ else:
+ return
+
+ # Read the content_browsertests log file.
+ data = None
+ lines = None
+ urls = []
+ for url_suffix in [
+ '/steps/content_browsertests%20(with%20patch)/logs/stdio/text',
+ '/steps/content_browsertests/logs/stdio/text']:
+ urls.append(url + url_suffix)
+ for url in urls:
+ response = urllib.urlopen(url)
+ if response.getcode() == 200:
+ data = response.read()
+ lines = data.splitlines()
+ break
+
+ if not data:
+ return
+
+ # Parse the log file for failing tests and overwrite the expected
+ # result file locally with the actual results from the log.
+ test_name = None
+ start = None
+ filename = None
+ for i in range(len(lines)):
+ line = lines[i]
+ if line[:12] == '[ RUN ]':
+ test_name = line[13:]
+ if test_name and line[:8] == 'Testing:':
+ filename = re.search('content.test.*accessibility.(.*)', line).group(1)
+ if test_name and line == 'Actual':
+ start = i + 2
+ if start and test_name and filename and line[:12] == '[ FAILED ]':
+ # Get the path to the html file.
+ dst_fullpath = os.path.join(TEST_DATA_PATH, filename)
+ # Strip off .html and replace it with the platform expected suffix.
+ dst_fullpath = dst_fullpath[:-5] + platform_suffix
+ if dst_fullpath in completed_files:
+ continue
+
+ actual = [line for line in lines[start : i - 1] if line]
+ fp = open(dst_fullpath, 'w')
+ fp.write('\n'.join(actual))
+ fp.close()
+ print dst_fullpath
+ completed_files.add(dst_fullpath)
+ start = None
+ test_name = None
+ filename = None
+
+def ParseTrybots(data):
+ '''Parse the code review page to find links to try bots.'''
+ soup = BeautifulSoup(data)
+ failures = soup.findAll(
+ 'a',
+ { "class" : "build-result build-status-color-failure" })
+ print 'Found %d trybots that failed' % len(failures)
+ for f in failures:
+ name = f.text.replace('&nbsp;', '')
+ url = f['href']
+ ParseFailure(name, url)
+
+def Run():
+ '''Main. Get the issue number and parse the code review page.'''
+ if len(sys.argv) == 2:
+ issue = sys.argv[1]
+ else:
+ issue = GitClIssue()
+
+ url = 'https://codereview.chromium.org/%s' % issue
+ print 'Fetching issue from %s' % url
+ response = urllib.urlopen(url)
+ if response.getcode() != 200:
+ print 'Error code %d accessing url: %s' % (response.getcode(), url)
+ data = response.read()
+ ParseTrybots(data)
+
+if __name__ == '__main__':
+ sys.exit(Run())
diff --git a/chromium/tools/android/android_tools.gyp b/chromium/tools/android/android_tools.gyp
index b963b3f0ce2..3e1589f1fb0 100644
--- a/chromium/tools/android/android_tools.gyp
+++ b/chromium/tools/android/android_tools.gyp
@@ -17,7 +17,7 @@
'md5sum/md5sum.gyp:md5sum',
'memtrack_helper/memtrack_helper.gyp:memtrack_helper',
'purge_ashmem/purge_ashmem.gyp:purge_ashmem',
- '../../tools/telemetry/telemetry.gyp:*#host',
+ '../../third_party/catapult/telemetry/telemetry.gyp:*#host',
],
},
{
@@ -85,5 +85,13 @@
'audio_focus_grabber/audio_focus_grabber.gyp:audio_focus_grabber_apk',
],
},
+ {
+ # GN: //tools/android:push_apps_to_background
+ 'target_name': 'push_apps_to_background',
+ 'type': 'none',
+ 'dependencies': [
+ 'push_apps_to_background/push_apps_to_background.gyp:push_apps_to_background_apk',
+ ],
+ },
],
}
diff --git a/chromium/tools/android/push_apps_to_background/push_apps_to_background.gyp b/chromium/tools/android/push_apps_to_background/push_apps_to_background.gyp
new file mode 100644
index 00000000000..25fca1f6ed7
--- /dev/null
+++ b/chromium/tools/android/push_apps_to_background/push_apps_to_background.gyp
@@ -0,0 +1,21 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ # GN: //tools/android/push_apps_to_background:push_apps_to_background_apk
+ {
+ 'target_name': 'push_apps_to_background_apk',
+ 'type': 'none',
+ 'variables': {
+ 'apk_name': 'PushAppsToBackground',
+ 'java_in_dir': '.',
+ 'resource_dir': 'res',
+ 'android_manifest_path': 'AndroidManifest.xml',
+ },
+ 'includes': [
+ '../../../build/java_apk.gypi',
+ ],
+ },
+ ],
+}
diff --git a/chromium/tools/auto_bisect/OWNERS b/chromium/tools/auto_bisect/OWNERS
new file mode 100644
index 00000000000..0a10c869a8b
--- /dev/null
+++ b/chromium/tools/auto_bisect/OWNERS
@@ -0,0 +1,6 @@
+prasadv@chromium.org
+qyearsley@chromium.org
+robertocn@chromium.org
+sergiyb@chromium.org
+simonhatch@chromium.org
+tonyg@chromium.org
diff --git a/chromium/tools/auto_bisect/PRESUBMIT.py b/chromium/tools/auto_bisect/PRESUBMIT.py
new file mode 100644
index 00000000000..14bc6d5d88b
--- /dev/null
+++ b/chromium/tools/auto_bisect/PRESUBMIT.py
@@ -0,0 +1,100 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Top-level presubmit script for auto-bisect.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
+details on the presubmit API.
+"""
+
+import imp
+import subprocess
+import os
+
+# Paths to bisect config files relative to this script.
+CONFIG_FILES = [
+ 'bisect.cfg',
+ os.path.join('..', 'run-perf-test.cfg'),
+]
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ return _CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ return _CommonChecks(input_api, output_api)
+
+
+def _CommonChecks(input_api, output_api):
+ """Does all presubmit checks for auto-bisect."""
+ results = []
+ results.extend(_CheckAllConfigFiles(input_api, output_api))
+ results.extend(_RunUnitTests(input_api, output_api))
+ results.extend(_RunPyLint(input_api, output_api))
+ return results
+
+
+def _CheckAllConfigFiles(input_api, output_api):
+ """Checks all bisect config files and returns a list of presubmit results."""
+ results = []
+ script_path = input_api.PresubmitLocalPath()
+ for config_file in CONFIG_FILES:
+ file_path = os.path.join(script_path, config_file)
+ results.extend(_CheckConfigFile(file_path, output_api))
+ return results
+
+
+def _CheckConfigFile(file_path, output_api):
+ """Checks one bisect config file and returns a list of presubmit results."""
+ try:
+ config_file = imp.load_source('config', file_path)
+ except IOError as e:
+ warning = 'Failed to read config file %s: %s' % (file_path, str(e))
+ return [output_api.PresubmitError(warning, items=[file_path])]
+
+ if not hasattr(config_file, 'config'):
+ warning = 'Config file has no "config" global variable: %s' % str(e)
+ return [output_api.PresubmitError(warning, items=[file_path])]
+
+ if type(config_file.config) is not dict:
+ warning = 'Config file "config" global variable is not dict: %s' % str(e)
+ return [output_api.PresubmitError(warning, items=[file_path])]
+
+ for k, v in config_file.config.iteritems():
+ if v != '':
+ warning = 'Non-empty value in config dict: %s: %s' % (repr(k), repr(v))
+ warning += ('\nThe bisection config file should only contain a config '
+ 'dict with empty fields. Changes to this file should not '
+ 'be submitted.')
+ return [output_api.PresubmitError(warning, items=[file_path])]
+
+ return []
+
+
+def _RunUnitTests(input_api, output_api):
+ """Runs unit tests for auto-bisect."""
+ repo_root = input_api.change.RepositoryRoot()
+ auto_bisect_dir = os.path.join(repo_root, 'tools', 'auto_bisect')
+ test_runner = os.path.join(auto_bisect_dir, 'run_tests')
+ return_code = subprocess.call(['python', test_runner])
+ if return_code:
+ message = 'Auto-bisect unit tests did not all pass.'
+ return [output_api.PresubmitError(message)]
+ return []
+
+
+def _RunPyLint(input_api, output_api):
+ """Runs unit tests for auto-bisect."""
+ telemetry_path = os.path.join(
+ input_api.PresubmitLocalPath(), '..', '..', 'third_party', 'telemetry')
+ mock_path = os.path.join(
+ input_api.PresubmitLocalPath(), '..', '..', 'third_party', 'pymock')
+ disabled_warnings = [
+ 'relative-import',
+ ]
+ tests = input_api.canned_checks.GetPylint(
+ input_api, output_api, disabled_warnings=disabled_warnings,
+ extra_paths_list=[telemetry_path, mock_path])
+ return input_api.RunTests(tests)
diff --git a/chromium/tools/auto_bisect/README b/chromium/tools/auto_bisect/README
new file mode 100644
index 00000000000..c1a88e7a4ca
--- /dev/null
+++ b/chromium/tools/auto_bisect/README
@@ -0,0 +1,18 @@
+This directory contains modules related to tools for bisecting regressions.
+
+There are several different tools for bisecting regressions; the main use
+of these tools is to find revisions where a performance regression occurred.
+These tools are generally run by trybots but can also be run locally.
+
+Documentation:
+ http://www.chromium.org/developers/bisecting-bugs
+ http://www.chromium.org/developers/tree-sheriffs/perf-sheriffs/bisecting-performance-regressions
+
+Overview of bisect-related files in src/tools:
+ run-bisect-perf-regression.py -- used to kick off a bisect job
+ prepare-bisect-perf-regression.py -- run before the above to prepare the repo
+ run-bisect-manual-test.py -- used to manually bisect
+ bisect-manual-test.py -- helper module used by run-bisect-manual-test.py
+ auto_bisect/bisect.cfg -- config parameters for a bisect job
+ run-perf-test.cfg -- config parameters running a perf test once
+
diff --git a/chromium/tools/auto_bisect/__init__.py b/chromium/tools/auto_bisect/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/chromium/tools/auto_bisect/__init__.py
diff --git a/chromium/tools/auto_bisect/bisect.cfg b/chromium/tools/auto_bisect/bisect.cfg
new file mode 100644
index 00000000000..25323ff2c03
--- /dev/null
+++ b/chromium/tools/auto_bisect/bisect.cfg
@@ -0,0 +1,56 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Config file read by run-bisect-perf-regression.py.
+
+This script is intended for use by anyone that wants to run a remote bisection
+on a range of revisions to look for a performance regression. Modify the config
+below and add the revision range, performance command, and metric. You can then
+run a git try <bot>.
+
+Changes to this file should never be submitted.
+
+Args:
+ 'command': This is the full command to execute the test.
+ 'good_revision': An svn or git revision where the metric hadn't regressed yet.
+ 'bad_revision': An svn or git revision sometime after the metric regressed.
+ 'metric': The name of the metric to parse out from the results of the
+ performance test. You can retrieve the metric by looking at the stdio of
+ the performance test. Look for lines of the format:
+ RESULT <graph>: <trace>= <value> <units>
+ The metric name is "<graph>/<trace>".
+ 'repeat_count': The number of times to repeat the performance test.
+ 'max_time_minutes': The script will attempt to run the performance test
+ "repeat_count" times, unless it exceeds "max_time_minutes".
+ 'truncate_percent': The highest/lowest % values will be discarded before
+ computing the mean result for each revision.
+
+Sample config:
+
+config = {
+ 'command': './tools/perf/run_benchmark --browser=release sunspider',
+ 'metric': 'Total/Total',
+ 'good_revision': '14ac2486c0eba1266d2da1c52e8759d9c784fe80',
+ 'bad_revision': 'fcf8643d31301eea990a4c42d7d8c9fc30cc33ec',
+ 'repeat_count': '20',
+ 'max_time_minutes': '20',
+ 'truncate_percent': '25',
+}
+
+For Windows, if you're calling a python script you will need to add "python"
+to the command, so the command would be changed to:
+ 'python tools/perf/run_benchmark -v --browser=release sunspider',
+"""
+
+config = {
+ 'command': '',
+ 'good_revision': '',
+ 'bad_revision': '',
+ 'metric': '',
+ 'repeat_count': '',
+ 'max_time_minutes': '',
+ 'truncate_percent': '',
+}
+
+# Workaround git try issue, see crbug.com/257689
diff --git a/chromium/tools/auto_bisect/bisect_perf_regression.py b/chromium/tools/auto_bisect/bisect_perf_regression.py
new file mode 100755
index 00000000000..beced3ad7cc
--- /dev/null
+++ b/chromium/tools/auto_bisect/bisect_perf_regression.py
@@ -0,0 +1,2903 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Chromium auto-bisect tool
+
+This script bisects a range of commits using binary search. It starts by getting
+reference values for the specified "good" and "bad" commits. Then, for revisions
+in between, it will get builds, run tests and classify intermediate revisions as
+"good" or "bad" until an adjacent "good" and "bad" revision is found; this is
+the culprit.
+
+If the culprit is a roll of a dependency repository (e.g. v8), it will then
+expand the revision range and continue the bisect until a culprit revision in
+the dependency repository is found.
+
+Example usage using git commit hashes, bisecting a performance test based on
+the mean value of a particular metric:
+
+./tools/auto_bisect/bisect_perf_regression.py
+ --command "out/Release/performance_ui_tests \
+ --gtest_filter=ShutdownTest.SimpleUserQuit"\
+ --metric shutdown/simple-user-quit
+ --good_revision 1f6e67861535121c5c819c16a666f2436c207e7b\
+ --bad-revision b732f23b4f81c382db0b23b9035f3dadc7d925bb\
+
+Example usage using git commit positions, bisecting a functional test based on
+whether it passes or fails.
+
+./tools/auto_bisect/bisect_perf_regression.py\
+ --command "out/Release/content_unittests -single-process-tests \
+ --gtest_filter=GpuMemoryBufferImplTests"\
+ --good_revision 408222\
+ --bad_revision 408232\
+ --bisect_mode return_code\
+ --builder_type full
+
+In practice, the auto-bisect tool is usually run on tryserver.chromium.perf
+try bots, and is started by tools/run-bisect-perf-regression.py using
+config parameters from tools/auto_bisect/bisect.cfg.
+"""
+
+import argparse
+import copy
+import errno
+import hashlib
+import json
+import logging
+import os
+import re
+import shlex
+import shutil
+import StringIO
+import sys
+import time
+import urllib
+import urllib2
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..',
+ 'third_party', 'catapult', 'telemetry'))
+
+from bisect_printer import BisectPrinter
+from bisect_results import BisectResults
+import bisect_results_json
+from bisect_state import BisectState
+import bisect_utils
+import builder
+import fetch_build
+import math_utils
+import query_crbug
+import request_build
+import source_control
+
+# The script is in chromium/src/tools/auto_bisect. Throughout this script,
+# we use paths to other things in the chromium/src repository.
+
+# Possible return values from BisectPerformanceMetrics.RunTest.
+BUILD_RESULT_SUCCEED = 0
+BUILD_RESULT_FAIL = 1
+BUILD_RESULT_SKIPPED = 2
+
+# How many times to repeat the test on the last known good and first known bad
+# revisions in order to assess a more accurate confidence score in the
+# regression culprit.
+BORDER_REVISIONS_EXTRA_RUNS = 2
+
+# Patch template to add a new file, DEPS.sha under src folder.
+# This file contains SHA1 value of the DEPS changes made while bisecting
+# dependency repositories. This patch is sent along with the DEPS patch to the
+# try server. When a build request is posted with a patch, the bisect builders
+# on the try server read the SHA value from this file once the build is
+# produced, and append it to the build archive filename.
+DEPS_SHA_PATCH = """diff --git DEPS.sha DEPS.sha
+new file mode 100644
+--- /dev/null
++++ DEPS.sha
+@@ -0,0 +1 @@
++%(deps_sha)s
+"""
+
+REGRESSION_NOT_REPRODUCED_MESSAGE_TEMPLATE = """
+Bisect did not clearly reproduce a regression between the given "good"
+and "bad" revisions.
+
+Results:
+"Good" revision: {good_rev}
+\tMean: {good_mean}
+\tStandard error: {good_std_err}
+\tSample size: {good_sample_size}
+
+"Bad" revision: {bad_rev}
+\tMean: {bad_mean}
+\tStandard error: {bad_std_err}
+\tSample size: {bad_sample_size}
+
+You may want to try bisecting on a different platform or metric.
+"""
+
+# Git branch name used to run bisect try jobs.
+BISECT_TRYJOB_BRANCH = 'bisect-tryjob'
+# Git master branch name.
+BISECT_MASTER_BRANCH = 'master'
+# File to store 'git diff' content.
+BISECT_PATCH_FILE = 'deps_patch.txt'
+# SVN repo where the bisect try jobs are submitted.
+PERF_SVN_REPO_URL = 'svn://svn.chromium.org/chrome-try/try-perf'
+FULL_SVN_REPO_URL = 'svn://svn.chromium.org/chrome-try/try'
+ANDROID_CHROME_SVN_REPO_URL = ('svn://svn.chromium.org/chrome-try-internal/'
+ 'try-perf')
+PERF_DASH_RESULTS_URL = 'https://chromeperf.appspot.com/post_bisect_results'
+
+
+class RunGitError(Exception):
+
+ def __str__(self):
+ return '%s\nError executing git command.' % self.args[0]
+
+
+def GetSHA1HexDigest(contents):
+ """Returns SHA1 hex digest of the given string."""
+ return hashlib.sha1(contents).hexdigest()
+
+
+def WriteStringToFile(text, file_name):
+ """Writes text to a file, raising an RuntimeError on failure."""
+ try:
+ with open(file_name, 'wb') as f:
+ f.write(text)
+ except IOError:
+ raise RuntimeError('Error writing to file [%s]' % file_name)
+
+
+def ReadStringFromFile(file_name):
+ """Writes text to a file, raising an RuntimeError on failure."""
+ try:
+ with open(file_name) as f:
+ return f.read()
+ except IOError:
+ raise RuntimeError('Error reading file [%s]' % file_name)
+
+
+def ChangeBackslashToSlashInPatch(diff_text):
+ """Formats file paths in the given patch text to Unix-style paths."""
+ if not diff_text:
+ return None
+ diff_lines = diff_text.split('\n')
+ for i in range(len(diff_lines)):
+ line = diff_lines[i]
+ if line.startswith('--- ') or line.startswith('+++ '):
+ diff_lines[i] = line.replace('\\', '/')
+ return '\n'.join(diff_lines)
+
+
+def _ParseRevisionsFromDEPSFileManually(deps_file_contents):
+ """Parses the vars section of the DEPS file using regular expressions.
+
+ Args:
+ deps_file_contents: The DEPS file contents as a string.
+
+ Returns:
+ A dictionary in the format {depot: revision} if successful, otherwise None.
+ """
+ # We'll parse the "vars" section of the DEPS file.
+ rxp = re.compile('vars = {(?P<vars_body>[^}]+)', re.MULTILINE)
+ re_results = rxp.search(deps_file_contents)
+
+ if not re_results:
+ return None
+
+ # We should be left with a series of entries in the vars component of
+ # the DEPS file with the following format:
+ # 'depot_name': 'revision',
+ vars_body = re_results.group('vars_body')
+ rxp = re.compile(r"'(?P<depot_body>[\w_-]+)':[\s]+'(?P<rev_body>[\w@]+)'",
+ re.MULTILINE)
+ re_results = rxp.findall(vars_body)
+
+ return dict(re_results)
+
+
+def _WaitUntilBuildIsReady(fetch_build_func, builder_name, build_request_id,
+ max_timeout, buildbot_server_url):
+ """Waits until build is produced by bisect builder on try server.
+
+ Args:
+ fetch_build_func: Function to check and download build from cloud storage.
+ builder_name: Builder bot name on try server.
+ build_request_id: A unique ID of the build request posted to try server.
+ max_timeout: Maximum time to wait for the build.
+ buildbot_server_url: Buildbot url to check build status.
+
+ Returns:
+ Downloaded archive file path if exists, otherwise None.
+ """
+ # Build number on the try server.
+ build_num = None
+ # Interval to check build on cloud storage.
+ poll_interval = 60
+ # Interval to check build status on try server in seconds.
+ status_check_interval = 600
+ last_status_check = time.time()
+ start_time = time.time()
+
+ while True:
+ # Checks for build on gs://chrome-perf and download if exists.
+ res = fetch_build_func()
+ if res:
+ return (res, 'Build successfully found')
+ elapsed_status_check = time.time() - last_status_check
+ # To avoid overloading try server with status check requests, we check
+ # build status for every 10 minutes.
+ if elapsed_status_check > status_check_interval:
+ last_status_check = time.time()
+ if not build_num:
+ # Get the build number on try server for the current build.
+ build_num = request_build.GetBuildNumFromBuilder(
+ build_request_id, builder_name, buildbot_server_url)
+ # Check the status of build using the build number.
+ # Note: Build is treated as PENDING if build number is not found
+      # on the try server.
+ build_status, status_link = request_build.GetBuildStatus(
+ build_num, builder_name, buildbot_server_url)
+ if build_status == request_build.FAILED:
+ return (None, 'Failed to produce build, log: %s' % status_link)
+ elapsed_time = time.time() - start_time
+ if elapsed_time > max_timeout:
+ return (None, 'Timed out: %ss without build' % max_timeout)
+
+ logging.info('Time elapsed: %ss without build.', elapsed_time)
+ time.sleep(poll_interval)
+ # For some reason, mac bisect bots were not flushing stdout periodically.
+ # As a result buildbot command is timed-out. Flush stdout on all platforms
+ # while waiting for build.
+ sys.stdout.flush()
+
+
+def _UpdateV8Branch(deps_content):
+ """Updates V8 branch in DEPS file to process v8_bleeding_edge.
+
+ Check for "v8_branch" in DEPS file if exists update its value
+ with v8_bleeding_edge branch. Note: "v8_branch" is added to DEPS
+ variable from DEPS revision 254916, therefore check for "src/v8":
+ <v8 source path> in DEPS in order to support prior DEPS revisions
+ and update it.
+
+ Args:
+ deps_content: DEPS file contents to be modified.
+
+ Returns:
+ Modified DEPS file contents as a string.
+ """
+ new_branch = r'branches/bleeding_edge'
+ v8_branch_pattern = re.compile(r'(?<="v8_branch": ")(.*)(?=")')
+ if re.search(v8_branch_pattern, deps_content):
+ deps_content = re.sub(v8_branch_pattern, new_branch, deps_content)
+ else:
+ # Replaces the branch assigned to "src/v8" key in DEPS file.
+ # Format of "src/v8" in DEPS:
+ # "src/v8":
+ # (Var("googlecode_url") % "v8") + "/trunk@" + Var("v8_revision"),
+ # So, "/trunk@" is replace with "/branches/bleeding_edge@"
+ v8_src_pattern = re.compile(
+ r'(?<="v8"\) \+ "/)(.*)(?=@" \+ Var\("v8_revision"\))', re.MULTILINE)
+ if re.search(v8_src_pattern, deps_content):
+ deps_content = re.sub(v8_src_pattern, new_branch, deps_content)
+ return deps_content
+
+
+def _UpdateDEPSForAngle(revision, depot, deps_file):
+ """Updates DEPS file with new revision for Angle repository.
+
+ This is a hack for Angle depot case because, in DEPS file "vars" dictionary
+ variable contains "angle_revision" key that holds git hash instead of
+ SVN revision.
+
+ And sometimes "angle_revision" key is not specified in "vars" variable,
+ in such cases check "deps" dictionary variable that matches
+ angle.git@[a-fA-F0-9]{40}$ and replace git hash.
+ """
+ deps_var = bisect_utils.DEPOT_DEPS_NAME[depot]['deps_var']
+ try:
+ deps_contents = ReadStringFromFile(deps_file)
+ # Check whether the depot and revision pattern in DEPS file vars variable
+ # e.g. "angle_revision": "fa63e947cb3eccf463648d21a05d5002c9b8adfa".
+ angle_rev_pattern = re.compile(r'(?<="%s": ")([a-fA-F0-9]{40})(?=")' %
+ deps_var, re.MULTILINE)
+ match = re.search(angle_rev_pattern, deps_contents)
+ if match:
+ # Update the revision information for the given depot
+ new_data = re.sub(angle_rev_pattern, revision, deps_contents)
+ else:
+ # Check whether the depot and revision pattern in DEPS file deps
+ # variable. e.g.,
+ # "src/third_party/angle": Var("chromium_git") +
+ # "/angle/angle.git@fa63e947cb3eccf463648d21a05d5002c9b8adfa",.
+ angle_rev_pattern = re.compile(
+ r'(?<=angle\.git@)([a-fA-F0-9]{40})(?=")', re.MULTILINE)
+ match = re.search(angle_rev_pattern, deps_contents)
+ if not match:
+ logging.info('Could not find angle revision information in DEPS file.')
+ return False
+ new_data = re.sub(angle_rev_pattern, revision, deps_contents)
+ # Write changes to DEPS file
+ WriteStringToFile(new_data, deps_file)
+ return True
+ except IOError, e:
+ logging.warn('Something went wrong while updating DEPS file, %s', e)
+ return False
+
+
+def _TryParseHistogramValuesFromOutput(metric, text):
+ """Attempts to parse a metric in the format HISTOGRAM <graph: <trace>.
+
+ Args:
+    metric: The metric as a list of [<graph>, <trace>] strings.
+ text: The text to parse the metric values from.
+
+ Returns:
+ A list of floating point numbers found, [] if none were found.
+ """
+ metric_formatted = 'HISTOGRAM %s: %s= ' % (metric[0], metric[1])
+
+ text_lines = text.split('\n')
+ values_list = []
+
+ for current_line in text_lines:
+ if metric_formatted in current_line:
+ current_line = current_line[len(metric_formatted):]
+
+ try:
+ histogram_values = eval(current_line)
+
+ for b in histogram_values['buckets']:
+ average_for_bucket = float(b['high'] + b['low']) * 0.5
+ # Extends the list with N-elements with the average for that bucket.
+ values_list.extend([average_for_bucket] * b['count'])
+ except Exception:
+ pass
+
+ return values_list
+
+
+def _TryParseResultValuesFromOutput(metric, text):
+ """Attempts to parse a metric in the format RESULT <graph>: <trace>= ...
+
+ Args:
+    metric: The metric as a list of [<graph>, <trace>] string pairs.
+ text: The text to parse the metric values from.
+
+ Returns:
+ A list of floating point numbers found.
+ """
+ # Format is: RESULT <graph>: <trace>= <value> <units>
+ metric_re = re.escape('RESULT %s: %s=' % (metric[0], metric[1]))
+
+ # The log will be parsed looking for format:
+ # <*>RESULT <graph_name>: <trace_name>= <value>
+ single_result_re = re.compile(
+ metric_re + r'\s*(?P<VALUE>[-]?\d*(\.\d*)?)')
+
+ # The log will be parsed looking for format:
+ # <*>RESULT <graph_name>: <trace_name>= [<value>,value,value,...]
+ multi_results_re = re.compile(
+ metric_re + r'\s*\[\s*(?P<VALUES>[-]?[\d\., ]+)\s*\]')
+
+ # The log will be parsed looking for format:
+ # <*>RESULT <graph_name>: <trace_name>= {<mean>, <std deviation>}
+ mean_stddev_re = re.compile(
+ metric_re +
+ r'\s*\{\s*(?P<MEAN>[-]?\d*(\.\d*)?),\s*(?P<STDDEV>\d+(\.\d*)?)\s*\}')
+
+ text_lines = text.split('\n')
+ values_list = []
+ for current_line in text_lines:
+ # Parse the output from the performance test for the metric we're
+ # interested in.
+ single_result_match = single_result_re.search(current_line)
+ multi_results_match = multi_results_re.search(current_line)
+ mean_stddev_match = mean_stddev_re.search(current_line)
+ if (not single_result_match is None and
+ single_result_match.group('VALUE')):
+ values_list += [single_result_match.group('VALUE')]
+ elif (not multi_results_match is None and
+ multi_results_match.group('VALUES')):
+ metric_values = multi_results_match.group('VALUES')
+ values_list += metric_values.split(',')
+ elif (not mean_stddev_match is None and
+ mean_stddev_match.group('MEAN')):
+ values_list += [mean_stddev_match.group('MEAN')]
+
+ values_list = [float(v) for v in values_list
+ if bisect_utils.IsStringFloat(v)]
+
+ return values_list
+
+
+def _ParseMetricValuesFromOutput(metric, text):
+ """Parses output from performance_ui_tests and retrieves the results for
+ a given metric.
+
+ Args:
+    metric: The metric as a list of [<graph>, <trace>] strings.
+ text: The text to parse the metric values from.
+
+ Returns:
+ A list of floating point numbers found.
+ """
+ metric_values = _TryParseResultValuesFromOutput(metric, text)
+
+ if not metric_values:
+ metric_values = _TryParseHistogramValuesFromOutput(metric, text)
+
+ return metric_values
+
+
+def _GenerateProfileIfNecessary(command_args):
+ """Checks the command line of the performance test for dependencies on
+ profile generation, and runs tools/perf/generate_profile as necessary.
+
+ Args:
+ command_args: Command line being passed to performance test, as a list.
+
+ Returns:
+ False if profile generation was necessary and failed, otherwise True.
+ """
+ if '--profile-dir' in ' '.join(command_args):
+ # If we were using python 2.7+, we could just use the argparse
+ # module's parse_known_args to grab --profile-dir. Since some of the
+ # bots still run 2.6, have to grab the arguments manually.
+ arg_dict = {}
+ args_to_parse = ['--profile-dir', '--browser']
+
+ for arg_to_parse in args_to_parse:
+ for i, current_arg in enumerate(command_args):
+ if arg_to_parse in current_arg:
+ current_arg_split = current_arg.split('=')
+
+ # Check 2 cases, --arg=<val> and --arg <val>
+ if len(current_arg_split) == 2:
+ arg_dict[arg_to_parse] = current_arg_split[1]
+ elif i + 1 < len(command_args):
+ arg_dict[arg_to_parse] = command_args[i+1]
+
+ path_to_generate = os.path.join('tools', 'perf', 'generate_profile')
+
+ if '--profile-dir' in arg_dict and '--browser' in arg_dict:
+ profile_path, profile_type = os.path.split(arg_dict['--profile-dir'])
+ return not bisect_utils.RunProcess(
+ [
+ 'python', path_to_generate,
+ '--profile-type-to-generate', profile_type,
+ '--browser', arg_dict['--browser'],
+ '--output-dir', profile_path
+ ])
+ return False
+ return True
+
+
+def _IsRegressionReproduced(known_good_result, known_bad_result,
+ required_initial_confidence):
+ """Checks whether the regression was reproduced based on the initial values.
+
+ Args:
+ known_good_result: A dict with the keys "values", "mean" and "std_err".
+ known_bad_result: Same as above.
+ required_initial_confidence: Minimum confidence score for the given
+ good and bad revisions to avoid early aborting.
+
+ Returns:
+ True if there is a clear change between the result values for the given
+ good and bad revisions, False otherwise.
+ """
+ def PossiblyFlatten(values):
+ """Flattens if needed, by averaging the values in each nested list."""
+ if isinstance(values, list) and all(isinstance(x, list) for x in values):
+ return map(math_utils.Mean, values)
+ return values
+
+ initial_confidence = BisectResults.ConfidenceScore(
+ PossiblyFlatten(known_bad_result['values']),
+ PossiblyFlatten(known_good_result['values']),
+ accept_single_bad_or_good=True)
+
+ return initial_confidence >= required_initial_confidence
+
+
+def _RegressionNotReproducedWarningMessage(
+ good_revision, bad_revision, known_good_value, known_bad_value):
+ return REGRESSION_NOT_REPRODUCED_MESSAGE_TEMPLATE.format(
+ good_rev=good_revision,
+ good_mean=known_good_value['mean'],
+ good_std_err=known_good_value['std_err'],
+ good_sample_size=len(known_good_value['values']),
+ bad_rev=bad_revision,
+ bad_mean=known_bad_value['mean'],
+ bad_std_err=known_bad_value['std_err'],
+ bad_sample_size=len(known_bad_value['values']))
+
+
+class DepotDirectoryRegistry(object):
+
+ def __init__(self, src_cwd):
+ self.depot_cwd = {}
+ for depot in bisect_utils.DEPOT_NAMES:
+ # The working directory of each depot is just the path to the depot, but
+ # since we're already in 'src', we can skip that part.
+ path_in_src = bisect_utils.DEPOT_DEPS_NAME[depot]['src'][4:]
+ self.SetDepotDir(depot, os.path.join(src_cwd, path_in_src))
+
+ self.SetDepotDir('chromium', src_cwd)
+
+ def SetDepotDir(self, depot_name, depot_dir):
+ self.depot_cwd[depot_name] = depot_dir
+
+ def GetDepotDir(self, depot_name):
+ if depot_name in self.depot_cwd:
+ return self.depot_cwd[depot_name]
+ else:
+ assert False, ('Unknown depot [ %s ] encountered. Possibly a new one '
+ 'was added without proper support?' % depot_name)
+
+ def ChangeToDepotDir(self, depot_name):
+ """Given a depot, changes to the appropriate working directory.
+
+ Args:
+ depot_name: The name of the depot (see DEPOT_NAMES).
+ """
+ os.chdir(self.GetDepotDir(depot_name))
+
+
+def _PrepareBisectBranch(parent_branch, new_branch):
+ """Creates a new branch to submit bisect try job.
+
+ Args:
+ parent_branch: Parent branch to be used to create new branch.
+ new_branch: New branch name.
+ """
+ current_branch, returncode = bisect_utils.RunGit(
+ ['rev-parse', '--abbrev-ref', 'HEAD'])
+ if returncode:
+ raise RunGitError('Must be in a git repository to send changes to trybots.')
+
+ current_branch = current_branch.strip()
+ # Make sure current branch is master.
+ if current_branch != parent_branch:
+ output, returncode = bisect_utils.RunGit(['checkout', '-f', parent_branch])
+ if returncode:
+ raise RunGitError('Failed to checkout branch: %s.' % output)
+
+ # Delete new branch if exists.
+ output, returncode = bisect_utils.RunGit(['branch', '--list'])
+ if new_branch in output:
+ output, returncode = bisect_utils.RunGit(['branch', '-D', new_branch])
+ if returncode:
+ raise RunGitError('Deleting branch failed, %s', output)
+
+ # Check if the tree is dirty: make sure the index is up to date and then
+ # run diff-index.
+ bisect_utils.RunGit(['update-index', '--refresh', '-q'])
+ output, returncode = bisect_utils.RunGit(['diff-index', 'HEAD'])
+ if output:
+ raise RunGitError('Cannot send a try job with a dirty tree.')
+
+ # Create and check out the telemetry-tryjob branch, and edit the configs
+ # for the try job there.
+ output, returncode = bisect_utils.RunGit(['checkout', '-b', new_branch])
+ if returncode:
+ raise RunGitError('Failed to checkout branch: %s.' % output)
+
+ output, returncode = bisect_utils.RunGit(
+ ['branch', '--set-upstream-to', parent_branch])
+ if returncode:
+ raise RunGitError('Error in git branch --set-upstream-to')
+
+
+def _StartBuilderTryJob(
+ builder_type, git_revision, builder_name, job_name, patch=None):
+ """Attempts to run a try job from the current directory.
+
+ Args:
+ builder_type: One of the builder types in fetch_build, e.g. "perf".
+ git_revision: A git commit hash.
+ builder_name: Name of the bisect bot to be used for try job.
+    job_name: Try job name, used to identify which bisect
+ job was responsible for requesting a build.
+ patch: A DEPS patch (used while bisecting dependency repositories),
+ or None if we're bisecting the top-level repository.
+ """
+ # TODO(prasadv, qyearsley): Make this a method of BuildArchive
+ # (which may be renamed to BuilderTryBot or Builder).
+ try:
+ # Temporary branch for running a try job.
+ _PrepareBisectBranch(BISECT_MASTER_BRANCH, BISECT_TRYJOB_BRANCH)
+ patch_content = '/dev/null'
+ # Create a temporary patch file.
+ if patch:
+ WriteStringToFile(patch, BISECT_PATCH_FILE)
+ patch_content = BISECT_PATCH_FILE
+
+ try_command = [
+ 'try',
+ '--bot=%s' % builder_name,
+ '--revision=%s' % git_revision,
+ '--name=%s' % job_name,
+ '--svn_repo=%s' % _TryJobSvnRepo(builder_type),
+ '--diff=%s' % patch_content,
+ ]
+ # Execute try job to build revision.
+ print try_command
+ output, return_code = bisect_utils.RunGit(try_command)
+
+ command_string = ' '.join(['git'] + try_command)
+ if return_code:
+ raise RunGitError('Could not execute try job: %s.\n'
+ 'Error: %s' % (command_string, output))
+ logging.info('Try job successfully submitted.\n TryJob Details: %s\n%s',
+ command_string, output)
+ finally:
+ # Delete patch file if exists.
+ try:
+ os.remove(BISECT_PATCH_FILE)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ # Checkout master branch and delete bisect-tryjob branch.
+ bisect_utils.RunGit(['checkout', '-f', BISECT_MASTER_BRANCH])
+ bisect_utils.RunGit(['branch', '-D', BISECT_TRYJOB_BRANCH])
+
+
def _TryJobSvnRepo(builder_type):
  """Returns an SVN repo to use for try jobs based on the builder type."""
  # Table of builder type -> try job SVN repository URL.
  repo_urls = {
      fetch_build.PERF_BUILDER: PERF_SVN_REPO_URL,
      fetch_build.FULL_BUILDER: FULL_SVN_REPO_URL,
      fetch_build.ANDROID_CHROME_PERF_BUILDER: ANDROID_CHROME_SVN_REPO_URL,
  }
  try:
    return repo_urls[builder_type]
  except KeyError:
    raise NotImplementedError('Unknown builder type "%s".' % builder_type)
+
+
+class BisectPerformanceMetrics(object):
+ """This class contains functionality to perform a bisection of a range of
+ revisions to narrow down where performance regressions may have occurred.
+
+ The main entry-point is the Run method.
+ """
+
  def __init__(self, opts, src_cwd):
    """Constructs a BisectPerformancesMetrics object.

    Args:
      opts: BisectOptions object containing parsed options.
      src_cwd: Root src/ directory of the test repository (inside bisect/ dir).
    """
    super(BisectPerformanceMetrics, self).__init__()

    self.opts = opts
    self.src_cwd = src_cwd
    # Maps depot names to their checkout directories.
    self.depot_registry = DepotDirectoryRegistry(self.src_cwd)
    self.printer = BisectPrinter(self.opts, self.depot_registry)
    # Deferred cleanup commands, executed by PerformCleanup; each entry is
    # a sequence whose first element names the operation (currently only 'mv').
    self.cleanup_commands = []
    # Human-readable warnings accumulated over the bisect run.
    self.warnings = []
    self.builder = builder.Builder.FromOpts(opts)
+
+ def PerformCleanup(self):
+ """Performs cleanup when script is finished."""
+ os.chdir(self.src_cwd)
+ for c in self.cleanup_commands:
+ if c[0] == 'mv':
+ shutil.move(c[1], c[2])
+ else:
+ assert False, 'Invalid cleanup command.'
+
+ def GetRevisionList(self, depot, bad_revision, good_revision):
+ """Retrieves a list of all the commits between the bad revision and
+ last known good revision."""
+
+ cwd = self.depot_registry.GetDepotDir(depot)
+ return source_control.GetRevisionList(bad_revision, good_revision, cwd=cwd)
+
+ def _ParseRevisionsFromDEPSFile(self, depot):
+ """Parses the local DEPS file to determine blink/skia/v8 revisions which may
+ be needed if the bisect recurses into those depots later.
+
+ Args:
+ depot: Name of depot being bisected.
+
+ Returns:
+ A dict in the format {depot:revision} if successful, otherwise None.
+ """
+ try:
+ deps_data = {
+ 'Var': lambda _: deps_data["vars"][_],
+ 'From': lambda *args: None,
+ }
+
+ deps_file = bisect_utils.FILE_DEPS_GIT
+ if not os.path.exists(deps_file):
+ deps_file = bisect_utils.FILE_DEPS
+ execfile(deps_file, {}, deps_data)
+ deps_data = deps_data['deps']
+
+ rxp = re.compile(".git@(?P<revision>[a-fA-F0-9]+)")
+ results = {}
+ for depot_name, depot_data in bisect_utils.DEPOT_DEPS_NAME.iteritems():
+ if (depot_data.get('platform') and
+ depot_data.get('platform') != os.name):
+ continue
+
+ if depot_data.get('recurse') and depot in depot_data.get('from'):
+ depot_data_src = depot_data.get('src') or depot_data.get('src_old')
+ src_dir = deps_data.get(depot_data_src)
+ if src_dir:
+ self.depot_registry.SetDepotDir(depot_name, os.path.join(
+ self.src_cwd, depot_data_src[4:]))
+ re_results = rxp.search(src_dir)
+ if re_results:
+ results[depot_name] = re_results.group('revision')
+ else:
+ warning_text = ('Could not parse revision for %s while bisecting '
+ '%s' % (depot_name, depot))
+ if not warning_text in self.warnings:
+ self.warnings.append(warning_text)
+ else:
+ results[depot_name] = None
+ return results
+ except ImportError:
+ deps_file_contents = ReadStringFromFile(deps_file)
+ parse_results = _ParseRevisionsFromDEPSFileManually(deps_file_contents)
+ results = {}
+ for depot_name, depot_revision in parse_results.iteritems():
+ depot_revision = depot_revision.strip('@')
+ logging.warn(depot_name, depot_revision)
+ for cur_name, cur_data in bisect_utils.DEPOT_DEPS_NAME.iteritems():
+ if cur_data.get('deps_var') == depot_name:
+ src_name = cur_name
+ results[src_name] = depot_revision
+ break
+ return results
+
  def _Get3rdPartyRevisions(self, depot):
    """Parses the DEPS file to determine WebKit/v8/etc... versions.

    Args:
      depot: A depot name. Should be in the DEPOT_NAMES list.

    Returns:
      A dict in the format {depot: revision} if successful, otherwise None.
    """
    cwd = os.getcwd()
    self.depot_registry.ChangeToDepotDir(depot)

    results = {}

    if depot == 'chromium' or depot == 'android-chrome':
      results = self._ParseRevisionsFromDEPSFile(depot)
      # NOTE(review): the original working directory is restored only in this
      # branch; for other depots the process stays in the depot directory.
      # Confirm whether callers rely on that.
      os.chdir(cwd)

    if depot == 'v8':
      # We can't try to map the trunk revision to bleeding edge yet, because
      # we don't know which direction to try to search in. Have to wait until
      # the bisect has narrowed the results down to 2 v8 rolls.
      results['v8_bleeding_edge'] = None

    return results
+
+ def BackupOrRestoreOutputDirectory(self, restore=False, build_type='Release'):
+ """Backs up or restores build output directory based on restore argument.
+
+ Args:
+ restore: Indicates whether to restore or backup. Default is False(Backup)
+ build_type: Target build type ('Release', 'Debug', 'Release_x64' etc.)
+
+ Returns:
+ Path to backup or restored location as string. otherwise None if it fails.
+ """
+ build_dir = os.path.abspath(
+ builder.GetBuildOutputDirectory(self.opts, self.src_cwd))
+ source_dir = os.path.join(build_dir, build_type)
+ destination_dir = os.path.join(build_dir, '%s.bak' % build_type)
+ if restore:
+ source_dir, destination_dir = destination_dir, source_dir
+ if os.path.exists(source_dir):
+ RemoveDirectoryTree(destination_dir)
+ shutil.move(source_dir, destination_dir)
+ return destination_dir
+ return None
+
  def _DownloadAndUnzipBuild(self, revision, depot, build_type='Release',
                             create_patch=False):
    """Downloads the build archive for the given revision.

    For dependency depots this first rewrites DEPS to pin the dependency at
    the requested revision, then requests/downloads a build of the resulting
    patched checkout and unpacks it into the build output directory.

    Args:
      revision: The git revision to download.
      depot: The name of a dependency repository. Should be in DEPOT_NAMES.
      build_type: Target build type, e.g. 'Release', 'Debug', 'Release_x64'.
      create_patch: Create a patch with any locally modified files.

    Returns:
      True if download succeeds, otherwise False.
    """
    patch = None
    patch_sha = None
    if depot not in ('chromium', 'android-chrome'):
      # Create a DEPS patch with new revision for dependency repository.
      self._CreateDEPSPatch(depot, revision)
      create_patch = True

    if create_patch:
      # Note: this replaces `revision` with the current chromium HEAD hash.
      revision, patch = self._CreatePatch(revision)

    if patch:
      # Get the SHA of the DEPS changes patch.
      patch_sha = GetSHA1HexDigest(patch)

      # Update the DEPS changes patch with a patch to create a new file named
      # 'DEPS.sha' and add patch_sha evaluated above to it.
      patch = '%s\n%s' % (patch, DEPS_SHA_PATCH % {'deps_sha': patch_sha})

    build_dir = builder.GetBuildOutputDirectory(self.opts, self.src_cwd)
    downloaded_file = self._WaitForBuildDownload(
        revision, build_dir, deps_patch=patch, deps_patch_sha=patch_sha)
    if not downloaded_file:
      return False
    return self._UnzipAndMoveBuildProducts(downloaded_file, build_dir,
                                           build_type=build_type)
+
  def _WaitForBuildDownload(self, revision, build_dir, deps_patch=None,
                            deps_patch_sha=None):
    """Tries to download a zip archive for a build.

    This involves seeing whether the archive is already available, and if not,
    then requesting a build and waiting before downloading.

    Args:
      revision: A git commit hash.
      build_dir: The directory to download the build into.
      deps_patch: A patch which changes a dependency repository revision in
          the DEPS, if applicable.
      deps_patch_sha: The SHA1 hex digest of the above patch.

    Returns:
      File path of the downloaded file if successful, otherwise None.
    """
    bucket_name, remote_path = fetch_build.GetBucketAndRemotePath(
        revision, builder_type=self.opts.builder_type,
        target_arch=self.opts.target_arch,
        target_platform=self.opts.target_platform,
        deps_patch_sha=deps_patch_sha,
        extra_src=self.opts.extra_src)
    output_dir = os.path.abspath(build_dir)
    # Deferred download: invoked immediately if the archive already exists,
    # otherwise passed to _RequestBuildAndWait to run once the build is ready.
    fetch_build_func = lambda: fetch_build.FetchFromCloudStorage(
        bucket_name, remote_path, output_dir)

    is_available = fetch_build.BuildIsAvailable(bucket_name, remote_path)
    if is_available:
      return fetch_build_func()

    # When build archive doesn't exist, make a request and wait.
    return self._RequestBuildAndWait(
        revision, fetch_build_func, deps_patch=deps_patch)
+
  def _RequestBuildAndWait(self, git_revision, fetch_build_func,
                           deps_patch=None):
    """Triggers a try job for a build job.

    This function prepares and starts a try job for a builder, and waits for
    the archive to be produced and archived. Once the build is ready it is
    downloaded.

    For performance tests, builders on the tryserver.chromium.perf are used.

    TODO(qyearsley): Make this function take "builder_type" as a parameter
    and make requests to different bot names based on that parameter.

    Args:
      git_revision: A git commit hash.
      fetch_build_func: Function to check and download build from cloud storage.
      deps_patch: DEPS patch string, used when bisecting dependency repos.

    Returns:
      Downloaded archive file path when requested build exists and download is
      successful, otherwise None.
    """
    if not fetch_build_func:
      return None

    # Create a unique ID for each build request posted to try server builders.
    # This ID is added to "Reason" property of the build.
    build_request_id = GetSHA1HexDigest(
        '%s-%s-%s' % (git_revision, deps_patch, time.time()))

    # Revert any changes to DEPS file.
    bisect_utils.CheckRunGit(['reset', '--hard', 'HEAD'], cwd=self.src_cwd)

    builder_name, build_timeout = fetch_build.GetBuilderNameAndBuildTime(
        builder_type=self.opts.builder_type,
        target_arch=self.opts.target_arch,
        target_platform=self.opts.target_platform,
        extra_src=self.opts.extra_src)

    try:
      _StartBuilderTryJob(self.opts.builder_type, git_revision, builder_name,
                          job_name=build_request_id, patch=deps_patch)
    except RunGitError as e:
      # A failed try-job submission is reported but not fatal to the bisect.
      logging.warn('Failed to post builder try job for revision: [%s].\n'
                   'Error: %s', git_revision, e)
      return None

    # Get the buildbot master URL to monitor build status.
    buildbot_server_url = fetch_build.GetBuildBotUrl(
        builder_type=self.opts.builder_type,
        target_arch=self.opts.target_arch,
        target_platform=self.opts.target_platform,
        extra_src=self.opts.extra_src)

    archive_filename, error_msg = _WaitUntilBuildIsReady(
        fetch_build_func, builder_name, build_request_id, build_timeout,
        buildbot_server_url)
    if not archive_filename:
      logging.warn('%s [revision: %s]', error_msg, git_revision)
    return archive_filename
+
  def _UnzipAndMoveBuildProducts(self, downloaded_file, build_dir,
                                 build_type='Release'):
    """Unzips the build archive and moves it to the build output directory.

    The build output directory is wherever the binaries are expected to
    be in order to start Chrome and run tests.

    TODO: Simplify and clarify this method if possible.

    Args:
      downloaded_file: File path of the downloaded zip file.
      build_dir: Directory where the the zip file was downloaded to.
      build_type: "Release" or "Debug".

    Returns:
      True if successful, False otherwise.
    """
    abs_build_dir = os.path.abspath(build_dir)
    output_dir = os.path.join(abs_build_dir, self.GetZipFileBuildDirName())
    logging.info('EXPERIMENTAL RUN, _UnzipAndMoveBuildProducts locals %s',
                 str(locals()))

    try:
      # Clear any stale extracted directory, then back up the current build
      # output so it can be restored if extraction fails.
      RemoveDirectoryTree(output_dir)
      self.BackupOrRestoreOutputDirectory(restore=False)
      # Build output directory based on target(e.g. out/Release, out/Debug).
      target_build_output_dir = os.path.join(abs_build_dir, build_type)

      logging.info('Extracting "%s" to "%s"', downloaded_file, abs_build_dir)
      fetch_build.Unzip(downloaded_file, abs_build_dir)

      if not os.path.exists(output_dir):
        # Due to recipe changes, the builds extract folder contains
        # out/Release instead of full-build-<platform>/Release.
        if os.path.exists(os.path.join(abs_build_dir, 'out', build_type)):
          output_dir = os.path.join(abs_build_dir, 'out', build_type)
        else:
          raise IOError('Missing extracted folder %s ' % output_dir)

      logging.info('Moving build from %s to %s',
                   output_dir, target_build_output_dir)
      shutil.move(output_dir, target_build_output_dir)
      return True
    except Exception as e:
      logging.info('Something went wrong while extracting archive file: %s', e)
      self.BackupOrRestoreOutputDirectory(restore=True)
      # Cleanup any leftovers from unzipping.
      if os.path.exists(output_dir):
        RemoveDirectoryTree(output_dir)
    finally:
      # Delete downloaded archive
      if os.path.exists(downloaded_file):
        os.remove(downloaded_file)
    # Reached only on failure: the success path returned True inside the try
    # block (the finally clause still removed the archive in both cases).
    return False
+
+ @staticmethod
+ def GetZipFileBuildDirName():
+ """Gets the base file name of the zip file.
+
+ After extracting the zip file, this is the name of the directory where
+ the build files are expected to be. Possibly.
+
+ TODO: Make sure that this returns the actual directory name where the
+ Release or Debug directory is inside of the zip files. This probably
+ depends on the builder recipe, and may depend on whether the builder is
+ a perf builder or full builder.
+
+ Returns:
+ The name of the directory inside a build archive which is expected to
+ contain a Release or Debug directory.
+ """
+ if bisect_utils.IsWindowsHost():
+ return 'full-build-win32'
+ if bisect_utils.IsLinuxHost():
+ return 'full-build-linux'
+ if bisect_utils.IsMacHost():
+ return 'full-build-mac'
+ raise NotImplementedError('Unknown platform "%s".' % sys.platform)
+
+ def IsDownloadable(self, depot):
+ """Checks if build can be downloaded based on target platform and depot."""
+ if (self.opts.target_platform in ['chromium', 'android', 'android-chrome']
+ and self.opts.builder_type):
+ # In case of android-chrome platform, download archives only for
+ # android-chrome depot; for other depots such as chromium, v8, skia
+ # etc., build the binary locally.
+ if self.opts.target_platform == 'android-chrome':
+ return depot == 'android-chrome'
+ else:
+ return (depot == 'chromium' or
+ 'chromium' in bisect_utils.DEPOT_DEPS_NAME[depot]['from'] or
+ 'v8' in bisect_utils.DEPOT_DEPS_NAME[depot]['from'])
+ return False
+
  def UpdateDepsContents(self, deps_contents, depot, git_revision, deps_key):
    """Returns modified version of DEPS file contents.

    Args:
      deps_contents: DEPS file content.
      depot: Current depot being bisected.
      git_revision: A git hash to be updated in DEPS.
      deps_key: Key in vars section of DEPS file to be searched.

    Returns:
      Updated DEPS content as string if deps key is found, otherwise None.
    """
    # Check whether the depot and revision pattern in DEPS file vars
    # e.g. for webkit the format is "webkit_revision": "12345".
    deps_revision = re.compile(r'(?<="%s": ")([0-9]+)(?=")' % deps_key,
                               re.MULTILINE)
    new_data = None
    if re.search(deps_revision, deps_contents):
      # Numeric value: DEPS pins a commit position, so the git hash must be
      # translated to a commit position before substitution.
      commit_position = source_control.GetCommitPosition(
          git_revision, self.depot_registry.GetDepotDir(depot))
      if not commit_position:
        logging.warn('Could not determine commit position for %s', git_revision)
        return None
      # Update the revision information for the given depot
      new_data = re.sub(deps_revision, str(commit_position), deps_contents)
    else:
      # Check whether the depot and revision pattern in DEPS file vars
      # e.g. for webkit the format is "webkit_revision": "559a6d4ab7a84c539..".
      deps_revision = re.compile(
          r'(?<=["\']%s["\']: ["\'])([a-fA-F0-9]{40})(?=["\'])' % deps_key,
          re.MULTILINE)
      if re.search(deps_revision, deps_contents):
        new_data = re.sub(deps_revision, git_revision, deps_contents)
    if new_data:
      # For v8_bleeding_edge revisions change V8 branch in order
      # to fetch bleeding edge revision.
      if depot == 'v8_bleeding_edge':
        new_data = _UpdateV8Branch(new_data)
        if not new_data:
          return None
    return new_data
+
  def UpdateDeps(self, revision, depot, deps_file):
    """Updates DEPS file with new revision of dependency repository.

    This method searches DEPS for a particular pattern in which depot revision
    is specified (e.g. "webkit_revision": "123456"). If a match is found then
    it resolves the given git hash to an SVN revision and replaces it in the
    DEPS file.

    Args:
      revision: A git hash revision of the dependency repository.
      depot: Current depot being bisected.
      deps_file: Path to DEPS file.

    Returns:
      True if DEPS file is modified successfully, otherwise False.
    """
    if not os.path.exists(deps_file):
      return False

    deps_var = bisect_utils.DEPOT_DEPS_NAME[depot]['deps_var']
    # Don't update DEPS file if deps_var is not set in DEPOT_DEPS_NAME.
    if not deps_var:
      logging.warn('DEPS update not supported for Depot: %s', depot)
      return False

    # Hack for Angle repository. In the DEPS file, "vars" dictionary variable
    # contains "angle_revision" key that holds git hash instead of SVN revision.
    # And sometime "angle_revision" key is not specified in "vars" variable.
    # In such cases check, "deps" dictionary variable that matches
    # angle.git@[a-fA-F0-9]{40}$ and replace git hash.
    if depot == 'angle':
      return _UpdateDEPSForAngle(revision, depot, deps_file)

    try:
      deps_contents = ReadStringFromFile(deps_file)
      updated_deps_content = self.UpdateDepsContents(
          deps_contents, depot, revision, deps_var)
      # Write changes to DEPS file
      if updated_deps_content:
        WriteStringToFile(updated_deps_content, deps_file)
        return True
    except IOError, e:
      logging.warn('Something went wrong while updating DEPS file. [%s]', e)
    # Falls through here when the key was not found or an IOError occurred.
    return False
+
  def _CreateDEPSPatch(self, depot, revision):
    """Checks out the DEPS file at the specified revision and modifies it.

    Args:
      depot: Current depot being bisected.
      revision: A git hash revision of the dependency repository.

    Raises:
      RuntimeError: The DEPS file is missing, the current Chromium revision
          cannot be determined, or the DEPS checkout/update fails.
    """
    deps_file_path = os.path.join(self.src_cwd, bisect_utils.FILE_DEPS)
    if not os.path.exists(deps_file_path):
      raise RuntimeError('DEPS file does not exists.[%s]' % deps_file_path)
    # Get current chromium revision (git hash).
    cmd = ['rev-parse', 'HEAD']
    chromium_sha = bisect_utils.CheckRunGit(cmd).strip()
    if not chromium_sha:
      raise RuntimeError('Failed to determine Chromium revision for %s' %
                         revision)
    # Only depots rolled via chromium's or v8's DEPS can be patched this way.
    if ('chromium' in bisect_utils.DEPOT_DEPS_NAME[depot]['from'] or
        'v8' in bisect_utils.DEPOT_DEPS_NAME[depot]['from']):
      # Checkout DEPS file for the current chromium revision.
      if not source_control.CheckoutFileAtRevision(
          bisect_utils.FILE_DEPS, chromium_sha, cwd=self.src_cwd):
        raise RuntimeError(
            'DEPS checkout Failed for chromium revision : [%s]' % chromium_sha)

      if not self.UpdateDeps(revision, depot, deps_file_path):
        raise RuntimeError(
            'Failed to update DEPS file for chromium: [%s]' % chromium_sha)
+
  def _CreatePatch(self, revision):
    """Creates a patch from currently modified files.

    Args:
      revision: A git hash revision of the dependency repository; used only
          in the error message when HEAD cannot be resolved.

    Returns:
      A tuple with git hash of chromium revision and DEPS patch text.

    Raises:
      RuntimeError: The current Chromium revision cannot be determined.
    """
    # Get current chromium revision (git hash).
    chromium_sha = bisect_utils.CheckRunGit(['rev-parse', 'HEAD']).strip()
    if not chromium_sha:
      raise RuntimeError('Failed to determine Chromium revision for %s' %
                         revision)
    # Diff all local modifications against HEAD, with prefixes stripped so
    # the output can be applied as a try-job patch.
    diff_command = [
        'diff',
        '--src-prefix=',
        '--dst-prefix=',
        '--no-ext-diff',
        'HEAD',
    ]
    diff_text = bisect_utils.CheckRunGit(diff_command)
    return (chromium_sha, ChangeBackslashToSlashInPatch(diff_text))
+
  def ObtainBuild(
      self, depot, revision=None, create_patch=False):
    """Obtains a build by either downloading or building directly.

    Args:
      depot: Dependency repository name.
      revision: A git commit hash. If None is given, the currently checked-out
          revision is built.
      create_patch: Create a patch with any locally modified files.

    Returns:
      True for success.
    """
    if self.opts.debug_ignore_build:
      return True

    build_success = False
    cwd = os.getcwd()
    os.chdir(self.src_cwd)
    # Fetch build archive for the given revision from the cloud storage when
    # the storage bucket is passed.
    if self.IsDownloadable(depot) and revision:
      build_success = self._DownloadAndUnzipBuild(
          revision, depot, build_type='Release', create_patch=create_patch)
    else:
      # Print the current environment set on the machine.
      print 'Full Environment:'
      for key, value in sorted(os.environ.items()):
        print '%s: %s' % (key, value)
      # Print the environment before proceeding with compile.
      sys.stdout.flush()
      build_success = self.builder.Build(depot, self.opts)
    os.chdir(cwd)
    return build_success
+
+ def RunGClientHooks(self):
+ """Runs gclient with runhooks command.
+
+ Returns:
+ True if gclient reports no errors.
+ """
+ if self.opts.debug_ignore_build:
+ return True
+ # Some "runhooks" calls create symlinks that other (older?) versions
+ # do not handle correctly causing the build to fail. We want to avoid
+ # clearing the entire out/ directory so that changes close together will
+ # build faster so we just clear out all symlinks on the expectation that
+ # the next "runhooks" call will recreate everything properly. Ignore
+ # failures (like Windows that doesn't have "find").
+ try:
+ bisect_utils.RunProcess(
+ ['find', 'out/', '-type', 'l', '-exec', 'rm', '-f', '{}', ';'],
+ cwd=self.src_cwd, shell=False)
+ except OSError:
+ pass
+ return not bisect_utils.RunGClient(['runhooks'], cwd=self.src_cwd)
+
+ def _IsBisectModeUsingMetric(self):
+ return self.opts.bisect_mode in [bisect_utils.BISECT_MODE_MEAN,
+ bisect_utils.BISECT_MODE_STD_DEV]
+
+ def _IsBisectModeReturnCode(self):
+ return self.opts.bisect_mode in [bisect_utils.BISECT_MODE_RETURN_CODE]
+
+ def _IsBisectModeStandardDeviation(self):
+ return self.opts.bisect_mode in [bisect_utils.BISECT_MODE_STD_DEV]
+
  def RunPerformanceTestAndParseResults(
      self, command_to_run, metric, reset_on_first_run=False,
      upload_on_last_run=False, results_label=None, test_run_multiplier=1,
      allow_flakes=True):
    """Runs a performance test on the current revision and parses the results.

    Args:
      command_to_run: The command to be run to execute the performance test.
      metric: The metric to parse out from the results of the performance test.
          This is the result chart name and trace name, separated by slash.
          May be None for perf try jobs.
      reset_on_first_run: If True, pass the flag --reset-results on first run.
      upload_on_last_run: If True, pass the flag --upload-results on last run.
      results_label: A value for the option flag --results-label.
          The arguments reset_on_first_run, upload_on_last_run and results_label
          are all ignored if the test is not a Telemetry test.
      test_run_multiplier: Factor by which to multiply the number of test runs
          and the timeout period specified in self.opts.
      allow_flakes: Report success even if some tests fail to run.

    Returns:
      (values dict, 0) if --debug_ignore_perf_test was passed.
      (values dict, 0, test output) if the test was run successfully.
      (error message, -1) if the test couldn't be run.
      (error message, -1, test output) if the test ran but there was an error.
    """
    success_code, failure_code = 0, -1

    if self.opts.debug_ignore_perf_test:
      fake_results = {
          'mean': 0.0,
          'std_err': 0.0,
          'std_dev': 0.0,
          'values': [0.0]
      }

      # When debug_fake_test_mean is set, its value is returned as the mean
      # and the flag is cleared so that further calls behave as if it wasn't
      # set (returning the fake_results dict as defined above).
      if self.opts.debug_fake_first_test_mean:
        fake_results['mean'] = float(self.opts.debug_fake_first_test_mean)
        self.opts.debug_fake_first_test_mean = 0

      return (fake_results, success_code)

    # For Windows platform set posix=False, to parse windows paths correctly.
    # On Windows, path separators '\' or '\\' are replace by '' when posix=True,
    # refer to http://bugs.python.org/issue1724822. By default posix=True.
    args = shlex.split(command_to_run, posix=not bisect_utils.IsWindowsHost())

    if not _GenerateProfileIfNecessary(args):
      err_text = 'Failed to generate profile for performance test.'
      return (err_text, failure_code)

    is_telemetry = bisect_utils.IsTelemetryCommand(command_to_run)

    start_time = time.time()

    metric_values = []
    output_of_all_runs = ''
    repeat_count = self.opts.repeat_test_count * test_run_multiplier
    return_codes = []
    for i in xrange(repeat_count):
      # Can ignore the return code since if the tests fail, it won't return 0.
      current_args = copy.copy(args)
      if is_telemetry:
        if i == 0 and reset_on_first_run:
          current_args.append('--reset-results')
        # NOTE(review): this compares against repeat_test_count - 1, not
        # repeat_count - 1, so with test_run_multiplier > 1 the upload flag
        # is appended before the final run — confirm whether that's intended.
        if i == self.opts.repeat_test_count - 1 and upload_on_last_run:
          current_args.append('--upload-results')
        if results_label:
          current_args.append('--results-label=%s' % results_label)
      try:
        output, return_code = bisect_utils.RunProcessAndRetrieveOutput(
            current_args, cwd=self.src_cwd)
        return_codes.append(return_code)
      except OSError, e:
        # ENOENT usually means the command itself could not be found; give
        # the user a hint about likely command-line mistakes.
        if e.errno == errno.ENOENT:
          err_text = ('Something went wrong running the performance test. '
                      'Please review the command line:\n\n')
          if 'src/' in ' '.join(args):
            err_text += ('Check that you haven\'t accidentally specified a '
                         'path with src/ in the command.\n\n')
          err_text += ' '.join(args)
          err_text += '\n'

          return (err_text, failure_code)
        raise

      output_of_all_runs += output
      if self.opts.output_buildbot_annotations:
        print output

      if metric and self._IsBisectModeUsingMetric():
        parsed_metric = _ParseMetricValuesFromOutput(metric, output)
        if parsed_metric:
          metric_values += parsed_metric
        # If we're bisecting on a metric (ie, changes in the mean or
        # standard deviation) and no metric values are produced, bail out.
        if not metric_values:
          break
      elif self._IsBisectModeReturnCode():
        metric_values.append(return_code)
        # If there's a failed test, we can bail out early.
        if return_code:
          break

      # Stop repeating once the (multiplied) time budget is used up.
      elapsed_minutes = (time.time() - start_time) / 60.0
      time_limit = self.opts.max_time_minutes * test_run_multiplier
      if elapsed_minutes >= time_limit:
        break

    if metric and len(metric_values) == 0:
      err_text = 'Metric %s was not found in the test output.' % metric
      # TODO(qyearsley): Consider also getting and displaying a list of metrics
      # that were found in the output here.
      return (err_text, failure_code, output_of_all_runs)

    # If we're bisecting on return codes, we're really just looking for zero vs
    # non-zero.
    values = {}
    if self._IsBisectModeReturnCode():
      # If any of the return codes is non-zero, output 1.
      overall_return_code = 0 if (
          all(current_value == 0 for current_value in metric_values)) else 1

      values = {
          'mean': overall_return_code,
          'std_err': 0.0,
          'std_dev': 0.0,
          'values': metric_values,
      }

      print 'Results of performance test: Command returned with %d' % (
          overall_return_code)
      print
    elif metric:
      # Need to get the average value if there were multiple values.
      truncated_mean = math_utils.TruncatedMean(
          metric_values, self.opts.truncate_percent)
      standard_err = math_utils.StandardError(metric_values)
      standard_dev = math_utils.StandardDeviation(metric_values)

      if self._IsBisectModeStandardDeviation():
        metric_values = [standard_dev]

      values = {
          'mean': truncated_mean,
          'std_err': standard_err,
          'std_dev': standard_dev,
          'values': metric_values,
      }

      print 'Results of performance test: %12f %12f' % (
          truncated_mean, standard_err)
      print

    # Unless flakes are allowed, any non-zero per-run return code turns the
    # overall result into a failure (return-code mode already encodes this).
    overall_success = success_code
    if not allow_flakes and not self._IsBisectModeReturnCode():
      overall_success = (
          success_code
          if (all(current_value == 0 for current_value in return_codes))
          else failure_code)

    return (values, overall_success, output_of_all_runs)
+
+ def PerformPreBuildCleanup(self):
+ """Performs cleanup between runs."""
+ print 'Cleaning up between runs.'
+ print
+
+ # Leaving these .pyc files around between runs may disrupt some perf tests.
+ for (path, _, files) in os.walk(self.src_cwd):
+ for cur_file in files:
+ if cur_file.endswith('.pyc'):
+ path_to_file = os.path.join(path, cur_file)
+ os.remove(path_to_file)
+
  def _RunPostSync(self, _depot):
    """Performs any work after syncing.

    Args:
      _depot: Depot name (currently unused).

    Returns:
      True if successful.
    """
    # Android targets need their build environment configured before hooks.
    if 'android' in self.opts.target_platform:
      if not builder.SetupAndroidBuildEnvironment(
          self.opts, path_to_src=self.src_cwd):
        return False

    return self.RunGClientHooks()
+
+ @staticmethod
+ def ShouldSkipRevision(depot, revision):
+ """Checks whether a particular revision can be safely skipped.
+
+ Some commits can be safely skipped (such as a DEPS roll for the repos
+ still using .DEPS.git), since the tool is git based those changes
+ would have no effect.
+
+ Args:
+ depot: The depot being bisected.
+ revision: Current revision we're synced to.
+
+ Returns:
+ True if we should skip building/testing this revision.
+ """
+ # Skips revisions with DEPS on android-chrome.
+ if depot == 'android-chrome':
+ cmd = ['diff-tree', '--no-commit-id', '--name-only', '-r', revision]
+ output = bisect_utils.CheckRunGit(cmd)
+
+ files = output.splitlines()
+
+ if len(files) == 1 and files[0] == 'DEPS':
+ return True
+
+ return False
+
  def RunTest(self, revision, depot, command, metric, skippable=False,
              skip_sync=False, create_patch=False, force_build=False,
              test_run_multiplier=1):
    """Performs a full sync/build/run of the specified revision.

    Args:
      revision: The revision to sync to.
      depot: The depot that's being used at the moment (src, webkit, etc.)
      command: The command to execute the performance test.
      metric: The performance metric being tested.
      skippable: If True, skip revisions that ShouldSkipRevision reports as
          safely skippable.
      skip_sync: Skip the sync step.
      create_patch: Create a patch with any locally modified files.
      force_build: Force a local build.
      test_run_multiplier: Factor by which to multiply the given number of runs
          and the set timeout period.

    Returns:
      On success, a tuple containing the results of the performance test.
      Otherwise, a tuple with the error message.
    """
    logging.info('Running RunTest with rev "%s", command "%s"',
                 revision, command)
    # Decide which sync program to use.
    sync_client = None
    if depot == 'chromium' or depot == 'android-chrome':
      sync_client = 'gclient'

    # Do the syncing for all depots.
    if not (self.opts.debug_ignore_sync or skip_sync):
      if not self._SyncRevision(depot, revision, sync_client):
        return ('Failed to sync: [%s]' % str(revision), BUILD_RESULT_FAIL)

    # Try to do any post-sync steps. This may include "gclient runhooks".
    if not self._RunPostSync(depot):
      return ('Failed to run [gclient runhooks].', BUILD_RESULT_FAIL)

    # Skip this revision if it can be skipped.
    if skippable and self.ShouldSkipRevision(depot, revision):
      return ('Skipped revision: [%s]' % str(revision),
              BUILD_RESULT_SKIPPED)

    # Obtain a build for this revision. This may be done by requesting a build
    # from another builder, waiting for it and downloading it.
    start_build_time = time.time()
    revision_to_build = revision if not force_build else None
    build_success = self.ObtainBuild(
        depot, revision=revision_to_build, create_patch=create_patch)
    if not build_success:
      return ('Failed to build revision: [%s]' % str(revision),
              BUILD_RESULT_FAIL)
    after_build_time = time.time()

    # Run the command and get the results.
    results = self.RunPerformanceTestAndParseResults(
        command, metric, test_run_multiplier=test_run_multiplier)

    # Restore build output directory once the tests are done, to avoid
    # any discrepancies.
    if self.IsDownloadable(depot) and revision:
      self.BackupOrRestoreOutputDirectory(restore=True)

    # A value other than 0 indicates that the test couldn't be run, and results
    # should also include an error message.
    if results[1] != 0:
      return results

    external_revisions = self._Get3rdPartyRevisions(depot)

    if not external_revisions is None:
      # Success tuple: (values, status, external revisions, test duration,
      # build duration).
      return (results[0], results[1], external_revisions,
              time.time() - after_build_time, after_build_time -
              start_build_time)
    else:
      return ('Failed to parse DEPS file for external revisions.',
              BUILD_RESULT_FAIL)
+
  def _SyncRevision(self, depot, revision, sync_client):
    """Syncs depot to particular revision.

    Args:
      depot: The depot that's being used at the moment (src, webkit, etc.)
      revision: The revision to sync to.
      sync_client: Program used to sync, e.g. "gclient". Can be None.

    Returns:
      True if successful, False otherwise.
    """
    self.depot_registry.ChangeToDepotDir(depot)

    if sync_client:
      self.PerformPreBuildCleanup()

    # When using gclient to sync, you need to specify the depot you
    # want so that all the dependencies sync properly as well.
    # i.e. gclient sync src@<SHA1>
    if sync_client == 'gclient' and revision:
      revision = '%s@%s' % (bisect_utils.DEPOT_DEPS_NAME[depot]['src'],
                            revision)
      # android-chrome needs both repos synced explicitly; see
      # _SyncRevisionsForAndroidChrome for why.
      if depot == 'chromium' and self.opts.target_platform == 'android-chrome':
        return self._SyncRevisionsForAndroidChrome(revision)

    return source_control.SyncToRevision(revision, sync_client)
+
def _SyncRevisionsForAndroidChrome(self, revision):
  """Syncs android-chrome and chromium repos to particular revision.

  This is a special case for android-chrome: the gclient sync for chromium
  overwrites the android-chrome revision to TOT, so both repos are pinned
  to known revisions here.

  Args:
    revision: Git hash of the Chromium to sync.

  Returns:
    True if successful, False otherwise.
  """
  current_android_rev = source_control.GetCurrentRevision(
      self.depot_registry.GetDepotDir('android-chrome'))
  revisions_list = [
      revision,
      '%s@%s' % (bisect_utils.DEPOT_DEPS_NAME['android-chrome']['src'],
                 current_android_rev),
  ]
  # RunGClientAndSync returns a non-zero exit code on failure.
  return not bisect_utils.RunGClientAndSync(revisions_list)
+
def _CheckIfRunPassed(self, current_value, known_good_value, known_bad_value):
  """Decides whether current_value counts as a pass or a fail.

  Args:
    current_value: The value of the metric being checked.
    known_good_value: The reference value for a "passed" run.
    known_bad_value: The reference value for a "failed" run.

  Returns:
    True if the current_value is closer to the known_good_value than the
    known_bad_value.
  """
  # In std-dev bisect mode compare the spread of the samples; in every
  # other mode compare the means.
  if self.opts.bisect_mode == bisect_utils.BISECT_MODE_STD_DEV:
    field = 'std_dev'
  else:
    field = 'mean'

  dist_to_good_value = abs(current_value[field] - known_good_value[field])
  dist_to_bad_value = abs(current_value[field] - known_bad_value[field])
  return dist_to_good_value < dist_to_bad_value
+
def _GetV8BleedingEdgeFromV8TrunkIfMappable(
    self, revision, bleeding_edge_branch):
  """Gets the v8 bleeding edge revision mapped to a v8 trunk revision.

  Args:
    revision: A trunk V8 revision mapped to bleeding edge revision.
    bleeding_edge_branch: Branch used to perform lookup of bleeding edge
        revision.

  Returns:
    A mapped bleeding edge revision if found, otherwise None.
  """
  commit_position = source_control.GetCommitPosition(revision)

  if bisect_utils.IsStringInt(commit_position):
    # V8 is tricky to bisect, in that there are only a few instances when
    # we can dive into bleeding_edge and get back a meaningful result.
    # Try to detect a V8 "business as usual" case, which is when:
    # 1. trunk revision N has description "Version X.Y.Z"
    # 2. bleeding_edge revision (N-1) has description "Prepare push to
    #    trunk. Now working on X.Y.(Z+1)."
    #
    # As of 01/24/2014, V8 trunk descriptions are formatted:
    # "Version 3.X.Y (based on bleeding_edge revision rZ)"
    # So try parsing that out first and fall back to the old way.
    v8_dir = self.depot_registry.GetDepotDir('v8')
    v8_bleeding_edge_dir = self.depot_registry.GetDepotDir('v8_bleeding_edge')

    revision_info = source_control.QueryRevisionInfo(revision, cwd=v8_dir)
    version_re = re.compile("Version (?P<values>[0-9,.]+)")
    version_match = version_re.search(revision_info['subject'])
    if version_match:
      git_revision = None
      if 'based on bleeding_edge' in revision_info['subject']:
        # Old-style subject carrying an SVN bleeding_edge revision number.
        try:
          svn_rev_text = revision_info['subject'].split(
              'bleeding_edge revision r')[1]
          svn_rev = int(svn_rev_text.split(')')[0])
          bleeding_edge_url = ('https://v8.googlecode.com/svn/branches/'
                               'bleeding_edge@%s' % svn_rev)
          # Find the single bleeding_edge commit whose message mentions
          # that SVN URL.
          grep_cmd = ['log',
                      '--format=%H',
                      '--grep',
                      bleeding_edge_url,
                      '-1',
                      bleeding_edge_branch]
          grep_output = bisect_utils.CheckRunGit(grep_cmd, cwd=v8_dir)
          if grep_output:
            git_revision = grep_output.strip()
          return git_revision
        except (IndexError, ValueError):
          pass
      else:
        # New-style subject after the V8 git migration:
        # "Version 3.X.Y (based on <git hash>)".
        try:
          hash_re = re.compile('based on (?P<git_revision>[a-fA-F0-9]+)')
          hash_match = hash_re.search(revision_info['subject'])
          if hash_match:
            return hash_match.group('git_revision')
        except (IndexError, ValueError):
          pass
      if not git_revision:
        # Wasn't successful, try the old way of looking for "Prepare push to"
        git_revision = source_control.ResolveToRevision(
            int(commit_position) - 1, 'v8_bleeding_edge',
            bisect_utils.DEPOT_DEPS_NAME, -1, cwd=v8_bleeding_edge_dir)

        if git_revision:
          revision_info = source_control.QueryRevisionInfo(
              git_revision, cwd=v8_bleeding_edge_dir)

          if 'Prepare push to trunk' in revision_info['subject']:
            return git_revision
  return None
+
def _GetNearestV8BleedingEdgeFromTrunk(
    self, revision, v8_branch, bleeding_edge_branch, search_forward=True):
  """Gets the nearest V8 roll and maps it to a bleeding edge revision.

  V8 is a bit tricky to bisect since it isn't just rolled out like blink.
  Each revision on trunk might just be whatever was in bleeding edge, rolled
  directly out. Or it could be some mixture of previous v8 trunk versions,
  with bits and pieces cherry picked out from bleeding edge. In order to
  bisect, we need both the before/after versions on trunk v8 to be just
  pushes from bleeding edge. With the V8 git migration, the branches got
  switched:
  a) master (external/v8) == candidates (v8/v8)
  b) bleeding_edge (external/v8) == master (v8/v8)

  Args:
    revision: A V8 revision to get its nearest bleeding edge revision.
    v8_branch: Trunk branch to scan for nearby commits.
    bleeding_edge_branch: Branch used for the bleeding edge lookup.
    search_forward: Searches forward if True, otherwise search backward.

  Returns:
    A mapped bleeding edge revision if found, otherwise None.
  """
  cwd = self.depot_registry.GetDepotDir('v8')
  commit_time = int(bisect_utils.CheckRunGit(
      ['log', '--format=%ct', '-1', revision], cwd=cwd))

  if search_forward:
    # The 10 commits immediately after the given commit.
    log_cmd = ['log',
               '--format=%H',
               '--after=%d' % commit_time,
               v8_branch,
               '--reverse']
    candidates = bisect_utils.CheckRunGit(log_cmd, cwd=cwd).split()[:10]
  else:
    # The 10 commits immediately before the given commit.
    log_cmd = ['log',
               '--format=%H',
               '-10',
               '--before=%d' % commit_time,
               v8_branch]
    candidates = bisect_utils.CheckRunGit(log_cmd, cwd=cwd).split()

  # Return the first nearby trunk commit that maps cleanly to bleeding_edge.
  for candidate in candidates:
    mapped_revision = self._GetV8BleedingEdgeFromV8TrunkIfMappable(
        candidate, bleeding_edge_branch)
    if mapped_revision:
      return mapped_revision
  return None
+
def _FillInV8BleedingEdgeInfo(self, min_revision_state, max_revision_state):
  """Fills in the v8_bleeding_edge mapping for both range endpoints.

  Args:
    min_revision_state: State of the earliest revision in the bisect range.
    max_revision_state: State of the latest revision in the bisect range.
  """
  cwd = self.depot_registry.GetDepotDir('v8')
  # Defaults for when "remote.origin.url" is
  # https://chromium.googlesource.com/v8/v8.git
  v8_branch = 'origin/candidates'
  bleeding_edge_branch = 'origin/master'

  # Chromium revisions using the external V8 repo
  # (https://chromium.googlesource.com/external/v8.git) use other branches.
  v8_repo_url = bisect_utils.CheckRunGit(
      ['config', '--get', 'remote.origin.url'], cwd=cwd)
  if 'external/v8.git' in v8_repo_url:
    v8_branch = 'origin/master'
    bleeding_edge_branch = 'origin/bleeding_edge'

  min_revision_state.external['v8_bleeding_edge'] = (
      self._GetNearestV8BleedingEdgeFromTrunk(
          min_revision_state.revision,
          v8_branch,
          bleeding_edge_branch,
          search_forward=True))
  max_revision_state.external['v8_bleeding_edge'] = (
      self._GetNearestV8BleedingEdgeFromTrunk(
          max_revision_state.revision,
          v8_branch,
          bleeding_edge_branch,
          search_forward=False))

  # Warn when either endpoint didn't map directly, since the expanded range
  # may make the result less trustworthy.
  if (not self._GetV8BleedingEdgeFromV8TrunkIfMappable(
          min_revision_state.revision, bleeding_edge_branch)
      or not self._GetV8BleedingEdgeFromV8TrunkIfMappable(
          max_revision_state.revision, bleeding_edge_branch)):
    self.warnings.append(
        'Trunk revisions in V8 did not map directly to bleeding_edge. '
        'Attempted to expand the range to find V8 rolls which did map '
        'directly to bleeding_edge revisions, but results might not be '
        'valid.')
+
def _FindNextDepotToBisect(
    self, current_depot, min_revision_state, max_revision_state):
  """Decides which depot the script should dive into next (if any).

  Args:
    current_depot: Current depot being bisected.
    min_revision_state: State of the earliest revision in the bisect range.
    max_revision_state: State of the latest revision in the bisect range.

  Returns:
    Name of the depot to bisect next, or None.
  """
  for next_depot in bisect_utils.DEPOT_NAMES:
    depot_info = bisect_utils.DEPOT_DEPS_NAME[next_depot]

    # Skip depots restricted to a platform other than the current one.
    if 'platform' in depot_info and depot_info['platform'] != os.name:
      continue

    # Only consider depots we are allowed to recurse into from here.
    if not (depot_info['recurse']
            and min_revision_state.depot in depot_info['from']):
      continue

    if current_depot == 'v8':
      # Grab the bleeding_edge info here rather than earlier because only
      # now do we have the revision range; from it we can search forwards
      # and backwards to match trunk revisions to bleeding_edge.
      self._FillInV8BleedingEdgeInfo(min_revision_state, max_revision_state)

    min_external = min_revision_state.external.get(next_depot)
    max_external = max_revision_state.external.get(next_depot)

    # No change in this external depot across the range -> nothing to do.
    if min_external == max_external:
      continue

    if min_external and max_external:
      return next_depot

  return None
+
def PrepareToBisectOnDepot(
    self, current_depot, start_revision, end_revision, previous_revision):
  """Changes to the appropriate directory and gathers revisions to bisect.

  Args:
    current_depot: The depot we want to bisect.
    start_revision: Start of the revision range.
    end_revision: End of the revision range.
    previous_revision: The last revision we synced to on |previous_depot|.

  Returns:
    A list containing the revisions between |start_revision| and
    |end_revision| inclusive, or [] on failure.
  """
  # Subsequent commands must run from the external library's directory.
  self.depot_registry.ChangeToDepotDir(current_depot)

  # V8 (and possibly others) is merged in periodically; bisecting such a
  # directory directly won't give much good info, so a custom gclient
  # config is written and synced instead.
  if 'custom_deps' in bisect_utils.DEPOT_DEPS_NAME[current_depot]:
    config_path = os.path.join(self.src_cwd, '..')
    if bisect_utils.RunGClientAndCreateConfig(
        self.opts, bisect_utils.DEPOT_DEPS_NAME[current_depot]['custom_deps'],
        cwd=config_path):
      return []
    if bisect_utils.RunGClient(
        ['sync', '--revision', previous_revision], cwd=self.src_cwd):
      return []

  if current_depot == 'v8_bleeding_edge':
    self.depot_registry.ChangeToDepotDir('chromium')

    # Swap the v8 checkout for bleeding_edge and remember how to undo it.
    shutil.move('v8', 'v8.bak')
    shutil.move('v8_bleeding_edge', 'v8')

    self.cleanup_commands.append(['mv', 'v8', 'v8_bleeding_edge'])
    self.cleanup_commands.append(['mv', 'v8.bak', 'v8'])

    self.depot_registry.SetDepotDir(
        'v8_bleeding_edge', os.path.join(self.src_cwd, 'v8'))
    self.depot_registry.SetDepotDir(
        'v8', os.path.join(self.src_cwd, 'v8.bak'))

    self.depot_registry.ChangeToDepotDir(current_depot)

  depot_revision_list = self.GetRevisionList(
      current_depot, end_revision, start_revision)

  self.depot_registry.ChangeToDepotDir('chromium')
  return depot_revision_list
+
def GatherReferenceValues(self, good_rev, bad_rev, cmd, metric, target_depot):
  """Gathers reference values from the known good and bad revisions.

  Args:
    good_rev: The last known good revision where the performance regression
        has not occurred yet.
    bad_rev: A revision where the performance regression has already
        occurred.
    cmd: The command to execute the performance test.
    metric: The metric being tested for regression.
    target_depot: The depot being bisected.

  Returns:
    A tuple (bad_run_results, good_run_results) with the results of building
    and running each revision; good_run_results is None when the bad run
    failed.
  """
  bad_run_results = self.RunTest(bad_rev, target_depot, cmd, metric)

  # Only bother with the good revision if the bad one ran successfully
  # (index 1 is the error code).
  good_run_results = None
  if not bad_run_results[1]:
    good_run_results = self.RunTest(good_rev, target_depot, cmd, metric)

  return (bad_run_results, good_run_results)
+
+ def PrintRevisionsToBisectMessage(self, revision_list, depot):
+ if self.opts.output_buildbot_annotations:
+ step_name = 'Bisection Range: [%s:%s - %s]' % (depot, revision_list[-1],
+ revision_list[0])
+ bisect_utils.OutputAnnotationStepStart(step_name)
+
+ print
+ print 'Revisions to bisect on [%s]:' % depot
+ for revision_id in revision_list:
+ print ' -> %s' % (revision_id, )
+ print
+
+ if self.opts.output_buildbot_annotations:
+ bisect_utils.OutputAnnotationStepClosed()
+
def NudgeRevisionsIfDEPSChange(self, bad_revision, good_revision,
                               good_svn_revision=None):
  """Expands the revision range to cover a matching .DEPS.git change.

  Checks whether changes to the DEPS file occurred and whether the revision
  range also includes the corresponding change to .DEPS.git; if not,
  attempts to expand the range to include it.

  Args:
    bad_revision: First known bad git revision.
    good_revision: Last known good git revision.
    good_svn_revision: Last known good svn revision.

  Returns:
    A tuple with the new bad and good revisions.
  """
  # Do NOT nudge: at revision 291563 .DEPS.git was removed and the source
  # contains only a DEPS file for dependency changes.
  if good_svn_revision >= 291563:
    return (bad_revision, good_revision)

  if self.opts.target_platform == 'chromium':
    deps_changes = source_control.QueryFileRevisionHistory(
        bisect_utils.FILE_DEPS, good_revision, bad_revision)

    if deps_changes:
      # DEPS was changed; search from the oldest DEPS change up to
      # bad_revision for matching .DEPS.git changes.
      oldest_deps_change = deps_changes[-1]
      gitdeps_changes = source_control.QueryFileRevisionHistory(
          bisect_utils.FILE_DEPS_GIT, oldest_deps_change, bad_revision)

      if len(deps_changes) != len(gitdeps_changes):
        # Timestamp of the most recent DEPS change.
        commit_time = int(bisect_utils.CheckRunGit(
            ['log', '--format=%ct', '-1', deps_changes[0]]))

        # Look for a commit that touches .DEPS.git within the next
        # 15 minutes after the DEPS file change.
        log_cmd = [
            'log', '--format=%H', '-1',
            '--before=%d' % (commit_time + 900),
            '--after=%d' % commit_time,
            'origin/master', '--', bisect_utils.FILE_DEPS_GIT
        ]
        matching_commit = bisect_utils.CheckRunGit(log_cmd).strip()
        if matching_commit:
          self.warnings.append(
              'Detected change to DEPS and modified '
              'revision range to include change to .DEPS.git')
          return (matching_commit, good_revision)
        else:
          self.warnings.append(
              'Detected change to DEPS but couldn\'t find '
              'matching change to .DEPS.git')
  return (bad_revision, good_revision)
+
def CheckIfRevisionsInProperOrder(
    self, target_depot, good_revision, bad_revision):
  """Checks that |good_revision| is an earlier revision than |bad_revision|.

  Args:
    target_depot: The depot the revisions belong to.
    good_revision: Number/tag of the known good revision.
    bad_revision: Number/tag of the known bad revision.

  Returns:
    True if the revisions are in the proper order (good earlier than bad).
  """
  cwd = self.depot_registry.GetDepotDir(target_depot)
  good_pos = source_control.GetCommitPosition(good_revision, cwd)
  bad_pos = source_control.GetCommitPosition(bad_revision, cwd)

  if not good_pos or not bad_pos:
    # Fall back to commit timestamps for repos without commit positions.
    # NOTE(review): the message logs the (missing) positions rather than
    # the revision ids, so it reads a bit oddly in practice.
    logging.info('Could not get commit positions for revisions %s and %s in '
                 'depot %s', good_pos, bad_pos, target_depot)
    good_pos = source_control.GetCommitTime(good_revision, cwd=cwd)
    bad_pos = source_control.GetCommitTime(bad_revision, cwd=cwd)

  return good_pos <= bad_pos
+
def CanPerformBisect(self, good_revision, bad_revision):
  """Checks whether a given revision range is bisectable.

  Checks for the following:
  1. Non-bisectable revisions for android bots (refer to crbug.com/385324).
  2. Non-bisectable revisions for Windows bots (refer to crbug.com/405274).

  Args:
    good_revision: Known good revision.
    bad_revision: Known bad revision.

  Returns:
    A dictionary with an "error" field when the range is not bisectable,
    otherwise None.
  """
  if self.opts.target_platform == 'android':
    good_revision = source_control.GetCommitPosition(good_revision)
    if bisect_utils.IsStringInt(good_revision) and good_revision < 265549:
      return {'error': (
          'Bisect cannot continue for the given revision range.\n'
          'It is impossible to bisect Android regressions '
          'prior to r265549, which allows the bisect bot to '
          'rely on Telemetry to do apk installation of the most recently '
          'built local ChromePublic (refer to crbug.com/385324).\n'
          'Please try bisecting revisions greater than or equal to r265549.')}

  if bisect_utils.IsWindowsHost():
    good_revision = source_control.GetCommitPosition(good_revision)
    bad_revision = source_control.GetCommitPosition(bad_revision)
    both_are_ints = (bisect_utils.IsStringInt(good_revision) and
                     bisect_utils.IsStringInt(bad_revision))
    if both_are_ints:
      in_dead_zone = (289987 <= good_revision < 290716 or
                      289987 <= bad_revision < 290716)
      if in_dead_zone:
        return {'error': ('Oops! Revision between r289987 and r290716 are '
                          'marked as dead zone for Windows due to '
                          'crbug.com/405274. Please try another range.')}

  return None
+
def _GatherResultsFromRevertedCulpritCL(
    self, results, target_depot, command_to_run, metric):
  """Gathers performance results with/without the culprit CL.

  Attempts to revert the culprit CL against ToT and runs the performance
  tests again with and without the CL, adding the results to the overall
  bisect results.

  Args:
    results: BisectResults from the bisect.
    target_depot: The target depot we're bisecting.
    command_to_run: Specify the command to execute the performance test.
    metric: The performance metric to monitor.
  """
  run_results_tot, run_results_reverted = self._RevertCulpritCLAndRetest(
      results, target_depot, command_to_run, metric)

  results.AddRetestResults(run_results_tot, run_results_reverted)

  if len(results.culprit_revisions) != 1:
    return

  # Make sure no reverted files linger in the culprit's depot.
  _, _, culprit_depot = results.culprit_revisions[0]
  bisect_utils.CheckRunGit(
      ['reset', '--hard', 'HEAD'],
      cwd=self.depot_registry.GetDepotDir(culprit_depot))
+
def _RevertCL(self, culprit_revision, culprit_depot):
  """Reverts the specified revision in the specified depot.

  Returns:
    True when "git revert" exited cleanly, False otherwise.
  """
  annotate = self.opts.output_buildbot_annotations
  if annotate:
    bisect_utils.OutputAnnotationStepStart(
        'Reverting culprit CL: %s' % culprit_revision)

  _, return_code = bisect_utils.RunGit(
      ['revert', '--no-commit', culprit_revision],
      cwd=self.depot_registry.GetDepotDir(culprit_depot))
  if return_code:
    bisect_utils.OutputAnnotationStepWarning()
    bisect_utils.OutputAnnotationStepText('Failed to revert CL cleanly.')

  if annotate:
    bisect_utils.OutputAnnotationStepClosed()
  return not return_code
+
def _RevertCulpritCLAndRetest(
    self, results, target_depot, command_to_run, metric):
  """Reverts the culprit CL against ToT and runs the performance test.

  Attempts to revert the culprit CL against ToT and runs the performance
  tests again with and without the CL.

  Args:
    results: BisectResults from the bisect.
    target_depot: The target depot we're bisecting.
    command_to_run: Specify the command to execute the performance test.
    metric: The performance metric to monitor.

  Returns:
    A tuple with the results of running the CL at ToT/reverted; (None, None)
    when retesting was skipped or failed.
  """
  # Only retest when the bisect is confident enough that a revert is likely
  # to confirm the regression.
  if results.confidence < bisect_utils.HIGH_CONFIDENCE:
    return (None, None)

  # With multiple culprit CLs we won't try to revert.
  if len(results.culprit_revisions) != 1:
    return (None, None)

  culprit_revision, _, culprit_depot = results.culprit_revisions[0]

  # Sync to tip-of-tree before applying the revert.
  if not self._SyncRevision(target_depot, None, 'gclient'):
    return (None, None)

  head_revision = bisect_utils.CheckRunGit(
      ['log', '--format=%H', '-1']).strip()

  if not self._RevertCL(culprit_revision, culprit_depot):
    return (None, None)

  # If the culprit CL happened to be in a depot that gets pulled in, we
  # can't revert the change and issue a try job to build, since that would
  # require modifying both the DEPS file and files in another depot.
  # Instead, we build locally.
  force_build = (culprit_depot != target_depot)
  if force_build:
    results.warnings.append(
        'Culprit CL is in another depot, attempting to revert and build'
        ' locally to retest. This may not match the performance of official'
        ' builds.')

  run_results_reverted = self._RunTestWithAnnotations(
      'Re-Testing ToT with reverted culprit',
      'Failed to run reverted CL.',
      head_revision, target_depot, command_to_run, metric, force_build)

  # Drop the reverted file(s) before testing plain ToT.
  bisect_utils.RunGit(
      ['reset', '--hard', 'HEAD'],
      cwd=self.depot_registry.GetDepotDir(culprit_depot))

  # If retesting with the reverted CL failed, bail out of retesting
  # against ToT.
  if run_results_reverted[1]:
    return (None, None)

  run_results_tot = self._RunTestWithAnnotations(
      'Re-Testing ToT',
      'Failed to run ToT.',
      head_revision, target_depot, command_to_run, metric, force_build)

  return (run_results_tot, run_results_reverted)
+
+ def PostBisectResults(self, bisect_results):
+ """Posts bisect results to Perf Dashboard."""
+ bisect_utils.OutputAnnotationStepStart('Post Results')
+
+ results = bisect_results_json.Get(
+ bisect_results, self.opts, self.depot_registry)
+ results_json = json.dumps(results)
+ data = urllib.urlencode({'data': results_json})
+ request = urllib2.Request(PERF_DASH_RESULTS_URL)
+ try:
+ urllib2.urlopen(request, data)
+ except urllib2.URLError as e:
+ print 'Failed to post bisect results. Error: %s.' % e
+ bisect_utils.OutputAnnotationStepWarning()
+
+ bisect_utils.OutputAnnotationStepClosed()
+
def _RunTestWithAnnotations(
    self, step_text, error_text, head_revision,
    target_depot, command_to_run, metric, force_build):
  """Runs the performance test and outputs start/stop annotations.

  Args:
    step_text: Name for the annotated buildbot step.
    error_text: Annotation text emitted when the run fails.
    head_revision: Revision to run the test against.
    target_depot: The target depot we're bisecting.
    command_to_run: Specify the command to execute the performance test.
    metric: The performance metric to monitor.
    force_build: Whether to force a build locally.

  Returns:
    Results of the test.
  """
  annotate = self.opts.output_buildbot_annotations
  if annotate:
    bisect_utils.OutputAnnotationStepStart(step_text)

  # The working tree already holds the desired state (e.g. a reverted
  # culprit CL), so skip syncing and build from a patch against ToT.
  run_test_results = self.RunTest(
      head_revision, target_depot, command_to_run,
      metric, skippable=False, skip_sync=True, create_patch=True,
      force_build=force_build)

  if annotate:
    if run_test_results[1]:
      bisect_utils.OutputAnnotationStepWarning()
      bisect_utils.OutputAnnotationStepText(error_text)
    bisect_utils.OutputAnnotationStepClosed()

  return run_test_results
+
+ def Run(self, command_to_run, bad_revision_in, good_revision_in, metric):
+ """Given known good and bad revisions, run a binary search on all
+ intermediate revisions to determine the CL where the performance regression
+ occurred.
+
+ Args:
+ command_to_run: Specify the command to execute the performance test.
+ good_revision: Number/tag of the known good revision.
+ bad_revision: Number/tag of the known bad revision.
+ metric: The performance metric to monitor.
+
+ Returns:
+ A BisectResults object.
+ """
+ # Choose depot to bisect first
+ target_depot = 'chromium'
+ if self.opts.target_platform == 'android-chrome':
+ target_depot = 'android-chrome'
+
+ cwd = os.getcwd()
+ self.depot_registry.ChangeToDepotDir(target_depot)
+
+ # If they passed SVN revisions, we can try match them to git SHA1 hashes.
+ bad_revision = source_control.ResolveToRevision(
+ bad_revision_in, target_depot, bisect_utils.DEPOT_DEPS_NAME, 100)
+ good_revision = source_control.ResolveToRevision(
+ good_revision_in, target_depot, bisect_utils.DEPOT_DEPS_NAME, -100)
+
+ os.chdir(cwd)
+ if bad_revision is None:
+ return BisectResults(
+ error='Couldn\'t resolve [%s] to SHA1.' % bad_revision_in)
+
+ if good_revision is None:
+ return BisectResults(
+ error='Couldn\'t resolve [%s] to SHA1.' % good_revision_in)
+
+ # Check that they didn't accidentally swap good and bad revisions.
+ if not self.CheckIfRevisionsInProperOrder(
+ target_depot, good_revision, bad_revision):
+ return BisectResults(error='Bad rev (%s) appears to be earlier than good '
+ 'rev (%s).' % (good_revision, bad_revision))
+
+ bad_revision, good_revision = self.NudgeRevisionsIfDEPSChange(
+ bad_revision, good_revision, good_revision_in)
+ if self.opts.output_buildbot_annotations:
+ bisect_utils.OutputAnnotationStepStart('Gathering Revisions')
+
+ cannot_bisect = self.CanPerformBisect(good_revision, bad_revision)
+ if cannot_bisect:
+ return BisectResults(error=cannot_bisect.get('error'))
+
+ print 'Gathering revision range for bisection.'
+ # Retrieve a list of revisions to do bisection on.
+ revision_list = self.GetRevisionList(target_depot, bad_revision,
+ good_revision)
+
+ if self.opts.output_buildbot_annotations:
+ bisect_utils.OutputAnnotationStepClosed()
+
+ if revision_list:
+ self.PrintRevisionsToBisectMessage(revision_list, target_depot)
+
+ if self.opts.output_buildbot_annotations:
+ bisect_utils.OutputAnnotationStepStart('Gathering Reference Values')
+
+ print 'Gathering reference values for bisection.'
+
+ # Perform the performance tests on the good and bad revisions, to get
+ # reference values.
+ bad_results, good_results = self.GatherReferenceValues(good_revision,
+ bad_revision,
+ command_to_run,
+ metric,
+ target_depot)
+
+ if self.opts.output_buildbot_annotations:
+ bisect_utils.OutputAnnotationStepClosed()
+
+ if bad_results[1]:
+ error = ('An error occurred while building and running the \'bad\' '
+ 'reference value. The bisect cannot continue without '
+ 'a working \'bad\' revision to start from.\n\nError: %s' %
+ bad_results[0])
+ return BisectResults(error=error)
+
+ if good_results[1]:
+ error = ('An error occurred while building and running the \'good\' '
+ 'reference value. The bisect cannot continue without '
+ 'a working \'good\' revision to start from.\n\nError: %s' %
+ good_results[0])
+ return BisectResults(error=error)
+
+ # We need these reference values to determine if later runs should be
+ # classified as pass or fail.
+
+ known_bad_value = bad_results[0]
+ known_good_value = good_results[0]
+
+ # Abort bisect early when the return codes for known good
+ # and known bad revisions are same.
+ if (self._IsBisectModeReturnCode() and
+ known_bad_value['mean'] == known_good_value['mean']):
+ return BisectResults(abort_reason=('known good and known bad revisions '
+ 'returned same return code (return code=%s). '
+ 'Continuing bisect might not yield any results.' %
+ known_bad_value['mean']))
+ # Check the direction of improvement only if the improvement_direction
+ # option is set to a specific direction (1 for higher is better or -1 for
+ # lower is better).
+ improvement_dir = self.opts.improvement_direction
+ if improvement_dir:
+ higher_is_better = improvement_dir > 0
+ if higher_is_better:
+ message = "Expecting higher values to be better for this metric, "
+ else:
+ message = "Expecting lower values to be better for this metric, "
+ metric_increased = known_bad_value['mean'] > known_good_value['mean']
+ if metric_increased:
+ message += "and the metric appears to have increased. "
+ else:
+ message += "and the metric appears to have decreased. "
+ if ((higher_is_better and metric_increased) or
+ (not higher_is_better and not metric_increased)):
+ error = (message + 'Then, the test results for the ends of the given '
+ '\'good\' - \'bad\' range of revisions represent an '
+ 'improvement (and not a regression).')
+ return BisectResults(error=error)
+ logging.info(message + "Therefore we continue to bisect.")
+
+ bisect_state = BisectState(target_depot, revision_list)
+ revision_states = bisect_state.GetRevisionStates()
+
+ min_revision = 0
+ max_revision = len(revision_states) - 1
+
+ # Can just mark the good and bad revisions explicitly here since we
+ # already know the results.
+ bad_revision_state = revision_states[min_revision]
+ bad_revision_state.external = bad_results[2]
+ bad_revision_state.perf_time = bad_results[3]
+ bad_revision_state.build_time = bad_results[4]
+ bad_revision_state.passed = False
+ bad_revision_state.value = known_bad_value
+
+ good_revision_state = revision_states[max_revision]
+ good_revision_state.external = good_results[2]
+ good_revision_state.perf_time = good_results[3]
+ good_revision_state.build_time = good_results[4]
+ good_revision_state.passed = True
+ good_revision_state.value = known_good_value
+
+ # Check how likely it is that the good and bad results are different
+ # beyond chance-induced variation.
+ if not (self.opts.debug_ignore_regression_confidence or
+ self._IsBisectModeReturnCode()):
+ if not _IsRegressionReproduced(known_good_value, known_bad_value,
+ self.opts.required_initial_confidence):
+ # If there is no significant difference between "good" and "bad"
+ # revision results, then the "bad revision" is considered "good".
+ # TODO(qyearsley): Remove this if it is not necessary.
+ bad_revision_state.passed = True
+ self.warnings.append(_RegressionNotReproducedWarningMessage(
+ good_revision, bad_revision, known_good_value, known_bad_value))
+ return BisectResults(bisect_state, self.depot_registry, self.opts,
+ self.warnings)
+
+ while True:
+ if not revision_states:
+ break
+
+ if max_revision - min_revision <= 1:
+ min_revision_state = revision_states[min_revision]
+ max_revision_state = revision_states[max_revision]
+ current_depot = min_revision_state.depot
+ # TODO(sergiyb): Under which conditions can first two branches be hit?
+ if min_revision_state.passed == '?':
+ next_revision_index = min_revision
+ elif max_revision_state.passed == '?':
+ next_revision_index = max_revision
+ elif current_depot in ['android-chrome', 'chromium', 'v8']:
+ previous_revision = revision_states[min_revision].revision
+ # If there were changes to any of the external libraries we track,
+ # should bisect the changes there as well.
+ external_depot = self._FindNextDepotToBisect(
+ current_depot, min_revision_state, max_revision_state)
+ # If there was no change in any of the external depots, the search
+ # is over.
+ if not external_depot:
+ if current_depot == 'v8':
+ self.warnings.append(
+ 'Unfortunately, V8 bisection couldn\'t '
+ 'continue any further. The script can only bisect into '
+ 'V8\'s bleeding_edge repository if both the current and '
+ 'previous revisions in trunk map directly to revisions in '
+ 'bleeding_edge.')
+ break
+
+ earliest_revision = max_revision_state.external[external_depot]
+ latest_revision = min_revision_state.external[external_depot]
+
+ new_revision_list = self.PrepareToBisectOnDepot(
+ external_depot, earliest_revision, latest_revision,
+ previous_revision)
+
+ if not new_revision_list:
+ error = ('An error occurred attempting to retrieve revision '
+ 'range: [%s..%s]' % (earliest_revision, latest_revision))
+ return BisectResults(error=error)
+
+ revision_states = bisect_state.CreateRevisionStatesAfter(
+ external_depot, new_revision_list, current_depot,
+ previous_revision)
+
+ # Reset the bisection and perform it on the newly inserted states.
+ min_revision = 0
+ max_revision = len(revision_states) - 1
+
+ print ('Regression in metric %s appears to be the result of '
+ 'changes in [%s].' % (metric, external_depot))
+
+ revision_list = [state.revision for state in revision_states]
+ self.PrintRevisionsToBisectMessage(revision_list, external_depot)
+
+ continue
+ else:
+ break
+ else:
+ next_revision_index = (int((max_revision - min_revision) / 2) +
+ min_revision)
+
+ next_revision_state = revision_states[next_revision_index]
+ next_revision = next_revision_state.revision
+ next_depot = next_revision_state.depot
+
+ self.depot_registry.ChangeToDepotDir(next_depot)
+
+ message = 'Working on [%s:%s]' % (next_depot, next_revision)
+ print message
+ if self.opts.output_buildbot_annotations:
+ bisect_utils.OutputAnnotationStepStart(message)
+
+ run_results = self.RunTest(next_revision, next_depot, command_to_run,
+ metric, skippable=True)
+
+ # If the build is successful, check whether or not the metric
+ # had regressed.
+ if not run_results[1]:
+ if len(run_results) > 2:
+ next_revision_state.external = run_results[2]
+ next_revision_state.perf_time = run_results[3]
+ next_revision_state.build_time = run_results[4]
+
+ passed_regression = self._CheckIfRunPassed(run_results[0],
+ known_good_value,
+ known_bad_value)
+
+ next_revision_state.passed = passed_regression
+ next_revision_state.value = run_results[0]
+
+ if passed_regression:
+ max_revision = next_revision_index
+ else:
+ min_revision = next_revision_index
+ else:
+ if run_results[1] == BUILD_RESULT_SKIPPED:
+ next_revision_state.passed = 'Skipped'
+ elif run_results[1] == BUILD_RESULT_FAIL:
+ next_revision_state.passed = 'Build Failed'
+
+ print run_results[0]
+
+ # If the build is broken, remove it and redo search.
+ revision_states.pop(next_revision_index)
+
+ max_revision -= 1
+
+ if self.opts.output_buildbot_annotations:
+ self.printer.PrintPartialResults(bisect_state)
+ bisect_utils.OutputAnnotationStepClosed()
+
+ self._ConfidenceExtraTestRuns(min_revision_state, max_revision_state,
+ command_to_run, metric)
+ results = BisectResults(bisect_state, self.depot_registry, self.opts,
+ self.warnings)
+
+ self._GatherResultsFromRevertedCulpritCL(
+ results, target_depot, command_to_run, metric)
+
+ return results
+ else:
+ # Weren't able to sync and retrieve the revision range.
+ error = ('An error occurred attempting to retrieve revision range: '
+ '[%s..%s]' % (good_revision, bad_revision))
+ return BisectResults(error=error)
+
+ def _ConfidenceExtraTestRuns(self, good_state, bad_state, command_to_run,
+ metric):
+ if (bool(good_state.passed) != bool(bad_state.passed)
+ and good_state.passed not in ('Skipped', 'Build Failed')
+ and bad_state.passed not in ('Skipped', 'Build Failed')):
+ for state in (good_state, bad_state):
+ run_results = self.RunTest(
+ state.revision,
+ state.depot,
+ command_to_run,
+ metric,
+ test_run_multiplier=BORDER_REVISIONS_EXTRA_RUNS)
+ # Is extend the right thing to do here?
+ if run_results[1] != BUILD_RESULT_FAIL:
+ state.value['values'].extend(run_results[0]['values'])
+ else:
+ warning_text = 'Re-test of revision %s failed with error message: %s'
+ warning_text %= (state.revision, run_results[0])
+ if warning_text not in self.warnings:
+ self.warnings.append(warning_text)
+
+
def _IsPlatformSupported():
  """Checks that this platform and build system are supported.

  Returns:
    True if the platform and build system are supported.
  """
  # Haven't tested the script out on any other platforms yet.
  # (The original docstring documented an `opts` argument that does not
  # exist; this function takes no arguments.)
  return os.name in ('posix', 'nt')
+
+
def RemoveBuildFiles(build_type):
  """Removes build files from previous runs."""
  # Both the legacy 'build/<type>' and the 'out/<type>' output directories
  # are wiped and recreated empty.
  directories = [os.path.join('out', build_type),
                 os.path.join('build', build_type)]
  logging.info('Removing build files in "%s" and "%s".',
               os.path.abspath(directories[0]),
               os.path.abspath(directories[1]))
  try:
    for directory in directories:
      RemakeDirectoryTree(directory)
  except Exception as e:
    raise RuntimeError('Got error in RemoveBuildFiles: %s' % e)
+
+
def RemakeDirectoryTree(path_to_dir):
  """Removes a directory tree and replaces it with an empty one.

  Args:
    path_to_dir: Path to the directory tree to recreate.
  """
  # Note: the previous docstring claimed a True/False return value, but this
  # function (like its helpers) returns None and signals failure by raising.
  RemoveDirectoryTree(path_to_dir)
  MaybeMakeDirectory(path_to_dir)
+
+
def RemoveDirectoryTree(path_to_dir):
  """Removes the file or directory tree at the given path, if it exists.

  A nonexistent path is a no-op; any OS error other than ENOENT is re-raised.
  (The previous docstring claimed a True/False return value; this function
  returns None.)
  """
  if os.path.isfile(path_to_dir):
    # Lazy %-args form so the message is only formatted when INFO is enabled.
    logging.info('REMOVING FILE %s', path_to_dir)
    os.remove(path_to_dir)
  try:
    if os.path.exists(path_to_dir):
      shutil.rmtree(path_to_dir)
  except OSError as e:  # 'except OSError, e' is Python-2-only syntax.
    # A concurrent deletion of the tree is fine; anything else propagates.
    if e.errno != errno.ENOENT:
      raise
+
+
# This is copied from build/scripts/common/chromium_utils.py.
def MaybeMakeDirectory(*path):
  """Creates an entire path, if it doesn't already exist."""
  target = os.path.join(*path)
  try:
    os.makedirs(target)
  except OSError as error:
    # An already-existing directory is fine; any other failure propagates.
    if error.errno != errno.EEXIST:
      raise
+
+
class BisectOptions(object):
  """Options to be used when running bisection.

  Instances hold the full configuration for one bisect job. They can be
  populated either from the command line (ParseCommandLine) or from a dict
  (FromDict); both paths validate and normalize the values.
  """

  def __init__(self):
    super(BisectOptions, self).__init__()

    self.target_platform = 'chromium'
    self.build_preference = None
    self.good_revision = None
    self.bad_revision = None
    self.use_goma = None
    self.goma_dir = None
    self.goma_threads = 64
    self.repeat_test_count = 20
    self.truncate_percent = 25
    self.max_time_minutes = 20
    self.metric = None
    self.command = None
    self.output_buildbot_annotations = None
    self.no_custom_deps = False
    self.working_directory = None
    self.extra_src = None
    self.debug_ignore_build = None
    self.debug_ignore_sync = None
    self.debug_ignore_perf_test = None
    self.debug_ignore_regression_confidence = None
    self.debug_fake_first_test_mean = 0
    self.target_arch = 'ia32'
    self.target_build_type = 'Release'
    self.builder_type = 'perf'
    self.bisect_mode = bisect_utils.BISECT_MODE_MEAN
    self.improvement_direction = 0
    self.bug_id = ''
    self.required_initial_confidence = 80.0
    self.try_job_id = None

  @staticmethod
  def _AddBisectOptionsGroup(parser):
    """Adds the core bisect arguments (revisions, command, metric) to parser."""
    group = parser.add_argument_group('Bisect options')
    group.add_argument('-c', '--command', required=True,
                       help='A command to execute your performance test at '
                            'each point in the bisection.')
    group.add_argument('-b', '--bad_revision', required=True,
                       help='A bad revision to start bisection. Must be later '
                            'than good revision. May be either a git or svn '
                            'revision.')
    group.add_argument('-g', '--good_revision', required=True,
                       help='A revision to start bisection where performance '
                            'test is known to pass. Must be earlier than the '
                            'bad revision. May be either a git or a svn '
                            'revision.')
    group.add_argument('-m', '--metric',
                       help='The desired metric to bisect on. For example '
                            '"vm_rss_final_b/vm_rss_f_b"')
    group.add_argument('-d', '--improvement_direction', type=int, default=0,
                       help='An integer number representing the direction of '
                            'improvement. 1 for higher is better, -1 for lower '
                            'is better, 0 for ignore (default).')
    group.add_argument('-r', '--repeat_test_count', type=int, default=20,
                       choices=range(1, 101),
                       help='The number of times to repeat the performance '
                            'test. Values will be clamped to range [1, 100]. '
                            'Default value is 20.')
    group.add_argument('--max_time_minutes', type=int, default=20,
                       choices=range(1, 61),
                       help='The maximum time (in minutes) to take running the '
                            'performance tests. The script will run the '
                            'performance tests according to '
                            '--repeat_test_count, so long as it doesn\'t exceed'
                            ' --max_time_minutes. Values will be clamped to '
                            'range [1, 60]. Default value is 20.')
    group.add_argument('-t', '--truncate_percent', type=int, default=25,
                       help='The highest/lowest percent are discarded to form '
                            'a truncated mean. Values will be clamped to range '
                            '[0, 25]. Default value is 25 percent.')
    group.add_argument('--bisect_mode', default=bisect_utils.BISECT_MODE_MEAN,
                       choices=[bisect_utils.BISECT_MODE_MEAN,
                                bisect_utils.BISECT_MODE_STD_DEV,
                                bisect_utils.BISECT_MODE_RETURN_CODE],
                       help='The bisect mode. Choices are to bisect on the '
                            'difference in mean, std_dev, or return_code.')
    group.add_argument('--bug_id', default='',
                       help='The id for the bug associated with this bisect. '
                            'If this number is given, bisect will attempt to '
                            'verify that the bug is not closed before '
                            'starting.')
    group.add_argument('--try_job_id', default=None,
                       help='The id assigned by Perf Dashboard when sending '
                            'try jobs.')
    group.add_argument('--required_initial_confidence', type=float,
                       default=80.0,
                       help='The required confidence score for the initial '
                            'check to see whether there is a significant '
                            'difference between given good and bad revisions.')

  @staticmethod
  def _AddBuildOptionsGroup(parser):
    """Adds arguments controlling how and where builds are produced."""
    group = parser.add_argument_group('Build options')
    group.add_argument('-w', '--working_directory',
                       help='Path to the working directory where the script '
                       'will do an initial checkout of the chromium depot. The '
                       'files will be placed in a subdirectory "bisect" under '
                       'working_directory and that will be used to perform the '
                       'bisection. This parameter is optional, if it is not '
                       'supplied, the script will work from the current depot.')
    group.add_argument('--build_preference',
                       choices=['msvs', 'ninja', 'make'],
                       help='The preferred build system to use. On linux/mac '
                            'the options are make/ninja. On Windows, the '
                            'options are msvs/ninja.')
    group.add_argument('--target_platform', default='chromium',
                       choices=['chromium', 'android', 'android-chrome'],
                       help='The target platform. Choices are "chromium" '
                            '(current platform), or "android". If you specify '
                            'something other than "chromium", you must be '
                            'properly set up to build that platform.')
    group.add_argument('--no_custom_deps', dest='no_custom_deps',
                       action='store_true', default=False,
                       help='Run the script with custom_deps or not.')
    group.add_argument('--extra_src',
                       help='Path to a script which can be used to modify the '
                            'bisect script\'s behavior.')
    group.add_argument('--use_goma', action='store_true',
                       help='Add a bunch of extra threads for goma, and enable '
                            'goma')
    group.add_argument('--goma_dir',
                       help='Path to goma tools (or system default if not '
                            'specified).')
    # Use an int default directly; the old string default '64' only worked
    # because argparse applies `type` to string defaults.
    group.add_argument('--goma_threads', type=int, default=64,
                       help='Number of threads for goma, only if using goma.')
    group.add_argument('--output_buildbot_annotations', action='store_true',
                       help='Add extra annotation output for buildbot.')
    group.add_argument('--target_arch', default='ia32',
                       dest='target_arch',
                       choices=['ia32', 'x64', 'arm', 'arm64'],
                       help='The target build architecture. Choices are "ia32" '
                            '(default), "x64", "arm" or "arm64".')
    group.add_argument('--target_build_type', default='Release',
                       choices=['Release', 'Debug', 'Release_x64'],
                       help='The target build type. Choices are "Release" '
                            '(default), Release_x64 or "Debug".')
    group.add_argument('--builder_type', default=fetch_build.PERF_BUILDER,
                       choices=[fetch_build.PERF_BUILDER,
                                fetch_build.FULL_BUILDER,
                                fetch_build.ANDROID_CHROME_PERF_BUILDER, ''],
                       help='Type of builder to get build from. This '
                            'determines both the bot that builds and the '
                            'place where archived builds are downloaded from. '
                            'For local builds, an empty string can be passed.')

  @staticmethod
  def _AddDebugOptionsGroup(parser):
    """Adds arguments that stub out expensive steps for debugging/dry runs."""
    group = parser.add_argument_group('Debug options')
    group.add_argument('--debug_ignore_build', action='store_true',
                       help='DEBUG: Don\'t perform builds.')
    group.add_argument('--debug_ignore_sync', action='store_true',
                       help='DEBUG: Don\'t perform syncs.')
    group.add_argument('--debug_ignore_perf_test', action='store_true',
                       help='DEBUG: Don\'t perform performance tests.')
    group.add_argument('--debug_ignore_regression_confidence',
                       action='store_true',
                       help='DEBUG: Don\'t score the confidence of the initial '
                            'good and bad revisions\' test results.')
    group.add_argument('--debug_fake_first_test_mean', type=int, default=0,
                       help='DEBUG: When faking performance tests, return this '
                            'value as the mean of the first performance test, '
                            'and return a mean of 0.0 for further tests.')
    return group

  @classmethod
  def _CreateCommandLineParser(cls):
    """Creates a parser with bisect options.

    Returns:
      An instance of argparse.ArgumentParser.
    """
    usage = ('%(prog)s [options] [-- chromium-options]\n'
             'Perform binary search on revision history to find a minimal '
             'range of revisions where a performance metric regressed.\n')

    parser = argparse.ArgumentParser(usage=usage)
    cls._AddBisectOptionsGroup(parser)
    cls._AddBuildOptionsGroup(parser)
    cls._AddDebugOptionsGroup(parser)
    return parser

  def ParseCommandLine(self):
    """Parses the command line and copies the options onto this instance.

    Raises:
      RuntimeError: The arguments were invalid; the message includes the
          parser's full help text.
    """
    parser = self._CreateCommandLineParser()
    opts = parser.parse_args()

    try:
      # --metric is mandatory except in return-code mode.
      if (not opts.metric and
          opts.bisect_mode != bisect_utils.BISECT_MODE_RETURN_CODE):
        raise RuntimeError('missing required parameter: --metric')

      if opts.bisect_mode != bisect_utils.BISECT_MODE_RETURN_CODE:
        metric_values = opts.metric.split('/')
        if len(metric_values) != 2:
          raise RuntimeError('Invalid metric specified: [%s]' % opts.metric)
        opts.metric = metric_values

      opts.truncate_percent = min(max(opts.truncate_percent, 0), 25) / 100.0

      # items() works on both Python 2 and 3; iteritems() is Python 2 only.
      for k, v in opts.__dict__.items():
        assert hasattr(self, k), 'Invalid %s attribute in BisectOptions.' % k
        setattr(self, k, v)
    except RuntimeError as e:  # 'except RuntimeError, e' is Python 2 only.
      output_string = StringIO.StringIO()
      parser.print_help(file=output_string)
      # str(e) instead of the deprecated e.message attribute.
      error_message = '%s\n\n%s' % (str(e), output_string.getvalue())
      output_string.close()
      raise RuntimeError(error_message)

  @staticmethod
  def FromDict(values):
    """Creates an instance of BisectOptions from a dictionary.

    Args:
      values: a dict containing options to set.

    Returns:
      An instance of BisectOptions.
    """
    opts = BisectOptions()
    for k, v in values.items():
      assert hasattr(opts, k), 'Invalid %s attribute in BisectOptions.' % k
      setattr(opts, k, v)

    if opts.metric and opts.bisect_mode != bisect_utils.BISECT_MODE_RETURN_CODE:
      metric_values = opts.metric.split('/')
      if len(metric_values) != 2:
        raise RuntimeError('Invalid metric specified: [%s]' % opts.metric)
      opts.metric = metric_values

    # x64 Release builds are archived under the Release_x64 build type.
    if opts.target_arch == 'x64' and opts.target_build_type == 'Release':
      opts.target_build_type = 'Release_x64'
    # Clamp user-provided values into their documented ranges.
    opts.repeat_test_count = min(max(opts.repeat_test_count, 1), 100)
    opts.max_time_minutes = min(max(opts.max_time_minutes, 1), 60)
    opts.truncate_percent = min(max(opts.truncate_percent, 0), 25)
    opts.truncate_percent = opts.truncate_percent / 100.0

    return opts
+
+
def _ConfigureLogging():
  """Trivial logging config.

  Configures logging to output any messages at or above INFO to standard out,
  without any additional formatting.
  """
  logging_format = '%(message)s'
  # Use sys.stdout directly; the previous logging.sys.stdout only worked
  # because the logging module happens to import sys (this file imports sys
  # at the top already).
  logging.basicConfig(
      stream=sys.stdout, level=logging.INFO, format=logging_format)
+
+
def main():
  """Entry point: parses options, runs the bisection, prints results.

  Returns:
    0 on success (including a deliberate abort for a closed bug), 1 when a
    RuntimeError is raised anywhere in the pipeline.
  """
  _ConfigureLogging()
  try:
    opts = BisectOptions()
    opts.ParseCommandLine()

    # If a bug id was supplied, abort early (with a printed result) when the
    # associated issue is already closed.
    if opts.bug_id:
      if opts.output_buildbot_annotations:
        bisect_utils.OutputAnnotationStepStart('Checking Issue Tracker')
      issue_closed = query_crbug.CheckIssueClosed(opts.bug_id)
      if issue_closed:
        print 'Aborting bisect because bug is closed'
      else:
        print 'Could not confirm bug is closed, proceeding.'
      if opts.output_buildbot_annotations:
        bisect_utils.OutputAnnotationStepClosed()
      if issue_closed:
        results = BisectResults(abort_reason='the bug is closed.')
        bisect_printer = BisectPrinter(opts)
        bisect_printer.FormatAndPrintResults(results)
        return 0

    # Optional hook script that can register additional depots.
    if opts.extra_src:
      extra_src = bisect_utils.LoadExtraSrc(opts.extra_src)
      if not extra_src:
        raise RuntimeError('Invalid or missing --extra_src.')
      bisect_utils.AddAdditionalDepotInfo(extra_src.GetAdditionalDepotInfo())

    # With --working_directory a fresh checkout is created and old build
    # output is removed before bisecting.
    if opts.working_directory:
      custom_deps = bisect_utils.DEFAULT_GCLIENT_CUSTOM_DEPS
      if opts.no_custom_deps:
        custom_deps = None
      bisect_utils.CreateBisectDirectoryAndSetupDepot(opts, custom_deps)

      os.chdir(os.path.join(os.getcwd(), 'src'))
      RemoveBuildFiles(opts.target_build_type)

    if not _IsPlatformSupported():
      raise RuntimeError('Sorry, this platform isn\'t supported yet.')

    if not source_control.IsInGitRepository():
      raise RuntimeError(
          'Sorry, only the git workflow is supported at the moment.')

    # gClient sync seems to fail if you're not in master branch.
    if (not source_control.IsInProperBranch() and
        not opts.debug_ignore_sync and
        not opts.working_directory):
      raise RuntimeError('You must switch to master branch to run bisection.')
    bisect_test = BisectPerformanceMetrics(opts, os.getcwd())
    try:
      results = bisect_test.Run(opts.command, opts.bad_revision,
                                opts.good_revision, opts.metric)
      if results.error:
        raise RuntimeError(results.error)
      bisect_test.printer.FormatAndPrintResults(results)
      bisect_test.PostBisectResults(results)
      return 0
    finally:
      # Always clean up temporary state, even when the run raised.
      bisect_test.PerformCleanup()
  except RuntimeError as e:
    if opts.output_buildbot_annotations:
      # The perf dashboard scrapes the "results" step in order to comment on
      # bugs. If you change this, please update the perf dashboard as well.
      bisect_utils.OutputAnnotationStepStart('Results')
    print 'Runtime Error: %s' % e
    if opts.output_buildbot_annotations:
      bisect_utils.OutputAnnotationStepClosed()
    return 1


if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/auto_bisect/bisect_perf_regression_test.py b/chromium/tools/auto_bisect/bisect_perf_regression_test.py
new file mode 100644
index 00000000000..30c9b24fd9a
--- /dev/null
+++ b/chromium/tools/auto_bisect/bisect_perf_regression_test.py
@@ -0,0 +1,759 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import re
+import shutil
+import sys
+import urlparse
+import unittest
+
+SRC = os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)
+sys.path.append(os.path.join(SRC, 'third_party', 'pymock'))
+
+import bisect_perf_regression
+import bisect_results
+import bisect_state
+import bisect_utils
+import fetch_build
+import mock
+import source_control
+
+
+# Regression confidence: 0%
+CLEAR_NON_REGRESSION = [
+ # Mean: 30.223 Std. Dev.: 11.383
+ [[16.886], [16.909], [16.99], [17.723], [17.952], [18.118], [19.028],
+ [19.552], [21.954], [38.573], [38.839], [38.965], [40.007], [40.572],
+ [41.491], [42.002], [42.33], [43.109], [43.238]],
+ # Mean: 34.76 Std. Dev.: 11.516
+ [[16.426], [17.347], [20.593], [21.177], [22.791], [27.843], [28.383],
+ [28.46], [29.143], [40.058], [40.303], [40.558], [41.918], [42.44],
+ [45.223], [46.494], [50.002], [50.625], [50.839]]
+]
+
+# Regression confidence: ~ 90%
+ALMOST_REGRESSION = [
+ # Mean: 30.042 Std. Dev.: 2.002
+ [[26.146], [28.04], [28.053], [28.074], [28.168], [28.209], [28.471],
+ [28.652], [28.664], [30.862], [30.973], [31.002], [31.897], [31.929],
+ [31.99], [32.214], [32.323], [32.452], [32.696]],
+ # Mean: 33.008 Std. Dev.: 4.265
+ [[34.963], [30.741], [39.677], [39.512], [34.314], [31.39], [34.361],
+ [25.2], [30.489], [29.434]]
+]
+
+# Regression confidence: ~ 98%
+BARELY_REGRESSION = [
+ # Mean: 28.828 Std. Dev.: 1.993
+ [[26.96], [27.605], [27.768], [27.829], [28.006], [28.206], [28.393],
+ [28.911], [28.933], [30.38], [30.462], [30.808], [31.74], [31.805],
+ [31.899], [32.077], [32.454], [32.597], [33.155]],
+ # Mean: 31.156 Std. Dev.: 1.980
+ [[28.729], [29.112], [29.258], [29.454], [29.789], [30.036], [30.098],
+ [30.174], [30.534], [32.285], [32.295], [32.552], [32.572], [32.967],
+ [33.165], [33.403], [33.588], [33.744], [34.147], [35.84]]
+]
+
+# Regression confidence: 99.5%
+CLEAR_REGRESSION = [
+ # Mean: 30.254 Std. Dev.: 2.987
+ [[26.494], [26.621], [26.701], [26.997], [26.997], [27.05], [27.37],
+ [27.488], [27.556], [31.846], [32.192], [32.21], [32.586], [32.596],
+ [32.618], [32.95], [32.979], [33.421], [33.457], [34.97]],
+ # Mean: 33.190 Std. Dev.: 2.972
+ [[29.547], [29.713], [29.835], [30.132], [30.132], [30.33], [30.406],
+ [30.592], [30.72], [34.486], [35.247], [35.253], [35.335], [35.378],
+ [35.934], [36.233], [36.41], [36.947], [37.982]]
+]
+
+# Regression confidence > 95%, taken from: crbug.com/434318
+# Specifically from Builder android_nexus10_perf_bisect Build #1198
+MULTIPLE_VALUES = [
+ [
+ [18.916, 22.371, 8.527, 5.877, 5.407, 9.476, 8.100, 5.334,
+ 4.507, 4.842, 8.485, 8.308, 27.490, 4.560, 4.804, 23.068, 17.577,
+ 17.346, 26.738, 60.330, 32.307, 5.468, 27.803, 27.373, 17.823,
+ 5.158, 27.439, 5.236, 11.413],
+ [18.999, 22.642, 8.158, 5.995, 5.495, 9.499, 8.092, 5.324,
+ 4.468, 4.788, 8.248, 7.853, 27.533, 4.410, 4.622, 22.341, 22.313,
+ 17.072, 26.731, 57.513, 33.001, 5.500, 28.297, 27.277, 26.462,
+ 5.009, 27.361, 5.130, 10.955]
+ ],
+ [
+ [18.238, 22.365, 8.555, 5.939, 5.437, 9.463, 7.047, 5.345, 4.517,
+ 4.796, 8.593, 7.901, 27.499, 4.378, 5.040, 4.904, 4.816, 4.828,
+ 4.853, 57.363, 34.184, 5.482, 28.190, 27.290, 26.694, 5.099,
+ 4.905, 5.290, 4.813],
+ [18.301, 22.522, 8.035, 6.021, 5.565, 9.037, 6.998, 5.321, 4.485,
+ 4.768, 8.397, 7.865, 27.636, 4.640, 5.015, 4.962, 4.933, 4.977,
+ 4.961, 60.648, 34.593, 5.538, 28.454, 27.297, 26.490, 5.099, 5,
+ 5.247, 4.945],
+ [18.907, 23.368, 8.100, 6.169, 5.621, 9.971, 8.161, 5.331, 4.513,
+ 4.837, 8.255, 7.852, 26.209, 4.388, 5.045, 5.029, 5.032, 4.946,
+ 4.973, 60.334, 33.377, 5.499, 28.275, 27.550, 26.103, 5.108,
+ 4.951, 5.285, 4.910],
+ [18.715, 23.748, 8.128, 6.148, 5.691, 9.361, 8.106, 5.334, 4.528,
+ 4.965, 8.261, 7.851, 27.282, 4.391, 4.949, 4.981, 4.964, 4.935,
+ 4.933, 60.231, 33.361, 5.489, 28.106, 27.457, 26.648, 5.108,
+ 4.963, 5.272, 4.954]
+ ]
+]
+
# Default options for the dry run. All expensive steps (sync, build, perf
# test) are stubbed out via the debug_ignore_* flags.
DEFAULT_OPTIONS = {
    'debug_ignore_build': True,
    'debug_ignore_sync': True,
    'debug_ignore_perf_test': True,
    'debug_ignore_regression_confidence': True,
    'command': 'fake_command',
    'metric': 'fake/metric',
    # NOTE(review): these look like Chromium commit positions; presumably any
    # plausible range works since the real steps are stubbed — confirm.
    'good_revision': 280000,
    'bad_revision': 280005,
}
+
# This global is a placeholder for a generator to be defined by the test cases
# that use _MockRunTests.
_MockResultsGenerator = (x for x in [])

def _MakeMockRunTests(bisect_mode_is_return_code=False):
  """Returns a stub for RunPerformanceTestAndParseResults.

  The stub ignores its arguments and pulls each successive fake result from
  the module-level _MockResultsGenerator.
  """
  def _MockRunTests(*args, **kwargs):  # pylint: disable=unused-argument
    # Builtin next() works on both Python 2 and 3; the .next() method used
    # previously is Python-2-only.
    return _FakeTestResult(
        next(_MockResultsGenerator), bisect_mode_is_return_code)

  return _MockRunTests
+
+
def _FakeTestResult(values, bisect_mode_is_return_code):
  """Packs fake measurement values into a (results_dict, success_code) pair."""
  if bisect_mode_is_return_code:
    # In return-code mode the mean acts as a pass/fail flag: 0 only when
    # every fake return code was 0.
    mean = 0 if all(v == 0 for v in values) else 1
  else:
    mean = 0.0
  return ({'mean': mean, 'std_err': 0.0, 'std_dev': 0.0, 'values': values}, 0)
+
+
def _SampleBisecResult(opts):
  """Builds a canned BisectResults with one chromium culprit CL.

  Args:
    opts: A BisectOptions instance for the fake run.

  Returns:
    A BisectResults with three revisions and 'ab55ead...' as the culprit.
  """
  # A comma was missing after the first hash, so Python's implicit string
  # concatenation silently merged the first two hashes into one 80-char
  # string; the list is meant to hold three distinct revisions.
  revisions = [
      'ae7ef14ba2d9b5ef0d2c1c092ec98a417e44740d',
      'ab55ead638496b061c9de61685b982f7cea38ca7',
      '89aa0c99e4b977b9a4f992ac14da0d6624f7316e']
  state = bisect_state.BisectState(depot='chromium', revisions=revisions)
  depot_registry = bisect_perf_regression.DepotDirectoryRegistry('/mock/src')
  results = bisect_results.BisectResults(
      bisect_state=state, depot_registry=depot_registry, opts=opts,
      runtime_warnings=[])
  results.confidence = 99.9
  results.culprit_revisions = [(
      'ab55ead638496b061c9de61685b982f7cea38ca7',
      {
          'date': 'Thu, 26 Jun 2014 14:29:49 +0000',
          'body': 'Fix',
          'author': 'author@chromium.org',
          'subject': 'Fix',
          'email': 'author@chromium.org',
      },
      'chromium')]
  return results
+
+
def _GetMockCallArg(function_mock, call_index):
  """Gets the list of called arguments for call at |call_index|.

  Args:
    function_mock: A Mock object.
    call_index: The index at which the mocked function was called.

  Returns:
    The called argument list, or None when no such call was recorded.
  """
  recorded_calls = function_mock.call_args_list
  if recorded_calls and call_index < len(recorded_calls):
    positional_args, _ = recorded_calls[call_index]
    return positional_args
  return None
+
+
def _GetBisectPerformanceMetricsInstance(options_dict):
  """Returns an instance of the BisectPerformanceMetrics class."""
  bisect_options = bisect_perf_regression.BisectOptions.FromDict(options_dict)
  return bisect_perf_regression.BisectPerformanceMetrics(
      bisect_options, os.getcwd())
+
+
def _GetExtendedOptions(improvement_dir, fake_first, ignore_confidence=True,
                        **extra_opts):
  """Returns a copy of the default options dict with some overrides applied."""
  options = dict(DEFAULT_OPTIONS)
  options['improvement_direction'] = improvement_dir
  options['debug_fake_first_test_mean'] = fake_first
  options['debug_ignore_regression_confidence'] = ignore_confidence
  options.update(extra_opts)
  return options
+
+
def _GenericDryRun(options, print_results=False):
  """Performs a dry run of the bisector.

  Args:
    options: Dictionary containing the options for the bisect instance.
    print_results: Boolean telling whether to call FormatAndPrintResults.

  Returns:
    The results dictionary as returned by the bisect Run method.
  """
  _AbortIfThereAreStagedChanges()
  # Disable rmtree to avoid deleting local trees. The stub accepts any
  # arguments because shutil.rmtree may be called with one, two, or three
  # arguments; the previous two-required-positional lambda broke plain
  # shutil.rmtree(path) calls.
  old_rmtree = shutil.rmtree
  shutil.rmtree = lambda *args, **kwargs: None
  # git reset HEAD may be run during the dry run, which removes staged changes.
  try:
    bisect_instance = _GetBisectPerformanceMetricsInstance(options)
    results = bisect_instance.Run(
        bisect_instance.opts.command, bisect_instance.opts.bad_revision,
        bisect_instance.opts.good_revision, bisect_instance.opts.metric)

    if print_results:
      bisect_instance.printer.FormatAndPrintResults(results)

    return results
  finally:
    # Always restore the real rmtree, even when the run raised.
    shutil.rmtree = old_rmtree
+
+
def _AbortIfThereAreStagedChanges():
  """Exits the test prematurely if there are staged changes."""
  # The output of "git status --short" will be an empty string if there are
  # no staged changes in the current branch. Untracked files are ignored
  # because when running the presubmit on the trybot there are sometimes
  # untracked changes to the run-perf-test.cfg and bisect.cfg files.
  status_output = bisect_utils.CheckRunGit(
      ['status', '--short', '--untracked-files=no'])
  if not status_output:
    return
  print('There are un-committed changes in the current branch.')
  print('Aborting the tests to avoid destroying local changes. Changes:')
  print(status_output)
  sys.exit(1)
+
+
+class BisectPerfRegressionTest(unittest.TestCase):
+ """Test case for other functions and classes in bisect-perf-regression.py."""
+
+ def setUp(self):
+ self.cwd = os.getcwd()
+ os.chdir(os.path.abspath(os.path.join(os.path.dirname(__file__),
+ os.path.pardir, os.path.pardir)))
+
  def tearDown(self):
    # Restore the working directory that setUp changed away from.
    os.chdir(self.cwd)
+
  def testBisectOptionsCanPrintHelp(self):
    """Tests that the argument parser can be made and can print help."""
    bisect_options = bisect_perf_regression.BisectOptions()
    # _CreateCommandLineParser is private; calling it directly here is a
    # smoke test that parser construction and help formatting don't raise.
    parser = bisect_options._CreateCommandLineParser()
    parser.format_help()
+
  def testParseDEPSStringManually(self):
    """Tests DEPS parsing.

    Feeds a hand-written DEPS vars block to the manual (regex-based) parser
    and checks that only revision-like values survive.
    """
    deps_file_contents = """
    vars = {
        'ffmpeg_hash':
         '@ac4a9f31fe2610bd146857bbd55d7a260003a888',
        'webkit_url':
         'https://chromium.googlesource.com/chromium/blink.git',
        'git_url':
         'https://chromium.googlesource.com',
        'webkit_rev':
         '@e01ac0a267d1017288bc67fa3c366b10469d8a24',
        'angle_revision':
         '74697cf2064c0a2c0d7e1b1b28db439286766a05'
    }"""

    # Should only expect SVN/git revisions to come through, and URLs should be
    # filtered out.
    expected_vars_dict = {
        'ffmpeg_hash': '@ac4a9f31fe2610bd146857bbd55d7a260003a888',
        'webkit_rev': '@e01ac0a267d1017288bc67fa3c366b10469d8a24',
        'angle_revision': '74697cf2064c0a2c0d7e1b1b28db439286766a05'
    }
    # Testing private function.
    # pylint: disable=W0212
    vars_dict = bisect_perf_regression._ParseRevisionsFromDEPSFileManually(
        deps_file_contents)
    self.assertEqual(vars_dict, expected_vars_dict)
+
  def _AssertParseResult(self, expected_values, result_string):
    """Asserts some values are parsed from a RESULT line.

    Args:
      expected_values: The list of values the parser should extract.
      result_string: The raw value portion of the my_trace RESULT line; an
          unrelated RESULT line is included to check the metric filter.
    """
    results_template = ('RESULT other_chart: other_trace= 123 count\n'
                        'RESULT my_chart: my_trace= %(value)s\n')
    results = results_template % {'value': result_string}
    metric = ['my_chart', 'my_trace']
    # Testing private function.
    # pylint: disable=W0212
    values = bisect_perf_regression._TryParseResultValuesFromOutput(
        metric, results)
    self.assertEqual(expected_values, values)
+
  def testTryParseResultValuesFromOutput_WithSingleValue(self):
    """Tests result pattern <*>RESULT <graph>: <trace>= <value>"""
    self._AssertParseResult([66.88], '66.88 kb')
    self._AssertParseResult([66.88], '66.88 ')
    self._AssertParseResult([-66.88], '-66.88 kb')
    self._AssertParseResult([66], '66 kb')
    self._AssertParseResult([0.66], '.66 kb')
    # Non-numeric tokens must parse to an empty value list.
    self._AssertParseResult([], '. kb')
    self._AssertParseResult([], 'aaa kb')
+
  def testTryParseResultValuesFromOutput_WithMultiValue(self):
    """Tests result pattern <*>RESULT <graph>: <trace>= [<value>,<value>, ..]"""
    self._AssertParseResult([66.88], '[66.88] kb')
    self._AssertParseResult([66.88, 99.44], '[66.88, 99.44]kb')
    self._AssertParseResult([66.88, 99.44], '[ 66.88, 99.44 ]')
    self._AssertParseResult([-66.88, 99.44], '[-66.88, 99.44] kb')
    self._AssertParseResult([-66, 99], '[-66,99] kb')
    # A trailing comma is tolerated.
    self._AssertParseResult([-66, 99], '[-66,99,] kb')
    self._AssertParseResult([-66, 0.99], '[-66,.99] kb')
    # Empty or partially non-numeric lists must yield no values.
    self._AssertParseResult([], '[] kb')
    self._AssertParseResult([], '[-66,abc] kb')
+
  def testTryParseResultValuesFromOutputWithMeanStd(self):
    """Tests result pattern <*>RESULT <graph>: <trace>= {<mean>, <std>}"""
    # Only the mean is kept; the std component is discarded by the parser.
    self._AssertParseResult([33.22], '{33.22, 3.6} kb')
    self._AssertParseResult([33.22], '{33.22, 3.6} kb')
    self._AssertParseResult([33.22], '{33.22,3.6}kb')
    self._AssertParseResult([33.22], '{33.22,3.6} kb')
    self._AssertParseResult([33.22], '{ 33.22,3.6 }kb')
    self._AssertParseResult([-33.22], '{-33.22,3.6}kb')
    self._AssertParseResult([22], '{22,6}kb')
    self._AssertParseResult([.22], '{.22,6}kb')
    # More or fewer than exactly two components is rejected.
    self._AssertParseResult([], '{.22,6, 44}kb')
    self._AssertParseResult([], '{}kb')
    self._AssertParseResult([], '{XYZ}kb')
+
  # This method doesn't reference self; it fails if an error is thrown.
  # pylint: disable=R0201
  def testDryRun(self):
    """Does a dry run of the bisect script.

    This serves as a smoke test to catch errors in the basic execution of the
    script.
    """
    # print_results=True also exercises the result-formatting path.
    _GenericDryRun(DEFAULT_OPTIONS, True)
+
  def testBisectImprovementDirectionFails(self):
    """Dry run of a bisect with an improvement instead of regression.

    When the metric moved in the declared improvement direction, the bisect
    must refuse to run and report an error.
    """
    # Test result goes from 0 to 100 where higher is better
    results = _GenericDryRun(_GetExtendedOptions(1, 100))
    self.assertIsNotNone(results.error)
    self.assertIn('not a regression', results.error)

    # Test result goes from 0 to -100 where lower is better
    results = _GenericDryRun(_GetExtendedOptions(-1, -100))
    self.assertIsNotNone(results.error)
    self.assertIn('not a regression', results.error)
+
  def testBisectImprovementDirectionSucceeds(self):
    """Bisects with improvement direction matching regression range."""
    # Test result goes from 0 to 100 where lower is better
    results = _GenericDryRun(_GetExtendedOptions(-1, 100))
    self.assertIsNone(results.error)
    # Test result goes from 0 to -100 where higher is better
    results = _GenericDryRun(_GetExtendedOptions(1, -100))
    self.assertIsNone(results.error)
+
  @mock.patch('urllib2.urlopen')
  def testBisectResultsPosted(self, mock_urlopen):
    """Checks that PostBisectResults sends the try_job_id to the dashboard.

    urllib2.urlopen is mocked so the posted form data can be inspected
    instead of hitting the network.
    """
    options_dict = dict(DEFAULT_OPTIONS)
    options_dict.update({
        'bisect_mode': bisect_utils.BISECT_MODE_MEAN,
        'try_job_id': 1234,
    })
    opts = bisect_perf_regression.BisectOptions.FromDict(options_dict)
    bisect_instance = _GetBisectPerformanceMetricsInstance(options_dict)
    results = _SampleBisecResult(opts)
    bisect_instance.PostBisectResults(results)

    # The second positional argument of urlopen is the url-encoded POST body;
    # its 'data' field carries the JSON results payload.
    call_args = _GetMockCallArg(mock_urlopen, 0)
    self.assertIsNotNone(call_args)
    called_data = urlparse.parse_qs(call_args[1])
    results_data = json.loads(called_data['data'][0])
    self.assertEqual(1234, results_data['try_job_id'])
+
+ def _CheckAbortsEarly(self, results, **extra_opts):
+ """Returns True if the bisect job would abort early."""
+ global _MockResultsGenerator
+ _MockResultsGenerator = (r for r in results)
+ bisect_class = bisect_perf_regression.BisectPerformanceMetrics
+ original_run_tests = bisect_class.RunPerformanceTestAndParseResults
+ bisect_class.RunPerformanceTestAndParseResults = _MakeMockRunTests()
+
+ try:
+ dry_run_results = _GenericDryRun(_GetExtendedOptions(
+ improvement_dir=0, fake_first=0, ignore_confidence=False,
+ **extra_opts))
+ except StopIteration:
+ # If StopIteration was raised, that means that the next value after
+ # the first two values was requested, so the job was not aborted.
+ return False
+ finally:
+ bisect_class.RunPerformanceTestAndParseResults = original_run_tests
+
+ # If the job was aborted, there should be a warning about it.
+ self.assertTrue(
+ any('did not clearly reproduce a regression' in w
+ for w in dry_run_results.warnings))
+ return True
+
+ def testBisectAbortedOnClearNonRegression(self):
+ self.assertTrue(self._CheckAbortsEarly(CLEAR_NON_REGRESSION))
+
+ def testBisectNotAborted_AlmostRegression(self):
+ self.assertFalse(self._CheckAbortsEarly(ALMOST_REGRESSION))
+
+ def testBisectNotAborted_ClearRegression(self):
+ self.assertFalse(self._CheckAbortsEarly(CLEAR_REGRESSION))
+
+ def testBisectNotAborted_BarelyRegression(self):
+ self.assertFalse(self._CheckAbortsEarly(BARELY_REGRESSION))
+
+ def testBisectNotAborted_MultipleValues(self):
+ self.assertFalse(self._CheckAbortsEarly(MULTIPLE_VALUES))
+
+ def testBisectNotAbortedWhenRequiredConfidenceIsZero(self):
+ self.assertFalse(self._CheckAbortsEarly(
+ CLEAR_NON_REGRESSION, required_initial_confidence=0))
+
+ def _CheckAbortsEarlyForReturnCode(self, results):
+ """Returns True if the bisect job would abort early in return code mode."""
+ global _MockResultsGenerator
+ _MockResultsGenerator = (r for r in results)
+ bisect_class = bisect_perf_regression.BisectPerformanceMetrics
+ original_run_tests = bisect_class.RunPerformanceTestAndParseResults
+ bisect_class.RunPerformanceTestAndParseResults = _MakeMockRunTests(True)
+ options = dict(DEFAULT_OPTIONS)
+ options.update({'bisect_mode': 'return_code'})
+ try:
+ dry_run_results = _GenericDryRun(options)
+ except StopIteration:
+ # If StopIteration was raised, that means that the next value after
+ # the first two values was requested, so the job was not aborted.
+ return False
+ finally:
+ bisect_class.RunPerformanceTestAndParseResults = original_run_tests
+
+ # If the job was aborted, there should be a warning about it.
+ if ('known good and known bad revisions returned same' in
+ dry_run_results.abort_reason):
+ return True
+ return False
+
+ def testBisectAbortOn_SameReturnCode(self):
+ self.assertTrue(self._CheckAbortsEarlyForReturnCode([[0,0,0], [0,0,0]]))
+
+ def testBisectNotAbortedOn_DifferentReturnCode(self):
+ self.assertFalse(self._CheckAbortsEarlyForReturnCode([[1,1,1], [0,0,0]]))
+
+ def testGetCommitPosition(self):
+ cp_git_rev = '7017a81991de983e12ab50dfc071c70e06979531'
+ self.assertEqual(291765, source_control.GetCommitPosition(cp_git_rev))
+
+ svn_git_rev = 'e6db23a037cad47299a94b155b95eebd1ee61a58'
+ self.assertEqual(291467, source_control.GetCommitPosition(svn_git_rev))
+
+ def testGetCommitPositionForV8(self):
+ bisect_instance = _GetBisectPerformanceMetricsInstance(DEFAULT_OPTIONS)
+ v8_rev = '21d700eedcdd6570eff22ece724b63a5eefe78cb'
+ depot_path = os.path.join(bisect_instance.src_cwd, 'v8')
+ self.assertEqual(
+ 23634, source_control.GetCommitPosition(v8_rev, depot_path))
+
+ def testGetCommitPositionForSkia(self):
+ bisect_instance = _GetBisectPerformanceMetricsInstance(DEFAULT_OPTIONS)
+ skia_rev = 'a94d028eCheckAbortsEarly0f2c77f159b3dac95eb90c3b4cf48c61'
+ depot_path = os.path.join(bisect_instance.src_cwd, 'third_party', 'skia')
+ # Skia doesn't use commit positions, and GetCommitPosition should
+ # return None for repos that don't use commit positions.
+ self.assertIsNone(source_control.GetCommitPosition(skia_rev, depot_path))
+
+ def testUpdateDepsContent(self):
+ bisect_instance = _GetBisectPerformanceMetricsInstance(DEFAULT_OPTIONS)
+ deps_file = 'DEPS'
+  # We intentionally read the DEPS file contents instead of using a string
+  # literal with a few lines from DEPS, so that we check whether the format
+  # we expect to search for has changed in the DEPS content.
+ # TODO (prasadv): Add a separate test to validate the DEPS contents with the
+ # format that bisect script expects.
+ deps_contents = bisect_perf_regression.ReadStringFromFile(deps_file)
+ deps_key = 'v8_revision'
+ depot = 'v8'
+ git_revision = 'a12345789a23456789a123456789a123456789'
+ updated_content = bisect_instance.UpdateDepsContents(
+ deps_contents, depot, git_revision, deps_key)
+ self.assertIsNotNone(updated_content)
+ ss = re.compile('["\']%s["\']: ["\']%s["\']' % (deps_key, git_revision))
+ self.assertIsNotNone(re.search(ss, updated_content))
+
+ @mock.patch('bisect_utils.RunGClient')
+ def testSyncToRevisionForChromium(self, mock_RunGClient):
+ bisect_instance = _GetBisectPerformanceMetricsInstance(DEFAULT_OPTIONS)
+ mock_RunGClient.return_value = 0
+ bisect_instance._SyncRevision(
+ 'chromium', 'e6db23a037cad47299a94b155b95eebd1ee61a58', 'gclient')
+ expected_params = [
+ 'sync',
+ '--verbose',
+ '--nohooks',
+ '--force',
+ '--delete_unversioned_trees',
+ '--revision',
+ 'src@e6db23a037cad47299a94b155b95eebd1ee61a58',
+ ]
+
+ mock_RunGClient.assert_called_with(expected_params, cwd=None)
+
+ @mock.patch('bisect_utils.RunGit')
+ def testSyncToRevisionForWebKit(self, mock_RunGit):
+ bisect_instance = _GetBisectPerformanceMetricsInstance(DEFAULT_OPTIONS)
+ mock_RunGit.return_value = None, None
+ bisect_instance._SyncRevision(
+ 'webkit', 'a94d028e0f2c77f159b3dac95eb90c3b4cf48c61', None)
+ expected_params = ['checkout', 'a94d028e0f2c77f159b3dac95eb90c3b4cf48c61']
+ mock_RunGit.assert_called_with(expected_params)
+
+ def testTryJobSvnRepo_PerfBuilderType_ReturnsRepoUrl(self):
+ self.assertEqual(
+ bisect_perf_regression.PERF_SVN_REPO_URL,
+ bisect_perf_regression._TryJobSvnRepo(fetch_build.PERF_BUILDER))
+
+ def testTryJobSvnRepo_FullBuilderType_ReturnsRepoUrl(self):
+ self.assertEqual(
+ bisect_perf_regression.FULL_SVN_REPO_URL,
+ bisect_perf_regression._TryJobSvnRepo(fetch_build.FULL_BUILDER))
+
+ def testTryJobSvnRepo_WithUnknownBuilderType_ThrowsError(self):
+ with self.assertRaises(NotImplementedError):
+ bisect_perf_regression._TryJobSvnRepo('foo')
+
+ def _CheckIsDownloadable(self, depot, target_platform='chromium',
+ builder_type='perf'):
+ opts = dict(DEFAULT_OPTIONS)
+ opts.update({'target_platform': target_platform,
+ 'builder_type': builder_type})
+ bisect_instance = _GetBisectPerformanceMetricsInstance(opts)
+ return bisect_instance.IsDownloadable(depot)
+
+ def testIsDownloadable_ChromiumDepot_ReturnsTrue(self):
+ self.assertTrue(self._CheckIsDownloadable(depot='chromium'))
+
+ def testIsDownloadable_DEPSDepot_ReturnsTrue(self):
+ self.assertTrue(self._CheckIsDownloadable(depot='v8'))
+
+ def testIsDownloadable_AndroidChromeDepot_ReturnsTrue(self):
+ self.assertTrue(self._CheckIsDownloadable(
+ depot='android-chrome', target_platform='android-chrome'))
+
+ def testIsDownloadable_AndroidChromeWithDEPSChromium_ReturnsFalse(self):
+ self.assertFalse(self._CheckIsDownloadable(
+ depot='chromium', target_platform='android-chrome'))
+
+ def testIsDownloadable_AndroidChromeWithDEPSV8_ReturnsFalse(self):
+ self.assertFalse(self._CheckIsDownloadable(
+ depot='v8', target_platform='android-chrome'))
+
+ def testIsDownloadable_NoBuilderType_ReturnsFalse(self):
+ self.assertFalse(
+ self._CheckIsDownloadable(depot='chromium', builder_type=''))
+
+
+class DepotDirectoryRegistryTest(unittest.TestCase):
+
+ def setUp(self):
+ self.old_chdir = os.chdir
+ os.chdir = self.mockChdir
+ self.old_depot_names = bisect_utils.DEPOT_NAMES
+ bisect_utils.DEPOT_NAMES = ['mock_depot']
+ self.old_depot_deps_name = bisect_utils.DEPOT_DEPS_NAME
+ bisect_utils.DEPOT_DEPS_NAME = {'mock_depot': {'src': 'src/foo'}}
+
+ self.registry = bisect_perf_regression.DepotDirectoryRegistry('/mock/src')
+ self.cur_dir = None
+
+ def tearDown(self):
+ os.chdir = self.old_chdir
+ bisect_utils.DEPOT_NAMES = self.old_depot_names
+ bisect_utils.DEPOT_DEPS_NAME = self.old_depot_deps_name
+
+ def mockChdir(self, new_dir):
+ self.cur_dir = new_dir
+
+ def testReturnsCorrectResultForChrome(self):
+ self.assertEqual(self.registry.GetDepotDir('chromium'), '/mock/src')
+
+ def testUsesDepotSpecToInitializeRegistry(self):
+ self.assertEqual(self.registry.GetDepotDir('mock_depot'), '/mock/src/foo')
+
+ def testChangedTheDirectory(self):
+ self.registry.ChangeToDepotDir('mock_depot')
+ self.assertEqual(self.cur_dir, '/mock/src/foo')
+
+
+# The tests below test private functions (W0212).
+# pylint: disable=W0212
+class GitTryJobTestCases(unittest.TestCase):
+
+ """Test case for bisect try job."""
+ def setUp(self):
+ bisect_utils_patcher = mock.patch('bisect_perf_regression.bisect_utils')
+ self.mock_bisect_utils = bisect_utils_patcher.start()
+ self.addCleanup(bisect_utils_patcher.stop)
+
+ def _SetupRunGitMock(self, git_cmds):
+ """Setup RunGit mock with expected output for given git command."""
+ def side_effect(git_cmd_args):
+ for val in git_cmds:
+ if set(val[0]) == set(git_cmd_args):
+ return val[1]
+ self.mock_bisect_utils.RunGit = mock.Mock(side_effect=side_effect)
+
+ def _AssertRunGitExceptions(self, git_cmds, func, *args):
+ """Setup RunGit mock and tests RunGitException.
+
+ Args:
+ git_cmds: List of tuples with git command and expected output.
+ func: Callback function to be executed.
+ args: List of arguments to be passed to the function.
+ """
+ self._SetupRunGitMock(git_cmds)
+ self.assertRaises(bisect_perf_regression.RunGitError,
+ func,
+ *args)
+
+ def testNotGitRepo(self):
+ new_branch = bisect_perf_regression.BISECT_TRYJOB_BRANCH
+ parent_branch = bisect_perf_regression.BISECT_MASTER_BRANCH
+ cmds = [(['rev-parse', '--abbrev-ref', 'HEAD'], (None, 128))]
+ self._AssertRunGitExceptions(cmds,
+ bisect_perf_regression._PrepareBisectBranch,
+ parent_branch, new_branch)
+
+ def testFailedCheckoutMaster(self):
+ new_branch = bisect_perf_regression.BISECT_TRYJOB_BRANCH
+ parent_branch = bisect_perf_regression.BISECT_MASTER_BRANCH
+ cmds = [
+ (['rev-parse', '--abbrev-ref', 'HEAD'], (new_branch, 0)),
+ (['checkout', '-f', parent_branch], ('Checkout Failed', 1)),
+ ]
+ self._AssertRunGitExceptions(cmds,
+ bisect_perf_regression._PrepareBisectBranch,
+ parent_branch, new_branch)
+
+ def testDeleteBisectBranchIfExists(self):
+ new_branch = bisect_perf_regression.BISECT_TRYJOB_BRANCH
+ parent_branch = bisect_perf_regression.BISECT_MASTER_BRANCH
+ cmds = [
+ (['rev-parse', '--abbrev-ref', 'HEAD'], (parent_branch, 0)),
+ (['branch', '--list'], ('bisect-tryjob\n*master\nsomebranch', 0)),
+ (['branch', '-D', new_branch], ('Failed to delete branch', 128)),
+ ]
+ self._AssertRunGitExceptions(cmds,
+ bisect_perf_regression._PrepareBisectBranch,
+ parent_branch, new_branch)
+
+ def testCreatNewBranchFails(self):
+ new_branch = bisect_perf_regression.BISECT_TRYJOB_BRANCH
+ parent_branch = bisect_perf_regression.BISECT_MASTER_BRANCH
+ cmds = [
+ (['rev-parse', '--abbrev-ref', 'HEAD'], (parent_branch, 0)),
+ (['branch', '--list'], ('bisect-tryjob\n*master\nsomebranch', 0)),
+ (['branch', '-D', new_branch], ('None', 0)),
+ (['update-index', '--refresh', '-q'], (None, 0)),
+ (['diff-index', 'HEAD'], (None, 0)),
+ (['checkout', '-b', new_branch], ('Failed to create branch', 128)),
+ ]
+ self._AssertRunGitExceptions(cmds,
+ bisect_perf_regression._PrepareBisectBranch,
+ parent_branch, new_branch)
+
+ def testSetUpstreamToFails(self):
+ new_branch = bisect_perf_regression.BISECT_TRYJOB_BRANCH
+ parent_branch = bisect_perf_regression.BISECT_MASTER_BRANCH
+ cmds = [
+ (['rev-parse', '--abbrev-ref', 'HEAD'], (parent_branch, 0)),
+ (['branch', '--list'], ('bisect-tryjob\n*master\nsomebranch', 0)),
+ (['branch', '-D', new_branch], ('None', 0)),
+ (['update-index', '--refresh', '-q'], (None, 0)),
+ (['diff-index', 'HEAD'], (None, 0)),
+ (['checkout', '-b', new_branch], ('None', 0)),
+ (['branch', '--set-upstream-to', parent_branch],
+ ('Setuptream fails', 1)),
+ ]
+ self._AssertRunGitExceptions(cmds,
+ bisect_perf_regression._PrepareBisectBranch,
+ parent_branch, new_branch)
+
+ def testStartBuilderTryJobForException(self):
+ git_revision = 'ac4a9f31fe2610bd146857bbd55d7a260003a888'
+ bot_name = 'linux_perf_bisect_builder'
+ bisect_job_name = 'testBisectJobname'
+ patch = None
+ patch_content = '/dev/null'
+ new_branch = bisect_perf_regression.BISECT_TRYJOB_BRANCH
+ parent_branch = bisect_perf_regression.BISECT_MASTER_BRANCH
+ try_cmd = [
+ (['rev-parse', '--abbrev-ref', 'HEAD'], (parent_branch, 0)),
+ (['branch', '--list'], ('bisect-tryjob\n*master\nsomebranch', 0)),
+ (['branch', '-D', new_branch], ('None', 0)),
+ (['update-index', '--refresh', '-q'], (None, 0)),
+ (['diff-index', 'HEAD'], (None, 0)),
+ (['checkout', '-b', new_branch], ('None', 0)),
+ (['branch', '--set-upstream-to', parent_branch],
+ ('Setuptream fails', 0)),
+ (['try',
+ '--bot=%s' % bot_name,
+ '--revision=%s' % git_revision,
+ '--name=%s' % bisect_job_name,
+ '--svn_repo=%s' % bisect_perf_regression.PERF_SVN_REPO_URL,
+ '--diff=%s' % patch_content],
+ (None, 1)),
+ ]
+ self._AssertRunGitExceptions(
+ try_cmd, bisect_perf_regression._StartBuilderTryJob,
+ fetch_build.PERF_BUILDER, git_revision, bot_name, bisect_job_name,
+ patch)
+
+ def testBuilderTryJob(self):
+ git_revision = 'ac4a9f31fe2610bd146857bbd55d7a260003a888'
+ bot_name = 'linux_perf_bisect_builder'
+ bisect_job_name = 'testBisectJobname'
+ patch = None
+ patch_content = '/dev/null'
+ new_branch = bisect_perf_regression.BISECT_TRYJOB_BRANCH
+ parent_branch = bisect_perf_regression.BISECT_MASTER_BRANCH
+ try_cmd = [
+ (['rev-parse', '--abbrev-ref', 'HEAD'], (parent_branch, 0)),
+ (['branch', '--list'], ('bisect-tryjob\n*master\nsomebranch', 0)),
+ (['branch', '-D', new_branch], ('None', 0)),
+ (['update-index', '--refresh', '-q'], (None, 0)),
+ (['diff-index', 'HEAD'], (None, 0)),
+ (['checkout', '-b', new_branch], ('None', 0)),
+ (['branch', '--set-upstream-to', parent_branch],
+ ('Setuptream fails', 0)),
+ (['try',
+ '--bot=%s' % bot_name,
+ '--revision=%s' % git_revision,
+ '--name=%s' % bisect_job_name,
+ '--svn_repo=%s' % bisect_perf_regression.PERF_SVN_REPO_URL,
+ '--diff=%s' % patch_content],
+ (None, 0)),
+ ]
+ self._SetupRunGitMock(try_cmd)
+ bisect_perf_regression._StartBuilderTryJob(
+ fetch_build.PERF_BUILDER, git_revision, bot_name, bisect_job_name,
+ patch)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/auto_bisect/bisect_printer.py b/chromium/tools/auto_bisect/bisect_printer.py
new file mode 100644
index 00000000000..9b92320b8e0
--- /dev/null
+++ b/chromium/tools/auto_bisect/bisect_printer.py
@@ -0,0 +1,342 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This file contains printing-related functionality of the bisect."""
+
+import datetime
+import re
+
+from bisect_results import BisectResults
+import bisect_utils
+import source_control
+
+
+# The perf dashboard looks for a string like "Estimated Confidence: 95%"
+# to decide whether or not to cc the author(s). If you change this, please
+# update the perf dashboard as well.
+RESULTS_BANNER = """
+===== BISECT JOB RESULTS =====
+Status: %(status)s
+
+Test Command: %(command)s
+Test Metric: %(metric)s
+Relative Change: %(change)s
+Estimated Confidence: %(confidence).02f%%
+Retested CL with revert: %(retest)s"""
+
+# When the bisect was aborted without a bisect failure the following template
+# is used.
+ABORT_REASON_TEMPLATE = """
+===== BISECTION ABORTED =====
+The bisect was aborted because %(abort_reason)s
+Please contact the team (see below) if you believe this is in error.
+
+Bug ID: %(bug_id)s
+
+Test Command: %(command)s
+Test Metric: %(metric)s
+Good revision: %(good_revision)s
+Bad revision: %(bad_revision)s """
+
+# The perf dashboard specifically looks for the string
+# "Author : " to parse out who to cc on a bug. If you change the
+# formatting here, please update the perf dashboard as well.
+RESULTS_REVISION_INFO = """
+===== SUSPECTED CL(s) =====
+Subject : %(subject)s
+Author : %(author)s%(commit_info)s
+Commit : %(cl)s
+Date : %(cl_date)s"""
+
+RESULTS_THANKYOU = """
+| O O | Visit http://www.chromium.org/developers/speed-infra/perf-bug-faq
+| X | for more information addressing perf regression bugs. For feedback,
+| / \\ | file a bug with label Cr-Tests-AutoBisect. Thank you!"""
+
+
+class BisectPrinter(object):
+
+ def __init__(self, opts, depot_registry=None):
+ self.opts = opts
+ self.depot_registry = depot_registry
+
+ def FormatAndPrintResults(self, bisect_results):
+ """Prints the results from a bisection run in a readable format.
+
+ Also prints annotations creating buildbot step "Results".
+
+ Args:
+ bisect_results: BisectResult object containing results to be printed.
+ """
+ if bisect_results.abort_reason:
+ self._PrintAbortResults(bisect_results.abort_reason)
+ return
+
+ if self.opts.output_buildbot_annotations:
+ bisect_utils.OutputAnnotationStepStart('Build Status Per Revision')
+
+ print
+ print 'Full results of bisection:'
+ for revision_state in bisect_results.state.GetRevisionStates():
+ build_status = revision_state.passed
+
+ if type(build_status) is bool:
+ if build_status:
+ build_status = 'Good'
+ else:
+ build_status = 'Bad'
+
+ print ' %20s %40s %s' % (revision_state.depot,
+ revision_state.revision,
+ build_status)
+ print
+
+ if self.opts.output_buildbot_annotations:
+ bisect_utils.OutputAnnotationStepClosed()
+ # The perf dashboard scrapes the "results" step in order to comment on
+ # bugs. If you change this, please update the perf dashboard as well.
+ bisect_utils.OutputAnnotationStepStart('Results')
+
+ self._PrintBanner(bisect_results)
+ self._PrintWarnings(bisect_results.warnings)
+
+ if bisect_results.culprit_revisions and bisect_results.confidence:
+ for culprit in bisect_results.culprit_revisions:
+ cl, info, depot = culprit
+ self._PrintRevisionInfo(cl, info, depot)
+ self._PrintRetestResults(bisect_results)
+ self._PrintTestedCommitsTable(bisect_results.state.GetRevisionStates(),
+ bisect_results.first_working_revision,
+ bisect_results.last_broken_revision,
+ bisect_results.confidence,
+ final_step=True)
+ self._PrintStepTime(bisect_results.state.GetRevisionStates())
+ self._PrintThankYou()
+ if self.opts.output_buildbot_annotations:
+ bisect_utils.OutputAnnotationStepClosed()
+
+ def PrintPartialResults(self, bisect_state):
+ revision_states = bisect_state.GetRevisionStates()
+ first_working_rev, last_broken_rev = BisectResults.FindBreakingRevRange(
+ revision_states)
+ self._PrintTestedCommitsTable(revision_states, first_working_rev,
+ last_broken_rev, 100, final_step=False)
+
+ def _PrintAbortResults(self, abort_reason):
+ if self.opts.output_buildbot_annotations:
+ bisect_utils.OutputAnnotationStepStart('Results')
+
+ # Metric string in config is not split in case of return code mode.
+ if (self.opts.metric and
+ self.opts.bisect_mode != bisect_utils.BISECT_MODE_RETURN_CODE):
+ metric = '/'.join(self.opts.metric)
+ else:
+ metric = self.opts.metric
+
+ print ABORT_REASON_TEMPLATE % {
+ 'abort_reason': abort_reason,
+ 'bug_id': self.opts.bug_id or 'NOT SPECIFIED',
+ 'command': self.opts.command,
+ 'metric': metric,
+ 'good_revision': self.opts.good_revision,
+ 'bad_revision': self.opts.bad_revision,
+ }
+ self._PrintThankYou()
+ if self.opts.output_buildbot_annotations:
+ bisect_utils.OutputAnnotationStepClosed()
+
+ @staticmethod
+ def _PrintThankYou():
+ print RESULTS_THANKYOU
+
+ @staticmethod
+ def _PrintStepTime(revision_states):
+ """Prints information about how long various steps took.
+
+ Args:
+ revision_states: Ordered list of revision states."""
+ step_perf_time_avg = 0.0
+ step_build_time_avg = 0.0
+ step_count = 0.0
+ for revision_state in revision_states:
+ if revision_state.value:
+ step_perf_time_avg += revision_state.perf_time
+ step_build_time_avg += revision_state.build_time
+ step_count += 1
+ if step_count:
+ step_perf_time_avg = step_perf_time_avg / step_count
+ step_build_time_avg = step_build_time_avg / step_count
+ print
+ print 'Average build time : %s' % datetime.timedelta(
+ seconds=int(step_build_time_avg))
+ print 'Average test time : %s' % datetime.timedelta(
+ seconds=int(step_perf_time_avg))
+
+ @staticmethod
+ def _GetViewVCLinkFromDepotAndHash(git_revision, depot):
+ """Gets link to the repository browser."""
+ if depot and 'viewvc' in bisect_utils.DEPOT_DEPS_NAME[depot]:
+ return bisect_utils.DEPOT_DEPS_NAME[depot]['viewvc'] + git_revision
+ return ''
+
+ def _PrintRevisionInfo(self, cl, info, depot=None):
+ commit_link = self._GetViewVCLinkFromDepotAndHash(cl, depot)
+ if commit_link:
+ commit_link = '\nLink : %s' % commit_link
+ else:
+ commit_link = ('\Description:\n%s' % info['body'])
+ print RESULTS_REVISION_INFO % {
+ 'subject': info['subject'],
+ 'author': info['email'],
+ 'commit_info': commit_link,
+ 'cl': cl,
+ 'cl_date': info['date']
+ }
+
+ @staticmethod
+ def _PrintTableRow(column_widths, row_data):
+ """Prints out a row in a formatted table that has columns aligned.
+
+ Args:
+ column_widths: A list of column width numbers.
+ row_data: A list of items for each column in this row.
+ """
+ assert len(column_widths) == len(row_data)
+ text = ''
+ for i in xrange(len(column_widths)):
+ current_row_data = row_data[i].center(column_widths[i], ' ')
+ text += ('%%%ds' % column_widths[i]) % current_row_data
+ print text
+
+ def _PrintTestedCommitsHeader(self):
+ if self.opts.bisect_mode == bisect_utils.BISECT_MODE_MEAN:
+ self._PrintTableRow(
+ [20, 12, 70, 14, 12, 13],
+ ['Depot', 'Position', 'SHA', 'Mean', 'Std. Error', 'State'])
+ elif self.opts.bisect_mode == bisect_utils.BISECT_MODE_STD_DEV:
+ self._PrintTableRow(
+ [20, 12, 70, 14, 12, 13],
+ ['Depot', 'Position', 'SHA', 'Std. Error', 'Mean', 'State'])
+ elif self.opts.bisect_mode == bisect_utils.BISECT_MODE_RETURN_CODE:
+ self._PrintTableRow(
+ [20, 12, 70, 14, 13],
+ ['Depot', 'Position', 'SHA', 'Return Code', 'State'])
+ else:
+ assert False, 'Invalid bisect_mode specified.'
+
+ def _PrintTestedCommitsEntry(self, revision_state, commit_position, cl_link,
+ state_str):
+ if self.opts.bisect_mode == bisect_utils.BISECT_MODE_MEAN:
+ std_error = '+-%.02f' % revision_state.value['std_err']
+ mean = '%.02f' % revision_state.value['mean']
+ self._PrintTableRow(
+ [20, 12, 70, 12, 14, 13],
+ [revision_state.depot, commit_position, cl_link, mean, std_error,
+ state_str])
+ elif self.opts.bisect_mode == bisect_utils.BISECT_MODE_STD_DEV:
+ std_error = '+-%.02f' % revision_state.value['std_err']
+ mean = '%.02f' % revision_state.value['mean']
+ self._PrintTableRow(
+ [20, 12, 70, 12, 14, 13],
+ [revision_state.depot, commit_position, cl_link, std_error, mean,
+ state_str])
+ elif self.opts.bisect_mode == bisect_utils.BISECT_MODE_RETURN_CODE:
+ mean = '%d' % revision_state.value['mean']
+ self._PrintTableRow(
+ [20, 12, 70, 14, 13],
+ [revision_state.depot, commit_position, cl_link, mean,
+ state_str])
+
+ def _PrintTestedCommitsTable(
+ self, revision_states, first_working_revision, last_broken_revision,
+ confidence, final_step=True):
+ print
+ if final_step:
+ print '===== TESTED COMMITS ====='
+ else:
+ print '===== PARTIAL RESULTS ====='
+ self._PrintTestedCommitsHeader()
+ state = 0
+ for revision_state in revision_states:
+ if revision_state.value:
+ if (revision_state == last_broken_revision or
+ revision_state == first_working_revision):
+ # If confidence is too low, don't add this empty line since it's
+ # used to put focus on a suspected CL.
+ if confidence and final_step:
+ print
+ state += 1
+ if state == 2 and not final_step:
+ # Just want a separation between "bad" and "good" cl's.
+ print
+
+ state_str = 'Bad'
+ if state == 1 and final_step:
+ state_str = 'Suspected CL'
+ elif state == 2:
+ state_str = 'Good'
+
+ # If confidence is too low, don't bother outputting good/bad.
+ if not confidence:
+ state_str = ''
+ state_str = state_str.center(13, ' ')
+ commit_position = source_control.GetCommitPosition(
+ revision_state.revision,
+ self.depot_registry.GetDepotDir(revision_state.depot))
+ display_commit_pos = ''
+ if commit_position:
+ display_commit_pos = str(commit_position)
+ self._PrintTestedCommitsEntry(revision_state,
+ display_commit_pos,
+ revision_state.revision,
+ state_str)
+
+ def _PrintRetestResults(self, bisect_results):
+ if (not bisect_results.retest_results_tot or
+ not bisect_results.retest_results_reverted):
+ return
+ print
+ print '===== RETEST RESULTS ====='
+ self._PrintTestedCommitsEntry(
+ bisect_results.retest_results_tot, '', '', '')
+ self._PrintTestedCommitsEntry(
+ bisect_results.retest_results_reverted, '', '', '')
+
+ def _PrintBanner(self, bisect_results):
+ if self.opts.bisect_mode == bisect_utils.BISECT_MODE_RETURN_CODE:
+ metric = 'N/A'
+ change = 'Yes'
+ else:
+ metric = '/'.join(self.opts.metric)
+ change = '%.02f%% (+/-%.02f%%)' % (
+ bisect_results.regression_size, bisect_results.regression_std_err)
+ if not bisect_results.culprit_revisions:
+ change = 'No significant change reproduced.'
+
+ print RESULTS_BANNER % {
+ 'status': self._StatusMessage(bisect_results),
+ 'command': self.opts.command,
+ 'metric': metric,
+ 'change': change,
+ 'confidence': bisect_results.confidence,
+ 'retest': 'Yes' if bisect_results.retest_results_tot else 'No',
+ }
+
+ @staticmethod
+ def _StatusMessage(bisect_results):
+ if bisect_results.confidence >= bisect_utils.HIGH_CONFIDENCE:
+ return 'Positive: Reproduced a change.'
+ elif bisect_results.culprit_revisions:
+ return 'Negative: Found possible suspect(s), but with low confidence.'
+ return 'Negative: Did not reproduce a change.'
+
+ @staticmethod
+ def _PrintWarnings(warnings):
+ """Prints a list of warning strings if there are any."""
+ if not warnings:
+ return
+ print
+ print 'WARNINGS:'
+ for w in set(warnings):
+ print ' ! %s' % w
diff --git a/chromium/tools/auto_bisect/bisect_results.py b/chromium/tools/auto_bisect/bisect_results.py
new file mode 100644
index 00000000000..094d107576f
--- /dev/null
+++ b/chromium/tools/auto_bisect/bisect_results.py
@@ -0,0 +1,262 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import math
+import os
+
+import bisect_utils
+import math_utils
+import source_control
+import ttest
+
+from bisect_state import RevisionState
+
+
+class BisectResults(object):
+ """Contains results of the completed bisect.
+
+ Properties:
+ error: Error message if the bisect failed.
+
+ If the error is None, the following properties are present:
+ warnings: List of warnings from the bisect run.
+ state: BisectState object from which these results were generated.
+ first_working_revision: First good revision.
+ last_broken_revision: Last bad revision.
+
+ If both of above revisions are not None, the follow properties are present:
+ culprit_revisions: A list of revisions, which contain the bad change
+ introducing the failure.
+ regression_size: For performance bisects, this is a relative change of
+ the mean metric value. For other bisects this field always contains
+ 'zero-to-nonzero'.
+ regression_std_err: For performance bisects, it is a pooled standard error
+ for groups of good and bad runs. Not used for other bisects.
+ confidence: For performance bisects, it is a confidence that the good and
+ bad runs are distinct groups. Not used for non-performance bisects.
+ """
+
+ def __init__(self, bisect_state=None, depot_registry=None, opts=None,
+ runtime_warnings=None, error=None, abort_reason=None):
+ """Computes final bisect results after a bisect run is complete.
+
+ This constructor should be called in one of the following ways:
+ BisectResults(state, depot_registry, opts, runtime_warnings)
+ BisectResults(error=error)
+
+ First option creates an object representing successful bisect results, while
+ second option creates an error result.
+
+ Args:
+ bisect_state: BisectState object representing latest bisect state.
+ depot_registry: DepotDirectoryRegistry object with information on each
+ repository in the bisect_state.
+ opts: Options passed to the bisect run.
+ runtime_warnings: A list of warnings from the bisect run.
+ error: Error message. When error is not None, other arguments are ignored.
+ """
+ # Setting these attributes so that bisect printer does not break when the
+ # regression cannot be reproduced (no broken revision was found)
+ self.regression_size = 0
+ self.regression_std_err = 0
+ self.confidence = 0
+ self.culprit_revisions = []
+
+ self.error = error
+ self.abort_reason = abort_reason
+ if error is not None or abort_reason is not None:
+ return
+
+ assert (bisect_state is not None and depot_registry is not None and
+ opts is not None and runtime_warnings is not None), (
+ 'Incorrect use of the BisectResults constructor. '
+ 'When error is None, all other arguments are required.')
+
+ self.state = bisect_state
+
+ rev_states = bisect_state.GetRevisionStates()
+ first_working_rev, last_broken_rev = self.FindBreakingRevRange(rev_states)
+ self.first_working_revision = first_working_rev
+ self.last_broken_revision = last_broken_rev
+
+ self.warnings = runtime_warnings
+
+ self.retest_results_tot = None
+ self.retest_results_reverted = None
+
+ if first_working_rev is not None and last_broken_rev is not None:
+ statistics = self._ComputeRegressionStatistics(
+ rev_states, first_working_rev, last_broken_rev)
+
+ self.regression_size = statistics['regression_size']
+ self.regression_std_err = statistics['regression_std_err']
+ self.confidence = statistics['confidence']
+
+ self.culprit_revisions = self._FindCulpritRevisions(
+ rev_states, depot_registry, first_working_rev, last_broken_rev)
+
+ self.warnings += self._GetResultBasedWarnings(
+ self.culprit_revisions, opts, self.confidence)
+
+ def AddRetestResults(self, results_tot, results_reverted):
+ if not results_tot or not results_reverted:
+ self.warnings.append(
+ 'Failed to re-test reverted culprit CL against ToT.')
+ return
+
+ confidence = BisectResults.ConfidenceScore(
+ results_reverted[0]['values'],
+ results_tot[0]['values'])
+
+ self.retest_results_tot = RevisionState('ToT', 'n/a', 0)
+ self.retest_results_tot.value = results_tot[0]
+
+ self.retest_results_reverted = RevisionState('Reverted', 'n/a', 0)
+ self.retest_results_reverted.value = results_reverted[0]
+
+ if confidence <= bisect_utils.HIGH_CONFIDENCE:
+ self.warnings.append(
+ 'Confidence of re-test with reverted CL is not high.'
+ ' Check that the regression hasn\'t already recovered. '
+ ' There\'s still a chance this is a regression, as performance of'
+ ' local builds may not match official builds.')
+
+ @staticmethod
+ def _GetResultBasedWarnings(culprit_revisions, opts, confidence):
+ warnings = []
+ if len(culprit_revisions) > 1:
+ warnings.append('Due to build errors, regression range could '
+ 'not be narrowed down to a single commit.')
+ if opts.repeat_test_count == 1:
+ warnings.append('Tests were only set to run once. This may '
+ 'be insufficient to get meaningful results.')
+ if 0 < confidence < bisect_utils.HIGH_CONFIDENCE:
+ warnings.append('Confidence is not high. Try bisecting again '
+ 'with increased repeat_count, larger range, or '
+ 'on another metric.')
+ if not confidence:
+ warnings.append('Confidence score is 0%. Try bisecting again on '
+ 'another platform or another metric.')
+ return warnings
+
+ @staticmethod
+ def ConfidenceScore(sample1, sample2, accept_single_bad_or_good=False):
+ """Calculates a confidence score.
+
+ This score is based on a statistical hypothesis test. The null
+ hypothesis is that the two groups of results have no difference,
+ i.e. there is no performance regression. The alternative hypothesis
+ is that there is some difference between the groups that's unlikely
+ to occur by chance.
+
+ The score returned by this function represents our confidence in the
+ alternative hypothesis.
+
+ Note that if there's only one item in either sample, this means only
+ one revision was classified good or bad, so there's not much evidence
+ to make a decision.
+
+ Args:
+ sample1: A flat list of "good" result numbers.
+ sample2: A flat list of "bad" result numbers.
+ accept_single_bad_or_good: If True, compute a value even if
+ there is only one bad or good revision.
+
+ Returns:
+ A float between 0 and 100; 0 if the samples aren't large enough.
+ """
+ if ((len(sample1) <= 1 or len(sample2) <= 1) and
+ not accept_single_bad_or_good):
+ return 0.0
+ if not sample1 or not sample2:
+ return 0.0
+ _, _, p_value = ttest.WelchsTTest(sample1, sample2)
+ return 100.0 * (1.0 - p_value)
+
+ @staticmethod
+ def FindBreakingRevRange(revision_states):
+ """Finds the last known good and first known bad revisions.
+
+ Note that since revision_states is expected to be in reverse chronological
+ order, the last known good revision is the first revision in the list that
+ has the passed property set to 1, therefore the name
+ `first_working_revision`. The inverse applies to `last_broken_revision`.
+
+ Args:
+ revision_states: A list of RevisionState instances.
+
+ Returns:
+ A tuple containing the two revision states at the border. (Last
+ known good and first known bad.)
+ """
+ first_working_revision = None
+ last_broken_revision = None
+
+ for revision_state in revision_states:
+ if revision_state.passed == 1 and not first_working_revision:
+ first_working_revision = revision_state
+
+ if not revision_state.passed:
+ last_broken_revision = revision_state
+
+ return first_working_revision, last_broken_revision
+
+ @staticmethod
+ def _FindCulpritRevisions(revision_states, depot_registry, first_working_rev,
+ last_broken_rev):
+ cwd = os.getcwd()
+
+ culprit_revisions = []
+ for i in xrange(last_broken_rev.index, first_working_rev.index):
+ depot_registry.ChangeToDepotDir(revision_states[i].depot)
+ info = source_control.QueryRevisionInfo(revision_states[i].revision)
+ culprit_revisions.append((revision_states[i].revision, info,
+ revision_states[i].depot))
+
+ os.chdir(cwd)
+ return culprit_revisions
+
+ @classmethod
+ def _ComputeRegressionStatistics(cls, rev_states, first_working_rev,
+ last_broken_rev):
+ # TODO(sergiyb): We assume that value has "values" key, which may not be
+ # the case for failure-bisects, where there is a single value only.
+ broken_means = [state.value['values']
+ for state in rev_states[:last_broken_rev.index+1]
+ if state.value]
+
+ working_means = [state.value['values']
+ for state in rev_states[first_working_rev.index:]
+ if state.value]
+
+ # Flatten the lists to calculate mean of all values.
+ working_mean = sum(working_means, [])
+ broken_mean = sum(broken_means, [])
+
+ # Calculate the approximate size of the regression
+ mean_of_bad_runs = math_utils.Mean(broken_mean)
+ mean_of_good_runs = math_utils.Mean(working_mean)
+
+ regression_size = 100 * math_utils.RelativeChange(mean_of_good_runs,
+ mean_of_bad_runs)
+ if math.isnan(regression_size):
+ regression_size = 'zero-to-nonzero'
+
+ regression_std_err = math.fabs(math_utils.PooledStandardError(
+ [working_mean, broken_mean]) /
+ max(0.0001, min(mean_of_good_runs, mean_of_bad_runs))) * 100.0
+
+ # Give a "confidence" in the bisect culprit by seeing whether the results
+ # of the culprit revision and the revision before that appear to be
+ # statistically significantly different.
+ confidence = cls.ConfidenceScore(
+ sum([first_working_rev.value['values']], []),
+ sum([last_broken_rev.value['values']], []))
+
+ bad_greater_than_good = mean_of_bad_runs > mean_of_good_runs
+
+ return {'regression_size': regression_size,
+ 'regression_std_err': regression_std_err,
+ 'confidence': confidence,
+ 'bad_greater_than_good': bad_greater_than_good}
diff --git a/chromium/tools/auto_bisect/bisect_results_json.py b/chromium/tools/auto_bisect/bisect_results_json.py
new file mode 100644
index 00000000000..210159cdc99
--- /dev/null
+++ b/chromium/tools/auto_bisect/bisect_results_json.py
@@ -0,0 +1,88 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+import bisect_utils
+import source_control
+
+
def Get(bisect_results, opts, depot_registry):
  """Returns the results as a jsonable object.

  Args:
    bisect_results: A BisectResults instance with the bisect outcome.
    opts: Bisect options providing command, metric, revisions and job ids.
    depot_registry: Registry used to resolve depot checkout directories.

  Returns:
    A dict serializable with the json module.
  """
  if opts.bisect_mode == bisect_utils.BISECT_MODE_RETURN_CODE:
    # Return-code bisects have no perf metric. Previously |metric| was left
    # unbound on this branch, which raised a NameError when building the
    # result dict below.
    metric = 'N/A'
    change = '0'
  else:
    metric = '/'.join(opts.metric)
    change = '%.02f%%' % bisect_results.regression_size

  status = 'completed'

  return {
      'try_job_id': opts.try_job_id,
      'bug_id': opts.bug_id,
      'status': status,
      'buildbot_log_url': _GetBuildBotLogUrl(),
      'bisect_bot': os.environ.get('BUILDBOT_BUILDERNAME', ''),
      'command': opts.command,
      'metric': metric,
      'change': change,
      'score': bisect_results.confidence,
      'good_revision': opts.good_revision,
      'bad_revision': opts.bad_revision,
      'warnings': bisect_results.warnings,
      'abort_reason': bisect_results.abort_reason,
      'culprit_data': _CulpritData(bisect_results),
      'revision_data': _RevisionData(bisect_results, depot_registry),
  }
+
+
def _CulpritData(bisect_results):
  """Returns a dict describing the first culprit revision, or None.

  Args:
    bisect_results: A BisectResults instance whose culprit_revisions list
        holds (cl, info, depot) tuples.
  """
  if not bisect_results.culprit_revisions:
    return None
  cl, culprit_info, depot = bisect_results.culprit_revisions[0]
  commit_link = _GetViewVCLinkFromDepotAndHash(cl, depot)
  if commit_link:
    commit_link = '\nLink : %s' % commit_link
  else:
    # Bug fix: the original used '\Description' — an invalid escape that
    # rendered a literal backslash. The leading newline matches the
    # '\nLink : %s' branch above.
    commit_link = ('\nDescription:\n%s' % culprit_info['body'])

  return {
      'subject': culprit_info['subject'],
      'author': culprit_info['email'],
      'email': culprit_info['email'],
      'cl_date': culprit_info['date'],
      'commit_info': commit_link,
      'revisions_links': [],
      'cl': cl
  }
+
+
def _RevisionData(bisect_results, depot_registry):
  """Builds a list of per-revision result rows for the report."""
  rows = []
  for state in bisect_results.state.GetRevisionStates():
    depot_dir = depot_registry.GetDepotDir(state.depot)
    commit_position = source_control.GetCommitPosition(state.revision,
                                                       depot_dir)
    # NOTE(review): any truthy |passed| value (including '?') is reported
    # as 'good' here — confirm that is intended for skipped revisions.
    verdict = 'good' if state.passed else 'bad'
    rows.append({
        'depot_name': state.depot,
        'deps_revision': state.revision,
        'commit_pos': commit_position,
        'result': verdict,
    })
  return rows
+
+
def _GetViewVCLinkFromDepotAndHash(git_revision, depot):
  """Returns a commit-browser URL for |git_revision|, or '' if unavailable."""
  if not depot:
    return ''
  depot_info = bisect_utils.DEPOT_DEPS_NAME[depot]
  if 'viewvc' not in depot_info:
    return ''
  return depot_info['viewvc'] + git_revision
+
+
+def _GetBuildBotLogUrl():
+ master_url = os.environ.get('BUILDBOT_BUILDBOTURL')
+ builder_name = os.environ.get('BUILDBOT_BUILDERNAME')
+ builder_number = os.environ.get('BUILDBOT_BUILDNUMBER')
+ if master_url and builder_name and builder_number:
+ return '%s%s/%s' % (master_url, builder_name, builder_number)
+ return ''
diff --git a/chromium/tools/auto_bisect/bisect_results_test.py b/chromium/tools/auto_bisect/bisect_results_test.py
new file mode 100644
index 00000000000..acbff10f693
--- /dev/null
+++ b/chromium/tools/auto_bisect/bisect_results_test.py
@@ -0,0 +1,283 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import unittest
+
+from bisect_results import BisectResults
+import source_control
+
+
class MockDepotRegistry(object):
  """Minimal stand-in for the depot registry used by BisectResults."""

  def ChangeToDepotDir(self, depot):
    """No-op: the tests never need to actually change directories."""
    return None
+
+
class MockRevisionState(object):
  """Lightweight RevisionState double with keyword-configurable fields."""

  def __init__(self, revision, index, depot='chromium', value=None,
               perf_time=0, build_time=0, passed='?', external=None):
    # Mirror every constructor argument onto an attribute of the same name.
    self.revision, self.index, self.depot = revision, index, depot
    self.value, self.passed, self.external = value, passed, external
    self.perf_time, self.build_time = perf_time, build_time
+
+
class MockBisectState(object):
  """Fake bisect state: two bad revisions ('a', 'b') followed by three good
  ones ('c', 'd', 'e'), in reverse chronological order."""

  def __init__(self):
    self.mock_revision_states = []

    # The same value dict is intentionally shared between states, matching
    # how the tests later overwrite individual states.
    bad_value = {'values': [100, 105, 95]}
    good_value = {'values': [1, 2, 3]}

    for index, revision in enumerate(['a', 'b']):
      self.mock_revision_states.append(
          MockRevisionState(revision, index, value=bad_value, passed=0))
    for index, revision in enumerate(['c', 'd', 'e'], start=2):
      self.mock_revision_states.append(
          MockRevisionState(revision, index, value=good_value, passed=1))

  def GetRevisionStates(self):
    return self.mock_revision_states
+
+
class MockBisectOptions(object):
  """Options double; only |repeat_test_count| is consulted by the results."""

  def __init__(self):
    # Three repetitions: enough to avoid the "only run once" warning.
    self.repeat_test_count = 3
+
+
class BisectResultsTest(unittest.TestCase):
  """Tests for BisectResults: confidence scoring, breaking-range detection,
  regression statistics, culprit lookup and result-based warnings."""

  def setUp(self):
    # Fresh mocks for every test; individual tests tweak their attributes.
    self.mock_bisect_state = MockBisectState()
    self.mock_depot_registry = MockDepotRegistry()
    self.mock_opts = MockBisectOptions()
    self.mock_warnings = []

    # Hand-patch process-global functions (no mock library is used here);
    # the originals are restored in tearDown.
    self.original_getcwd = os.getcwd
    self.original_chdir = os.chdir
    self.original_query_revision_info = source_control.QueryRevisionInfo

    os.getcwd = lambda: '/path'
    os.chdir = lambda _: None

    # Only revisions 'b' and 'c' straddle the default good/bad boundary,
    # so only their info is ever queried.
    revision_infos = {'b': {'test': 'b'}, 'c': {'test': 'c'}}
    source_control.QueryRevisionInfo = lambda rev: revision_infos[rev]

  def tearDown(self):
    # Restore the functions patched in setUp.
    os.getcwd = self.original_getcwd
    os.chdir = self.original_chdir
    source_control.QueryRevisionInfo = self.original_query_revision_info

  def _AssertConfidence(self, score, bad_values, good_values):
    """Checks whether the given sets of values have a given confidence score.

    The score represents our confidence that the two sets of values wouldn't
    be as different as they are just by chance; that is, that some real change
    occurred between the two sets of values.

    Args:
      score: Expected confidence score.
      bad_values: First list of numbers.
      good_values: Second list of numbers.
    """
    confidence = BisectResults.ConfidenceScore(bad_values, good_values)
    self.assertEqual(score, confidence)

  def testConfidenceScoreIsZeroOnTooFewLists(self):
    self._AssertConfidence(0.0, [], [1, 2])
    self._AssertConfidence(0.0, [1, 2], [])
    self._AssertConfidence(0.0, [1], [1, 2])
    self._AssertConfidence(0.0, [1, 2], [1])

  def testConfidenceScore_ZeroConfidence(self):
    # The good and bad sets contain the same values, so the confidence that
    # they're different should be zero.
    self._AssertConfidence(0.0, [4, 5, 7, 6, 8, 7], [8, 7, 6, 7, 5, 4])

  def testConfidenceScore_MediumConfidence(self):
    self._AssertConfidence(80.0, [0, 1, 1, 1, 2, 2], [1, 1, 1, 3, 3, 4])

  def testConfidenceScore_HighConfidence(self):
    self._AssertConfidence(95.0, [0, 1, 1, 1, 2, 2], [1, 2, 2, 3, 3, 4])

  def testConfidenceScore_VeryHighConfidence(self):
    # Confidence is high if the two sets of values have no internal variance.
    self._AssertConfidence(99.9, [1, 1, 1, 1], [1.2, 1.2, 1.2, 1.2])
    self._AssertConfidence(99.9, [1, 1, 1, 1], [1.01, 1.01, 1.01, 1.01])

  def testConfidenceScore_UnbalancedSampleSize(self):
    # The second set of numbers only contains one number, so confidence is 0.
    self._AssertConfidence(0.0, [1.1, 1.2, 1.1, 1.2, 1.0, 1.3, 1.2], [1.4])

  def testConfidenceScore_EmptySample(self):
    # Confidence is zero if either or both samples are empty.
    self._AssertConfidence(0.0, [], [])
    self._AssertConfidence(0.0, [], [1.1, 1.2, 1.1, 1.2, 1.0, 1.3, 1.2, 1.3])
    self._AssertConfidence(0.0, [1.1, 1.2, 1.1, 1.2, 1.0, 1.3, 1.2, 1.3], [])

  def testConfidenceScore_FunctionalTestResults(self):
    self._AssertConfidence(80.0, [1, 1, 0, 1, 1, 1, 0, 1], [0, 0, 1, 0, 1, 0])
    self._AssertConfidence(99.9, [1, 1, 1, 1, 1, 1, 1, 1], [0, 0, 0, 0, 0, 0])

  def testConfidenceScore_RealWorldCases(self):
    """This method contains a set of data from actual bisect results.

    The confidence scores asserted below were all copied from the actual
    results, so the purpose of this test method is mainly to show what the
    results for real cases are, and compare when we change the confidence
    score function in the future.
    """
    self._AssertConfidence(80, [133, 130, 132, 132, 130, 129], [129, 129, 125])
    self._AssertConfidence(99.5, [668, 667], [498, 498, 499])
    self._AssertConfidence(80, [67, 68], [65, 65, 67])
    self._AssertConfidence(0, [514], [514])
    self._AssertConfidence(90, [616, 613, 607, 615], [617, 619, 619, 617])
    self._AssertConfidence(0, [3.5, 5.8, 4.7, 3.5, 3.6], [2.8])
    self._AssertConfidence(90, [3, 3, 3], [2, 2, 2, 3])
    self._AssertConfidence(0, [1999004, 1999627], [223355])
    self._AssertConfidence(90, [1040, 934, 961], [876, 875, 789])
    self._AssertConfidence(90, [309, 305, 304], [302, 302, 299, 303, 298])

  def testCorrectlyFindsBreakingRange(self):
    revision_states = self.mock_bisect_state.mock_revision_states
    revision_states[0].passed = 0
    revision_states[1].passed = 0
    revision_states[2].passed = 1
    revision_states[3].passed = 1
    revision_states[4].passed = 1

    results = BisectResults(self.mock_bisect_state, self.mock_depot_registry,
                            self.mock_opts, self.mock_warnings)
    self.assertEqual(revision_states[2], results.first_working_revision)
    self.assertEqual(revision_states[1], results.last_broken_revision)

  def testCorrectlyFindsBreakingRangeNotInOrder(self):
    # States are reverse-chronological; a passing state sandwiched between
    # failing ones still yields the first pass / last fail encountered.
    revision_states = self.mock_bisect_state.mock_revision_states
    revision_states[0].passed = 0
    revision_states[1].passed = 1
    revision_states[2].passed = 0
    revision_states[3].passed = 1
    revision_states[4].passed = 1

    results = BisectResults(self.mock_bisect_state, self.mock_depot_registry,
                            self.mock_opts, self.mock_warnings)
    self.assertEqual(revision_states[1], results.first_working_revision)
    self.assertEqual(revision_states[2], results.last_broken_revision)

  def testCorrectlyFindsBreakingRangeIncompleteBisect(self):
    # '?' means the revision was never classified; it is neither good nor
    # counted as broken (it is truthy).
    revision_states = self.mock_bisect_state.mock_revision_states
    revision_states[0].passed = 0
    revision_states[1].passed = 0
    revision_states[2].passed = '?'
    revision_states[3].passed = 1
    revision_states[4].passed = 1

    results = BisectResults(self.mock_bisect_state, self.mock_depot_registry,
                            self.mock_opts, self.mock_warnings)
    self.assertEqual(revision_states[3], results.first_working_revision)
    self.assertEqual(revision_states[1], results.last_broken_revision)

  def testFindBreakingRangeAllPassed(self):
    revision_states = self.mock_bisect_state.mock_revision_states
    revision_states[0].passed = 1
    revision_states[1].passed = 1
    revision_states[2].passed = 1
    revision_states[3].passed = 1
    revision_states[4].passed = 1

    results = BisectResults(self.mock_bisect_state, self.mock_depot_registry,
                            self.mock_opts, self.mock_warnings)
    self.assertEqual(revision_states[0], results.first_working_revision)
    self.assertIsNone(results.last_broken_revision)

  def testFindBreakingRangeNonePassed(self):
    revision_states = self.mock_bisect_state.mock_revision_states
    revision_states[0].passed = 0
    revision_states[1].passed = 0
    revision_states[2].passed = 0
    revision_states[3].passed = 0
    revision_states[4].passed = 0

    results = BisectResults(self.mock_bisect_state, self.mock_depot_registry,
                            self.mock_opts, self.mock_warnings)
    self.assertIsNone(results.first_working_revision)
    self.assertEqual(revision_states[4], results.last_broken_revision)

  def testCorrectlyComputesRegressionStatistics(self):
    revision_states = self.mock_bisect_state.mock_revision_states
    revision_states[0].passed = 0
    revision_states[0].value = {'values': [1000, 999, 998]}
    revision_states[1].passed = 0
    revision_states[1].value = {'values': [980, 1000, 999]}
    revision_states[2].passed = 1
    revision_states[2].value = {'values': [50, 45, 55]}
    revision_states[3].passed = 1
    revision_states[3].value = {'values': [45, 56, 45]}
    revision_states[4].passed = 1
    revision_states[4].value = {'values': [51, 41, 58]}

    results = BisectResults(self.mock_bisect_state, self.mock_depot_registry,
                            self.mock_opts, self.mock_warnings)
    # Expected values were captured from the statistics implementation.
    self.assertAlmostEqual(99.9, results.confidence)
    self.assertAlmostEqual(1909.86547085, results.regression_size)
    self.assertAlmostEqual(7.16625904, results.regression_std_err)

  def testFindsCulpritRevisions(self):
    revision_states = self.mock_bisect_state.mock_revision_states
    revision_states[1].depot = 'chromium'
    revision_states[2].depot = 'webkit'

    results = BisectResults(self.mock_bisect_state, self.mock_depot_registry,
                            self.mock_opts, self.mock_warnings)

    self.assertEqual(1, len(results.culprit_revisions))
    self.assertEqual(('b', {'test': 'b'}, 'chromium'),
                     results.culprit_revisions[0])

  def testNoResultBasedWarningsForNormalState(self):
    results = BisectResults(self.mock_bisect_state, self.mock_depot_registry,
                            self.mock_opts, self.mock_warnings)
    self.assertEqual(0, len(results.warnings))

  def testWarningForMultipleCulpritRevisions(self):
    # Skipping the boundary revision widens the culprit range to >1 commit.
    self.mock_bisect_state.mock_revision_states[2].passed = 'Skipped'
    results = BisectResults(self.mock_bisect_state, self.mock_depot_registry,
                            self.mock_opts, self.mock_warnings)
    self.assertEqual(1, len(results.warnings))

  def testWarningForTooLowRetryLimit(self):
    self.mock_opts.repeat_test_count = 1
    results = BisectResults(self.mock_bisect_state, self.mock_depot_registry,
                            self.mock_opts, self.mock_warnings)
    self.assertEqual(1, len(results.warnings))

  def testWarningForTooLowConfidence(self):
    revision_states = self.mock_bisect_state.mock_revision_states
    revision_states[2].value = {'values': [95, 90, 90]}
    revision_states[3].value = {'values': [95, 90, 90]}
    revision_states[4].value = {'values': [95, 90, 90]}
    results = BisectResults(self.mock_bisect_state, self.mock_depot_registry,
                            self.mock_opts, self.mock_warnings)
    self.assertGreater(results.confidence, 0)
    self.assertEqual(1, len(results.warnings))

  def testWarningForZeroConfidence(self):
    # Good values identical to the bad values give zero confidence.
    revision_states = self.mock_bisect_state.mock_revision_states
    revision_states[2].value = {'values': [100, 105, 95]}
    revision_states[3].value = {'values': [100, 105, 95]}
    revision_states[4].value = {'values': [100, 105, 95]}
    results = BisectResults(self.mock_bisect_state, self.mock_depot_registry,
                            self.mock_opts, self.mock_warnings)
    self.assertEqual(0, results.confidence)
    self.assertEqual(1, len(results.warnings))
+
+
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
diff --git a/chromium/tools/auto_bisect/bisect_state.py b/chromium/tools/auto_bisect/bisect_state.py
new file mode 100644
index 00000000000..f5d745115dc
--- /dev/null
+++ b/chromium/tools/auto_bisect/bisect_state.py
@@ -0,0 +1,99 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
class RevisionState(object):
  """Contains bisect state for a given revision.

  Attributes:
    depot: The depot this revision belongs to (e.g. WebKit).
    revision: Revision identifier (git hash or SVN number).
    index: Position of the state in the list of all revisions.
    value: Value(s) returned from the test; None until the test has run.
    perf_time: Time that a test took.
    build_time: Time that a build took.
    passed: Outcome of the performance test at this revision: 1 (passed),
        0 (failed), '?' (skipped / not yet tested), 'F' (build failed).
    external: For a 'src' revision, the revisions of each of the external
        libraries; otherwise None.
  """

  def __init__(self, depot, revision, index):
    self.depot = depot
    self.revision = revision
    self.index = index
    # Everything below is unknown until the revision is built and tested.
    self.passed = '?'
    self.value = None
    self.external = None
    self.perf_time = 0
    self.build_time = 0

  # TODO(sergiyb): Update() to parse run_results from the RunTest.
+
+
class BisectState(object):
  """Represents a state of the bisect as a collection of revision states."""

  def __init__(self, depot, revisions):
    """Initializes a new BisectState object with a set of revision states.

    Args:
      depot: Name of the depot used for initial set of revision states.
      revisions: List of revisions used for initial set of revision states.
    """
    self.revision_states = []
    self.revision_index = {}

    for index, revision in enumerate(revisions):
      self.revision_states.append(
          self._InitRevisionState(depot, revision, index))

  @staticmethod
  def _RevisionKey(depot, revision):
    """Returns the lookup key identifying a revision within a depot."""
    return "%s:%s" % (depot, revision)

  def _InitRevisionState(self, depot, revision, index):
    """Creates a RevisionState and records its position in the index map."""
    key = self._RevisionKey(depot, revision)
    self.revision_index[key] = index
    return RevisionState(depot, revision, index)

  def GetRevisionState(self, depot, revision):
    """Returns a mutable revision state, or None if unknown.

    Bug fix: the previous implementation tested the looked-up index for
    truthiness, so the state at index 0 (the first revision) was never
    returned.
    """
    key = self._RevisionKey(depot, revision)
    index = self.revision_index.get(key)
    # |index| may legitimately be 0, so compare against None explicitly.
    return self.revision_states[index] if index is not None else None

  def CreateRevisionStatesAfter(self, depot, revisions, reference_depot,
                                reference_revision):
    """Creates a set of new revision states after a specified reference state.

    Args:
      depot: Name of the depot for the new revision states.
      revisions: List of revisions for the new revision states.
      reference_depot: Name of the depot for the reference revision state.
      reference_revision: Revision for the reference revision state.

    Returns:
      A list containing all created revision states in order as they were
      added.
    """
    ref_key = self._RevisionKey(reference_depot, reference_revision)
    ref_index = self.revision_index[ref_key]
    num_new_revisions = len(revisions)
    # Shift the indices of everything after the reference state to make
    # room for the inserted revisions.
    for entry in self.revision_states:
      if entry.index > ref_index:
        entry.index += num_new_revisions

    first_index = ref_index + 1
    for index, revision in enumerate(revisions, start=first_index):
      new_state = self._InitRevisionState(depot, revision, index)
      self.revision_states.insert(index, new_state)

    return self.revision_states[first_index:first_index + num_new_revisions]

  def GetRevisionStates(self):
    """Returns a copy of the list of the revision states."""
    return list(self.revision_states)
diff --git a/chromium/tools/auto_bisect/bisect_state_test.py b/chromium/tools/auto_bisect/bisect_state_test.py
new file mode 100644
index 00000000000..0630fab5eb7
--- /dev/null
+++ b/chromium/tools/auto_bisect/bisect_state_test.py
@@ -0,0 +1,31 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from bisect_state import BisectState
+
+
class BisectStateTest(unittest.TestCase):
  """Tests for BisectState revision-state bookkeeping."""

  def testCreatesRevisionsStateAfterAReferenceRevision(self):
    bisect_state = BisectState('chromium', ['a', 'b', 'c', 'd'])
    bisect_state.CreateRevisionStatesAfter('webkit', [1, 2, 3], 'chromium', 'b')
    bisect_state.CreateRevisionStatesAfter('v8', [100, 200], 'webkit', 2)

    # Nested insertion: webkit goes after chromium 'b', v8 after webkit 2.
    expected_revisions = [('chromium', 'a'), ('chromium', 'b'), ('webkit', 1),
                          ('webkit', 2), ('v8', 100), ('v8', 200),
                          ('webkit', 3), ('chromium', 'c'), ('chromium', 'd')]
    actual_revisions = bisect_state.GetRevisionStates()
    self.assertEqual(len(expected_revisions), len(actual_revisions))
    for position, (actual, expected) in enumerate(
        zip(actual_revisions, expected_revisions)):
      self.assertEqual(position, actual.index)
      self.assertEqual(expected[0], actual.depot)
      self.assertEqual(expected[1], actual.revision)

  # TODO(sergiyb): More tests for the remaining functions.
+
+
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
diff --git a/chromium/tools/auto_bisect/bisect_utils.py b/chromium/tools/auto_bisect/bisect_utils.py
new file mode 100644
index 00000000000..3d45910229b
--- /dev/null
+++ b/chromium/tools/auto_bisect/bisect_utils.py
@@ -0,0 +1,560 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions used by the bisect tool.
+
+This includes functions related to checking out the depot and outputting
+annotations for the Buildbot waterfall.
+"""
+
+import errno
+import imp
+import os
+import stat
+import subprocess
+import sys
+
# Extra gclient custom deps pulled into the bisect checkout; the internal
# data repositories below are used by several perf tests. Entries mapped to
# None are explicitly excluded from the sync.
DEFAULT_GCLIENT_CUSTOM_DEPS = {
    'src/data/page_cycler': 'https://chrome-internal.googlesource.com/'
                            'chrome/data/page_cycler/.git',
    'src/data/dom_perf': 'https://chrome-internal.googlesource.com/'
                         'chrome/data/dom_perf/.git',
    'src/data/mach_ports': 'https://chrome-internal.googlesource.com/'
                           'chrome/data/mach_ports/.git',
    'src/tools/perf/data': 'https://chrome-internal.googlesource.com/'
                           'chrome/tools/perf/data/.git',
    'src/third_party/adobe/flash/binaries/ppapi/linux':
        'https://chrome-internal.googlesource.com/'
        'chrome/deps/adobe/flash/binaries/ppapi/linux/.git',
    'src/third_party/adobe/flash/binaries/ppapi/linux_x64':
        'https://chrome-internal.googlesource.com/'
        'chrome/deps/adobe/flash/binaries/ppapi/linux_x64/.git',
    'src/third_party/adobe/flash/binaries/ppapi/mac':
        'https://chrome-internal.googlesource.com/'
        'chrome/deps/adobe/flash/binaries/ppapi/mac/.git',
    'src/third_party/adobe/flash/binaries/ppapi/mac_64':
        'https://chrome-internal.googlesource.com/'
        'chrome/deps/adobe/flash/binaries/ppapi/mac_64/.git',
    'src/third_party/adobe/flash/binaries/ppapi/win':
        'https://chrome-internal.googlesource.com/'
        'chrome/deps/adobe/flash/binaries/ppapi/win/.git',
    'src/third_party/adobe/flash/binaries/ppapi/win_x64':
        'https://chrome-internal.googlesource.com/'
        'chrome/deps/adobe/flash/binaries/ppapi/win_x64/.git',
    'src/third_party/WebKit/LayoutTests': None,
    'src/tools/valgrind': None,
}

# gclient solution spec used to create the bisect checkout; see
# RunGClientAndCreateConfig below.
GCLIENT_SPEC_DATA = [
    {
        'name': 'src',
        'url': 'https://chromium.googlesource.com/chromium/src.git',
        'deps_file': '.DEPS.git',
        'managed': True,
        'custom_deps': {},
        'safesync_url': '',
    },
]
# Appended to the spec string when the target platform is Android.
GCLIENT_SPEC_ANDROID = "\ntarget_os = ['android']"
# Extra dep required when bisecting into V8 bleeding edge.
GCLIENT_CUSTOM_DEPS_V8 = {
    'src/v8_bleeding_edge': 'https://chromium.googlesource.com/v8/v8.git'
}
FILE_DEPS_GIT = '.DEPS.git'
FILE_DEPS = 'DEPS'

# Bisect working directory.
BISECT_DIR = 'bisect'

# The percentage at which confidence is considered high.
HIGH_CONFIDENCE = 95

# Below is the map of "depot" names to information about each depot. Each depot
# is a repository, and in the process of bisecting, revision ranges in these
# repositories may also be bisected.
#
# Each depot information dictionary may contain:
#   src: Path to the working directory.
#   recurse: True if this repository will get bisected.
#   svn: URL of SVN repository. Needed for git workflow to resolve hashes to
#       SVN revisions.
#   from: Parent depot that must be bisected before this is bisected.
#   deps_var: Key name in vars variable in DEPS file that has revision
#       information.
DEPOT_DEPS_NAME = {
    'chromium': {
        'src': 'src',
        'recurse': True,
        'from': ['android-chrome'],
        'viewvc': 'https://chromium.googlesource.com/chromium/src/+/',
        'deps_var': 'chromium_rev'
    },
    'webkit': {
        'src': 'src/third_party/WebKit',
        'recurse': True,
        'from': ['chromium'],
        'viewvc': 'https://chromium.googlesource.com/chromium/blink/+/',
        'deps_var': 'webkit_revision'
    },
    'angle': {
        'src': 'src/third_party/angle',
        'src_old': 'src/third_party/angle_dx11',
        'recurse': True,
        'from': ['chromium'],
        'platform': 'nt',
        'viewvc': 'https://chromium.googlesource.com/angle/angle/+/',
        'deps_var': 'angle_revision'
    },
    'v8': {
        'src': 'src/v8',
        'recurse': True,
        'from': ['chromium'],
        'custom_deps': GCLIENT_CUSTOM_DEPS_V8,
        'viewvc': 'https://chromium.googlesource.com/v8/v8.git/+/',
        'deps_var': 'v8_revision'
    },
    'v8_bleeding_edge': {
        'src': 'src/v8_bleeding_edge',
        'recurse': True,
        'svn': 'https://v8.googlecode.com/svn/branches/bleeding_edge',
        'from': ['v8'],
        'viewvc': 'https://chromium.googlesource.com/v8/v8.git/+/',
        'deps_var': 'v8_revision'
    },
    'skia': {
        'src': 'src/third_party/skia',
        'recurse': True,
        'from': ['chromium'],
        'viewvc': 'https://chromium.googlesource.com/skia/+/',
        'deps_var': 'skia_revision'
    }
}

DEPOT_NAMES = DEPOT_DEPS_NAME.keys()

# The possible values of the --bisect_mode flag, which determines what to
# use when classifying a revision as "good" or "bad".
BISECT_MODE_MEAN = 'mean'
BISECT_MODE_STD_DEV = 'std_dev'
BISECT_MODE_RETURN_CODE = 'return_code'
+
+
def AddAdditionalDepotInfo(depot_info):
  """Adds additional depot info to the global depot variables."""
  global DEPOT_DEPS_NAME
  global DEPOT_NAMES
  # Rebind the globals to a freshly merged dict; entries in |depot_info|
  # override existing entries with the same key.
  merged = dict(DEPOT_DEPS_NAME)
  merged.update(depot_info)
  DEPOT_DEPS_NAME = merged
  DEPOT_NAMES = DEPOT_DEPS_NAME.keys()
+
+
def OutputAnnotationStepStart(name):
  """Outputs annotation to signal the start of a step to a try bot.

  Args:
    name: The name of the step.
  """
  print
  print '@@@SEED_STEP %s@@@' % name
  print '@@@STEP_CURSOR %s@@@' % name
  print '@@@STEP_STARTED@@@'
  print
  # Flush so the annotation is visible to the buildbot log parser right away.
  sys.stdout.flush()
+
+
def OutputAnnotationStepClosed():
  """Outputs annotation to signal the closing of a step to a try bot."""
  print
  print '@@@STEP_CLOSED@@@'
  print
  # Flush so the annotation is visible to the buildbot log parser right away.
  sys.stdout.flush()
+
+
def OutputAnnotationStepText(text):
  """Outputs appropriate annotation to print text.

  Args:
    text: The text to print.
  """
  print
  print '@@@STEP_TEXT@%s@@@' % text
  print
  sys.stdout.flush()
+
+
def OutputAnnotationStepWarning():
  """Outputs appropriate annotation to signal a warning."""
  # NOTE(review): unlike the other annotation helpers, this one does not
  # flush stdout — confirm whether that is intentional.
  print
  print '@@@STEP_WARNINGS@@@'
  print
+
+
def OutputAnnotationStepFailure():
  """Outputs appropriate annotation to signal a step failure."""
  # NOTE(review): unlike the other annotation helpers, this one does not
  # flush stdout — confirm whether that is intentional.
  print
  print '@@@STEP_FAILURE@@@'
  print
+
+
def OutputAnnotationStepLink(label, url):
  """Outputs appropriate annotation to print a link.

  Args:
    label: The name to print.
    url: The URL to print.
  """
  print
  print '@@@STEP_LINK@%s@%s@@@' % (label, url)
  print
  sys.stdout.flush()
+
+
def LoadExtraSrc(path_to_file):
  """Attempts to load an extra source file, and overrides global values.

  If the extra source file is loaded successfully, then it will use the new
  module to override some global values, such as gclient spec data.

  Args:
    path_to_file: File path.

  Returns:
    The loaded module object, or None if none was imported.
  """
  try:
    global GCLIENT_SPEC_DATA
    global GCLIENT_SPEC_ANDROID
    # NOTE(review): imp.load_source executes the file; the module is
    # expected to define GetGClientSpec() and GetGClientSpecExtraParams().
    # Only ImportError is caught here — an IOError from a missing file or
    # an AttributeError from a missing function would propagate. Confirm
    # that is intended.
    extra_src = imp.load_source('data', path_to_file)
    GCLIENT_SPEC_DATA = extra_src.GetGClientSpec()
    GCLIENT_SPEC_ANDROID = extra_src.GetGClientSpecExtraParams()
    return extra_src
  except ImportError:
    return None
+
+
def IsTelemetryCommand(command):
  """Attempts to discern whether or not a given command is running telemetry.

  Args:
    command: The command string to inspect.

  Returns:
    True if the command contains a telemetry runner path (either with
    forward or backward slashes).
  """
  telemetry_markers = ('tools/perf/run_', 'tools\\perf\\run_')
  return any(marker in command for marker in telemetry_markers)
+
+
def _CreateAndChangeToSourceDirectory(working_directory):
  """Creates a directory 'bisect' as a subdirectory of |working_directory|.

  If successful, the current working directory will be changed to the new
  'bisect' directory.

  Args:
    working_directory: The directory to create the new 'bisect' directory in.

  Returns:
    True if the directory was successfully created (or already existed).
  """
  cwd = os.getcwd()
  os.chdir(working_directory)
  try:
    os.mkdir(BISECT_DIR)
  except OSError, e:
    if e.errno != errno.EEXIST:  # EEXIST indicates that it already exists.
      # Restore the original working directory before reporting failure.
      os.chdir(cwd)
      return False
  os.chdir(BISECT_DIR)
  return True
+
+
+def _SubprocessCall(cmd, cwd=None):
+ """Runs a command in a subprocess.
+
+ Args:
+ cmd: The command to run.
+ cwd: Working directory to run from.
+
+ Returns:
+ The return code of the call.
+ """
+ if os.name == 'nt':
+ # "HOME" isn't normally defined on windows, but is needed
+ # for git to find the user's .netrc file.
+ if not os.getenv('HOME'):
+ os.environ['HOME'] = os.environ['USERPROFILE']
+ shell = os.name == 'nt'
+ return subprocess.call(cmd, shell=shell, cwd=cwd)
+
+
def RunGClient(params, cwd=None):
  """Runs gclient with the specified parameters.

  Args:
    params: A list of parameters to pass to gclient.
    cwd: Working directory to run from.

  Returns:
    The return code of the call.
  """
  return _SubprocessCall(['gclient'] + params, cwd=cwd)
+
+
def RunGClientAndCreateConfig(opts, custom_deps=None, cwd=None):
  """Runs gclient and creates a config containing both src and src-internal.

  Args:
    opts: The options parsed from the command line through parse_args().
    custom_deps: A dictionary of additional dependencies to add to .gclient.
    cwd: Working directory to run from.

  Returns:
    The return code of the call.
  """
  spec = GCLIENT_SPEC_DATA

  if custom_deps:
    # .items() works on both Python 2 and 3; .iteritems() was removed in 3.
    for k, v in custom_deps.items():
      spec[0]['custom_deps'][k] = v

  # Cannot have newlines in string on windows
  spec = 'solutions =' + str(spec)
  spec = ''.join([l for l in spec.splitlines()])

  if 'android' in opts.target_platform:
    spec += GCLIENT_SPEC_ANDROID

  return_code = RunGClient(
      ['config', '--spec=%s' % spec], cwd=cwd)
  return return_code
+
+
def OnAccessError(func, path, _):
  """Error handler for shutil.rmtree that retries on read-only files.

  Source: http://goo.gl/DEYNCT

  When the failure was caused by missing write permission, write permission
  is added for the owner and the failed operation is retried; any other
  failure is re-raised unchanged.

  Args:
    func: The function that raised the error.
    path: The path name passed to func.
    _: Exception information from sys.exc_info(). Not used.
  """
  if os.access(path, os.W_OK):
    raise
  os.chmod(path, stat.S_IWUSR)
  func(path)
+
+
+def _CleanupPreviousGitRuns(cwd=os.getcwd()):
+ """Cleans up any leftover index.lock files after running git."""
+ # If a previous run of git crashed, or bot was reset, etc., then we might
+ # end up with leftover index.lock files.
+ for path, _, files in os.walk(cwd):
+ for cur_file in files:
+ if cur_file.endswith('index.lock'):
+ path_to_file = os.path.join(path, cur_file)
+ os.remove(path_to_file)
+
+
def RunGClientAndSync(revisions=None, cwd=None):
  """Runs gclient and does a normal sync.

  Args:
    revisions: List of revisions that need to be synced.
        E.g., "src@2ae43f...", "src/third_party/webkit@asr1234" etc.
    cwd: Working directory to run from.

  Returns:
    The return code of the call.
  """
  params = ['sync', '--verbose', '--nohooks', '--force',
            '--delete_unversioned_trees']
  for revision in revisions or []:
    if revision is not None:
      params += ['--revision', revision]
  return RunGClient(params, cwd=cwd)
+
+
def SetupGitDepot(opts, custom_deps):
  """Sets up the depot for the bisection.

  The depot will be located in a subdirectory called 'bisect'.

  Args:
    opts: The options parsed from the command line through parse_args().
    custom_deps: A dictionary of additional dependencies to add to .gclient.

  Returns:
    True if gclient successfully created the config file and did a sync, False
    otherwise.
  """
  name = 'Setting up Bisection Depot'
  try:
    if opts.output_buildbot_annotations:
      OutputAnnotationStepStart(name)

    # A non-zero gclient return code means config creation failed.
    if RunGClientAndCreateConfig(opts, custom_deps):
      return False

    # Remove stale index.lock files left by an interrupted git before
    # reverting and syncing, otherwise git commands below may fail.
    _CleanupPreviousGitRuns()
    RunGClient(['revert'])
    return not RunGClientAndSync()
  finally:
    # Close the buildbot annotation step even if an exception escapes.
    if opts.output_buildbot_annotations:
      OutputAnnotationStepClosed()
+
+
def CheckIfBisectDepotExists(opts):
  """Checks if the bisect directory already exists.

  Args:
    opts: The options parsed from the command line through parse_args().

  Returns:
    True if <working_directory>/bisect/src exists.
  """
  depot_src = os.path.join(opts.working_directory, BISECT_DIR, 'src')
  return os.path.exists(depot_src)
+
+
def CheckRunGit(command, cwd=None):
  """Run a git subcommand, returning its output and return code. Asserts if
  the return code of the call is non-zero.

  Args:
    command: A list containing the args to git.
    cwd: A directory to change to while running the git command (optional).

  Returns:
    The output of the command (the return code is asserted to be zero,
    so only the output is returned).

  Raises:
    AssertionError: The git command exited with a non-zero return code.
  """
  output, return_code = RunGit(command, cwd=cwd)

  assert not return_code, 'An error occurred while running'\
                          ' "git %s"' % ' '.join(command)
  return output
+
+
def RunGit(command, cwd=None):
  """Run a git subcommand, returning its output and return code.

  Args:
    command: A list containing the args to git.
    cwd: A directory to change to while running the git command (optional).

  Returns:
    A tuple of the output and return code.
  """
  return RunProcessAndRetrieveOutput(['git'] + command, cwd=cwd)
+
+
def CreateBisectDirectoryAndSetupDepot(opts, custom_deps):
  """Sets up a subdirectory 'bisect' and then retrieves a copy of the depot
  there using gclient.

  Args:
    opts: The options parsed from the command line through parse_args().
    custom_deps: A dictionary of additional dependencies to add to .gclient.

  Raises:
    RuntimeError: The bisect directory could not be created, or the source
        checkout could not be fetched.
  """
  if CheckIfBisectDepotExists(opts):
    path_to_dir = os.path.join(os.path.abspath(opts.working_directory),
                               BISECT_DIR, 'src')
    # Only reset the checkout if it really is a git work tree; otherwise
    # fall through and let gclient recreate it below.
    output, _ = RunGit(['rev-parse', '--is-inside-work-tree'], cwd=path_to_dir)
    if output.strip() == 'true':
      # Before checking out master, cleanup up any leftover index.lock files.
      _CleanupPreviousGitRuns(path_to_dir)
      # Checks out the master branch, throws an exception if git command fails.
      CheckRunGit(['checkout', '-f', 'master'], cwd=path_to_dir)
  if not _CreateAndChangeToSourceDirectory(opts.working_directory):
    raise RuntimeError('Could not create bisect directory.')

  if not SetupGitDepot(opts, custom_deps):
    raise RuntimeError('Failed to grab source.')
+
+
def RunProcess(command, cwd=None, shell=False):
  """Runs an arbitrary command.

  If output from the call is needed, use RunProcessAndRetrieveOutput instead.

  Args:
    command: A list containing the command and args to execute.
    cwd: A directory to run the command from (optional).
    shell: Whether to run through the shell; on Windows the shell is always
        used regardless of this flag, so that PATH is interpreted.

  Returns:
    The return code of the call.
  """
  # On Windows, use shell=True to get PATH interpretation.
  shell = shell or IsWindowsHost()
  return subprocess.call(command, cwd=cwd, shell=shell)
+
+
def RunProcessAndRetrieveOutput(command, cwd=None):
  """Runs an arbitrary command, returning its output and return code.

  Since output is collected via communicate(), there will be no output until
  the call terminates. If you need output while the program runs (ie. so
  that the buildbot doesn't terminate the script), consider RunProcess().

  Args:
    command: A list containing the command and args to execute.
    cwd: A directory to change to while running the command. The command can be
        relative to this directory. If this is None, the command will be run in
        the current directory.

  Returns:
    A tuple of the output and return code.
  """
  if cwd:
    original_cwd = os.getcwd()
    os.chdir(cwd)

  try:
    # On Windows, use shell=True to get PATH interpretation.
    shell = IsWindowsHost()
    proc = subprocess.Popen(
        command, shell=shell, stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT)
    output, _ = proc.communicate()
  finally:
    # Restore the original working directory even if Popen or communicate
    # raises, so a failure does not strand the process in |cwd|.
    if cwd:
      os.chdir(original_cwd)

  return (output, proc.returncode)
+
+
def IsStringInt(string_to_check):
  """Checks whether or not the given string can be converted to an int."""
  try:
    int(string_to_check)
  except ValueError:
    return False
  return True
+
+
def IsStringFloat(string_to_check):
  """Checks whether or not the given string can be converted to a float."""
  try:
    float(string_to_check)
  except ValueError:
    return False
  return True
+
+
def IsWindowsHost():
  """Returns True when running on Windows (including under Cygwin)."""
  return sys.platform.startswith('win') or sys.platform == 'cygwin'
+
+
def Is64BitWindows():
  """Checks whether or not Windows is a 64-bit version."""
  # PROCESSOR_ARCHITEW6432 is only set when running in WoW64; when absent,
  # PROCESSOR_ARCHITECTURE reflects the real architecture.
  arch = os.environ.get('PROCESSOR_ARCHITEW6432')
  if not arch:
    arch = os.environ.get('PROCESSOR_ARCHITECTURE')
  # NOTE(review): 'I64' looks like it is meant to match Itanium, which is
  # usually reported as 'IA64' — confirm against the values Windows sets.
  return arch and arch in ['AMD64', 'I64']
+
+
def IsLinuxHost():
  """Returns True when running on a Linux host."""
  return sys.platform.startswith('linux')
+
+
def IsMacHost():
  """Returns True when running on a Mac OS host."""
  return sys.platform.startswith('darwin')
diff --git a/chromium/tools/auto_bisect/builder.py b/chromium/tools/auto_bisect/builder.py
new file mode 100644
index 00000000000..ca746d1dc35
--- /dev/null
+++ b/chromium/tools/auto_bisect/builder.py
@@ -0,0 +1,359 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Classes and functions for building Chrome.
+
+This includes functions for running commands to build, as well as
+specific rules about which targets to build.
+"""
+
+import os
+import subprocess
+import sys
+
+import bisect_utils
+
+ORIGINAL_ENV = {}
+
+
class Builder(object):
  """Subclasses of the Builder class are used by the bisect script to build
  relevant targets.
  """
  def __init__(self, opts):
    """Performs setup for building with target build system.

    Args:
      opts: Options parsed from command line.

    Raises:
      RuntimeError: Some condition necessary for building was not met.
    """
    if bisect_utils.IsWindowsHost():
      if not opts.build_preference:
        opts.build_preference = 'msvs'

      if opts.build_preference == 'msvs':
        # VS100COMNTOOLS is set by the Visual Studio 2010 installer and is
        # later used to locate devenv.com.
        if not os.getenv('VS100COMNTOOLS'):
          raise RuntimeError(
              'Path to visual studio could not be determined.')
      else:
        # Need to re-escape goma dir, see crbug.com/394990.
        # NOTE(review): 'string_escape' is a Python-2-only codec; this branch
        # would need updating to run under Python 3 — confirm.
        if opts.goma_dir:
          opts.goma_dir = opts.goma_dir.encode('string_escape')
        SetBuildSystemDefault(opts.build_preference, opts.use_goma,
                              opts.goma_dir, opts.target_arch)
    else:
      if not opts.build_preference:
        # Prefer ninja when the GYP generator is already configured for it,
        # otherwise fall back to make.
        if 'ninja' in os.getenv('GYP_GENERATORS', default=''):
          opts.build_preference = 'ninja'
        else:
          opts.build_preference = 'make'

      SetBuildSystemDefault(opts.build_preference, opts.use_goma, opts.goma_dir)

    if not SetupPlatformBuildEnvironment(opts):
      raise RuntimeError('Failed to set platform environment.')

  @staticmethod
  def FromOpts(opts):
    """Constructs and returns a Builder object.

    Args:
      opts: Options parsed from the command-line.

    Returns:
      A Builder subclass instance chosen from opts.target_platform.
    """
    builder = None
    if opts.target_platform == 'android':
      builder = AndroidBuilder(opts)
    elif opts.target_platform == 'android-chrome':
      builder = AndroidChromeBuilder(opts)
    else:
      builder = DesktopBuilder(opts)
    return builder

  def Build(self, depot, opts):
    """Runs a command to build Chrome. Subclasses must override this."""
    raise NotImplementedError()
+
+
def GetBuildOutputDirectory(opts, src_dir=None):
  """Returns the path to the build directory, relative to the checkout root.

  Assumes that the current working directory is the checkout root.

  Args:
    opts: Command-line options.
    src_dir: Path to chromium/src directory.

  Returns:
    A path to the directory to use as build output directory.

  Raises:
    NotImplementedError: The platform according to sys.platform is unexpected.
  """
  base = src_dir if src_dir else 'src'
  if opts.build_preference == 'ninja' or bisect_utils.IsLinuxHost():
    subdir = 'out'
  elif bisect_utils.IsMacHost():
    subdir = 'xcodebuild'
  elif bisect_utils.IsWindowsHost():
    subdir = 'build'
  else:
    raise NotImplementedError('Unexpected platform %s' % sys.platform)
  return os.path.join(base, subdir)
+
+
class DesktopBuilder(Builder):
  """Builds Chromium on Linux, Mac, or Windows."""

  def __init__(self, opts):
    super(DesktopBuilder, self).__init__(opts)

  def Build(self, depot, opts):
    """Builds the chromium_builder_perf target.

    Args:
      depot: Name of current depot being bisected.
      opts: The options parsed from the command line.

    Returns:
      True if build was successful.
    """
    targets = ['chromium_builder_perf']
    threads = opts.goma_threads if opts.use_goma else None

    preference = opts.build_preference
    if preference == 'make':
      return BuildWithMake(threads, targets, opts.target_build_type)
    if preference == 'ninja':
      return BuildWithNinja(threads, targets, opts.target_build_type)
    if preference == 'msvs':
      assert bisect_utils.IsWindowsHost(), 'msvs is only supported on Windows.'
      return BuildWithVisualStudio(targets, opts.target_build_type)
    assert False, 'No build system defined.'
+
+
class AndroidBuilder(Builder):
  """Builds Chromium for Android."""

  def __init__(self, opts):
    super(AndroidBuilder, self).__init__(opts)

  # TODO(qyearsley): Make this a class method and verify that it works with
  # a unit test.
  # pylint: disable=R0201
  def _GetTargets(self):
    """Returns a list of build targets."""
    return ['chrome_public_apk', 'cc_perftests_apk', 'android_tools']

  def Build(self, depot, opts):
    """Builds the android content shell and other necessary tools.

    Args:
      depot: Current depot being bisected.
      opts: The options parsed from the command line.

    Returns:
      True if build was successful.
    """
    threads = opts.goma_threads if opts.use_goma else None

    if opts.build_preference != 'ninja':
      assert False, 'No build system defined.'
    return BuildWithNinja(threads, self._GetTargets(), opts.target_build_type)
+
+
class AndroidChromeBuilder(AndroidBuilder):
  """Builds "android-chrome".

  This is slightly different from AndroidBuilder.
  """

  def __init__(self, opts):
    super(AndroidChromeBuilder, self).__init__(opts)

  # TODO(qyearsley): Make this a class method and verify that it works with
  # a unit test.
  # pylint: disable=R0201
  def _GetTargets(self):
    """Returns the AndroidBuilder targets plus 'chrome_apk'."""
    base_targets = AndroidBuilder._GetTargets(self)
    return base_targets + ['chrome_apk']
+
+
def SetBuildSystemDefault(build_system, use_goma, goma_dir, target_arch='ia32'):
  """Sets up any environment variables needed to build with the specified build
  system.

  Args:
    build_system: A string specifying build system. Currently only 'ninja' or
        'make' are supported.
    use_goma: Determines whether to GOMA for compile.
    goma_dir: GOMA directory path.
    target_arch: The target build architecture, ia32 or x64. Default is ia32.

  Raises:
    RuntimeError: An unsupported build system was requested.
  """
  if build_system == 'ninja':
    generators = os.getenv('GYP_GENERATORS', default='')
    if 'ninja' not in generators:
      os.environ['GYP_GENERATORS'] = (
          generators + ',ninja' if generators else 'ninja')

    if bisect_utils.IsWindowsHost():
      os.environ['GYP_DEFINES'] = (
          'component=shared_library incremental_chrome_dll=1 '
          'disable_nacl=1 fastbuild=1 chromium_win_pch=0')

  elif build_system == 'make':
    os.environ['GYP_GENERATORS'] = 'make'
  else:
    raise RuntimeError('%s build not supported.' % build_system)

  if use_goma:
    os.environ['GYP_DEFINES'] = '%s %s' % (os.getenv('GYP_DEFINES', default=''),
                                           'use_goma=1')
    if goma_dir:
      os.environ['GYP_DEFINES'] += ' gomadir=%s' % goma_dir

  # Produce 64 bit chromium binaries when target architecture is set to x64.
  if target_arch == 'x64':
    os.environ['GYP_DEFINES'] += ' target_arch=%s' % target_arch
+
def SetupPlatformBuildEnvironment(opts):
  """Performs any platform-specific setup.

  Args:
    opts: The options parsed from the command line through parse_args().

  Returns:
    True if successful.
  """
  if 'android' not in opts.target_platform:
    return True
  CopyAndSaveOriginalEnvironmentVars()
  return SetupAndroidBuildEnvironment(opts)
+
+
def BuildWithMake(threads, targets, build_type='Release'):
  """Runs a make command with the given targets.

  Args:
    threads: The number of threads to use. None means unspecified/unlimited.
    targets: List of make targets.
    build_type: Release or Debug.

  Returns:
    True if the command had a 0 exit code, False otherwise.
  """
  command = ['make', 'BUILDTYPE=%s' % build_type]
  if threads:
    command.append('-j%d' % threads)
  command.extend(targets)
  return bisect_utils.RunProcess(command) == 0
+
+
def BuildWithNinja(threads, targets, build_type='Release'):
  """Runs a ninja command with the given targets.

  Args:
    threads: The number of threads to use. None means unspecified/unlimited.
    targets: List of ninja targets.
    build_type: Release or Debug; selects the out/<build_type> directory.

  Returns:
    True if the command had a 0 exit code, False otherwise.
  """
  cmd = ['ninja', '-C', os.path.join('out', build_type)]
  if threads:
    cmd.append('-j%d' % threads)
  cmd += targets
  return_code = bisect_utils.RunProcess(cmd)
  return not return_code
+
+
def BuildWithVisualStudio(targets, build_type='Release'):
  """Runs a command to build the given targets with Visual Studio.

  Relies on the VS100COMNTOOLS environment variable (checked in
  Builder.__init__) to locate devenv.com.

  Args:
    targets: List of project names to pass via /Project.
    build_type: Release or Debug.

  Returns:
    True if the command had a 0 exit code, False otherwise.
  """
  path_to_devenv = os.path.abspath(
      os.path.join(os.environ['VS100COMNTOOLS'], '..', 'IDE', 'devenv.com'))
  path_to_sln = os.path.join(os.getcwd(), 'chrome', 'chrome.sln')
  cmd = [path_to_devenv, '/build', build_type, path_to_sln]
  for t in targets:
    cmd.extend(['/Project', t])
  return_code = bisect_utils.RunProcess(cmd)
  return not return_code
+
+
def CopyAndSaveOriginalEnvironmentVars():
  """Makes a copy of the current environment variables.

  Before making a copy of the environment variables and setting a global
  variable, this function unsets a certain set of environment variables.
  """
  # TODO: Waiting on crbug.com/255689, will remove this after.
  vars_to_remove = [
      'CHROME_SRC',
      'CHROMIUM_GYP_FILE',
      'GYP_DEFINES',
      'GYP_GENERATORS',
      'GYP_GENERATOR_FLAGS',
      'OBJCOPY',
  ]
  # Iterate over a snapshot of the keys so os.environ is not read while the
  # removal list is being built.
  for key in list(os.environ):
    if 'ANDROID' in key:
      vars_to_remove.append(key)
  for key in vars_to_remove:
    # "key in os.environ" works on Python 2 and 3; has_key() was removed in 3.
    if key in os.environ:
      del os.environ[key]

  global ORIGINAL_ENV
  ORIGINAL_ENV = os.environ.copy()
+
+
def SetupAndroidBuildEnvironment(opts, path_to_src=None):
  """Sets up the android build environment.

  Args:
    opts: The options parsed from the command line through parse_args().
    path_to_src: Path to the src checkout.

  Returns:
    True if successful.
  """
  # Revert the environment variables back to default before setting them up
  # with envsetup.sh.
  env_vars = os.environ.copy()
  # Iterate over a snapshot of the keys; .iteritems() was Python-2-only.
  for k in list(env_vars):
    del os.environ[k]
  for k, v in ORIGINAL_ENV.items():
    os.environ[k] = v

  envsetup_path = os.path.join('build', 'android', 'envsetup.sh')
  # universal_newlines=True makes communicate() return text rather than
  # bytes on Python 3 (harmless on Python 2), so the parsing below works.
  proc = subprocess.Popen(['bash', '-c', 'source %s && env' % envsetup_path],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          universal_newlines=True,
                          cwd=path_to_src)
  out, _ = proc.communicate()

  # Import every VAR=value line the sourced script printed back into
  # this process's environment.
  for line in out.splitlines():
    k, _, v = line.partition('=')
    os.environ[k] = v

  # envsetup.sh no longer sets OS=android in GYP_DEFINES environment variable.
  # (See http://crrev.com/170273005). So, we set this variable explicitly here
  # in order to build Chrome on Android.
  if 'GYP_DEFINES' not in os.environ:
    os.environ['GYP_DEFINES'] = 'OS=android'
  else:
    os.environ['GYP_DEFINES'] += ' OS=android'

  if opts.use_goma:
    os.environ['GYP_DEFINES'] += ' use_goma=1'
  return not proc.returncode
diff --git a/chromium/tools/auto_bisect/configs/android.perf_test.sunspider.cfg b/chromium/tools/auto_bisect/configs/android.perf_test.sunspider.cfg
new file mode 100644
index 00000000000..87238fcdf9b
--- /dev/null
+++ b/chromium/tools/auto_bisect/configs/android.perf_test.sunspider.cfg
@@ -0,0 +1,11 @@
+# This config just runs the sunspider command once.
+# http://build.chromium.org/p/tryserver.chromium.perf/builders/linux_perf_bisect/builds/689
+
+config = {
+ 'command': 'tools/perf/run_benchmark -v --browser=android-chromium sunspider',
+ "max_time_minutes": "10",
+ "repeat_count": "1",
+ "truncate_percent": "0"
+}
+
+# Workaround git try issue, see crbug.com/257689
diff --git a/chromium/tools/auto_bisect/configs/linux.bisect.functional.cfg b/chromium/tools/auto_bisect/configs/linux.bisect.functional.cfg
new file mode 100644
index 00000000000..d5c36991c76
--- /dev/null
+++ b/chromium/tools/auto_bisect/configs/linux.bisect.functional.cfg
@@ -0,0 +1,13 @@
+# This should reproduce the regression in http://crbug.com/425582.
+# It was based on:
+# http://build.chromium.org/p/tryserver.chromium.perf/builders/linux_perf_bisect/builds/704
+
+config = {
+ 'command': 'out/Release/content_unittests --single-process-tests --gtest_filter=DOMStorageAreaTest',
+ 'good_revision': '311607',
+ 'bad_revision': '311608',
+ 'bisect_mode': 'return_code',
+ 'builder_type': 'full',
+}
+
+# Workaround git try issue, see crbug.com/257689
diff --git a/chromium/tools/auto_bisect/configs/linux.bisect.page_cycler.cfg b/chromium/tools/auto_bisect/configs/linux.bisect.page_cycler.cfg
new file mode 100644
index 00000000000..9783da6a866
--- /dev/null
+++ b/chromium/tools/auto_bisect/configs/linux.bisect.page_cycler.cfg
@@ -0,0 +1,17 @@
+# This should reproduce the regression in http://crbug.com/425582.
+# It was based on:
+# http://build.chromium.org/p/tryserver.chromium.perf/builders/linux_perf_bisect/builds/704
+
+config = {
+ 'command': 'tools/perf/run_benchmark -v --browser=release page_cycler.intl_ar_fa_he',
+ 'good_revision': '300138',
+ 'bad_revision': '300149',
+ 'metric': 'warm_times/page_load_time',
+ 'repeat_count': '5',
+ 'max_time_minutes': '5',
+ 'truncate_percent': '25',
+ # Default is "perf".
+ # 'builder_type': 'perf',
+}
+
+# Workaround git try issue, see crbug.com/257689
diff --git a/chromium/tools/auto_bisect/configs/linux.perf_test.tab_switching.cfg b/chromium/tools/auto_bisect/configs/linux.perf_test.tab_switching.cfg
new file mode 100644
index 00000000000..17c6c2c0129
--- /dev/null
+++ b/chromium/tools/auto_bisect/configs/linux.perf_test.tab_switching.cfg
@@ -0,0 +1,11 @@
+# This config just runs the tab-switching command once.
+# http://build.chromium.org/p/tryserver.chromium.perf/builders/linux_perf_bisect/builds/689
+
+config = {
+ "command": "./tools/perf/run_benchmark -v tab_switching.typical_25 --browser=release",
+ "max_time_minutes": "30",
+ "repeat_count": "1",
+ "truncate_percent": "0"
+}
+
+# Workaround git try issue, see crbug.com/257689
diff --git a/chromium/tools/auto_bisect/configs/mac.bisect.blink_perf.cfg b/chromium/tools/auto_bisect/configs/mac.bisect.blink_perf.cfg
new file mode 100644
index 00000000000..1594d2e1fda
--- /dev/null
+++ b/chromium/tools/auto_bisect/configs/mac.bisect.blink_perf.cfg
@@ -0,0 +1,14 @@
+# Based on http://crbug.com/420120.
+
+config = {
+ 'command': 'tools/perf/run_benchmark -v --browser=release page_cycler.bloat',
+ 'good_revision': '297905',
+ 'bad_revision': '297940',
+ 'metric': 'warm_times/page_load_time',
+ 'repeat_count': '5',
+ 'max_time_minutes': '5',
+ 'truncate_percent': '20',
+ 'builder_type': 'perf',
+}
+
+# Workaround git try issue, see crbug.com/257689
diff --git a/chromium/tools/auto_bisect/configs/mac.bisect.tab_switching.cfg b/chromium/tools/auto_bisect/configs/mac.bisect.tab_switching.cfg
new file mode 100644
index 00000000000..40c381ba40c
--- /dev/null
+++ b/chromium/tools/auto_bisect/configs/mac.bisect.tab_switching.cfg
@@ -0,0 +1,14 @@
+# This config is based on http://crbug.com/435291.
+
+config = {
+ 'command': 'tools/perf/run_benchmark -v --browser=release tab_switching.five_blank_pages',
+ 'good_revision': '304855',
+ 'bad_revision': '304881',
+ 'metric': 'idle_wakeups_total/idle_wakeups_total',
+ 'repeat_count': '5',
+ 'max_time_minutes': '10',
+ 'truncate_percent': '25',
+ 'builder_type': 'perf',
+}
+
+# Workaround git try issue, see crbug.com/257689
diff --git a/chromium/tools/auto_bisect/configs/try.py b/chromium/tools/auto_bisect/configs/try.py
new file mode 100755
index 00000000000..360bdfeb3a6
--- /dev/null
+++ b/chromium/tools/auto_bisect/configs/try.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Starts bisect try jobs on multiple platforms using known-good configs.
+
+The purpose of this script is to serve as an integration test for the
+auto-bisect project by starting try jobs for various config types and
+various platforms.
+
+The known-good configs are in this same directory as this script. They
+are expected to all end in ".cfg" and start with the name of the platform
+followed by a dot.
+
+You can specify --full to try running each config on all applicable bots;
+the default behavior is to try each config on only one bot.
+"""
+
+import argparse
+import logging
+import os
+import subprocess
+import sys
+
+SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
+BISECT_CONFIG = os.path.join(SCRIPT_DIR, os.path.pardir, 'bisect.cfg')
+PERF_TEST_CONFIG = os.path.join(
+ SCRIPT_DIR, os.path.pardir, os.path.pardir, 'run-perf-test.cfg')
+PLATFORM_BOT_MAP = {
+ 'linux': ['linux_perf_bisect'],
+ 'mac': ['mac_10_9_perf_bisect', 'mac_10_10_perf_bisect'],
+ 'win': ['win_perf_bisect', 'win_8_perf_bisect', 'win_xp_perf_bisect'],
+ 'winx64': ['win_x64_perf_bisect'],
+ 'android': [
+ 'android_nexus4_perf_bisect',
+ 'android_nexus5_perf_bisect',
+ 'android_nexus7_perf_bisect',
+ ],
+}
+SVN_URL = 'svn://svn.chromium.org/chrome-try/try-perf'
+AUTO_COMMIT_MESSAGE = 'Automatic commit for bisect try job.'
+
+
def main(argv):
  """Parses arguments and starts try jobs for the given sample configs.

  Args:
    argv: The full argument list, including the program name at index 0.
  """
  parser = argparse.ArgumentParser(description=__doc__)
  parser.add_argument('--full', action='store_true',
                      help='Run each config on all applicable bots.')
  parser.add_argument('configs', nargs='+',
                      help='One or more sample config files.')
  parser.add_argument('--verbose', '-v', action='store_true',
                      help='Output additional debugging information.')
  parser.add_argument('--dry-run', action='store_true',
                      help='Don\'t execute "git try" while running.')
  args = parser.parse_args(argv[1:])
  _SetupLogging(args.verbose)
  logging.debug('Source configs: %s', args.configs)
  try:
    _StartTryJobs(args.configs, args.full, args.dry_run)
  except subprocess.CalledProcessError as error:
    # Parenthesized print works on Python 2 and 3; the original
    # "print str(error)" statement form is Python-2-only syntax.
    print(str(error))
    print(error.output)
+
+
+def _SetupLogging(verbose):
+ level = logging.INFO
+ if verbose:
+ level = logging.DEBUG
+ logging.basicConfig(level=level)
+
+
def _StartTryJobs(source_configs, full_mode=False, dry_run=False):
  """Tries each of the given sample configs on one or more try bots."""
  for config_path in source_configs:
    _StartTry(config_path,
              _DestConfig(config_path),
              _BotNames(config_path, full_mode=full_mode),
              dry_run=dry_run)
+
+
def _DestConfig(source_config):
  """Returns the path that a sample config should be copied to."""
  dest = BISECT_CONFIG if 'bisect' in source_config else PERF_TEST_CONFIG
  if dest is PERF_TEST_CONFIG:
    # Anything that is not a bisect config must be a perf-test config.
    assert 'perf_test' in source_config, source_config
  return dest
+
+
def _BotNames(source_config, full_mode=False):
  """Returns try bot names to use for the given config file name."""
  # Config file names look like "<platform>.<type>.<benchmark>.cfg".
  platform = os.path.basename(source_config).split('.')[0]
  assert platform in PLATFORM_BOT_MAP
  names = PLATFORM_BOT_MAP[platform]
  return names if full_mode else names[:1]
+
+
def _StartTry(source_config, dest_config, bot_names, dry_run=False):
  """Sends a try job with the given config to the given try bots.

  Args:
    source_config: Path of the sample config to copy over.
    dest_config: Destination path to copy sample to, e.g. "./bisect.cfg".
    bot_names: List of try bot builder names.
    dry_run: If True, the "git try" command is logged but not executed.
  """
  assert os.path.exists(source_config)
  assert os.path.exists(dest_config)
  # Refuse to run on top of a leftover auto-commit from a previous run.
  assert _LastCommitMessage() != AUTO_COMMIT_MESSAGE

  # Copy the sample config over and commit it.
  _Run(['cp', source_config, dest_config])
  _Run(['git', 'commit', '--all', '-m', AUTO_COMMIT_MESSAGE])

  try:
    # Start the try job.
    job_name = 'Automatically-started (%s)' % os.path.basename(source_config)
    try_command = ['git', 'try', '--svn_repo', SVN_URL, '--name', job_name]
    for bot_name in bot_names:
      try_command.extend(['--bot', bot_name])
    print _Run(try_command, dry_run=dry_run)
  finally:
    # Revert the immediately-previous commit which was made just above.
    assert _LastCommitMessage() == AUTO_COMMIT_MESSAGE
    _Run(['git', 'reset', '--hard', 'HEAD~1'])
+
+
def _LastCommitMessage():
  """Returns the subject line of the most recent git commit."""
  subject = _Run(['git', 'log', '--format=%s', '-1'])
  return subject.strip()
+
+
+def _Run(command, dry_run=False):
+ """Runs a command in a subprocess.
+
+ Args:
+ command: The command given as an args list.
+
+ Returns:
+ The output of the command.
+
+ Raises:
+ subprocess.CalledProcessError: The return-code was non-zero.
+ """
+ logging.debug('Running %s', command)
+ if dry_run:
+ return 'Did not run command because this is a dry run.'
+ return subprocess.check_output(command)
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/chromium/tools/auto_bisect/configs/win.bisect.dromaeo.cfg b/chromium/tools/auto_bisect/configs/win.bisect.dromaeo.cfg
new file mode 100644
index 00000000000..8cab7125667
--- /dev/null
+++ b/chromium/tools/auto_bisect/configs/win.bisect.dromaeo.cfg
@@ -0,0 +1,14 @@
+# Config based on http://crbug.com/444762.
+
+config = {
+ 'command': 'python tools/perf/run_benchmark -v --browser=release dromaeo.domcorequery',
+ 'good_revision': '309431',
+ 'bad_revision': '309442',
+ 'metric': 'dom/dom',
+ 'repeat_count': '5',
+ 'max_time_minutes': '5',
+ 'truncate_percent': '20',
+ 'builder_type': 'perf',
+}
+
+ # Workaround git try issue, see crbug.com/257689
diff --git a/chromium/tools/auto_bisect/configs/win.perf_test.kraken.cfg b/chromium/tools/auto_bisect/configs/win.perf_test.kraken.cfg
new file mode 100644
index 00000000000..b17244b9ba9
--- /dev/null
+++ b/chromium/tools/auto_bisect/configs/win.perf_test.kraken.cfg
@@ -0,0 +1,11 @@
+# This config just runs the kraken test once.
+
+config = {
+ "command": "python tools/perf/run_benchmark -v --browser=release kraken",
+ "max_time_minutes": "10",
+ "repeat_count": "1",
+ "truncate_percent": "0"
+}
+
+# Workaround git try issue, see crbug.com/257689
+
diff --git a/chromium/tools/auto_bisect/configs/winx64.bisect.dromaeo.cfg b/chromium/tools/auto_bisect/configs/winx64.bisect.dromaeo.cfg
new file mode 100644
index 00000000000..24f95920de0
--- /dev/null
+++ b/chromium/tools/auto_bisect/configs/winx64.bisect.dromaeo.cfg
@@ -0,0 +1,15 @@
+# Config based on http://crbug.com/444762.
+
+config = {
+ 'command': 'python tools/perf/run_benchmark -v --browser=release dromaeo.domcorequery',
+ 'good_revision': '309431',
+ 'bad_revision': '309442',
+ 'metric': 'dom/dom',
+ 'repeat_count': '5',
+ 'max_time_minutes': '5',
+ 'truncate_percent': '20',
+ 'builder_type': 'perf',
+ 'target_arch': 'x64',
+}
+
+ # Workaround git try issue, see crbug.com/257689
diff --git a/chromium/tools/auto_bisect/configs/winx64.perf_test.kraken.cfg b/chromium/tools/auto_bisect/configs/winx64.perf_test.kraken.cfg
new file mode 100644
index 00000000000..17196f7ea19
--- /dev/null
+++ b/chromium/tools/auto_bisect/configs/winx64.perf_test.kraken.cfg
@@ -0,0 +1,12 @@
+# This config just runs the kraken test once.
+
+config = {
+ "command": "python tools/perf/run_benchmark -v --browser=release kraken",
+ "max_time_minutes": "10",
+ "repeat_count": "1",
+ "target_arch": "x64",
+ "truncate_percent": "0"
+}
+
+# Workaround git try issue, see crbug.com/257689
+
diff --git a/chromium/tools/auto_bisect/fetch_build.py b/chromium/tools/auto_bisect/fetch_build.py
new file mode 100644
index 00000000000..27cd66866b8
--- /dev/null
+++ b/chromium/tools/auto_bisect/fetch_build.py
@@ -0,0 +1,514 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module contains functions for fetching and extracting archived builds.
+
+The builds may be stored in different places by different types of builders;
+for example, builders on tryserver.chromium.perf store builds in one place,
+while builders on chromium.linux store builds in another.
+
+This module can be either imported or run as a stand-alone script to download
+and extract a build.
+
+Usage: fetch_build.py <type> <revision> <output_dir> [options]
+"""
+
+import argparse
+import errno
+import logging
+import os
+import shutil
+import sys
+import zipfile
+
+_CATAPULT_BASE_PATH = os.path.abspath(os.path.join(
+ os.path.dirname(__file__), '..', '..', 'third_party', 'catapult',
+ 'catapult_base'))
+if _CATAPULT_BASE_PATH not in sys.path:
+ sys.path.insert(1, _CATAPULT_BASE_PATH)
+from catapult_base import cloud_storage
+
+import bisect_utils
+
+# Possible builder types.
+PERF_BUILDER = 'perf'
+FULL_BUILDER = 'full'
+ANDROID_CHROME_PERF_BUILDER = 'android-chrome-perf'
+
+# Maximum time in seconds to wait after posting build request to the try server.
+MAX_MAC_BUILD_TIME = 14400
+MAX_WIN_BUILD_TIME = 14400
+MAX_LINUX_BUILD_TIME = 14400
+
+# Try server status page URLs, used to get build status.
+PERF_TRY_SERVER_URL = 'http://build.chromium.org/p/tryserver.chromium.perf'
+LINUX_TRY_SERVER_URL = 'http://build.chromium.org/p/tryserver.chromium.linux'
+
+
+def GetBucketAndRemotePath(revision, builder_type=PERF_BUILDER,
+ target_arch='ia32', target_platform='chromium',
+ deps_patch_sha=None, extra_src=None):
+ """Returns the location where a build archive is expected to be.
+
+ Args:
+ revision: Revision string, e.g. a git commit hash or SVN revision.
+ builder_type: Type of build archive.
+ target_arch: Architecture, e.g. "ia32".
+ target_platform: Platform name, e.g. "chromium" or "android".
+ deps_patch_sha: SHA1 hash which identifies a particular combination of
+ custom revisions for dependency repositories.
+ extra_src: Path to a script which can be used to modify the bisect script's
+ behavior.
+
+ Returns:
+ A pair of strings (bucket, path), where the archive is expected to be.
+ """
+ logging.info('Getting GS URL for archive of builder "%s", "%s", "%s".',
+ builder_type, target_arch, target_platform)
+ build_archive = BuildArchive.Create(
+ builder_type, target_arch=target_arch, target_platform=target_platform,
+ extra_src=extra_src)
+ bucket = build_archive.BucketName()
+ remote_path = build_archive.FilePath(revision, deps_patch_sha=deps_patch_sha)
+ return bucket, remote_path
+
+
+def GetBuilderNameAndBuildTime(builder_type=PERF_BUILDER, target_arch='ia32',
+ target_platform='chromium', extra_src=None):
+ """Gets builder bot name and build time in seconds based on platform."""
+ logging.info('Getting builder name for builder "%s", "%s", "%s".',
+ builder_type, target_arch, target_platform)
+ build_archive = BuildArchive.Create(
+ builder_type, target_arch=target_arch, target_platform=target_platform,
+ extra_src=extra_src)
+ return build_archive.GetBuilderName(), build_archive.GetBuilderBuildTime()
+
+
+def GetBuildBotUrl(builder_type=PERF_BUILDER, target_arch='ia32',
+ target_platform='chromium', extra_src=None):
+ """Gets buildbot URL for a given builder type."""
+ logging.info('Getting buildbot URL for "%s", "%s", "%s".',
+ builder_type, target_arch, target_platform)
+ build_archive = BuildArchive.Create(
+ builder_type, target_arch=target_arch, target_platform=target_platform,
+ extra_src=extra_src)
+ return build_archive.GetBuildBotUrl()
+
+
+class BuildArchive(object):
+ """Represents a place where builds of some type are stored.
+
+ There are two pieces of information required to locate a file in Google
+ Cloud Storage, bucket name and file path. Subclasses of this class contain
+ specific logic about which bucket names and paths should be used to fetch
+ a build.
+ """
+
+ @staticmethod
+ def Create(builder_type, target_arch='ia32', target_platform='chromium',
+ extra_src=None):
+ if builder_type == PERF_BUILDER:
+ return PerfBuildArchive(target_arch, target_platform)
+ if builder_type == FULL_BUILDER:
+ return FullBuildArchive(target_arch, target_platform)
+ if builder_type == ANDROID_CHROME_PERF_BUILDER:
+ try:
+ # Load and initialize a module in extra source file and
+ # return its module object to access android-chrome specific data.
+ loaded_extra_src = bisect_utils.LoadExtraSrc(extra_src)
+ return AndroidChromeBuildArchive(
+ target_arch, target_platform, loaded_extra_src)
+ except (IOError, TypeError, ImportError):
+ raise RuntimeError('Invalid or missing --extra_src. [%s]' % extra_src)
+ raise NotImplementedError('Builder type "%s" not supported.' % builder_type)
+
+ def __init__(self, target_arch='ia32', target_platform='chromium',
+ extra_src=None):
+ self._extra_src = extra_src
+ if bisect_utils.IsLinuxHost() and target_platform == 'android':
+ if target_arch == 'arm64':
+ self._platform = 'android_arm64'
+ else:
+ self._platform = 'android'
+ elif bisect_utils.IsLinuxHost() and target_platform == 'android-chrome':
+ self._platform = 'android-chrome'
+ elif bisect_utils.IsLinuxHost():
+ self._platform = 'linux'
+ elif bisect_utils.IsMacHost():
+ self._platform = 'mac'
+ elif bisect_utils.Is64BitWindows() and target_arch == 'x64':
+ self._platform = 'win64'
+ elif bisect_utils.IsWindowsHost():
+ self._platform = 'win'
+ else:
+ raise NotImplementedError('Unknown platform "%s".' % sys.platform)
+
+ def BucketName(self):
+ raise NotImplementedError()
+
+ def FilePath(self, revision, deps_patch_sha=None):
+ """Returns the remote file path to download a build from.
+
+ Args:
+ revision: A Chromium revision; this could be a git commit hash or
+ commit position or SVN revision number.
+ deps_patch_sha: The SHA1 hash of a patch to the DEPS file, which
+ uniquely identifies a change to use a particular revision of
+ a dependency.
+
+ Returns:
+      A file path, which does not include a bucket name.
+ """
+ raise NotImplementedError()
+
+ def _ZipFileName(self, revision, deps_patch_sha=None):
+ """Gets the file name of a zip archive for a particular revision.
+
+ This returns a file name of the form full-build-<platform>_<revision>.zip,
+ which is a format used by multiple types of builders that store archives.
+
+ Args:
+ revision: A git commit hash or other revision string.
+ deps_patch_sha: SHA1 hash of a DEPS file patch.
+
+ Returns:
+ The archive file name.
+ """
+ base_name = 'full-build-%s' % self._PlatformName()
+ if deps_patch_sha:
+ revision = '%s_%s' % (revision, deps_patch_sha)
+ return '%s_%s.zip' % (base_name, revision)
+
+ def _PlatformName(self):
+ """Return a string to be used in paths for the platform."""
+ if self._platform in ('win', 'win64'):
+ # Build archive for win64 is still stored with "win32" in the name.
+ return 'win32'
+ if self._platform in ('linux', 'android', 'android_arm64'):
+ # Android builds are also stored with "linux" in the name.
+ return 'linux'
+ if self._platform == 'mac':
+ return 'mac'
+ raise NotImplementedError('Unknown platform "%s".' % sys.platform)
+
+ def GetBuilderName(self):
+ raise NotImplementedError()
+
+ def GetBuilderBuildTime(self):
+ """Returns the time to wait for a build after requesting one."""
+ if self._platform in ('win', 'win64'):
+ return MAX_WIN_BUILD_TIME
+ if self._platform in ('linux', 'android',
+ 'android_arm64', 'android-chrome'):
+ return MAX_LINUX_BUILD_TIME
+ if self._platform == 'mac':
+ return MAX_MAC_BUILD_TIME
+ raise NotImplementedError('Unsupported Platform "%s".' % sys.platform)
+
+ def GetBuildBotUrl(self):
+ raise NotImplementedError()
+
+
+class PerfBuildArchive(BuildArchive):
+
+ def BucketName(self):
+ return 'chrome-perf'
+
+ def FilePath(self, revision, deps_patch_sha=None):
+ return '%s/%s' % (self._ArchiveDirectory(),
+ self._ZipFileName(revision, deps_patch_sha))
+
+ def _ArchiveDirectory(self):
+ """Returns the directory name to download builds from."""
+ platform_to_directory = {
+ 'android': 'android_perf_rel',
+ 'android_arm64': 'android_perf_rel_arm64',
+ 'linux': 'Linux Builder',
+ 'mac': 'Mac Builder',
+ 'win64': 'Win x64 Builder',
+ 'win': 'Win Builder',
+ }
+ assert self._platform in platform_to_directory
+ return platform_to_directory.get(self._platform)
+
+ def GetBuilderName(self):
+ """Gets builder bot name based on platform."""
+ if self._platform == 'win64':
+ return 'winx64_bisect_builder'
+ elif self._platform == 'win':
+ return 'win_perf_bisect_builder'
+ elif self._platform == 'linux':
+ return 'linux_perf_bisect_builder'
+ elif self._platform == 'android':
+ return 'android_perf_bisect_builder'
+ elif self._platform == 'android_arm64':
+ return 'android_arm64_perf_bisect_builder'
+ elif self._platform == 'mac':
+ return 'mac_perf_bisect_builder'
+ raise NotImplementedError('Unsupported platform "%s".' % sys.platform)
+
+ def GetBuildBotUrl(self):
+ """Returns buildbot URL for fetching build info."""
+ return PERF_TRY_SERVER_URL
+
+
+class FullBuildArchive(BuildArchive):
+
+ def BucketName(self):
+ platform_to_bucket = {
+ 'android': 'chromium-android',
+ 'linux': 'chromium-linux-archive',
+ 'mac': 'chromium-mac-archive',
+ 'win64': 'chromium-win-archive',
+ 'win': 'chromium-win-archive',
+ }
+ assert self._platform in platform_to_bucket
+ return platform_to_bucket.get(self._platform)
+
+ def FilePath(self, revision, deps_patch_sha=None):
+ return '%s/%s' % (self._ArchiveDirectory(),
+ self._ZipFileName(revision, deps_patch_sha))
+
+ def _ArchiveDirectory(self):
+ """Returns the remote directory to download builds from."""
+ platform_to_directory = {
+ 'android': 'android_main_rel',
+ 'linux': 'chromium.linux/Linux Builder',
+ 'mac': 'chromium.mac/Mac Builder',
+ 'win64': 'chromium.win/Win x64 Builder',
+ 'win': 'chromium.win/Win Builder',
+ }
+ assert self._platform in platform_to_directory
+ return platform_to_directory.get(self._platform)
+
+ def GetBuilderName(self):
+ """Gets builder bot name based on platform."""
+ if self._platform == 'linux':
+ return 'linux_full_bisect_builder'
+ raise NotImplementedError('Unsupported platform "%s".' % sys.platform)
+
+ def GetBuildBotUrl(self):
+ """Returns buildbot URL for fetching build info."""
+ return LINUX_TRY_SERVER_URL
+
+
+class AndroidChromeBuildArchive(BuildArchive):
+ """Represents a place where builds of android-chrome type are stored.
+
+ If AndroidChromeBuildArchive is used, it is assumed that the --extra_src
+ is a valid Python module which contains the module-level functions
+ GetBucketName and GetArchiveDirectory.
+ """
+
+ def BucketName(self):
+ return self._extra_src.GetBucketName()
+
+ def _ZipFileName(self, revision, deps_patch_sha=None):
+ """Gets the file name of a zip archive on android-chrome.
+
+ This returns a file name of the form build_product_<revision>.zip,
+ which is a format used by android-chrome.
+
+ Args:
+ revision: A git commit hash or other revision string.
+ deps_patch_sha: SHA1 hash of a DEPS file patch.
+
+ Returns:
+ The archive file name.
+ """
+ if deps_patch_sha:
+ revision = '%s_%s' % (revision, deps_patch_sha)
+ return 'build_product_%s.zip' % revision
+
+ def FilePath(self, revision, deps_patch_sha=None):
+ return '%s/%s' % (self._ArchiveDirectory(),
+ self._ZipFileName(revision, deps_patch_sha))
+
+ def _ArchiveDirectory(self):
+ """Returns the directory name to download builds from."""
+ return self._extra_src.GetArchiveDirectory()
+
+ def GetBuilderName(self):
+    """Returns the builder name from the extra source."""
+ return self._extra_src.GetBuilderName()
+
+ def GetBuildBotUrl(self):
+ """Returns buildbot URL for fetching build info."""
+ return self._extra_src.GetBuildBotUrl()
+
+
+def BuildIsAvailable(bucket_name, remote_path):
+ """Checks whether a build is currently archived at some place."""
+ logging.info('Checking existence: gs://%s/%s' % (bucket_name, remote_path))
+ try:
+ exists = cloud_storage.Exists(bucket_name, remote_path)
+ logging.info('Exists? %s' % exists)
+ return exists
+ except cloud_storage.CloudStorageError:
+ return False
+
+
+def FetchFromCloudStorage(bucket_name, source_path, destination_dir):
+ """Fetches file(s) from the Google Cloud Storage.
+
+ As a side-effect, this prints messages to stdout about what's happening.
+
+ Args:
+ bucket_name: Google Storage bucket name.
+ source_path: Source file path.
+ destination_dir: Destination file path.
+
+ Returns:
+ Local file path of downloaded file if it was downloaded. If the file does
+ not exist in the given bucket, or if there was an error while downloading,
+ None is returned.
+ """
+ target_file = os.path.join(destination_dir, os.path.basename(source_path))
+ gs_url = 'gs://%s/%s' % (bucket_name, source_path)
+ try:
+ if cloud_storage.Exists(bucket_name, source_path):
+ logging.info('Fetching file from %s...', gs_url)
+ cloud_storage.Get(bucket_name, source_path, target_file)
+ if os.path.exists(target_file):
+ return target_file
+ else:
+ logging.info('File %s not found in cloud storage.', gs_url)
+ return None
+ except Exception as e:
+ logging.warn('Exception while fetching from cloud storage: %s', e)
+ if os.path.exists(target_file):
+ os.remove(target_file)
+ return None
+
+
+def Unzip(file_path, output_dir, verbose=True):
+ """Extracts a zip archive's contents into the given output directory.
+
+ This was based on ExtractZip from build/scripts/common/chromium_utils.py.
+
+ Args:
+ file_path: Path of the zip file to extract.
+ output_dir: Path to the destination directory.
+ verbose: Whether to print out what is being extracted.
+
+ Raises:
+ IOError: The unzip command had a non-zero exit code.
+ RuntimeError: Failed to create the output directory.
+ """
+ _MakeDirectory(output_dir)
+
+ # On Linux and Mac, we use the unzip command because it handles links and
+ # file permissions bits, so achieving this behavior is easier than with
+ # ZipInfo options.
+ #
+  # The Mac version of unzip unfortunately does not support Zip64, whereas
+ # the python module does, so we have to fall back to the python zip module
+ # on Mac if the file size is greater than 4GB.
+ mac_zip_size_limit = 2 ** 32 # 4GB
+ if (bisect_utils.IsLinuxHost() or
+ (bisect_utils.IsMacHost()
+ and os.path.getsize(file_path) < mac_zip_size_limit)):
+ unzip_command = ['unzip', '-o']
+ _UnzipUsingCommand(unzip_command, file_path, output_dir)
+ return
+
+ # On Windows, try to use 7z if it is installed, otherwise fall back to the
+ # Python zipfile module. If 7z is not installed, then this may fail if the
+ # zip file is larger than 512MB.
+ sevenzip_path = r'C:\Program Files\7-Zip\7z.exe'
+ if bisect_utils.IsWindowsHost() and os.path.exists(sevenzip_path):
+ unzip_command = [sevenzip_path, 'x', '-y']
+ _UnzipUsingCommand(unzip_command, file_path, output_dir)
+ return
+
+ _UnzipUsingZipFile(file_path, output_dir, verbose)
+
+
+def _UnzipUsingCommand(unzip_command, file_path, output_dir):
+ """Extracts a zip file using an external command.
+
+ Args:
+ unzip_command: An unzipping command, as a string list, without the filename.
+ file_path: Path to the zip file.
+ output_dir: The directory which the contents should be extracted to.
+
+ Raises:
+ IOError: The command had a non-zero exit code.
+ """
+ absolute_filepath = os.path.abspath(file_path)
+ command = unzip_command + [absolute_filepath]
+ return_code = _RunCommandInDirectory(output_dir, command)
+ if return_code:
+ _RemoveDirectoryTree(output_dir)
+ raise IOError('Unzip failed: %s => %s' % (str(command), return_code))
+
+
+def _RunCommandInDirectory(directory, command):
+ """Changes to a directory, runs a command, then changes back."""
+ saved_dir = os.getcwd()
+ os.chdir(directory)
+ return_code = bisect_utils.RunProcess(command)
+ os.chdir(saved_dir)
+ return return_code
+
+
+def _UnzipUsingZipFile(file_path, output_dir, verbose=True):
+ """Extracts a zip file using the Python zipfile module."""
+ assert bisect_utils.IsWindowsHost() or bisect_utils.IsMacHost()
+ zf = zipfile.ZipFile(file_path)
+ for name in zf.namelist():
+ if verbose:
+ print 'Extracting %s' % name
+ zf.extract(name, output_dir)
+ if bisect_utils.IsMacHost():
+ # Restore file permission bits.
+ mode = zf.getinfo(name).external_attr >> 16
+ os.chmod(os.path.join(output_dir, name), mode)
+
+
+def _MakeDirectory(path):
+ try:
+ os.makedirs(path)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+
+def _RemoveDirectoryTree(path):
+ try:
+ if os.path.exists(path):
+ shutil.rmtree(path)
+ except OSError, e:
+ if e.errno != errno.ENOENT:
+ raise
+
+
+def Main(argv):
+ """Downloads and extracts a build based on the command line arguments."""
+ parser = argparse.ArgumentParser()
+ parser.add_argument('builder_type')
+ parser.add_argument('revision')
+ parser.add_argument('output_dir')
+ parser.add_argument('--target-arch', default='ia32')
+ parser.add_argument('--target-platform', default='chromium')
+ parser.add_argument('--deps-patch-sha')
+ args = parser.parse_args(argv[1:])
+
+ bucket_name, remote_path = GetBucketAndRemotePath(
+ args.revision, args.builder_type, target_arch=args.target_arch,
+ target_platform=args.target_platform,
+ deps_patch_sha=args.deps_patch_sha)
+ print 'Bucket name: %s, remote path: %s' % (bucket_name, remote_path)
+
+ if not BuildIsAvailable(bucket_name, remote_path):
+ print 'Build is not available.'
+ return 1
+
+ FetchFromCloudStorage(bucket_name, remote_path, args.output_dir)
+ print 'Build has been downloaded to and extracted in %s.' % args.output_dir
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(Main(sys.argv))
diff --git a/chromium/tools/auto_bisect/fetch_build_test.py b/chromium/tools/auto_bisect/fetch_build_test.py
new file mode 100644
index 00000000000..f559b1ebea3
--- /dev/null
+++ b/chromium/tools/auto_bisect/fetch_build_test.py
@@ -0,0 +1,261 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the fetch_builds module."""
+
+import errno
+import unittest
+
+# The third-party mock module is expected to be available in PYTHONPATH.
+import mock
+
+import fetch_build
+
+
+# The tests below test private functions (W0212).
+# Some methods don't reference self because they use the mock module (R0201).
+# pylint: disable=R0201,W0212
+class FetchBuildTest(unittest.TestCase):
+
+ def setUp(self):
+    # A mock of the cloud_storage module is used in the methods below.
+ cloud_storage_patcher = mock.patch('fetch_build.cloud_storage')
+ self.mock_cloud_storage = cloud_storage_patcher.start()
+ self.addCleanup(cloud_storage_patcher.stop)
+
+ @mock.patch('fetch_build.os.path.exists')
+ def test_FetchFromCloudStorage_FileFound(self, mock_os_path_exists):
+ self.mock_cloud_storage.Exists.return_value = True
+ mock_os_path_exists.return_value = True
+ local_path = fetch_build.FetchFromCloudStorage(
+ 'my_bucket', 'remote/foo.zip', 'local')
+ self.assertEqual('local/foo.zip', local_path)
+ self.mock_cloud_storage.Get.assert_called_with(
+ 'my_bucket', 'remote/foo.zip', 'local/foo.zip')
+
+ def test_FetchFromCloudStorage_FileNotFound(self):
+ self.mock_cloud_storage.Exists.return_value = False
+ local_path = fetch_build.FetchFromCloudStorage(
+ 'my_bucket', 'remote/foo.zip', 'local')
+ self.assertIsNone(local_path)
+ self.assertFalse(self.mock_cloud_storage.Get.called)
+
+
+class BuildArchiveTest(unittest.TestCase):
+
+ def test_CreatePerfBuildArchive(self):
+ archive = fetch_build.BuildArchive.Create(fetch_build.PERF_BUILDER)
+ self.assertEqual('chrome-perf', archive.BucketName())
+ self.assertTrue(isinstance(archive, fetch_build.PerfBuildArchive))
+
+ def test_CreateFullBuildArchive(self):
+ archive = fetch_build.BuildArchive.Create(fetch_build.FULL_BUILDER)
+ archive._platform = 'linux'
+ self.assertEqual('chromium-linux-archive', archive.BucketName())
+ self.assertTrue(isinstance(archive, fetch_build.FullBuildArchive))
+
+ def test_BuildArchive_NonExistentType(self):
+ self.assertRaises(
+ NotImplementedError, fetch_build.BuildArchive.Create, 'other')
+
+ def test_FullBuildArchive_Linux(self):
+ archive = fetch_build.FullBuildArchive()
+ archive._platform = 'linux'
+ self.assertEqual('chromium-linux-archive', archive.BucketName())
+ self.assertEqual(
+ 'chromium.linux/Linux Builder/full-build-linux_1234567890abcdef.zip',
+ archive.FilePath('1234567890abcdef'))
+
+ def test_FullBuildArchive_Android(self):
+ archive = fetch_build.FullBuildArchive()
+ archive._platform = 'android'
+ self.assertEqual('chromium-android', archive.BucketName())
+ self.assertEqual(
+ 'android_main_rel/full-build-linux_1234567890abcdef.zip',
+ archive.FilePath('1234567890abcdef'))
+
+ def test_FullBuildArchive_Linux_BuilderName(self):
+ archive = fetch_build.FullBuildArchive()
+ archive._platform = 'linux'
+ self.assertEqual('linux_full_bisect_builder', archive.GetBuilderName())
+
+ def test_FullBuildArchive_Windows_BuildTime(self):
+ archive = fetch_build.FullBuildArchive()
+ archive._platform = 'win'
+ self.assertEqual(14400, archive.GetBuilderBuildTime())
+
+ def test_PerfBuildArchive_Linux(self):
+ archive = fetch_build.PerfBuildArchive()
+ archive._platform = 'linux'
+ self.assertEqual('chrome-perf', archive.BucketName())
+ self.assertEqual(
+ 'Linux Builder/full-build-linux_1234567890abcdef.zip',
+ archive.FilePath('1234567890abcdef'))
+
+ def test_PerfBuildArchive_Android(self):
+ archive = fetch_build.PerfBuildArchive()
+ archive._platform = 'android'
+ self.assertEqual('chrome-perf', archive.BucketName())
+ self.assertEqual(
+ 'android_perf_rel/full-build-linux_123456.zip',
+ archive.FilePath('123456'))
+
+ def test_PerfBuildArchive_AndroidArm64(self):
+ archive = fetch_build.PerfBuildArchive()
+ archive._platform = 'android_arm64'
+ self.assertEqual('chrome-perf', archive.BucketName())
+ self.assertEqual(
+ 'android_perf_rel_arm64/full-build-linux_123456.zip',
+ archive.FilePath('123456'))
+
+ def test_PerfBuildArchive_64BitWindows(self):
+ archive = fetch_build.PerfBuildArchive(target_arch='x64')
+ archive._platform = 'win64'
+ self.assertEqual('chrome-perf', archive.BucketName())
+ self.assertEqual(
+ 'Win x64 Builder/full-build-win32_123456.zip',
+ archive.FilePath('123456'))
+
+ def test_PerfBuildArchive_WithDepsPatchSha(self):
+ archive = fetch_build.PerfBuildArchive()
+ archive._platform = 'linux'
+ self.assertEqual(
+ 'Linux Builder/full-build-linux_123456'
+ '_aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.zip',
+ archive.FilePath(123456, 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'))
+
+ def test_PerfBuildArchive_64BitWindows_BuilderName(self):
+ archive = fetch_build.PerfBuildArchive()
+ archive._platform = 'win64'
+ self.assertEqual('winx64_bisect_builder', archive.GetBuilderName())
+
+ def test_PerfBuildArchive_64BitWindows_BuildTime(self):
+ archive = fetch_build.PerfBuildArchive()
+ archive._platform = 'win64'
+ self.assertEqual(14400, archive.GetBuilderBuildTime())
+
+ def test_PerfBuildArchive_Windows_BuilderName(self):
+ archive = fetch_build.PerfBuildArchive()
+ archive._platform = 'win'
+ self.assertEqual('win_perf_bisect_builder', archive.GetBuilderName())
+
+ def test_PerfBuildArchive_Windows_BuildTime(self):
+ archive = fetch_build.PerfBuildArchive()
+ archive._platform = 'win'
+ self.assertEqual(14400, archive.GetBuilderBuildTime())
+
+ def test_PerfBuildArchive_Linux_BuilderName(self):
+ archive = fetch_build.PerfBuildArchive()
+ archive._platform = 'linux'
+ self.assertEqual('linux_perf_bisect_builder', archive.GetBuilderName())
+
+ def test_PerfBuildArchive_Linux_BuildTime(self):
+ archive = fetch_build.PerfBuildArchive()
+ archive._platform = 'linux'
+ self.assertEqual(14400, archive.GetBuilderBuildTime())
+
+ def test_PerfBuildArchive_Android_BuilderName(self):
+ archive = fetch_build.PerfBuildArchive()
+ archive._platform = 'android'
+ self.assertEqual('android_perf_bisect_builder', archive.GetBuilderName())
+
+ def test_PerfBuildArchive_Android_BuildTime(self):
+ archive = fetch_build.PerfBuildArchive()
+ archive._platform = 'android'
+ self.assertEqual(14400, archive.GetBuilderBuildTime())
+
+ def test_PerfBuildArchive_Mac_BuilderName(self):
+ archive = fetch_build.PerfBuildArchive()
+ archive._platform = 'mac'
+ self.assertEqual('mac_perf_bisect_builder', archive.GetBuilderName())
+
+ def test_PerfBuildArchive_mac_BuildTime(self):
+ archive = fetch_build.PerfBuildArchive()
+ archive._platform = 'mac'
+ self.assertEqual(14400, archive.GetBuilderBuildTime())
+
+ def test_GetBuildBotUrl_Perf(self):
+ self.assertEqual(
+ fetch_build.PERF_TRY_SERVER_URL,
+ fetch_build.GetBuildBotUrl(fetch_build.PERF_BUILDER))
+
+ def test_GetBuildBotUrl_full(self):
+ self.assertEqual(
+ fetch_build.LINUX_TRY_SERVER_URL,
+ fetch_build.GetBuildBotUrl(fetch_build.FULL_BUILDER))
+
+
+class UnzipTest(unittest.TestCase):
+
+ def setUp(self):
+ # Mocks of the os and bisect_utils modules are used in the methods below.
+ os_patcher = mock.patch('fetch_build.os')
+ self.mock_os = os_patcher.start()
+ self.addCleanup(os_patcher.stop)
+
+ bisect_utils_patcher = mock.patch('fetch_build.bisect_utils')
+ self.mock_bisect_utils = bisect_utils_patcher.start()
+ self.addCleanup(bisect_utils_patcher.stop)
+
+ @mock.patch('fetch_build._MakeDirectory')
+ @mock.patch('fetch_build._UnzipUsingCommand')
+ def test_Unzip_Linux(self, mock_UnzipUsingCommand, mock_MakeDirectory):
+ self.mock_bisect_utils.IsLinuxHost.return_value = True
+ self.mock_bisect_utils.IsMacHost.return_value = False
+ self.mock_bisect_utils.IsWindowsHost.return_value = False
+ fetch_build.Unzip('x.zip', 'out_dir', verbose=False)
+ mock_MakeDirectory.assert_called_with('out_dir')
+ mock_UnzipUsingCommand.assert_called_with(
+ ['unzip', '-o'], 'x.zip', 'out_dir')
+
+ @mock.patch('fetch_build._MakeDirectory')
+ @mock.patch('fetch_build._UnzipUsingZipFile')
+ def test_Unzip_Mac_LargeFile(
+ self, mock_UnzipUsingZipFile, mock_MakeDirectory):
+ # The zipfile module is used to unzip on mac when the file is > 4GB.
+ self.mock_bisect_utils.IsLinuxHost.return_value = False
+ self.mock_bisect_utils.IsMacHost.return_value = True
+ self.mock_bisect_utils.IsWindowsHost.return_value = False
+ self.mock_os.path.getsize.return_value = 2 ** 33 # 8GB
+ fetch_build.Unzip('x.zip', 'out_dir', verbose=False)
+ mock_MakeDirectory.assert_called_with('out_dir')
+ mock_UnzipUsingZipFile.assert_called_with('x.zip', 'out_dir', False)
+
+ def test_UnzipUsingCommand(self):
+ # The _UnzipUsingCommand function should move to the output
+ # directory and run the command with the file's absolute path.
+ self.mock_os.path.abspath.return_value = '/foo/some/path/x.zip'
+ self.mock_os.getcwd.return_value = 'curr_dir'
+ self.mock_bisect_utils.RunProcess.return_value = 0
+ fetch_build._UnzipUsingCommand(['unzip'], 'x.zip', 'out_dir')
+ self.mock_os.chdir.assert_has_calls(
+ [mock.call('out_dir'), mock.call('curr_dir')])
+ self.mock_bisect_utils.RunProcess.assert_called_with(
+ ['unzip', '/foo/some/path/x.zip'])
+
+ def test_MakeDirectory(self):
+ # _MakeDirectory uses os.makedirs.
+ fetch_build._MakeDirectory('some/path')
+ self.mock_os.makedirs.assert_called_with('some/path')
+
+ def test_MakeDirectory_RaisesError(self):
+ self.mock_os.makedirs.side_effect = OSError()
+ self.assertRaises(OSError, fetch_build._MakeDirectory, 'some/path')
+
+ def test_MakeDirectory_NoErrorIfDirectoryAlreadyExists(self):
+ already_exists = OSError()
+ already_exists.errno = errno.EEXIST
+ self.mock_os.makedirs.side_effect = already_exists
+ fetch_build._MakeDirectory('some/path')
+
+ @mock.patch('fetch_build.shutil')
+ def test_RemoveDirectoryTree(self, mock_shutil):
+ # _RemoveDirectoryTree uses shutil.rmtree.
+ fetch_build._RemoveDirectoryTree('some/path')
+ mock_shutil.rmtree.assert_called_with('some/path')
+
+
+if __name__ == '__main__':
+ unittest.main()
+
diff --git a/chromium/tools/auto_bisect/math_utils.py b/chromium/tools/auto_bisect/math_utils.py
new file mode 100644
index 00000000000..eef7f0936d3
--- /dev/null
+++ b/chromium/tools/auto_bisect/math_utils.py
@@ -0,0 +1,138 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""General statistical or mathematical functions."""
+
+import math
+
+
+def TruncatedMean(data_set, truncate_proportion):
+ """Calculates the truncated mean of a set of values.
+
+ Note that this isn't just the mean of the set of values with the highest
+ and lowest values discarded; the non-discarded values are also weighted
+ differently depending how many values are discarded.
+
+  NOTE: If there's not much benefit from keeping and weighting partial
+  values, it might be better to use a simplified truncated mean function
+  without weighting.
+
+ Args:
+ data_set: Non-empty list of values.
+ truncate_proportion: How much of the upper and lower portions of the data
+ set to discard, expressed as a value in the range [0, 1].
+        Note: a value of 0.5 or greater would be meaningless.
+
+ Returns:
+ The truncated mean as a float.
+
+ Raises:
+ TypeError: The data set was empty after discarding values.
+ """
+ if len(data_set) > 2:
+ data_set = sorted(data_set)
+
+ discard_num_float = len(data_set) * truncate_proportion
+ discard_num_int = int(math.floor(discard_num_float))
+ kept_weight = len(data_set) - (discard_num_float * 2)
+
+ data_set = data_set[discard_num_int:len(data_set)-discard_num_int]
+
+ weight_left = 1.0 - (discard_num_float - discard_num_int)
+
+ if weight_left < 1:
+ # If the % to discard leaves a fractional portion, need to weight those
+ # values.
+ unweighted_vals = data_set[1:len(data_set)-1]
+ weighted_vals = [data_set[0], data_set[len(data_set)-1]]
+ weighted_vals = [w * weight_left for w in weighted_vals]
+ data_set = weighted_vals + unweighted_vals
+ else:
+ kept_weight = len(data_set)
+
+ data_sum = reduce(lambda x, y: float(x) + float(y), data_set)
+ truncated_mean = data_sum / kept_weight
+ return truncated_mean
+
+
+def Mean(values):
+ """Calculates the arithmetic mean of a list of values."""
+ return TruncatedMean(values, 0.0)
+
+
+def Variance(values):
+ """Calculates the sample variance."""
+ if len(values) == 1:
+ return 0.0
+ mean = Mean(values)
+ differences_from_mean = [float(x) - mean for x in values]
+ squared_differences = [float(x * x) for x in differences_from_mean]
+ variance = sum(squared_differences) / (len(values) - 1)
+ return variance
+
+
+def StandardDeviation(values):
+ """Calculates the sample standard deviation of the given list of values."""
+ return math.sqrt(Variance(values))
+
+
+def RelativeChange(before, after):
+ """Returns the relative change of before and after, relative to before.
+
+ There are several different ways to define relative difference between
+ two numbers; sometimes it is defined as relative to the smaller number,
+ or to the mean of the two numbers. This version returns the difference
+ relative to the first of the two numbers.
+
+ Args:
+ before: A number representing an earlier value.
+ after: Another number, representing a later value.
+
+ Returns:
+ A non-negative floating point number; 0.1 represents a 10% change.
+ """
+ if before == after:
+ return 0.0
+ if before == 0:
+ return float('nan')
+ difference = after - before
+ return math.fabs(difference / before)
+
+
+def PooledStandardError(work_sets):
+ """Calculates the pooled sample standard error for a set of samples.
+
+ Args:
+ work_sets: A collection of collections of numbers.
+
+ Returns:
+ Pooled sample standard error.
+ """
+ numerator = 0.0
+ denominator1 = 0.0
+ denominator2 = 0.0
+
+ for current_set in work_sets:
+ std_dev = StandardDeviation(current_set)
+ numerator += (len(current_set) - 1) * std_dev ** 2
+ denominator1 += len(current_set) - 1
+ if len(current_set) > 0:
+ denominator2 += 1.0 / len(current_set)
+
+ if denominator1 == 0:
+ return 0.0
+
+ return math.sqrt(numerator / denominator1) * math.sqrt(denominator2)
+
+
+# Redefining built-in 'StandardError'
+# pylint: disable=W0622
+def StandardError(values):
+ """Calculates the standard error of a list of values."""
+ # NOTE: This behavior of returning 0.0 in the case of an empty list is
+ # inconsistent with Variance and StandardDeviation above.
+ if len(values) <= 1:
+ return 0.0
+ std_dev = StandardDeviation(values)
+ return std_dev / math.sqrt(len(values))
diff --git a/chromium/tools/auto_bisect/math_utils_test.py b/chromium/tools/auto_bisect/math_utils_test.py
new file mode 100644
index 00000000000..ab85b0be6bb
--- /dev/null
+++ b/chromium/tools/auto_bisect/math_utils_test.py
@@ -0,0 +1,115 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import math
+import unittest
+
+import math_utils
+
+
+class MathUtilsTest(unittest.TestCase):
+  """Tests for mathematical utility functions."""
+
+  def testTruncatedMean_EmptyList(self):
+    # TruncatedMean raises an error when passed an empty list.
+    self.assertRaises(TypeError, math_utils.TruncatedMean, [], 0)
+
+  def testTruncatedMean_TruncateTooMuch(self):
+    # An exception is raised if 50% or more is truncated from both sides.
+    self.assertRaises(TypeError, math_utils.TruncatedMean, [1, 2, 3], 1.0)
+    self.assertRaises(
+        ZeroDivisionError, math_utils.TruncatedMean, [1, 2, 3], 0.5)
+
+  def testTruncatedMean_AlwaysKeepsAtLeastTwoValues(self):
+    # If the length of the input is 1 or 2, nothing is truncated and
+    # the average is returned.
+    self.assertEqual(5.0, math_utils.TruncatedMean([5.0], 0.0))
+    self.assertEqual(5.0, math_utils.TruncatedMean([5.0], 0.25))
+    self.assertEqual(5.0, math_utils.TruncatedMean([5.0], 0.5))
+    self.assertEqual(5.5, math_utils.TruncatedMean([5.0, 6.0], 0.0))
+    self.assertEqual(5.5, math_utils.TruncatedMean([5.0, 6.0], 0.25))
+    self.assertEqual(5.5, math_utils.TruncatedMean([5.0, 6.0], 0.5))
+
+  def testTruncatedMean_Interquartile_NumValuesDivisibleByFour(self):
+    # A truncation fraction of 0.25 gives the interquartile mean: the
+    # lowest and highest quarters of the sorted values are discarded.
+    self.assertEqual(5.0, math_utils.TruncatedMean([1, 4, 6, 100], 0.25))
+    self.assertEqual(
+        6.5, math_utils.TruncatedMean([1, 2, 5, 6, 7, 8, 40, 50], 0.25))
+
+  def testTruncatedMean_Weighting(self):
+    # In the list [0, 1, 4, 5, 20, 100], when 25% of the list at the start
+    # and end are discarded, the part that's left is [1, 4, 5, 20], but
+    # first and last values are weighted so that they only count for half
+    # as much. So the truncated mean is (1/2 + 4 + 5 + 20/2) / 3.0, where
+    # 3.0 is the sum of the weights (0.5 + 1 + 1 + 0.5).
+    self.assertEqual(6.5, (0.5 + 4 + 5 + 10) / 3.0)
+    self.assertEqual(6.5, math_utils.TruncatedMean([0, 1, 4, 5, 20, 100], 0.25))
+
+  def testMean_OneValue(self):
+    self.assertEqual(3.0, math_utils.Mean([3]))
+
+  def testMean_ShortList(self):
+    self.assertEqual(0.5, math_utils.Mean([-3, 0, 1, 4]))
+
+  def testMean_CompareAlternateImplementation(self):
+    """Tests Mean by comparing against an alternate implementation."""
+    def AlternateMean(values):
+      return sum(values) / float(len(values))
+    test_value_lists = [
+        [1],
+        [5, 6.5, 1.2, 3],
+        [-3, 0, 1, 4],
+        [-3, -1, 0.12, 0.752, 3.33, 8, 16, 32, 439],
+    ]
+    for value_list in test_value_lists:
+      self.assertEqual(AlternateMean(value_list), math_utils.Mean(value_list))
+
+  def testRelativeChange_NonZero(self):
+    # The change is relative to the first value, regardless of which is bigger.
+    self.assertEqual(0.5, math_utils.RelativeChange(1.0, 1.5))
+    self.assertEqual(0.5, math_utils.RelativeChange(2.0, 1.0))
+
+  def testRelativeChange_FromZero(self):
+    # If the first number is zero, then the result is not a number.
+    self.assertEqual(0, math_utils.RelativeChange(0, 0))
+    self.assertTrue(math.isnan(math_utils.RelativeChange(0, 1)))
+    self.assertTrue(math.isnan(math_utils.RelativeChange(0, -1)))
+
+  def testRelativeChange_Negative(self):
+    # Note that the return value of RelativeChange is always positive.
+    self.assertEqual(3.0, math_utils.RelativeChange(-1, 2))
+    self.assertEqual(3.0, math_utils.RelativeChange(1, -2))
+    self.assertEqual(1.0, math_utils.RelativeChange(-1, -2))
+
+  def testVariance_EmptyList(self):
+    self.assertRaises(TypeError, math_utils.Variance, [])
+
+  def testVariance_OneValue(self):
+    self.assertEqual(0, math_utils.Variance([0]))
+    self.assertEqual(0, math_utils.Variance([4.3]))
+
+  def testVariance_ShortList(self):
+    # Population variance is the average of squared deviations from the mean.
+    # The deviations from the mean in this example are [3.5, 0.5, -0.5, -3.5],
+    # and the squared deviations are [12.25, 0.25, 0.25, 12.25].
+    # With sample variance, however, 1 is subtracted from the sample size.
+    # So the sample variance is sum([12.25, 0.25, 0.25, 12.25]) / 3.0.
+    self.assertAlmostEqual(8.333333334, sum([12.25, 0.25, 0.25, 12.25]) / 3.0)
+    self.assertAlmostEqual(8.333333334, math_utils.Variance([-3, 0, 1, 4]))
+
+  def testStandardDeviation(self):
+    # Standard deviation is the square root of variance.
+    self.assertRaises(TypeError, math_utils.StandardDeviation, [])
+    self.assertEqual(0.0, math_utils.StandardDeviation([4.3]))
+    self.assertAlmostEqual(2.88675135, math.sqrt(8.33333333333333))
+    self.assertAlmostEqual(2.88675135,
+                           math_utils.StandardDeviation([-3, 0, 1, 4]))
+
+  def testStandardError(self):
+    # Standard error is std. dev. divided by square root of sample size.
+    self.assertEqual(0.0, math_utils.StandardError([]))
+    self.assertEqual(0.0, math_utils.StandardError([4.3]))
+    self.assertAlmostEqual(1.44337567, 2.88675135 / math.sqrt(4))
+    self.assertAlmostEqual(1.44337567, math_utils.StandardError([-3, 0, 1, 4]))
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/chromium/tools/auto_bisect/query_crbug.py b/chromium/tools/auto_bisect/query_crbug.py
new file mode 100644
index 00000000000..61e774c257b
--- /dev/null
+++ b/chromium/tools/auto_bisect/query_crbug.py
@@ -0,0 +1,82 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions to query the chromium issue tracker.
+
+Note that documentation for the Issue Tracker API says it's DEPRECATED, however
+it seems to be in use in other places like the performance dashboard. Also,
+this module attempts to handle most exceptions thrown by querying the tracker
+so that when and if this api is turned off no impact is caused to the bisection
+process."""
+
+import json
+import urllib2
+
+# URL template for the (deprecated) Issue Tracker API; '%s' is the bug id.
+SINGLE_ISSUE_URL = ('https://code.google.com/feeds/issues/p/chromium/issues'
+                    '/full?id=%s&alt=json')
+
+
+class IssueTrackerQueryException(Exception):
+  """Raised when issue tracker data cannot be fetched or parsed."""
+  pass
+
+
+def QuerySingleIssue(issue_id, url_template=SINGLE_ISSUE_URL):
+ """Queries the tracker for a specific issue. Returns a dict.
+
+ This uses the deprecated Issue Tracker API to fetch a JSON representation of
+ the issue details.
+
+ Args:
+ issue_id: An int or string representing the issue id.
+ url_template: URL to query the tracker with '%s' instead of the bug id.
+
+ Returns:
+ A dictionary as parsed by the JSON library from the tracker response.
+
+ Raises:
+ urllib2.HTTPError when appropriate.
+ """
+ assert str(issue_id).isdigit()
+ response = urllib2.urlopen(url_template % issue_id).read()
+ return json.loads(response)
+
+
+def GetIssueState(issue_id):
+ """Returns either 'closed' or 'open' for the given bug ID.
+
+ Args:
+ issue_id: string or string-castable object containing a numeric bug ID.
+ Returns:
+ 'open' or 'closed' depending on the state of the bug.
+ Raises:
+ IssueTrackerQueryException if the data cannot be retrieved or parsed.
+ """
+ try:
+ query_response = QuerySingleIssue(issue_id)
+ # We assume the query returns a single result hence the [0]
+ issue_detail = query_response['feed']['entry'][0]
+ state = issue_detail['issues$state']['$t']
+ return state
+ except urllib2.URLError:
+ raise IssueTrackerQueryException(
+ 'Could not fetch the details form the issue tracker.')
+ except ValueError:
+ raise IssueTrackerQueryException(
+ 'Could not parse the issue tracker\'s response as a json doc.')
+ except KeyError:
+ raise IssueTrackerQueryException(
+ 'The data from the issue tracker is not in the expected format.')
+
+
+def CheckIssueClosed(issue_id):
+ """Checks if a given issue is closed. Returns False when in doubt."""
+ # We only check when issue_id appears to be valid
+ if str(issue_id).isdigit():
+ try:
+ return GetIssueState(issue_id) == 'closed'
+ except IssueTrackerQueryException:
+ # We let this fall through to the return False
+ pass
+ # We return False for anything other than a positive number
+ return False
diff --git a/chromium/tools/auto_bisect/query_crbug_test.py b/chromium/tools/auto_bisect/query_crbug_test.py
new file mode 100644
index 00000000000..5406487542f
--- /dev/null
+++ b/chromium/tools/auto_bisect/query_crbug_test.py
@@ -0,0 +1,82 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+import unittest
+import urllib2
+
+from query_crbug import CheckIssueClosed
+
+SRC = os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)
+sys.path.append(os.path.join(SRC, 'third_party', 'pymock'))
+
+import mock
+
+_current_directory = os.path.dirname(__file__)
+_test_data_directory = os.path.join(_current_directory, 'test_data')
+
+# These strings are simulated responses to various conditions when querying
+# the chromium issue tracker.
+CLOSED_ISSUE_DATA = open(os.path.join(_test_data_directory,
+ 'closed.json')).read()
+OPEN_ISSUE_DATA = open(os.path.join(_test_data_directory,
+ 'open.json')).read()
+UNEXPECTED_FORMAT_DATA = CLOSED_ISSUE_DATA.replace('issues$state', 'gibberish')
+BROKEN_ISSUE_DATA = "\n<HTML><HEAD><TITLE>Not a JSON Doc</TITLE></HEAD></HTML>"
+
+
+class MockResponse(object):
+ def __init__(self, result):
+ self._result = result
+
+ def read(self):
+ return self._result
+
+
+def MockUrlOpen(url):
+  """Fake urllib2.urlopen that serves canned responses keyed by bug id.
+
+  Note that these strings DO NOT represent http responses. They are just
+  memorable numeric bug ids to use.
+  """
+  if '200' in url:
+    return MockResponse(CLOSED_ISSUE_DATA)
+  elif '201' in url:
+    return MockResponse(OPEN_ISSUE_DATA)
+  elif '300' in url:
+    return MockResponse(UNEXPECTED_FORMAT_DATA)
+  elif '403' in url:
+    raise urllib2.URLError('')
+  elif '404' in url:
+    return MockResponse('')
+  elif '500' in url:
+    return MockResponse(BROKEN_ISSUE_DATA)
+  # Any other id falls through and implicitly returns None.
+
+
+class crbugQueryTest(unittest.TestCase):
+  """Tests for CheckIssueClosed, with urllib2.urlopen patched.
+
+  The numeric ids passed below are not HTTP status codes; they are
+  memorable bug ids that MockUrlOpen maps to canned responses.
+  """
+
+  @mock.patch('urllib2.urlopen', MockUrlOpen)
+  def testClosedIssueIsClosed(self):
+    self.assertTrue(CheckIssueClosed(200))
+
+  @mock.patch('urllib2.urlopen', MockUrlOpen)
+  def testOpenIssueIsNotClosed(self):
+    self.assertFalse(CheckIssueClosed(201))
+
+  @mock.patch('urllib2.urlopen', MockUrlOpen)
+  def testUnexpectedFormat(self):
+    self.assertFalse(CheckIssueClosed(300))
+
+  @mock.patch('urllib2.urlopen', MockUrlOpen)
+  def testUrlError(self):
+    self.assertFalse(CheckIssueClosed(403))
+
+  @mock.patch('urllib2.urlopen', MockUrlOpen)
+  def testEmptyResponse(self):
+    self.assertFalse(CheckIssueClosed(404))
+
+  @mock.patch('urllib2.urlopen', MockUrlOpen)
+  def testBrokenResponse(self):
+    self.assertFalse(CheckIssueClosed(500))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/chromium/tools/auto_bisect/request_build.py b/chromium/tools/auto_bisect/request_build.py
new file mode 100644
index 00000000000..0035a955acf
--- /dev/null
+++ b/chromium/tools/auto_bisect/request_build.py
@@ -0,0 +1,208 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module contains functionality for starting build try jobs via HTTP.
+
+This includes both sending a request to start a job, and also related code
+for querying the status of the job.
+
+This module can be either run as a stand-alone script to send a request to a
+builder, or imported and used by calling the public functions below.
+"""
+
+import json
+import urllib2
+
+# URL template for fetching JSON data about builds.
+BUILDER_JSON_URL = ('%(server_url)s/json/builders/%(bot_name)s/builds/'
+                    '%(build_num)s?as_text=1&filter=0')
+
+# URL template for displaying build steps.
+BUILDER_HTML_URL = '%(server_url)s/builders/%(bot_name)s/builds/%(build_num)s'
+
+# Status codes that can be returned by the GetBuildStatus method
+# From buildbot.status.builder.
+# See: http://docs.buildbot.net/current/developer/results.html
+SUCCESS, WARNINGS, FAILURE, SKIPPED, EXCEPTION, RETRY, TRYPENDING = range(7)
+# Note: the groups below are tuples; GetBuildStatus returns one of these
+# tuples (not an individual status code) as its first element.
+OK = (SUCCESS, WARNINGS) # These indicate build is complete.
+FAILED = (FAILURE, EXCEPTION, SKIPPED) # These indicate build failure.
+PENDING = (RETRY, TRYPENDING) # These indicate in progress or in pending queue.
+
+
+class ServerAccessError(Exception):
+  """Indicates that the buildbot server could not be reached."""
+
+  def __str__(self):
+    # args[0] holds the message supplied at construction time.
+    return '%s\nSorry, cannot connect to server.' % self.args[0]
+
+
+def _IsBuildRunning(build_data):
+ """Checks whether the build is in progress on buildbot.
+
+ Presence of currentStep element in build JSON indicates build is in progress.
+
+ Args:
+ build_data: A dictionary with build data, loaded from buildbot JSON API.
+
+ Returns:
+ True if build is in progress, otherwise False.
+ """
+ current_step = build_data.get('currentStep')
+ if (current_step and current_step.get('isStarted') and
+ current_step.get('results') is None):
+ return True
+ return False
+
+
+def _IsBuildFailed(build_data):
+ """Checks whether the build failed on buildbot.
+
+ Sometime build status is marked as failed even though compile and packaging
+ steps are successful. This may happen due to some intermediate steps of less
+ importance such as gclient revert, generate_telemetry_profile are failed.
+ Therefore we do an addition check to confirm if build was successful by
+ calling _IsBuildSuccessful.
+
+ Args:
+ build_data: A dictionary with build data, loaded from buildbot JSON API.
+
+ Returns:
+ True if revision is failed build, otherwise False.
+ """
+ if (build_data.get('results') in FAILED and
+ not _IsBuildSuccessful(build_data)):
+ return True
+ return False
+
+
+def _IsBuildSuccessful(build_data):
+ """Checks whether the build succeeded on buildbot.
+
+ We treat build as successful if the package_build step is completed without
+ any error i.e., when results attribute of the this step has value 0 or 1
+ in its first element.
+
+ Args:
+ build_data: A dictionary with build data, loaded from buildbot JSON API.
+
+ Returns:
+ True if revision is successfully build, otherwise False.
+ """
+ if build_data.get('steps'):
+ for item in build_data.get('steps'):
+ # The 'results' attribute of each step consists of two elements,
+ # results[0]: This represents the status of build step.
+ # See: http://docs.buildbot.net/current/developer/results.html
+ # results[1]: List of items, contains text if step fails, otherwise empty.
+ if (item.get('name') == 'package_build' and
+ item.get('isFinished') and
+ item.get('results')[0] in OK):
+ return True
+ return False
+
+
+def _FetchBuilderData(builder_url):
+  """Fetches JSON data for the all the builds from the try server.
+
+  Args:
+    builder_url: A try server URL to fetch builds information.
+
+  Returns:
+    The raw response body (a JSON string) on success, otherwise None.
+  """
+  data = None
+  try:
+    url = urllib2.urlopen(builder_url)
+  except urllib2.URLError as e:
+    # Treat an unreachable status page as "no data" rather than failing.
+    print ('urllib2.urlopen error %s, waterfall status page down.[%s]' % (
+        builder_url, str(e)))
+    return None
+  if url is not None:
+    try:
+      data = url.read()
+    except IOError as e:
+      print 'urllib2 file object read error %s, [%s].' % (builder_url, str(e))
+  return data
+
+
+def _GetBuildData(buildbot_url):
+ """Gets build information for the given build id from the try server.
+
+ Args:
+ buildbot_url: A try server URL to fetch build information.
+
+ Returns:
+ A dictionary with build information if build exists, otherwise None.
+ """
+ builds_json = _FetchBuilderData(buildbot_url)
+ if builds_json:
+ return json.loads(builds_json)
+ return None
+
+
+def GetBuildStatus(build_num, bot_name, server_url):
+  """Gets build status from the buildbot status page for a given build number.
+
+  Args:
+    build_num: A build number on try server to determine its status.
+    bot_name: Name of the bot where the build information is scanned.
+    server_url: URL of the buildbot.
+
+  Returns:
+    A pair whose first element is one of the status tuples FAILED, OK or
+    PENDING (defined above) and whose second element is a link to the build
+    status page on the waterfall, or None if no build number was given or
+    no build data could be fetched.
+  """
+  results_url = None
+  if build_num:
+    # Get the URL for requesting JSON data with status information.
+    buildbot_url = BUILDER_JSON_URL % {
+        'server_url': server_url,
+        'bot_name': bot_name,
+        'build_num': build_num,
+    }
+    build_data = _GetBuildData(buildbot_url)
+    if build_data:
+      # Link to build on the buildbot showing status of build steps.
+      results_url = BUILDER_HTML_URL % {
+          'server_url': server_url,
+          'bot_name': bot_name,
+          'build_num': build_num,
+      }
+      if _IsBuildFailed(build_data):
+        return (FAILED, results_url)
+
+      elif _IsBuildSuccessful(build_data):
+        return (OK, results_url)
+  # Anything else (no data yet, build still running) is reported as pending.
+  return (PENDING, results_url)
+
+
+def GetBuildNumFromBuilder(build_reason, bot_name, server_url):
+ """Gets build number on build status page for a given 'build reason'.
+
+ This function parses the JSON data from buildbot page and collects basic
+ information about the all the builds, and then uniquely identifies the build
+ based on the 'reason' attribute in the JSON data about the build.
+
+ The 'reason' attribute set is when a build request is posted, and it is used
+ to identify the build on status page.
+
+ Args:
+ build_reason: A unique build name set to build on try server.
+ bot_name: Name of the bot where the build information is scanned.
+ server_url: URL of the buildbot.
+
+ Returns:
+ A build number as a string if found, otherwise None.
+ """
+ buildbot_url = BUILDER_JSON_URL % {
+ 'server_url': server_url,
+ 'bot_name': bot_name,
+ 'build_num': '_all',
+ }
+ builds_json = _FetchBuilderData(buildbot_url)
+ if builds_json:
+ builds_data = json.loads(builds_json)
+ for current_build in builds_data:
+ if builds_data[current_build].get('reason') == build_reason:
+ return builds_data[current_build].get('number')
+ return None
diff --git a/chromium/tools/auto_bisect/run_tests b/chromium/tools/auto_bisect/run_tests
new file mode 100755
index 00000000000..1da4c6c20f0
--- /dev/null
+++ b/chromium/tools/auto_bisect/run_tests
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs all tests in all unit test modules in this directory."""
+
+import os
+import sys
+import unittest
+import logging
+
+SRC = os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir)
+
+
+def main():
+  """Discovers and runs all *_test.py modules in this directory.
+
+  Recognized command line flags (checked via simple membership in sys.argv):
+  'full-log' for verbose DEBUG logging with line numbers, 'no-log' for the
+  logging module's default behavior (WARN and above to stderr).
+
+  Returns:
+    0 if all tests passed, 1 otherwise.
+  """
+  if 'full-log' in sys.argv:
+    # Configure logging to show line numbers and logging level
+    fmt = '%(module)s:%(lineno)d - %(levelname)s: %(message)s'
+    logging.basicConfig(level=logging.DEBUG, stream=sys.stdout, format=fmt)
+  elif 'no-log' in sys.argv:
+    # Only WARN and above are shown, to standard error. (This is the logging
+    # module default config, hence we do nothing here)
+    pass
+  else:
+    # Behave as before. Make logging.info mimic print behavior
+    fmt = '%(message)s'
+    logging.basicConfig(level=logging.INFO, stream=sys.stdout, format=fmt)
+
+  # Running the tests depends on having the below modules in PYTHONPATH.
+  sys.path.append(os.path.join(SRC, 'third_party', 'catapult', 'telemetry'))
+  sys.path.append(os.path.join(SRC, 'third_party', 'pymock'))
+
+  suite = unittest.TestSuite()
+  loader = unittest.TestLoader()
+  script_dir = os.path.dirname(__file__)
+  suite.addTests(loader.discover(start_dir=script_dir, pattern='*_test.py'))
+
+  print 'Running unit tests in %s...' % os.path.abspath(script_dir)
+  result = unittest.TextTestRunner(verbosity=1).run(suite)
+  return 0 if result.wasSuccessful() else 1
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/chromium/tools/auto_bisect/source_control.py b/chromium/tools/auto_bisect/source_control.py
new file mode 100644
index 00000000000..f528f244bfb
--- /dev/null
+++ b/chromium/tools/auto_bisect/source_control.py
@@ -0,0 +1,232 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module contains functions for performing source control operations."""
+
+import bisect_utils
+
+
+def IsInGitRepository():
+ output, _ = bisect_utils.RunGit(['rev-parse', '--is-inside-work-tree'])
+ return output.strip() == 'true'
+
+
+def GetRevisionList(end_revision_hash, start_revision_hash, cwd=None):
+  """Retrieves a list of git commit hashes in a range.
+
+  Args:
+    end_revision_hash: The SHA1 for the end of the range, inclusive.
+    start_revision_hash: The SHA1 for the beginning of the range, inclusive.
+    cwd: Working directory for the git invocation (optional).
+
+  Returns:
+    A list of the git commit hashes in the range, in reverse time order --
+    that is, starting with |end_revision_hash|.
+  """
+  revision_range = '%s..%s' % (start_revision_hash, end_revision_hash)
+  # -10000 caps the number of commits listed. The A..B syntax excludes the
+  # start revision itself, so it is appended explicitly below.
+  cmd = ['log', '--format=%H', '-10000', '--first-parent', revision_range]
+  log_output = bisect_utils.CheckRunGit(cmd, cwd=cwd)
+
+  revision_hash_list = log_output.split()
+  revision_hash_list.append(start_revision_hash)
+
+  return revision_hash_list
+
+
+def SyncToRevision(revision, sync_client=None):
+  """Syncs or checks out a revision based on sync_client argument.
+
+  Args:
+    revision: Git hash for the solutions with the format <repo>@rev.
+      E.g., "src@2ae43f...", "src/third_party/webkit@asr1234" etc.
+    sync_client: None (or any falsy value) to do a plain git checkout, or
+      the string 'gclient' to sync via gclient; any other truthy value
+      raises NotImplementedError.
+
+  Returns:
+    True if sync or checkout is successful, False otherwise.
+
+  Raises:
+    NotImplementedError: If sync_client is an unsupported truthy value.
+  """
+  if not sync_client:
+    _, return_code = bisect_utils.RunGit(['checkout', revision])
+  elif sync_client == 'gclient':
+    return_code = bisect_utils.RunGClientAndSync([revision])
+  else:
+    raise NotImplementedError('Unsupported sync_client: "%s"' % sync_client)
+
+  return not return_code
+
+
+def GetCurrentRevision(cwd=None):
+ """Gets current revision of the given repository."""
+ return bisect_utils.CheckRunGit(['rev-parse', 'HEAD'], cwd=cwd).strip()
+
+
+def ResolveToRevision(revision_to_check, depot, depot_deps_dict,
+                      search, cwd=None):
+  """Tries to resolve an SVN revision or commit position to a git SHA1.
+
+  Args:
+    revision_to_check: The user supplied revision string that may need to be
+      resolved to a git commit hash. This may be an SVN revision, git commit
+      position, or a git commit hash.
+    depot: The depot (dependency repository) that |revision_to_check| is from.
+    depot_deps_dict: A dictionary with information about different depots.
+    search: How many revisions forward or backward to search. If the value is
+      negative, the function will search backwards chronologically, otherwise
+      it will search forward.
+    cwd: Working directory for the git invocations (optional).
+
+  Returns:
+    A string containing a git SHA1 hash, otherwise None.
+  """
+  # Android-chrome is git only, so no need to resolve this to anything else.
+  if depot == 'android-chrome':
+    return revision_to_check
+
+  # If the given revision can't be parsed as an integer, then it may already
+  # be a git commit hash.
+  if not bisect_utils.IsStringInt(revision_to_check):
+    return revision_to_check
+
+  depot_svn = 'svn://svn.chromium.org/chrome/trunk/src'
+
+  # Non-chromium depots carry their own SVN URL in the deps dictionary.
+  if depot != 'chromium':
+    depot_svn = depot_deps_dict[depot]['svn']
+  svn_revision = int(revision_to_check)
+  git_revision = None
+
+  if search > 0:
+    search_range = xrange(svn_revision, svn_revision + search, 1)
+  else:
+    search_range = xrange(svn_revision, svn_revision + search, -1)
+
+  for i in search_range:
+    # NOTE: Checking for the git-svn-id footer is for backwards compatibility.
+    # When we can assume that all the revisions we care about are from after
+    # git commit positions started getting added, we don't need to check this.
+    svn_pattern = 'git-svn-id: %s@%d' % (depot_svn, i)
+    commit_position_pattern = '^Cr-Commit-Position: .*@{#%d}' % i
+    cmd = ['log', '--format=%H', '-1', '--grep', svn_pattern,
+           '--grep', commit_position_pattern, 'origin/master']
+    log_output = bisect_utils.CheckRunGit(cmd, cwd=cwd)
+    log_output = log_output.strip()
+
+    if log_output:
+      git_revision = log_output
+      break
+
+  return git_revision
+
+
+def IsInProperBranch():
+ """Checks whether the current branch is "master"."""
+ cmd = ['rev-parse', '--abbrev-ref', 'HEAD']
+ log_output = bisect_utils.CheckRunGit(cmd)
+ log_output = log_output.strip()
+ return log_output == 'master'
+
+
+def GetCommitPosition(git_revision, cwd=None):
+  """Finds git commit position for the given git hash.
+
+  This function executes the "git footers --position-num <git hash>" command
+  to get the commit position of the given revision.
+
+  Args:
+    git_revision: The git SHA1 to use.
+    cwd: Working directory to run the command from.
+
+  Returns:
+    Git commit position as integer or None.
+  """
+  # Some repositories (e.g. skia, angle) are pure git and have no commit
+  # position; for those the command fails and None is returned.
+  cmd = ['footers', '--position-num', git_revision]
+  output, return_code = bisect_utils.RunGit(cmd, cwd)
+  if not return_code:
+    commit_position = output.strip()
+    if bisect_utils.IsStringInt(commit_position):
+      return int(commit_position)
+  return None
+
+
+def GetCommitTime(git_revision, cwd=None):
+ """Returns commit time for the given revision in UNIX timestamp."""
+ cmd = ['log', '--format=%ct', '-1', git_revision]
+ output = bisect_utils.CheckRunGit(cmd, cwd=cwd)
+ return int(output)
+
+
+def QueryRevisionInfo(revision, cwd=None):
+ """Gathers information on a particular revision, such as author's name,
+ email, subject, and date.
+
+ Args:
+ revision: Revision you want to gather information on; a git commit hash.
+
+ Returns:
+ A dict in the following format:
+ {
+ 'author': %s,
+ 'email': %s,
+ 'date': %s,
+ 'subject': %s,
+ 'body': %s,
+ }
+ """
+ commit_info = {}
+
+ formats = ['%aN', '%aE', '%s', '%cD', '%b']
+ targets = ['author', 'email', 'subject', 'date', 'body']
+
+ for i in xrange(len(formats)):
+ cmd = ['log', '--format=%s' % formats[i], '-1', revision]
+ output = bisect_utils.CheckRunGit(cmd, cwd=cwd)
+ commit_info[targets[i]] = output.rstrip()
+
+ return commit_info
+
+
+def CheckoutFileAtRevision(file_name, revision, cwd=None):
+ """Performs a checkout on a file at the given revision.
+
+ Returns:
+ True if successful.
+ """
+ command = ['checkout', revision, file_name]
+ _, return_code = bisect_utils.RunGit(command, cwd=cwd)
+ return not return_code
+
+
+def RevertFileToHead(file_name):
+  """Un-stages a file and resets the file's state to HEAD.
+
+  Returns:
+    True if successful.
+  """
+  # Reset doesn't seem to return 0 on success.
+  bisect_utils.RunGit(['reset', 'HEAD', file_name])
+  # NOTE(review): the checkout below targets bisect_utils.FILE_DEPS_GIT,
+  # not file_name -- confirm callers only ever pass the DEPS file here.
+  _, return_code = bisect_utils.RunGit(
+      ['checkout', bisect_utils.FILE_DEPS_GIT])
+  return not return_code
+
+
+def QueryFileRevisionHistory(filename, revision_start, revision_end):
+ """Returns a list of commits that modified this file.
+
+ Args:
+ filename: Name of file.
+ revision_start: Start of revision range (inclusive).
+ revision_end: End of revision range.
+
+ Returns:
+ Returns a list of commits that touched this file.
+ """
+ cmd = [
+ 'log',
+ '--format=%H',
+ '%s~1..%s' % (revision_start, revision_end),
+ '--',
+ filename,
+ ]
+ output = bisect_utils.CheckRunGit(cmd)
+ lines = output.split('\n')
+ return [o for o in lines if o]
diff --git a/chromium/tools/auto_bisect/source_control_test.py b/chromium/tools/auto_bisect/source_control_test.py
new file mode 100644
index 00000000000..e8f05d30720
--- /dev/null
+++ b/chromium/tools/auto_bisect/source_control_test.py
@@ -0,0 +1,83 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the source_control module."""
+
+import unittest
+import mock
+
+import source_control
+
+
+class SourceControlTest(unittest.TestCase):
+  """Tests for source_control, with bisect_utils git calls mocked out."""
+
+  @mock.patch('source_control.bisect_utils.CheckRunGit')
+  def testQueryRevisionInfo(self, mock_run_git):
+    # The QueryRevisionInfo function should run a sequence of git commands,
+    # then returns a dict with the results.
+    command_output_map = [
+        (['log', '--format=%aN', '-1', 'abcd1234'], 'Some Name\n'),
+        (['log', '--format=%aE', '-1', 'abcd1234'], 'somename@x.com'),
+        (['log', '--format=%s', '-1', 'abcd1234'], 'Commit subject '),
+        (['log', '--format=%cD', '-1', 'abcd1234'], 'Fri, 10 Oct 2014'),
+        (['log', '--format=%b', '-1', 'abcd1234'], 'Commit body\n'),
+    ]
+    _SetMockCheckRunGitBehavior(mock_run_git, command_output_map)
+    # The result of calling QueryRevisionInfo is a dictionary like that below.
+    # Trailing whitespace is stripped.
+    expected = {
+        'author': 'Some Name',
+        'email': 'somename@x.com',
+        'date': 'Fri, 10 Oct 2014',
+        'subject': 'Commit subject',
+        'body': 'Commit body',
+    }
+    self.assertEqual(expected, source_control.QueryRevisionInfo('abcd1234'))
+    self.assertEqual(5, mock_run_git.call_count)
+
+  def testResolveToRevision_InputGitHash(self):
+    # The ResolveToRevision function returns a git commit hash corresponding
+    # to the input, so if the input can't be parsed as an int, it is returned.
+    self.assertEqual(
+        'abcd1234',
+        source_control.ResolveToRevision('abcd1234', 'chromium', {}, 5))
+
+    # Note: It actually does this for any junk that isn't an int. This isn't
+    # necessarily desired behavior.
+    self.assertEqual(
+        'foo bar',
+        source_control.ResolveToRevision('foo bar', 'chromium', {}, 5))
+
+  @mock.patch('source_control.bisect_utils.CheckRunGit')
+  def testResolveToRevision_NotFound(self, mock_run_git):
+    # If no corresponding git hash was found, then None is returned.
+    mock_run_git.return_value = ''
+    self.assertIsNone(
+        source_control.ResolveToRevision('12345', 'chromium', {}, 5))
+
+  @mock.patch('source_control.bisect_utils.CheckRunGit')
+  def testResolveToRevision_Found(self, mock_run_git):
+    # In general, ResolveToRevision finds a git commit hash by repeatedly
+    # calling "git log --grep ..." with different numbers until something
+    # matches.
+    mock_run_git.return_value = 'abcd1234'
+    self.assertEqual(
+        'abcd1234',
+        source_control.ResolveToRevision('12345', 'chromium', {}, 5))
+    self.assertEqual(1, mock_run_git.call_count)
+
+
+def _SetMockCheckRunGitBehavior(mock_obj, command_output_map):
+ """Sets the behavior of a mock function according to the given mapping."""
+ # Unused argument 'cwd', expected in args list but not needed.
+ # pylint: disable=W0613
+ def FakeCheckRunGit(in_command, cwd=None):
+ for command, output in command_output_map:
+ if command == in_command:
+ return output
+ mock_obj.side_effect = FakeCheckRunGit
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/auto_bisect/test_data/closed.json b/chromium/tools/auto_bisect/test_data/closed.json
new file mode 100644
index 00000000000..ab87a212b53
--- /dev/null
+++ b/chromium/tools/auto_bisect/test_data/closed.json
@@ -0,0 +1,140 @@
+{
+ "version": "1.0",
+ "encoding": "UTF-8",
+ "feed": {
+ "xmlns": "http://www.w3.org/2005/Atom",
+ "xmlns$openSearch": "http://a9.com/-/spec/opensearch/1.1/",
+ "xmlns$gd": "http://schemas.google.com/g/2005",
+ "xmlns$issues": "http://schemas.google.com/projecthosting/issues/2009",
+ "id": {
+ "$t": "http://code.google.com/feeds/issues/p/chromium/issues/full"
+ },
+ "updated": {
+ "$t": "2014-10-31T23:44:30.795Z"
+ },
+ "title": {
+ "$t": "Issues - chromium"
+ },
+ "subtitle": {
+ "$t": "Issues - chromium"
+ },
+ "link": [
+ {
+ "rel": "alternate",
+ "type": "text/html",
+ "href": "http://code.google.com/p/chromium/issues/list"
+ },
+ {
+ "rel": "http://schemas.google.com/g/2005#feed",
+ "type": "application/atom+xml",
+ "href": "https://code.google.com/feeds/issues/p/chromium/issues/full"
+ },
+ {
+ "rel": "http://schemas.google.com/g/2005#post",
+ "type": "application/atom+xml",
+ "href": "https://code.google.com/feeds/issues/p/chromium/issues/full"
+ },
+ {
+ "rel": "self",
+ "type": "application/atom+xml",
+ "href": "https://code.google.com/feeds/issues/p/chromium/issues/full?alt=json&max-results=1&id=422382"
+ }
+ ],
+ "generator": {
+ "$t": "ProjectHosting",
+ "version": "1.0",
+ "uri": "http://code.google.com/feeds/issues"
+ },
+ "openSearch$totalResults": {
+ "$t": 1
+ },
+ "openSearch$startIndex": {
+ "$t": 1
+ },
+ "openSearch$itemsPerPage": {
+ "$t": 1
+ },
+ "entry": [
+ {
+ "gd$etag": "W/\"CUUFSX47eCl7ImA9XRdQGEk.\"",
+ "id": {
+ "$t": "http://code.google.com/feeds/issues/p/chromium/issues/full/422382"
+ },
+ "published": {
+ "$t": "2014-10-10T17:07:06.000Z"
+ },
+ "updated": {
+ "$t": "2014-10-20T22:13:38.000Z"
+ },
+ "title": {
+ "$t": "11.1% regression in indexeddb_perf at 298653:298680"
+ },
+ "content": {
+ "$t": "See the link to graphs below.",
+ "type": "html"
+ },
+ "link": [
+ {
+ "rel": "replies",
+ "type": "application/atom+xml",
+ "href": "http://code.google.com/feeds/issues/p/chromium/issues/422382/comments/full"
+ },
+ {
+ "rel": "alternate",
+ "type": "text/html",
+ "href": "http://code.google.com/p/chromium/issues/detail?id=422382"
+ },
+ {
+ "rel": "self",
+ "type": "application/atom+xml",
+ "href": "https://code.google.com/feeds/issues/p/chromium/issues/full/422382"
+ }
+ ],
+ "author": [
+ {
+ "name": {
+ "$t": "gov...@chromium.org"
+ },
+ "uri": {
+ "$t": "/u/104724762920274240672/"
+ }
+ }
+ ],
+ "issues$closedDate": {
+ "$t": "2014-10-20T22:10:22.000Z"
+ },
+ "issues$id": {
+ "$t": 422382
+ },
+ "issues$label": [
+ {
+ "$t": "Type-Bug-Regression"
+ },
+ {
+ "$t": "Performance-Sheriff"
+ },
+ {
+ "$t": "Pri-2"
+ }
+ ],
+ "issues$owner": {
+ "issues$uri": {
+ "$t": "/u/104724762920274240672/"
+ },
+ "issues$username": {
+ "$t": "gov...@chromium.org"
+ }
+ },
+ "issues$stars": {
+ "$t": 0
+ },
+ "issues$state": {
+ "$t": "closed"
+ },
+ "issues$status": {
+ "$t": "WontFix"
+ }
+ }
+ ]
+ }
+}
diff --git a/chromium/tools/auto_bisect/test_data/open.json b/chromium/tools/auto_bisect/test_data/open.json
new file mode 100644
index 00000000000..e23f2f49419
--- /dev/null
+++ b/chromium/tools/auto_bisect/test_data/open.json
@@ -0,0 +1,174 @@
+{
+ "version": "1.0",
+ "encoding": "UTF-8",
+ "feed": {
+ "xmlns": "http://www.w3.org/2005/Atom",
+ "xmlns$openSearch": "http://a9.com/-/spec/opensearch/1.1/",
+ "xmlns$gd": "http://schemas.google.com/g/2005",
+ "xmlns$issues": "http://schemas.google.com/projecthosting/issues/2009",
+ "id": {
+ "$t": "http://code.google.com/feeds/issues/p/chromium/issues/full"
+ },
+ "updated": {
+ "$t": "2014-10-31T23:44:18.640Z"
+ },
+ "title": {
+ "$t": "Issues - chromium"
+ },
+ "subtitle": {
+ "$t": "Issues - chromium"
+ },
+ "link": [
+ {
+ "rel": "alternate",
+ "type": "text/html",
+ "href": "http://code.google.com/p/chromium/issues/list"
+ },
+ {
+ "rel": "http://schemas.google.com/g/2005#feed",
+ "type": "application/atom+xml",
+ "href": "https://code.google.com/feeds/issues/p/chromium/issues/full"
+ },
+ {
+ "rel": "http://schemas.google.com/g/2005#post",
+ "type": "application/atom+xml",
+ "href": "https://code.google.com/feeds/issues/p/chromium/issues/full"
+ },
+ {
+ "rel": "self",
+ "type": "application/atom+xml",
+ "href": "https://code.google.com/feeds/issues/p/chromium/issues/full?alt=json&max-results=1&id=424688"
+ }
+ ],
+ "generator": {
+ "$t": "ProjectHosting",
+ "version": "1.0",
+ "uri": "http://code.google.com/feeds/issues"
+ },
+ "openSearch$totalResults": {
+ "$t": 1
+ },
+ "openSearch$startIndex": {
+ "$t": 1
+ },
+ "openSearch$itemsPerPage": {
+ "$t": 1
+ },
+ "entry": [
+ {
+ "gd$etag": "W/\"A08NQX47eCl7ImA9XRdXFkw.\"",
+ "id": {
+ "$t": "http://code.google.com/feeds/issues/p/chromium/issues/full/424688"
+ },
+ "published": {
+ "$t": "2014-10-17T18:50:15.000Z"
+ },
+ "updated": {
+ "$t": "2014-10-29T21:58:10.000Z"
+ },
+ "title": {
+ "$t": "Should Not Start Bisect on Closed Bugs."
+ },
+ "content": {
+ "$t": "I have noticed that in some cases bisect jobs are running for closed bugs:\r\nhttps://code.google.com/p/chromium/issues/detail?id=422661\r\nhttps://code.google.com/p/chromium/issues/detail?id=422228\r\nhttps://code.google.com/p/chromium/issues/detail?id=421488\r\n\r\nIt is possible that the bugs can be marked as closed when the corresponding bisect jobs are in the queue. So to avoid bisects on such bugs, can we please add logic to bisect script to first check for bug state before running the bisect. This will save us from doing some unnecessary bisects.\r\n\r\n",
+ "type": "html"
+ },
+ "link": [
+ {
+ "rel": "replies",
+ "type": "application/atom+xml",
+ "href": "http://code.google.com/feeds/issues/p/chromium/issues/424688/comments/full"
+ },
+ {
+ "rel": "alternate",
+ "type": "text/html",
+ "href": "http://code.google.com/p/chromium/issues/detail?id=424688"
+ },
+ {
+ "rel": "self",
+ "type": "application/atom+xml",
+ "href": "https://code.google.com/feeds/issues/p/chromium/issues/full/424688"
+ }
+ ],
+ "author": [
+ {
+ "name": {
+ "$t": "anan...@chromium.org"
+ },
+ "uri": {
+ "$t": "/u/112777092906361529031/"
+ }
+ }
+ ],
+ "issues$cc": [
+ {
+ "issues$uri": {
+ "$t": "/u/116704265016059607269/"
+ },
+ "issues$username": {
+ "$t": "pras...@chromium.org"
+ }
+ },
+ {
+ "issues$uri": {
+ "$t": "/u/107012661329935444717/"
+ },
+ "issues$username": {
+ "$t": "qyears...@chromium.org"
+ }
+ },
+ {
+ "issues$uri": {
+ "$t": "/u/tonyg@chromium.org/"
+ },
+ "issues$username": {
+ "$t": "tonyg@chromium.org"
+ }
+ },
+ {
+ "issues$uri": {
+ "$t": "/u/114810703796781371055/"
+ },
+ "issues$username": {
+ "$t": "robert...@chromium.org"
+ }
+ }
+ ],
+ "issues$id": {
+ "$t": 424688
+ },
+ "issues$label": [
+ {
+ "$t": "Type-Feature"
+ },
+ {
+ "$t": "Pri-2"
+ },
+ {
+ "$t": "Cr-Tests-AutoBisect"
+ },
+ {
+ "$t": "OS-All"
+ }
+ ],
+ "issues$owner": {
+ "issues$uri": {
+ "$t": "/u/114810703796781371055/"
+ },
+ "issues$username": {
+ "$t": "robert...@chromium.org"
+ }
+ },
+ "issues$stars": {
+ "$t": 1
+ },
+ "issues$state": {
+ "$t": "open"
+ },
+ "issues$status": {
+ "$t": "Assigned"
+ }
+ }
+ ]
+ }
+}
diff --git a/chromium/tools/auto_bisect/ttest.py b/chromium/tools/auto_bisect/ttest.py
new file mode 100644
index 00000000000..fcb3a979e9f
--- /dev/null
+++ b/chromium/tools/auto_bisect/ttest.py
@@ -0,0 +1,209 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions for doing independent two-sample t-tests and looking up p-values.
+
+Note: This module was copied from the Performance Dashboard code, and changed
+to use definitions of mean and variance from math_utils instead of numpy.
+
+> A t-test is any statistical hypothesis test in which the test statistic
+> follows a Student's t distribution if the null hypothesis is supported.
+> It can be used to determine if two sets of data are significantly different
+> from each other.
+
+There are several conditions that the data under test should meet in order
+for a t-test to be completely applicable:
+ - The data should be roughly normal in distribution.
+ - The two samples that are compared should be roughly similar in size.
+
+References:
+ http://en.wikipedia.org/wiki/Student%27s_t-test
+ http://en.wikipedia.org/wiki/Welch%27s_t-test
+ https://github.com/scipy/scipy/blob/master/scipy/stats/stats.py#L3244
+"""
+
+import math
+
+import math_utils
+
+
+def WelchsTTest(sample1, sample2):
+ """Performs Welch's t-test on the two samples.
+
+ Welch's t-test is an adaptation of Student's t-test which is used when the
+ two samples may have unequal variances. It is also an independent two-sample
+ t-test.
+
+ Args:
+ sample1: A collection of numbers.
+ sample2: Another collection of numbers.
+
+ Returns:
+ A 3-tuple (t-statistic, degrees of freedom, p-value).
+ """
+ mean1 = math_utils.Mean(sample1)
+ mean2 = math_utils.Mean(sample2)
+ v1 = math_utils.Variance(sample1)
+ v2 = math_utils.Variance(sample2)
+ n1 = len(sample1)
+ n2 = len(sample2)
+ t = _TValue(mean1, mean2, v1, v2, n1, n2)
+ df = _DegreesOfFreedom(v1, v2, n1, n2)
+ p = _LookupPValue(t, df)
+ return t, df, p
+
+
+def _TValue(mean1, mean2, v1, v2, n1, n2):
+ """Calculates a t-statistic value using the formula for Welch's t-test.
+
+ The t value can be thought of as a signal-to-noise ratio; a higher t-value
+ tells you that the groups are more different.
+
+ Args:
+ mean1: Mean of sample 1.
+ mean2: Mean of sample 2.
+ v1: Variance of sample 1.
+ v2: Variance of sample 2.
+ n1: Sample size of sample 1.
+ n2: Sample size of sample 2.
+
+ Returns:
+ A t value, which may be negative or positive.
+ """
+ # If variance of both segments is zero, return some large t-value.
+ if v1 == 0 and v2 == 0:
+ return 1000.0
+ return (mean1 - mean2) / (math.sqrt(v1 / n1 + v2 / n2))
+
+
+def _DegreesOfFreedom(v1, v2, n1, n2):
+ """Calculates degrees of freedom using the Welch-Satterthwaite formula.
+
+ Degrees of freedom is a measure of sample size. For other types of tests,
+  degrees of freedom is sometimes N - 1, where N is the sample size.
+
+ Args:
+ v1: Variance of sample 1.
+ v2: Variance of sample 2.
+    n1: Size of sample 1.
+ n2: Size of sample 2.
+
+ Returns:
+ An estimate of degrees of freedom. Must be at least 1.0.
+ """
+ # When there's no variance in either sample, return 1.
+ if v1 == 0 and v2 == 0:
+ return 1
+ # If the sample size is too small, also return the minimum (1).
+ if n1 <= 1 or n2 <= 2:
+ return 1
+ df = (((v1 / n1 + v2 / n2) ** 2) /
+ ((v1 ** 2) / ((n1 ** 2) * (n1 - 1)) +
+ (v2 ** 2) / ((n2 ** 2) * (n2 - 1))))
+ return max(1, df)
+
+
+# Below is a hard-coded table for looking up p-values.
+#
+# Normally, p-values are calculated based on the t-distribution formula.
+# Looking up pre-calculated values is a less accurate but less complicated
+# alternative.
+#
+# Reference: http://www.sjsu.edu/faculty/gerstman/StatPrimer/t-table.pdf
+
+# A list of p-values for a two-tailed test. The entries correspond to the
+# entries in the rows of the table below.
+TWO_TAIL = [1, 0.20, 0.10, 0.05, 0.02, 0.01, 0.005, 0.002, 0.001]
+
+# A map of degrees of freedom to lists of t-values. The index of the t-value
+# can be used to look up the corresponding p-value.
+TABLE = {
+ 1: [0, 3.078, 6.314, 12.706, 31.820, 63.657, 127.321, 318.309, 636.619],
+ 2: [0, 1.886, 2.920, 4.303, 6.965, 9.925, 14.089, 22.327, 31.599],
+ 3: [0, 1.638, 2.353, 3.182, 4.541, 5.841, 7.453, 10.215, 12.924],
+ 4: [0, 1.533, 2.132, 2.776, 3.747, 4.604, 5.598, 7.173, 8.610],
+ 5: [0, 1.476, 2.015, 2.571, 3.365, 4.032, 4.773, 5.893, 6.869],
+ 6: [0, 1.440, 1.943, 2.447, 3.143, 3.707, 4.317, 5.208, 5.959],
+ 7: [0, 1.415, 1.895, 2.365, 2.998, 3.499, 4.029, 4.785, 5.408],
+ 8: [0, 1.397, 1.860, 2.306, 2.897, 3.355, 3.833, 4.501, 5.041],
+ 9: [0, 1.383, 1.833, 2.262, 2.821, 3.250, 3.690, 4.297, 4.781],
+ 10: [0, 1.372, 1.812, 2.228, 2.764, 3.169, 3.581, 4.144, 4.587],
+ 11: [0, 1.363, 1.796, 2.201, 2.718, 3.106, 3.497, 4.025, 4.437],
+ 12: [0, 1.356, 1.782, 2.179, 2.681, 3.055, 3.428, 3.930, 4.318],
+ 13: [0, 1.350, 1.771, 2.160, 2.650, 3.012, 3.372, 3.852, 4.221],
+ 14: [0, 1.345, 1.761, 2.145, 2.625, 2.977, 3.326, 3.787, 4.140],
+ 15: [0, 1.341, 1.753, 2.131, 2.602, 2.947, 3.286, 3.733, 4.073],
+ 16: [0, 1.337, 1.746, 2.120, 2.584, 2.921, 3.252, 3.686, 4.015],
+ 17: [0, 1.333, 1.740, 2.110, 2.567, 2.898, 3.222, 3.646, 3.965],
+ 18: [0, 1.330, 1.734, 2.101, 2.552, 2.878, 3.197, 3.610, 3.922],
+ 19: [0, 1.328, 1.729, 2.093, 2.539, 2.861, 3.174, 3.579, 3.883],
+ 20: [0, 1.325, 1.725, 2.086, 2.528, 2.845, 3.153, 3.552, 3.850],
+ 21: [0, 1.323, 1.721, 2.080, 2.518, 2.831, 3.135, 3.527, 3.819],
+ 22: [0, 1.321, 1.717, 2.074, 2.508, 2.819, 3.119, 3.505, 3.792],
+ 23: [0, 1.319, 1.714, 2.069, 2.500, 2.807, 3.104, 3.485, 3.768],
+ 24: [0, 1.318, 1.711, 2.064, 2.492, 2.797, 3.090, 3.467, 3.745],
+ 25: [0, 1.316, 1.708, 2.060, 2.485, 2.787, 3.078, 3.450, 3.725],
+ 26: [0, 1.315, 1.706, 2.056, 2.479, 2.779, 3.067, 3.435, 3.707],
+ 27: [0, 1.314, 1.703, 2.052, 2.473, 2.771, 3.057, 3.421, 3.690],
+ 28: [0, 1.313, 1.701, 2.048, 2.467, 2.763, 3.047, 3.408, 3.674],
+ 29: [0, 1.311, 1.699, 2.045, 2.462, 2.756, 3.038, 3.396, 3.659],
+ 30: [0, 1.310, 1.697, 2.042, 2.457, 2.750, 3.030, 3.385, 3.646],
+ 31: [0, 1.309, 1.695, 2.040, 2.453, 2.744, 3.022, 3.375, 3.633],
+ 32: [0, 1.309, 1.694, 2.037, 2.449, 2.738, 3.015, 3.365, 3.622],
+ 33: [0, 1.308, 1.692, 2.035, 2.445, 2.733, 3.008, 3.356, 3.611],
+ 34: [0, 1.307, 1.691, 2.032, 2.441, 2.728, 3.002, 3.348, 3.601],
+ 35: [0, 1.306, 1.690, 2.030, 2.438, 2.724, 2.996, 3.340, 3.591],
+ 36: [0, 1.306, 1.688, 2.028, 2.434, 2.719, 2.991, 3.333, 3.582],
+ 37: [0, 1.305, 1.687, 2.026, 2.431, 2.715, 2.985, 3.326, 3.574],
+ 38: [0, 1.304, 1.686, 2.024, 2.429, 2.712, 2.980, 3.319, 3.566],
+ 39: [0, 1.304, 1.685, 2.023, 2.426, 2.708, 2.976, 3.313, 3.558],
+ 40: [0, 1.303, 1.684, 2.021, 2.423, 2.704, 2.971, 3.307, 3.551],
+ 42: [0, 1.302, 1.682, 2.018, 2.418, 2.698, 2.963, 3.296, 3.538],
+ 44: [0, 1.301, 1.680, 2.015, 2.414, 2.692, 2.956, 3.286, 3.526],
+ 46: [0, 1.300, 1.679, 2.013, 2.410, 2.687, 2.949, 3.277, 3.515],
+ 48: [0, 1.299, 1.677, 2.011, 2.407, 2.682, 2.943, 3.269, 3.505],
+ 50: [0, 1.299, 1.676, 2.009, 2.403, 2.678, 2.937, 3.261, 3.496],
+ 60: [0, 1.296, 1.671, 2.000, 2.390, 2.660, 2.915, 3.232, 3.460],
+ 70: [0, 1.294, 1.667, 1.994, 2.381, 2.648, 2.899, 3.211, 3.435],
+ 80: [0, 1.292, 1.664, 1.990, 2.374, 2.639, 2.887, 3.195, 3.416],
+ 90: [0, 1.291, 1.662, 1.987, 2.369, 2.632, 2.878, 3.183, 3.402],
+ 100: [0, 1.290, 1.660, 1.984, 2.364, 2.626, 2.871, 3.174, 3.391],
+ 120: [0, 1.289, 1.658, 1.980, 2.358, 2.617, 2.860, 3.160, 3.373],
+ 150: [0, 1.287, 1.655, 1.976, 2.351, 2.609, 2.849, 3.145, 3.357],
+ 200: [0, 1.286, 1.652, 1.972, 2.345, 2.601, 2.839, 3.131, 3.340],
+ 300: [0, 1.284, 1.650, 1.968, 2.339, 2.592, 2.828, 3.118, 3.323],
+ 500: [0, 1.283, 1.648, 1.965, 2.334, 2.586, 2.820, 3.107, 3.310],
+}
+
+
+def _LookupPValue(t, df):
+ """Looks up a p-value in a t-distribution table.
+
+ Args:
+ t: A t statistic value; the result of a t-test.
+ df: Number of degrees of freedom.
+
+ Returns:
+ A p-value, which represents the likelihood of obtaining a result at least
+ as extreme as the one observed just by chance (the null hypothesis).
+ """
+ assert df >= 1, 'Degrees of freedom must be positive'
+
+ # We ignore the negative sign on the t-value because our null hypothesis
+ # is that the two samples are the same; our alternative hypothesis is that
+ # the second sample is lesser OR greater than the first.
+ t = abs(t)
+
+ def GreatestSmaller(nums, target):
+ """Returns the largest number that is <= the target number."""
+ lesser_equal = [n for n in nums if n <= target]
+ assert lesser_equal, 'No number in number list <= target.'
+ return max(lesser_equal)
+
+ df_key = GreatestSmaller(TABLE.keys(), df)
+ t_table_row = TABLE[df_key]
+ approximate_t_value = GreatestSmaller(t_table_row, t)
+ t_value_index = t_table_row.index(approximate_t_value)
+
+ return TWO_TAIL[t_value_index]
diff --git a/chromium/tools/auto_bisect/ttest_test.py b/chromium/tools/auto_bisect/ttest_test.py
new file mode 100644
index 00000000000..b457d090358
--- /dev/null
+++ b/chromium/tools/auto_bisect/ttest_test.py
@@ -0,0 +1,130 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for ttest module."""
+
+import unittest
+
+import ttest
+
+
+# This test case accesses private functions of the ttest module.
+# pylint: disable=W0212
+class TTestTest(unittest.TestCase):
+ """Tests for the t-test functions."""
+
+ def testWelchsFormula(self):
+ """Tests calculation of the t value."""
+ # Results can be verified by directly plugging variables into Welch's
+ # equation (e.g. using a calculator or the Python interpreter).
+ self.assertEqual(
+ -0.2796823595120407,
+ ttest._TValue(0.299, 0.307, 0.05, 0.08, 150, 165))
+
+ # Note that a negative t value is obtained when the first sample has a
+ # smaller mean than the second, otherwise a positive value is returned.
+ self.assertEqual(
+ 0.2796823595120407,
+ ttest._TValue(0.307, 0.299, 0.08, 0.05, 165, 150))
+
+ def testWelchSatterthwaiteFormula(self):
+ """Tests calculation of estimated degrees of freedom."""
+ # Note that since the Welch-Satterthwaite equation gives an estimate of
+ # degrees of freedom, the result may not be an integer.
+ self.assertEqual(
+ 307.1987997516727,
+ ttest._DegreesOfFreedom(0.05, 0.08, 150, 165))
+
+ def testWelchsTTest(self):
+ """Tests the t value and degrees of freedom output of Welch's t-test."""
+ # The t-value can be checked with scipy.stats.ttest_ind(equal_var=False).
+ t, df, _ = ttest.WelchsTTest([2, 3, 2, 3, 2, 3], [4, 5, 4, 5, 4, 5])
+ self.assertAlmostEqual(10.0, df)
+
+ # The t-value produced by scipy.stats.ttest_ind is -6.32455532034.
+ # Our function produces slightly different results.
+ # Possibly due to differences in rounding error?
+ self.assertAlmostEqual(-6.325, t, delta=1.0)
+
+ def testTTestEqualSamples(self):
+ """Checks that t = 0 and p = 1 when the samples are the same."""
+ t, _, p = ttest.WelchsTTest([1, 2, 3], [1, 2, 3])
+ self.assertEqual(0, t)
+ self.assertEqual(1, p)
+
+ t, _, p = ttest.WelchsTTest([1, 2], [1, 2])
+ self.assertEqual(0, t)
+ self.assertEqual(1, p)
+
+ def testTTestVeryDifferentSamples(self):
+ """Checks that p is very low when the samples are clearly different."""
+ t, _, p = ttest.WelchsTTest(
+ [100, 101, 100, 101, 100], [1, 2, 1, 2, 1, 2, 1, 2])
+ self.assertGreaterEqual(t, 250)
+ self.assertLessEqual(p, 0.01)
+
+ def testTTestVariance(self):
+ """Verifies that higher variance -> higher p value."""
+ _, _, p_low_var = ttest.WelchsTTest([2, 3, 2, 3], [4, 5, 4, 5])
+ _, _, p_high_var = ttest.WelchsTTest([1, 4, 1, 4], [3, 6, 3, 6])
+ self.assertLess(p_low_var, p_high_var)
+
+ def testTTestSampleSize(self):
+ """Verifies that smaller sample size -> higher p value."""
+ _, _, p_larger_sample = ttest.WelchsTTest([2, 3, 2, 3], [4, 5, 4, 5])
+ _, _, p_smaller_sample = ttest.WelchsTTest([2, 3, 2, 3], [4, 5])
+ self.assertLess(p_larger_sample, p_smaller_sample)
+
+ def testTTestMeanDifference(self):
+ """Verifies that smaller difference between means -> higher p value."""
+ _, _, p_far_means = ttest.WelchsTTest([2, 3, 2, 3], [5, 6, 5, 6])
+ _, _, p_near_means = ttest.WelchsTTest([2, 3, 2, 3], [3, 4, 3, 4])
+ self.assertLess(p_far_means, p_near_means)
+
+
+class LookupTableTest(unittest.TestCase):
+ """Tests for functionality related to lookup of p-values in a table."""
+
+ def setUp(self):
+ self.original_TWO_TAIL = ttest.TWO_TAIL
+ self.original_TABLE = ttest.TABLE
+ ttest.TWO_TAIL = [1, 0.2, 0.1, 0.05, 0.02, 0.01]
+ ttest.TABLE = {
+ 1: [0, 6.314, 12.71, 31.82, 63.66, 318.31],
+ 2: [0, 2.920, 4.303, 6.965, 9.925, 22.327],
+ 3: [0, 2.353, 3.182, 4.541, 5.841, 10.215],
+ 4: [0, 2.132, 2.776, 3.747, 4.604, 7.173],
+ }
+
+ def tearDown(self):
+ ttest.TWO_TAIL = self.original_TWO_TAIL
+ ttest.TABLE = self.original_TABLE
+
+ def testLookupExactMatch(self):
+ """Tests a lookup when there is an exact match."""
+ self.assertEqual(0.1, ttest._LookupPValue(3.182, 3))
+ self.assertEqual(0.1, ttest._LookupPValue(-3.182, 3))
+
+ def testLookupAbove(self):
+ """Tests a lookup when the given value is above an entry in the table."""
+ self.assertEqual(0.2, ttest._LookupPValue(3.1, 2))
+ self.assertEqual(0.2, ttest._LookupPValue(-3.1, 2))
+
+ def testLookupLargeTValue(self):
+ """Tests a lookup when the given t-value is very large."""
+ self.assertEqual(0.01, ttest._LookupPValue(500.0, 1))
+ self.assertEqual(0.01, ttest._LookupPValue(-500.0, 1))
+
+ def testLookupZeroTValue(self):
+ """Tests a lookup when the given t-value is zero."""
+ self.assertEqual(1, ttest._LookupPValue(0.0, 1))
+ self.assertEqual(1, ttest._LookupPValue(0.0, 2))
+
+ def testLookupLargeDF(self):
+ """Tests a lookup when the given degrees of freedom is large."""
+ self.assertEqual(0.02, ttest._LookupPValue(5.0, 50))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/bash-completion b/chromium/tools/bash-completion
new file mode 100644
index 00000000000..19172dab145
--- /dev/null
+++ b/chromium/tools/bash-completion
@@ -0,0 +1,25 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Flag completion rule for bash.
+# To load in your shell, "source path/to/this/file".
+
+chrome_source=$(cd $(dirname $BASH_SOURCE)/.. && pwd)
+
+_chrome_flag() {
+ local cur targets
+ cur="${COMP_WORDS[COMP_CWORD]}"
+ targets=$(cd $chrome_source; \
+ git ls-files '*switches*' | \
+ xargs sed -ne 's/^[^/]*"\([^" /]\{1,\}\)".*/--\1/p')
+ COMPREPLY=($(compgen -W "$targets" -- "$cur"))
+ return 0
+}
+
+complete -F _chrome_flag google-chrome
+complete -F _chrome_flag chrome
+if [ $(uname) = "Darwin" ]
+then
+ complete -F _chrome_flag Chromium
+fi
diff --git a/chromium/tools/battor_agent/BUILD.gn b/chromium/tools/battor_agent/BUILD.gn
new file mode 100644
index 00000000000..41f89e7dafa
--- /dev/null
+++ b/chromium/tools/battor_agent/BUILD.gn
@@ -0,0 +1,62 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//testing/test.gni")
+
+# Works only on desktop platforms.
+assert(is_win || is_linux || is_mac)
+
+executable("battor_agent") {
+ sources = [
+ "battor_agent_bin.cc",
+ ]
+ deps = [
+ ":battor_agent_lib",
+ "//base",
+ "//device/serial",
+ "//mojo/public/c/system:for_shared_library",
+ ]
+}
+
+source_set("battor_agent_lib") {
+ sources = [
+ "battor_agent.cc",
+ "battor_agent.h",
+ "battor_connection.cc",
+ "battor_connection.h",
+ "battor_connection_impl.cc",
+ "battor_connection_impl.h",
+ "battor_error.cc",
+ "battor_error.h",
+ "battor_finder.cc",
+ "battor_finder.h",
+ "battor_sample_converter.cc",
+ "battor_sample_converter.h",
+ ]
+ deps = [
+ "//base",
+ "//device/serial",
+ "//net",
+ ]
+}
+
+test("battor_agent_unittests") {
+ sources = [
+ "battor_agent_unittest.cc",
+ "battor_connection_impl_unittest.cc",
+ "battor_protocol_types_unittest.cc",
+ "battor_sample_converter_unittest.cc",
+ ]
+ deps = [
+ ":battor_agent_lib",
+ "//base",
+ "//base/test:run_all_unittests",
+ "//base/test:test_support",
+ "//device/serial",
+ "//device/serial:test_support",
+ "//mojo/public/c/system:for_shared_library",
+ "//testing/gmock",
+ "//testing/gtest",
+ ]
+}
diff --git a/chromium/tools/battor_agent/DEPS b/chromium/tools/battor_agent/DEPS
new file mode 100644
index 00000000000..b40de52fd2b
--- /dev/null
+++ b/chromium/tools/battor_agent/DEPS
@@ -0,0 +1,5 @@
+include_rules = [
+ "+device/serial",
+ "+mojo/public",
+ "+net/base",
+] \ No newline at end of file
diff --git a/chromium/tools/battor_agent/OWNERS b/chromium/tools/battor_agent/OWNERS
new file mode 100644
index 00000000000..a90dd8287d8
--- /dev/null
+++ b/chromium/tools/battor_agent/OWNERS
@@ -0,0 +1,3 @@
+charliea@chromium.org
+nednguyen@google.com
+zhenw@chromium.org
diff --git a/chromium/tools/battor_agent/README b/chromium/tools/battor_agent/README
new file mode 100644
index 00000000000..3c0c2df5b06
--- /dev/null
+++ b/chromium/tools/battor_agent/README
@@ -0,0 +1,25 @@
+BattOr Agent
+============
+
+The BattOr Agent is a C++ library that acts as a means of
+communicating with a BattOr. A BattOr is an external USB device,
+typically connected to the host, that's capable of recording
+accurate, high-frequency (2000Hz) power samples.
+
+The BattOr Agent accepts five high-level tracing commands:
+
+- **StartTracing**, which tells the BattOr to start collecting power
+samples.
+- **StopTracing**, which tells the BattOr to stop collecting power
+samples and return its trace log.
+- **SupportsExplicitClockSync**, which returns whether the BattOr is
+able to record clock sync markers in its own trace log.
+- **RecordClockSyncMarker**, which writes the specified string into the
+BattOr trace log. Because this string is accompanied by a BattOr tracing
+timestamp, we can use this as a way of correlating the BattOr timeline
+and the host computer's timeline.
+- **IssueClockSyncMarker**, which tells the BattOr to issue clock sync
+markers to all other tracing agents that it's connected to.
+
+For those calling the agent from non-C++ code, we also provide a thin
+binary wrapper around the C++ library.
diff --git a/chromium/tools/battor_agent/battor_agent.cc b/chromium/tools/battor_agent/battor_agent.cc
new file mode 100644
index 00000000000..323da5175e4
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_agent.cc
@@ -0,0 +1,570 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/battor_agent/battor_agent.h"
+
+#include <iomanip>
+
+#include "base/bind.h"
+#include "base/thread_task_runner_handle.h"
+#include "tools/battor_agent/battor_connection_impl.h"
+#include "tools/battor_agent/battor_sample_converter.h"
+
+using std::vector;
+
+namespace battor {
+
+namespace {
+
+// The number of seconds that it takes a BattOr to reset.
+const uint8_t kBattOrResetTimeSeconds = 5;
+
+// The maximum number of times to retry when reading a message.
+const uint8_t kMaxReadAttempts = 20;
+
+// The number of milliseconds to wait before trying to read a message again.
+const uint8_t kReadRetryDelayMilliseconds = 1;
+
+// The amount of time we need to wait after recording a clock sync marker in
+// order to ensure that the sample we synced to doesn't get thrown out.
+const uint8_t kStopTracingClockSyncDelayMilliseconds = 100;
+
+// The number of seconds allowed for a given action before timing out.
+const uint8_t kBattOrTimeoutSeconds = 10;
+
+// Returns true if the specified vector of bytes decodes to a message that is an
+// ack for the specified control message type.
+bool IsAckOfControlCommand(BattOrMessageType message_type,
+ BattOrControlMessageType control_message_type,
+ const vector<char>& msg) {
+ if (message_type != BATTOR_MESSAGE_TYPE_CONTROL_ACK)
+ return false;
+
+ if (msg.size() != sizeof(BattOrControlMessageAck))
+ return false;
+
+ const BattOrControlMessageAck* ack =
+ reinterpret_cast<const BattOrControlMessageAck*>(msg.data());
+
+ if (ack->type != control_message_type)
+ return false;
+
+ return true;
+}
+
+// Attempts to decode the specified vector of bytes into a valid EEPROM.
+// Returns the new EEPROM, or nullptr if unsuccessful.
+std::unique_ptr<BattOrEEPROM> ParseEEPROM(BattOrMessageType message_type,
+ const vector<char>& msg) {
+ if (message_type != BATTOR_MESSAGE_TYPE_CONTROL_ACK)
+ return nullptr;
+
+ if (msg.size() != sizeof(BattOrEEPROM))
+ return nullptr;
+
+ std::unique_ptr<BattOrEEPROM> eeprom(new BattOrEEPROM());
+ memcpy(eeprom.get(), msg.data(), sizeof(BattOrEEPROM));
+ return eeprom;
+}
+
+// Returns true if the specified vector of bytes decodes to a valid BattOr
+// samples frame. The frame header and samples are returned via the frame_header
+// and samples parameters.
+bool ParseSampleFrame(BattOrMessageType type,
+ const vector<char>& msg,
+ uint32_t expected_sequence_number,
+ BattOrFrameHeader* frame_header,
+ vector<RawBattOrSample>* samples) {
+ if (type != BATTOR_MESSAGE_TYPE_SAMPLES)
+ return false;
+
+ // Each frame should contain a header and an integer number of BattOr samples.
+ if ((msg.size() - sizeof(BattOrFrameHeader)) % sizeof(RawBattOrSample) != 0)
+ return false;
+
+ // The first bytes in the frame contain the frame header.
+ const char* frame_ptr = reinterpret_cast<const char*>(msg.data());
+ memcpy(frame_header, frame_ptr, sizeof(BattOrFrameHeader));
+ frame_ptr += sizeof(BattOrFrameHeader);
+
+ if (frame_header->sequence_number != expected_sequence_number) {
+ LOG(WARNING) << "Unexpected sequence number: wanted "
+ << expected_sequence_number << ", but got "
+ << frame_header->sequence_number << ".";
+ return false;
+ }
+
+ size_t remaining_bytes = msg.size() - sizeof(BattOrFrameHeader);
+ if (remaining_bytes != frame_header->length)
+ return false;
+
+ samples->resize(remaining_bytes / sizeof(RawBattOrSample));
+ memcpy(samples->data(), frame_ptr, remaining_bytes);
+
+ return true;
+}
+
+} // namespace
+
+BattOrAgent::BattOrAgent(
+ const std::string& path,
+ Listener* listener,
+ scoped_refptr<base::SingleThreadTaskRunner> file_thread_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> ui_thread_task_runner)
+ : connection_(new BattOrConnectionImpl(path,
+ this,
+ file_thread_task_runner,
+ ui_thread_task_runner)),
+ listener_(listener),
+ last_action_(Action::INVALID),
+ command_(Command::INVALID),
+ num_read_attempts_(0) {
+ // We don't care what thread the constructor is called on - we only care that
+ // all of the other method invocations happen on the same thread.
+ thread_checker_.DetachFromThread();
+}
+
+BattOrAgent::~BattOrAgent() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+}
+
+void BattOrAgent::StartTracing() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ // When tracing is restarted, all previous clock sync markers are invalid.
+ clock_sync_markers_.clear();
+ last_clock_sync_time_ = base::TimeTicks();
+
+ command_ = Command::START_TRACING;
+ PerformAction(Action::REQUEST_CONNECTION);
+}
+
+void BattOrAgent::StopTracing() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ command_ = Command::STOP_TRACING;
+ PerformAction(Action::REQUEST_CONNECTION);
+}
+
+void BattOrAgent::RecordClockSyncMarker(const std::string& marker) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ command_ = Command::RECORD_CLOCK_SYNC_MARKER;
+ pending_clock_sync_marker_ = marker;
+ PerformAction(Action::REQUEST_CONNECTION);
+}
+
+void BattOrAgent::BeginConnect() {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ connection_->Open();
+}
+
+void BattOrAgent::OnConnectionOpened(bool success) {
+ // Return immediately if opening the connection already timed out.
+ if (timeout_callback_.IsCancelled())
+ return;
+ timeout_callback_.Cancel();
+
+ if (!success) {
+ CompleteCommand(BATTOR_ERROR_CONNECTION_FAILED);
+ return;
+ }
+
+ switch (command_) {
+ case Command::START_TRACING:
+ // TODO(charliea): Ideally, we'd just like to send an init, and the BattOr
+ // firmware can handle whether a reset is necessary or not, sending an
+ // init ack regardless. This reset can be removed once this is true.
+ // https://github.com/aschulm/battor/issues/30 tracks this.
+ PerformAction(Action::SEND_RESET);
+ return;
+ case Command::STOP_TRACING:
+ PerformAction(Action::SEND_EEPROM_REQUEST);
+ return;
+ case Command::RECORD_CLOCK_SYNC_MARKER:
+ PerformAction(Action::SEND_CURRENT_SAMPLE_REQUEST);
+ return;
+ case Command::INVALID:
+ NOTREACHED();
+ }
+}
+
+void BattOrAgent::OnBytesSent(bool success) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ // Return immediately if whatever action we were trying to perform already
+ // timed out.
+ if (timeout_callback_.IsCancelled())
+ return;
+ timeout_callback_.Cancel();
+
+ if (!success) {
+ CompleteCommand(BATTOR_ERROR_SEND_ERROR);
+ return;
+ }
+
+ switch (last_action_) {
+ case Action::SEND_RESET:
+ // TODO(charliea): Ideally, we'd just like to send an init, and the BattOr
+ // firmware can handle whether a reset is necessary or not, sending an
+ // init ack regardless. This reset can be removed once this is true.
+ // https://github.com/aschulm/battor/issues/30 tracks this.
+
+ // Wait for the reset to happen before sending the init message.
+ PerformDelayedAction(Action::SEND_INIT, base::TimeDelta::FromSeconds(
+ kBattOrResetTimeSeconds));
+ return;
+ case Action::SEND_INIT:
+ PerformAction(Action::READ_INIT_ACK);
+ return;
+ case Action::SEND_SET_GAIN:
+ PerformAction(Action::READ_SET_GAIN_ACK);
+ return;
+ case Action::SEND_START_TRACING:
+ PerformAction(Action::READ_START_TRACING_ACK);
+ return;
+ case Action::SEND_EEPROM_REQUEST:
+ num_read_attempts_ = 1;
+ PerformAction(Action::READ_EEPROM);
+ return;
+ case Action::SEND_SAMPLES_REQUEST:
+ num_read_attempts_ = 1;
+ PerformAction(Action::READ_CALIBRATION_FRAME);
+ return;
+ case Action::SEND_CURRENT_SAMPLE_REQUEST:
+ num_read_attempts_ = 1;
+ PerformAction(Action::READ_CURRENT_SAMPLE);
+ return;
+ default:
+ CompleteCommand(BATTOR_ERROR_UNEXPECTED_MESSAGE);
+ }
+}
+
+void BattOrAgent::OnMessageRead(bool success,
+ BattOrMessageType type,
+ std::unique_ptr<vector<char>> bytes) {
+ // Return immediately if whatever action we were trying to perform already
+ // timed out.
+ if (timeout_callback_.IsCancelled())
+ return;
+ timeout_callback_.Cancel();
+
+ if (!success) {
+ switch (last_action_) {
+ case Action::READ_EEPROM:
+ case Action::READ_CALIBRATION_FRAME:
+ case Action::READ_DATA_FRAME:
+ case Action::READ_CURRENT_SAMPLE:
+ if (++num_read_attempts_ > kMaxReadAttempts) {
+ CompleteCommand(BATTOR_ERROR_RECEIVE_ERROR);
+ return;
+ }
+
+ PerformDelayedAction(last_action_, base::TimeDelta::FromMilliseconds(
+ kReadRetryDelayMilliseconds));
+ return;
+
+ default:
+ CompleteCommand(BATTOR_ERROR_RECEIVE_ERROR);
+ return;
+ }
+ }
+
+ switch (last_action_) {
+ case Action::READ_INIT_ACK:
+ if (!IsAckOfControlCommand(type, BATTOR_CONTROL_MESSAGE_TYPE_INIT,
+ *bytes)) {
+ CompleteCommand(BATTOR_ERROR_UNEXPECTED_MESSAGE);
+ return;
+ }
+
+ PerformAction(Action::SEND_SET_GAIN);
+ return;
+
+ case Action::READ_SET_GAIN_ACK:
+ if (!IsAckOfControlCommand(type, BATTOR_CONTROL_MESSAGE_TYPE_SET_GAIN,
+ *bytes)) {
+ CompleteCommand(BATTOR_ERROR_UNEXPECTED_MESSAGE);
+ return;
+ }
+
+ PerformAction(Action::SEND_START_TRACING);
+ return;
+
+ case Action::READ_START_TRACING_ACK:
+ if (!IsAckOfControlCommand(
+ type, BATTOR_CONTROL_MESSAGE_TYPE_START_SAMPLING_SD, *bytes)) {
+ CompleteCommand(BATTOR_ERROR_UNEXPECTED_MESSAGE);
+ return;
+ }
+
+ CompleteCommand(BATTOR_ERROR_NONE);
+ return;
+
+ case Action::READ_EEPROM: {
+ battor_eeprom_ = ParseEEPROM(type, *bytes);
+ if (!battor_eeprom_) {
+ CompleteCommand(BATTOR_ERROR_UNEXPECTED_MESSAGE);
+ return;
+ }
+
+ // Make sure that we don't request samples until a safe amount of time has
+ // elapsed since recording the last clock sync marker: we need to ensure
+ // that the sample we synced to doesn't get thrown out.
+ base::TimeTicks min_request_samples_time =
+ last_clock_sync_time_ + base::TimeDelta::FromMilliseconds(
+ kStopTracingClockSyncDelayMilliseconds);
+ base::TimeDelta request_samples_delay = std::max(
+ min_request_samples_time - base::TimeTicks::Now(), base::TimeDelta());
+
+ PerformDelayedAction(Action::SEND_SAMPLES_REQUEST, request_samples_delay);
+ return;
+ }
+ case Action::READ_CALIBRATION_FRAME: {
+ BattOrFrameHeader frame_header;
+ if (!ParseSampleFrame(type, *bytes, next_sequence_number_++,
+ &frame_header, &calibration_frame_)) {
+ CompleteCommand(BATTOR_ERROR_UNEXPECTED_MESSAGE);
+ return;
+ }
+
+ // Make sure that the calibration frame has actual samples in it.
+ if (calibration_frame_.empty()) {
+ CompleteCommand(BATTOR_ERROR_UNEXPECTED_MESSAGE);
+ return;
+ }
+
+ num_read_attempts_ = 1;
+ PerformAction(Action::READ_DATA_FRAME);
+ return;
+ }
+
+ case Action::READ_DATA_FRAME: {
+ BattOrFrameHeader frame_header;
+ vector<RawBattOrSample> frame;
+ if (!ParseSampleFrame(type, *bytes, next_sequence_number_++,
+ &frame_header, &frame)) {
+ CompleteCommand(BATTOR_ERROR_UNEXPECTED_MESSAGE);
+ return;
+ }
+
+ // Check for the empty frame the BattOr uses to indicate it's done
+ // streaming samples.
+ if (frame.empty()) {
+ CompleteCommand(BATTOR_ERROR_NONE);
+ return;
+ }
+
+ samples_.insert(samples_.end(), frame.begin(), frame.end());
+
+ num_read_attempts_ = 1;
+ PerformAction(Action::READ_DATA_FRAME);
+ return;
+ }
+
+ case Action::READ_CURRENT_SAMPLE:
+ if (type != BATTOR_MESSAGE_TYPE_CONTROL_ACK ||
+ bytes->size() != sizeof(uint32_t)) {
+ CompleteCommand(BATTOR_ERROR_UNEXPECTED_MESSAGE);
+ return;
+ }
+
+ uint32_t sample_num;
+ memcpy(&sample_num, bytes->data(), sizeof(uint32_t));
+ clock_sync_markers_[sample_num] = pending_clock_sync_marker_;
+ last_clock_sync_time_ = base::TimeTicks::Now();
+ CompleteCommand(BATTOR_ERROR_NONE);
+ return;
+
+ default:
+ CompleteCommand(BATTOR_ERROR_UNEXPECTED_MESSAGE);
+ }
+}
+
+void BattOrAgent::PerformAction(Action action) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ timeout_callback_.Reset(
+ base::Bind(&BattOrAgent::OnActionTimeout, AsWeakPtr()));
+ base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
+ FROM_HERE, timeout_callback_.callback(),
+ base::TimeDelta::FromSeconds(kBattOrTimeoutSeconds));
+
+ last_action_ = action;
+
+ switch (action) {
+ case Action::REQUEST_CONNECTION:
+ BeginConnect();
+ return;
+
+ // The following actions are required for StartTracing:
+ case Action::SEND_RESET:
+ // Reset the BattOr to clear any preexisting state. After sending the
+ // reset signal, we need to wait for the reset to finish before issuing
+ // further commands.
+ SendControlMessage(BATTOR_CONTROL_MESSAGE_TYPE_RESET, 0, 0);
+ return;
+ case Action::SEND_INIT:
+ // After resetting the BattOr, we need to make sure to flush the serial
+ // stream. Strange data may have been written into it during the reset.
+ connection_->Flush();
+
+ SendControlMessage(BATTOR_CONTROL_MESSAGE_TYPE_INIT, 0, 0);
+ return;
+ case Action::READ_INIT_ACK:
+ connection_->ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL_ACK);
+ return;
+ case Action::SEND_SET_GAIN:
+ // Set the BattOr's gain. Setting the gain tells the BattOr the range of
+ // power measurements that we expect to see.
+ SendControlMessage(BATTOR_CONTROL_MESSAGE_TYPE_SET_GAIN, BATTOR_GAIN_LOW,
+ 0);
+ return;
+ case Action::READ_SET_GAIN_ACK:
+ connection_->ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL_ACK);
+ return;
+ case Action::SEND_START_TRACING:
+ SendControlMessage(BATTOR_CONTROL_MESSAGE_TYPE_START_SAMPLING_SD, 0, 0);
+ return;
+ case Action::READ_START_TRACING_ACK:
+ connection_->ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL_ACK);
+ return;
+
+ // The following actions are required for StopTracing:
+ case Action::SEND_EEPROM_REQUEST:
+ // Read the BattOr's EEPROM to get calibration information that's required
+ // to convert the raw samples to accurate ones.
+ SendControlMessage(BATTOR_CONTROL_MESSAGE_TYPE_READ_EEPROM,
+ sizeof(BattOrEEPROM), 0);
+ return;
+ case Action::READ_EEPROM:
+ connection_->ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL_ACK);
+ return;
+ case Action::SEND_SAMPLES_REQUEST:
+ // Send a request to the BattOr to tell it to start streaming the samples
+ // that it's stored on its SD card over the serial connection.
+ SendControlMessage(BATTOR_CONTROL_MESSAGE_TYPE_READ_SD_UART, 0, 0);
+ return;
+ case Action::READ_CALIBRATION_FRAME:
+ // Data frames are numbered starting at zero and counting up by one each
+ // data frame. We keep track of the next frame sequence number we expect
+ // to see to ensure we don't miss any data.
+ next_sequence_number_ = 0;
+ case Action::READ_DATA_FRAME:
+ // The first frame sent back from the BattOr contains voltage and current
+ // data that excludes whatever device is being measured from the
+ // circuit. We use this first frame to establish a baseline voltage and
+ // current.
+ //
+ // All further frames contain real (but uncalibrated) voltage and current
+ // data.
+ connection_->ReadMessage(BATTOR_MESSAGE_TYPE_SAMPLES);
+ return;
+
+ // The following actions are required for RecordClockSyncMarker:
+ case Action::SEND_CURRENT_SAMPLE_REQUEST:
+ SendControlMessage(BATTOR_CONTROL_MESSAGE_TYPE_READ_SAMPLE_COUNT, 0, 0);
+ return;
+ case Action::READ_CURRENT_SAMPLE:
+ connection_->ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL_ACK);
+ return;
+
+ case Action::INVALID:
+ NOTREACHED();
+ }
+}
+
+void BattOrAgent::PerformDelayedAction(Action action, base::TimeDelta delay) {
+ base::ThreadTaskRunnerHandle::Get()->PostDelayedTask(
+ FROM_HERE, base::Bind(&BattOrAgent::PerformAction, AsWeakPtr(), action),
+ delay);
+}
+
+void BattOrAgent::OnActionTimeout() {
+ CompleteCommand(BATTOR_ERROR_TIMEOUT);
+ timeout_callback_.Cancel();
+}
+
+void BattOrAgent::SendControlMessage(BattOrControlMessageType type,
+ uint16_t param1,
+ uint16_t param2) {
+ DCHECK(thread_checker_.CalledOnValidThread());
+
+ BattOrControlMessage msg{type, param1, param2};
+ connection_->SendBytes(BATTOR_MESSAGE_TYPE_CONTROL, &msg, sizeof(msg));
+}
+
+void BattOrAgent::CompleteCommand(BattOrError error) {
+ switch (command_) {
+ case Command::START_TRACING:
+ base::ThreadTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE, base::Bind(&Listener::OnStartTracingComplete,
+ base::Unretained(listener_), error));
+ break;
+ case Command::STOP_TRACING:
+ base::ThreadTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE,
+ base::Bind(&Listener::OnStopTracingComplete,
+ base::Unretained(listener_), SamplesToString(), error));
+ break;
+ case Command::RECORD_CLOCK_SYNC_MARKER:
+ base::ThreadTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE, base::Bind(&Listener::OnRecordClockSyncMarkerComplete,
+ base::Unretained(listener_), error));
+ break;
+ case Command::INVALID:
+ NOTREACHED();
+ }
+
+ last_action_ = Action::INVALID;
+ command_ = Command::INVALID;
+ pending_clock_sync_marker_.clear();
+ battor_eeprom_.reset();
+ calibration_frame_.clear();
+ samples_.clear();
+ next_sequence_number_ = 0;
+}
+
+std::string BattOrAgent::SamplesToString() {
+ if (calibration_frame_.empty() || samples_.empty() || !battor_eeprom_)
+ return "";
+
+ BattOrSampleConverter converter(*battor_eeprom_, calibration_frame_);
+
+ std::stringstream trace_stream;
+ trace_stream << std::fixed;
+
+ // Create a header that indicates the BattOr's parameters for these samples.
+ BattOrSample min_sample = converter.MinSample();
+ BattOrSample max_sample = converter.MaxSample();
+ trace_stream << "# BattOr" << std::endl
+ << std::setprecision(1) << "# voltage_range ["
+ << min_sample.voltage_mV << ", " << max_sample.voltage_mV
+ << "] mV" << std::endl
+ << "# current_range [" << min_sample.current_mA << ", "
+ << max_sample.current_mA << "] mA" << std::endl
+ << "# sample_rate " << battor_eeprom_->sd_sample_rate << " Hz"
+ << ", gain " << battor_eeprom_->low_gain << "x" << std::endl;
+
+ // Create a string representation of the BattOr samples.
+ for (size_t i = 0; i < samples_.size(); i++) {
+ BattOrSample sample = converter.ToSample(samples_[i], i);
+ trace_stream << std::setprecision(2) << sample.time_ms << " "
+ << std::setprecision(1) << sample.current_mA << " "
+ << sample.voltage_mV;
+
+ // If there's a clock sync marker for the current sample, print it.
+ auto clock_sync_marker = clock_sync_markers_.find(
+ static_cast<uint32_t>(calibration_frame_.size() + i));
+ if (clock_sync_marker != clock_sync_markers_.end())
+ trace_stream << " <" << clock_sync_marker->second << ">";
+
+ trace_stream << std::endl;
+ }
+
+ return trace_stream.str();
+}
+
+} // namespace battor
diff --git a/chromium/tools/battor_agent/battor_agent.gyp b/chromium/tools/battor_agent/battor_agent.gyp
index 051f66dcbcb..974fbdefcf2 100644
--- a/chromium/tools/battor_agent/battor_agent.gyp
+++ b/chromium/tools/battor_agent/battor_agent.gyp
@@ -14,8 +14,7 @@
'battor_agent_lib',
'../../device/serial/serial.gyp:device_serial',
'../../device/serial/serial.gyp:device_serial_mojo',
- '../../third_party/mojo/mojo_public.gyp:mojo_environment_standalone',
- '../../third_party/mojo/mojo_public.gyp:mojo_public',
+ '../../mojo/mojo_edk.gyp:mojo_system_impl',
],
'sources': [
'battor_agent_bin.cc',
@@ -34,7 +33,10 @@
'battor_connection.h',
'battor_connection_impl.cc',
'battor_connection_impl.h',
+ 'battor_error.cc',
'battor_error.h',
+ 'battor_finder.cc',
+ 'battor_finder.h',
'battor_sample_converter.cc',
'battor_sample_converter.h',
],
@@ -53,11 +55,10 @@
'../../base/base.gyp:run_all_unittests',
'../../base/base.gyp:test_support_base',
'../../device/serial/serial.gyp:device_serial',
- '../../device/serial/serial.gyp:device_serial_test_util',
- '../../testing/gmock.gyp:gmock',
+ '../../device/serial/serial.gyp:device_serial_test_util',
+ '../../mojo/mojo_edk.gyp:mojo_system_impl',
+ '../../testing/gmock.gyp:gmock',
'../../testing/gtest.gyp:gtest',
- '../../third_party/mojo/mojo_public.gyp:mojo_environment_standalone',
- '../../third_party/mojo/mojo_public.gyp:mojo_public',
],
'sources': [
'battor_agent_unittest.cc',
@@ -67,4 +68,23 @@
],
},
],
+ 'conditions': [
+ ['test_isolation_mode != "noop"', {
+ 'targets': [
+ {
+ 'target_name': 'battor_agent_unittests_run',
+ 'type': 'none',
+ 'dependencies': [
+ 'battor_agent_unittests',
+ ],
+ 'includes': [
+ '../../build/isolate.gypi',
+ ],
+ 'sources': [
+ 'battor_agent_unittests.isolate',
+ ],
+ },
+ ],
+ }],
+ ],
}
diff --git a/chromium/tools/battor_agent/battor_agent.h b/chromium/tools/battor_agent/battor_agent.h
new file mode 100644
index 00000000000..3c6c77d170e
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_agent.h
@@ -0,0 +1,187 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_BATTOR_AGENT_BATTOR_AGENT_H_
+#define TOOLS_BATTOR_AGENT_BATTOR_AGENT_H_
+
+#include <map>
+
+#include "base/cancelable_callback.h"
+#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/weak_ptr.h"
+#include "base/threading/thread_checker.h"
+#include "tools/battor_agent/battor_connection.h"
+#include "tools/battor_agent/battor_error.h"
+
+namespace battor {
+
+// A BattOrAgent is a class used to asynchronously communicate with a BattOr for
+// the purpose of collecting power samples. A BattOr is an external USB device
+// that's capable of recording accurate, high-frequency (2000Hz) power samples.
+//
+// The serial connection is automatically opened when the first command
+// (e.g. StartTracing(), StopTracing(), etc.) is issued, and automatically
+// closed when either StopTracing() or the destructor is called. For Telemetry,
+// this means that the connection must be reinitialized for every command that's
+// issued because a new BattOrAgent is constructed. For Chromium, we use the
+// same BattOrAgent for multiple commands and thus avoid having to reinitialize
+// the serial connection.
+//
+// This class is NOT thread safe. Any interactions with this class that involve
+// IO (i.e. any interactions that require a callback) must be done from the
+// same IO thread, which must also have a running MessageLoop.
+class BattOrAgent : public BattOrConnection::Listener,
+ public base::SupportsWeakPtr<BattOrAgent> {
+ public:
+ // The listener interface that must be implemented in order to interact with
+ // the BattOrAgent.
+ class Listener {
+ public:
+ virtual void OnStartTracingComplete(BattOrError error) = 0;
+ virtual void OnStopTracingComplete(const std::string& trace,
+ BattOrError error) = 0;
+ virtual void OnRecordClockSyncMarkerComplete(BattOrError error) = 0;
+ };
+
+ BattOrAgent(
+ const std::string& path,
+ Listener* listener,
+ scoped_refptr<base::SingleThreadTaskRunner> file_thread_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> ui_thread_task_runner);
+ virtual ~BattOrAgent();
+
+ void StartTracing();
+ void StopTracing();
+ void RecordClockSyncMarker(const std::string& marker);
+
+ // Returns whether the BattOr is able to record clock sync markers in its own
+ // trace log.
+ static bool SupportsExplicitClockSync() { return true; }
+
+ // BattOrConnection::Listener implementation.
+ void OnConnectionOpened(bool success) override;
+ void OnBytesSent(bool success) override;
+ void OnMessageRead(bool success,
+ BattOrMessageType type,
+ std::unique_ptr<std::vector<char>> bytes) override;
+
+ protected:
+ // The connection that knows how to communicate with the BattOr in terms of
+ // protocol primitives. This is protected so that it can be replaced with a
+ // fake in testing.
+ std::unique_ptr<BattOrConnection> connection_;
+
+ // Timeout for when an action isn't completed within the allotted time. This
+ // is virtual and protected so that timeouts can be disabled in testing. The
+ // testing task runner that runs delayed tasks immediately deals poorly with
+ // timeouts posted as future tasks.
+ virtual void OnActionTimeout();
+
+ private:
+ enum class Command {
+ INVALID,
+ START_TRACING,
+ STOP_TRACING,
+ RECORD_CLOCK_SYNC_MARKER,
+ };
+
+ enum class Action {
+ INVALID,
+
+ // Actions required to connect to a BattOr.
+ REQUEST_CONNECTION,
+
+ // Actions required for starting tracing.
+ SEND_RESET,
+ SEND_INIT,
+ READ_INIT_ACK,
+ SEND_SET_GAIN,
+ READ_SET_GAIN_ACK,
+ SEND_START_TRACING,
+ READ_START_TRACING_ACK,
+
+ // Actions required for stopping tracing.
+ SEND_EEPROM_REQUEST,
+ READ_EEPROM,
+ SEND_SAMPLES_REQUEST,
+ READ_CALIBRATION_FRAME,
+ READ_DATA_FRAME,
+
+ // Actions required for recording a clock sync marker.
+ SEND_CURRENT_SAMPLE_REQUEST,
+ READ_CURRENT_SAMPLE,
+ };
+
+ // Performs an action.
+ void PerformAction(Action action);
+ // Performs an action after a delay.
+ void PerformDelayedAction(Action action, base::TimeDelta delay);
+
+
+
+ // Requests a connection to the BattOr.
+ void BeginConnect();
+
+ // Sends a control message over the connection.
+ void SendControlMessage(BattOrControlMessageType type,
+ uint16_t param1,
+ uint16_t param2);
+
+ // Completes the command with the specified error.
+ void CompleteCommand(BattOrError error);
+
+ // Returns a formatted version of samples_ with timestamps and real units.
+ std::string SamplesToString();
+
+ // The listener that handles the commands' results. It must outlive the agent.
+ Listener* listener_;
+
+ // The last action executed by the agent. This should only be updated in
+ // PerformAction().
+ Action last_action_;
+
+ // The tracing command currently being executed by the agent.
+ Command command_;
+
+ // A map from the sample number (including samples from the calibration frame)
+ // to the ID of the clock sync marker that is associated with that sample
+ // number. If we ever have to store a large number of these, consider using an
+ // unordered map.
+ std::map<uint32_t, std::string> clock_sync_markers_;
+
+ // The clock sync marker being recorded (if we're currently recording one).
+ std::string pending_clock_sync_marker_;
+
+ // The time at which the last clock sync marker was recorded.
+ base::TimeTicks last_clock_sync_time_;
+
+ // Checker to make sure that this is only ever called on the IO thread.
+ base::ThreadChecker thread_checker_;
+
+ // The BattOr's EEPROM (which is required for calibration).
+ std::unique_ptr<BattOrEEPROM> battor_eeprom_;
+
+ // The first frame (required for calibration).
+ std::vector<RawBattOrSample> calibration_frame_;
+
+ // The actual data samples recorded.
+ std::vector<RawBattOrSample> samples_;
+
+ // The expected sequence number of the next frame. We use this to ensure that
+ // we receive frames in order.
+ uint32_t next_sequence_number_;
+
+ // The number of times that we've attempted to read the last message.
+ uint8_t num_read_attempts_;
+
+ // The timeout that's run when an action times out.
+ base::CancelableClosure timeout_callback_;
+
+ DISALLOW_COPY_AND_ASSIGN(BattOrAgent);
+};
+
+} // namespace battor
+
+#endif // TOOLS_BATTOR_AGENT_BATTOR_AGENT_H_
diff --git a/chromium/tools/battor_agent/battor_agent_bin.cc b/chromium/tools/battor_agent/battor_agent_bin.cc
new file mode 100644
index 00000000000..7424d1ddb46
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_agent_bin.cc
@@ -0,0 +1,311 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file provides a thin binary wrapper around the BattOr Agent
+// library. This binary wrapper provides a means for non-C++ tracing
+// controllers, such as Telemetry and Android Systrace, to issue high-level
+// tracing commands to the BattOr through an interactive shell.
+//
+// Example usage of how an external trace controller might use this binary:
+//
+// 1) Telemetry's PowerTracingAgent is told to start recording power samples
+// 2) PowerTracingAgent opens up a BattOr agent binary subprocess
+// 3) PowerTracingAgent sends the subprocess the StartTracing message via
+// STDIN
+// 4) PowerTracingAgent waits for the subprocess to write a line to STDOUT
+// ('Done.' if successful, some error message otherwise)
+// 5) If the last command was successful, PowerTracingAgent waits for the
+// duration of the trace
+// 6) When the tracing should end, PowerTracingAgent records the clock sync
+// start timestamp and sends the subprocess the
+// 'RecordClockSyncMark <marker>' message via STDIN.
+// 7) PowerTracingAgent waits for the subprocess to write a line to STDOUT
+// ('Done.' if successful, some error message otherwise)
+// 8) If the last command was successful, PowerTracingAgent records the clock
+// sync end timestamp and sends the subprocess the StopTracing message via
+// STDIN
+// 9) PowerTracingAgent continues to read trace output lines from STDOUT until
+// the binary exits with an exit code of 1 (indicating failure) or the
+// 'Done.' line is printed to STDOUT, signaling the last line of the trace
+// 10) PowerTracingAgent returns the battery trace to the Telemetry trace
+// controller
+
+#include <stdint.h>
+
+#include <fstream>
+#include <iostream>
+
+#include "base/at_exit.h"
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/command_line.h"
+#include "base/location.h"
+#include "base/logging.h"
+#include "base/strings/string_tokenizer.h"
+#include "base/strings/utf_string_conversions.h"
+#include "base/threading/thread.h"
+#include "tools/battor_agent/battor_agent.h"
+#include "tools/battor_agent/battor_error.h"
+#include "tools/battor_agent/battor_finder.h"
+
+using std::endl;
+
+namespace battor {
+
+namespace {
+
+const char kIoThreadName[] = "BattOr IO Thread";
+const char kFileThreadName[] = "BattOr File Thread";
+const char kUiThreadName[] = "BattOr UI Thread";
+
+const char kUsage[] =
+ "Start the battor_agent shell with:\n"
+ "\n"
+ " battor_agent <switches>\n"
+ "\n"
+ "Switches: \n"
+ " --battor-path=<path> Uses the specified BattOr path.\n"
+ "\n"
+ "Once in the shell, you can issue the following commands:\n"
+ "\n"
+ " StartTracing\n"
+ " StopTracing <optional file path>\n"
+ " SupportsExplicitClockSync\n"
+ " RecordClockSyncMarker <marker>\n"
+ " Exit\n"
+ " Help\n"
+ "\n";
+
+void PrintSupportsExplicitClockSync() {
+ std::cout << BattOrAgent::SupportsExplicitClockSync() << endl;
+}
+
+// Logs the error and exits with an error code.
+void HandleError(battor::BattOrError error) {
+ if (error != BATTOR_ERROR_NONE)
+ LOG(FATAL) << "Fatal error when communicating with the BattOr: "
+ << BattOrErrorToString(error);
+}
+
+// Prints an error message and exits due to a required thread failing to start.
+void ExitFromThreadStartFailure(const std::string& thread_name) {
+ LOG(FATAL) << "Failed to start " << thread_name;
+}
+
+std::vector<std::string> TokenizeString(std::string cmd) {
+ base::StringTokenizer tokenizer(cmd, " ");
+ std::vector<std::string> tokens;
+ while (tokenizer.GetNext())
+ tokens.push_back(tokenizer.token());
+ return tokens;
+}
+
+} // namespace
+
+// Wrapper class containing all state necessary for an independent binary to
+// use a BattOrAgent to communicate with a BattOr.
+class BattOrAgentBin : public BattOrAgent::Listener {
+ public:
+ BattOrAgentBin()
+ : done_(false, false),
+ io_thread_(kIoThreadName),
+ file_thread_(kFileThreadName),
+ ui_thread_(kUiThreadName) {}
+
+ ~BattOrAgentBin() { DCHECK(!agent_); }
+
+ // Starts the interactive BattOr agent shell and eventually returns an exit
+ // code.
+ int Run(int argc, char* argv[]) {
+ // If we don't have any BattOr to use, exit.
+ std::string path = BattOrFinder::FindBattOr();
+ if (path.empty()) {
+ std::cout << "Unable to find a BattOr." << endl;
+ exit(1);
+ }
+
+ SetUp(path);
+
+ std::string cmd;
+ for (;;) {
+ std::getline(std::cin, cmd);
+
+ if (cmd == "StartTracing") {
+ StartTracing();
+ } else if (cmd.find("StopTracing") != std::string::npos) {
+ std::vector<std::string> tokens = TokenizeString(cmd);
+ if (tokens.size() == 1 && tokens[0] == "StopTracing") {
+ // No path given.
+ StopTracing();
+ } else if (tokens.size() == 2 && tokens[0] == "StopTracing") {
+ // Path given.
+ StopTracing(tokens[1]);
+ } else {
+ std::cout << "Invalid StopTracing command." << endl;
+ std::cout << kUsage << endl;
+ continue;
+ }
+ break;
+ } else if (cmd == "SupportsExplicitClockSync") {
+ PrintSupportsExplicitClockSync();
+ } else if (cmd.find("RecordClockSyncMarker") != std::string::npos) {
+ std::vector<std::string> tokens = TokenizeString(cmd);
+ if (tokens.size() != 2 || tokens[0] != "RecordClockSyncMarker") {
+ std::cout << "Invalid RecordClockSyncMarker command." << endl;
+ std::cout << kUsage << endl;
+ continue;
+ }
+
+ RecordClockSyncMarker(tokens[1]);
+ } else if (cmd == "Exit") {
+ break;
+ } else {
+ std::cout << kUsage << endl;
+ }
+ }
+
+ TearDown();
+ return 0;
+ }
+
+ // Performs any setup necessary for the BattOr binary to run.
+ void SetUp(const std::string& path) {
+ // TODO(charliea): Investigate whether it's possible to replace this
+ // separate thread with a combination of MessageLoopForIO and RunLoop.
+ base::Thread::Options io_thread_options;
+ io_thread_options.message_loop_type = base::MessageLoopForIO::TYPE_IO;
+ if (!io_thread_.StartWithOptions(io_thread_options)) {
+ ExitFromThreadStartFailure(kIoThreadName);
+ }
+
+ io_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::Bind(&BattOrAgentBin::CreateAgent, base::Unretained(this), path));
+ done_.Wait();
+ }
+
+ // Performs any cleanup necessary after the BattOr binary is done running.
+ void TearDown() {
+ io_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::Bind(&BattOrAgentBin::DeleteAgent, base::Unretained(this)));
+ done_.Wait();
+ }
+
+ void StartTracing() {
+ io_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::Bind(&BattOrAgent::StartTracing, base::Unretained(agent_.get())));
+ done_.Wait();
+ }
+
+ void OnStartTracingComplete(BattOrError error) override {
+ if (error == BATTOR_ERROR_NONE)
+ std::cout << "Done." << endl;
+ else
+ HandleError(error);
+
+ done_.Signal();
+ }
+
+ void StopTracing(const std::string& path = "") {
+ trace_output_file_ = path;
+ io_thread_.task_runner()->PostTask(
+ FROM_HERE,
+ base::Bind(&BattOrAgent::StopTracing, base::Unretained(agent_.get())));
+ done_.Wait();
+ trace_output_file_ = std::string();
+ }
+
+ void OnStopTracingComplete(const std::string& trace,
+ BattOrError error) override {
+ if (error == BATTOR_ERROR_NONE) {
+ if (trace_output_file_.empty()) {
+ std::cout << trace;
+ }
+ else {
+ std::ofstream trace_stream(trace_output_file_);
+ if (!trace_stream.is_open()) {
+ std::cout << "Tracing output file could not be opened." << endl;
+ exit(1);
+ }
+ trace_stream << trace;
+ trace_stream.close();
+ }
+ std::cout << "Done." << endl;
+ } else {
+ HandleError(error);
+ }
+
+ done_.Signal();
+ }
+
+ void RecordClockSyncMarker(const std::string& marker) {
+ io_thread_.task_runner()->PostTask(
+ FROM_HERE, base::Bind(&BattOrAgent::RecordClockSyncMarker,
+ base::Unretained(agent_.get()), marker));
+ done_.Wait();
+ }
+
+ void OnRecordClockSyncMarkerComplete(BattOrError error) override {
+ if (error == BATTOR_ERROR_NONE)
+ std::cout << "Done." << endl;
+ else
+ HandleError(error);
+
+ done_.Signal();
+ }
+
+ // Postable task for creating the BattOrAgent. Because the BattOrAgent has
+ // uber thread safe dependencies, all interactions with it, including creating
+ // and deleting it, MUST happen on the IO thread.
+ void CreateAgent(const std::string& path) {
+ // In Chrome, we already have file and UI threads running. Because the
+ // Chrome serial libraries rely on having those threads available, we have
+ // to spin up our own because we're in a separate binary.
+ if (!file_thread_.Start())
+ ExitFromThreadStartFailure(kFileThreadName);
+
+ base::Thread::Options ui_thread_options;
+ ui_thread_options.message_loop_type = base::MessageLoopForIO::TYPE_UI;
+ if (!ui_thread_.StartWithOptions(ui_thread_options)) {
+ ExitFromThreadStartFailure(kUiThreadName);
+ }
+
+ agent_.reset(new BattOrAgent(path, this, file_thread_.task_runner(),
+ ui_thread_.task_runner()));
+ done_.Signal();
+ }
+
+ // Postable task for deleting the BattOrAgent. See the comment for
+ // CreateAgent() above regarding why this is necessary.
+ void DeleteAgent() {
+ agent_.reset(nullptr);
+ done_.Signal();
+ }
+
+ private:
+ // Event signaled when an async task has finished executing.
+ base::WaitableEvent done_;
+
+ // Threads needed for serial communication.
+ base::Thread io_thread_;
+ base::Thread file_thread_;
+ base::Thread ui_thread_;
+
+ // The agent capable of asynchronously communicating with the BattOr.
+ std::unique_ptr<BattOrAgent> agent_;
+
+ std::string trace_output_file_;
+
+};
+
+} // namespace battor
+
+int main(int argc, char* argv[]) {
+ base::AtExitManager exit_manager;
+ base::CommandLine::Init(argc, argv);
+ battor::BattOrAgentBin bin;
+ return bin.Run(argc, argv);
+}
diff --git a/chromium/tools/battor_agent/battor_agent_unittest.cc b/chromium/tools/battor_agent/battor_agent_unittest.cc
new file mode 100644
index 00000000000..3b6c54338a1
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_agent_unittest.cc
@@ -0,0 +1,876 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/battor_agent/battor_agent.h"
+
+#include "base/test/test_simple_task_runner.h"
+#include "base/thread_task_runner_handle.h"
+#include "testing/gmock/include/gmock/gmock.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/battor_agent/battor_protocol_types.h"
+
+using namespace testing;
+
+using std::vector;
+
+namespace battor {
+
+namespace {
+
+BattOrControlMessageAck kInitAck{BATTOR_CONTROL_MESSAGE_TYPE_INIT, 0};
+BattOrControlMessageAck kSetGainAck{BATTOR_CONTROL_MESSAGE_TYPE_SET_GAIN, 0};
+BattOrControlMessageAck kStartTracingAck{
+ BATTOR_CONTROL_MESSAGE_TYPE_START_SAMPLING_SD, 0};
+const char kClockSyncId[] = "MY_MARKER";
+
+// Creates a byte vector copy of the specified object.
+template <typename T>
+std::unique_ptr<std::vector<char>> ToCharVector(const T& object) {
+ return std::unique_ptr<std::vector<char>>(new std::vector<char>(
+ reinterpret_cast<const char*>(&object),
+ reinterpret_cast<const char*>(&object) + sizeof(T)));
+}
+
+MATCHER_P2(
+ BufferEq,
+ expected_buffer,
+ expected_buffer_size,
+ "Makes sure that the argument has the same contents as the buffer.") {
+ return memcmp(reinterpret_cast<const void*>(arg),
+ reinterpret_cast<const void*>(expected_buffer),
+ expected_buffer_size) == 0;
+}
+
+std::unique_ptr<vector<char>> CreateFrame(const BattOrFrameHeader& frame_header,
+ const RawBattOrSample* samples,
+ const size_t& num_samples) {
+ std::unique_ptr<vector<char>> bytes(new vector<char>(
+ sizeof(BattOrFrameHeader) + sizeof(RawBattOrSample) * num_samples));
+ memcpy(bytes->data(), &frame_header, sizeof(BattOrFrameHeader));
+ memcpy(bytes->data() + sizeof(BattOrFrameHeader), samples,
+ sizeof(RawBattOrSample) * num_samples);
+
+ return bytes;
+}
+
+class MockBattOrConnection : public BattOrConnection {
+ public:
+ MockBattOrConnection(BattOrConnection::Listener* listener)
+ : BattOrConnection(listener) {}
+ ~MockBattOrConnection() override {}
+
+ MOCK_METHOD0(Open, void());
+ MOCK_METHOD0(Close, void());
+ MOCK_METHOD3(SendBytes,
+ void(BattOrMessageType type,
+ const void* buffer,
+ size_t bytes_to_send));
+ MOCK_METHOD1(ReadMessage, void(BattOrMessageType type));
+ MOCK_METHOD0(Flush, void());
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(MockBattOrConnection);
+};
+
+} // namespace
+
+// TestableBattOrAgent uses a fake BattOrConnection to be testable.
+class TestableBattOrAgent : public BattOrAgent {
+ public:
+ TestableBattOrAgent(BattOrAgent::Listener* listener)
+ : BattOrAgent("/dev/test", listener, nullptr, nullptr) {
+ connection_ =
+ std::unique_ptr<BattOrConnection>(new MockBattOrConnection(this));
+ }
+
+ MockBattOrConnection* GetConnection() {
+ return static_cast<MockBattOrConnection*>(connection_.get());
+ }
+
+ void OnActionTimeout() override {}
+};
+
+// BattOrAgentTest provides a BattOrAgent and captures the results of its
+// tracing commands.
+class BattOrAgentTest : public testing::Test, public BattOrAgent::Listener {
+ public:
+ BattOrAgentTest()
+ : task_runner_(new base::TestSimpleTaskRunner()),
+ thread_task_runner_handle_(task_runner_) {}
+
+ void OnStartTracingComplete(BattOrError error) override {
+ is_command_complete_ = true;
+ command_error_ = error;
+ }
+
+ void OnStopTracingComplete(const std::string& trace,
+ BattOrError error) override {
+ is_command_complete_ = true;
+ command_error_ = error;
+ trace_ = trace;
+ }
+
+ void OnRecordClockSyncMarkerComplete(BattOrError error) override {
+ is_command_complete_ = true;
+ command_error_ = error;
+ }
+
+ void OnBytesSent(bool success) {
+ agent_->OnBytesSent(success);
+ task_runner_->RunUntilIdle();
+ }
+
+ void OnMessageRead(bool success,
+ BattOrMessageType type,
+ std::unique_ptr<std::vector<char>> bytes) {
+ agent_->OnMessageRead(success, type, std::move(bytes));
+ task_runner_->RunUntilIdle();
+ }
+
+ protected:
+ void SetUp() override {
+ agent_.reset(new TestableBattOrAgent(this));
+ task_runner_->ClearPendingTasks();
+ is_command_complete_ = false;
+ command_error_ = BATTOR_ERROR_NONE;
+ }
+
+ // Possible states that the BattOrAgent can be in.
+ enum class BattOrAgentState {
+ // States required to connect to a BattOr.
+ CONNECTED,
+
+ // States required to StartTracing.
+ RESET_SENT,
+ INIT_SENT,
+ INIT_ACKED,
+ SET_GAIN_SENT,
+ GAIN_ACKED,
+ START_TRACING_SENT,
+ START_TRACING_COMPLETE,
+
+ // States required to StopTracing.
+ EEPROM_REQUEST_SENT,
+ EEPROM_RECEIVED,
+ SAMPLES_REQUEST_SENT,
+ CALIBRATION_FRAME_RECEIVED,
+
+ // States required to RecordClockSyncMarker.
+ CURRENT_SAMPLE_REQUEST_SENT,
+ RECORD_CLOCK_SYNC_MARKER_COMPLETE,
+ };
+
+ // Runs BattOrAgent::StartTracing until it reaches the specified state by
+ // feeding it the callbacks it needs to progress.
+ void RunStartTracingTo(BattOrAgentState end_state) {
+ is_command_complete_ = false;
+
+ GetAgent()->StartTracing();
+ GetTaskRunner()->RunUntilIdle();
+
+ GetAgent()->OnConnectionOpened(true);
+ GetTaskRunner()->RunUntilIdle();
+
+ if (end_state == BattOrAgentState::CONNECTED)
+ return;
+
+ OnBytesSent(true);
+ if (end_state == BattOrAgentState::RESET_SENT)
+ return;
+
+ OnBytesSent(true);
+ if (end_state == BattOrAgentState::INIT_SENT)
+ return;
+
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_CONTROL_ACK,
+ ToCharVector(kInitAck));
+ if (end_state == BattOrAgentState::INIT_ACKED)
+ return;
+
+ OnBytesSent(true);
+ if (end_state == BattOrAgentState::SET_GAIN_SENT)
+ return;
+
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_CONTROL_ACK,
+ ToCharVector(kSetGainAck));
+ if (end_state == BattOrAgentState::GAIN_ACKED)
+ return;
+
+ OnBytesSent(true);
+ if (end_state == BattOrAgentState::START_TRACING_SENT)
+ return;
+
+ // Make sure that we're actually forwarding to a state in the start tracing
+ // state machine.
+ DCHECK(end_state == BattOrAgentState::START_TRACING_COMPLETE);
+
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_CONTROL_ACK,
+ ToCharVector(kStartTracingAck));
+ }
+
+ // Runs BattOrAgent::StopTracing until it reaches the specified state by
+ // feeding it the callbacks it needs to progress.
+ void RunStopTracingTo(BattOrAgentState end_state) {
+ is_command_complete_ = false;
+
+ GetAgent()->StopTracing();
+ GetTaskRunner()->RunUntilIdle();
+
+ GetAgent()->OnConnectionOpened(true);
+ GetTaskRunner()->RunUntilIdle();
+
+ if (end_state == BattOrAgentState::CONNECTED)
+ return;
+
+ OnBytesSent(true);
+ if (end_state == BattOrAgentState::EEPROM_REQUEST_SENT)
+ return;
+
+ BattOrEEPROM eeprom;
+ eeprom.r1 = 1;
+ eeprom.r2 = 1;
+ eeprom.r3 = 1;
+ eeprom.low_gain = 1;
+ eeprom.low_gain_correction_offset = 0;
+ eeprom.low_gain_correction_factor = 1;
+ eeprom.sd_sample_rate = 1000;
+
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_CONTROL_ACK, ToCharVector(eeprom));
+ if (end_state == BattOrAgentState::EEPROM_RECEIVED)
+ return;
+
+ OnBytesSent(true);
+ if (end_state == BattOrAgentState::SAMPLES_REQUEST_SENT)
+ return;
+
+ DCHECK(end_state == BattOrAgentState::CALIBRATION_FRAME_RECEIVED);
+
+ BattOrFrameHeader cal_frame_header{0, sizeof(RawBattOrSample)};
+ RawBattOrSample cal_frame[] = {RawBattOrSample{1, 1}};
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(cal_frame_header, cal_frame, 1));
+ }
+
+ // Runs BattOrAgent::RecordClockSyncMarker until it reaches the specified
+ // state by feeding it the callbacks it needs to progress.
+ void RunRecordClockSyncMarkerTo(BattOrAgentState end_state) {
+ is_command_complete_ = false;
+
+ GetAgent()->RecordClockSyncMarker(kClockSyncId);
+ GetTaskRunner()->RunUntilIdle();
+
+ GetAgent()->OnConnectionOpened(true);
+ GetTaskRunner()->RunUntilIdle();
+
+ if (end_state == BattOrAgentState::CONNECTED)
+ return;
+
+ OnBytesSent(true);
+ if (end_state == BattOrAgentState::CURRENT_SAMPLE_REQUEST_SENT)
+ return;
+
+ DCHECK(end_state == BattOrAgentState::RECORD_CLOCK_SYNC_MARKER_COMPLETE);
+
+ uint32_t current_sample = 1;
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_CONTROL_ACK,
+ ToCharVector(current_sample));
+ }
+
+ TestableBattOrAgent* GetAgent() { return agent_.get(); }
+
+ scoped_refptr<base::TestSimpleTaskRunner> GetTaskRunner() {
+ return task_runner_;
+ }
+
+ bool IsCommandComplete() { return is_command_complete_; }
+ BattOrError GetCommandError() { return command_error_; }
+ std::string GetTrace() { return trace_; }
+
+ private:
+ scoped_refptr<base::TestSimpleTaskRunner> task_runner_;
+ // Needed to support ThreadTaskRunnerHandle::Get() in code under test.
+ base::ThreadTaskRunnerHandle thread_task_runner_handle_;
+
+ std::unique_ptr<TestableBattOrAgent> agent_;
+ bool is_command_complete_;
+ BattOrError command_error_;
+ std::string trace_;
+};
+
+TEST_F(BattOrAgentTest, StartTracing) {
+ testing::InSequence s;
+ EXPECT_CALL(*GetAgent()->GetConnection(), Open());
+
+ BattOrControlMessage reset_msg{BATTOR_CONTROL_MESSAGE_TYPE_RESET, 0, 0};
+ EXPECT_CALL(
+ *GetAgent()->GetConnection(),
+ SendBytes(BATTOR_MESSAGE_TYPE_CONTROL,
+ BufferEq(&reset_msg, sizeof(reset_msg)), sizeof(reset_msg)));
+
+ EXPECT_CALL(*GetAgent()->GetConnection(), Flush());
+ BattOrControlMessage init_msg{BATTOR_CONTROL_MESSAGE_TYPE_INIT, 0, 0};
+ EXPECT_CALL(
+ *GetAgent()->GetConnection(),
+ SendBytes(BATTOR_MESSAGE_TYPE_CONTROL,
+ BufferEq(&init_msg, sizeof(init_msg)), sizeof(init_msg)));
+
+ EXPECT_CALL(*GetAgent()->GetConnection(),
+ ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL_ACK));
+
+ BattOrControlMessage set_gain_msg{BATTOR_CONTROL_MESSAGE_TYPE_SET_GAIN,
+ BATTOR_GAIN_LOW, 0};
+ EXPECT_CALL(*GetAgent()->GetConnection(),
+ SendBytes(BATTOR_MESSAGE_TYPE_CONTROL,
+ BufferEq(&set_gain_msg, sizeof(set_gain_msg)),
+ sizeof(set_gain_msg)));
+
+ EXPECT_CALL(*GetAgent()->GetConnection(),
+ ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL_ACK));
+
+ BattOrControlMessage start_tracing_msg{
+ BATTOR_CONTROL_MESSAGE_TYPE_START_SAMPLING_SD, 0, 0};
+ EXPECT_CALL(*GetAgent()->GetConnection(),
+ SendBytes(BATTOR_MESSAGE_TYPE_CONTROL,
+ BufferEq(&start_tracing_msg, sizeof(start_tracing_msg)),
+ sizeof(start_tracing_msg)));
+
+ EXPECT_CALL(*GetAgent()->GetConnection(),
+ ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL_ACK));
+
+ RunStartTracingTo(BattOrAgentState::START_TRACING_COMPLETE);
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_NONE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StartTracingFailsWithoutConnection) {
+ GetAgent()->StartTracing();
+ GetTaskRunner()->RunUntilIdle();
+
+ GetAgent()->OnConnectionOpened(false);
+ GetTaskRunner()->RunUntilIdle();
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_CONNECTION_FAILED, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StartTracingFailsIfResetSendFails) {
+ RunStartTracingTo(BattOrAgentState::CONNECTED);
+ OnBytesSent(false);
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_SEND_ERROR, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StartTracingFailsIfInitSendFails) {
+ RunStartTracingTo(BattOrAgentState::RESET_SENT);
+ OnBytesSent(false);
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_SEND_ERROR, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StartTracingFailsIfInitAckReadFails) {
+ RunStartTracingTo(BattOrAgentState::INIT_SENT);
+ OnMessageRead(false, BATTOR_MESSAGE_TYPE_CONTROL_ACK, nullptr);
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_RECEIVE_ERROR, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StartTracingFailsIfInitWrongAckRead) {
+ RunStartTracingTo(BattOrAgentState::INIT_SENT);
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_CONTROL_ACK,
+ ToCharVector(kStartTracingAck));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_UNEXPECTED_MESSAGE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StartTracingFailsIfSetGainSendFails) {
+ RunStartTracingTo(BattOrAgentState::RESET_SENT);
+ OnBytesSent(false);
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_SEND_ERROR, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StartTracingFailsIfSetGainAckReadFails) {
+ RunStartTracingTo(BattOrAgentState::SET_GAIN_SENT);
+ OnMessageRead(false, BATTOR_MESSAGE_TYPE_CONTROL_ACK, nullptr);
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_RECEIVE_ERROR, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StartTracingFailsIfSetGainWrongAckRead) {
+ RunStartTracingTo(BattOrAgentState::SET_GAIN_SENT);
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_CONTROL_ACK,
+ ToCharVector(kStartTracingAck));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_UNEXPECTED_MESSAGE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StartTracingFailsIfStartTracingSendFails) {
+ RunStartTracingTo(BattOrAgentState::RESET_SENT);
+ OnBytesSent(false);
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_SEND_ERROR, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StartTracingFailsIfStartTracingAckReadFails) {
+ RunStartTracingTo(BattOrAgentState::START_TRACING_SENT);
+ OnMessageRead(false, BATTOR_MESSAGE_TYPE_CONTROL_ACK, nullptr);
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_RECEIVE_ERROR, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StartTracingFailsIfStartTracingWrongAckRead) {
+ RunStartTracingTo(BattOrAgentState::START_TRACING_SENT);
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_CONTROL_ACK, ToCharVector(kInitAck));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_UNEXPECTED_MESSAGE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracing) {
+ testing::InSequence s;
+ EXPECT_CALL(*GetAgent()->GetConnection(), Open());
+
+ BattOrControlMessage request_eeprom_msg{
+ BATTOR_CONTROL_MESSAGE_TYPE_READ_EEPROM, sizeof(BattOrEEPROM), 0};
+ EXPECT_CALL(
+ *GetAgent()->GetConnection(),
+ SendBytes(BATTOR_MESSAGE_TYPE_CONTROL,
+ BufferEq(&request_eeprom_msg, sizeof(request_eeprom_msg)),
+ sizeof(request_eeprom_msg)));
+
+ EXPECT_CALL(*GetAgent()->GetConnection(),
+ ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL_ACK));
+
+ BattOrControlMessage request_samples_msg{
+ BATTOR_CONTROL_MESSAGE_TYPE_READ_SD_UART, 0, 0};
+ EXPECT_CALL(
+ *GetAgent()->GetConnection(),
+ SendBytes(BATTOR_MESSAGE_TYPE_CONTROL,
+ BufferEq(&request_samples_msg, sizeof(request_samples_msg)),
+ sizeof(request_samples_msg)));
+
+  // We send the agent four frames: a calibration frame, two real data frames,
+  // and one zero-length frame to indicate that we're done.
+ EXPECT_CALL(*GetAgent()->GetConnection(),
+ ReadMessage(BATTOR_MESSAGE_TYPE_SAMPLES))
+ .Times(4);
+
+ RunStopTracingTo(BattOrAgentState::SAMPLES_REQUEST_SENT);
+
+ // Send the calibration frame.
+ BattOrFrameHeader cal_frame_header{0, 2 * sizeof(RawBattOrSample)};
+ RawBattOrSample cal_frame[] = {
+ RawBattOrSample{1, 1}, RawBattOrSample{2, 2},
+ };
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(cal_frame_header, cal_frame, 2));
+
+ // Send the two real data frames.
+ BattOrFrameHeader frame_header1{1, 3 * sizeof(RawBattOrSample)};
+ RawBattOrSample frame1[] = {
+ RawBattOrSample{1, 1}, RawBattOrSample{2, 2}, RawBattOrSample{3, 3},
+ };
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(frame_header1, frame1, 3));
+
+ BattOrFrameHeader frame_header2{2, 1 * sizeof(RawBattOrSample)};
+ RawBattOrSample frame2[] = {RawBattOrSample{1, 1}};
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(frame_header2, frame2, 1));
+
+ // Send an empty last frame to indicate that we're done.
+ BattOrFrameHeader frame_header3{3, 0 * sizeof(RawBattOrSample)};
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(frame_header3, nullptr, 0));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_NONE, GetCommandError());
+ EXPECT_EQ(
+ "# BattOr\n# voltage_range [-2401.8, 2398.2] mV\n# "
+ "current_range [-1200.9, 1199.1] mA\n"
+ "# sample_rate 1000 Hz, gain 1.0x\n"
+ "0.00 -0.3 -0.6\n1.00 0.3 0.6\n2.00 0.9 1.8\n3.00 -0.3 -0.6\n",
+ GetTrace());
+}
+
+TEST_F(BattOrAgentTest, StopTracingFailsWithoutConnection) {
+ GetAgent()->StopTracing();
+ GetTaskRunner()->RunUntilIdle();
+
+ GetAgent()->OnConnectionOpened(false);
+ GetTaskRunner()->RunUntilIdle();
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_CONNECTION_FAILED, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingFailsIfEEPROMRequestSendFails) {
+ RunStopTracingTo(BattOrAgentState::CONNECTED);
+ OnBytesSent(false);
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_SEND_ERROR, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingRetriesEEPROMRead) {
+ RunStopTracingTo(BattOrAgentState::EEPROM_REQUEST_SENT);
+
+ OnMessageRead(false, BATTOR_MESSAGE_TYPE_CONTROL_ACK, nullptr);
+
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_CONTROL_ACK,
+ ToCharVector(BattOrEEPROM()));
+
+ // Give confirmation that the samples request was sent.
+ OnBytesSent(true);
+
+ BattOrFrameHeader cal_frame_header{0, sizeof(RawBattOrSample)};
+ RawBattOrSample cal_frame[] = {RawBattOrSample{1, 1}};
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(cal_frame_header, cal_frame, 1));
+
+ BattOrFrameHeader frame_header{1, 0};
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(frame_header, nullptr, 0));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_NONE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingFailsIfEEPROMReadFails) {
+ RunStopTracingTo(BattOrAgentState::EEPROM_REQUEST_SENT);
+
+ for (int i = 0; i < 20; i++) {
+ OnMessageRead(false, BATTOR_MESSAGE_TYPE_CONTROL_ACK, nullptr);
+ }
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_RECEIVE_ERROR, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingFailsIfEEPROMWrongAckRead) {
+ RunStopTracingTo(BattOrAgentState::EEPROM_REQUEST_SENT);
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_CONTROL_ACK, ToCharVector(kInitAck));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_UNEXPECTED_MESSAGE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingFailsIfRequestSamplesFails) {
+ RunStopTracingTo(BattOrAgentState::EEPROM_RECEIVED);
+ OnBytesSent(false);
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_SEND_ERROR, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingSucceedsWithOneCalibrationFrameReadFailure) {
+ RunStopTracingTo(BattOrAgentState::SAMPLES_REQUEST_SENT);
+
+ // Make a read fail in order to make sure that the agent will retry.
+ OnMessageRead(false, BATTOR_MESSAGE_TYPE_SAMPLES, nullptr);
+
+ BattOrFrameHeader cal_frame_header{0, sizeof(RawBattOrSample)};
+ RawBattOrSample cal_frame[] = {RawBattOrSample{1, 1}};
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(cal_frame_header, cal_frame, 1));
+
+ BattOrFrameHeader frame_header{1, 0};
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(frame_header, nullptr, 0));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_NONE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingFailsWithManyCalibrationFrameReadFailures) {
+ RunStopTracingTo(BattOrAgentState::SAMPLES_REQUEST_SENT);
+
+ // We attempt the read a max of 20 times: send that many failures.
+ for (int i = 0; i < 20; i++) {
+ OnMessageRead(false, BATTOR_MESSAGE_TYPE_SAMPLES, nullptr);
+ }
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_RECEIVE_ERROR, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingSucceedsWithOneDataFrameReadFailure) {
+ RunStopTracingTo(BattOrAgentState::CALIBRATION_FRAME_RECEIVED);
+
+ // Make a read fail in order to make sure that the agent will retry.
+ OnMessageRead(false, BATTOR_MESSAGE_TYPE_SAMPLES, nullptr);
+
+ BattOrFrameHeader frame_header{1, 0};
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(frame_header, nullptr, 0));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_NONE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingFailsWithManyDataFrameReadFailures) {
+ RunStopTracingTo(BattOrAgentState::CALIBRATION_FRAME_RECEIVED);
+
+ // We attempt the read a max of 20 times: send that many failures.
+ for (int i = 0; i < 20; i++) {
+ OnMessageRead(false, BATTOR_MESSAGE_TYPE_SAMPLES, nullptr);
+ }
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_RECEIVE_ERROR, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingRetriesResetEachFrame) {
+ RunStopTracingTo(BattOrAgentState::CALIBRATION_FRAME_RECEIVED);
+
+ // Send 11 failures on two different reads: because the retry count should
+ // reset after a successful read, this should still be okay.
+ for (int i = 0; i < 11; i++) {
+ OnMessageRead(false, BATTOR_MESSAGE_TYPE_SAMPLES, nullptr);
+ }
+
+ BattOrFrameHeader frame_header1{1, 1 * sizeof(RawBattOrSample)};
+ RawBattOrSample frame1[] = {RawBattOrSample{1, 1}};
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(frame_header1, frame1, 1));
+
+ for (int i = 0; i < 11; i++) {
+ OnMessageRead(false, BATTOR_MESSAGE_TYPE_SAMPLES, nullptr);
+ }
+
+ BattOrFrameHeader frame_header2{2, 0};
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(frame_header2, nullptr, 0));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_NONE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingFailsIfSamplesReadHasWrongType) {
+ RunStopTracingTo(BattOrAgentState::SAMPLES_REQUEST_SENT);
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_CONTROL_ACK, ToCharVector(kInitAck));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_UNEXPECTED_MESSAGE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingFailsIfCalibrationFrameHasWrongLength) {
+ RunStopTracingTo(BattOrAgentState::SAMPLES_REQUEST_SENT);
+
+ // Send a calibration frame with a mismatch between the frame length in the
+ // header and the actual frame length.
+ BattOrFrameHeader cal_frame_header{0, 1 * sizeof(RawBattOrSample)};
+ RawBattOrSample cal_frame[] = {
+ RawBattOrSample{1, 1}, RawBattOrSample{2, 2},
+ };
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(cal_frame_header, cal_frame, 2));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_UNEXPECTED_MESSAGE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingFailsIfDataFrameHasWrongLength) {
+ RunStopTracingTo(BattOrAgentState::SAMPLES_REQUEST_SENT);
+
+ BattOrFrameHeader cal_frame_header{0, 1 * sizeof(RawBattOrSample)};
+ RawBattOrSample cal_frame[] = {
+ RawBattOrSample{1, 1},
+ };
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(cal_frame_header, cal_frame, 1));
+
+ // Send a data frame with a mismatch between the frame length in the
+ // header and the actual frame length.
+ BattOrFrameHeader frame_header{1, 2 * sizeof(RawBattOrSample)};
+ RawBattOrSample frame[] = {RawBattOrSample{1, 1}};
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(frame_header, frame, 1));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_UNEXPECTED_MESSAGE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingFailsIfCalibrationFrameMissingByte) {
+ RunStopTracingTo(BattOrAgentState::SAMPLES_REQUEST_SENT);
+
+ BattOrFrameHeader cal_frame_header{0, 2 * sizeof(RawBattOrSample)};
+ RawBattOrSample cal_frame[] = {
+ RawBattOrSample{1, 1}, RawBattOrSample{2, 2},
+ };
+
+ // Remove the last byte from the frame to make it invalid.
+ std::unique_ptr<vector<char>> cal_frame_bytes =
+ CreateFrame(cal_frame_header, cal_frame, 2);
+ cal_frame_bytes->pop_back();
+
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES, std::move(cal_frame_bytes));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_UNEXPECTED_MESSAGE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingFailsIfDataFrameMissingByte) {
+ RunStopTracingTo(BattOrAgentState::SAMPLES_REQUEST_SENT);
+
+ BattOrFrameHeader cal_frame_header{0, 1 * sizeof(RawBattOrSample)};
+ RawBattOrSample cal_frame[] = {
+ RawBattOrSample{1, 1},
+ };
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(cal_frame_header, cal_frame, 1));
+
+ BattOrFrameHeader frame_header{1, 1 * sizeof(RawBattOrSample)};
+ RawBattOrSample frame[] = {RawBattOrSample{1, 1}};
+
+ // Remove the last byte from the frame to make it invalid.
+ std::unique_ptr<vector<char>> frame_bytes =
+ CreateFrame(frame_header, frame, 2);
+ frame_bytes->pop_back();
+
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES, std::move(frame_bytes));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_UNEXPECTED_MESSAGE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, StopTracingFailsIfFrameArrivesOutOfOrder) {
+ RunStopTracingTo(BattOrAgentState::CALIBRATION_FRAME_RECEIVED);
+
+ BattOrFrameHeader frame_header{1, 1 * sizeof(RawBattOrSample)};
+ RawBattOrSample frame[] = {RawBattOrSample{1, 1}};
+
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(frame_header, frame, 1));
+
+ // Skip frame with sequence number 2.
+ frame_header = BattOrFrameHeader{3, 1 * sizeof(RawBattOrSample)};
+ frame[0] = RawBattOrSample{1, 1};
+
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(frame_header, frame, 1));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_UNEXPECTED_MESSAGE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, RecordClockSyncMarker) {
+ testing::InSequence s;
+ EXPECT_CALL(*GetAgent()->GetConnection(), Open());
+
+ BattOrControlMessage request_current_sample_msg{
+ BATTOR_CONTROL_MESSAGE_TYPE_READ_SAMPLE_COUNT, 0, 0};
+ EXPECT_CALL(*GetAgent()->GetConnection(),
+ SendBytes(BATTOR_MESSAGE_TYPE_CONTROL,
+ BufferEq(&request_current_sample_msg,
+ sizeof(request_current_sample_msg)),
+ sizeof(request_current_sample_msg)));
+
+ EXPECT_CALL(*GetAgent()->GetConnection(),
+ ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL_ACK));
+
+ RunRecordClockSyncMarkerTo(
+ BattOrAgentState::RECORD_CLOCK_SYNC_MARKER_COMPLETE);
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_NONE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, RecordClockSyncMarkerPrintsInStopTracingResult) {
+ // Record a clock sync marker that says CLOCK_SYNC_ID happened at sample #2.
+ RunRecordClockSyncMarkerTo(BattOrAgentState::CURRENT_SAMPLE_REQUEST_SENT);
+
+ uint32_t current_sample = 1;
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_CONTROL_ACK,
+ ToCharVector(current_sample));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_NONE, GetCommandError());
+
+ RunStopTracingTo(BattOrAgentState::SAMPLES_REQUEST_SENT);
+
+ // Now run StopTracing, and make sure that CLOCK_SYNC_ID gets printed out with
+ // sample #2 (including calibration frame samples).
+ BattOrFrameHeader cal_frame_header{0, 1 * sizeof(RawBattOrSample)};
+ RawBattOrSample cal_frame[] = {RawBattOrSample{1, 1}};
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(cal_frame_header, cal_frame, 1));
+
+ BattOrFrameHeader frame_header1{1, 2 * sizeof(RawBattOrSample)};
+ RawBattOrSample frame1[] = {RawBattOrSample{1, 1}, RawBattOrSample{2, 2}};
+
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(frame_header1, frame1, 2));
+
+ BattOrFrameHeader frame_header2{2, 0};
+
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_SAMPLES,
+ CreateFrame(frame_header2, {}, 0));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_NONE, GetCommandError());
+ EXPECT_EQ(
+ "# BattOr\n# voltage_range [-2401.2, 2398.8] mV\n# "
+ "current_range [-1200.6, 1199.4] mA\n"
+ "# sample_rate 1000 Hz, gain 1.0x\n"
+ "0.00 0.0 0.0 <MY_MARKER>\n"
+ "1.00 0.6 1.2\n",
+ GetTrace());
+
+}
+
+TEST_F(BattOrAgentTest, RecordClockSyncMarkerFailsWithoutConnection) {
+ GetAgent()->RecordClockSyncMarker("my_marker");
+ GetTaskRunner()->RunUntilIdle();
+
+ GetAgent()->OnConnectionOpened(false);
+ GetTaskRunner()->RunUntilIdle();
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_CONNECTION_FAILED, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, RecordClockSyncMarkerFailsIfSampleRequestSendFails) {
+ RunRecordClockSyncMarkerTo(BattOrAgentState::CONNECTED);
+ OnBytesSent(false);
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_SEND_ERROR, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest, RecordClockSyncMarkerRetriesCurrentSampleRead) {
+ RunRecordClockSyncMarkerTo(BattOrAgentState::CURRENT_SAMPLE_REQUEST_SENT);
+
+ OnMessageRead(false, BATTOR_MESSAGE_TYPE_CONTROL_ACK, nullptr);
+
+ uint32_t current_sample = 1;
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_CONTROL_ACK,
+ ToCharVector(current_sample));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_NONE, GetCommandError());
+}
+
+TEST_F(BattOrAgentTest,
+ RecordClockSyncMarkerFailsIfCurrentSampleReadHasWrongType) {
+ RunRecordClockSyncMarkerTo(BattOrAgentState::CURRENT_SAMPLE_REQUEST_SENT);
+
+ uint32_t current_sample = 1;
+ OnMessageRead(true, BATTOR_MESSAGE_TYPE_CONTROL,
+ ToCharVector(current_sample));
+
+ EXPECT_TRUE(IsCommandComplete());
+ EXPECT_EQ(BATTOR_ERROR_UNEXPECTED_MESSAGE, GetCommandError());
+}
+
+} // namespace battor
diff --git a/chromium/tools/battor_agent/battor_agent_unittests.isolate b/chromium/tools/battor_agent/battor_agent_unittests.isolate
new file mode 100644
index 00000000000..219fb470f6a
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_agent_unittests.isolate
@@ -0,0 +1,27 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'conditions': [
+ ['OS=="linux" or OS=="mac" or OS=="win"', {
+ 'variables': {
+ 'command': [
+ '<(DEPTH)/testing/test_env.py',
+ '<(PRODUCT_DIR)/battor_agent_unittests<(EXECUTABLE_SUFFIX)',
+ '--brave-new-test-launcher',
+ '--test-launcher-bot-mode',
+ '--asan=<(asan)',
+ '--msan=<(msan)',
+ '--tsan=<(tsan)',
+ ],
+ 'files': [
+ '<(DEPTH)/testing/test_env.py',
+ '<(PRODUCT_DIR)/battor_agent_unittests<(EXECUTABLE_SUFFIX)',
+ ],
+ },
+ }],
+ ],
+ 'includes': [
+ '../../base/base.isolate',
+ ],
+} \ No newline at end of file
diff --git a/chromium/tools/battor_agent/battor_connection.cc b/chromium/tools/battor_agent/battor_connection.cc
new file mode 100644
index 00000000000..b99910007d7
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_connection.cc
@@ -0,0 +1,18 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/battor_agent/battor_connection.h"
+
+#include "base/bind.h"
+#include "base/callback.h"
+#include "device/serial/buffer.h"
+#include "device/serial/serial_io_handler.h"
+#include "net/base/io_buffer.h"
+
+namespace battor {
+
+BattOrConnection::BattOrConnection(Listener* listener) : listener_(listener) {}
+BattOrConnection::~BattOrConnection() {}
+
+} // namespace battor
diff --git a/chromium/tools/battor_agent/battor_connection.h b/chromium/tools/battor_agent/battor_connection.h
new file mode 100644
index 00000000000..b8aed3c2565
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_connection.h
@@ -0,0 +1,79 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_BATTOR_AGENT_BATTOR_CONNECTION_H_
+#define TOOLS_BATTOR_AGENT_BATTOR_CONNECTION_H_
+
+#include <memory>
+#include <vector>
+
+#include "base/macros.h"
+#include "base/single_thread_task_runner.h"
+#include "tools/battor_agent/battor_protocol_types.h"
+
+namespace battor {
+
+// A BattOrConnection is a wrapper around the serial connection to the BattOr
+// that handles conversion of a message to and from the byte-level BattOr
+// protocol.
+//
+// At a high-level, all BattOr messages consist of:
+//
+// 0x00 (1 byte start marker)
+// uint8_t (1 byte header indicating the message type)
+// data (message data, with 0x00s and 0x01s escaped with 0x02)
+// 0x01 (1 byte end marker)
+//
+// For a more in-depth description of the protocol, see http://bit.ly/1NvNVc3.
+class BattOrConnection {
+ public:
+ // The listener interface that must be implemented in order to interact with
+ // the BattOrConnection.
+ class Listener {
+ public:
+ virtual void OnConnectionOpened(bool success) = 0;
+ virtual void OnBytesSent(bool success) = 0;
+ virtual void OnMessageRead(bool success,
+ BattOrMessageType type,
+ std::unique_ptr<std::vector<char>> bytes) = 0;
+ };
+
+ BattOrConnection(Listener* listener);
+ virtual ~BattOrConnection() = 0;
+
+ // Initializes the serial connection and calls the listener's
+ // OnConnectionOpened() when complete. This function must be called before
+ // using the BattOrConnection. If the connection is already open, calling this
+ // method immediately calls the listener's OnConnectionOpened method.
+ virtual void Open() = 0;
+ // Closes the serial connection and releases any handles being held.
+ virtual void Close() = 0;
+
+ // Sends the specified buffer over the serial connection and calls the
+ // listener's OnBytesSent() when complete. Note that bytes_to_send should not
+ // include the start, end, type, or escape bytes required by the BattOr
+ // protocol.
+ virtual void SendBytes(BattOrMessageType type,
+ const void* buffer,
+ size_t bytes_to_send) = 0;
+
+ // Gets the next message available from the serial connection, reading the
+ // correct number of bytes based on the specified message type, and calls the
+ // listener's OnMessageRead() when complete.
+ virtual void ReadMessage(BattOrMessageType type) = 0;
+
+ // Flushes the serial connection to the BattOr.
+ virtual void Flush() = 0;
+
+ protected:
+ // The listener receiving the results of the commands being executed.
+ Listener* listener_;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(BattOrConnection);
+};
+
+} // namespace battor
+
+#endif // TOOLS_BATTOR_AGENT_BATTOR_CONNECTION_H_
diff --git a/chromium/tools/battor_agent/battor_connection_impl.cc b/chromium/tools/battor_agent/battor_connection_impl.cc
new file mode 100644
index 00000000000..cce643a6e18
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_connection_impl.cc
@@ -0,0 +1,276 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/battor_agent/battor_connection_impl.h"
+
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/memory/ptr_util.h"
+#include "base/thread_task_runner_handle.h"
+#include "device/serial/buffer.h"
+#include "device/serial/serial_io_handler.h"
+#include "net/base/io_buffer.h"
+
+using std::vector;
+
+namespace battor {
+
+namespace {
+
+// Serial configuration parameters for the BattOr.
+const uint32_t kBattOrBitrate = 2000000;
+const device::serial::DataBits kBattOrDataBits =
+ device::serial::DataBits::EIGHT;
+const device::serial::ParityBit kBattOrParityBit =
+ device::serial::ParityBit::NONE;
+const device::serial::StopBits kBattOrStopBit = device::serial::StopBits::ONE;
+const bool kBattOrCtsFlowControl = true;
+const bool kBattOrHasCtsFlowControl = true;
+// The maximum BattOr message is 50kB long.
+const size_t kMaxMessageSizeBytes = 50000;
+
+// Returns the maximum number of bytes that could be required to read a message
+// of the specified type.
+size_t GetMaxBytesForMessageType(BattOrMessageType type) {
+ switch (type) {
+ case BATTOR_MESSAGE_TYPE_CONTROL:
+ return 2 * sizeof(BattOrControlMessage) + 3;
+ case BATTOR_MESSAGE_TYPE_CONTROL_ACK:
+ // The BattOr EEPROM is sent back with this type, even though it's
+ // technically more of a response than an ack. We have to make sure that
+ // we read enough bytes to accommodate this behavior.
+ return 2 * sizeof(BattOrEEPROM) + 3;
+ case BATTOR_MESSAGE_TYPE_SAMPLES:
+ return 2 * kMaxMessageSizeBytes + 3;
+ default:
+ return 0;
+ }
+}
+
+} // namespace
+
+BattOrConnectionImpl::BattOrConnectionImpl(
+ const std::string& path,
+ BattOrConnection::Listener* listener,
+ scoped_refptr<base::SingleThreadTaskRunner> file_thread_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> ui_thread_task_runner)
+ : BattOrConnection(listener),
+ path_(path),
+ file_thread_task_runner_(file_thread_task_runner),
+ ui_thread_task_runner_(ui_thread_task_runner) {}
+
+BattOrConnectionImpl::~BattOrConnectionImpl() {}
+
+void BattOrConnectionImpl::Open() {
+ if (io_handler_) {
+ OnOpened(true);
+ return;
+ }
+
+ io_handler_ = CreateIoHandler();
+
+ device::serial::ConnectionOptions options;
+ options.bitrate = kBattOrBitrate;
+ options.data_bits = kBattOrDataBits;
+ options.parity_bit = kBattOrParityBit;
+ options.stop_bits = kBattOrStopBit;
+ options.cts_flow_control = kBattOrCtsFlowControl;
+ options.has_cts_flow_control = kBattOrHasCtsFlowControl;
+
+ io_handler_->Open(path_, options,
+ base::Bind(&BattOrConnectionImpl::OnOpened, AsWeakPtr()));
+}
+
+void BattOrConnectionImpl::OnOpened(bool success) {
+ if (!success)
+ Close();
+
+ base::ThreadTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE, base::Bind(&Listener::OnConnectionOpened,
+ base::Unretained(listener_), success));
+}
+
+void BattOrConnectionImpl::Close() {
+ io_handler_ = nullptr;
+}
+
+void BattOrConnectionImpl::SendBytes(BattOrMessageType type,
+ const void* buffer,
+ size_t bytes_to_send) {
+ const char* bytes = reinterpret_cast<const char*>(buffer);
+
+ // Reserve a send buffer with enough extra bytes for the start, type, end, and
+ // escape bytes.
+ vector<char> data;
+ data.reserve(2 * bytes_to_send + 3);
+
+ data.push_back(BATTOR_CONTROL_BYTE_START);
+ data.push_back(type);
+
+ for (size_t i = 0; i < bytes_to_send; i++) {
+ if (bytes[i] == BATTOR_CONTROL_BYTE_START ||
+ bytes[i] == BATTOR_CONTROL_BYTE_END) {
+ data.push_back(BATTOR_CONTROL_BYTE_ESCAPE);
+ }
+
+ data.push_back(bytes[i]);
+ }
+
+ data.push_back(BATTOR_CONTROL_BYTE_END);
+
+ pending_write_length_ = data.size();
+ io_handler_->Write(base::WrapUnique(new device::SendBuffer(
+ data, base::Bind(&BattOrConnectionImpl::OnBytesSent, AsWeakPtr()))));
+}
+
+void BattOrConnectionImpl::ReadMessage(BattOrMessageType type) {
+ pending_read_message_type_ = type;
+ size_t max_bytes_to_read = GetMaxBytesForMessageType(type);
+
+ // Check the left-over bytes from the last read to make sure that we don't
+ // already have a full message.
+ BattOrMessageType parsed_type;
+ std::unique_ptr<vector<char>> bytes(new vector<char>());
+ bytes->reserve(max_bytes_to_read);
+
+ if (ParseMessage(&parsed_type, bytes.get())) {
+ base::ThreadTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE,
+ base::Bind(&Listener::OnMessageRead, base::Unretained(listener_), true,
+ parsed_type, base::Passed(std::move(bytes))));
+ return;
+ }
+
+ BeginReadBytes(max_bytes_to_read - already_read_buffer_.size());
+}
+
+void BattOrConnectionImpl::Flush() {
+ io_handler_->Flush();
+ already_read_buffer_.clear();
+}
+
+scoped_refptr<device::SerialIoHandler> BattOrConnectionImpl::CreateIoHandler() {
+ return device::SerialIoHandler::Create(file_thread_task_runner_,
+ ui_thread_task_runner_);
+}
+
+void BattOrConnectionImpl::BeginReadBytes(size_t max_bytes_to_read) {
+ pending_read_buffer_ =
+ make_scoped_refptr(new net::IOBuffer(max_bytes_to_read));
+
+ auto on_receive_buffer_filled =
+ base::Bind(&BattOrConnectionImpl::OnBytesRead, AsWeakPtr());
+
+ io_handler_->Read(base::WrapUnique(new device::ReceiveBuffer(
+ pending_read_buffer_, static_cast<uint32_t>(max_bytes_to_read),
+ on_receive_buffer_filled)));
+}
+
+void BattOrConnectionImpl::OnBytesRead(int bytes_read,
+ device::serial::ReceiveError error) {
+ if (bytes_read == 0 || error != device::serial::ReceiveError::NONE) {
+ // If we didn't have a message before, and we weren't able to read any
+ // additional bytes, then there's no valid message available.
+ EndReadBytes(false, BATTOR_MESSAGE_TYPE_CONTROL, nullptr);
+ return;
+ }
+
+ already_read_buffer_.insert(already_read_buffer_.end(),
+ pending_read_buffer_->data(),
+ pending_read_buffer_->data() + bytes_read);
+
+ BattOrMessageType type;
+ std::unique_ptr<vector<char>> bytes(new vector<char>());
+ bytes->reserve(GetMaxBytesForMessageType(pending_read_message_type_));
+
+ if (!ParseMessage(&type, bytes.get())) {
+ // Even after reading the max number of bytes, we still don't have a valid
+ // message.
+ EndReadBytes(false, BATTOR_MESSAGE_TYPE_CONTROL, nullptr);
+ return;
+ }
+
+ if (type != pending_read_message_type_) {
+ // We received a complete message, but it wasn't the type we were expecting.
+ EndReadBytes(false, BATTOR_MESSAGE_TYPE_CONTROL, nullptr);
+ return;
+ }
+
+ EndReadBytes(true, type, std::move(bytes));
+}
+
+void BattOrConnectionImpl::EndReadBytes(
+ bool success,
+ BattOrMessageType type,
+ std::unique_ptr<std::vector<char>> bytes) {
+ pending_read_buffer_ = nullptr;
+ base::ThreadTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE,
+ base::Bind(&Listener::OnMessageRead, base::Unretained(listener_), success,
+ type, base::Passed(std::move(bytes))));
+}
+
+bool BattOrConnectionImpl::ParseMessage(BattOrMessageType* type,
+ vector<char>* bytes) {
+ if (already_read_buffer_.size() <= 3)
+ return false;
+
+ // The first byte is the start byte.
+ if (already_read_buffer_[0] != BATTOR_CONTROL_BYTE_START) {
+ return false;
+ }
+
+ // The second byte specifies the message type.
+ *type = static_cast<BattOrMessageType>(already_read_buffer_[1]);
+
+ if (*type < static_cast<uint8_t>(BATTOR_MESSAGE_TYPE_CONTROL) ||
+ *type > static_cast<uint8_t>(BATTOR_MESSAGE_TYPE_PRINT)) {
+ return false;
+ }
+
+ // After that comes the message bytes.
+ bool escape_next_byte = false;
+ for (size_t i = 2; i < already_read_buffer_.size(); i++) {
+ char next_byte = already_read_buffer_[i];
+
+ if (escape_next_byte) {
+ bytes->push_back(next_byte);
+ escape_next_byte = false;
+ continue;
+ }
+
+ switch (next_byte) {
+ case BATTOR_CONTROL_BYTE_START:
+ // Two start bytes in a message is invalid.
+ return false;
+
+ case BATTOR_CONTROL_BYTE_END:
+ already_read_buffer_.erase(already_read_buffer_.begin(),
+ already_read_buffer_.begin() + i + 1);
+ return true;
+
+ case BATTOR_CONTROL_BYTE_ESCAPE:
+ escape_next_byte = true;
+ continue;
+
+ default:
+ bytes->push_back(next_byte);
+ }
+ }
+
+ // If we made it to the end of the read buffer and no end byte was seen, then
+ // we don't have a complete message.
+ return false;
+}
+
+void BattOrConnectionImpl::OnBytesSent(int bytes_sent,
+ device::serial::SendError error) {
+ bool success = (error == device::serial::SendError::NONE) &&
+ (pending_write_length_ == static_cast<size_t>(bytes_sent));
+ base::ThreadTaskRunnerHandle::Get()->PostTask(
+ FROM_HERE,
+ base::Bind(&Listener::OnBytesSent, base::Unretained(listener_), success));
+}
+
+} // namespace battor
diff --git a/chromium/tools/battor_agent/battor_connection_impl.h b/chromium/tools/battor_agent/battor_connection_impl.h
new file mode 100644
index 00000000000..e0ce5a20ffa
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_connection_impl.h
@@ -0,0 +1,101 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_BATTOR_AGENT_BATTOR_CONNECTION_IMPL_H_
+#define TOOLS_BATTOR_AGENT_BATTOR_CONNECTION_IMPL_H_
+
+#include <vector>
+
+#include "base/callback_forward.h"
+#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/weak_ptr.h"
+#include "device/serial/serial.mojom.h"
+#include "tools/battor_agent/battor_connection.h"
+#include "tools/battor_agent/battor_error.h"
+#include "tools/battor_agent/battor_protocol_types.h"
+
+namespace device {
+class SerialIoHandler;
+}
+namespace net {
+class IOBuffer;
+}
+
+namespace battor {
+
+// A BattOrConnectionImpl is a concrete implementation of a BattOrConnection.
+class BattOrConnectionImpl
+ : public BattOrConnection,
+ public base::SupportsWeakPtr<BattOrConnectionImpl> {
+ public:
+ BattOrConnectionImpl(
+ const std::string& path,
+ BattOrConnection::Listener* listener,
+ scoped_refptr<base::SingleThreadTaskRunner> file_thread_task_runner,
+ scoped_refptr<base::SingleThreadTaskRunner> ui_thread_task_runner);
+ ~BattOrConnectionImpl() override;
+
+ void Open() override;
+ void Close() override;
+ void SendBytes(BattOrMessageType type,
+ const void* buffer,
+ size_t bytes_to_send) override;
+ void ReadMessage(BattOrMessageType type) override;
+ void Flush() override;
+
+ protected:
+ // Overridden by the test to use a fake serial connection.
+ virtual scoped_refptr<device::SerialIoHandler> CreateIoHandler();
+
+ // IO handler capable of reading and writing from the serial connection.
+ scoped_refptr<device::SerialIoHandler> io_handler_;
+
+ private:
+ void OnOpened(bool success);
+
+ // Reads the specified number of additional bytes and adds them to the pending
+ // read buffer.
+ void BeginReadBytes(size_t bytes_to_read);
+
+ // Internal callback for when bytes are read. This method may trigger
+ // additional reads if any newly read bytes are escape bytes.
+ void OnBytesRead(int bytes_read, device::serial::ReceiveError error);
+
+ void EndReadBytes(bool success,
+ BattOrMessageType type,
+ std::unique_ptr<std::vector<char>> data);
+
+ // Pulls off the next complete message from already_read_buffer_, returning
+ // its type and contents (via out parameters) and whether a complete message
+ // was able to be read (via the return value).
+ bool ParseMessage(BattOrMessageType* type, std::vector<char>* data);
+
+ // Internal callback for when bytes are sent.
+ void OnBytesSent(int bytes_sent, device::serial::SendError error);
+
+ // The path of the BattOr.
+ std::string path_;
+
+ // All bytes that have already been read from the serial stream, but have not
+ // yet been given to the listener as a complete message.
+ std::vector<char> already_read_buffer_;
+ // The bytes that were read in the pending read.
+ scoped_refptr<net::IOBuffer> pending_read_buffer_;
+ // The type of message we're looking for in the pending read.
+ BattOrMessageType pending_read_message_type_;
+
+ // The total number of bytes that we're expecting to send.
+ size_t pending_write_length_;
+
+ // Threads needed for serial communication.
+ scoped_refptr<base::SingleThreadTaskRunner> file_thread_task_runner_;
+ scoped_refptr<base::SingleThreadTaskRunner> ui_thread_task_runner_;
+
+ DISALLOW_COPY_AND_ASSIGN(BattOrConnectionImpl);
+};
+
+} // namespace battor
+
+#endif // TOOLS_BATTOR_AGENT_BATTOR_CONNECTION_IMPL_H_
diff --git a/chromium/tools/battor_agent/battor_connection_impl_unittest.cc b/chromium/tools/battor_agent/battor_connection_impl_unittest.cc
new file mode 100644
index 00000000000..233e7e66db4
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_connection_impl_unittest.cc
@@ -0,0 +1,398 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/battor_agent/battor_connection_impl.h"
+
+#include "base/bind.h"
+#include "base/bind_helpers.h"
+#include "base/memory/ptr_util.h"
+#include "base/memory/weak_ptr.h"
+#include "base/test/test_simple_task_runner.h"
+#include "base/thread_task_runner_handle.h"
+#include "device/serial/serial.mojom.h"
+#include "device/serial/test_serial_io_handler.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/battor_agent/battor_protocol_types.h"
+
+namespace {
+
+void NullWriteCallback(int, device::serial::SendError) {}
+void NullReadCallback(int, device::serial::ReceiveError) {}
+
+} // namespace
+
+namespace battor {
+
+// TestableBattOrConnection uses a fake serial connection be testable.
+class TestableBattOrConnection : public BattOrConnectionImpl {
+ public:
+ TestableBattOrConnection(BattOrConnection::Listener* listener)
+ : BattOrConnectionImpl("/dev/test", listener, nullptr, nullptr) {}
+ scoped_refptr<device::SerialIoHandler> CreateIoHandler() override {
+ return device::TestSerialIoHandler::Create();
+ }
+
+ scoped_refptr<device::SerialIoHandler> GetIoHandler() { return io_handler_; }
+};
+
+// BattOrConnectionImplTest provides a BattOrConnection and captures the
+// results of all its commands.
+class BattOrConnectionImplTest : public testing::Test,
+ public BattOrConnection::Listener {
+ public:
+ BattOrConnectionImplTest()
+ : task_runner_(new base::TestSimpleTaskRunner()),
+ thread_task_runner_handle_(task_runner_) {}
+
+ void OnConnectionOpened(bool success) override { open_success_ = success; };
+ void OnBytesSent(bool success) override { send_success_ = success; }
+ void OnMessageRead(bool success,
+ BattOrMessageType type,
+ std::unique_ptr<std::vector<char>> bytes) override {
+ is_read_complete_ = true;
+ read_success_ = success;
+ read_type_ = type;
+ read_bytes_ = std::move(bytes);
+ }
+
+ protected:
+ void SetUp() override {
+ connection_.reset(new TestableBattOrConnection(this));
+ task_runner_->ClearPendingTasks();
+ }
+
+ void OpenConnection() {
+ connection_->Open();
+ task_runner_->RunUntilIdle();
+ }
+
+ void ReadMessage(BattOrMessageType type) {
+ is_read_complete_ = false;
+ connection_->ReadMessage(type);
+ task_runner_->RunUntilIdle();
+ }
+
+ // Reads the specified number of bytes directly from the serial connection.
+ scoped_refptr<net::IOBuffer> ReadMessageRaw(int bytes_to_read) {
+ scoped_refptr<net::IOBuffer> buffer(
+ new net::IOBuffer((size_t)bytes_to_read));
+
+ connection_->GetIoHandler()->Read(
+ base::WrapUnique(new device::ReceiveBuffer(
+ buffer, bytes_to_read, base::Bind(&NullReadCallback))));
+ task_runner_->RunUntilIdle();
+
+ return buffer;
+ }
+
+ void SendControlMessage(BattOrControlMessageType type,
+ uint16_t param1,
+ uint16_t param2) {
+ BattOrControlMessage msg{type, param1, param2};
+ connection_->SendBytes(BATTOR_MESSAGE_TYPE_CONTROL,
+ reinterpret_cast<char*>(&msg), sizeof(msg));
+ task_runner_->RunUntilIdle();
+ }
+
+ // Writes the specified bytes directly to the serial connection.
+ void SendBytesRaw(const char* data, uint16_t bytes_to_send) {
+ std::vector<char> data_vector(data, data + bytes_to_send);
+ connection_->GetIoHandler()->Write(base::WrapUnique(
+ new device::SendBuffer(data_vector, base::Bind(&NullWriteCallback))));
+ task_runner_->RunUntilIdle();
+ }
+
+ bool GetOpenSuccess() { return open_success_; }
+ bool GetSendSuccess() { return send_success_; }
+ bool IsReadComplete() { return is_read_complete_; }
+ bool GetReadSuccess() { return read_success_; }
+ BattOrMessageType GetReadType() { return read_type_; }
+ std::vector<char>* GetReadMessage() { return read_bytes_.get(); }
+
+ private:
+ std::unique_ptr<TestableBattOrConnection> connection_;
+
+ scoped_refptr<base::TestSimpleTaskRunner> task_runner_;
+ base::ThreadTaskRunnerHandle thread_task_runner_handle_;
+
+ // Result from the last connect command.
+ bool open_success_;
+ // Result from the last send command.
+ bool send_success_;
+ // Results from the last read command.
+ bool is_read_complete_;
+ bool read_success_;
+ BattOrMessageType read_type_;
+ std::unique_ptr<std::vector<char>> read_bytes_;
+};
+
+TEST_F(BattOrConnectionImplTest, InitSendsCorrectBytes) {
+ OpenConnection();
+ ASSERT_TRUE(GetOpenSuccess());
+
+ SendControlMessage(BATTOR_CONTROL_MESSAGE_TYPE_INIT, 0, 0);
+
+ const char expected_data[] = {
+ BATTOR_CONTROL_BYTE_START, BATTOR_MESSAGE_TYPE_CONTROL,
+ BATTOR_CONTROL_BYTE_ESCAPE, BATTOR_CONTROL_MESSAGE_TYPE_INIT,
+ BATTOR_CONTROL_BYTE_ESCAPE, 0x00,
+ BATTOR_CONTROL_BYTE_ESCAPE, 0x00,
+ BATTOR_CONTROL_BYTE_ESCAPE, 0x00,
+ BATTOR_CONTROL_BYTE_ESCAPE, 0x00,
+ BATTOR_CONTROL_BYTE_END,
+ };
+
+ ASSERT_TRUE(GetSendSuccess());
+ ASSERT_EQ(0, std::memcmp(ReadMessageRaw(13)->data(), expected_data, 13));
+}
+
+TEST_F(BattOrConnectionImplTest, ResetSendsCorrectBytes) {
+ OpenConnection();
+ ASSERT_TRUE(GetOpenSuccess());
+
+ SendControlMessage(BATTOR_CONTROL_MESSAGE_TYPE_RESET, 0, 0);
+
+ const char expected_data[] = {
+ BATTOR_CONTROL_BYTE_START, BATTOR_MESSAGE_TYPE_CONTROL,
+ BATTOR_CONTROL_BYTE_ESCAPE, BATTOR_CONTROL_MESSAGE_TYPE_RESET,
+ BATTOR_CONTROL_BYTE_ESCAPE, 0x00,
+ BATTOR_CONTROL_BYTE_ESCAPE, 0x00,
+ BATTOR_CONTROL_BYTE_ESCAPE, 0x00,
+ BATTOR_CONTROL_BYTE_ESCAPE, 0x00,
+ BATTOR_CONTROL_BYTE_END,
+ };
+
+ ASSERT_TRUE(GetSendSuccess());
+ ASSERT_EQ(0, std::memcmp(ReadMessageRaw(13)->data(), expected_data, 13));
+}
+
+TEST_F(BattOrConnectionImplTest, ReadMessageControlMessage) {
+ OpenConnection();
+ ASSERT_TRUE(GetOpenSuccess());
+
+ const char data[] = {
+ BATTOR_CONTROL_BYTE_START,
+ BATTOR_MESSAGE_TYPE_CONTROL,
+ BATTOR_CONTROL_BYTE_ESCAPE,
+ BATTOR_CONTROL_MESSAGE_TYPE_RESET,
+ 0x04,
+ 0x04,
+ 0x04,
+ 0x04,
+ BATTOR_CONTROL_BYTE_END,
+ };
+ SendBytesRaw(data, 9);
+ ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL);
+
+ const char expected[] = {BATTOR_CONTROL_MESSAGE_TYPE_RESET, 0x04, 0x04, 0x04,
+ 0x04};
+
+ ASSERT_TRUE(IsReadComplete());
+ ASSERT_TRUE(GetReadSuccess());
+ ASSERT_EQ(BATTOR_MESSAGE_TYPE_CONTROL, GetReadType());
+ ASSERT_EQ(0, std::memcmp(GetReadMessage()->data(), expected, 5));
+}
+
+TEST_F(BattOrConnectionImplTest, ReadMessageInvalidType) {
+ OpenConnection();
+ ASSERT_TRUE(GetOpenSuccess());
+
+ const char data[] = {
+ BATTOR_CONTROL_BYTE_START,
+ static_cast<char>(UINT8_MAX),
+ BATTOR_CONTROL_BYTE_ESCAPE,
+ BATTOR_CONTROL_MESSAGE_TYPE_RESET,
+ 0x04,
+ 0x04,
+ 0x04,
+ 0x04,
+ BATTOR_CONTROL_BYTE_END,
+ };
+ SendBytesRaw(data, 7);
+ ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL);
+
+ ASSERT_TRUE(IsReadComplete());
+ ASSERT_FALSE(GetReadSuccess());
+}
+
+TEST_F(BattOrConnectionImplTest, ReadMessageEndsMidMessageByte) {
+ OpenConnection();
+ ASSERT_TRUE(GetOpenSuccess());
+
+ const char data[] = {
+ BATTOR_CONTROL_BYTE_START,
+ BATTOR_MESSAGE_TYPE_CONTROL,
+ BATTOR_CONTROL_BYTE_ESCAPE,
+ BATTOR_CONTROL_MESSAGE_TYPE_RESET,
+ 0x04,
+ };
+ SendBytesRaw(data, 5);
+ ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL);
+
+ ASSERT_TRUE(IsReadComplete());
+ ASSERT_FALSE(GetReadSuccess());
+}
+
+TEST_F(BattOrConnectionImplTest, ReadMessageMissingEndByte) {
+ OpenConnection();
+ ASSERT_TRUE(GetOpenSuccess());
+
+ const char data[] = {
+ BATTOR_CONTROL_BYTE_START,
+ BATTOR_MESSAGE_TYPE_CONTROL,
+ BATTOR_CONTROL_BYTE_ESCAPE,
+ BATTOR_CONTROL_MESSAGE_TYPE_RESET,
+ 0x04,
+ 0x04,
+ 0x04,
+ 0x04,
+ };
+ SendBytesRaw(data, 6);
+ ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL);
+
+ ASSERT_TRUE(IsReadComplete());
+ ASSERT_FALSE(GetReadSuccess());
+}
+
+TEST_F(BattOrConnectionImplTest, ReadMessageWithEscapeCharacters) {
+ OpenConnection();
+ ASSERT_TRUE(GetOpenSuccess());
+
+ const char data[] = {
+ BATTOR_CONTROL_BYTE_START,
+ BATTOR_MESSAGE_TYPE_CONTROL,
+ BATTOR_CONTROL_BYTE_ESCAPE,
+ BATTOR_CONTROL_MESSAGE_TYPE_RESET,
+ BATTOR_CONTROL_BYTE_ESCAPE,
+ 0x00,
+ 0x04,
+ 0x04,
+ 0x04,
+ BATTOR_CONTROL_BYTE_END,
+ };
+ SendBytesRaw(data, 10);
+ ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL);
+
+ const char expected[] = {BATTOR_CONTROL_MESSAGE_TYPE_RESET, 0x00};
+
+ ASSERT_TRUE(IsReadComplete());
+ ASSERT_TRUE(GetReadSuccess());
+ ASSERT_EQ(BATTOR_MESSAGE_TYPE_CONTROL, GetReadType());
+ ASSERT_EQ(0, std::memcmp(GetReadMessage()->data(), expected, 2));
+}
+
+TEST_F(BattOrConnectionImplTest, ReadControlMessage) {
+ OpenConnection();
+ ASSERT_TRUE(GetOpenSuccess());
+
+ SendControlMessage(BATTOR_CONTROL_MESSAGE_TYPE_RESET, 4, 7);
+ ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL);
+
+ ASSERT_TRUE(IsReadComplete());
+ ASSERT_TRUE(GetReadSuccess());
+ ASSERT_EQ(BATTOR_MESSAGE_TYPE_CONTROL, GetReadType());
+
+ BattOrControlMessage* msg =
+ reinterpret_cast<BattOrControlMessage*>(GetReadMessage()->data());
+
+ ASSERT_EQ(BATTOR_CONTROL_MESSAGE_TYPE_RESET, msg->type);
+ ASSERT_EQ(4, msg->param1);
+ ASSERT_EQ(7, msg->param2);
+}
+
+TEST_F(BattOrConnectionImplTest, ReadMessageExtraBytesStoredBetweenReads) {
+ OpenConnection();
+ ASSERT_TRUE(GetOpenSuccess());
+
+ // Send a samples frame with length and sequence number of zero.
+ const char data[] = {
+ BATTOR_CONTROL_BYTE_START,
+ BATTOR_MESSAGE_TYPE_SAMPLES,
+ 0x02,
+ 0x00,
+ 0x02,
+ 0x00,
+ 0x02,
+ 0x00,
+ BATTOR_CONTROL_BYTE_END,
+ };
+ SendBytesRaw(data, 9);
+ SendControlMessage(BATTOR_CONTROL_MESSAGE_TYPE_INIT, 5, 8);
+
+ // When reading sample frames, we're forced to read lots because each frame
+ // could be up to 50kB long. By reading a really short sample frame (like the
+ // zero-length one above), the BattOrConnection is forced to store whatever
+ // extra data it finds in the serial stream - in this case, the init control
+ // message that we sent.
+ ReadMessage(BATTOR_MESSAGE_TYPE_SAMPLES);
+
+ ASSERT_TRUE(IsReadComplete());
+ ASSERT_TRUE(GetReadSuccess());
+ ASSERT_EQ(BATTOR_MESSAGE_TYPE_SAMPLES, GetReadType());
+
+ ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL);
+
+ ASSERT_TRUE(IsReadComplete());
+ ASSERT_TRUE(GetReadSuccess());
+ ASSERT_EQ(BATTOR_MESSAGE_TYPE_CONTROL, GetReadType());
+
+ BattOrControlMessage* init_msg =
+ reinterpret_cast<BattOrControlMessage*>(GetReadMessage()->data());
+
+ ASSERT_EQ(BATTOR_CONTROL_MESSAGE_TYPE_INIT, init_msg->type);
+ ASSERT_EQ(5, init_msg->param1);
+ ASSERT_EQ(8, init_msg->param2);
+}
+
+TEST_F(BattOrConnectionImplTest, ReadMessageFailsWithControlButExpectingAck) {
+ OpenConnection();
+ ASSERT_TRUE(GetOpenSuccess());
+
+ const char data[] = {
+ BATTOR_CONTROL_BYTE_START,
+ BATTOR_MESSAGE_TYPE_CONTROL_ACK,
+ BATTOR_CONTROL_BYTE_ESCAPE,
+ BATTOR_CONTROL_MESSAGE_TYPE_RESET,
+ 0x04,
+ BATTOR_CONTROL_BYTE_END,
+ };
+ SendBytesRaw(data, 6);
+ ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL);
+
+ ASSERT_TRUE(IsReadComplete());
+ ASSERT_FALSE(GetReadSuccess());
+}
+
+TEST_F(BattOrConnectionImplTest, ReadMessageFailsWithAckButExpectingControl) {
+ OpenConnection();
+ ASSERT_TRUE(GetOpenSuccess());
+
+ const char data[] = {
+ BATTOR_CONTROL_BYTE_START, BATTOR_MESSAGE_TYPE_CONTROL_ACK,
+ BATTOR_CONTROL_MESSAGE_TYPE_RESET, 0x04,
+ BATTOR_CONTROL_BYTE_END,
+ };
+ SendBytesRaw(data, 5);
+ ReadMessage(BATTOR_MESSAGE_TYPE_CONTROL);
+
+ ASSERT_TRUE(IsReadComplete());
+ ASSERT_FALSE(GetReadSuccess());
+}
+
+TEST_F(BattOrConnectionImplTest, ReadMessageControlTypePrintFails) {
+ OpenConnection();
+ ASSERT_TRUE(GetOpenSuccess());
+
+ const char data[] = {
+ BATTOR_CONTROL_BYTE_START, BATTOR_MESSAGE_TYPE_PRINT,
+ BATTOR_CONTROL_BYTE_END,
+ };
+ SendBytesRaw(data, 3);
+ ReadMessage(BATTOR_MESSAGE_TYPE_PRINT);
+
+ ASSERT_TRUE(IsReadComplete());
+ ASSERT_FALSE(GetReadSuccess());
+}
+
+} // namespace battor
diff --git a/chromium/tools/battor_agent/battor_error.cc b/chromium/tools/battor_agent/battor_error.cc
new file mode 100644
index 00000000000..fa25fe5aa97
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_error.cc
@@ -0,0 +1,31 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/battor_agent/battor_error.h"
+
+#include "base/logging.h"
+
+namespace battor {
+
+std::string BattOrErrorToString(BattOrError error) {
+ switch (error) {
+ case BATTOR_ERROR_NONE:
+ return "NONE";
+ case BATTOR_ERROR_CONNECTION_FAILED:
+ return "CONNECTION FAILED";
+ case BATTOR_ERROR_TIMEOUT:
+ return "TIMEOUT";
+ case BATTOR_ERROR_SEND_ERROR:
+ return "SEND ERROR";
+ case BATTOR_ERROR_RECEIVE_ERROR:
+ return "RECEIVE ERROR";
+ case BATTOR_ERROR_UNEXPECTED_MESSAGE:
+ return "UNEXPECTED MESSAGE";
+ }
+
+ NOTREACHED();
+ return std::string();
+}
+
+} // namespace battor
diff --git a/chromium/tools/battor_agent/battor_error.h b/chromium/tools/battor_agent/battor_error.h
new file mode 100644
index 00000000000..fef354a703e
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_error.h
@@ -0,0 +1,26 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_BATTOR_AGENT_BATTOR_ERROR_H_
+#define TOOLS_BATTOR_AGENT_BATTOR_ERROR_H_
+
+#include <string>
+
+namespace battor {
+
+// A BattOrError is an error that occurs when communicating with a BattOr.
+enum BattOrError {
+ BATTOR_ERROR_NONE,
+ BATTOR_ERROR_CONNECTION_FAILED,
+ BATTOR_ERROR_TIMEOUT,
+ BATTOR_ERROR_SEND_ERROR,
+ BATTOR_ERROR_RECEIVE_ERROR,
+ BATTOR_ERROR_UNEXPECTED_MESSAGE,
+};
+
+std::string BattOrErrorToString(BattOrError error);
+
+}
+
+#endif // TOOLS_BATTOR_AGENT_BATTOR_ERROR_H_
diff --git a/chromium/tools/battor_agent/battor_finder.cc b/chromium/tools/battor_agent/battor_finder.cc
new file mode 100644
index 00000000000..0aa54ac12b3
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_finder.cc
@@ -0,0 +1,60 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/battor_agent/battor_finder.h"
+
+#include "base/command_line.h"
+#include "base/logging.h"
+#include "device/serial/serial.mojom.h"
+#include "device/serial/serial_device_enumerator.h"
+#include "mojo/public/cpp/bindings/array.h"
+
+namespace battor {
+
+namespace {
+
+// The USB display name prefix that all BattOrs have.
+const char kBattOrDisplayNamePrefix[] = "BattOr";
+
+// The command line switch used to hard-code a BattOr path. Hard-coding
+// this path disables the normal method of finding a BattOr, which is to
+// search through serial devices for one with a matching display name.
+const char kBattOrPathSwitch[] = "battor-path";
+
+} // namespace
+
+std::string BattOrFinder::FindBattOr() {
+ std::unique_ptr<device::SerialDeviceEnumerator> serial_device_enumerator =
+ device::SerialDeviceEnumerator::Create();
+
+ mojo::Array<device::serial::DeviceInfoPtr> devices =
+ serial_device_enumerator->GetDevices();
+
+ std::string switch_specified_path =
+ base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(
+ kBattOrPathSwitch);
+ if (switch_specified_path.empty()) {
+ // If we have no switch-specified path, look for a device with the right
+ // display name.
+ for (size_t i = 0; i < devices.size(); i++) {
+ std::string display_name = devices[i]->display_name.get();
+ if (display_name.find(kBattOrDisplayNamePrefix) != std::string::npos) {
+ LOG(INFO) << "Found BattOr with display name " << display_name
+ << " at path " << devices[i]->path;
+ return devices[i]->path;
+ }
+ }
+ } else {
+ // If we have a switch-specified path, make sure it actually exists before
+ // returning it.
+ for (size_t i = 0; i < devices.size(); i++) {
+ if (devices[i]->path == switch_specified_path)
+ return switch_specified_path;
+ }
+ }
+
+ return std::string();
+}
+
+} // namespace battor
diff --git a/chromium/tools/battor_agent/battor_finder.h b/chromium/tools/battor_agent/battor_finder.h
new file mode 100644
index 00000000000..06d24ab9b77
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_finder.h
@@ -0,0 +1,24 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_BATTOR_AGENT_BATTOR_FINDER_H_
+#define TOOLS_BATTOR_AGENT_BATTOR_FINDER_H_
+
+#include <string>
+
+#include "base/macros.h"
+
+namespace battor {
+
+class BattOrFinder {
+ public:
+ // Returns the path of the first BattOr that we find.
+ static std::string FindBattOr();
+
+ DISALLOW_COPY_AND_ASSIGN(BattOrFinder);
+};
+
+} // namespace battor
+
+#endif // TOOLS_BATTOR_AGENT_BATTOR_FINDER_H_
diff --git a/chromium/tools/battor_agent/battor_protocol_types.h b/chromium/tools/battor_agent/battor_protocol_types.h
new file mode 100644
index 00000000000..6402b3f1d2b
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_protocol_types.h
@@ -0,0 +1,146 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_BATTOR_AGENT_BATTOR_PROTOCOL_H_
+#define TOOLS_BATTOR_AGENT_BATTOR_PROTOCOL_H_
+
+#include <stdint.h>
+
+namespace battor {
+
+// Control characters in the BattOr protocol.
+enum BattOrControlByte : uint8_t {
+ // Indicates the start of a message in the protocol. All other instances of
+ // this byte must be escaped (with BATTOR_SPECIAL_BYTE_ESCAPE).
+ BATTOR_CONTROL_BYTE_START = 0x00,
+ // Indicates the end of a message in the protocol. All other instances of
+ // this byte must be escaped (with BATTOR_SPECIAL_BYTE_ESCAPE).
+ BATTOR_CONTROL_BYTE_END = 0x01,
+ // Indicates that the next byte should not be interpreted as a special
+ // character, but should instead be interpreted as itself.
+ BATTOR_CONTROL_BYTE_ESCAPE = 0x02,
+};
+
+// Types of BattOr messages that can be sent.
+enum BattOrMessageType : uint8_t {
+ // Indicates a control message sent from the client to the BattOr to tell the
+ // BattOr to do something.
+ BATTOR_MESSAGE_TYPE_CONTROL = 0x03,
+ // Indicates a control message ack sent from the BattOr back to the client to
+ // signal that the BattOr received the control message.
+ BATTOR_MESSAGE_TYPE_CONTROL_ACK,
+  // Indicates that the message contains voltage and current measurements.
+ BATTOR_MESSAGE_TYPE_SAMPLES,
+ // TODO(charliea): Figure out what this is.
+ BATTOR_MESSAGE_TYPE_PRINT,
+};
+
+// Types of BattOr control messages that can be sent.
+enum BattOrControlMessageType : uint8_t {
+ // Tells the BattOr to initialize itself.
+ BATTOR_CONTROL_MESSAGE_TYPE_INIT = 0x00,
+ // Tells the BattOr to reset itself.
+ BATTOR_CONTROL_MESSAGE_TYPE_RESET,
+ // Tells the BattOr to run a self test.
+ BATTOR_CONTROL_MESSAGE_TYPE_SELF_TEST,
+ // Tells the BattOr to send its EEPROM contents over the serial connection.
+ BATTOR_CONTROL_MESSAGE_TYPE_READ_EEPROM,
+ // Sets the current measurement's gain.
+ BATTOR_CONTROL_MESSAGE_TYPE_SET_GAIN,
+ // Tells the BattOr to start taking samples and sending them over the
+ // connection.
+ BATTOR_CONTROL_MESSAGE_TYPE_START_SAMPLING_UART,
+ // Tells the BattOr to start taking samples and storing them on its SD card.
+ BATTOR_CONTROL_MESSAGE_TYPE_START_SAMPLING_SD,
+ // Tells the BattOr to start streaming the samples stored on its SD card over
+ // the connection.
+ BATTOR_CONTROL_MESSAGE_TYPE_READ_SD_UART,
+ // Tells the BattOr to send back the number of samples it's collected so far.
+ // This is used for syncing the clocks between the agent and the BattOr.
+ BATTOR_CONTROL_MESSAGE_TYPE_READ_SAMPLE_COUNT,
+};
+
+// The gain level for the BattOr to use.
+enum BattOrGain : uint8_t { BATTOR_GAIN_LOW = 0, BATTOR_GAIN_HIGH };
+
+// The data types below are packed to ensure byte-compatibility with the BattOr
+// firmware.
+#pragma pack(push, 1)
+
+// See: BattOrMessageType::BATTOR_MESSAGE_TYPE_CONTROL above.
+struct BattOrControlMessage {
+ BattOrControlMessageType type;
+ uint16_t param1;
+ uint16_t param2;
+};
+
+// See: BattOrMessageType::BATTOR_MESSAGE_TYPE_CONTROL_ACK above.
+struct BattOrControlMessageAck {
+ BattOrControlMessageType type;
+ uint8_t param;
+};
+
+// TODO(charliea, aschulman): Write better descriptions for the EEPROM fields
+// when we actually start doing the math to convert raw BattOr readings to
+// accurate ones.
+
+// The BattOr's EEPROM is persistent storage that contains information that we
+// need in order to convert raw BattOr readings into accurate voltage and
+// current measurements.
+struct BattOrEEPROM {
+ uint8_t magic[4];
+ uint16_t version;
+ char serial_num[20];
+ uint32_t timestamp;
+ float r1;
+ float r2;
+ float r3;
+ float low_gain;
+ float low_gain_correction_factor;
+ float low_gain_correction_offset;
+ uint16_t low_gain_amppot;
+ float high_gain;
+ float high_gain_correction_factor;
+ float high_gain_correction_offset;
+ uint16_t high_gain_amppot;
+ uint32_t sd_sample_rate;
+ uint16_t sd_tdiv;
+ uint16_t sd_tovf;
+ uint16_t sd_filpot;
+ uint32_t uart_sr;
+ uint16_t uart_tdiv;
+ uint16_t uart_tovf;
+ uint16_t uart_filpot;
+ uint32_t crc32;
+};
+
+// The BattOrFrameHeader begins every frame containing BattOr samples.
+struct BattOrFrameHeader {
+ // The number of frames that have preceded this one.
+ uint32_t sequence_number;
+ // The number of bytes of raw samples in this frame.
+ uint16_t length;
+};
+
+// A single BattOr sample. These samples are raw because they come directly from
+// the BattOr's analog to digital converter and comprise only part of the
+// equation to calculate meaningful voltage and current measurements.
+struct RawBattOrSample {
+ int16_t voltage_raw;
+ int16_t current_raw;
+};
+
+// A single BattOr sample after timestamp assignment and conversion to unitful
+// numbers.
+struct BattOrSample {
+ double time_ms;
+ double voltage_mV;
+ double current_mA;
+};
+
+#pragma pack(pop)
+
+} // namespace battor
+
+#endif // TOOLS_BATTOR_AGENT_BATTOR_PROTOCOL_H_
diff --git a/chromium/tools/battor_agent/battor_protocol_types_unittest.cc b/chromium/tools/battor_agent/battor_protocol_types_unittest.cc
new file mode 100644
index 00000000000..46416cdd44b
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_protocol_types_unittest.cc
@@ -0,0 +1,66 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/battor_agent/battor_protocol_types.h"
+
+#include <iostream>
+#include <string>
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+using namespace testing;
+
+namespace battor {
+
+namespace {
+
+// Prints the character array as hex to a comma-separated list.
+std::string CharArrayToFormattedString(const unsigned char* arr,
+ size_t length) {
+ std::string s;
+
+ char num_buff[6];
+ for (size_t i = 0; i < length; ++i) {
+ // We use sprintf because stringstream's hex support wants to print our
+ // characters as signed.
+ sprintf(num_buff, "0x%02hhx,", arr[i]);
+ s += num_buff;
+ }
+
+ return s;
+}
+
+const BattOrEEPROM kUnserializedEEPROM{
+ {0, 0, 0, 1}, 2, "serialno", 3, 4, 5, 6, 7, 8, 9, 10, 11,
+ 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 24,
+};
+
+// The serialized version of the above EEPROM.
+const unsigned char kSerializedEEPROM[] = {
+ 0x00, 0x00, 0x00, 0x01, 0x02, 0x00, 0x73, 0x65, 0x72, 0x69, 0x61, 0x6c,
+ 0x6e, 0x6f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
+ 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x80, 0x40, 0x00, 0x00,
+ 0xa0, 0x40, 0x00, 0x00, 0xc0, 0x40, 0x00, 0x00, 0xe0, 0x40, 0x00, 0x00,
+ 0x00, 0x41, 0x00, 0x00, 0x10, 0x41, 0x0a, 0x00, 0x00, 0x00, 0x30, 0x41,
+ 0x00, 0x00, 0x40, 0x41, 0x00, 0x00, 0x50, 0x41, 0x0e, 0x00, 0x0f, 0x00,
+ 0x00, 0x00, 0x10, 0x00, 0x11, 0x00, 0x12, 0x00, 0x13, 0x00, 0x00, 0x00,
+ 0x14, 0x00, 0x15, 0x00, 0x16, 0x00, 0x18, 0x00, 0x00, 0x00,
+};
+
+} // namespace
+
+TEST(BattOrProtocolTypeTest, EEPROMSerializesCorrectly) {
+ // The easier way to write this test would be using memcmp. However, because
+ // the EEPROM will change in the future and we'll need to update the
+ // serialized version when it does, it makes sense to print the bytes as a
+ // string that can just be copied and pasted into kSerializedEEPROM.
+ const unsigned char* eeprom_bytes =
+ reinterpret_cast<const unsigned char*>(&kUnserializedEEPROM);
+
+ ASSERT_EQ(
+ CharArrayToFormattedString(kSerializedEEPROM, sizeof(kSerializedEEPROM)),
+ CharArrayToFormattedString(eeprom_bytes, sizeof(kUnserializedEEPROM)));
+}
+
+} // namespace battor
diff --git a/chromium/tools/battor_agent/battor_sample_converter.cc b/chromium/tools/battor_agent/battor_sample_converter.cc
new file mode 100644
index 00000000000..e13afa171b0
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_sample_converter.cc
@@ -0,0 +1,108 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "battor_sample_converter.h"
+
+#include <stdlib.h>
+
+namespace battor {
+
+namespace {
+
+// The analog to digital converter converts an analog signal to a signed 12
+// bit integer, meaning that it can output numbers in the range [-2048, 2047].
+const int16_t kAnalogDigitalConverterMinValue = -2048;
+const int16_t kAnalogDigitalConverterMaxValue = 2047;
+
+// The maximum voltage that the BattOr is capable of measuring.
+const double kMaxVoltage = 1.2;
+
+// Converts a raw voltage to a unitful one.
+double ToUnitfulVoltage(double voltage_raw) {
+ // Raw voltage samples are collected directly from the BattOr's analog to
+ // digital converter, which converts numbers in the domain [-1.2V, 1.2V] to
+ // numbers in the range [-2048, 2047]. A zero voltage has the same meaning in
+ // both the domain and range. Because of this, one negative unit in that range
+ // represents a slightly smaller domain (1.2 / 2048) than one positive unit
+ // in that range (1.2 / 2047). We take this into account when reversing the
+ // transformation here.
+ int16_t extreme_value = voltage_raw >= 0 ? kAnalogDigitalConverterMaxValue
+ : kAnalogDigitalConverterMinValue;
+
+ return voltage_raw / abs(extreme_value) * kMaxVoltage;
+}
+
+} // namespace
+
+BattOrSampleConverter::BattOrSampleConverter(
+ const BattOrEEPROM& eeprom,
+ const std::vector<RawBattOrSample>& calibration_frame)
+ : eeprom_(eeprom) {
+ baseline_current_ = baseline_voltage_ = 0;
+ for (auto sample : calibration_frame) {
+ baseline_current_ += ToUnitfulVoltage(sample.current_raw);
+ baseline_voltage_ += ToUnitfulVoltage(sample.voltage_raw);
+ }
+
+ baseline_current_ /= calibration_frame.size();
+ baseline_voltage_ /= calibration_frame.size();
+}
+
+BattOrSampleConverter::~BattOrSampleConverter() {}
+
+BattOrSample BattOrSampleConverter::ToSample(const RawBattOrSample& sample,
+ size_t sample_number) const {
+ // Subtract out the baseline current and voltage that the BattOr reads even
+ // when it's not attached to anything.
+ double current = ToUnitfulVoltage(sample.current_raw) - baseline_current_;
+ double voltage = ToUnitfulVoltage(sample.voltage_raw) - baseline_voltage_;
+
+ // The BattOr has to amplify the voltage so that it's on a similar scale as
+ // the reference voltage. This is done in the circuit using resistors (with
+ // known resistances r2 and r3). Here we undo that amplification.
+ double voltage_divider = eeprom_.r3 / (eeprom_.r2 + eeprom_.r3);
+ voltage /= voltage_divider;
+
+ // Convert to millivolts.
+ voltage *= 1000;
+
+ // The BattOr multiplies the current by the gain, so we have to undo that
+ // amplification, too.
+ current /= eeprom_.low_gain;
+
+ // The current is measured indirectly and is actually given to us as a voltage
+ // across a resistor with a known resistance r1. Because
+ //
+ // V (voltage) = i (current) * R (resistance)
+ //
+ // we can get the current by dividing this voltage by the resistance.
+ current /= eeprom_.r1;
+
+ // Convert to milliamps.
+ current *= 1000;
+
+ // Each BattOr is individually factory-calibrated. Apply these calibrations.
+ current -= eeprom_.low_gain_correction_offset;
+ current /= eeprom_.low_gain_correction_factor;
+
+ double time_ms = double(sample_number) / eeprom_.sd_sample_rate * 1000;
+
+ return BattOrSample{time_ms, voltage, current};
+}
+
+BattOrSample BattOrSampleConverter::MinSample() const {
+ // Create a minimum raw sample.
+ RawBattOrSample sample_raw = {kAnalogDigitalConverterMinValue,
+ kAnalogDigitalConverterMinValue};
+ return ToSample(sample_raw, 0);
+}
+
+BattOrSample BattOrSampleConverter::MaxSample() const {
+ // Create a maximum raw sample.
+ RawBattOrSample sample_raw = {kAnalogDigitalConverterMaxValue,
+ kAnalogDigitalConverterMaxValue};
+ return ToSample(sample_raw, 0);
+}
+
+} // namespace battor
diff --git a/chromium/tools/battor_agent/battor_sample_converter.h b/chromium/tools/battor_agent/battor_sample_converter.h
new file mode 100644
index 00000000000..89e312c87b1
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_sample_converter.h
@@ -0,0 +1,55 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_BATTOR_AGENT_BATTOR_SAMPLE_CONVERTER_H_
+#define TOOLS_BATTOR_AGENT_BATTOR_SAMPLE_CONVERTER_H_
+
+#include <stddef.h>
+#include <vector>
+
+#include "base/macros.h"
+#include "tools/battor_agent/battor_protocol_types.h"
+
+namespace battor {
+
+// Converter capable of taking raw samples from the BattOr and using
+// configuration information to turn them into samples with real units.
+class BattOrSampleConverter {
+ public:
+ // Constructs a BattOrSampleConverter.
+ //
+ // - eeprom: The BattOr's EEPROM, which contains some required conversion
+ // parameters.
+ // - calibration_frame: The first frame sent back from the BattOr when
+ // streaming samples. This frame gives current and voltage measurements
+ // that ignore whatever the BattOr's connected to, and therefore provide
+ // a means for us to determine baseline current and voltage.
+ BattOrSampleConverter(const BattOrEEPROM& eeprom,
+ const std::vector<RawBattOrSample>& calibration_frame);
+ virtual ~BattOrSampleConverter();
+
+ // Converts a raw sample to a unitful one with a timestamp.
+ BattOrSample ToSample(const RawBattOrSample& sample,
+ size_t sample_number) const;
+
+ // Returns the lowest magnitude sample that the BattOr can collect.
+ BattOrSample MinSample() const;
+
+ // Returns the highest magnitude sample that the BattOr can collect.
+ BattOrSample MaxSample() const;
+
+ private:
+ // The BattOr's EEPROM, which stores some conversion parameters we need.
+ BattOrEEPROM eeprom_;
+
+ // The baseline current and voltage calculated from the calibration frame.
+ double baseline_current_;
+ double baseline_voltage_;
+
+ DISALLOW_COPY_AND_ASSIGN(BattOrSampleConverter);
+};
+
+} // namespace battor
+
+#endif // TOOLS_BATTOR_AGENT_BATTOR_SAMPLE_CONVERTER_H_
diff --git a/chromium/tools/battor_agent/battor_sample_converter_unittest.cc b/chromium/tools/battor_agent/battor_sample_converter_unittest.cc
new file mode 100644
index 00000000000..01ad5e5cdb7
--- /dev/null
+++ b/chromium/tools/battor_agent/battor_sample_converter_unittest.cc
@@ -0,0 +1,161 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/battor_agent/battor_sample_converter.h"
+
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/battor_agent/battor_protocol_types.h"
+
+using namespace testing;
+
+namespace battor {
+
+TEST(BattOrSampleConverterTest, ToSampleSimple) {
+ BattOrEEPROM eeprom;
+ eeprom.r1 = 1.0f;
+ eeprom.r2 = 1.0f;
+ eeprom.r3 = 1.0f;
+ eeprom.low_gain = 1.0f;
+ eeprom.low_gain_correction_offset = 0.0f;
+ eeprom.low_gain_correction_factor = 1.0f;
+ eeprom.sd_sample_rate = 1000;
+
+ // Create a calibration frame with a baseline voltage and current of zero.
+ std::vector<RawBattOrSample> calibration_frame;
+ calibration_frame.push_back(RawBattOrSample{0, 0});
+ BattOrSampleConverter converter(eeprom, calibration_frame);
+
+ // Set both the voltage and current to their max values.
+ RawBattOrSample raw_one{2048, 2048};
+ BattOrSample one = converter.ToSample(raw_one, 0);
+
+ ASSERT_DOUBLE_EQ(0, one.time_ms);
+ ASSERT_DOUBLE_EQ(2401.172447484123, one.voltage_mV);
+ ASSERT_DOUBLE_EQ(1200.5862237420615, one.current_mA);
+}
+
+TEST(BattOrSampleConverterTest, ToSampleNonZeroBaseline) {
+ BattOrEEPROM eeprom;
+ eeprom.r1 = 1.0f;
+ eeprom.r2 = 1.0f;
+ eeprom.r3 = 1.0f;
+ eeprom.low_gain = 1.0f;
+ eeprom.low_gain_correction_offset = 0.0f;
+ eeprom.low_gain_correction_factor = 1.0f;
+ eeprom.sd_sample_rate = 1000;
+
+  // Create a calibration frame with a nonzero baseline voltage and current.
+ std::vector<RawBattOrSample> calibration_frame;
+ calibration_frame.push_back(RawBattOrSample{1024, 1024});
+ BattOrSampleConverter converter(eeprom, calibration_frame);
+
+ // Set both the voltage and current to their max values.
+ RawBattOrSample raw_one{2048, 2048};
+ BattOrSample one = converter.ToSample(raw_one, 0);
+
+ ASSERT_DOUBLE_EQ(0, one.time_ms);
+ ASSERT_DOUBLE_EQ(1200.586223742061, one.voltage_mV);
+ ASSERT_DOUBLE_EQ(600.29311187103076, one.current_mA);
+}
+
+TEST(BattOrSampleConverterTest, ToSampleNonZeroMultiSampleBaseline) {
+ BattOrEEPROM eeprom;
+ eeprom.r1 = 1.0f;
+ eeprom.r2 = 1.0f;
+ eeprom.r3 = 1.0f;
+ eeprom.low_gain = 1.0f;
+ eeprom.low_gain_correction_offset = 0.0f;
+ eeprom.low_gain_correction_factor = 1.0f;
+ eeprom.sd_sample_rate = 1000;
+
+  // Create a calibration frame with a nonzero, multi-sample baseline.
+ std::vector<RawBattOrSample> calibration_frame;
+ calibration_frame.push_back(RawBattOrSample{1000, 1000});
+ calibration_frame.push_back(RawBattOrSample{1048, 1048});
+ BattOrSampleConverter converter(eeprom, calibration_frame);
+
+ // Set both the voltage and current to their max values.
+ RawBattOrSample raw_one{2048, 2048};
+ BattOrSample one = converter.ToSample(raw_one, 0);
+
+ ASSERT_DOUBLE_EQ(0, one.time_ms);
+ ASSERT_DOUBLE_EQ(1200.5862237420615, one.voltage_mV);
+ ASSERT_DOUBLE_EQ(600.29311187103076, one.current_mA);
+}
+
+TEST(BattOrSampleConverterTest, ToSampleRealValues) {
+ BattOrEEPROM eeprom;
+ eeprom.r1 = 10.0f;
+ eeprom.r2 = 14.0f;
+ eeprom.r3 = 17.0f;
+ eeprom.low_gain = 1.5;
+ eeprom.low_gain_correction_offset = 0.03f;
+ eeprom.low_gain_correction_factor = 4.0f;
+ eeprom.sd_sample_rate = 1000;
+
+  // Create a calibration frame with a nonzero baseline voltage and current.
+ std::vector<RawBattOrSample> calibration_frame;
+ calibration_frame.push_back(RawBattOrSample{800, 900});
+ calibration_frame.push_back(RawBattOrSample{1000, 1100});
+ BattOrSampleConverter converter(eeprom, calibration_frame);
+
+ // Set both the voltage and current to their max values.
+ RawBattOrSample raw_one{1900, 2000};
+ BattOrSample one = converter.ToSample(raw_one, 0);
+
+ ASSERT_DOUBLE_EQ(0, one.time_ms);
+ ASSERT_DOUBLE_EQ(1068.996209287540, one.voltage_mV);
+ ASSERT_DOUBLE_EQ(9.7628957011935285, one.current_mA);
+}
+
+TEST(BattOrSampleConverterTest, ToSampleRealNegativeValues) {
+ BattOrEEPROM eeprom;
+ eeprom.r1 = 10.0f;
+ eeprom.r2 = 14.0f;
+ eeprom.r3 = 17.0f;
+ eeprom.low_gain = 1.5;
+ eeprom.low_gain_correction_offset = 0.03f;
+ eeprom.low_gain_correction_factor = 4.0f;
+ eeprom.sd_sample_rate = 1000;
+
+  // Create a calibration frame with a nonzero baseline voltage and current.
+ std::vector<RawBattOrSample> calibration_frame;
+ calibration_frame.push_back(RawBattOrSample{800, 900});
+ BattOrSampleConverter converter(eeprom, calibration_frame);
+
+  // Set both the voltage and current to large negative values.
+ RawBattOrSample raw_one{-1900, -2000};
+ BattOrSample one = converter.ToSample(raw_one, 0);
+
+ ASSERT_DOUBLE_EQ(0, one.time_ms);
+ ASSERT_DOUBLE_EQ(-2885.2980205462577, one.voltage_mV);
+ ASSERT_DOUBLE_EQ(-28.332106130755665, one.current_mA);
+}
+
+TEST(BattOrSampleConverterTest, ToSampleMultipleSamples) {
+ BattOrEEPROM eeprom;
+ eeprom.r1 = 1.0f;
+ eeprom.r2 = 1.0f;
+ eeprom.r3 = 1.0f;
+ eeprom.low_gain = 1.0f;
+ eeprom.low_gain_correction_offset = 0.0f;
+ eeprom.low_gain_correction_factor = 1.0f;
+ eeprom.sd_sample_rate = 50;
+
+ std::vector<RawBattOrSample> calibration_frame;
+ calibration_frame.push_back(RawBattOrSample{0, 0});
+ BattOrSampleConverter converter(eeprom, calibration_frame);
+
+ BattOrSample one = converter.ToSample(RawBattOrSample{0, 0}, 0);
+ BattOrSample two = converter.ToSample(RawBattOrSample{0, 0}, 1);
+ BattOrSample three = converter.ToSample(RawBattOrSample{0, 0}, 2);
+
+ ASSERT_DOUBLE_EQ(0, one.time_ms);
+ ASSERT_DOUBLE_EQ(20, two.time_ms);
+ ASSERT_DOUBLE_EQ(40, three.time_ms);
+}
+
+} // namespace battor
diff --git a/chromium/tools/binary_size/OWNERS b/chromium/tools/binary_size/OWNERS
new file mode 100644
index 00000000000..c598cde507c
--- /dev/null
+++ b/chromium/tools/binary_size/OWNERS
@@ -0,0 +1,3 @@
+andrewhayden@chromium.org
+bratell@opera.com
+primiano@chromium.org
diff --git a/chromium/tools/binary_size/PRESUBMIT.py b/chromium/tools/binary_size/PRESUBMIT.py
new file mode 100644
index 00000000000..4f7095ad987
--- /dev/null
+++ b/chromium/tools/binary_size/PRESUBMIT.py
@@ -0,0 +1,32 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""binary_size presubmit script
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details about the presubmit API built into depot_tools.
+"""
+
+def CommonChecks(input_api, output_api):
+ output = []
+ output.extend(input_api.canned_checks.RunPylint(input_api, output_api))
+ output.extend(
+ input_api.canned_checks.RunUnitTestsInDirectory(
+ input_api, output_api,
+ input_api.PresubmitLocalPath(),
+ whitelist=[r'.+_unittest\.py$']))
+
+ if input_api.is_committing:
+ output.extend(input_api.canned_checks.PanProjectChecks(input_api,
+ output_api,
+ owners_check=False))
+ return output
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ return CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ return CommonChecks(input_api, output_api)
diff --git a/chromium/tools/binary_size/README.txt b/chromium/tools/binary_size/README.txt
new file mode 100644
index 00000000000..8deeb303afe
--- /dev/null
+++ b/chromium/tools/binary_size/README.txt
@@ -0,0 +1,152 @@
+================================================================================
+ __________ .__
+ \______ \ |__| ____ _____ _______ ___.__.
+ | | _/ | | / \ \__ \ \_ __ \ < | |
+ | | \ | | | | \ / __ \_ | | \/ \___ |
+ |______ / |__| |___| / (____ / |__| / ____|
+ \/ \/ \/ \/
+ _________ .__ ___________ .__
+ / _____/ |__| ________ ____ \__ ___/ ____ ____ | |
+ \_____ \ | | \___ / _/ __ \ | | / _ \ / _ \ | |
+ / \ | | / / \ ___/ | | ( <_> ) ( <_> ) | |__
+ /_______ / |__| /_____ \ \___ > |____| \____/ \____/ |____/
+ \/ \/ \/
+================================================================================
+
+--------------------------------------------------------------------------------
+Introduction
+--------------------------------------------------------------------------------
+The ever-increasing size of binaries is a problem for everybody. Increased
+binary size means longer download times and a bigger on-disk footprint after
+installation. Mobile devices suffer the worst, as they frequently have
+sub-optimal connectivity and limited storage capacity. Developers currently
+have almost no visibility into how the space in the existing binaries is
+divided nor how their contributions change the space within those binaries.
+The first step to reducing the size of binaries is to make the size information
+accessible to everyone so that developers can take action.
+
+There are two parts to the Binary Size Tool:
+1. run_binary_size_analysis.py
+ This script will produce a detailed breakdown of a binary, including an HTML
+   report and (optionally) a detailed "nm"-formatted dump of all the symbols
+ with their sources resolved by addr2line. This tool is great for finding the
+ bloat in binaries.
+
+2. explain_binary_size_delta.py
+ This script takes the "nm"-formatted input from two runs of the first tool
+ (run_binary_size_analysis.py) and produces a detailed breakdown of how the
+ symbols have changed between the two binaries that were originally analyzed.
+ The breakdown shows the size changes of symbols as well as which symbols have
+ been added, removed, or changed. This tool is great for thoroughly
+ characterizing the size change resulting from a code change.
+
+ Because this tool relies solely upon the "nm" output from
+ run_binary_size_analysis.py, it can be run at any time even if the source
+ code described by the "nm" output is no longer available. It is also much
+ faster than run_binary_size_analysis.py, typically completing in a few
+ seconds for even very large binaries.
+
+--------------------------------------------------------------------------------
+How to Run: run_binary_size_analysis.py
+--------------------------------------------------------------------------------
+Running the tool is fairly simple. For the sake of this example we will
+pretend that you are building the Content Shell APK for Android.
+
+ 1. Build your product as you normally would*, e.g.:
+ ninja -C out/Release -j 100 content_shell_apk
+
+ * For results that are as spatially accurate as possible, you should always
+ build with a Release configuration so that the end product is as close to
+ the real thing as possible. However, it can sometimes be useful to improve
+ consistency and accuracy of symbol lookup even if it perturbs the overall
+ accuracy of the tool. Consider adding these GN args:
+ is_clang = true
+ Anecdotally produces more stable symbol names over time.
+ enable_profiling = true
+ Anecdotally makes symbol lookup more accurate (note that it
+ doesn't work with clang on ARM/Android builds, see
+         https://crbug.com/417323 for more information).
+ enable_full_stack_frames_for_profiling = true
+ With enable_profiling, further improves symbol lookup accuracy but
+ will completely disable inlining, decreasing spatial accuracy.
+
+ 2. Run the tool specifying the library and the output report directory.
+ This command will run the analysis on the Content Shell native library for
+ Android, producing an HTML report in /tmp/report and saving the NM output
+ (useful for re-running the tool or analyzing deltas between two builds)
+ under /tmp/report/nm.out:
+ tools/binary_size/run_binary_size_analysis.py \
+ --library out/Release/lib.unstripped/libcontent_shell_content_view.so \
+ --destdir /tmp/report
+
+Of course, there are additional options that you can see by running the tool
+with "--help".
+
+This whole process takes about an hour on a modern (circa 2014) machine. If you
+have LOTS of RAM, you can use the "--jobs" argument to add more addr2line
+workers; doing so will *greatly* reduce the processing time but will devour
+system memory. If you've got the horsepower, 10 workers can thrash through the
+binary in about 5 minutes at a cost of around 60 GB of RAM. The default number
+of jobs is 1. Patches to job number auto-selection are welcome!
+
+When the tool finishes its work you'll find an HTML report in the output
+directory that you specified with "--destdir". Open the index.html file in your
+*cough* browser of choice *cough* and have a look around. The index.html page
+is likely to evolve over time, but will always be your starting point for
+investigation. From here you'll find links to various views of the data such
+as treemap visualizations, overall statistics and "top n" lists of various
+kinds.
+
+The report is completely standalone. No external resources are required, so the
+report may be saved and viewed offline with no problems.
+
+--------------------------------------------------------------------------------
+How to Run: explain_binary_size_delta.py
+--------------------------------------------------------------------------------
+Continuing the example, assume that run_binary_size_analysis.py has been run
+both before and after a code change and that the "nm.out" files have been saved
+to "nm.out.before" and "nm.out.after". To generate an explanation of the symbol
+differences between the two runs:
+
+ tools/binary_size/explain_binary_size_delta.py \
+ --nm1 nm.out.before --nm2 nm.out.after
+
+This will output a concise summary of the symbol changes between the two
+libraries. Much more information is available by specifying flags like
+"--showsources" and (for the comprehensive answer) "--showsymbols". Use "--help"
+for a full list of options.
+
+Unlike run_binary_size_analysis.py, this tool doesn't (yet) produce any kind of
+HTML report. Contributions are welcome.
+
+--------------------------------------------------------------------------------
+Caveats
+--------------------------------------------------------------------------------
+The tool is not perfect and has several shortcomings:
+
+ * Not all space in the binary is accounted for. The causes are still under
+ investigation, but there are of course sections in the binary that do not
+ contain symbol information, etceteras. The vast majority of the binary is
+ generally symbols, though, so the discrepancy should be very small.
+ * When dealing with inlining and such, the size cost is attributed to the
+ resource in which the code gets inlined. Depending upon your goals for
+ analysis, this may be either good or bad; fundamentally, the more trickery
+ that the compiler and/or linker do, the less simple the relationship
+ between the original source and the resultant binary.
+ * The Javascript code in the HTML report assumes code lives in Chromium for
+ generated hyperlinks and will not hyperlink any file that starts with the
+ substring "out".
+ * There is as yet no way to configure project-specific bindings for symbols/
+ source files to locations on disk. Such configuration would be useful for
+ manually deduping and disambiguating results. Some day, hopefully, this will
+ be supported.
+
+--------------------------------------------------------------------------------
+Feature Requests and Bug Reports
+--------------------------------------------------------------------------------
+Please file bugs and feature requests here, making sure to use the label
+"Tools-BinarySize":
+ https://code.google.com/p/chromium/issues/entry?labels=Tools-BinarySize
+
+View all open issues here:
+ https://code.google.com/p/chromium/issues/list?can=2&q=label:Tools-BinarySize
diff --git a/chromium/tools/binary_size/binary_size_utils.py b/chromium/tools/binary_size/binary_size_utils.py
new file mode 100644
index 00000000000..67335c2b6d3
--- /dev/null
+++ b/chromium/tools/binary_size/binary_size_utils.py
@@ -0,0 +1,71 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common utilities for tools that deal with binary size information.
+"""
+
+import logging
+import re
+
+
+def ParseNm(nm_lines):
+ """Parse nm output, returning data for all relevant (to binary size)
+ symbols and ignoring the rest.
+
+ Args:
+ nm_lines: an iterable over lines of nm output.
+
+ Yields:
+ (symbol name, symbol type, symbol size, source file path).
+
+ Path may be None if nm couldn't figure out the source file.
+ """
+
+ # Match lines with size, symbol, optional location, optional discriminator
+ sym_re = re.compile(r'^([0-9a-f]{8,}) ' # address (8+ hex digits)
+ '([0-9a-f]{8,}) ' # size (8+ hex digits)
+ '(.) ' # symbol type, one character
+ '([^\t]+)' # symbol name, separated from next by tab
+ '(?:\t(.*):[\d\?]+)?.*$') # location
+ # Match lines with addr but no size.
+ addr_re = re.compile(r'^[0-9a-f]{8,} (.) ([^\t]+)(?:\t.*)?$')
+ # Match lines that don't have an address at all -- typically external symbols.
+ noaddr_re = re.compile(r'^ {8,} (.) (.*)$')
+ # Match lines with no symbol name, only addr and type
+ addr_only_re = re.compile(r'^[0-9a-f]{8,} (.)$')
+
+ seen_lines = set()
+ for line in nm_lines:
+ line = line.rstrip()
+ if line in seen_lines:
+ # nm outputs identical lines at times. We don't want to treat
+ # those as distinct symbols because that would make no sense.
+ continue
+ seen_lines.add(line)
+ match = sym_re.match(line)
+ if match:
+ address, size, sym_type, sym = match.groups()[0:4]
+ size = int(size, 16)
+ if sym_type in ('B', 'b'):
+ continue # skip all BSS for now.
+ path = match.group(5)
+ yield sym, sym_type, size, path, address
+ continue
+ match = addr_re.match(line)
+ if match:
+ # sym_type, sym = match.groups()[0:2]
+ continue # No size == we don't care.
+ match = noaddr_re.match(line)
+ if match:
+ sym_type, sym = match.groups()
+ if sym_type in ('U', 'w'):
+ continue # external or weak symbol
+ match = addr_only_re.match(line)
+ if match:
+ continue # Nothing to do.
+
+
+ # If we reach this part of the loop, there was something in the
+ # line that we didn't expect or recognize.
+ logging.warning('nm output parser failed to parse: %s', repr(line))
diff --git a/chromium/tools/binary_size/explain_binary_size_delta.py b/chromium/tools/binary_size/explain_binary_size_delta.py
new file mode 100755
index 00000000000..45c1236271f
--- /dev/null
+++ b/chromium/tools/binary_size/explain_binary_size_delta.py
@@ -0,0 +1,484 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Describe the size difference of two binaries.
+
+Generates a description of the size difference of two binaries based
+on the difference of the size of various symbols.
+
+This tool needs "nm" dumps of each binary with full symbol
+information. You can obtain the necessary dumps by running the
+run_binary_size_analysis.py script upon each binary, with the
+"--nm-out" parameter set to the location in which you want to save the
+dumps. Example:
+
+ # obtain symbol data from first binary in /tmp/nm1.dump
+ cd $CHECKOUT1_SRC
+ ninja -C out/Release binary_size_tool
+ tools/binary_size/run_binary_size_analysis \
+ --library <path_to_library> \
+ --destdir /tmp/throwaway \
+ --nm-out /tmp/nm1.dump
+
+ # obtain symbol data from second binary in /tmp/nm2.dump
+ cd $CHECKOUT2_SRC
+ ninja -C out/Release binary_size_tool
+ tools/binary_size/run_binary_size_analysis \
+ --library <path_to_library> \
+ --destdir /tmp/throwaway \
+ --nm-out /tmp/nm2.dump
+
+ # cleanup useless files
+ rm -r /tmp/throwaway
+
+ # run this tool
+ explain_binary_size_delta.py --nm1 /tmp/nm1.dump --nm2 /tmp/nm2.dump
+"""
+
+import collections
+from collections import Counter
+from math import ceil
+import operator
+import optparse
+import os
+import sys
+
+import binary_size_utils
+
+
def CalculateSharedAddresses(symbols):
  """Tallies how many symbols occupy each memory address.

  Returns a Counter mapping address -> number of symbols seen at that
  address, so result[address] == 1 means the address is unshared."""
  return Counter(address for _, _, _, _, address in symbols)
+
+
def CalculateEffectiveSize(share_count, address, symbol_size):
  """Returns the portion of symbol_size attributable to one symbol.

  When several symbols share the machine code/data at an address,
  blaming each of them for the full raw size would overestimate the
  true cost of that block, so the raw size is divided (rounding up)
  among the occupants.
  """
  occupants = share_count[address]
  if occupants == 1:
    return symbol_size
  assert occupants > 1
  return int(ceil(symbol_size / float(occupants)))
+
class SymbolDelta(object):
  """Holds the before/after sizes of one symbol plus sharing metadata."""
  def __init__(self, shared):
    # Sizes are filled in by the caller; None means "absent on that side".
    self.old_size = None
    self.new_size = None
    self.shares_space_with_other_symbols = shared

  def __eq__(self, other):
    mine = (self.old_size, self.new_size,
            self.shares_space_with_other_symbols)
    theirs = (other.old_size, other.new_size,
              other.shares_space_with_other_symbols)
    return mine == theirs

  def __ne__(self, other):
    return not self.__eq__(other)

  def copy_symbol_delta(self):
    duplicate = SymbolDelta(self.shares_space_with_other_symbols)
    duplicate.old_size = self.old_size
    duplicate.new_size = self.new_size
    return duplicate
+
class DeltaInfo(SymbolDelta):
  """Summary of the change for one symbol between two instances."""
  def __init__(self, file_path, symbol_type, symbol_name, shared):
    SymbolDelta.__init__(self, shared)
    self.file_path = file_path
    self.symbol_type = symbol_type
    self.symbol_name = symbol_name

  def __eq__(self, other):
    same_identity = (self.file_path == other.file_path and
                     self.symbol_type == other.symbol_type and
                     self.symbol_name == other.symbol_name)
    return same_identity and SymbolDelta.__eq__(self, other)

  def __ne__(self, other):
    return not self.__eq__(other)

  def ExtractSymbolDelta(self):
    """Returns a copy of just the SymbolDelta part of this DeltaInfo."""
    return SymbolDelta.copy_symbol_delta(self)
+
def Compare(symbols1, symbols2):
  """Executes a comparison of the symbols in symbols1 and symbols2.

  Each symbol is a (name, type, size, path, address) tuple as produced
  by binary_size_utils.ParseNm.  Sizes are adjusted for address sharing
  (CalculateEffectiveSize) before the two populations are diffed.

  Returns:
    tuple of lists: (added, removed, changed, unchanged)
    where each list contains DeltaInfo objects.
  """
  added = [] # tuples
  removed = [] # tuples
  changed = [] # tuples
  unchanged = [] # tuples

  # cacheN maps (file_path, symbol_type) -> {symbol_name: [(size, shared)]}.
  cache1 = {}
  cache2 = {}
  # Make a map of (file, symbol_type) : (symbol_name, effective_symbol_size)
  share_count1 = CalculateSharedAddresses(symbols1)
  share_count2 = CalculateSharedAddresses(symbols2)
  for cache, symbols, share_count in ((cache1, symbols1, share_count1),
                                      (cache2, symbols2, share_count2)):
    for symbol_name, symbol_type, symbol_size, file_path, address in symbols:
      if 'vtable for ' in symbol_name:
        symbol_type = '@' # hack to categorize these separately
      if file_path:
        file_path = os.path.normpath(file_path)
        if sys.platform.startswith('win'):
          file_path = file_path.replace('\\', '/')
      else:
        file_path = '(No Path)'
      # Take into consideration that multiple symbols might share the same
      # block of code.
      effective_symbol_size = CalculateEffectiveSize(share_count, address,
                                                     symbol_size)
      key = (file_path, symbol_type)
      bucket = cache.setdefault(key, {})
      size_list = bucket.setdefault(symbol_name, [])
      # The boolean records whether the size was adjusted for sharing.
      size_list.append((effective_symbol_size,
                        effective_symbol_size != symbol_size))

  # Now diff them. We iterate over the elements in cache1. For each symbol
  # that we find in cache2, we record whether it was deleted, changed, or
  # unchanged. We then remove it from cache2; all the symbols that remain
  # in cache2 at the end of the iteration over cache1 are the 'new' symbols.
  for key, bucket1 in cache1.items():
    bucket2 = cache2.get(key)
    file_path, symbol_type = key;
    if not bucket2:
      # A file was removed. Everything in bucket1 is dead.
      for symbol_name, symbol_size_list in bucket1.items():
        for (symbol_size, shared) in symbol_size_list:
          delta_info = DeltaInfo(file_path, symbol_type, symbol_name, shared)
          delta_info.old_size = symbol_size
          removed.append(delta_info)
    else:
      # File still exists, look for changes within.
      for symbol_name, symbol_size_list in bucket1.items():
        size_list2 = bucket2.get(symbol_name)
        if size_list2 is None:
          # Symbol no longer exists in bucket2.
          for (symbol_size, shared) in symbol_size_list:
            delta_info = DeltaInfo(file_path, symbol_type, symbol_name, shared)
            delta_info.old_size = symbol_size
            removed.append(delta_info)
        else:
          del bucket2[symbol_name] # Symbol is not new, delete from cache2.
          if len(symbol_size_list) == 1 and len(size_list2) == 1:
            # Common case: the symbol appears exactly once on each side.
            symbol_size, shared1 = symbol_size_list[0]
            size2, shared2 = size_list2[0]
            delta_info = DeltaInfo(file_path, symbol_type, symbol_name,
                                   shared1 or shared2)
            delta_info.old_size = symbol_size
            delta_info.new_size = size2
            if symbol_size != size2:
              # Symbol has changed size in bucket.
              changed.append(delta_info)
            else:
              # Symbol is unchanged.
              unchanged.append(delta_info)
          else:
            # Complex comparison for when a symbol exists multiple times
            # in the same file (where file can be "unknown file").
            symbol_size_counter = collections.Counter(symbol_size_list)
            delta_counter = collections.Counter(symbol_size_list)
            delta_counter.subtract(size_list2)
            # delta > 0 means that (size, shared) combination lost
            # instances; delta < 0 means it gained; overlap is unchanged.
            for delta_counter_key in sorted(delta_counter.keys()):
              delta = delta_counter[delta_counter_key]
              unchanged_count = symbol_size_counter[delta_counter_key]
              (symbol_size, shared) = delta_counter_key
              if delta > 0:
                unchanged_count -= delta
              for _ in range(unchanged_count):
                delta_info = DeltaInfo(file_path, symbol_type,
                                       symbol_name, shared)
                delta_info.old_size = symbol_size
                delta_info.new_size = symbol_size
                unchanged.append(delta_info)
              if delta > 0: # Used to be more of these than there is now.
                for _ in range(delta):
                  delta_info = DeltaInfo(file_path, symbol_type,
                                         symbol_name, shared)
                  delta_info.old_size = symbol_size
                  removed.append(delta_info)
              elif delta < 0: # More of this (symbol,size) now.
                for _ in range(-delta):
                  delta_info = DeltaInfo(file_path, symbol_type,
                                         symbol_name, shared)
                  delta_info.new_size = symbol_size
                  added.append(delta_info)

    # NOTE(review): when key is absent from cache2, bucket2 is None here and
    # len(None) raises TypeError -- this check looks like it belongs inside
    # the else-branch above; confirm against upstream.  Also note the code
    # deletes from cache1 (not cache2, as the trailing comment says);
    # cache1 is not read again afterwards, so the deletion is a no-op.
    if len(bucket2) == 0:
      del cache1[key] # Entire bucket is empty, delete from cache2

  # We have now analyzed all symbols that are in cache1 and removed all of
  # the encountered symbols from cache2. What's left in cache2 is the new
  # symbols.
  # NOTE(review): iteritems() is Python 2-only; the loop over cache1 above
  # uses the portable items().
  for key, bucket2 in cache2.iteritems():
    file_path, symbol_type = key;
    for symbol_name, symbol_size_list in bucket2.items():
      for (symbol_size, shared) in symbol_size_list:
        delta_info = DeltaInfo(file_path, symbol_type, symbol_name, shared)
        delta_info.new_size = symbol_size
        added.append(delta_info)
  return (added, removed, changed, unchanged)
+
+
def DeltaStr(number):
  """Formats a numeric delta as a string with an explicit sign: positive
  values gain a '+' prefix, negative values already carry their '-'."""
  text = str(number)
  return '+' + text if number > 0 else text
+
+
def SharedInfoStr(symbol_info):
  """Returns a space-prefixed note that the sizes were adjusted because of
  memory shared between symbols, or an empty string when no sharing
  occurred."""
  if not symbol_info.shares_space_with_other_symbols:
    return ""
  return " (adjusted sizes because of memory sharing)"
+
class CrunchStatsData(object):
  """Accumulator for summary data about one category of symbol changes."""
  def __init__(self, symbols):
    # The DeltaInfo objects belonging to this category.
    self.symbols = symbols
    # Source paths touched by this category.
    self.sources = set()
    # Aggregate size in bytes before and after the change.
    self.before_size = 0
    self.after_size = 0
    # Maps file path -> [(symbol_name, symbol_type, SymbolDelta), ...].
    self.symbols_by_path = {}
+
+
def CrunchStats(added, removed, changed, unchanged, showsources, showsymbols):
  """Outputs to stdout a summary of changes based on the symbol lists.

  Args:
    added, removed, changed, unchanged: lists of DeltaInfo objects as
      produced by Compare().
    showsources: if True, also print per-source statistics.
    showsymbols: if True, also print per-symbol details for each source.

  This function originally mixed Python 2 print statements with
  single-argument print(...) calls; it now uses the print(...) form
  throughout, which produces identical output under Python 2 and 3.
  """
  # Split changed into grown and shrunk because that is easier to
  # discuss.
  grown = []
  shrunk = []
  for item in changed:
    if item.old_size < item.new_size:
      grown.append(item)
    else:
      shrunk.append(item)

  new_symbols = CrunchStatsData(added)
  removed_symbols = CrunchStatsData(removed)
  grown_symbols = CrunchStatsData(grown)
  shrunk_symbols = CrunchStatsData(shrunk)
  sections = [new_symbols, removed_symbols, grown_symbols, shrunk_symbols]
  # Accumulate per-section byte totals and group symbols by source path.
  for section in sections:
    for item in section.symbols:
      section.sources.add(item.file_path)
      if item.old_size is not None:
        section.before_size += item.old_size
      if item.new_size is not None:
        section.after_size += item.new_size
      bucket = section.symbols_by_path.setdefault(item.file_path, [])
      bucket.append((item.symbol_name, item.symbol_type,
                     item.ExtractSymbolDelta()))

  total_change = sum(s.after_size - s.before_size for s in sections)
  summary = 'Total change: %s bytes' % DeltaStr(total_change)
  print(summary)
  print('=' * len(summary))
  for section in sections:
    if not section.symbols:
      continue
    if section.before_size == 0:
      description = ('added, totalling %s bytes' % DeltaStr(section.after_size))
    elif section.after_size == 0:
      description = ('removed, totalling %s bytes' %
                     DeltaStr(-section.before_size))
    else:
      if section.after_size > section.before_size:
        type_str = 'grown'
      else:
        type_str = 'shrunk'
      description = ('%s, for a net change of %s bytes '
                     '(%d bytes before, %d bytes after)' %
                     (type_str, DeltaStr(section.after_size - section.before_size),
                      section.before_size, section.after_size))
    print(' %d %s across %d sources' %
          (len(section.symbols), description, len(section.sources)))

  maybe_unchanged_sources = set()
  unchanged_symbols_size = 0
  for item in unchanged:
    maybe_unchanged_sources.add(item.file_path)
    unchanged_symbols_size += item.old_size # == item.new_size
  print(' %d unchanged, totalling %d bytes' %
        (len(unchanged), unchanged_symbols_size))

  # High level analysis, always output.
  unchanged_sources = maybe_unchanged_sources
  for section in sections:
    unchanged_sources = unchanged_sources - section.sources
  new_sources = (new_symbols.sources -
                 maybe_unchanged_sources -
                 removed_symbols.sources)
  removed_sources = (removed_symbols.sources -
                     maybe_unchanged_sources -
                     new_symbols.sources)
  partially_changed_sources = (grown_symbols.sources |
      shrunk_symbols.sources | new_symbols.sources |
      removed_symbols.sources) - removed_sources - new_sources
  all_files = set()
  for section in sections:
    all_files = all_files | section.sources
  all_files = all_files | maybe_unchanged_sources
  print('Source stats:')
  print(' %d sources encountered.' % len(all_files))
  print(' %d completely new.' % len(new_sources))
  print(' %d removed completely.' % len(removed_sources))
  print(' %d partially changed.' % len(partially_changed_sources))
  print(' %d completely unchanged.' % len(unchanged_sources))
  # Every source must fall into exactly one of the categories above.
  remainder = (all_files - new_sources - removed_sources -
               partially_changed_sources - unchanged_sources)
  assert len(remainder) == 0

  if not showsources:
    return # Per-source analysis, only if requested
  print('Per-source Analysis:')
  delta_by_path = {}
  for section in sections:
    for path in section.symbols_by_path:
      entry = delta_by_path.get(path)
      if not entry:
        entry = {'plus': 0, 'minus': 0}
        delta_by_path[path] = entry
      for symbol_name, symbol_type, symbol_delta in \
        section.symbols_by_path[path]:
        if symbol_delta.old_size is None:
          delta = symbol_delta.new_size
        elif symbol_delta.new_size is None:
          delta = -symbol_delta.old_size
        else:
          delta = symbol_delta.new_size - symbol_delta.old_size

        if delta > 0:
          entry['plus'] += delta
        else:
          entry['minus'] += (-1 * delta)

  def delta_sort_key(item):
    # Sort sources by their net growth, largest gain first.
    _path, size_data = item
    growth = size_data['plus'] - size_data['minus']
    return growth

  # .items() rather than the Python 2-only .iteritems(); behavior is
  # identical and consistent with the rest of this file.
  for path, size_data in sorted(delta_by_path.items(), key=delta_sort_key,
                                reverse=True):
    gain = size_data['plus']
    loss = size_data['minus']
    delta = size_data['plus'] - size_data['minus']
    header = ' %s - Source: %s - (gained %d, lost %d)' % (DeltaStr(delta),
                                                          path, gain, loss)
    divider = '-' * len(header)
    print('')
    print(divider)
    print(header)
    print(divider)
    if showsymbols:
      def ExtractNewSize(tup):
        symbol_delta = tup[2]
        return symbol_delta.new_size
      def ExtractOldSize(tup):
        symbol_delta = tup[2]
        return symbol_delta.old_size
      if path in new_symbols.symbols_by_path:
        print(' New symbols:')
        for symbol_name, symbol_type, symbol_delta in \
            sorted(new_symbols.symbols_by_path[path],
                   key=ExtractNewSize,
                   reverse=True):
          print(' %8s: %s type=%s, size=%d bytes%s' %
                (DeltaStr(symbol_delta.new_size), symbol_name, symbol_type,
                 symbol_delta.new_size, SharedInfoStr(symbol_delta)))
      if path in removed_symbols.symbols_by_path:
        print(' Removed symbols:')
        for symbol_name, symbol_type, symbol_delta in \
            sorted(removed_symbols.symbols_by_path[path],
                   key=ExtractOldSize):
          print(' %8s: %s type=%s, size=%d bytes%s' %
                (DeltaStr(-symbol_delta.old_size), symbol_name, symbol_type,
                 symbol_delta.old_size,
                 SharedInfoStr(symbol_delta)))
      for (changed_symbols_by_path, type_str) in [
          (grown_symbols.symbols_by_path, "Grown"),
          (shrunk_symbols.symbols_by_path, "Shrunk")]:
        if path in changed_symbols_by_path:
          print(' %s symbols:' % type_str)
          def changed_symbol_sortkey(item):
            # Largest shrink first; ties broken alphabetically by name.
            symbol_name, _symbol_type, symbol_delta = item
            return (symbol_delta.old_size - symbol_delta.new_size, symbol_name)
          for symbol_name, symbol_type, symbol_delta in \
              sorted(changed_symbols_by_path[path], key=changed_symbol_sortkey):
            print(' %8s: %s type=%s, (was %d bytes, now %d bytes)%s'
                  % (DeltaStr(symbol_delta.new_size - symbol_delta.old_size),
                     symbol_name, symbol_type,
                     symbol_delta.old_size, symbol_delta.new_size,
                     SharedInfoStr(symbol_delta)))
+
+
def main():
  """Parses command-line options, loads both nm dumps, prints the report.

  Returns None, so ``sys.exit(main())`` exits with status 0 on success
  (optparse's ``parser.error`` exits with status 2 on bad usage).
  """
  usage = """%prog [options]

  Analyzes the symbolic differences between two binary files
  (typically, not necessarily, two different builds of the same
  library) and produces a detailed description of symbols that have
  been added, removed, or whose size has changed.

  Example:
       explain_binary_size_delta.py --nm1 /tmp/nm1.dump --nm2 /tmp/nm2.dump

  Options are available via '--help'.
  """
  parser = optparse.OptionParser(usage=usage)
  parser.add_option('--nm1', metavar='PATH',
                    help='the nm dump of the first library')
  parser.add_option('--nm2', metavar='PATH',
                    help='the nm dump of the second library')
  parser.add_option('--showsources', action='store_true', default=False,
                    help='show per-source statistics')
  parser.add_option('--showsymbols', action='store_true', default=False,
                    help='show all symbol information; implies --showsources')
  parser.add_option('--verbose', action='store_true', default=False,
                    help='output internal debugging stuff')
  opts, _args = parser.parse_args()

  if not opts.nm1:
    parser.error('--nm1 is required')
  if not opts.nm2:
    parser.error('--nm2 is required')
  symbols = []
  for path in [opts.nm1, opts.nm2]:
    # open() rather than the file() builtin, which was removed in Python 3;
    # the two are equivalent here.
    with open(path, 'r') as nm_input:
      if opts.verbose:
        print('parsing ' + path + '...')
      symbols.append(list(binary_size_utils.ParseNm(nm_input)))
  (added, removed, changed, unchanged) = Compare(symbols[0], symbols[1])
  # Logical 'or' rather than bitwise '|': the flags are booleans, and
  # --showsymbols implies --showsources (see the option help above).
  CrunchStats(added, removed, changed, unchanged,
              opts.showsources or opts.showsymbols, opts.showsymbols)
+
if __name__ == '__main__':
  sys.exit(main())  # main() returns None, so the process exits with status 0.
diff --git a/chromium/tools/binary_size/explain_binary_size_delta_unittest.py b/chromium/tools/binary_size/explain_binary_size_delta_unittest.py
new file mode 100755
index 00000000000..d818d83f24c
--- /dev/null
+++ b/chromium/tools/binary_size/explain_binary_size_delta_unittest.py
@@ -0,0 +1,621 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Check that explain_binary_size_delta seems to work."""
+
+import cStringIO
+import sys
+import unittest
+
+import explain_binary_size_delta
+
+
+class ExplainBinarySizeDeltaTest(unittest.TestCase):
+
  def testCompare(self):
    """Runs Compare() on one symbol population per interesting case
    (added/removed/changed/unchanged, same and different buckets), checks
    the classification of each symbol, then verifies the exact report
    that CrunchStats() prints for the result."""
    # List entries have form:
    # symbol_name, symbol_type, symbol_size, file_path, memory_address
    symbol_list1 = (
      # File with one symbol, left as-is.
      ( 'unchanged', 't', 1000, '/file_unchanged', 0x1 ),
      # File with one symbol, changed.
      ( 'changed', 't', 1000, '/file_all_changed', 0x2 ),
      # File with one symbol, deleted.
      ( 'removed', 't', 1000, '/file_all_deleted', 0x3 ),
      # File with two symbols, one unchanged, one changed, same bucket
      ( 'unchanged', 't', 1000, '/file_pair_unchanged_changed', 0x4 ),
      ( 'changed', 't', 1000, '/file_pair_unchanged_changed', 0x5 ),
      # File with two symbols, one unchanged, one deleted, same bucket
      ( 'unchanged', 't', 1000, '/file_pair_unchanged_removed', 0x6 ),
      ( 'removed', 't', 1000, '/file_pair_unchanged_removed', 0x7 ),
      # File with two symbols, one unchanged, one added, same bucket
      ( 'unchanged', 't', 1000, '/file_pair_unchanged_added', 0x8 ),
      # File with two symbols, one unchanged, one changed, different bucket
      ( 'unchanged', 't', 1000, '/file_pair_unchanged_diffbuck_changed', 0x9 ),
      ( 'changed', '@', 1000, '/file_pair_unchanged_diffbuck_changed', 0xa ),
      # File with two symbols, one unchanged, one deleted, different bucket
      ( 'unchanged', 't', 1000, '/file_pair_unchanged_diffbuck_removed', 0xb ),
      ( 'removed', '@', 1000, '/file_pair_unchanged_diffbuck_removed', 0xc ),
      # File with two symbols, one unchanged, one added, different bucket
      ( 'unchanged', 't', 1000, '/file_pair_unchanged_diffbuck_added', 0xd ),
      # File with four symbols, one added, one removed,
      # one changed, one unchanged
      ( 'size_changed', 't', 1000, '/file_tetra', 0xe ),
      ( 'removed', 't', 1000, '/file_tetra', 0xf ),
      ( 'unchanged', 't', 1000, '/file_tetra', 0x10 ),
    )

    symbol_list2 = (
      # File with one symbol, left as-is.
      ( 'unchanged', 't', 1000, '/file_unchanged', 0x1 ),
      # File with one symbol, changed.
      ( 'changed', 't', 2000, '/file_all_changed', 0x2 ),
      # File with two symbols, one unchanged, one changed, same bucket
      ( 'unchanged', 't', 1000, '/file_pair_unchanged_changed', 0x3 ),
      ( 'changed', 't', 2000, '/file_pair_unchanged_changed', 0x4 ),
      # File with two symbols, one unchanged, one deleted, same bucket
      ( 'unchanged', 't', 1000, '/file_pair_unchanged_removed', 0x5 ),
      # File with two symbols, one unchanged, one added, same bucket
      ( 'unchanged', 't', 1000, '/file_pair_unchanged_added', 0x6 ),
      ( 'added', 't', 1000, '/file_pair_unchanged_added', 0x7 ),
      # File with two symbols, one unchanged, one changed, different bucket
      ( 'unchanged', 't', 1000, '/file_pair_unchanged_diffbuck_changed', 0x8 ),
      ( 'changed', '@', 2000, '/file_pair_unchanged_diffbuck_changed', 0x9 ),
      # File with two symbols, one unchanged, one deleted, different bucket
      ( 'unchanged', 't', 1000, '/file_pair_unchanged_diffbuck_removed', 0xa ),
      # File with two symbols, one unchanged, one added, different bucket
      ( 'unchanged', 't', 1000, '/file_pair_unchanged_diffbuck_added', 0xb ),
      ( 'added', '@', 1000, '/file_pair_unchanged_diffbuck_added', 0xc ),
      # File with four symbols, one added, one removed,
      # one changed, one unchanged
      ( 'size_changed', 't', 2000, '/file_tetra', 0xd ),
      ( 'unchanged', 't', 1000, '/file_tetra', 0xe ),
      ( 'added', 't', 1000, '/file_tetra', 0xf ),
      # New file with one symbol added
      ( 'added', 't', 1000, '/file_new', 0x10 ),
    )

    # Here we go
    (added, removed, changed, unchanged) = \
      explain_binary_size_delta.Compare(symbol_list1, symbol_list2)

    def delta(file_path, symbol_type, symbol_name, old_size, new_size):
      # Builds the DeltaInfo that Compare() is expected to produce.
      delta_info = explain_binary_size_delta.DeltaInfo(
          file_path, symbol_type, symbol_name, False)
      delta_info.old_size = old_size
      delta_info.new_size = new_size
      return delta_info

    # File with one symbol, left as-is.
    assert delta('/file_unchanged', 't', 'unchanged', 1000, 1000) in unchanged
    # File with one symbol, changed.
    assert delta('/file_all_changed', 't', 'changed', 1000, 2000) in changed
    # File with one symbol, deleted.
    assert delta('/file_all_deleted', 't', 'removed', 1000, None) in removed
    # New file with one symbol added
    assert delta('/file_new', 't', 'added', None, 1000) in added
    # File with two symbols, one unchanged, one changed, same bucket
    assert delta('/file_pair_unchanged_changed',
                 't', 'unchanged', 1000, 1000) in unchanged
    assert delta('/file_pair_unchanged_changed',
                 't', 'changed', 1000, 2000) in changed
    # File with two symbols, one unchanged, one removed, same bucket
    assert delta('/file_pair_unchanged_removed',
                 't', 'unchanged', 1000, 1000) in unchanged
    assert delta('/file_pair_unchanged_removed',
                 't', 'removed', 1000, None) in removed
    # File with two symbols, one unchanged, one added, same bucket
    assert delta('/file_pair_unchanged_added',
                 't', 'unchanged', 1000, 1000) in unchanged
    assert delta('/file_pair_unchanged_added',
                 't', 'added', None, 1000) in added
    # File with two symbols, one unchanged, one changed, different bucket
    assert delta('/file_pair_unchanged_diffbuck_changed',
                 't', 'unchanged', 1000, 1000) in unchanged
    assert delta('/file_pair_unchanged_diffbuck_changed',
                 '@', 'changed', 1000, 2000) in changed
    # File with two symbols, one unchanged, one removed, different bucket
    assert delta('/file_pair_unchanged_diffbuck_removed',
                 't', 'unchanged', 1000, 1000) in unchanged
    assert delta('/file_pair_unchanged_diffbuck_removed',
                 '@', 'removed', 1000, None) in removed
    # File with two symbols, one unchanged, one added, different bucket
    assert delta('/file_pair_unchanged_diffbuck_added',
                 't', 'unchanged', 1000, 1000) in unchanged
    assert delta('/file_pair_unchanged_diffbuck_added',
                 '@', 'added', None, 1000) in added
    # File with four symbols, one added, one removed, one changed, one unchanged
    assert delta('/file_tetra', 't', 'size_changed', 1000, 2000) in changed
    assert delta('/file_tetra', 't', 'unchanged', 1000, 1000) in unchanged
    assert delta('/file_tetra', 't', 'added', None, 1000) in added
    assert delta('/file_tetra', 't', 'removed', 1000, None) in removed

    # Now check final stats.
    # Redirect stdout so CrunchStats()'s report can be captured and
    # compared against the golden text below.
    orig_stdout = sys.stdout
    output_collector = cStringIO.StringIO()
    sys.stdout = output_collector
    try:
      explain_binary_size_delta.CrunchStats(added, removed, changed,
                                            unchanged, True, True)
    finally:
      sys.stdout = orig_stdout
    result = output_collector.getvalue()

    expected_output = """\
Total change: +4000 bytes
=========================
 4 added, totalling +4000 bytes across 4 sources
 4 removed, totalling -4000 bytes across 4 sources
 4 grown, for a net change of +4000 bytes \
(4000 bytes before, 8000 bytes after) across 4 sources
 8 unchanged, totalling 8000 bytes
Source stats:
 11 sources encountered.
 1 completely new.
 1 removed completely.
 8 partially changed.
 1 completely unchanged.
Per-source Analysis:

--------------------------------------------------
 +1000 - Source: /file_new - (gained 1000, lost 0)
--------------------------------------------------
 New symbols:
    +1000: added type=t, size=1000 bytes

---------------------------------------------------------------------
 +1000 - Source: /file_pair_unchanged_changed - (gained 1000, lost 0)
---------------------------------------------------------------------
 Grown symbols:
    +1000: changed type=t, (was 1000 bytes, now 2000 bytes)

----------------------------------------------------------------------------
 +1000 - Source: /file_pair_unchanged_diffbuck_added - (gained 1000, lost 0)
----------------------------------------------------------------------------
 New symbols:
    +1000: added type=@, size=1000 bytes

-------------------------------------------------------------------
 +1000 - Source: /file_pair_unchanged_added - (gained 1000, lost 0)
-------------------------------------------------------------------
 New symbols:
    +1000: added type=t, size=1000 bytes

------------------------------------------------------------------------------
 +1000 - Source: /file_pair_unchanged_diffbuck_changed - (gained 1000, lost 0)
------------------------------------------------------------------------------
 Grown symbols:
    +1000: changed type=@, (was 1000 bytes, now 2000 bytes)

----------------------------------------------------------
 +1000 - Source: /file_all_changed - (gained 1000, lost 0)
----------------------------------------------------------
 Grown symbols:
    +1000: changed type=t, (was 1000 bytes, now 2000 bytes)

-------------------------------------------------------
 +1000 - Source: /file_tetra - (gained 2000, lost 1000)
-------------------------------------------------------
 New symbols:
    +1000: added type=t, size=1000 bytes
 Removed symbols:
    -1000: removed type=t, size=1000 bytes
 Grown symbols:
    +1000: size_changed type=t, (was 1000 bytes, now 2000 bytes)

------------------------------------------------------------------------------
 -1000 - Source: /file_pair_unchanged_diffbuck_removed - (gained 0, lost 1000)
------------------------------------------------------------------------------
 Removed symbols:
    -1000: removed type=@, size=1000 bytes

----------------------------------------------------------
 -1000 - Source: /file_all_deleted - (gained 0, lost 1000)
----------------------------------------------------------
 Removed symbols:
    -1000: removed type=t, size=1000 bytes

---------------------------------------------------------------------
 -1000 - Source: /file_pair_unchanged_removed - (gained 0, lost 1000)
---------------------------------------------------------------------
 Removed symbols:
    -1000: removed type=t, size=1000 bytes
"""

    self.maxDiff = None
    self.assertMultiLineEqual(expected_output, result)
+
+
  def testCompareStringEntries(self):
    """Verifies that the same string symbol appearing in a second file is
    reported as an addition in that file, not as a change elsewhere."""
    # List entries have form:
    # symbol_name, symbol_type, symbol_size, file_path, memory_address
    symbol_list1 = (
      # File with one string.
      ( '.L.str107', 'r', 8, '/file_with_strs', 0x1 ),
    )

    symbol_list2 = (
      # Two files with one string each, same name.
      ( '.L.str107', 'r', 8, '/file_with_strs', 0x1 ),
      ( '.L.str107', 'r', 7, '/other_file_with_strs', 0x2 ),
    )

    # Here we go
    (added, removed, changed, unchanged) = \
      explain_binary_size_delta.Compare(symbol_list1, symbol_list2)


    # Now check final stats.
    # Capture stdout to compare CrunchStats()'s report with the golden text.
    orig_stdout = sys.stdout
    output_collector = cStringIO.StringIO()
    sys.stdout = output_collector
    try:
      explain_binary_size_delta.CrunchStats(added, removed, changed,
                                            unchanged, True, True)
    finally:
      sys.stdout = orig_stdout
    result = output_collector.getvalue()

    expected_output = """\
Total change: +7 bytes
======================
 1 added, totalling +7 bytes across 1 sources
 1 unchanged, totalling 8 bytes
Source stats:
 2 sources encountered.
 1 completely new.
 0 removed completely.
 0 partially changed.
 1 completely unchanged.
Per-source Analysis:

--------------------------------------------------------
 +7 - Source: /other_file_with_strs - (gained 7, lost 0)
--------------------------------------------------------
 New symbols:
       +7: .L.str107 type=r, size=7 bytes
"""

    self.maxDiff = None
    self.assertMultiLineEqual(expected_output, result)
+
  def testCompareStringEntriesWithNoFile(self):
    """Exercises Compare()'s multiset ("complex") path: identically named
    string symbols with no source file ('??'), changing in both size and
    multiplicity between the two dumps."""
    # List entries have form:
    # symbol_name, symbol_type, symbol_size, file_path, memory_address
    symbol_list1 = (
      ( '.L.str104', 'r', 21, '??', 0x1 ), # Will change size.
      ( '.L.str105', 'r', 17, '??', 0x2 ), # Same.
      ( '.L.str106', 'r', 13, '??', 0x3 ), # Will be removed.
      ( '.L.str106', 'r', 3, '??', 0x4 ), # Same.
      ( '.L.str106', 'r', 3, '??', 0x5 ), # Will be removed.
      ( '.L.str107', 'r', 8, '??', 0x6 ), # Will be removed (other sizes).
    )

    symbol_list2 = (
      # Two files with one string each, same name.
      ( '.L.str104', 'r', 19, '??', 0x1 ), # Changed.
      ( '.L.str105', 'r', 11, '??', 0x2 ), # New size for multi-symbol.
      ( '.L.str105', 'r', 17, '??', 0x3 ), # New of same size for multi-symbol.
      ( '.L.str105', 'r', 17, '??', 0x4 ), # Same.
      ( '.L.str106', 'r', 3, '??', 0x5 ), # Same.
      ( '.L.str107', 'r', 5, '??', 0x6 ), # New size for symbol.
      ( '.L.str107', 'r', 7, '??', 0x7 ), # New size for symbol.
      ( '.L.str108', 'r', 8, '??', 0x8 ), # New symbol.
    )

    # Here we go
    (added, removed, changed, unchanged) = \
      explain_binary_size_delta.Compare(symbol_list1, symbol_list2)


    # Now check final stats.
    # Capture stdout to compare CrunchStats()'s report with the golden text.
    orig_stdout = sys.stdout
    output_collector = cStringIO.StringIO()
    sys.stdout = output_collector
    try:
      explain_binary_size_delta.CrunchStats(added, removed, changed,
                                            unchanged, True, True)
    finally:
      sys.stdout = orig_stdout
    result = output_collector.getvalue()

    expected_output = """\
Total change: +22 bytes
=======================
 5 added, totalling +48 bytes across 1 sources
 3 removed, totalling -24 bytes across 1 sources
 1 shrunk, for a net change of -2 bytes (21 bytes before, 19 bytes after) \
across 1 sources
 2 unchanged, totalling 20 bytes
Source stats:
 1 sources encountered.
 0 completely new.
 0 removed completely.
 1 partially changed.
 0 completely unchanged.
Per-source Analysis:

----------------------------------------
 +22 - Source: ?? - (gained 48, lost 26)
----------------------------------------
 New symbols:
      +17: .L.str105 type=r, size=17 bytes
      +11: .L.str105 type=r, size=11 bytes
       +8: .L.str108 type=r, size=8 bytes
       +7: .L.str107 type=r, size=7 bytes
       +5: .L.str107 type=r, size=5 bytes
 Removed symbols:
       -3: .L.str106 type=r, size=3 bytes
       -8: .L.str107 type=r, size=8 bytes
      -13: .L.str106 type=r, size=13 bytes
 Shrunk symbols:
       -2: .L.str104 type=r, (was 21 bytes, now 19 bytes)
"""

    self.maxDiff = None
    self.assertMultiLineEqual(expected_output, result)
+
  def testCompareSharedSpace(self):
    """Verifies that two symbols sharing one address have the raw size
    split between them and are flagged as adjusted in the report."""
    # List entries have form:
    # symbol_name, symbol_type, symbol_size, file_path, memory_address
    symbol_list1 = (
      # File with two symbols, same address.
      ( 'sym1', 'r', 8, '/file', 0x1 ),
      ( 'sym2', 'r', 8, '/file', 0x1 ),
    )

    symbol_list2 = (
      # File with two symbols, same address.
      ( 'sym1', 'r', 4, '/file', 0x1 ),
      ( 'sym2', 'r', 4, '/file', 0x1 ),
    )

    # Here we go
    (added, removed, changed, unchanged) = \
      explain_binary_size_delta.Compare(symbol_list1, symbol_list2)


    # Now check final stats.
    # Capture stdout to compare CrunchStats()'s report with the golden text.
    orig_stdout = sys.stdout
    output_collector = cStringIO.StringIO()
    sys.stdout = output_collector
    try:
      explain_binary_size_delta.CrunchStats(added, removed, changed,
                                            unchanged, True, True)
    finally:
      sys.stdout = orig_stdout
    result = output_collector.getvalue()

    expected_output = """\
Total change: -4 bytes
======================
 2 shrunk, for a net change of -4 bytes (8 bytes before, 4 bytes after) \
across 1 sources
 0 unchanged, totalling 0 bytes
Source stats:
 1 sources encountered.
 0 completely new.
 0 removed completely.
 1 partially changed.
 0 completely unchanged.
Per-source Analysis:

----------------------------------------
 -4 - Source: /file - (gained 0, lost 4)
----------------------------------------
 Shrunk symbols:
       -2: sym1 type=r, (was 4 bytes, now 2 bytes) (adjusted sizes because \
of memory sharing)
       -2: sym2 type=r, (was 4 bytes, now 2 bytes) (adjusted sizes because \
of memory sharing)
"""

    self.maxDiff = None
    self.assertMultiLineEqual(expected_output, result)
+
+
  def testCompareSharedSpaceDuplicateSymbols(self):
    """Same symbol name at two addresses: the shared 0x1 copy of sym1 is
    treated as removed+added, while the unshared 0x2 copy stays unchanged."""
    # List entries have form:
    # symbol_name, symbol_type, symbol_size, file_path, memory_address
    symbol_list1 = (
      # File with two symbols, same address.
      ( 'sym1', 'r', 7, '/file', 0x2 ),
      ( 'sym1', 'r', 8, '/file', 0x1 ),
      ( 'sym2', 'r', 8, '/file', 0x1 ),
    )

    symbol_list2 = (
      # File with two symbols, same address.
      ( 'sym1', 'r', 7, '/file', 0x2 ),
      ( 'sym1', 'r', 4, '/file', 0x1 ),
      ( 'sym2', 'r', 4, '/file', 0x1 ),
    )

    # Here we go
    (added, removed, changed, unchanged) = \
        explain_binary_size_delta.Compare(symbol_list1, symbol_list2)


    # Now check final stats.
    orig_stdout = sys.stdout
    output_collector = cStringIO.StringIO()
    sys.stdout = output_collector
    try:
      explain_binary_size_delta.CrunchStats(added, removed, changed,
                                            unchanged, True, True)
    finally:
      sys.stdout = orig_stdout
    result = output_collector.getvalue()

    expected_output = """\
Total change: -4 bytes
======================
 1 added, totalling +2 bytes across 1 sources
 1 removed, totalling -4 bytes across 1 sources
 1 shrunk, for a net change of -2 bytes (4 bytes before, 2 bytes after) \
across 1 sources
 1 unchanged, totalling 7 bytes
Source stats:
 1 sources encountered.
 0 completely new.
 0 removed completely.
 1 partially changed.
 0 completely unchanged.
Per-source Analysis:

----------------------------------------
 -4 - Source: /file - (gained 2, lost 6)
----------------------------------------
 New symbols:
  +2: sym1 type=r, size=2 bytes (adjusted sizes because of memory \
sharing)
 Removed symbols:
  -4: sym1 type=r, size=4 bytes (adjusted sizes because of memory \
sharing)
 Shrunk symbols:
  -2: sym2 type=r, (was 4 bytes, now 2 bytes) (adjusted sizes because \
of memory sharing)
"""

    self.maxDiff = None
    self.assertMultiLineEqual(expected_output, result)
+
  def testCompareSharedSpaceBecomingUnshared(self):
    """Symbols that stop sharing an address regain their full size, so both
    are reported as grown relative to their shared (halved) sizes."""
    # List entries have form:
    # symbol_name, symbol_type, symbol_size, file_path, memory_address
    symbol_list1 = (
      # File with two symbols, same address.
      ( 'sym1', 'r', 8, '/file', 0x1 ),
      ( 'sym2', 'r', 8, '/file', 0x1 ),
    )

    symbol_list2 = (
      # File with two symbols, not the same address.
      ( 'sym1', 'r', 8, '/file', 0x1 ),
      ( 'sym2', 'r', 6, '/file', 0x2 ),
    )

    # Here we go
    (added, removed, changed, unchanged) = \
        explain_binary_size_delta.Compare(symbol_list1, symbol_list2)


    # Now check final stats.
    orig_stdout = sys.stdout
    output_collector = cStringIO.StringIO()
    sys.stdout = output_collector
    try:
      explain_binary_size_delta.CrunchStats(added, removed, changed,
                                            unchanged, True, True)
    finally:
      sys.stdout = orig_stdout
    result = output_collector.getvalue()

    expected_output = """\
Total change: +6 bytes
======================
 2 grown, for a net change of +6 bytes (8 bytes before, 14 bytes after) \
across 1 sources
 0 unchanged, totalling 0 bytes
Source stats:
 1 sources encountered.
 0 completely new.
 0 removed completely.
 1 partially changed.
 0 completely unchanged.
Per-source Analysis:

----------------------------------------
 +6 - Source: /file - (gained 6, lost 0)
----------------------------------------
 Grown symbols:
  +4: sym1 type=r, (was 4 bytes, now 8 bytes) (adjusted sizes because \
of memory sharing)
  +2: sym2 type=r, (was 4 bytes, now 6 bytes) (adjusted sizes because \
of memory sharing)
"""

    self.maxDiff = None
    self.assertMultiLineEqual(expected_output, result)
+
  def testCompareSymbolsBecomingUnshared(self):
    """Symbols that start sharing an address have their sizes halved, so
    both are reported as shrunk."""
    # List entries have form:
    # symbol_name, symbol_type, symbol_size, file_path, memory_address
    symbol_list1 = (
      # File with two symbols, not the same address.
      ( 'sym1', 'r', 8, '/file', 0x1 ),
      ( 'sym2', 'r', 6, '/file', 0x2 ),
    )

    symbol_list2 = (
      # File with two symbols, same address.
      ( 'sym1', 'r', 8, '/file', 0x1 ),
      ( 'sym2', 'r', 8, '/file', 0x1 ),
    )

    # Here we go
    (added, removed, changed, unchanged) = \
        explain_binary_size_delta.Compare(symbol_list1, symbol_list2)


    # Now check final stats.
    orig_stdout = sys.stdout
    output_collector = cStringIO.StringIO()
    sys.stdout = output_collector
    try:
      explain_binary_size_delta.CrunchStats(added, removed, changed,
                                            unchanged, True, True)
    finally:
      sys.stdout = orig_stdout
    result = output_collector.getvalue()

    expected_output = """\
Total change: -6 bytes
======================
 2 shrunk, for a net change of -6 bytes (14 bytes before, 8 bytes after) \
across 1 sources
 0 unchanged, totalling 0 bytes
Source stats:
 1 sources encountered.
 0 completely new.
 0 removed completely.
 1 partially changed.
 0 completely unchanged.
Per-source Analysis:

----------------------------------------
 -6 - Source: /file - (gained 0, lost 6)
----------------------------------------
 Shrunk symbols:
  -2: sym2 type=r, (was 6 bytes, now 4 bytes) (adjusted sizes because \
of memory sharing)
  -4: sym1 type=r, (was 8 bytes, now 4 bytes) (adjusted sizes because \
of memory sharing)
"""

    self.maxDiff = None
    self.assertMultiLineEqual(expected_output, result)
+
+ def testDeltaInfo(self):
+ x = explain_binary_size_delta.DeltaInfo("path", "t", "sym_name", False)
+ assert x == x
+ y = explain_binary_size_delta.DeltaInfo("path", "t", "sym_name", False)
+ assert x == y
+
+ y.new_size = 12
+ assert x != y
+
+ x.new_size = 12
+ assert x == y
+
+ z = explain_binary_size_delta.DeltaInfo("path", "t", "sym_name", True)
+ assert not (x == z)
+ assert x != z
+
+ w = explain_binary_size_delta.DeltaInfo("other_path", "t", "sym_name", True)
+ assert w != z
+
# Allow running this test file directly (outside a test runner).
if __name__ == '__main__':
  unittest.main()
diff --git a/chromium/tools/binary_size/run_binary_size_analysis.py b/chromium/tools/binary_size/run_binary_size_analysis.py
new file mode 100755
index 00000000000..6bd111bac65
--- /dev/null
+++ b/chromium/tools/binary_size/run_binary_size_analysis.py
@@ -0,0 +1,679 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate a spatial analysis against an arbitrary library.
+
+To use, build the 'binary_size_tool' target. Then run this tool, passing
+in the location of the library to be analyzed along with any other options
+you desire.
+"""
+
+import collections
+import json
+import logging
+import multiprocessing
+import optparse
+import os
+import re
+import shutil
+import struct
+import subprocess
+import sys
+import tempfile
+import time
+
+import binary_size_utils
+
+# This path change is not beautiful. Temporary (I hope) measure until
+# the chromium project has figured out a proper way to organize the
+# library of python tools. http://crbug.com/375725
+elf_symbolizer_path = os.path.abspath(os.path.join(
+ os.path.dirname(__file__),
+ '..',
+ '..',
+ 'build',
+ 'android',
+ 'pylib'))
+sys.path.append(elf_symbolizer_path)
+import symbols.elf_symbolizer as elf_symbolizer # pylint: disable=F0401
+
+
+# Node dictionary keys. These are output in json read by the webapp so
+# keep them short to save file size.
+# Note: If these change, the webapp must also change.
+NODE_TYPE_KEY = 'k'
+NODE_NAME_KEY = 'n'
+NODE_CHILDREN_KEY = 'children'
+NODE_SYMBOL_TYPE_KEY = 't'
+NODE_SYMBOL_SIZE_KEY = 'value'
+NODE_MAX_DEPTH_KEY = 'maxDepth'
+NODE_LAST_PATH_ELEMENT_KEY = 'lastPathElement'
+
+# The display name of the bucket where we put symbols without path.
+NAME_NO_PATH_BUCKET = '(No Path)'
+
+# Try to keep data buckets smaller than this to avoid killing the
+# graphing lib.
+BIG_BUCKET_LIMIT = 3000
+
+
def _MkChild(node, name):
  """Return the child of |node| named |name|, creating an empty child node
  (name + empty children dict) on first access."""
  children = node[NODE_CHILDREN_KEY]
  if name not in children:
    children[name] = {NODE_NAME_KEY: name,
                      NODE_CHILDREN_KEY: {}}
  return children[name]
+
+
+
def SplitNoPathBucket(node):
  """NAME_NO_PATH_BUCKET can be too large for the graphing lib to
  handle. Split it into sub-buckets in that case."""
  root_children = node[NODE_CHILDREN_KEY]
  if NAME_NO_PATH_BUCKET in root_children:
    no_path_bucket = root_children[NAME_NO_PATH_BUCKET]
    old_children = no_path_bucket[NODE_CHILDREN_KEY]
    # First pass: count symbols across all per-type buckets.
    count = 0
    for symbol_type, symbol_bucket in old_children.iteritems():
      count += len(symbol_bucket[NODE_CHILDREN_KEY])
    if count > BIG_BUCKET_LIMIT:
      # Second pass: re-home symbols into 'subgroup N' children of the
      # no-path bucket, BIG_BUCKET_LIMIT symbols per subgroup.
      new_children = {}
      no_path_bucket[NODE_CHILDREN_KEY] = new_children
      current_bucket = None
      index = 0
      for symbol_type, symbol_bucket in old_children.iteritems():
        for symbol_name, value in symbol_bucket[NODE_CHILDREN_KEY].iteritems():
          if index % BIG_BUCKET_LIMIT == 0:
            # Start a new subgroup. Integer division is intended here
            # (Python 2 '/' on ints).
            group_no = (index / BIG_BUCKET_LIMIT) + 1
            current_bucket = _MkChild(no_path_bucket,
                                      '%s subgroup %d' % (NAME_NO_PATH_BUCKET,
                                                          group_no))
            assert not NODE_TYPE_KEY in node or node[NODE_TYPE_KEY] == 'p'
            node[NODE_TYPE_KEY] = 'p' # p for path
          index += 1
          symbol_size = value[NODE_SYMBOL_SIZE_KEY]
          AddSymbolIntoFileNode(current_bucket, symbol_type,
                                symbol_name, symbol_size)
+
+
def MakeChildrenDictsIntoLists(node):
  """Recursively convert every node's children dict into a list, in place.

  Returns the length of the largest children list seen anywhere in the
  subtree (used by the caller to warn about oversized sections).
  """
  largest_list_len = 0
  if NODE_CHILDREN_KEY in node:
    largest_list_len = len(node[NODE_CHILDREN_KEY])
    child_list = []
    for child in node[NODE_CHILDREN_KEY].itervalues():
      child_largest_list_len = MakeChildrenDictsIntoLists(child)
      if child_largest_list_len > largest_list_len:
        largest_list_len = child_largest_list_len
      child_list.append(child)
    node[NODE_CHILDREN_KEY] = child_list

  return largest_list_len
+
+
def AddSymbolIntoFileNode(node, symbol_type, symbol_name, symbol_size):
  """Puts symbol into the file path node |node|.
  Returns the number of added levels in tree. I.e. returns 2.

  The two levels are: a per-symbol-type bucket under the file node, and
  the symbol leaf itself under that bucket."""

  # 'node' is the file node and first step is to find its symbol-type bucket.
  node[NODE_LAST_PATH_ELEMENT_KEY] = True
  node = _MkChild(node, symbol_type)
  assert not NODE_TYPE_KEY in node or node[NODE_TYPE_KEY] == 'b'
  node[NODE_SYMBOL_TYPE_KEY] = symbol_type
  node[NODE_TYPE_KEY] = 'b' # b for bucket

  # 'node' is now the symbol-type bucket. Make the child entry.
  node = _MkChild(node, symbol_name)
  if NODE_CHILDREN_KEY in node:
    if node[NODE_CHILDREN_KEY]:
      logging.warning('A container node used as symbol for %s.' % symbol_name)
    # This is going to be used as a leaf so no use for child list.
    del node[NODE_CHILDREN_KEY]
  node[NODE_SYMBOL_SIZE_KEY] = symbol_size
  node[NODE_SYMBOL_TYPE_KEY] = symbol_type
  node[NODE_TYPE_KEY] = 's' # s for symbol

  return 2 # Depth of the added subtree.
+
+
def MakeCompactTree(symbols, symbol_path_origin_dir):
  """Build the nested path/bucket/symbol tree consumed by the web report.

  |symbols| is an iterable of (name, type, size, path, address) tuples;
  |symbol_path_origin_dir| is the directory relative paths are resolved
  against. Returns the root node dict (keys are the NODE_* constants).
  """
  result = {NODE_NAME_KEY: '/',
            NODE_CHILDREN_KEY: {},
            NODE_TYPE_KEY: 'p',
            NODE_MAX_DEPTH_KEY: 0}
  seen_symbol_with_path = False
  cwd = os.path.abspath(os.getcwd())
  for symbol_name, symbol_type, symbol_size, file_path, _address in symbols:

    if 'vtable for ' in symbol_name:
      symbol_type = '@' # hack to categorize these separately
    # Take path like '/foo/bar/baz', convert to ['foo', 'bar', 'baz']
    if file_path and file_path != "??":
      file_path = os.path.abspath(os.path.join(symbol_path_origin_dir,
                                               file_path))
      # Let the output structure be relative to $CWD if inside $CWD,
      # otherwise relative to the disk root. This is to avoid
      # unnecessary click-through levels in the output.
      if file_path.startswith(cwd + os.sep):
        file_path = file_path[len(cwd):]
        if file_path.startswith('/'):
          file_path = file_path[1:]
      seen_symbol_with_path = True
    else:
      file_path = NAME_NO_PATH_BUCKET

    path_parts = file_path.split('/')

    # Find pre-existing node in tree, or update if it already exists
    node = result
    depth = 0
    while len(path_parts) > 0:
      path_part = path_parts.pop(0)
      if len(path_part) == 0:
        continue
      depth += 1
      node = _MkChild(node, path_part)
      assert not NODE_TYPE_KEY in node or node[NODE_TYPE_KEY] == 'p'
      node[NODE_TYPE_KEY] = 'p' # p for path

    depth += AddSymbolIntoFileNode(node, symbol_type, symbol_name, symbol_size)
    result[NODE_MAX_DEPTH_KEY] = max(result[NODE_MAX_DEPTH_KEY], depth)

  if not seen_symbol_with_path:
    logging.warning('Symbols lack paths. Data will not be structured.')

  # The (no path) bucket can be extremely large if we failed to get
  # path information. Split it into subgroups if needed.
  SplitNoPathBucket(result)

  largest_list_len = MakeChildrenDictsIntoLists(result)

  if largest_list_len > BIG_BUCKET_LIMIT:
    logging.warning('There are sections with %d nodes. '
                    'Results might be unusable.' % largest_list_len)
  return result
+
+
def DumpCompactTree(symbols, symbol_path_origin_dir, outfile):
  """Serialize the symbol tree to |outfile| as a 'var tree_data=...'
  JavaScript assignment (the data.js file loaded by the HTML report)."""
  tree_root = MakeCompactTree(symbols, symbol_path_origin_dir)
  with open(outfile, 'w') as out:
    out.write('var tree_data=')
    # Use separators without whitespace to get a smaller file.
    json.dump(tree_root, out, separators=(',', ':'))
  print('Writing %d bytes json' % os.path.getsize(outfile))
+
+
def MakeSourceMap(symbols):
  """Aggregate symbols per source file.

  Returns a dict keyed by normalized path (or '[no path]' for symbols
  without one); each value holds the first-seen raw 'path', the total
  'size', and the 'symbol_count' for that source.
  """
  sources = {}
  for _sym, _symbol_type, size, path, _address in symbols:
    key = os.path.normpath(path) if path else '[no path]'
    record = sources.setdefault(key,
                                {'path': path, 'symbol_count': 0, 'size': 0})
    record['size'] += size
    record['symbol_count'] += 1
  return sources
+
+
# Regex for parsing "nm" output. A sample line looks like this:
# 0167b39c 00000018 t ACCESS_DESCRIPTION_free /path/file.c:95
#
# The fields are: address, size, type, name, source location
# Regular expression explained ( see also: https://xkcd.com/208 ):
# ([0-9a-f]{8,}) The address
# [\s]+ Whitespace separator
# ([0-9a-f]{8,}) The size. From here on out it's all optional.
# [\s]* Whitespace separator
# (\S?) The symbol type, which is any non-whitespace char
# [\s*] Whitespace separator
#     NOTE(review): [\s*] is a character class matching ONE whitespace
#     char OR a literal '*', not the quantifier '\s*' -- confirm intended.
# ([^\t]*) Symbol name, any non-tab character (spaces ok!)
# [\t]? Tab separator
# (.*) The location (filename[:linennum|?][ (discriminator n)]
sNmPattern = re.compile(
  r'([0-9a-f]{8,})[\s]+([0-9a-f]{8,})[\s]*(\S?)[\s*]([^\t]*)[\t]?(.*)')
+
class Progress():
  """Mutable counters for reporting symbolization progress.

  All counters start at zero; time_last_output starts at the current
  time so that console output is throttled from the very first update.
  """

  def __init__(self):
    # Symbols processed so far, and input lines skipped entirely.
    self.count = 0
    self.skip_count = 0
    # Addresses encountered more than once.
    self.collisions = 0
    # Counters fed from the symbolizer's disambiguated/was_ambiguous flags.
    self.disambiguations = 0
    self.was_ambiguous = 0
    # Throttling state for the once-per-second console status line.
    self.time_last_output = time.time()
    self.count_last_output = 0
+
+
def RunElfSymbolizer(outfile, library, addr2line_binary, nm_binary, jobs,
                     disambiguate, src_path):
  """Symbolize |library| and write annotated nm output to |outfile|.

  Runs nm over |library|, then uses ELFSymbolizer (addr2line workers,
  up to |jobs| concurrent) to attach source locations to lines that do
  not already carry one. Lines gain a trailing '\\tpath:line' field;
  lines that could not be symbolized are written through unchanged.
  """
  nm_output = RunNm(library, nm_binary)
  nm_output_lines = nm_output.splitlines()
  nm_output_lines_len = len(nm_output_lines)
  # addr -> symbol object, filled in by the symbolizer callback below.
  address_symbol = {}
  progress = Progress()
  def map_address_symbol(symbol, addr):
    # Callback invoked once per resolved address; records the symbol and
    # updates progress counters.
    progress.count += 1
    if addr in address_symbol:
      # 'Collision between %s and %s.' % (str(symbol.name),
      #                                   str(address_symbol[addr].name))
      progress.collisions += 1
    else:
      if symbol.disambiguated:
        progress.disambiguations += 1
        if symbol.was_ambiguous:
          progress.was_ambiguous += 1

      address_symbol[addr] = symbol

    progress_output()

  def progress_output():
    # Prints an in-place status line, throttled to roughly once per second.
    progress_chunk = 100
    if progress.count % progress_chunk == 0:
      time_now = time.time()
      time_spent = time_now - progress.time_last_output
      if time_spent > 1.0:
        # Only output at most once per second.
        progress.time_last_output = time_now
        chunk_size = progress.count - progress.count_last_output
        progress.count_last_output = progress.count
        if time_spent > 0:
          speed = chunk_size / time_spent
        else:
          speed = 0
        progress_percent = (100.0 * (progress.count + progress.skip_count) /
                            nm_output_lines_len)
        disambiguation_percent = 0
        if progress.disambiguations != 0:
          disambiguation_percent = (100.0 * progress.disambiguations /
                                    progress.was_ambiguous)

        sys.stdout.write('\r%.1f%%: Looked up %d symbols (%d collisions, '
                         '%d disambiguations where %.1f%% succeeded)'
                         ' - %.1f lookups/s.' %
                         (progress_percent, progress.count, progress.collisions,
                          progress.disambiguations, disambiguation_percent,
                          speed))

  # In case disambiguation was disabled, we remove the source path (which upon
  # being set signals the symbolizer to enable disambiguation)
  if not disambiguate:
    src_path = None
  symbolizer = elf_symbolizer.ELFSymbolizer(library, addr2line_binary,
                                            map_address_symbol,
                                            max_concurrent_jobs=jobs,
                                            source_root_path=src_path)
  user_interrupted = False
  try:
    for line in nm_output_lines:
      match = sNmPattern.match(line)
      if match:
        location = match.group(5)
        if not location:
          addr = int(match.group(1), 16)
          size = int(match.group(2), 16)
          if addr in address_symbol: # Already looked up, shortcut
                                     # ELFSymbolizer.
            map_address_symbol(address_symbol[addr], addr)
            continue
          elif size == 0:
            # Save time by not looking up empty symbols (do they even exist?)
            print('Empty symbol: ' + line)
          else:
            symbolizer.SymbolizeAsync(addr, addr)
            continue

      progress.skip_count += 1
  except KeyboardInterrupt:
    user_interrupted = True
    print('Interrupting - killing subprocesses. Please wait.')

  try:
    symbolizer.Join()
  except KeyboardInterrupt:
    # Don't want to abort here since we will be finished in a few seconds.
    user_interrupted = True
    print('Patience you must have my young padawan.')

  # Terminate the in-place '\r' progress line. (Python 2 print statement.)
  print ''

  if user_interrupted:
    print('Skipping the rest of the file mapping. '
          'Output will not be fully classified.')

  symbol_path_origin_dir = os.path.dirname(os.path.abspath(library))

  # Second pass: rewrite the nm output with the discovered source
  # locations appended to each symbolized line.
  with open(outfile, 'w') as out:
    for line in nm_output_lines:
      match = sNmPattern.match(line)
      if match:
        location = match.group(5)
        if not location:
          addr = int(match.group(1), 16)
          symbol = address_symbol.get(addr)
          if symbol is not None:
            path = '??'
            if symbol.source_path is not None:
              path = os.path.abspath(os.path.join(symbol_path_origin_dir,
                                                  symbol.source_path))
            line_number = 0
            if symbol.source_line is not None:
              line_number = symbol.source_line
            out.write('%s\t%s:%d\n' % (line, path, line_number))
            continue

      out.write('%s\n' % line)

  print('%d symbols in the results.' % len(address_symbol))
+
+
def RunNm(binary, nm_binary):
  """Run nm over |binary| and return its stdout.

  Invokes |nm_binary| with demangling (-C), sizes printed, sorted by
  size descending.

  Raises:
    Exception: if nm exits nonzero; the message is nm's stderr when
        available, else its stdout.
  """
  cmd = [nm_binary, '-C', '--print-size', '--size-sort', '--reverse-sort',
         binary]
  nm_process = subprocess.Popen(cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
  (process_output, err_output) = nm_process.communicate()

  if nm_process.returncode != 0:
    # Python 2/3-compatible raise (the old 'raise Exception, x' form is a
    # syntax error on Python 3).
    if err_output:
      raise Exception(err_output)
    else:
      raise Exception(process_output)

  return process_output
+
+
def GetNmSymbols(nm_infile, outfile, library, jobs, verbose,
                 addr2line_binary, nm_binary, disambiguate, src_path):
  """Return the parsed symbol list for the analysis.

  If |nm_infile| is given, parse it directly. Otherwise run the
  nm/addr2line pipeline over |library| first, dumping the annotated nm
  output to |outfile| (or a kept temp file when |outfile| is None), then
  parse that. Returns a list of tuples from binary_size_utils.ParseNm.

  Changes vs. the original: uses open() instead of the Python-2-only
  file() builtin, and the print-with-parentheses form that is valid on
  both Python 2 and 3. Behavior is unchanged.
  """
  if nm_infile is None:
    if outfile is None:
      # delete=False: the dump must survive for parsing below.
      outfile = tempfile.NamedTemporaryFile(delete=False).name

    if verbose:
      print('Running parallel addr2line, dumping symbols to ' + outfile)
    RunElfSymbolizer(outfile, library, addr2line_binary, nm_binary, jobs,
                     disambiguate, src_path)

    nm_infile = outfile

  elif verbose:
    print('Using nm input from ' + nm_infile)
  with open(nm_infile, 'r') as infile:
    return list(binary_size_utils.ParseNm(infile))
+
+
+PAK_RESOURCE_ID_TO_STRING = { "inited": False }
+
def LoadPakIdsFromResourceFile(filename):
  """Given a file name, it loads everything that looks like a resource id
  into PAK_RESOURCE_ID_TO_STRING."""
  with open(filename) as resource_header:
    for line in resource_header:
      # Match grit-generated lines of the form: #define RESOURCE_NAME 1234
      if line.startswith("#define "):
        line_data = line.split()
        if len(line_data) == 3:
          try:
            resource_number = int(line_data[2])
            resource_name = line_data[1]
            PAK_RESOURCE_ID_TO_STRING[resource_number] = resource_name
          except ValueError:
            # Non-numeric third token (e.g. an expression) -- not a
            # resource id, skip it.
            pass
+
def GetReadablePakResourceName(pak_file, resource_id):
  """Pak resources have a numeric identifier. It is not helpful when
  trying to locate where footprint is generated. This does its best to
  map the number to a usable string.

  Falls back to 'Pak Resource <id>' when no matching #define was found.
  """
  if not PAK_RESOURCE_ID_TO_STRING['inited']:
    # Try to find resource header files generated by grit when
    # building the pak file. We'll look for files named *resources.h"
    # and lines of the type:
    #    #define MY_RESOURCE_JS 1234
    # The scan runs once per process; 'inited' caches that it happened.
    PAK_RESOURCE_ID_TO_STRING['inited'] = True
    gen_dir = os.path.join(os.path.dirname(pak_file), 'gen')
    if os.path.isdir(gen_dir):
      for dirname, _dirs, files in os.walk(gen_dir):
        for filename in files:
          if filename.endswith('resources.h'):
            LoadPakIdsFromResourceFile(os.path.join(dirname, filename))
  return PAK_RESOURCE_ID_TO_STRING.get(resource_id,
                                       'Pak Resource %d' % resource_id)
+
def AddPakData(symbols, pak_file):
  """Adds pseudo-symbols from a pak file.

  Parses the pak v4 index and appends one entry per resource to
  |symbols|. Entries are 5-tuples (name, type, size, path, address) to
  match binary_size_utils.ParseNm output -- the original appended
  4-tuples, which made MakeCompactTree's 5-way unpacking raise
  ValueError whenever --pak was used. Pak resources have no meaningful
  address, so 0 is used as a placeholder.
  """
  pak_file = os.path.abspath(pak_file)
  with open(pak_file, 'rb') as pak:
    data = pak.read()

  PAK_FILE_VERSION = 4
  HEADER_LENGTH = 2 * 4 + 1  # Two uint32s (file version, number of entries)
                             # and one uint8 (encoding of text resources).
  INDEX_ENTRY_SIZE = 2 + 4  # Each entry is a uint16 and a uint32.
  version, num_entries, _encoding = struct.unpack('<IIB', data[:HEADER_LENGTH])
  assert version == PAK_FILE_VERSION, ('Unsupported pak file '
                                       'version (%d) in %s. Only '
                                       'support version %d' %
                                       (version, pak_file, PAK_FILE_VERSION))
  if num_entries > 0:
    # Read the index and data. Each resource's size is the distance to
    # the following entry's offset (the index ends with a sentinel
    # entry, so peeking one entry ahead is always valid).
    data = data[HEADER_LENGTH:]
    for _ in range(num_entries):
      resource_id, offset = struct.unpack('<HI', data[:INDEX_ENTRY_SIZE])
      data = data[INDEX_ENTRY_SIZE:]
      _next_id, next_offset = struct.unpack('<HI', data[:INDEX_ENTRY_SIZE])
      resource_size = next_offset - offset

      symbol_name = GetReadablePakResourceName(pak_file, resource_id)
      symbol_path = pak_file
      symbol_type = 'd' # Data. Approximation.
      symbol_size = resource_size
      symbols.append((symbol_name, symbol_type, symbol_size, symbol_path, 0))
+
+def _find_in_system_path(binary):
+ """Locate the full path to binary in the system path or return None
+ if not found."""
+ system_path = os.environ["PATH"].split(os.pathsep)
+ for path in system_path:
+ binary_path = os.path.join(path, binary)
+ if os.path.isfile(binary_path):
+ return binary_path
+ return None
+
def CheckDebugFormatSupport(library, addr2line_binary):
  """Kills the program if debug data is in an unsupported format.

  There are two common versions of the DWARF debug formats and
  since we are right now transitioning from DWARF2 to newer formats,
  it's possible to have a mix of tools that are not compatible. Detect
  that and abort rather than produce meaningless output."""
  tool_output = subprocess.check_output([addr2line_binary, '--version'])
  version_re = re.compile(r'^GNU [^ ]+ .* (\d+).(\d+).*?$', re.M)
  parsed_output = version_re.match(tool_output)
  major = int(parsed_output.group(1))
  minor = int(parsed_output.group(2))
  # Versions newer than 2.22 are treated as DWARF4-capable.
  supports_dwarf4 = major > 2 or major == 2 and minor > 22

  if supports_dwarf4:
    return

  # Old toolchain: inspect the binary's actual DWARF version and abort
  # if it is newer than the tools can consume.
  print('Checking version of debug information in %s.' % library)
  debug_info = subprocess.check_output(['readelf', '--debug-dump=info',
                                        '--dwarf-depth=1', library])
  dwarf_version_re = re.compile(r'^\s+Version:\s+(\d+)$', re.M)
  parsed_dwarf_format_output = dwarf_version_re.search(debug_info)
  version = int(parsed_dwarf_format_output.group(1))
  if version > 2:
    print('The supplied tools only support DWARF2 debug data but the binary\n' +
          'uses DWARF%d. Update the tools or compile the binary\n' % version +
          'with -gdwarf-2.')
    sys.exit(1)
+
+
def main():
  """Command-line entry point: parse options, gather symbols, write report.

  Fixes vs. the original:
  - The default job count wrapped cpu_count() in str(), so the
    min()/max() clamp compared an int against a string and always
    produced 4 jobs regardless of CPU count.
  - The --pak existence assert's message string had no %s placeholder,
    so a failing assert raised TypeError instead of the intended message.
  Also uses 2/3-compatible forms (sys.stderr.write, print(), 0o755)
  with identical behavior.
  """
  usage = """%prog [options]

  Runs a spatial analysis on a given library, looking up the source locations
  of its symbols and calculating how much space each directory, source file,
  and so on is taking. The result is a report that can be used to pinpoint
  sources of large portions of the binary, etceteras.

  Under normal circumstances, you only need to pass two arguments, thusly:

  %prog --library /path/to/library --destdir /path/to/output

  In this mode, the program will dump the symbols from the specified library
  and map those symbols back to source locations, producing a web-based
  report in the specified output directory.

  Other options are available via '--help'.
  """
  parser = optparse.OptionParser(usage=usage)
  parser.add_option('--nm-in', metavar='PATH',
                    help='if specified, use nm input from <path> instead of '
                    'generating it. Note that source locations should be '
                    'present in the file; i.e., no addr2line symbol lookups '
                    'will be performed when this option is specified. '
                    'Mutually exclusive with --library.')
  parser.add_option('--destdir', metavar='PATH',
                    help='write output to the specified directory. An HTML '
                    'report is generated here along with supporting files; '
                    'any existing report will be overwritten.')
  parser.add_option('--library', metavar='PATH',
                    help='if specified, process symbols in the library at '
                    'the specified path. Mutually exclusive with --nm-in.')
  parser.add_option('--pak', metavar='PATH',
                    help='if specified, includes the contents of the '
                    'specified *.pak file in the output.')
  parser.add_option('--nm-binary',
                    help='use the specified nm binary to analyze library. '
                    'This is to be used when the nm in the path is not for '
                    'the right architecture or of the right version.')
  parser.add_option('--addr2line-binary',
                    help='use the specified addr2line binary to analyze '
                    'library. This is to be used when the addr2line in '
                    'the path is not for the right architecture or '
                    'of the right version.')
  parser.add_option('--jobs', type='int',
                    help='number of jobs to use for the parallel '
                    'addr2line processing pool; defaults to 1. More '
                    'jobs greatly improve throughput but eat RAM like '
                    'popcorn, and take several gigabytes each. Start low '
                    'and ramp this number up until your machine begins to '
                    'struggle with RAM. '
                    'This argument is only valid when using --library.')
  parser.add_option('-v', '--verbose', dest='verbose', action='store_true',
                    help='be verbose, printing lots of status information.')
  parser.add_option('--nm-out', metavar='PATH',
                    help='(deprecated) No-op. nm.out is stored in --destdir.')
  parser.add_option('--no-nm-out', action='store_true',
                    help='do not keep the nm output file. This file is useful '
                    'if you want to see the fully processed nm output after '
                    'the symbols have been mapped to source locations, or if '
                    'you plan to run explain_binary_size_delta.py. By default '
                    'the file \'nm.out\' is placed alongside the generated '
                    'report. The nm.out file is only created when using '
                    '--library.')
  parser.add_option('--disable-disambiguation', action='store_true',
                    help='disables the disambiguation process altogether,'
                    ' NOTE: this may, depending on your toolchain, produce'
                    ' output with some symbols at the top layer if addr2line'
                    ' could not get the entire source path.')
  parser.add_option('--source-path', default='./',
                    help='the path to the source code of the output binary, '
                    'default set to current directory. Used in the'
                    ' disambiguation process.')
  opts, _args = parser.parse_args()

  if ((not opts.library) and (not opts.nm_in)) or (opts.library and opts.nm_in):
    parser.error('exactly one of --library or --nm-in is required')
  if opts.nm_out:
    sys.stderr.write('WARNING: --nm-out is deprecated and has no effect.\n')
  if (opts.nm_in):
    if opts.jobs:
      sys.stderr.write('WARNING: --jobs has no effect '
                       'when used with --nm-in\n')
  if not opts.destdir:
    parser.error('--destdir is a required argument')
  if not opts.jobs:
    # Use the number of processors but cap between 2 and 4 since raw
    # CPU power isn't the limiting factor. It's I/O limited, memory
    # bus limited and available-memory-limited. Too many processes and
    # the computer will run out of memory and it will be slow.
    # (BUG FIX: do not wrap cpu_count() in str() -- that defeated the clamp.)
    opts.jobs = max(2, min(4, multiprocessing.cpu_count()))

  if opts.addr2line_binary:
    assert os.path.isfile(opts.addr2line_binary)
    addr2line_binary = opts.addr2line_binary
  else:
    addr2line_binary = _find_in_system_path('addr2line')
    assert addr2line_binary, 'Unable to find addr2line in the path. '\
        'Use --addr2line-binary to specify location.'

  if opts.nm_binary:
    assert os.path.isfile(opts.nm_binary)
    nm_binary = opts.nm_binary
  else:
    nm_binary = _find_in_system_path('nm')
    assert nm_binary, 'Unable to find nm in the path. Use --nm-binary '\
        'to specify location.'

  if opts.pak:
    # BUG FIX: the original message lacked %s, raising TypeError on failure.
    assert os.path.isfile(opts.pak), 'Could not find %s' % opts.pak

  print('addr2line: %s' % addr2line_binary)
  print('nm: %s' % nm_binary)

  if opts.library:
    CheckDebugFormatSupport(opts.library, addr2line_binary)

  # Prepare output directory and report guts
  if not os.path.exists(opts.destdir):
    os.makedirs(opts.destdir, 0o755)
  nm_out = os.path.join(opts.destdir, 'nm.out')
  if opts.no_nm_out:
    nm_out = None

  # Copy report boilerplate into output directory. This also proves that the
  # output directory is safe for writing, so there should be no problems
  # writing the nm.out file later.
  data_js_file_name = os.path.join(opts.destdir, 'data.js')
  d3_out = os.path.join(opts.destdir, 'd3')
  if not os.path.exists(d3_out):
    os.makedirs(d3_out, 0o755)
  d3_src = os.path.join(os.path.dirname(__file__),
                        '..',
                        '..',
                        'third_party', 'd3', 'src')
  template_src = os.path.join(os.path.dirname(__file__),
                              'template')
  shutil.copy(os.path.join(d3_src, 'LICENSE'), d3_out)
  shutil.copy(os.path.join(d3_src, 'd3.js'), d3_out)
  shutil.copy(os.path.join(template_src, 'index.html'), opts.destdir)
  shutil.copy(os.path.join(template_src, 'D3SymbolTreeMap.js'), opts.destdir)

  # Run nm and/or addr2line to gather the data
  symbols = GetNmSymbols(opts.nm_in, nm_out, opts.library,
                         opts.jobs, opts.verbose is True,
                         addr2line_binary, nm_binary,
                         opts.disable_disambiguation is None,
                         opts.source_path)

  # Post-processing
  if opts.pak:
    AddPakData(symbols, opts.pak)
  if opts.library:
    symbol_path_origin_dir = os.path.dirname(os.path.abspath(opts.library))
  else:
    # Just a guess. Hopefully all paths in the input file are absolute.
    symbol_path_origin_dir = os.path.abspath(os.getcwd())
  # Dump JSON for the HTML report.
  DumpCompactTree(symbols, symbol_path_origin_dir, data_js_file_name)
  print('Report saved to ' + opts.destdir + '/index.html')
+
# Propagate main()'s return value as the process exit status
# (None, the normal case, exits 0).
if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/binary_size/template/D3SymbolTreeMap.js b/chromium/tools/binary_size/template/D3SymbolTreeMap.js
new file mode 100644
index 00000000000..4bbe82f1a32
--- /dev/null
+++ b/chromium/tools/binary_size/template/D3SymbolTreeMap.js
@@ -0,0 +1,938 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO:
+// 1. Visibility functions: base on boxPadding.t, not 15
+// 2. Track a maxDisplayDepth that is user-settable:
+// maxDepth == currentRoot.depth + maxDisplayDepth
+/**
+ * A D3-based treemap view of binary symbol sizes.
+ * @param {number} mapWidth width of the map container.
+ * @param {number} mapHeight height of the map container.
+ * @param {number} levelsToShow how many tree levels to display at once.
+ * @constructor
+ */
+function D3SymbolTreeMap(mapWidth, mapHeight, levelsToShow) {
+  this._mapContainer = undefined;
+  this._mapWidth = mapWidth;
+  this._mapHeight = mapHeight;
+  // Inner padding of each box; 't' also reserves room for the box label.
+  this.boxPadding = {'l': 5, 'r': 5, 't': 20, 'b': 5};
+  this.infobox = undefined;
+  this._maskContainer = undefined;
+  this._highlightContainer = undefined;
+  // Transition in this order:
+  // 1. Exiting items go away.
+  // 2. Updated items move.
+  // 3. New items enter.
+  this._exitDuration=500;
+  this._updateDuration=500;
+  this._enterDuration=500;
+  // True until the first layout runs; the first transition skips the
+  // exit/update delay because nothing is on screen yet.
+  this._firstTransition=true;
+  this._layout = undefined;
+  this._currentRoot = undefined;
+  this._currentNodes = undefined;
+  this._treeData = undefined;
+  this._maxLevelsToShow = levelsToShow;
+  // Deepest absolute depth currently rendered (root depth + levels to show).
+  this._currentMaxDepth = this._maxLevelsToShow;
+}
+
+/**
+ * Make a number pretty, with comma separators.
+ */
+D3SymbolTreeMap._pretty = function(num) {
+ var asString = String(num);
+ var result = '';
+ var counter = 0;
+ for (var x = asString.length - 1; x >= 0; x--) {
+ counter++;
+ if (counter === 4) {
+ result = ',' + result;
+ counter = 1;
+ }
+ result = asString.charAt(x) + result;
+ }
+ return result;
+}
+
+/**
+ * Express a number in terms of KiB, MiB, GiB, etc.
+ * Note that these are powers of 2, not of 10.
+ */
+D3SymbolTreeMap._byteify = function(num) {
+ var suffix;
+ if (num >= 1024) {
+ if (num >= 1024 * 1024 * 1024) {
+ suffix = 'GiB';
+ num = num / (1024 * 1024 * 1024);
+ } else if (num >= 1024 * 1024) {
+ suffix = 'MiB';
+ num = num / (1024 * 1024);
+ } else if (num >= 1024) {
+ suffix = 'KiB'
+ num = num / 1024;
+ }
+ return num.toFixed(2) + ' ' + suffix;
+ }
+ return num + ' B';
+}
+
+D3SymbolTreeMap._NM_SYMBOL_TYPE_DESCRIPTIONS = {
+ // Definitions concisely derived from the nm 'man' page
+ 'A': 'Global absolute (A)',
+ 'B': 'Global uninitialized data (B)',
+ 'b': 'Local uninitialized data (b)',
+ 'C': 'Global uninitialized common (C)',
+ 'D': 'Global initialized data (D)',
+ 'd': 'Local initialized data (d)',
+ 'G': 'Global small initialized data (G)',
+ 'g': 'Local small initialized data (g)',
+ 'i': 'Indirect function (i)',
+ 'N': 'Debugging (N)',
+ 'p': 'Stack unwind (p)',
+ 'R': 'Global read-only data (R)',
+ 'r': 'Local read-only data (r)',
+ 'S': 'Global small uninitialized data (S)',
+ 's': 'Local small uninitialized data (s)',
+ 'T': 'Global code (T)',
+ 't': 'Local code (t)',
+ 'U': 'Undefined (U)',
+ 'u': 'Unique (u)',
+ 'V': 'Global weak object (V)',
+ 'v': 'Local weak object (v)',
+ 'W': 'Global weak symbol (W)',
+ 'w': 'Local weak symbol (w)',
+ '@': 'Vtable entry (@)', // non-standard, hack.
+ '-': 'STABS debugging (-)',
+ '?': 'Unrecognized (?)',
+};
+D3SymbolTreeMap._NM_SYMBOL_TYPES = '';
+for (var symbol_type in D3SymbolTreeMap._NM_SYMBOL_TYPE_DESCRIPTIONS) {
+ D3SymbolTreeMap._NM_SYMBOL_TYPES += symbol_type;
+}
+
+/**
+ * Given a symbol type code, look up and return a human-readable description
+ * of that symbol type. If the symbol type does not match one of the known
+ * types, the unrecognized description (corresponding to symbol type '?') is
+ * returned instead of null or undefined.
+ */
+D3SymbolTreeMap._getSymbolDescription = function(type) {
+ var result = D3SymbolTreeMap._NM_SYMBOL_TYPE_DESCRIPTIONS[type];
+ if (result === undefined) {
+ result = D3SymbolTreeMap._NM_SYMBOL_TYPE_DESCRIPTIONS['?'];
+ }
+ return result;
+}
+
+// Qualitative 12-value pastel Brewer palette. Cycled over the symbol types
+// by _initColorMap().
+D3SymbolTreeMap._colorArray = [
+  'rgb(141,211,199)',
+  'rgb(255,255,179)',
+  'rgb(190,186,218)',
+  'rgb(251,128,114)',
+  'rgb(128,177,211)',
+  'rgb(253,180,98)',
+  'rgb(179,222,105)',
+  'rgb(252,205,229)',
+  'rgb(217,217,217)',
+  'rgb(188,128,189)',
+  'rgb(204,235,197)',
+  'rgb(255,237,111)'];
+
+D3SymbolTreeMap._initColorMap = function() {
+ var map = {};
+ var numColors = D3SymbolTreeMap._colorArray.length;
+ var count = 0;
+ for (var key in D3SymbolTreeMap._NM_SYMBOL_TYPE_DESCRIPTIONS) {
+ var index = count++ % numColors;
+ map[key] = d3.rgb(D3SymbolTreeMap._colorArray[index]);
+ }
+ D3SymbolTreeMap._colorMap = map;
+}
+D3SymbolTreeMap._initColorMap();
+
+D3SymbolTreeMap.getColorForType = function(type) {
+ var result = D3SymbolTreeMap._colorMap[type];
+ if (result === undefined) return d3.rgb('rgb(255,255,255)');
+ return result;
+}
+
+/**
+ * Builds the DOM scaffolding (info box and map container), creates the
+ * treemap layout, and renders the globally-defined 'tree_data'.
+ */
+D3SymbolTreeMap.prototype.init = function() {
+  this.infobox = this._createInfoBox();
+  // Positioned 'relative' so the absolutely-placed node divs created later
+  // are positioned against this container.
+  this._mapContainer = d3.select('body').append('div')
+      .style('position', 'relative')
+      .style('width', this._mapWidth)
+      .style('height', this._mapHeight)
+      .style('padding', 0)
+      .style('margin', 0)
+      .style('box-shadow', '5px 5px 5px #888');
+  this._layout = this._createTreeMapLayout();
+  this._setData(tree_data); // TODO: Don't use global 'tree_data'
+}
+
+/**
+ * Sets the data displayed by the treemap and lays out the map: crunches
+ * per-type symbol statistics, resets the displayed root to the top of the
+ * tree, and triggers a re-layout.
+ */
+D3SymbolTreeMap.prototype._setData = function(data) {
+  this._treeData = data;
+  console.time('_crunchStats');
+  this._crunchStats(data);
+  console.timeEnd('_crunchStats');
+  this._currentRoot = this._treeData;
+  this._currentNodes = this._layout.nodes(this._currentRoot);
+  this._currentMaxDepth = this._maxLevelsToShow;
+  this._doLayout();
+}
+
+/**
+ * Recursively traverses the entire tree starting from the specified node,
+ * computing statistics and recording metadata as it goes. Call this method
+ * only once per imported tree.
+ */
+D3SymbolTreeMap.prototype._crunchStats = function(node) {
+  var stack = [];
+  // The ancestor stack doubles as the carrier for the next free node ID.
+  stack.idCounter = 0;
+  this._crunchStatsHelper(stack, node);
+}
+
+/**
+ * Invoke the specified visitor function on all data elements currently shown
+ * in the treemap including any and all of their children, starting at the
+ * currently-displayed root and descending recursively. The function will be
+ * passed the datum element representing each node. No traversal guarantees
+ * are made.
+ */
+D3SymbolTreeMap.prototype.visitFromDisplayedRoot = function(visitor) {
+  this._visit(this._currentRoot, visitor);
+}
+
+/**
+ * Helper function for visit functions.
+ */
+D3SymbolTreeMap.prototype._visit = function(datum, visitor) {
+ visitor.call(this, datum);
+ if (datum.children) for (var i = 0; i < datum.children.length; i++) {
+ this._visit(datum.children[i], visitor);
+ }
+}
+
+D3SymbolTreeMap.prototype._crunchStatsHelper = function(stack, node) {
+ // Only overwrite the node ID if it isn't already set.
+ // This allows stats to be crunched multiple times on subsets of data
+ // without breaking the data-to-ID bindings. New nodes get new IDs.
+ if (node.id === undefined) node.id = stack.idCounter++;
+ if (node.children === undefined) {
+ // Leaf node (symbol); accumulate stats.
+ for (var i = 0; i < stack.length; i++) {
+ var ancestor = stack[i];
+ if (!ancestor.symbol_stats) ancestor.symbol_stats = {};
+ if (ancestor.symbol_stats[node.t] === undefined) {
+ // New symbol type we haven't seen before, just record.
+ ancestor.symbol_stats[node.t] = {'count': 1,
+ 'size': node.value};
+ } else {
+ // Existing symbol type, increment.
+ ancestor.symbol_stats[node.t].count++;
+ ancestor.symbol_stats[node.t].size += node.value;
+ }
+ }
+ } else for (var i = 0; i < node.children.length; i++) {
+ stack.push(node);
+ this._crunchStatsHelper(stack, node.children[i]);
+ stack.pop();
+ }
+}
+
+D3SymbolTreeMap.prototype._createTreeMapLayout = function() {
+ var result = d3.layout.treemap()
+ .padding([this.boxPadding.t, this.boxPadding.r,
+ this.boxPadding.b, this.boxPadding.l])
+ .size([this._mapWidth, this._mapHeight]);
+ return result;
+}
+
+/**
+ * Resizes the map (container and layout) to the given dimensions and lays
+ * the current nodes out again.
+ */
+D3SymbolTreeMap.prototype.resize = function(width, height) {
+  this._mapWidth = width;
+  this._mapHeight = height;
+  this._mapContainer.style('width', width).style('height', height);
+  this._layout.size([this._mapWidth, this._mapHeight]);
+  this._currentNodes = this._layout.nodes(this._currentRoot);
+  this._doLayout();
+}
+
+/**
+ * Makes |datum| the displayed root, recomputing the layout and the maximum
+ * displayed depth relative to it. No-op if it is already the root.
+ */
+D3SymbolTreeMap.prototype._zoomDatum = function(datum) {
+  if (this._currentRoot === datum) return; // already here
+  // Dismiss hover artifacts for the node being zoomed.
+  this._hideHighlight(datum);
+  this._hideInfoBox(datum);
+  this._currentRoot = datum;
+  this._currentNodes = this._layout.nodes(this._currentRoot);
+  this._currentMaxDepth = this._currentRoot.depth + this._maxLevelsToShow;
+  console.log('zooming into datum ' + this._currentRoot.n);
+  this._doLayout();
+}
+
+/**
+ * Changes how many tree levels are displayed below the current root and
+ * lays the map out again.
+ */
+D3SymbolTreeMap.prototype.setMaxLevels = function(levelsToShow) {
+  this._maxLevelsToShow = levelsToShow;
+  this._currentNodes = this._layout.nodes(this._currentRoot);
+  this._currentMaxDepth = this._currentRoot.depth + this._maxLevelsToShow;
+  console.log('setting max levels to show: ' + this._maxLevelsToShow);
+  this._doLayout();
+}
+
+/**
+ * Clone the specified tree, returning an independent copy of the data.
+ * Only the original attributes expected to exist prior to invoking
+ * _crunchStatsHelper are retained, with the exception of the 'id' attribute
+ * (which must be retained for proper transitions).
+ * If the optional filter parameter is provided, it will be called with 'this'
+ * set to this treemap instance and passed the 'datum' object as an argument.
+ * When specified, the copy will retain only the data for which the filter
+ * function returns true.
+ */
+D3SymbolTreeMap.prototype._clone = function(datum, filter) {
+ var trackingStats = false;
+ if (this.__cloneState === undefined) {
+ console.time('_clone');
+ trackingStats = true;
+ this.__cloneState = {'accepted': 0, 'rejected': 0,
+ 'forced': 0, 'pruned': 0};
+ }
+
+ // Must go depth-first. All parents of children that are accepted by the
+ // filter must be preserved!
+ var copy = {'n': datum.n, 'k': datum.k};
+ var childAccepted = false;
+ if (datum.children !== undefined) {
+ for (var i = 0; i < datum.children.length; i++) {
+ var copiedChild = this._clone(datum.children[i], filter);
+ if (copiedChild !== undefined) {
+ childAccepted = true; // parent must also be accepted.
+ if (copy.children === undefined) copy.children = [];
+ copy.children.push(copiedChild);
+ }
+ }
+ }
+
+ // Ignore nodes that don't match the filter, when present.
+ var accept = false;
+ if (childAccepted) {
+ // Parent of an accepted child must also be accepted.
+ this.__cloneState.forced++;
+ accept = true;
+ } else if (filter !== undefined && filter.call(this, datum) !== true) {
+ this.__cloneState.rejected++;
+ } else if (datum.children === undefined) {
+ // Accept leaf nodes that passed the filter
+ this.__cloneState.accepted++;
+ accept = true;
+ } else {
+ // Non-leaf node. If no children are accepted, prune it.
+ this.__cloneState.pruned++;
+ }
+
+ if (accept) {
+ if (datum.id !== undefined) copy.id = datum.id;
+ if (datum.lastPathElement !== undefined) {
+ copy.lastPathElement = datum.lastPathElement;
+ }
+ if (datum.t !== undefined) copy.t = datum.t;
+ if (datum.value !== undefined && datum.children === undefined) {
+ copy.value = datum.value;
+ }
+ } else {
+ // Discard the copy we were going to return
+ copy = undefined;
+ }
+
+ if (trackingStats === true) {
+ // We are the fist call in the recursive chain.
+ console.timeEnd('_clone');
+ var totalAccepted = this.__cloneState.accepted +
+ this.__cloneState.forced;
+ console.log(
+ totalAccepted + ' nodes retained (' +
+ this.__cloneState.forced + ' forced by accepted children, ' +
+ this.__cloneState.accepted + ' accepted on their own merits), ' +
+ this.__cloneState.rejected + ' nodes (and their children) ' +
+ 'filtered out,' +
+ this.__cloneState.pruned + ' nodes pruned because because no ' +
+ 'children remained.');
+ delete this.__cloneState;
+ }
+ return copy;
+}
+
+/**
+ * Re-displays the map showing only the data accepted by |filter| (see
+ * _clone). The unfiltered tree is kept in _backupTree so that subsequent
+ * filters always apply to the original data.
+ */
+D3SymbolTreeMap.prototype.filter = function(filter) {
+  // Ensure we have a copy of the original root.
+  if (this._backupTree === undefined) this._backupTree = this._treeData;
+  // Drop all existing node divs; _setData rebuilds them from scratch.
+  this._mapContainer.selectAll('div').remove();
+  this._setData(this._clone(this._backupTree, filter));
+}
+
+/**
+ * Renders the current nodes: interior nodes first, then leaves. After the
+ * first layout, _firstTransition is cleared so later layouts use the
+ * exit/update/enter transition delays.
+ */
+D3SymbolTreeMap.prototype._doLayout = function() {
+  console.time('_doLayout');
+  this._handleInodes();
+  this._handleLeaves();
+  this._firstTransition = false;
+  console.timeEnd('_doLayout');
+}
+
+// Hover-in handler: show the highlight border for |datum|'s element.
+D3SymbolTreeMap.prototype._highlightElement = function(datum, selection) {
+  this._showHighlight(datum, selection);
+}
+
+// Hover-out handler: fade out the highlight border for |datum|'s element.
+D3SymbolTreeMap.prototype._unhighlightElement = function(datum, selection) {
+  this._hideHighlight(datum, selection);
+}
+
+/**
+ * Creates, transitions and removes the divs for interior (non-leaf) nodes
+ * within the currently-displayed depth range. Each inode is a container div
+ * holding a background rect div and a one-line label div.
+ */
+D3SymbolTreeMap.prototype._handleInodes = function() {
+  console.time('_handleInodes');
+  var thisTreeMap = this;
+  // Interior nodes visible at the current zoom level.
+  var inodes = this._currentNodes.filter(function(datum){
+    return (datum.depth <= thisTreeMap._currentMaxDepth) &&
+        datum.children !== undefined;
+  });
+  var cellsEnter = this._mapContainer.selectAll('div.inode')
+      .data(inodes, function(datum) { return datum.id; })
+      .enter()
+      .append('div').attr('class', 'inode').attr('id', function(datum){
+          return 'node-' + datum.id;});
+
+
+  // Define enter/update/exit for inodes
+  cellsEnter
+      .append('div')
+      .attr('class', 'rect inode_rect_entering')
+      .style('z-index', function(datum) { return datum.id * 2; })
+      .style('position', 'absolute')
+      .style('left', function(datum) { return datum.x; })
+      .style('top', function(datum){ return datum.y; })
+      .style('width', function(datum){ return datum.dx; })
+      .style('height', function(datum){ return datum.dy; })
+      .style('opacity', '0')
+      .style('border', '1px solid black')
+      .style('background-image', function(datum) {
+        return thisTreeMap._makeSymbolBucketBackgroundImage.call(
+            thisTreeMap, datum);
+      })
+      .style('background-color', function(datum) {
+        // Grey for path nodes (no symbol type); otherwise the type color.
+        if (datum.t === undefined) return 'rgb(220,220,220)';
+        return D3SymbolTreeMap.getColorForType(datum.t).toString();
+      })
+      .on('mouseover', function(datum){
+        thisTreeMap._highlightElement.call(
+            thisTreeMap, datum, d3.select(this));
+        thisTreeMap._showInfoBox.call(thisTreeMap, datum);
+      })
+      .on('mouseout', function(datum){
+        thisTreeMap._unhighlightElement.call(
+            thisTreeMap, datum, d3.select(this));
+        thisTreeMap._hideInfoBox.call(thisTreeMap, datum);
+      })
+      .on('mousemove', function(){
+        thisTreeMap._moveInfoBox.call(thisTreeMap, event);
+      })
+      // Double-click zooms in; on the current root it zooms out (all the
+      // way to the top when shift is held).
+      .on('dblclick', function(datum){
+        if (datum !== thisTreeMap._currentRoot) {
+          // Zoom into the selection
+          thisTreeMap._zoomDatum(datum);
+        } else if (datum.parent) {
+          console.log('event.shiftKey=' + event.shiftKey);
+          if (event.shiftKey === true) {
+            // Back to root
+            thisTreeMap._zoomDatum(thisTreeMap._treeData);
+          } else {
+            // Zoom out of the selection
+            thisTreeMap._zoomDatum(datum.parent);
+          }
+        }
+      });
+  cellsEnter
+      .append('div')
+      .attr('class', 'label inode_label_entering')
+      .style('z-index', function(datum) { return (datum.id * 2) + 1; })
+      .style('position', 'absolute')
+      .style('left', function(datum){ return datum.x; })
+      .style('top', function(datum){ return datum.y; })
+      .style('width', function(datum) { return datum.dx; })
+      .style('height', function(datum) { return thisTreeMap.boxPadding.t; })
+      .style('opacity', '0')
+      .style('pointer-events', 'none')
+      .style('-webkit-user-select', 'none')
+      .style('overflow', 'hidden') // required for ellipsis
+      .style('white-space', 'nowrap') // required for ellipsis
+      .style('text-overflow', 'ellipsis')
+      .style('text-align', 'center')
+      .style('vertical-align', 'top')
+      .style('visibility', function(datum) {
+        // Hide labels on boxes too small to read them.
+        return (datum.dx < 15 || datum.dy < 15) ? 'hidden' : 'visible';
+      })
+      .text(function(datum) {
+        var sizeish = ' [' + D3SymbolTreeMap._byteify(datum.value) + ']'
+        var text;
+        if (datum.k === 'b') { // bucket
+          if (datum === thisTreeMap._currentRoot) {
+            text = thisTreeMap.pathFor(datum) + ': '
+                + D3SymbolTreeMap._getSymbolDescription(datum.t)
+          } else {
+            text = D3SymbolTreeMap._getSymbolDescription(datum.t);
+          }
+        } else if (datum === thisTreeMap._currentRoot) {
+          // The top-most level should always show the complete path
+          text = thisTreeMap.pathFor(datum);
+        } else {
+          // Anything that isn't a bucket or a leaf (symbol) or the
+          // current root should just show its name.
+          text = datum.n;
+        }
+        return text + sizeish;
+      }
+  );
+
+  // Complicated transition logic:
+  // For nodes that are entering, we want to fade them in in-place AFTER
+  // any adjusting nodes have resized and moved around. That way, new nodes
+  // seamlessly appear in the right spot after their containers have resized
+  // and moved around.
+  // To do this we do some trickery:
+  // 1. Define a '_entering' class on the entering elements
+  // 2. Use this to select only the entering elements and apply the opacity
+  //    transition.
+  // 3. Use the same transition to drop the '_entering' suffix, so that they
+  //    will correctly update in later zoom/resize/whatever operations.
+  // 4. The update transition is achieved by selecting the elements without
+  //    the '_entering_' suffix and applying movement and resizing transition
+  //    effects.
+  this._mapContainer.selectAll('div.inode_rect_entering').transition()
+      .duration(thisTreeMap._enterDuration).delay(
+          this._firstTransition ? 0 : thisTreeMap._exitDuration +
+              thisTreeMap._updateDuration)
+      .attr('class', 'rect inode_rect')
+      .style('opacity', '1')
+  this._mapContainer.selectAll('div.inode_label_entering').transition()
+      .duration(thisTreeMap._enterDuration).delay(
+          this._firstTransition ? 0 : thisTreeMap._exitDuration +
+              thisTreeMap._updateDuration)
+      .attr('class', 'label inode_label')
+      .style('opacity', '1')
+  this._mapContainer.selectAll('div.inode_rect').transition()
+      .duration(thisTreeMap._updateDuration).delay(thisTreeMap._exitDuration)
+      .style('opacity', '1')
+      .style('background-image', function(datum) {
+        return thisTreeMap._makeSymbolBucketBackgroundImage.call(
+            thisTreeMap, datum);
+      })
+      .style('left', function(datum) { return datum.x; })
+      .style('top', function(datum){ return datum.y; })
+      .style('width', function(datum){ return datum.dx; })
+      .style('height', function(datum){ return datum.dy; });
+  this._mapContainer.selectAll('div.inode_label').transition()
+      .duration(thisTreeMap._updateDuration).delay(thisTreeMap._exitDuration)
+      .style('opacity', '1')
+      .style('visibility', function(datum) {
+        return (datum.dx < 15 || datum.dy < 15) ? 'hidden' : 'visible';
+      })
+      .style('left', function(datum){ return datum.x; })
+      .style('top', function(datum){ return datum.y; })
+      .style('width', function(datum) { return datum.dx; })
+      .style('height', function(datum) { return thisTreeMap.boxPadding.t; })
+      .text(function(datum) {
+        var sizeish = ' [' + D3SymbolTreeMap._byteify(datum.value) + ']'
+        var text;
+        if (datum.k === 'b') {
+          if (datum === thisTreeMap._currentRoot) {
+            text = thisTreeMap.pathFor(datum) + ': ' +
+                D3SymbolTreeMap._getSymbolDescription(datum.t)
+          } else {
+            text = D3SymbolTreeMap._getSymbolDescription(datum.t);
+          }
+        } else if (datum === thisTreeMap._currentRoot) {
+          // The top-most level should always show the complete path
+          text = thisTreeMap.pathFor(datum);
+        } else {
+          // Anything that isn't a bucket or a leaf (symbol) or the
+          // current root should just show its name.
+          text = datum.n;
+        }
+        return text + sizeish;
+      });
+  // NOTE(review): this exit join keys on 'inode-' + id while the enter join
+  // above keys on the plain id — confirm the mismatch is intentional, since
+  // different key functions make the two joins bind elements differently.
+  var exit = this._mapContainer.selectAll('div.inode')
+      .data(inodes, function(datum) { return 'inode-' + datum.id; })
+      .exit();
+  exit.selectAll('div.inode_rect').transition().duration(
+      thisTreeMap._exitDuration).style('opacity', 0);
+  exit.selectAll('div.inode_label').transition().duration(
+      thisTreeMap._exitDuration).style('opacity', 0);
+  exit.transition().delay(thisTreeMap._exitDuration + 1).remove();
+
+  console.log(inodes.length + ' inodes layed out.');
+  console.timeEnd('_handleInodes');
+}
+
+D3SymbolTreeMap.prototype._handleLeaves = function() {
+ console.time('_handleLeaves');
+ var color_fn = d3.scale.category10();
+ var thisTreeMap = this;
+ var leaves = this._currentNodes.filter(function(datum){
+ return (datum.depth <= thisTreeMap._currentMaxDepth) &&
+ datum.children === undefined; });
+ var cellsEnter = this._mapContainer.selectAll('div.leaf')
+ .data(leaves, function(datum) { return datum.id; })
+ .enter()
+ .append('div').attr('class', 'leaf').attr('id', function(datum){
+ return 'node-' + datum.id;
+ });
+
+ // Define enter/update/exit for leaves
+ cellsEnter
+ .append('div')
+ .attr('class', 'rect leaf_rect_entering')
+ .style('z-index', function(datum) { return datum.id * 2; })
+ .style('position', 'absolute')
+ .style('left', function(datum){ return datum.x; })
+ .style('top', function(datum){ return datum.y; })
+ .style('width', function(datum){ return datum.dx; })
+ .style('height', function(datum){ return datum.dy; })
+ .style('opacity', '0')
+ .style('background-color', function(datum) {
+ if (datum.t === undefined) return 'rgb(220,220,220)';
+ return D3SymbolTreeMap.getColorForType(datum.t)
+ .darker(0.3).toString();
+ })
+ .style('border', '1px solid black')
+ .on('mouseover', function(datum){
+ thisTreeMap._highlightElement.call(
+ thisTreeMap, datum, d3.select(this));
+ thisTreeMap._showInfoBox.call(thisTreeMap, datum);
+ })
+ .on('mouseout', function(datum){
+ thisTreeMap._unhighlightElement.call(
+ thisTreeMap, datum, d3.select(this));
+ thisTreeMap._hideInfoBox.call(thisTreeMap, datum);
+ })
+ .on('mousemove', function(){ thisTreeMap._moveInfoBox.call(
+ thisTreeMap, event);
+ });
+ cellsEnter
+ .append('div')
+ .attr('class', 'label leaf_label_entering')
+ .style('z-index', function(datum) { return (datum.id * 2) + 1; })
+ .style('position', 'absolute')
+ .style('left', function(datum){ return datum.x; })
+ .style('top', function(datum){ return datum.y; })
+ .style('width', function(datum) { return datum.dx; })
+ .style('height', function(datum) { return datum.dy; })
+ .style('opacity', '0')
+ .style('pointer-events', 'none')
+ .style('-webkit-user-select', 'none')
+ .style('overflow', 'hidden') // required for ellipsis
+ .style('white-space', 'nowrap') // required for ellipsis
+ .style('text-overflow', 'ellipsis')
+ .style('text-align', 'center')
+ .style('vertical-align', 'middle')
+ .style('visibility', function(datum) {
+ return (datum.dx < 15 || datum.dy < 15) ? 'hidden' : 'visible';
+ })
+ .text(function(datum) { return datum.n; });
+
+ // Complicated transition logic: See note in _handleInodes()
+ this._mapContainer.selectAll('div.leaf_rect_entering').transition()
+ .duration(thisTreeMap._enterDuration).delay(
+ this._firstTransition ? 0 : thisTreeMap._exitDuration +
+ thisTreeMap._updateDuration)
+ .attr('class', 'rect leaf_rect')
+ .style('opacity', '1')
+ this._mapContainer.selectAll('div.leaf_label_entering').transition()
+ .duration(thisTreeMap._enterDuration).delay(
+ this._firstTransition ? 0 : thisTreeMap._exitDuration +
+ thisTreeMap._updateDuration)
+ .attr('class', 'label leaf_label')
+ .style('opacity', '1')
+ this._mapContainer.selectAll('div.leaf_rect').transition()
+ .duration(thisTreeMap._updateDuration).delay(thisTreeMap._exitDuration)
+ .style('opacity', '1')
+ .style('left', function(datum){ return datum.x; })
+ .style('top', function(datum){ return datum.y; })
+ .style('width', function(datum){ return datum.dx; })
+ .style('height', function(datum){ return datum.dy; });
+ this._mapContainer.selectAll('div.leaf_label').transition()
+ .duration(thisTreeMap._updateDuration).delay(thisTreeMap._exitDuration)
+ .style('opacity', '1')
+ .style('visibility', function(datum) {
+ return (datum.dx < 15 || datum.dy < 15) ? 'hidden' : 'visible';
+ })
+ .style('left', function(datum){ return datum.x; })
+ .style('top', function(datum){ return datum.y; })
+ .style('width', function(datum) { return datum.dx; })
+ .style('height', function(datum) { return datum.dy; });
+ var exit = this._mapContainer.selectAll('div.leaf')
+ .data(leaves, function(datum) { return 'leaf-' + datum.id; })
+ .exit();
+ exit.selectAll('div.leaf_rect').transition()
+ .duration(thisTreeMap._exitDuration)
+ .style('opacity', 0);
+ exit.selectAll('div.leaf_label').transition()
+ .duration(thisTreeMap._exitDuration)
+ .style('opacity', 0);
+ exit.transition().delay(thisTreeMap._exitDuration + 1).remove();
+
+ console.log(leaves.length + ' leaves layed out.');
+ console.timeEnd('_handleLeaves');
+}
+
+D3SymbolTreeMap.prototype._makeSymbolBucketBackgroundImage = function(datum) {
+ if (!(datum.t === undefined && datum.depth == this._currentMaxDepth)) {
+ return 'none';
+ }
+ var text = '';
+ var lastStop = 0;
+ for (var x = 0; x < D3SymbolTreeMap._NM_SYMBOL_TYPES.length; x++) {
+ symbol_type = D3SymbolTreeMap._NM_SYMBOL_TYPES.charAt(x);
+ var stats = datum.symbol_stats[symbol_type];
+ if (stats !== undefined) {
+ if (text.length !== 0) {
+ text += ', ';
+ }
+ var percent = 100 * (stats.size / datum.value);
+ var nowStop = lastStop + percent;
+ var tempcolor = D3SymbolTreeMap.getColorForType(symbol_type);
+ var color = d3.rgb(tempcolor).toString();
+ text += color + ' ' + lastStop + '%, ' + color + ' ' +
+ nowStop + '%';
+ lastStop = nowStop;
+ }
+ }
+ return 'linear-gradient(' + (datum.dx > datum.dy ? 'to right' :
+ 'to bottom') + ', ' + text + ')';
+}
+
+D3SymbolTreeMap.prototype.pathFor = function(datum) {
+ if (datum.__path) return datum.__path;
+ parts=[];
+ node = datum;
+ while (node) {
+ if (node.k === 'p') { // path node
+ if(node.n !== '/') parts.unshift(node.n);
+ }
+ node = node.parent;
+ }
+ datum.__path = '/' + parts.join('/');
+ return datum.__path;
+}
+
+/**
+ * Creates an invisible highlight border div sized 2px beyond |selection|'s
+ * box on every side, and stores it on the datum as 'highlight'.
+ */
+D3SymbolTreeMap.prototype._createHighlight = function(datum, selection) {
+  var x = parseInt(selection.style('left'));
+  var y = parseInt(selection.style('top'));
+  var w = parseInt(selection.style('width'));
+  var h = parseInt(selection.style('height'));
+  datum.highlight = this._mapContainer.append('div')
+      .attr('id', 'h-' + datum.id)
+      .attr('class', 'highlight')
+      .style('pointer-events', 'none')
+      .style('-webkit-user-select', 'none')
+      .style('z-index', '999999')
+      .style('position', 'absolute')
+      .style('top', y-2)
+      .style('left', x-2)
+      .style('width', w+4)
+      .style('height', h+4)
+      .style('margin', 0)
+      .style('padding', 0)
+      .style('border', '4px outset rgba(250,40,200,0.9)')
+      .style('box-sizing', 'border-box')
+      .style('opacity', 0.0);
+}
+
+/**
+ * Fades in the highlight border for |datum|, creating it on first use.
+ * The displayed root is never highlighted.
+ */
+D3SymbolTreeMap.prototype._showHighlight = function(datum, selection) {
+  if (datum === this._currentRoot) return;
+  if (datum.highlight === undefined) {
+    this._createHighlight(datum, selection);
+  }
+  datum.highlight.transition().duration(200).style('opacity', 1.0);
+}
+
+/**
+ * Fades out and then removes |datum|'s highlight border, if any.
+ */
+D3SymbolTreeMap.prototype._hideHighlight = function(datum, selection) {
+  if (datum.highlight === undefined) return;
+  datum.highlight.transition().duration(750)
+      .style('opacity', 0)
+      .each('end', function(){
+        // Remove the div only after the fade completes.
+        if (datum.highlight) datum.highlight.remove();
+        delete datum.highlight;
+      });
+}
+
+/**
+ * Creates the (initially hidden) hover info box appended to the document
+ * body, and returns the d3 selection for it.
+ */
+D3SymbolTreeMap.prototype._createInfoBox = function() {
+  return d3.select('body')
+      .append('div')
+      .attr('id', 'infobox')
+      .style('z-index', '2147483647') // (2^31) - 1: Hopefully safe :)
+      .style('position', 'absolute')
+      .style('visibility', 'hidden')
+      .style('background-color', 'rgba(255,255,255, 0.9)')
+      .style('border', '1px solid black')
+      .style('padding', '10px')
+      .style('-webkit-user-select', 'none')
+      .style('box-shadow', '3px 3px rgba(70,70,70,0.5)')
+      .style('border-radius', '10px')
+      .style('white-space', 'nowrap');
+}
+
+D3SymbolTreeMap.prototype._showInfoBox = function(datum) {
+ this.infobox.text('');
+ var numSymbols = 0;
+ var sizeish = D3SymbolTreeMap._pretty(datum.value) + ' bytes (' +
+ D3SymbolTreeMap._byteify(datum.value) + ')';
+ if (datum.k === 'p' || datum.k === 'b') { // path or bucket
+ if (datum.symbol_stats) { // can be empty if filters are applied
+ for (var x = 0; x < D3SymbolTreeMap._NM_SYMBOL_TYPES.length; x++) {
+ symbol_type = D3SymbolTreeMap._NM_SYMBOL_TYPES.charAt(x);
+ var stats = datum.symbol_stats[symbol_type];
+ if (stats !== undefined) numSymbols += stats.count;
+ }
+ }
+ } else if (datum.k === 's') { // symbol
+ numSymbols = 1;
+ }
+
+ if (datum.k === 'p' && !datum.lastPathElement) {
+ this.infobox.append('div').text('Directory: ' + this.pathFor(datum))
+ this.infobox.append('div').text('Size: ' + sizeish);
+ } else {
+ if (datum.k === 'p') { // path
+ this.infobox.append('div').text('File: ' + this.pathFor(datum))
+ this.infobox.append('div').text('Size: ' + sizeish);
+ } else if (datum.k === 'b') { // bucket
+ this.infobox.append('div').text('Symbol Bucket: ' +
+ D3SymbolTreeMap._getSymbolDescription(datum.t));
+ this.infobox.append('div').text('Count: ' + numSymbols);
+ this.infobox.append('div').text('Size: ' + sizeish);
+ this.infobox.append('div').text('Location: ' + this.pathFor(datum))
+ } else if (datum.k === 's') { // symbol
+ this.infobox.append('div').text('Symbol: ' + datum.n);
+ this.infobox.append('div').text('Type: ' +
+ D3SymbolTreeMap._getSymbolDescription(datum.t));
+ this.infobox.append('div').text('Size: ' + sizeish);
+ this.infobox.append('div').text('Location: ' + this.pathFor(datum))
+ }
+ }
+ if (datum.k === 'p') {
+ this.infobox.append('div')
+ .text('Number of symbols: ' + D3SymbolTreeMap._pretty(numSymbols));
+ if (datum.symbol_stats) { // can be empty if filters are applied
+ var table = this.infobox.append('table')
+ .attr('border', 1).append('tbody');
+ var header = table.append('tr');
+ header.append('th').text('Type');
+ header.append('th').text('Count');
+ header.append('th')
+ .style('white-space', 'nowrap')
+ .text('Total Size (Bytes)');
+ for (var x = 0; x < D3SymbolTreeMap._NM_SYMBOL_TYPES.length; x++) {
+ symbol_type = D3SymbolTreeMap._NM_SYMBOL_TYPES.charAt(x);
+ var stats = datum.symbol_stats[symbol_type];
+ if (stats !== undefined) {
+ var tr = table.append('tr');
+ tr.append('td')
+ .style('white-space', 'nowrap')
+ .text(D3SymbolTreeMap._getSymbolDescription(
+ symbol_type));
+ tr.append('td').text(D3SymbolTreeMap._pretty(stats.count));
+ tr.append('td').text(D3SymbolTreeMap._pretty(stats.size));
+ }
+ }
+ }
+ }
+ this.infobox.style('visibility', 'visible');
+}
+
+// Hides the hover info box (content is left in place; _showInfoBox resets it).
+D3SymbolTreeMap.prototype._hideInfoBox = function(datum) {
+  this.infobox.style('visibility', 'hidden');
+}
+
+/**
+ * Positions the info box 10px below-right of the mouse cursor, reflecting
+ * it to the other side of the cursor when it would overflow the window.
+ */
+D3SymbolTreeMap.prototype._moveInfoBox = function(event) {
+  var element = document.getElementById('infobox');
+  var w = element.offsetWidth;
+  var h = element.offsetHeight;
+  var offsetLeft = 10;
+  var offsetTop = 10;
+
+  var rightLimit = window.innerWidth;
+  var rightEdge = event.pageX + offsetLeft + w;
+  if (rightEdge > rightLimit) {
+    // Too close to screen edge, reflect around the cursor
+    offsetLeft = -1 * (w + offsetLeft);
+  }
+
+  var bottomLimit = window.innerHeight;
+  var bottomEdge = event.pageY + offsetTop + h;
+  if (bottomEdge > bottomLimit) {
+    // Too close to screen edge, reflect around the cursor
+    offsetTop = -1 * (h + offsetTop);
+  }
+
+  this.infobox.style('top', (event.pageY + offsetTop) + 'px')
+      .style('left', (event.pageX + offsetLeft) + 'px');
+}
+
+D3SymbolTreeMap.prototype.biggestSymbols = function(maxRecords) {
+ var result = undefined;
+ var smallest = undefined;
+ var sortFunction = function(a,b) {
+ var result = b.value - a.value;
+ if (result !== 0) return result; // sort by size
+ var pathA = treemap.pathFor(a); // sort by path
+ var pathB = treemap.pathFor(b);
+ if (pathA > pathB) return 1;
+ if (pathB > pathA) return -1;
+ return a.n - b.n; // sort by symbol name
+ };
+ this.visitFromDisplayedRoot(function(datum) {
+ if (datum.children) return; // ignore non-leaves
+ if (!result) { // first element
+ result = [datum];
+ smallest = datum.value;
+ return;
+ }
+ if (result.length < maxRecords) { // filling the array
+ result.push(datum);
+ return;
+ }
+ if (datum.value > smallest) { // array is already full
+ result.push(datum);
+ result.sort(sortFunction);
+ result.pop(); // get rid of smallest element
+ smallest = result[maxRecords - 1].value; // new threshold for entry
+ }
+ });
+ result.sort(sortFunction);
+ return result;
+}
+
+D3SymbolTreeMap.prototype.biggestPaths = function(maxRecords) {
+ var result = undefined;
+ var smallest = undefined;
+ var sortFunction = function(a,b) {
+ var result = b.value - a.value;
+ if (result !== 0) return result; // sort by size
+ var pathA = treemap.pathFor(a); // sort by path
+ var pathB = treemap.pathFor(b);
+ if (pathA > pathB) return 1;
+ if (pathB > pathA) return -1;
+ console.log('warning, multiple entries for the same path: ' + pathA);
+ return 0; // should be impossible
+ };
+ this.visitFromDisplayedRoot(function(datum) {
+ if (!datum.lastPathElement) return; // ignore non-files
+ if (!result) { // first element
+ result = [datum];
+ smallest = datum.value;
+ return;
+ }
+ if (result.length < maxRecords) { // filling the array
+ result.push(datum);
+ return;
+ }
+ if (datum.value > smallest) { // array is already full
+ result.push(datum);
+ result.sort(sortFunction);
+ result.pop(); // get rid of smallest element
+ smallest = result[maxRecords - 1].value; // new threshold for entry
+ }
+ });
+ result.sort(sortFunction);
+ return result;
+}
diff --git a/chromium/tools/binary_size/template/index.html b/chromium/tools/binary_size/template/index.html
new file mode 100644
index 00000000000..7e1a1fca491
--- /dev/null
+++ b/chromium/tools/binary_size/template/index.html
@@ -0,0 +1,525 @@
+<!--
+ Copyright 2014 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+-->
+<html>
+<head>
+<title>Binary Size Analysis</title>
+<script src="d3/d3.js" charset="utf-8"></script>
+<script src="D3SymbolTreeMap.js" charset="utf-8"></script>
+<script src="data.js" charset="utf-8"></script>
+<style>
+body {
+ margin: 0px;
+ padding: 5px;
+}
+.swatch {
+ border: 1px solid rgb(100,100,100);
+ -webkit-user-select: none;
+ cursor: default;
+}
+</style>
+<script>
+var treemap;
+var filterChanging = false;
+var savedSettings = {};
+
+// Page entry point (wired to <body onload>). Reads the optional
+// window.metadata.subtitle, initializes the filter controls, then
+// constructs and renders the treemap at the saved dimensions.
+function init() {
+  if (window.metadata !== undefined && window.metadata.subtitle) {
+    // escape() here is the page-local HTML escaper defined below,
+    // not the deprecated global window.escape.
+    document.getElementById('subtitle').innerHTML = ': ' + escape(metadata.subtitle);
+  }
+  initFilterOptions();
+  treemap = new D3SymbolTreeMap(
+      savedSettings.width,
+      savedSettings.height,
+      savedSettings.maxLevels);
+  treemap.init();
+}
+
+function getIdealSizes() {
+ var width = window.innerWidth - 20;
+ var height = window.innerHeight - 70;
+ return {'width': width, 'height': height};
+}
+
+function showReport(title, data, headers, dataFunction, styleFunction) {
+ var div = d3.select('body').append('div')
+ .style('margin', '0')
+ .style('padding', '5px')
+ .style('position', 'absolute')
+ .style('top', '10%')
+ .style('left', '10%')
+ .style('background-color', 'rgba(255,255,255,0.9)')
+ .style('width', '80%')
+ .style('height', '80%')
+ .style('z-index', '2147483647')
+ .style('border', '3px ridge grey')
+ .style('box-shadow', '10px 10px 5px rgba(80,80,80,0.7)')
+ .style('text-align', 'center')
+ .style('border-radius', '10px');
+ var titlebar = div.append('div')
+ .style('margin', '0')
+ .style('padding', '5px')
+ .style('position', 'absolute')
+ .style('top', '0%')
+ .style('left', '0%')
+ .style('width', '100%')
+ .style('height', '10%')
+ .style('font-size', 'x-large');
+ titlebar.text(title);
+ var controls = div.append('div')
+ .style('margin', '0')
+ .style('padding', '5px')
+ .style('position', 'absolute')
+ .style('top', '90%')
+ .style('left', '0%')
+ .style('width', '100%')
+ .style('height', '10%');
+ controls.append('input').attr('type', 'button')
+ .attr('value', 'Dismiss')
+ .on('click', function(){div.remove();});
+
+ var tableDiv = div.append('div')
+ .style('overflow', 'auto')
+ .style('position', 'absolute')
+ .style('top', '10%')
+ .style('left', '0%')
+ .style('width', '100%')
+ .style('height', '80%')
+ .style('border-top', '1px solid rgb(230,230,230)')
+ .style('border-bottom', '1px solid rgb(230,230,230)');
+ var table = tableDiv.append('table')
+ .attr('border', '1')
+ .attr('cellspacing', '0')
+ .attr('cellpadding', '2')
+ .style('margin-left', 'auto')
+ .style('margin-right', 'auto');
+ var header = table.append('tr');
+ for (var i = 0; i < headers.length; i++) {
+ header.append('th').text(headers[i]);
+ }
+
+ for (var i = 0; i < data.length; i++) {
+ var row = table.append('tr');
+ for (j = 0; j < headers.length; j++) {
+ var td = row.append('td');
+ if (styleFunction) {
+ styleFunction.call(this, td, j);
+ }
+ dataFunction.call(this, data[i], j, td);
+ }
+ }
+}
+
+// Show a modal report of the 100 largest symbols within the currently
+// displayed treemap root. Symbols under /out are shown as plain text;
+// everything else links to Chromium codesearch.
+function bigSymbolsReport() {
+  var list = treemap.biggestSymbols(100);
+  var headers = ['Rank', 'Size (Bytes)', 'Type', 'Location'];
+  var styleFunction = function(selection, index) {
+    if (index === 3) {
+      // Location column: render paths/symbols in a fixed-width font.
+      selection.style('font-family', 'monospace');
+    }
+  };
+  var recordIndex = 1;
+  var dataFunction = function(record, index, cell) {
+    if (index === 0) {
+      cell.text(recordIndex++);
+    } else if (index === 1) {
+      cell.text(D3SymbolTreeMap._pretty(record.value));
+    } else if (index === 2) {
+      cell.text(record.t);
+    } else {
+      // Generated files under /out have no codesearch page; plain text.
+      if (treemap.pathFor(record).indexOf('/out') == 0) {
+        cell.append('span').text(treemap.pathFor(record));
+        cell.append('br');
+        cell.append('span').text('Symbol: ');
+        cell.append('span').text(record.n);
+      } else {
+        var href = 'https://code.google.com/p/chromium/codesearch#chromium/src'
+            + treemap.pathFor(record)
+            + '&q='
+            + record.n;
+        cell.append('a')
+          .attr('href', href)
+          .attr('target', '_blank')
+          .text(treemap.pathFor(record));
+        cell.append('br');
+        cell.append('span').text('Symbol: ');
+        cell.append('span').text(record.n);
+      }
+    }
+  };
+  showReport('100 Largest Symbols', list, headers, dataFunction, styleFunction);
+}
+
+// Show a modal report of the 100 largest source files within the
+// currently displayed treemap root. Mirrors bigSymbolsReport() but at
+// file granularity (no symbol name / type columns).
+function bigPathsReport() {
+  var list = treemap.biggestPaths(100);
+  var headers = ['Rank', 'Size (Bytes)', 'Location'];
+  var styleFunction = function(selection, index) {
+    if (index === 2) {
+      // Location column: fixed-width font for paths.
+      selection.style('font-family', 'monospace');
+    }
+  };
+  var recordIndex = 1;
+  var dataFunction = function(record, index, cell) {
+    if (index === 0) {
+      cell.text(recordIndex++);
+    } else if (index === 1) {
+      cell.text(D3SymbolTreeMap._pretty(record.value));
+    } else if (index === 2) {
+      // Generated files under /out have no codesearch page; plain text.
+      if (treemap.pathFor(record).indexOf('/out') == 0) {
+        cell.text(treemap.pathFor(record));
+      } else {
+        var href = 'https://code.google.com/p/chromium/codesearch#chromium/src' + treemap.pathFor(record);
+        cell.append('a')
+          .attr('href', href)
+          .attr('target', '_blank')
+          .text(treemap.pathFor(record));
+      }
+
+    }
+  };
+  showReport('100 Largest Paths', list, headers, dataFunction, styleFunction);
+}
+
+// Sync the 26 per-type checkboxes (ids check_0..check_25) with the
+// free-form symbol-type text field as the user types. The
+// filterChanging flag prevents mutual recursion with updateFilterText().
+function symbolFilterTextChanged() {
+  if (filterChanging) return true;
+  filterChanging = true;
+  var enabled = document.getElementById('symbol_types_filter').value;
+  for (var x=0; x<=25; x++) {
+    var checkBox = document.getElementById('check_' + x);
+    // A checkbox is on iff its type code appears in the text field.
+    checkBox.checked = (enabled.indexOf(checkBox.value) != -1);
+  }
+  filterChanging = false;
+}
+
+// Inverse of symbolFilterTextChanged(): rebuild the free-form
+// symbol-type text field from the states of the 26 checkboxes. The
+// filterChanging flag prevents mutual recursion between the two.
+function updateFilterText() {
+  if (filterChanging) return true;
+  filterChanging = true;
+  var text = '';
+  for (var x=0; x<=25; x++) {
+    var checkBox = document.getElementById('check_' + x);
+    if (checkBox.checked) {
+      text += checkBox.value;
+    }
+  }
+  document.getElementById('symbol_types_filter').value=text;
+  filterChanging = false;
+}
+
+// One-time setup of the options dialog: wire checkbox handlers, paint
+// the legend swatches, enable/disable the advanced-filter inputs with
+// their checkboxes, seed width/height from the window size, and snapshot
+// the result into savedSettings.
+function initFilterOptions() {
+  updateFilterText();
+  for (var x=0; x<=25; x++) {
+    var checkBox = document.getElementById('check_' + x);
+    checkBox.onchange=updateFilterText;
+    // Color each legend swatch to match the treemap's type color.
+    var swatch = document.getElementById('swatch_' + x);
+    swatch.style.backgroundColor = D3SymbolTreeMap.getColorForType(checkBox.value).toString();
+  }
+  var gteCheckbox = document.getElementById('check_gte');
+  gteCheckbox.onchange = function() {
+    document.getElementById('symbol_filter_gte').disabled = !gteCheckbox.checked;
+  }
+  var regexCheckbox = document.getElementById('check_regex');
+  regexCheckbox.onchange = function() {
+    document.getElementById('symbol_filter_regex').disabled = !regexCheckbox.checked;
+  }
+  var excludeRegexCheckbox = document.getElementById('check_exclude_regex');
+  excludeRegexCheckbox.onchange = function() {
+    document.getElementById('symbol_filter_exclude_regex').disabled = !excludeRegexCheckbox.checked;
+  }
+  var idealSizes = getIdealSizes();
+  document.getElementById('width').value = idealSizes.width;
+  document.getElementById('height').value = idealSizes.height;
+  saveFilterSettings();
+}
+
+// Set all 26 symbol-type checkboxes to the given state (the
+// "All"/"None" buttons), then resync the text field.
+function filterSetAll(enabled) {
+  for (var x=0; x<=25; x++) {
+    var checkBox = document.getElementById('check_' + x);
+    checkBox.checked = enabled;
+  }
+  updateFilterText();
+}
+
+// Open the options dialog, populated from savedSettings. The container
+// is positioned at 50%/50%; a negative margin of half its rendered
+// size centers it on screen.
+function showOptions() {
+  loadFilterSettings();
+  var container = document.getElementById('options_container');
+  var w = container.offsetWidth;
+  var h = container.offsetHeight;
+  container.style.margin = '-' + (h/2) + 'px 0 0 -' + (w/2) + 'px';
+  container.style.visibility = 'visible';
+}
+
+// Hide the options dialog without touching any settings.
+function hideOptions() {
+  var container = document.getElementById('options_container');
+  container.style.visibility = 'hidden';
+}
+
+function applySettings() {
+ hideOptions();
+ var oldWidth = savedSettings.width;
+ var oldHeight = savedSettings.height;
+ var oldSymbols = savedSettings.symbolTypes;
+ var oldRegex = savedSettings.regex;
+ var oldExcludeRegex = savedSettings.excludeRegex;
+ var oldGte = savedSettings.gte;
+ var oldMaxLevels = savedSettings.maxLevels;
+ saveFilterSettings();
+ var resizeNeeded = oldWidth !== savedSettings.width || oldHeight !== savedSettings.height;
+ var regexChanged = oldRegex !== savedSettings.regex;
+ var excludeRegexChanged = oldExcludeRegex !== savedSettings.excludeRegex;
+ var symbolsChanged = oldSymbols !== savedSettings.symbolTypes;
+ var gteChanged = oldGte !== savedSettings.gte;
+ var filterChanged = regexChanged || excludeRegexChanged || symbolsChanged || gteChanged;
+ var maxLevelsChanged = oldMaxLevels !== savedSettings.maxLevels;
+
+ if (filterChanged) {
+ // Type filters
+ typeFilter = function(datum) {
+ if (datum.depth === 0) return true; // root node
+ if (datum.t === undefined) return true;
+ return savedSettings.symbolTypes !== undefined &&
+ savedSettings.symbolTypes.indexOf(datum.t) !== -1;
+ }
+
+ // Regex filter
+ var regexFilter = undefined;
+ if (savedSettings.regex !== undefined && savedSettings.regex.length > 0) {
+ console.log('filter: regex is "' + savedSettings.regex + '"');
+ var regex = new RegExp(savedSettings.regex);
+ regexFilter = function(datum) {
+ if (datum.depth === 0) return true; // root node
+ var fullName = this.pathFor(datum);
+ if (datum.children === undefined) { // it is a leaf node (symbol)
+ fullName += ':' + datum.n;
+ }
+ return regex.test(fullName);
+ }
+ }
+
+ // Exclude regex filter
+ var excludeRegexFilter = undefined;
+ if (savedSettings.excludeRegex !== undefined && savedSettings.excludeRegex.length > 0) {
+ console.log('filter: exclude-regex is "' + savedSettings.excludeRegex + '"');
+ var excludeRegex = new RegExp(savedSettings.excludeRegex);
+ excludeRegexFilter = function(datum) {
+ if (datum.depth === 0) return true; // root node
+ var fullName = this.pathFor(datum);
+ if (datum.children === undefined) { // it is a leaf node (symbol)
+ fullName += ':' + datum.n;
+ }
+ return !excludeRegex.test(fullName);
+ }
+ }
+
+ // Size filter
+ var sizeFilter = undefined;
+ if (savedSettings.gte !== undefined) {
+ console.log('filter: minimum size is ' + savedSettings.gte + ' bytes');
+ sizeFilter = function(datum) {
+ if (datum.children !== undefined) return true; // non-leaf
+ if (datum.value === undefined) console.log('whoops');
+ return datum.value >= savedSettings.gte;
+ }
+ }
+
+ // Make a filter to apply to the tree
+ var filter = function(datum) {
+ if (typeFilter && !typeFilter.call(this, datum)) return false;
+ if (regexFilter && !regexFilter.call(this, datum)) return false;
+ if (excludeRegexFilter && !excludeRegexFilter.call(this, datum)) return false;
+ if (sizeFilter && !sizeFilter.call(this, datum)) return false;
+ return true;
+ };
+ treemap.filter(filter);
+ }
+
+ // Adjust levels if needed.
+ if (maxLevelsChanged) {
+ treemap.setMaxLevels(savedSettings.maxLevels);
+ }
+
+ // Resize map if necessary.
+ if (resizeNeeded) {
+ console.log('desired treemap dimensions have changed, requesting resize');
+ treemap.resize(savedSettings.width, savedSettings.height);
+ }
+}
+
+// Close the options dialog and revert the form fields to the last
+// applied settings, discarding any unapplied edits.
+function cancelSettings() {
+  hideOptions();
+  loadFilterSettings();
+}
+
+function saveFilterSettings() {
+ savedSettings.symbolTypes = document.getElementById('symbol_types_filter').value;
+ if (document.getElementById('check_regex').checked) {
+ savedSettings.regex = document.getElementById('symbol_filter_regex').value;
+ } else {
+ savedSettings.regex = undefined;
+ }
+ if (document.getElementById('check_exclude_regex').checked) {
+ savedSettings.excludeRegex = document.getElementById('symbol_filter_exclude_regex').value;
+ } else {
+ savedSettings.excludeRegex = undefined;
+ }
+ if (document.getElementById('check_gte').checked) {
+ savedSettings.gte = parseInt(document.getElementById('symbol_filter_gte').value);
+ } else {
+ savedSettings.gte = undefined;
+ }
+ savedSettings.width = parseInt(document.getElementById('width').value);
+ savedSettings.height = parseInt(document.getElementById('height').value);
+ savedSettings.maxLevels = parseInt(document.getElementById('max_levels').value);
+}
+
+// Inverse of saveFilterSettings(): push savedSettings back into the
+// options form. An undefined advanced-filter value unchecks its
+// checkbox and leaves the corresponding text field untouched.
+function loadFilterSettings() {
+  document.getElementById('symbol_types_filter').value = savedSettings.symbolTypes;
+  symbolFilterTextChanged();
+  if (savedSettings.regex !== undefined) {
+    document.getElementById('check_regex').checked = true;
+    document.getElementById('symbol_filter_regex').value = savedSettings.regex;
+  } else {
+    document.getElementById('check_regex').checked = false;
+  }
+  if (savedSettings.excludeRegex !== undefined) {
+    document.getElementById('check_exclude_regex').checked = true;
+    document.getElementById('symbol_filter_exclude_regex').value = savedSettings.excludeRegex;
+  } else {
+    document.getElementById('check_exclude_regex').checked = false;
+  }
+  if (savedSettings.gte !== undefined) {
+    document.getElementById('check_gte').checked = true;
+    document.getElementById('symbol_filter_gte').value = savedSettings.gte;
+  } else {
+    document.getElementById('check_gte').checked = false;
+  }
+  document.getElementById('width').value = savedSettings.width;
+  document.getElementById('height').value = savedSettings.height;
+  document.getElementById('max_levels').value = savedSettings.maxLevels;
+}
+
+function escape(str) {
+ return str.replace(/&/g, '&amp;')
+ .replace(/"/g, '&quot;')
+ .replace(/</g, '&lt;')
+ .replace(/>/g, '&gt;');
+}
+</script>
+</head>
+<body onload='init()'>
+<div style='position: absolute; top: 5px; left: 5px;'>
+ <input type='button' onclick='showOptions()' value='Options &amp; Legend...'>
+ <span style='-webkit-user-select: none; cursor: help;' title='Click to view the symbol legend or to configure filters and options for the treemap'>[?]</span>
+</div>
+<div style='position: absolute; right: 5px; top: 5px; white-space: nowrap;'>
+ Reports:
+  <input type='button' onclick='bigSymbolsReport()' value='Large Symbols' title='Click to view a report of the largest 100 symbols that are within the bounds of the treemap that is currently displayed.'>
+  <input type='button' onclick='bigPathsReport()' value='Large Files' title='Click to view a report of the largest 100 source files that are within the bounds of the treemap that is currently displayed.'>
+</div>
+<div style='text-align: center; margin-bottom: 5px;'>
+ <span style='font-size: x-large; font-weight: bold; font-variant: small-caps'>Binary Size Analysis<span id='subtitle'></span></span>
+ <br><span style='font-size: small; font-style: italic;'>Double-click a box to zoom in, double-click outermost title to zoom out.</span>
+</div>
+<table id='options_container' style='visibility: hidden; border: 3px ridge grey; padding: 0px; top: 50%; left: 50%; position: fixed; z-index: 2147483646; overflow: auto; background-color: rgba(255,255,255,0.9); border-radius: 10px; box-shadow: 10px 10px 5px rgba(80,80,80,0.7);'><tr><td style='vertical-align: top'>
+ <table cellspacing=0 cellborder=0 style='width:100%'>
+ <tr><th colspan=3 style='padding-bottom: .25em; text-decoration: underline;'>Symbol Types To Show</th></tr>
+ <tr>
+ <td style='width: 33%; white-space: nowrap; vertical-align: top;'>
+ <span class='swatch' id='swatch_0'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_0' value='A'>Global absolute (A)
+ <br><span class='swatch' id='swatch_1'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_1' value='B'>Global uninitialized data (B)
+ <br><span class='swatch' id='swatch_2'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_2' value='b'>Local uninitialized data (b)
+ <br><span class='swatch' id='swatch_3'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_3' value='C'>Global uninitialized common (C)
+ <br><span class='swatch' id='swatch_4'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_4' value='D'>Global initialized data (D)
+ <br><span class='swatch' id='swatch_5'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_5' value='d'>Local initialized data (d)
+ <br><span class='swatch' id='swatch_6'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_6' value='G'>Global small initialized data (G)
+ <br><span class='swatch' id='swatch_7'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_7' value='g'>Local small initialized data (g)
+ <br><span class='swatch' id='swatch_8'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_8' value='i'>Indirect function (i)
+ </td>
+ <td style='width: 33%; white-space: nowrap; vertical-align: top;'>
+ <span class='swatch' id='swatch_9'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_9' value='N'>Debugging (N)
+ <br><span class='swatch' id='swatch_10'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_10' value='p'>Stack unwind (p)
+ <br><span class='swatch' id='swatch_11'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_11' value='R'>Global read-only data (R)
+ <br><span class='swatch' id='swatch_12'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_12' value='r'>Local read-only data (r)
+ <br><span class='swatch' id='swatch_13'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_13' value='S'>Global small uninitialized data (S)
+ <br><span class='swatch' id='swatch_14'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_14' value='s'>Local small uninitialized data (s)
+ <br><span class='swatch' id='swatch_15'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_15' value='T'>Global code (T)
+ <br><span class='swatch' id='swatch_16'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_16' value='t'>Local code (t)
+ <br><span class='swatch' id='swatch_17'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_17' value='U'>Undefined (U)
+ </td>
+ <td style='width: 33%; white-space: nowrap; vertical-align: top;'>
+ <span class='swatch' id='swatch_18'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_18' value='u'>Unique (u)
+ <br><span class='swatch' id='swatch_19'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_19' value='V'>Global weak object (V)
+ <br><span class='swatch' id='swatch_20'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_20' value='v'>Local weak object (v)
+ <br><span class='swatch' id='swatch_21'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_21' value='W'>Global weak symbol (W)
+ <br><span class='swatch' id='swatch_22'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_22' value='w'>Local weak symbol (w)
+ <br><span class='swatch' id='swatch_23'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_23' value='@'>Vtable entry (@)
+ <br><span class='swatch' id='swatch_24'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_24' value='-'>STABS debugging (-)
+ <br><span class='swatch' id='swatch_25'>&nbsp;&nbsp;&nbsp;</span><input checked type='checkbox' id='check_25' value='?'>Unrecognized (?)
+ </td>
+ </tr>
+ <tr><td colspan=3 style='text-align: center; white-space: nowrap; padding-top: 1em;'>
+ Select <input type='button' onclick='filterSetAll(true)' value='All'>,
+ <input type='button' onclick='filterSetAll(false)' value='None'>,
+ or type a string: <input id='symbol_types_filter' size=30 value='' onkeyup='symbolFilterTextChanged()' onblur='updateFilterText()'>
+ <span style='-webkit-user-select: none; cursor: help;' title='Enter codes from the list above for the symbols you want to see. The checkboxes will update automatically to match the string that you enter.'>[?]</span>
+ </td></tr>
+ </table>
+</td></tr><tr><td style='vertical-align: top; padding-top: 10px; border-top: 1px solid grey;'>
+ <table cellspacing=0 cellborder=0 style='width: 100%'>
+ <tr><th colspan=2 style='padding-bottom: .25em; text-decoration: underline;'>Advanced Options</th></tr>
+ <tr>
+ <td style='white-space: nowrap; vertical-align: top;'>
+ <input type='checkbox' id='check_regex'>
+ Only include symbols matching this regex:
+ </td>
+ <td style='text-align: right; vertical-align: top;'>
+ <input disabled id='symbol_filter_regex' size=30 value='' style='text-align: right;'>
+ <span style='-webkit-user-select: none; cursor: help;' title='Enter a javascript regex. Only symbols that match this regex will be shown. This filter applies before any exclusion regex specified below. The format of each symbol is [path]:[symbol_name]'>[?]</span>
+ </td>
+ </tr>
+ <tr>
+ <td style='white-space: nowrap; vertical-align: top;'>
+ <input type='checkbox' id='check_exclude_regex'>
+ Exclude all symbols matching this regex:
+ </td>
+ <td style='text-align: right; vertical-align: top;'>
+ <input disabled id='symbol_filter_exclude_regex' size=30 value='' style='text-align: right;'>
+        <span style='-webkit-user-select: none; cursor: help;' title='Enter a javascript regex. Symbols that match this regex will not be shown. This filter applies after any inclusion filter specified above. The format of each symbol is [path]:[symbol_name]'>[?]</span>
+ </td>
+ </tr>
+ <tr>
+ <td style='white-space: nowrap; vertical-align: top;'>
+ <input type='checkbox' id='check_gte'>
+ Only include symbols that are at least <span style='font-style: italic;'>n</span> bytes:
+ </td>
+ <td style='text-align: right; vertical-align: top;'>
+ <input disabled id='symbol_filter_gte' size=8 value='' style='text-align: right;'>
+ <span style='-webkit-user-select: none; cursor: help;' title='Symbols whose size is less than this value will be hidden.'>[?]</span>
+ </td>
+ </tr>
+ <tr>
+      <td style='white-space: nowrap; vertical-align: top;'>
+ Show at most <span style='font-style: italic;'>n</span> levels of detail at a time:
+ </td>
+ <td style='text-align: right; vertical-align: top;'>
+ <input id='max_levels' size=4 value='2' style='text-align: right;'><span style='-webkit-user-select: none; cursor: help;' title='Increasing this value shows more detail without the need to zoom, but uses more computing power.'>[?]</span>
+ </td>
+ </tr>
+ <tr>
+      <td style='white-space: nowrap; vertical-align: top;'>
+ Set the size of the treemap to <span style='font-style: italic;'>W x H</span> pixels:
+ </td>
+ <td style='text-align: right; vertical-align: top;'>
+ <input id='width' size=4 value='' style='text-align: right;'>
+ &nbsp;x&nbsp;<input id='height' size=4 value='' style='text-align: right;'>
+ </td>
+ </tr>
+ </table>
+</td></tr>
+<tr><td style='padding-top: 10px; text-align: right; border-top: 1px solid grey'>
+ <input type='button' value='Apply' onclick='applySettings()'>
+ <input type='button' value='Cancel' onclick='cancelSettings()'>
+</td></tr></table>
+</body>
+</html>
diff --git a/chromium/tools/binary_size/template/test-data-generator.html b/chromium/tools/binary_size/template/test-data-generator.html
new file mode 100644
index 00000000000..9c6790a8f9e
--- /dev/null
+++ b/chromium/tools/binary_size/template/test-data-generator.html
@@ -0,0 +1,157 @@
+<!DOCTYPE html>
+<!--
+ Copyright 2014 The Chromium Authors. All rights reserved.
+ Use of this source code is governed by a BSD-style license that can be
+ found in the LICENSE file.
+-->
+<html>
+<head>
+<script>
+function rnd(max) {
+ return Math.round(Math.random()*max);
+}
+
+// Generate a large batch of random fake nm records, convert them to the
+// d3 tree form with to_d3_tree(), and offer the JSON as a download.
+// The record list is built as JavaScript source text and eval()'d —
+// acceptable here because this page is a local test-data generator
+// operating only on its own generated text.
+function gen() {
+  var dirs1=['usr1', 'etc1', 'var1'];
+  var dirs2=['aaa2', 'bbb2', 'ccc2', 'ddd2', 'eee2', 'fff2', 'ggg2', 'hhh2',
+             'frobozz2', 'kazaam2', 'shazam2'];
+  var dirs3=['iii3', 'jjj3', 'kkk3', 'lll3', 'mmm3', 'nnn3', 'ooo3', 'ppp3',
+             'wonderllama3', 'excelsior3', 'src3'];
+  var filenames=['awesome.cc', 'rad.h', 'tubular.cxx', 'cool.cc', 'groovy.h',
+                 'excellent.c', 'gnarly.h', 'green.C', 'articulate.cc'];
+  //All possible types (we only see a subset in practice): 'ABbCDdGgiNpRrSsTtUuVvWw-?';
+  var nm_symbol_types = 'trd';
+  var minSize = 4;
+  var maxSize = 10000;
+  var numGen = 300000;
+  var text = 'var nm_data=[\n';
+  var vtablePercent = 5;
+  for (var x=0; x<numGen; x++) {
+    // Random 4-level path: /dir1/dir2/dir3/filename.
+    var path = '/' +
+      dirs1[rnd(dirs1.length - 1)] + '/' +
+      dirs2[rnd(dirs2.length - 1)] + '/' +
+      dirs3[rnd(dirs3.length - 1)] + '/' +
+      filenames[rnd(filenames.length - 1)];
+    // Roughly vtablePercent of records become '@' (vtable) entries.
+    var isVtable = Math.floor((Math.random()*100)+1) <= vtablePercent;
+    var size = rnd(maxSize);
+    var symbol_name;
+    var type;
+    if (!isVtable) {
+      symbol_name = 'sym' + x.toString(16);
+      type = nm_symbol_types.charAt(rnd(nm_symbol_types.length - 1));
+    } else {
+      symbol_name = 'vtable for ' + x.toString(16);
+      type = '@'
+    }
+    text = text + "{'n': '" + symbol_name +
+           "', 't': '" + type +
+           "', 's': " + size +
+           ", 'p': '" + path + "'},\n";
+  }
+  text += '];';
+
+  // Defines nm_data from the generated source text.
+  eval(text);
+  var treeified = to_d3_tree(nm_data);
+  generateDownloadLink('tree_data=' + JSON.stringify(treeified));
+}
+
+// Wrap content in a Blob, build an object URL for it, and place a
+// one-shot download link into #linkcontainer. After the first click the
+// link is disabled and the object URL is revoked shortly afterwards to
+// release the Blob.
+function generateDownloadLink(content) {
+  var blob = new Blob([content], {type: 'text/plain'});
+  var link = document.createElement('a');
+  link.download = 'generated-content.txt';
+  link.href = window.URL.createObjectURL(blob);
+  link.textContent = 'Download ready, click here.';
+  link.dataset.downloadurl = ['text/plain', link.download, link.href].join(':');
+  link.onclick = function(e) {
+    if ('disabled' in this.dataset) { return false; }
+    link.dataset.disabled = true;
+    // Delay revocation so the in-flight download can start first.
+    setTimeout(function() { window.URL.revokeObjectURL(link.href); }, 1500);
+  };
+  document.getElementById('linkcontainer').innerHTML = '';
+  document.getElementById('linkcontainer').appendChild(link);
+}
+
+/**
+ * This function takes in an array of nm records and converts them into a
+ * hierarchical data structure suitable for use in a d3-base treemap layout.
+ * Leaves are individual symbols. The parents of the leaves are logical
+ * groupings by common symbol-type (for BSS, read-only data, code, etc).
+ * Above this, each node represents part of a filesystem path relative
+ * to the parent node. The root node has the name '/', and represents
+ * a root (though not necessarily THE root) of a file system traversal.
+ * The root node also has a special property, 'maxDepth', to which is bound
+ * the deepest level of nesting that was found during conversion: for the
+ * record at path /a/b/c/d.foo, the maxDepth will be 6; the file 'd.foo'
+ * is at depth 4, the type-bucket is depth 5 and the symbols are depth 6.
+ */
+function to_d3_tree(records) {
+ var result = {'n': '/', 'children': [], 'k': 'p'};
+ var maxDepth = 0;
+ //{'n': 'symbol1', 't': 'b', 's': 1000, 'p': '/usr/local/foo/foo.cc'},
+ for (index in records) {
+ var record = records[index];
+ var parts = record.p.split("/");
+ var node = result;
+ var depth = 0;
+ // Walk the tree and find the file that is named by the "location"
+ // field of the record. We create any intermediate nodes required.
+ // This is directly analogous to "mkdir -p".
+ while(parts.length > 0) {
+ var part = parts.shift();
+ if (part.length == 0) continue;
+ depth++;
+ node = _mk_child(node, part, record.s);
+ node.k = 'p'; // p for path
+ }
+ node.lastPathElement = true;
+
+ // 'node' is now the file node. Find the symbol-type bucket.
+ node = _mk_child(node, record.t, record.s);
+ node.t = record.t;
+ node.k = 'b'; // b for bucket
+ depth++;
+ // 'node' is now the symbol-type bucket. Make the child entry.
+ node = _mk_child(node, record.n, record.s);
+ delete node.children;
+ node.value = record.s;
+ node.t = record.t;
+ node.k = 's'; // s for symbol
+ depth++;
+
+ maxDepth = Math.max(maxDepth, depth);
+ }
+ result.maxDepth = maxDepth;
+ return result;
+}
+
+/**
+ * Given a node and a name, return the child within node.children whose
+ * name matches the specified name. If necessary, a new child node is
+ * created and appended to node.children.
+ * If this method creates a new node, the 'name' attribute is set to the
+ * specified name and the 'children' attribute is an empty array, and
+ * total_size is the specified size. Otherwise, the existing node is
+ * returned and its total_size value is incremented by the specified size.
+ */
+// Return the child of 'node' named 'name', creating and appending a new
+// {'n': name, 'children': []} entry if none exists. NOTE(review): the
+// 'size' argument is currently unused — sizes are attached to leaves by
+// the caller (to_d3_tree), not accumulated here.
+function _mk_child(node, name, size) {
+  var child = undefined;
+  // 'var' added: the original leaked child_index as an implicit global.
+  for (var child_index in node.children) {
+    if (node.children[child_index].n == name) {
+      child = node.children[child_index];
+      break; // found it; no need to scan the rest
+    }
+  }
+  if (child === undefined) {
+    child = {'n': name, 'children': []};
+    node.children.push(child);
+  }
+  return child;
+}
+</script>
+</head>
+<body style='white-space: pre; font-family: monospace;'>
+This script generates sample data for use in D3SymbolTreeMap, and can be used
+for testing.
+<input type=button onclick='gen();' value='Generate data'></input>
+<div id='linkcontainer'></div>
+</body>
+</html>
diff --git a/chromium/tools/bisect-builds.py b/chromium/tools/bisect-builds.py
new file mode 100755
index 00000000000..e2c577d3184
--- /dev/null
+++ b/chromium/tools/bisect-builds.py
@@ -0,0 +1,1309 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Snapshot Build Bisect Tool
+
+This script bisects a snapshot archive using binary search. It starts at
+a bad revision (it will try to guess HEAD) and asks for a last known-good
+revision. It will then binary search across this revision range by downloading,
+unzipping, and opening Chromium for you. After testing the specific revision,
+it will ask you whether it is good or bad before continuing the search.
+"""
+
+# The base URL for stored build archives.
+CHROMIUM_BASE_URL = ('http://commondatastorage.googleapis.com'
+                     '/chromium-browser-snapshots')
+WEBKIT_BASE_URL = ('http://commondatastorage.googleapis.com'
+                   '/chromium-webkit-snapshots')
+ASAN_BASE_URL = ('http://commondatastorage.googleapis.com'
+                 '/chromium-browser-asan')
+
+# GS bucket name.
+GS_BUCKET_NAME = 'chrome-unsigned/desktop-W15K3Y'
+
+# Base URL for downloading official builds.
+GOOGLE_APIS_URL = 'commondatastorage.googleapis.com'
+
+# The base URL for official builds.
+OFFICIAL_BASE_URL = 'http://%s/%s' % (GOOGLE_APIS_URL, GS_BUCKET_NAME)
+
+# URL template for viewing changelogs between revisions.
+CHANGELOG_URL = ('https://chromium.googlesource.com/chromium/src/+log/%s..%s')
+
+# URL to convert SVN revision to git hash.
+CRREV_URL = ('https://cr-rev.appspot.com/_ah/api/crrev/v1/redirect/')
+
+# URL template for viewing changelogs between official versions.
+OFFICIAL_CHANGELOG_URL = ('https://chromium.googlesource.com/chromium/'
+                          'src/+log/%s..%s?pretty=full')
+
+# DEPS file URL.
+DEPS_FILE_OLD = ('http://src.chromium.org/viewvc/chrome/trunk/src/'
+                 'DEPS?revision=%d')
+DEPS_FILE_NEW = ('https://chromium.googlesource.com/chromium/src/+/%s/DEPS')
+
+# Blink changelogs URL.
+BLINK_CHANGELOG_URL = ('http://build.chromium.org'
+                       '/f/chromium/perf/dashboard/ui/changelog_blink.html'
+                       '?url=/trunk&range=%d%%3A%d')
+
+DONE_MESSAGE_GOOD_MIN = ('You are probably looking for a change made after %s ('
+                         'known good), but no later than %s (first known bad).')
+DONE_MESSAGE_GOOD_MAX = ('You are probably looking for a change made after %s ('
+                         'known bad), but no later than %s (first known good).')
+
+CHROMIUM_GITHASH_TO_SVN_URL = (
+    'https://chromium.googlesource.com/chromium/src/+/%s?format=json')
+
+BLINK_GITHASH_TO_SVN_URL = (
+    'https://chromium.googlesource.com/chromium/blink/+/%s?format=json')
+
+GITHASH_TO_SVN_URL = {
+    'chromium': CHROMIUM_GITHASH_TO_SVN_URL,
+    'blink': BLINK_GITHASH_TO_SVN_URL,
+}
+
+# Search pattern to be matched in the JSON output from
+# CHROMIUM_GITHASH_TO_SVN_URL to get the chromium revision (svn revision).
+# The _OLD pattern matches pre-git-migration commit messages (git-svn-id
+# footers); it is used as a fallback when the Cr-Commit-Position footer is
+# absent.
+CHROMIUM_SEARCH_PATTERN_OLD = (
+    r'.*git-svn-id: svn://svn.chromium.org/chrome/trunk/src@(\d+) ')
+CHROMIUM_SEARCH_PATTERN = (
+    r'Cr-Commit-Position: refs/heads/master@{#(\d+)}')
+
+# Search pattern to be matched in the JSON output from
+# BLINK_GITHASH_TO_SVN_URL to get the blink revision (svn revision).
+BLINK_SEARCH_PATTERN = (
+    r'.*git-svn-id: svn://svn.chromium.org/blink/trunk@(\d+) ')
+
+SEARCH_PATTERN = {
+    'chromium': CHROMIUM_SEARCH_PATTERN,
+    'blink': BLINK_SEARCH_PATTERN,
+}
+
+CREDENTIAL_ERROR_MESSAGE = ('You are attempting to access protected data with '
+                            'no configured credentials')
+
+###############################################################################
+
+import httplib
+import json
+import optparse
+import os
+import re
+import shlex
+import shutil
+import subprocess
+import sys
+import tempfile
+import threading
+import urllib
+from distutils.version import LooseVersion
+from xml.etree import ElementTree
+import zipfile
+
+
+class PathContext(object):
+  """A PathContext is used to carry the information used to construct URLs and
+  paths when dealing with the storage server and archives."""
+  def __init__(self, base_url, platform, good_revision, bad_revision,
+               is_official, is_asan, use_local_cache, flash_path = None):
+    """Args:
+      base_url: Base URL of the snapshot bucket (e.g. CHROMIUM_BASE_URL).
+      platform: Value of the '-a/--archive' option ('linux64', 'mac', ...).
+      good_revision: Known-good revision bound.
+      bad_revision: Known-bad revision bound.
+      is_official: True when bisecting official builds.
+      is_asan: True when bisecting ASAN builds.
+      use_local_cache: Cache the revision listing in a local JSON file.
+      flash_path: Optional path to a PPAPI Flash plugin to load.
+    """
+    super(PathContext, self).__init__()
+    # Store off the input parameters.
+    self.base_url = base_url
+    self.platform = platform # What's passed in to the '-a/--archive' option.
+    self.good_revision = good_revision
+    self.bad_revision = bad_revision
+    self.is_official = is_official
+    self.is_asan = is_asan
+    self.build_type = 'release'
+    self.flash_path = flash_path
+    # Dictionary which stores svn revision number as key and its
+    # corresponding git hash as value. This data is populated in
+    # _FetchAndParse and used later in GetDownloadURL while downloading
+    # the build.
+    self.githash_svn_dict = {}
+    # The name of the ZIP file in a revision directory on the server.
+    self.archive_name = None
+
+    # Whether to cache and use the list of known revisions in a local file to
+    # speed up the initialization of the script at the next run.
+    self.use_local_cache = use_local_cache
+
+    # Locate the local checkout to speed up the script by using locally stored
+    # metadata.
+    abs_file_path = os.path.abspath(os.path.realpath(__file__))
+    local_src_path = os.path.join(os.path.dirname(abs_file_path), '..')
+    if abs_file_path.endswith(os.path.join('tools', 'bisect-builds.py')) and\
+        os.path.exists(os.path.join(local_src_path, '.git')):
+      self.local_src_path = os.path.normpath(local_src_path)
+    else:
+      self.local_src_path = None
+
+    # Set some internal members:
+    #   _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
+    #   _archive_extract_dir = Uncompressed directory in the archive_name file.
+    #   _binary_name = The name of the executable to run.
+    if self.platform in ('linux', 'linux64', 'linux-arm', 'chromeos'):
+      self._binary_name = 'chrome'
+    elif self.platform in ('mac', 'mac64'):
+      self.archive_name = 'chrome-mac.zip'
+      self._archive_extract_dir = 'chrome-mac'
+    elif self.platform in ('win', 'win64'):
+      self.archive_name = 'chrome-win32.zip'
+      self._archive_extract_dir = 'chrome-win32'
+      self._binary_name = 'chrome.exe'
+    else:
+      raise Exception('Invalid platform: %s' % self.platform)
+
+    if is_official:
+      if self.platform == 'linux':
+        self._listing_platform_dir = 'precise32/'
+        self.archive_name = 'chrome-precise32.zip'
+        self._archive_extract_dir = 'chrome-precise32'
+      elif self.platform == 'linux64':
+        self._listing_platform_dir = 'precise64/'
+        self.archive_name = 'chrome-precise64.zip'
+        self._archive_extract_dir = 'chrome-precise64'
+      elif self.platform == 'mac':
+        self._listing_platform_dir = 'mac/'
+        self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
+      elif self.platform == 'mac64':
+        self._listing_platform_dir = 'mac64/'
+        self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
+      elif self.platform == 'win':
+        self._listing_platform_dir = 'win/'
+        self.archive_name = 'chrome-win.zip'
+        self._archive_extract_dir = 'chrome-win'
+      elif self.platform == 'win64':
+        self._listing_platform_dir = 'win64/'
+        self.archive_name = 'chrome-win64.zip'
+        self._archive_extract_dir = 'chrome-win64'
+    else:
+      if self.platform in ('linux', 'linux64', 'linux-arm', 'chromeos'):
+        self.archive_name = 'chrome-linux.zip'
+        self._archive_extract_dir = 'chrome-linux'
+        if self.platform == 'linux':
+          self._listing_platform_dir = 'Linux/'
+        elif self.platform == 'linux64':
+          self._listing_platform_dir = 'Linux_x64/'
+        elif self.platform == 'linux-arm':
+          self._listing_platform_dir = 'Linux_ARM_Cross-Compile/'
+        elif self.platform == 'chromeos':
+          self._listing_platform_dir = 'Linux_ChromiumOS_Full/'
+      # There is no 64-bit distinction for non-official mac builds.
+      elif self.platform in ('mac', 'mac64'):
+        self._listing_platform_dir = 'Mac/'
+        self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
+      elif self.platform == 'win':
+        self._listing_platform_dir = 'Win/'
+      # NOTE(review): non-official 'win64' (and official 'linux-arm' /
+      # 'chromeos') never set _listing_platform_dir, so GetListingURL()
+      # would raise AttributeError for those combinations.
+
+  def GetASANPlatformDir(self):
+    """ASAN builds are in directories like "linux-release", or have filenames
+    like "asan-win32-release-277079.zip". This aligns to our platform names
+    except in the case of Windows where they use "win32" instead of "win"."""
+    if self.platform == 'win':
+      return 'win32'
+    else:
+      return self.platform
+
+  def GetListingURL(self, marker=None):
+    """Returns the URL for a directory listing, with an optional marker.
+
+    |marker| tells Google Storage to resume a paginated listing after that
+    key (see ParseDirectoryIndex).
+    """
+    marker_param = ''
+    if marker:
+      marker_param = '&marker=' + str(marker)
+    # ASAN archives are flat files, so no delimiter is used; regular builds
+    # are grouped into per-revision directories via delimiter=/.
+    if self.is_asan:
+      prefix = '%s-%s' % (self.GetASANPlatformDir(), self.build_type)
+      return self.base_url + '/?delimiter=&prefix=' + prefix + marker_param
+    else:
+      return (self.base_url + '/?delimiter=/&prefix=' +
+              self._listing_platform_dir + marker_param)
+
+  def GetDownloadURL(self, revision):
+    """Gets the download URL for a build archive of a specific revision."""
+    if self.is_asan:
+      return '%s/%s-%s/%s-%d.zip' % (
+          ASAN_BASE_URL, self.GetASANPlatformDir(), self.build_type,
+          self.GetASANBaseName(), revision)
+    if self.is_official:
+      return '%s/%s/%s%s' % (
+          OFFICIAL_BASE_URL, revision, self._listing_platform_dir,
+          self.archive_name)
+    else:
+      # Substitute the git hash for an SVN revision number when a mapping
+      # is known (see githash_svn_dict in __init__).
+      if str(revision) in self.githash_svn_dict:
+        revision = self.githash_svn_dict[str(revision)]
+      return '%s/%s%s/%s' % (self.base_url, self._listing_platform_dir,
+                             revision, self.archive_name)
+
+  def GetLastChangeURL(self):
+    """Returns a URL to the LAST_CHANGE file.
+
+    LAST_CHANGE records the newest archived revision for this platform
+    (fetched via GetChromiumRevision).
+    """
+    return self.base_url + '/' + self._listing_platform_dir + 'LAST_CHANGE'
+
+  def GetASANBaseName(self):
+    """Returns the base name of the ASAN zip file."""
+    # Only Linux ASAN archives carry the 'asan-symbolized-' prefix.
+    if 'linux' in self.platform:
+      return 'asan-symbolized-%s-%s' % (self.GetASANPlatformDir(),
+                                        self.build_type)
+    else:
+      return 'asan-%s-%s' % (self.GetASANPlatformDir(), self.build_type)
+
+  def GetLaunchPath(self, revision):
+    """Returns a relative path (presumably from the archive extraction location)
+    that is used to run the executable."""
+    if self.is_asan:
+      extract_dir = '%s-%d' % (self.GetASANBaseName(), revision)
+    else:
+      extract_dir = self._archive_extract_dir
+    return os.path.join(extract_dir, self._binary_name)
+
+  def ParseDirectoryIndex(self, last_known_rev):
+    """Parses the Google Storage directory listing into a list of revision
+    numbers.
+
+    Returns an empty list when |last_known_rev| already matches the server's
+    LAST_CHANGE (nothing new to fetch).
+    """
+
+    def _GetMarkerForRev(revision):
+      # Reconstructs the GS listing key for |revision| so paging can resume
+      # right after it.
+      if self.is_asan:
+        return '%s-%s/%s-%d.zip' % (
+            self.GetASANPlatformDir(), self.build_type,
+            self.GetASANBaseName(), revision)
+      return '%s%d' % (self._listing_platform_dir, revision)
+
+    def _FetchAndParse(url):
+      """Fetches a URL and returns a 2-Tuple of ([revisions], next-marker). If
+      next-marker is not None, then the listing is a partial listing and another
+      fetch should be performed with next-marker being the marker= GET
+      parameter."""
+      handle = urllib.urlopen(url)
+      document = ElementTree.parse(handle)
+
+      # All nodes in the tree are namespaced. Get the root's tag name to extract
+      # the namespace. Etree does namespaces as |{namespace}tag|.
+      root_tag = document.getroot().tag
+      end_ns_pos = root_tag.find('}')
+      if end_ns_pos == -1:
+        raise Exception('Could not locate end namespace for directory index')
+      namespace = root_tag[:end_ns_pos + 1]
+
+      # Find the prefix (_listing_platform_dir) and whether or not the list is
+      # truncated.
+      prefix_len = len(document.find(namespace + 'Prefix').text)
+      next_marker = None
+      is_truncated = document.find(namespace + 'IsTruncated')
+      if is_truncated is not None and is_truncated.text.lower() == 'true':
+        next_marker = document.find(namespace + 'NextMarker').text
+      # Get a list of all the revisions.
+      revisions = []
+      githash_svn_dict = {}
+      if self.is_asan:
+        asan_regex = re.compile(r'.*%s-(\d+)\.zip$' % (self.GetASANBaseName()))
+        # Non-ASAN builds are in a <revision> directory; the ASAN builds are
+        # flat files, so parse <Contents>/<Key> entries instead of prefixes.
+        all_prefixes = document.findall(namespace + 'Contents/' +
+                                        namespace + 'Key')
+        for prefix in all_prefixes:
+          m = asan_regex.match(prefix.text)
+          if m:
+            try:
+              revisions.append(int(m.group(1)))
+            except ValueError:
+              pass
+      else:
+        all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
+                                        namespace + 'Prefix')
+        # The <Prefix> nodes have content of the form of
+        # |_listing_platform_dir/revision/|. Strip off the platform dir and the
+        # trailing slash to just have a number.
+        for prefix in all_prefixes:
+          revnum = prefix.text[prefix_len:-1]
+          try:
+            revnum = int(revnum)
+            revisions.append(revnum)
+          # Note: git hashes in chromium-browser-snapshots are ignored here;
+          # they are invalid and produce 404 errors when fetching pages like
+          # https://chromium.googlesource.com/chromium/src/+/[rev_hash]
+          except ValueError:
+            pass
+      # NOTE(review): githash_svn_dict is created but never populated above
+      # (hash entries are deliberately skipped), so callers always receive
+      # an empty dict from this function.
+      return (revisions, next_marker, githash_svn_dict)
+
+    # Fetch the first list of revisions.
+    if last_known_rev:
+      revisions = []
+      # Optimization: Start paging at the last known revision (local cache).
+      next_marker = _GetMarkerForRev(last_known_rev)
+      # Optimization: Stop paging at the last known revision (remote).
+      last_change_rev = GetChromiumRevision(self, self.GetLastChangeURL())
+      if last_known_rev == last_change_rev:
+        # The cache already covers the server's newest revision.
+        return []
+    else:
+      (revisions, next_marker, new_dict) = _FetchAndParse(self.GetListingURL())
+      self.githash_svn_dict.update(new_dict)
+      last_change_rev = None
+
+    # If the result list was truncated, refetch with the next marker. Do this
+    # until an entire directory listing is done.
+    while next_marker:
+      sys.stdout.write('\rFetching revisions at marker %s' % next_marker)
+      sys.stdout.flush()
+
+      next_url = self.GetListingURL(next_marker)
+      (new_revisions, next_marker, new_dict) = _FetchAndParse(next_url)
+      revisions.extend(new_revisions)
+      self.githash_svn_dict.update(new_dict)
+      if last_change_rev and last_change_rev in new_revisions:
+        break
+    sys.stdout.write('\r')
+    sys.stdout.flush()
+    return revisions
+
+  def _GetSVNRevisionFromGitHashWithoutGitCheckout(self, git_sha1, depot):
+    """Looks up the SVN-style revision for |git_sha1| via the gitiles JSON
+    API. Raises ValueError when the lookup or the parse fails."""
+    json_url = GITHASH_TO_SVN_URL[depot] % git_sha1
+    response = urllib.urlopen(json_url)
+    if response.getcode() == 200:
+      try:
+        # Skip the 4-byte anti-XSSI prefix that gitiles prepends to JSON
+        # bodies.
+        data = json.loads(response.read()[4:])
+      except ValueError:
+        print 'ValueError for JSON URL: %s' % json_url
+        raise ValueError
+    else:
+      raise ValueError
+    if 'message' in data:
+      message = data['message'].split('\n')
+      message = [line for line in message if line.strip()]
+      # The revision footer (Cr-Commit-Position or git-svn-id) sits on the
+      # last non-empty line of the commit message.
+      search_pattern = re.compile(SEARCH_PATTERN[depot])
+      result = search_pattern.search(message[len(message)-1])
+      if result:
+        return result.group(1)
+      else:
+        if depot == 'chromium':
+          # Fall back to the pre-git-migration git-svn-id footer format.
+          result = re.search(CHROMIUM_SEARCH_PATTERN_OLD,
+                             message[len(message)-1])
+          if result:
+            return result.group(1)
+    print 'Failed to get svn revision number for %s' % git_sha1
+    raise ValueError
+
+  def _GetSVNRevisionFromGitHashFromGitCheckout(self, git_sha1, depot):
+    """Maps |git_sha1| to an SVN-style revision using the local git checkout.
+    Raises ValueError when no mapping can be found."""
+    def _RunGit(command, path):
+      # Runs 'git <command>' in |path| and returns (stdout, returncode).
+      command = ['git'] + command
+      shell = sys.platform.startswith('win')
+      proc = subprocess.Popen(command, shell=shell, stdout=subprocess.PIPE,
+                              stderr=subprocess.PIPE, cwd=path)
+      (output, _) = proc.communicate()
+      return (output, proc.returncode)
+
+    path = self.local_src_path
+    if depot == 'blink':
+      path = os.path.join(self.local_src_path, 'third_party', 'WebKit')
+    revision = None
+    try:
+      # 'git svn find-rev' maps the hash directly when git-svn metadata is
+      # available in the checkout.
+      command = ['svn', 'find-rev', git_sha1]
+      (git_output, return_code) = _RunGit(command, path)
+      if not return_code:
+        revision = git_output.strip('\n')
+    except ValueError:
+      pass
+    if not revision:
+      # Otherwise parse the commit's subject line, which for SVN-roll
+      # commits reads "SVN changes up to revision N".
+      command = ['log', '-n1', '--format=%s', git_sha1]
+      (git_output, return_code) = _RunGit(command, path)
+      if not return_code:
+        revision = re.match('SVN changes up to revision ([0-9]+)', git_output)
+        revision = revision.group(1) if revision else None
+    if revision:
+      return revision
+    raise ValueError
+
+  def GetSVNRevisionFromGitHash(self, git_sha1, depot='chromium'):
+    """Converts |git_sha1| to an SVN-style revision number.
+
+    Uses the local git checkout when one was found in __init__, otherwise
+    queries the gitiles JSON API.
+    """
+    if not self.local_src_path:
+      return self._GetSVNRevisionFromGitHashWithoutGitCheckout(git_sha1, depot)
+    else:
+      return self._GetSVNRevisionFromGitHashFromGitCheckout(git_sha1, depot)
+
+  def GetRevList(self):
+    """Gets the list of revision numbers between self.good_revision and
+    self.bad_revision.
+
+    Side effects: clamps self.good_revision/self.bad_revision to the nearest
+    archived revisions, and (when use_local_cache is set) reads and updates
+    the .bisect-builds-cache.json file next to this script.
+    """
+
+    cache = {}
+    # The cache is stored in the same directory as bisect-builds.py
+    cache_filename = os.path.join(
+        os.path.abspath(os.path.dirname(__file__)),
+        '.bisect-builds-cache.json')
+    cache_dict_key = self.GetListingURL()
+
+    def _LoadBucketFromCache():
+      if self.use_local_cache:
+        try:
+          with open(cache_filename) as cache_file:
+            for (key, value) in json.load(cache_file).items():
+              cache[key] = value
+            revisions = cache.get(cache_dict_key, [])
+            githash_svn_dict = cache.get('githash_svn_dict', {})
+            if revisions:
+              print 'Loaded revisions %d-%d from %s' % (revisions[0],
+                  revisions[-1], cache_filename)
+            return (revisions, githash_svn_dict)
+        except (EnvironmentError, ValueError):
+          pass
+      return ([], {})
+
+    # NOTE(review): _SaveBucketToCache closes over revlist_all, which is
+    # only assigned further down; it must not be called before that.
+    def _SaveBucketToCache():
+      """Save the list of revisions and the git-svn mappings to a file.
+      The list of revisions is assumed to be sorted."""
+      if self.use_local_cache:
+        cache[cache_dict_key] = revlist_all
+        cache['githash_svn_dict'] = self.githash_svn_dict
+        try:
+          with open(cache_filename, 'w') as cache_file:
+            json.dump(cache, cache_file)
+          print 'Saved revisions %d-%d to %s' % (
+              revlist_all[0], revlist_all[-1], cache_filename)
+        except EnvironmentError:
+          pass
+
+    # Download the revlist and filter for just the range between good and bad.
+    minrev = min(self.good_revision, self.bad_revision)
+    maxrev = max(self.good_revision, self.bad_revision)
+
+    (revlist_all, self.githash_svn_dict) = _LoadBucketFromCache()
+    last_known_rev = revlist_all[-1] if revlist_all else 0
+    if last_known_rev < maxrev:
+      revlist_all.extend(map(int, self.ParseDirectoryIndex(last_known_rev)))
+      revlist_all = list(set(revlist_all))
+      revlist_all.sort()
+      _SaveBucketToCache()
+
+    revlist = [x for x in revlist_all if x >= int(minrev) and x <= int(maxrev)]
+
+    # Set good and bad revisions to be legit revisions.
+    if revlist:
+      if self.good_revision < self.bad_revision:
+        self.good_revision = revlist[0]
+        self.bad_revision = revlist[-1]
+      else:
+        self.bad_revision = revlist[0]
+        self.good_revision = revlist[-1]
+
+    # Fix chromium rev so that the deps blink revision matches REVISIONS file.
+    if self.base_url == WEBKIT_BASE_URL:
+      revlist_all.sort()
+      self.good_revision = FixChromiumRevForBlink(revlist,
+                                                  revlist_all,
+                                                  self,
+                                                  self.good_revision)
+      self.bad_revision = FixChromiumRevForBlink(revlist,
+                                                 revlist_all,
+                                                 self,
+                                                 self.bad_revision)
+    return revlist
+
+  def GetOfficialBuildsList(self):
+    """Gets the list of official build numbers between self.good_revision and
+    self.bad_revision."""
+
+    def CheckDepotToolsInPath():
+      # Returns the depot_tools directory from PATH, or None.
+      delimiter = ';' if sys.platform.startswith('win') else ':'
+      path_list = os.environ['PATH'].split(delimiter)
+      for path in path_list:
+        if path.rstrip(os.path.sep).endswith('depot_tools'):
+          return path
+      return None
+
+    def RunGsutilCommand(args):
+      # Runs depot_tools' bundled gsutil with |args|; exits with guidance on
+      # missing depot_tools or credential (HTTP 401/403) failures.
+      gsutil_path = CheckDepotToolsInPath()
+      if gsutil_path is None:
+        print ('Follow the instructions in this document '
+               'http://dev.chromium.org/developers/how-tos/install-depot-tools'
+               ' to install depot_tools and then try again.')
+        sys.exit(1)
+      gsutil_path = os.path.join(gsutil_path, 'third_party', 'gsutil', 'gsutil')
+      gsutil = subprocess.Popen([sys.executable, gsutil_path] + args,
+                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+                                env=None)
+      stdout, stderr = gsutil.communicate()
+      if gsutil.returncode:
+        if (re.findall(r'status[ |=]40[1|3]', stderr) or
+            stderr.startswith(CREDENTIAL_ERROR_MESSAGE)):
+          print ('Follow these steps to configure your credentials and try'
+                 ' running the bisect-builds.py again.:\n'
+                 ' 1. Run "python %s config" and follow its instructions.\n'
+                 ' 2. If you have a @google.com account, use that account.\n'
+                 ' 3. For the project-id, just enter 0.' % gsutil_path)
+          sys.exit(1)
+        else:
+          raise Exception('Error running the gsutil command: %s' % stderr)
+      return stdout
+
+    def GsutilList(bucket):
+      # Returns the top-level entry names under gs://<bucket>/.
+      query = 'gs://%s/' % bucket
+      stdout = RunGsutilCommand(['ls', query])
+      return [url[len(query):].strip('/') for url in stdout.splitlines()]
+
+    # Download the revlist and filter for just the range between good and bad.
+    minrev = min(self.good_revision, self.bad_revision)
+    maxrev = max(self.good_revision, self.bad_revision)
+    build_numbers = GsutilList(GS_BUCKET_NAME)
+    # Keep only entries that look like version numbers, e.g. 51.0.2704.41.
+    revision_re = re.compile(r'(\d\d\.\d\.\d{4}\.\d+)')
+    build_numbers = filter(lambda b: revision_re.search(b), build_numbers)
+    final_list = []
+    parsed_build_numbers = [LooseVersion(x) for x in build_numbers]
+    connection = httplib.HTTPConnection(GOOGLE_APIS_URL)
+    for build_number in sorted(parsed_build_numbers):
+      if build_number > maxrev:
+        break
+      if build_number < minrev:
+        continue
+      # HEAD-probe each candidate so only versions that actually have an
+      # archive for this platform are kept.
+      path = ('/' + GS_BUCKET_NAME + '/' + str(build_number) + '/' +
+              self._listing_platform_dir + self.archive_name)
+      connection.request('HEAD', path)
+      response = connection.getresponse()
+      if response.status == 200:
+        final_list.append(str(build_number))
+      # Drain the response so the connection can be reused for the next HEAD.
+      response.read()
+    connection.close()
+    return final_list
+
+def UnzipFilenameToDir(filename, directory):
+  """Unzip |filename| to |directory|.
+
+  Restores each entry's Unix permission bits from the zip metadata.
+  NOTE(review): the process cwd is changed during extraction and is not
+  restored if an exception escapes (no try/finally); the |directory|
+  parameter is also reused as a loop-local below. The 16L literal makes
+  this Python-2-only.
+  """
+  cwd = os.getcwd()
+  if not os.path.isabs(filename):
+    filename = os.path.join(cwd, filename)
+  zf = zipfile.ZipFile(filename)
+  # Make base.
+  if not os.path.isdir(directory):
+    os.mkdir(directory)
+  os.chdir(directory)
+  # Extract files.
+  for info in zf.infolist():
+    name = info.filename
+    if name.endswith('/'): # dir
+      if not os.path.isdir(name):
+        os.makedirs(name)
+    else: # file
+      directory = os.path.dirname(name)
+      if not os.path.isdir(directory):
+        os.makedirs(directory)
+      out = open(name, 'wb')
+      out.write(zf.read(name))
+      out.close()
+    # Set permissions. Permission info in external_attr is shifted 16 bits.
+    os.chmod(name, info.external_attr >> 16L)
+  os.chdir(cwd)
+
+
+def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
+  """Downloads the build archive for revision |rev| to |filename|.
+
+  (Unzipping happens later, in RunRevision.)
+  @param context A PathContext instance.
+  @param rev The Chromium revision number/tag to download.
+  @param filename The destination for the downloaded file.
+  @param quit_event A threading.Event which will be set by the master thread to
+                    indicate that the download should be aborted.
+  @param progress_event A threading.Event which will be set by the master thread
+                        to indicate that the progress of the download should be
+                        displayed.
+  """
+  def ReportHook(blocknum, blocksize, totalsize):
+    # urlretrieve progress callback; aborts by raising when quit_event is
+    # set, otherwise prints progress when progress_event is set.
+    if quit_event and quit_event.isSet():
+      raise RuntimeError('Aborting download of revision %s' % str(rev))
+    if progress_event and progress_event.isSet():
+      size = blocknum * blocksize
+      if totalsize == -1: # Total size not known.
+        progress = 'Received %d bytes' % size
+      else:
+        size = min(totalsize, size)
+        progress = 'Received %d of %d bytes, %.2f%%' % (
+            size, totalsize, 100.0 * size / totalsize)
+      # Send a \r to let all progress messages use just one line of output.
+      sys.stdout.write('\r' + progress)
+      sys.stdout.flush()
+  download_url = context.GetDownloadURL(rev)
+  try:
+    urllib.urlretrieve(download_url, filename, ReportHook)
+    if progress_event and progress_event.isSet():
+      print
+
+  # The RuntimeError raised by ReportHook signals a deliberate abort, so it
+  # is swallowed here.
+  except RuntimeError:
+    pass
+
+
+def RunRevision(context, revision, zip_file, profile, num_runs, command, args):
+  """Given a zipped revision, unzip it and run the test |num_runs| times.
+
+  Returns a (returncode, stdout, stderr) tuple: the first failing run's
+  result if any run failed, otherwise the first run's result.
+  """
+  print 'Trying revision %s...' % str(revision)
+
+  # Create a temp directory and unzip the revision into it.
+  cwd = os.getcwd()
+  tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
+  UnzipFilenameToDir(zip_file, tempdir)
+
+  # Hack: Chrome OS archives are missing icudtl.dat; try to copy it from
+  # the local directory.
+  if context.platform == 'chromeos':
+    icudtl_path = 'third_party/icu/source/data/in/icudtl.dat'
+    if not os.access(icudtl_path, os.F_OK):
+      print 'Couldn\'t find: ' + icudtl_path
+      sys.exit()
+    os.system('cp %s %s/chrome-linux/' % (icudtl_path, tempdir))
+
+  os.chdir(tempdir)
+
+  # Run the build as many times as specified.
+  testargs = ['--user-data-dir=%s' % profile] + args
+  # The sandbox must be run as root on Official Chrome, so bypass it.
+  if ((context.is_official or context.flash_path) and
+      context.platform.startswith('linux')):
+    testargs.append('--no-sandbox')
+  if context.flash_path:
+    testargs.append('--ppapi-flash-path=%s' % context.flash_path)
+    # We have to pass a large enough Flash version, which currently needs not
+    # be correct. Instead of requiring the user of the script to figure out and
+    # pass the correct version we just spoof it.
+    testargs.append('--ppapi-flash-version=99.9.999.999')
+
+  # Expand command template tokens: %p -> browser binary path,
+  # %a -> test args as separate tokens, %s -> test args as one string.
+  runcommand = []
+  for token in shlex.split(command):
+    if token == '%a':
+      runcommand.extend(testargs)
+    else:
+      runcommand.append(
+          token.replace('%p', os.path.abspath(context.GetLaunchPath(revision))).
+          replace('%s', ' '.join(testargs)))
+
+  results = []
+  for _ in range(num_runs):
+    subproc = subprocess.Popen(runcommand,
+                               bufsize=-1,
+                               stdout=subprocess.PIPE,
+                               stderr=subprocess.PIPE)
+    (stdout, stderr) = subproc.communicate()
+    results.append((subproc.returncode, stdout, stderr))
+  os.chdir(cwd)
+  # Best-effort cleanup; ignore_errors=True plus the except guard against
+  # files that are still in use.
+  try:
+    shutil.rmtree(tempdir, True)
+  except Exception:
+    pass
+
+  for (returncode, stdout, stderr) in results:
+    if returncode:
+      return (returncode, stdout, stderr)
+  return results[0]
+
+
+# The arguments official_builds and exit_status are unused; they are present
+# because this function is passed to Bisect, which calls every evaluate
+# function with five arguments. (stdout/stderr ARE used, by the 's' option.)
+# pylint: disable=W0613
+def AskIsGoodBuild(rev, official_builds, exit_status, stdout, stderr):
+  """Asks the user whether build |rev| is good or bad.
+
+  Returns 'g', 'b', 'r' or 'u'; 'q' raises SystemExit, and 's' prints the
+  captured stdout/stderr and asks again.
+  """
+  # Loop until we get a response that we can parse.
+  while True:
+    response = raw_input('Revision %s is '
+                         '[(g)ood/(b)ad/(r)etry/(u)nknown/(s)tdout/(q)uit]: ' %
+                         str(rev))
+    if response in ('g', 'b', 'r', 'u'):
+      return response
+    if response == 'q':
+      raise SystemExit()
+    if response == 's':
+      print stdout
+      print stderr
+
+
+def IsGoodASANBuild(rev, official_builds, exit_status, stdout, stderr):
+  """Determines if an ASAN build |rev| is good or bad.
+
+  Examines stderr looking for the error message emitted by ASAN. If none is
+  found, falls back to asking the user."""
+  if stderr:
+    bad_count = 0
+    for line in stderr.splitlines():
+      print line
+      if line.find('ERROR: AddressSanitizer:') != -1:
+        bad_count += 1
+    if bad_count > 0:
+      print 'Revision %d determined to be bad.' % rev
+      return 'b'
+  return AskIsGoodBuild(rev, official_builds, exit_status, stdout, stderr)
+
+
+def DidCommandSucceed(rev, official_builds, exit_status, stdout, stderr):
+  """Evaluate function: returns 'g' when the test command exited with
+  status 0, otherwise 'b'. Never asks the user."""
+  if exit_status:
+    print 'Bad revision: %s' % rev
+    return 'b'
+  else:
+    print 'Good revision: %s' % rev
+    return 'g'
+
+
+class DownloadJob(object):
+  """DownloadJob represents a task to download a given Chromium revision."""
+
+  def __init__(self, context, name, rev, zip_file):
+    """Args:
+      context: A PathContext instance.
+      name: Name given to the download thread (for debugging).
+      rev: The revision to download.
+      zip_file: Destination path for the downloaded archive.
+    """
+    super(DownloadJob, self).__init__()
+    # Store off the input parameters.
+    self.context = context
+    self.name = name
+    self.rev = rev
+    self.zip_file = zip_file
+    self.quit_event = threading.Event()
+    self.progress_event = threading.Event()
+    self.thread = None
+
+  def Start(self):
+    """Starts the download."""
+    fetchargs = (self.context,
+                 self.rev,
+                 self.zip_file,
+                 self.quit_event,
+                 self.progress_event)
+    self.thread = threading.Thread(target=FetchRevision,
+                                   name=self.name,
+                                   args=fetchargs)
+    self.thread.start()
+
+  def Stop(self):
+    """Stops the download which must have been started previously."""
+    assert self.thread, 'DownloadJob must be started before Stop is called.'
+    self.quit_event.set()
+    self.thread.join()
+    # Remove the (possibly partial) downloaded archive.
+    os.unlink(self.zip_file)
+
+  def WaitFor(self):
+    """Prints a message and waits for the download to complete. The download
+    must have been started previously."""
+    assert self.thread, 'DownloadJob must be started before WaitFor is called.'
+    print 'Downloading revision %s...' % str(self.rev)
+    self.progress_event.set()  # Display progress of download.
+    try:
+      while self.thread.isAlive():
+        # The parameter to join is needed to keep the main thread responsive to
+        # signals. Without it, the program will not respond to interruptions.
+        self.thread.join(1)
+    except (KeyboardInterrupt, SystemExit):
+      self.Stop()
+      raise
+
+
+def VerifyEndpoint(fetch, context, rev, profile, num_runs, command, try_args,
+                   evaluate, expected_answer):
+  """Runs revision |rev| and aborts unless |evaluate| returns
+  |expected_answer|. Used by Bisect's verify_range mode to sanity-check both
+  ends of the range before bisecting."""
+  fetch.WaitFor()
+  try:
+    (exit_status, stdout, stderr) = RunRevision(
+        context, rev, fetch.zip_file, profile, num_runs, command, try_args)
+  except Exception, e:
+    print >> sys.stderr, e
+  # NOTE(review): if RunRevision raises, exit_status/stdout/stderr are never
+  # bound and the evaluate() call below fails with NameError.
+  if (evaluate(rev, context.is_official, exit_status, stdout, stderr) !=
+      expected_answer):
+    print 'Unexpected result at a range boundary! Your range is not correct.'
+    raise SystemExit
+
+
+def Bisect(context,
+ num_runs=1,
+ command='%p %a',
+ try_args=(),
+ profile=None,
+ evaluate=AskIsGoodBuild,
+ verify_range=False):
+ """Given known good and known bad revisions, run a binary search on all
+ archived revisions to determine the last known good revision.
+
+ @param context PathContext object initialized with user provided parameters.
+ @param num_runs Number of times to run each build for asking good/bad.
+ @param try_args A tuple of arguments to pass to the test application.
+ @param profile The name of the user profile to run with.
+ @param evaluate A function which returns 'g' if the argument build is good,
+ 'b' if it's bad or 'u' if unknown.
+ @param verify_range If true, tests the first and last revisions in the range
+ before proceeding with the bisect.
+
+ Threading is used to fetch Chromium revisions in the background, speeding up
+ the user's experience. For example, suppose the bounds of the search are
+ good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on
+ whether revision 50 is good or bad, the next revision to check will be either
+ 25 or 75. So, while revision 50 is being checked, the script will download
+ revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is
+ known:
+
+ - If rev 50 is good, the download of rev 25 is cancelled, and the next test
+ is run on rev 75.
+
+ - If rev 50 is bad, the download of rev 75 is cancelled, and the next test
+ is run on rev 25.
+ """
+
+ if not profile:
+ profile = 'profile'
+
+ good_rev = context.good_revision
+ bad_rev = context.bad_revision
+ cwd = os.getcwd()
+
+ print 'Downloading list of known revisions...',
+ if not context.use_local_cache and not context.is_official:
+ print '(use --use-local-cache to cache and re-use the list of revisions)'
+ else:
+ print
+ _GetDownloadPath = lambda rev: os.path.join(cwd,
+ '%s-%s' % (str(rev), context.archive_name))
+ if context.is_official:
+ revlist = context.GetOfficialBuildsList()
+ else:
+ revlist = context.GetRevList()
+
+ # Get a list of revisions to bisect across.
+ if len(revlist) < 2: # Don't have enough builds to bisect.
+ msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
+ raise RuntimeError(msg)
+
+ # Figure out our bookends and first pivot point; fetch the pivot revision.
+ minrev = 0
+ maxrev = len(revlist) - 1
+ pivot = maxrev / 2
+ rev = revlist[pivot]
+ fetch = DownloadJob(context, 'initial_fetch', rev, _GetDownloadPath(rev))
+ fetch.Start()
+
+ if verify_range:
+ minrev_fetch = DownloadJob(
+ context, 'minrev_fetch', revlist[minrev],
+ _GetDownloadPath(revlist[minrev]))
+ maxrev_fetch = DownloadJob(
+ context, 'maxrev_fetch', revlist[maxrev],
+ _GetDownloadPath(revlist[maxrev]))
+ minrev_fetch.Start()
+ maxrev_fetch.Start()
+ try:
+ VerifyEndpoint(minrev_fetch, context, revlist[minrev], profile, num_runs,
+ command, try_args, evaluate, 'b' if bad_rev < good_rev else 'g')
+ VerifyEndpoint(maxrev_fetch, context, revlist[maxrev], profile, num_runs,
+ command, try_args, evaluate, 'g' if bad_rev < good_rev else 'b')
+ except (KeyboardInterrupt, SystemExit):
+ print 'Cleaning up...'
+ fetch.Stop()
+ sys.exit(0)
+ finally:
+ minrev_fetch.Stop()
+ maxrev_fetch.Stop()
+
+ fetch.WaitFor()
+
+ # Binary search time!
+ while fetch and fetch.zip_file and maxrev - minrev > 1:
+ if bad_rev < good_rev:
+ min_str, max_str = 'bad', 'good'
+ else:
+ min_str, max_str = 'good', 'bad'
+ print 'Bisecting range [%s (%s), %s (%s)].' % (revlist[minrev], min_str,
+ revlist[maxrev], max_str)
+
+ # Pre-fetch next two possible pivots
+ # - down_pivot is the next revision to check if the current revision turns
+ # out to be bad.
+ # - up_pivot is the next revision to check if the current revision turns
+ # out to be good.
+ down_pivot = int((pivot - minrev) / 2) + minrev
+ down_fetch = None
+ if down_pivot != pivot and down_pivot != minrev:
+ down_rev = revlist[down_pivot]
+ down_fetch = DownloadJob(context, 'down_fetch', down_rev,
+ _GetDownloadPath(down_rev))
+ down_fetch.Start()
+
+ up_pivot = int((maxrev - pivot) / 2) + pivot
+ up_fetch = None
+ if up_pivot != pivot and up_pivot != maxrev:
+ up_rev = revlist[up_pivot]
+ up_fetch = DownloadJob(context, 'up_fetch', up_rev,
+ _GetDownloadPath(up_rev))
+ up_fetch.Start()
+
+ # Run test on the pivot revision.
+ exit_status = None
+ stdout = None
+ stderr = None
+ try:
+ (exit_status, stdout, stderr) = RunRevision(
+ context, rev, fetch.zip_file, profile, num_runs, command, try_args)
+ except Exception, e:
+ print >> sys.stderr, e
+
+ # Call the evaluate function to see if the current revision is good or bad.
+ # On that basis, kill one of the background downloads and complete the
+ # other, as described in the comments above.
+ try:
+ answer = evaluate(rev, context.is_official, exit_status, stdout, stderr)
+ if ((answer == 'g' and good_rev < bad_rev)
+ or (answer == 'b' and bad_rev < good_rev)):
+ fetch.Stop()
+ minrev = pivot
+ if down_fetch:
+ down_fetch.Stop() # Kill the download of the older revision.
+ fetch = None
+ if up_fetch:
+ up_fetch.WaitFor()
+ pivot = up_pivot
+ fetch = up_fetch
+ elif ((answer == 'b' and good_rev < bad_rev)
+ or (answer == 'g' and bad_rev < good_rev)):
+ fetch.Stop()
+ maxrev = pivot
+ if up_fetch:
+ up_fetch.Stop() # Kill the download of the newer revision.
+ fetch = None
+ if down_fetch:
+ down_fetch.WaitFor()
+ pivot = down_pivot
+ fetch = down_fetch
+ elif answer == 'r':
+ pass # Retry requires no changes.
+ elif answer == 'u':
+ # Nuke the revision from the revlist and choose a new pivot.
+ fetch.Stop()
+ revlist.pop(pivot)
+ maxrev -= 1 # Assumes maxrev >= pivot.
+
+ if maxrev - minrev > 1:
+ # Alternate between using down_pivot or up_pivot for the new pivot
+ # point, without affecting the range. Do this instead of setting the
+ # pivot to the midpoint of the new range because adjacent revisions
+ # are likely affected by the same issue that caused the (u)nknown
+ # response.
+ if up_fetch and down_fetch:
+ fetch = [up_fetch, down_fetch][len(revlist) % 2]
+ elif up_fetch:
+ fetch = up_fetch
+ else:
+ fetch = down_fetch
+ fetch.WaitFor()
+ if fetch == up_fetch:
+ pivot = up_pivot - 1 # Subtracts 1 because revlist was resized.
+ else:
+ pivot = down_pivot
+
+ if down_fetch and fetch != down_fetch:
+ down_fetch.Stop()
+ if up_fetch and fetch != up_fetch:
+ up_fetch.Stop()
+ else:
+ assert False, 'Unexpected return value from evaluate(): ' + answer
+ except (KeyboardInterrupt, SystemExit):
+ print 'Cleaning up...'
+ for f in [_GetDownloadPath(rev),
+ _GetDownloadPath(revlist[down_pivot]),
+ _GetDownloadPath(revlist[up_pivot])]:
+ try:
+ os.unlink(f)
+ except OSError:
+ pass
+ sys.exit(0)
+
+ rev = revlist[pivot]
+
+ return (revlist[minrev], revlist[maxrev], context)
+
+
def GetBlinkDEPSRevisionForChromiumRevision(self, rev):
  """Returns the blink revision that was in REVISIONS file at
  chromium revision |rev|.

  Tries the old-style DEPS URL (numeric webkit_revision) first, then the
  new-style DEPS URL keyed by the git hash of |rev|. Raises if neither
  location yields a Blink revision.
  """

  def _ExtractFirstGroup(resp, pattern):
    # Read the whole response body, close it, and return the first capture
    # group of |pattern| (or None when there is no match).
    body = resp.read()
    resp.close()
    match = pattern.search(body)
    return match.group(1) if match else None

  resp = urllib.urlopen(DEPS_FILE_OLD % rev)
  if resp.getcode() == 200:
    # . doesn't match newlines without re.DOTALL, so this is safe.
    return int(_ExtractFirstGroup(resp, re.compile(r'webkit_revision\D*(\d+)')))

  resp = urllib.urlopen(DEPS_FILE_NEW % GetGitHashFromSVNRevision(rev))
  if resp.getcode() == 200:
    blink_git_sha = _ExtractFirstGroup(
        resp, re.compile(r'webkit_revision\D*\d+;\D*\d+;(\w+)'))
    return self.GetSVNRevisionFromGitHash(blink_git_sha, 'blink')

  raise Exception('Could not get Blink revision for Chromium rev %d' % rev)
+
+
def GetBlinkRevisionForChromiumRevision(context, rev):
  """Returns the blink revision that was in REVISIONS file at
  chromium revision |rev|.

  Fetches <base_url>/<platform_dir><rev>/REVISIONS, parses it as JSON, and
  returns its 'webkit_revision' entry as an int (resolving a git hash to an
  SVN revision number when needed).

  Raises:
    ValueError: if the REVISIONS file cannot be fetched or parsed.
    Exception: if the file has no 'webkit_revision' key.
  """
  def _IsRevisionNumber(revision):
    # Accepts either an int or an all-digit string as a numeric revision.
    if isinstance(revision, int):
      return True
    else:
      return revision.isdigit()
  # If |rev| is actually a git hash we know about, map it back to its
  # numeric revision before building the snapshot URL.
  if str(rev) in context.githash_svn_dict:
    rev = context.githash_svn_dict[str(rev)]
  file_url = '%s/%s%s/REVISIONS' % (context.base_url,
                                    context._listing_platform_dir, rev)
  url = urllib.urlopen(file_url)
  if url.getcode() == 200:
    try:
      data = json.loads(url.read())
    except ValueError:
      print 'ValueError for JSON URL: %s' % file_url
      raise ValueError
  else:
    # Non-200 response: treated the same as unparseable content.
    raise ValueError
  url.close()
  if 'webkit_revision' in data:
    blink_rev = data['webkit_revision']
    if not _IsRevisionNumber(blink_rev):
      # Value is a git hash; resolve it to a numeric Blink SVN revision.
      blink_rev = int(context.GetSVNRevisionFromGitHash(blink_rev, 'blink'))
    return blink_rev
  else:
    raise Exception('Could not get blink revision for cr rev %d' % rev)
+
+
def FixChromiumRevForBlink(revisions_final, revisions, self, rev):
  """Returns the chromium revision that has the correct blink revision
  for blink bisect, DEPS and REVISIONS file might not match since
  blink snapshots point to tip of tree blink.

  Walks backwards through |revisions| from |rev| until the snapshot's Blink
  revision no longer exceeds the Blink revision pinned by DEPS at |rev|.

  Note: The revisions_final variable might get modified to include
  additional revisions.
  """
  blink_deps_rev = GetBlinkDEPSRevisionForChromiumRevision(self, rev)

  while GetBlinkRevisionForChromiumRevision(self, rev) > blink_deps_rev:
    idx = revisions.index(rev)
    if idx == 0:
      # No earlier revision to fall back to. The original code looped
      # forever here (the while-condition could never change once |rev|
      # stopped moving); bail out with the oldest revision instead.
      break
    rev = revisions[idx - 1]
    if rev not in revisions_final:
      revisions_final.insert(0, rev)

  revisions_final.sort()
  return rev
+
+
def GetChromiumRevision(context, url):
  """Returns the chromium revision read from given URL.

  If the fetched content is not a plain number, it is assumed to be a git
  hash and is resolved via the context. On any failure a huge sentinel
  revision is returned so the bisect effectively starts at "newest".
  """
  try:
    # Location of the latest build revision number
    latest_revision = urllib.urlopen(url).read()
    if latest_revision.isdigit():
      return int(latest_revision)
    return context.GetSVNRevisionFromGitHash(latest_revision)
  except Exception:
    # NOTE(review): deliberately best-effort — any error (network, parse)
    # falls back to the sentinel rather than aborting the bisect.
    print 'Could not determine latest revision. This could be bad...'
    return 999999999
+
def GetGitHashFromSVNRevision(svn_revision):
  """Resolves |svn_revision| to a git hash via the crrev redirect service.

  Returns the 'git_sha' field of the JSON response, or None when the HTTP
  request fails or the field is absent.
  """
  response = urllib.urlopen(CRREV_URL + str(svn_revision))
  if response.getcode() != 200:
    return None
  payload = json.loads(response.read())
  return payload.get('git_sha')
+
def PrintChangeLog(min_chromium_rev, max_chromium_rev):
  """Prints the changelog URL.

  CHANGELOG_URL expects git hashes, so both SVN revisions are translated
  through crrev first.
  """

  print (' ' + CHANGELOG_URL % (GetGitHashFromSVNRevision(min_chromium_rev),
                                GetGitHashFromSVNRevision(max_chromium_rev)))
+
+
def main():
  """Command-line entry point for the bisect tool.

  Parses options, builds a PathContext describing where snapshot builds
  live, runs Bisect() over the good/bad range, and prints the resulting
  revision range plus changelog URLs (Chromium and, when relevant, Blink).

  Returns a process exit code (0 on success, 1 on option errors).
  """
  usage = ('%prog [options] [-- chromium-options]\n'
           'Perform binary search on the snapshot builds to find a minimal\n'
           'range of revisions where a behavior change happened. The\n'
           'behaviors are described as "good" and "bad".\n'
           'It is NOT assumed that the behavior of the later revision is\n'
           'the bad one.\n'
           '\n'
           'Revision numbers should use\n'
           '  Official versions (e.g. 1.0.1000.0) for official builds. (-o)\n'
           '  SVN revisions (e.g. 123456) for chromium builds, from trunk.\n'
           '    Use base_trunk_revision from http://omahaproxy.appspot.com/\n'
           '    for earlier revs.\n'
           '    Chrome\'s about: build number and omahaproxy branch_revision\n'
           '    are incorrect, they are from branches.\n'
           '\n'
           'Tip: add "-- --no-first-run" to bypass the first run prompts.')
  parser = optparse.OptionParser(usage=usage)
  # Strangely, the default help output doesn't include the choice list.
  choices = ['mac', 'mac64', 'win', 'win64', 'linux', 'linux64', 'linux-arm',
             'chromeos']
  parser.add_option('-a', '--archive',
                    choices=choices,
                    help='The buildbot archive to bisect [%s].' %
                         '|'.join(choices))
  parser.add_option('-o',
                    action='store_true',
                    dest='official_builds',
                    help='Bisect across official Chrome builds (internal '
                         'only) instead of Chromium archives.')
  parser.add_option('-b', '--bad',
                    type='str',
                    help='A bad revision to start bisection. '
                         'May be earlier or later than the good revision. '
                         'Default is HEAD.')
  parser.add_option('-f', '--flash_path',
                    type='str',
                    help='Absolute path to a recent Adobe Pepper Flash '
                         'binary to be used in this bisection (e.g. '
                         'on Windows C:\...\pepflashplayer.dll and on Linux '
                         '/opt/google/chrome/PepperFlash/'
                         'libpepflashplayer.so).')
  parser.add_option('-g', '--good',
                    type='str',
                    help='A good revision to start bisection. ' +
                         'May be earlier or later than the bad revision. ' +
                         'Default is 0.')
  parser.add_option('-p', '--profile', '--user-data-dir',
                    type='str',
                    default='profile',
                    help='Profile to use; this will not reset every run. '
                         'Defaults to a clean profile.')
  parser.add_option('-t', '--times',
                    type='int',
                    default=1,
                    help='Number of times to run each build before asking '
                         'if it\'s good or bad. Temporary profiles are reused.')
  parser.add_option('-c', '--command',
                    type='str',
                    default='%p %a',
                    help='Command to execute. %p and %a refer to Chrome '
                         'executable and specified extra arguments '
                         'respectively. Use %s to specify all extra arguments '
                         'as one string. Defaults to "%p %a". Note that any '
                         'extra paths specified should be absolute.')
  parser.add_option('-l', '--blink',
                    action='store_true',
                    help='Use Blink bisect instead of Chromium. ')
  parser.add_option('', '--not-interactive',
                    action='store_true',
                    default=False,
                    help='Use command exit code to tell good/bad revision.')
  parser.add_option('--asan',
                    dest='asan',
                    action='store_true',
                    default=False,
                    help='Allow the script to bisect ASAN builds')
  parser.add_option('--use-local-cache',
                    dest='use_local_cache',
                    action='store_true',
                    default=False,
                    help='Use a local file in the current directory to cache '
                         'a list of known revisions to speed up the '
                         'initialization of this script.')
  parser.add_option('--verify-range',
                    dest='verify_range',
                    action='store_true',
                    default=False,
                    help='Test the first and last revisions in the range ' +
                         'before proceeding with the bisect.')

  (opts, args) = parser.parse_args()

  if opts.archive is None:
    print 'Error: missing required parameter: --archive'
    print
    parser.print_help()
    return 1

  # ASAN builds are only archived for a subset of platforms, and only for
  # Chromium (not official Chrome).
  if opts.asan:
    supported_platforms = ['linux', 'mac', 'win']
    if opts.archive not in supported_platforms:
      print 'Error: ASAN bisecting only supported on these platforms: [%s].' % (
          '|'.join(supported_platforms))
      return 1
    if opts.official_builds:
      print 'Error: Do not yet support bisecting official ASAN builds.'
      return 1

  if opts.asan:
    base_url = ASAN_BASE_URL
  elif opts.blink:
    base_url = WEBKIT_BASE_URL
  else:
    base_url = CHROMIUM_BASE_URL

  # Create the context. Initialize 0 for the revisions as they are set below.
  context = PathContext(base_url, opts.archive, opts.good, opts.bad,
                        opts.official_builds, opts.asan, opts.use_local_cache,
                        opts.flash_path)

  # Pick a starting point, try to get HEAD for this.
  if not opts.bad:
    context.bad_revision = '999.0.0.0'
    context.bad_revision = GetChromiumRevision(
        context, context.GetLastChangeURL())

  # Find out when we were good.
  if not opts.good:
    context.good_revision = '0.0.0.0' if opts.official_builds else 0

  if opts.flash_path:
    msg = 'Could not find Flash binary at %s' % opts.flash_path
    assert os.path.exists(opts.flash_path), msg

  # Official builds use dotted version strings; Chromium archives use plain
  # integer revision numbers.
  if opts.official_builds:
    context.good_revision = LooseVersion(context.good_revision)
    context.bad_revision = LooseVersion(context.bad_revision)
  else:
    context.good_revision = int(context.good_revision)
    context.bad_revision = int(context.bad_revision)

  if opts.times < 1:
    print('Number of times to run (%d) must be greater than or equal to 1.' %
          opts.times)
    parser.print_help()
    return 1

  # Choose how each tested revision is judged: automatically by exit code,
  # by scanning ASAN output, or by asking the user.
  if opts.not_interactive:
    evaluator = DidCommandSucceed
  elif opts.asan:
    evaluator = IsGoodASANBuild
  else:
    evaluator = AskIsGoodBuild

  # Save these revision numbers to compare when showing the changelog URL
  # after the bisect.
  good_rev = context.good_revision
  bad_rev = context.bad_revision

  (min_chromium_rev, max_chromium_rev, context) = Bisect(
      context, opts.times, opts.command, args, opts.profile,
      evaluator, opts.verify_range)

  # Get corresponding blink revisions.
  try:
    min_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        min_chromium_rev)
    max_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        max_chromium_rev)
  except Exception:
    # Silently ignore the failure.
    min_blink_rev, max_blink_rev = 0, 0

  if opts.blink:
    # We're done. Let the user know the results in an official manner.
    if good_rev > bad_rev:
      print DONE_MESSAGE_GOOD_MAX % (str(min_blink_rev), str(max_blink_rev))
    else:
      print DONE_MESSAGE_GOOD_MIN % (str(min_blink_rev), str(max_blink_rev))

    print 'BLINK CHANGELOG URL:'
    print ' ' + BLINK_CHANGELOG_URL % (max_blink_rev, min_blink_rev)

  else:
    # We're done. Let the user know the results in an official manner.
    if good_rev > bad_rev:
      print DONE_MESSAGE_GOOD_MAX % (str(min_chromium_rev),
                                     str(max_chromium_rev))
    else:
      print DONE_MESSAGE_GOOD_MIN % (str(min_chromium_rev),
                                     str(max_chromium_rev))
    if min_blink_rev != max_blink_rev:
      print ('NOTE: There is a Blink roll in the range, '
             'you might also want to do a Blink bisect.')

    print 'CHANGELOG URL:'
    if opts.official_builds:
      print OFFICIAL_CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)
    else:
      PrintChangeLog(min_chromium_rev, max_chromium_rev)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/bisect-manual-test.py b/chromium/tools/bisect-manual-test.py
new file mode 100755
index 00000000000..c1b6329f31b
--- /dev/null
+++ b/chromium/tools/bisect-manual-test.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple script which asks user to manually check result of bisection.
+
+Typically used as by the run-bisect-manual-test.py script.
+"""
+
+import os
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)),
+ 'perf'))
+from chrome_telemetry_build import chromium_config
+sys.path.append(chromium_config.GetTelemetryDir())
+
+from telemetry.internal.browser import browser_finder
+from telemetry.internal.browser import browser_options
+
+
+def _StartManualTest(options):
+ """Start browser then ask the user whether build is good or bad."""
+ browser_to_create = browser_finder.FindBrowser(options)
+ print 'Starting browser: %s.' % options.browser_type
+ with browser_to_create.Create(options) as _:
+ # Loop until we get a response that we can parse.
+ while True:
+ sys.stderr.write('Revision is [(g)ood/(b)ad]: ')
+ response = raw_input()
+ if response and response in ('g', 'b'):
+ if response in ('g'):
+ print 'RESULT manual_test: manual_test= 1'
+ else:
+ print 'RESULT manual_test: manual_test= 0'
+ break
+
+
def main():
  """Parses browser-finder options and runs a single manual good/bad check."""
  usage = ('%prog [options]\n'
           'Starts browser with an optional url and asks user whether '
           'revision is good or bad.\n')

  finder_options = browser_options.BrowserFinderOptions()
  option_parser = finder_options.CreateParser(usage)
  finder_options, _ = option_parser.parse_args()

  _StartManualTest(finder_options)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/bisect_test.py b/chromium/tools/bisect_test.py
new file mode 100644
index 00000000000..b970f84e36d
--- /dev/null
+++ b/chromium/tools/bisect_test.py
@@ -0,0 +1,53 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+bisect_builds = __import__('bisect-builds')
+
+
class BisectTest(unittest.TestCase):
  # Stack of (object, attribute_name, original_value) tuples recorded by
  # monkey_patch() so clear_patching() can restore everything.
  # NOTE(review): declared at class level, but clear_patching() rebinds it
  # to a fresh list on the instance after restoring, so patches do not leak
  # across test runs in practice.
  patched = []
  # Number of fake revisions served by the patched ParseDirectoryIndex.
  max_rev = 10000

  def monkey_patch(self, obj, name, new):
    # Remember the original attribute so tearDown can put it back.
    self.patched.append((obj, name, getattr(obj, name)))
    setattr(obj, name, new)

  def clear_patching(self):
    # Restore every patched attribute, then reset the record.
    for obj, name, old in self.patched:
      setattr(obj, name, old)
    self.patched = []

  def setUp(self):
    # Stub out all download/run machinery so Bisect() runs instantly and
    # deterministically, with revisions 0..max_rev-1 available.
    self.monkey_patch(bisect_builds.DownloadJob, 'Start', lambda *args: None)
    self.monkey_patch(bisect_builds.DownloadJob, 'Stop', lambda *args: None)
    self.monkey_patch(bisect_builds.DownloadJob, 'WaitFor', lambda *args: None)
    self.monkey_patch(bisect_builds, 'RunRevision', lambda *args: (0, "", ""))
    self.monkey_patch(bisect_builds.PathContext, 'ParseDirectoryIndex',
                      lambda *args: range(self.max_rev))

  def tearDown(self):
    self.clear_patching()

  def bisect(self, good_rev, bad_rev, evaluate):
    # Convenience wrapper around bisect_builds.Bisect with fixed settings.
    return bisect_builds.Bisect(good_rev=good_rev,
                                bad_rev=bad_rev,
                                evaluate=evaluate,
                                num_runs=1,
                                official_builds=False,
                                platform='linux',
                                profile=None,
                                try_args=())

  def testBisectConsistentAnswer(self):
    # With a constant answer the bisect must converge to the two adjacent
    # revisions at the appropriate end of the range, regardless of whether
    # good is below or above bad.
    self.assertEqual(self.bisect(1000, 100, lambda *args: 'g'), (100, 101))
    self.assertEqual(self.bisect(100, 1000, lambda *args: 'b'), (100, 101))
    self.assertEqual(self.bisect(2000, 200, lambda *args: 'b'), (1999, 2000))
    self.assertEqual(self.bisect(200, 2000, lambda *args: 'g'), (1999, 2000))


if __name__ == '__main__':
  unittest.main()
diff --git a/chromium/tools/boilerplate.py b/chromium/tools/boilerplate.py
new file mode 100755
index 00000000000..3ab0731f95f
--- /dev/null
+++ b/chromium/tools/boilerplate.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create files with copyright boilerplate and header include guards.
+
+Usage: tools/boilerplate.py path/to/file.{h,cc}
+"""
+
+from datetime import date
+import os
+import os.path
+import sys
+
+LINES = [
+ 'Copyright %d The Chromium Authors. All rights reserved.' %
+ date.today().year,
+ 'Use of this source code is governed by a BSD-style license that can be',
+ 'found in the LICENSE file.'
+]
+
+EXTENSIONS_TO_COMMENTS = {
+ 'h': '//',
+ 'cc': '//',
+ 'mm': '//',
+ 'js': '//',
+ 'py': '#',
+ 'gn': '#',
+ 'gni': '#',
+}
+
def _GetHeader(filename):
  """Returns the copyright boilerplate for |filename|, prefixing each line
  with the comment leader matching the file's extension."""
  extension = os.path.splitext(filename)[1][1:]
  leader = EXTENSIONS_TO_COMMENTS[extension] + ' '
  return '\n'.join(leader + line for line in LINES)
+
+
+def _CppHeader(filename):
+ guard = filename.replace('/', '_').replace('.', '_').upper() + '_'
+ return '\n'.join([
+ '',
+ '#ifndef ' + guard,
+ '#define ' + guard,
+ '',
+ '#endif // ' + guard,
+ ''
+ ])
+
+
+def _CppImplementation(filename):
+ base, _ = os.path.splitext(filename)
+ include = '#include "' + base + '.h"'
+ return '\n'.join(['', include])
+
+
+def _ObjCppImplementation(filename):
+ base, _ = os.path.splitext(filename)
+ include = '#import "' + base + '.h"'
+ return '\n'.join(['', include])
+
+
def _CreateFile(filename):
  """Writes copyright boilerplate (plus a language-specific skeleton for
  .h/.cc/.mm files) to a new file at |filename|."""
  contents = _GetHeader(filename) + '\n'

  if filename.endswith('.h'):
    contents += _CppHeader(filename)
  elif filename.endswith('.cc'):
    contents += _CppImplementation(filename)
  elif filename.endswith('.mm'):
    contents += _ObjCppImplementation(filename)

  # Use a context manager so the file handle is closed even if write()
  # raises (the original open/write/close leaked the handle on error).
  with open(filename, 'w') as fd:
    fd.write(contents)
+
+
def Main():
  """Command-line entry point: creates each requested file with boilerplate.

  Returns 1 on usage error, 2 on validation failure, and None (exit code 0)
  on success.
  """
  files = sys.argv[1:]
  if len(files) < 1:
    print >> sys.stderr, 'Usage: boilerplate.py path/to/file.h path/to/file.cc'
    return 1

  # Perform checks first so that the entire operation is atomic.
  for f in files:
    _, ext = os.path.splitext(f)
    if not ext[1:] in EXTENSIONS_TO_COMMENTS:
      print >> sys.stderr, 'Unknown file type for %s' % f
      return 2

    if os.path.exists(f):
      print >> sys.stderr, 'A file at path %s already exists' % f
      return 2

  for f in files:
    _CreateFile(f)


if __name__ == '__main__':
  sys.exit(Main())
diff --git a/chromium/tools/cfi/OWNERS b/chromium/tools/cfi/OWNERS
new file mode 100644
index 00000000000..ba0dc3272cf
--- /dev/null
+++ b/chromium/tools/cfi/OWNERS
@@ -0,0 +1,2 @@
+krasin@chromium.org
+pcc@chromium.org
diff --git a/chromium/tools/cfi/blacklist.txt b/chromium/tools/cfi/blacklist.txt
new file mode 100644
index 00000000000..ab60ab63256
--- /dev/null
+++ b/chromium/tools/cfi/blacklist.txt
@@ -0,0 +1,80 @@
+# e.g. RolloverProtectedTickClock
+fun:*MutableInstance*
+
+# WTF allocators.
+fun:*allocate*Backing*
+
+# WTF::ThreadSpecific
+fun:*ThreadSpecific*
+
+# Mesa contains several bad casts.
+src:*third_party/mesa*
+
+# Deliberate bad cast to derived class to hide functions.
+type:*BlockIUnknownMethods*
+type:*BlockRefType*
+type:*SkAutoTUnref*
+type:*SkBlockComRef*
+# https://crbug.com/517959
+type:*NoAddRefRelease*
+
+# All mojo::test:: types.
+# They are loaded from libmojo_public_test_support.so
+# https://crbug.com/515347
+type:mojo::test::*
+
+# All types and sources from libclearkeycdm.so.
+# See https://crbug.com/557969
+type:media::CdmVideoDecoder
+type:media::ClearKeyCdm
+type:media::FakeCdmVideoDecoder
+type:media::FFmpegCdmAudioDecoder
+type:media::FFmpegCdmVideoDecoder
+type:media::LibvpxCdmVideoDecoder
+src:*external_clear_key*
+type:cdm::*
+
+# invalid downcasts for IPC messages
+# https://crbug.com/520760
+src:*nacl_message_scanner.cc*
+
+# src/base/win/event_trace_provider_unittest.cc
+type:*EtwTraceProvider*
+
+# These classes are used to communicate between chrome.exe and
+# chrome_child.dll (see src/sandbox/win/src/sandbox.h,
+# src/chrome/app/chrome_main.cc).
+type:sandbox::BrokerServices
+type:sandbox::TargetPolicy
+type:sandbox::TargetServices
+
+# Likewise (base/win/scoped_handle.cc).
+type:*ActiveVerifier*
+
+#############################################################################
+# Base class's constructor accesses a derived class.
+
+fun:*DoublyLinkedListNode*
+
+# RenderFrameObserverTracker<T>::RenderFrameObserverTracker()
+fun:*content*RenderFrameObserverTracker*RenderFrame*
+
+# RenderViewObserverTracker<T>::RenderViewObserverTracker()
+fun:*content*RenderViewObserverTracker*RenderView*
+
+fun:*RefCountedGarbageCollected*makeKeepAlive*
+fun:*ThreadSafeRefCountedGarbageCollected*makeKeepAlive*
+
+#############################################################################
+# Base class's destructor accesses a derived class.
+
+fun:*DatabaseContext*contextDestroyed*
+
+# FIXME: Cannot handle template function LifecycleObserver<>::setContext,
+# so exclude source file for now.
+src:*LifecycleObserver.h*
+
+# Blink wtf::Vector cast on non-initialized data
+# https://crbug.com/568891
+src:*wtf/Vector.h*
+src:*wtf/PartitionAllocator.h*
diff --git a/chromium/tools/check_ecs_deps/check_ecs_deps.py b/chromium/tools/check_ecs_deps/check_ecs_deps.py
new file mode 100755
index 00000000000..4ac06bb2989
--- /dev/null
+++ b/chromium/tools/check_ecs_deps/check_ecs_deps.py
@@ -0,0 +1,205 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+''' Verifies that builds of the embedded content_shell do not included
+unnecessary dependencies.'''
+
+import os
+import re
+import string
+import subprocess
+import sys
+import optparse
+
# Shared libraries the embedded content_shell must NOT link against
# (X11/GTK/glib and other desktop-stack dependencies).
kUndesiredLibraryList = [
  'libX11',
  'libXau',
  'libXcomposite',
  'libXcursor',
  'libXdamage',
  'libXdmcp',
  'libXext',
  'libXfixes',
  'libXi',
  'libXrandr',
  'libXrender',
  'libXtst',
  'libasound',
  'libcairo',
  'libdbus',
  'libffi',
  'libgconf',
  'libgio',
  'libglib',
  'libgmodule',
  'libgobject',
  'libpango',
  'libpcre',
  'libpixman',
  'libpng',
  'libselinux',
  'libudev',
  'libxcb',
]

# Shared libraries that are expected and acceptable for the embedded build.
kAllowedLibraryList = [
  # Toolchain libraries (gcc/glibc)
  'ld-linux',
  'libc',
  'libdl',
  'libgcc_s',
  'libm',
  'libpthread',
  'libresolv',
  'librt',
  'libstdc++',
  'linux-vdso',

  # Needed for default ozone platforms
  'libdrm',

  # NSS & NSPR
  'libnss3',
  'libnssutil3',
  'libnspr4',
  'libplc4',
  'libplds4',
  'libsmime3',

  # OpenSSL
  'libcrypto',

  # Miscellaneous
  'libcap',
  'libexpat',
  'libfontconfig',
  'libz',
]

# Name of the binary to inspect, relative to the build directory.
binary_target = 'content_shell'
+
def stdmsg(_final, errors):
  """Plain-text message sink: prints each message; the final status is
  ignored (the underscore parameter keeps the same signature as bbmsg so
  the two are interchangeable in the output dispatch table)."""
  if errors:
    for message in errors:
      print message
+
def bbmsg(final, errors):
  """Buildbot-annotation message sink: wraps each message in a
  @@@STEP_TEXT@...@@@ marker and, when |final| is set, emits a trailing
  @@@STEP_<status>@@@ annotation."""
  if errors:
    for message in errors:
      print '@@@STEP_TEXT@%s@@@' % message
  if final:
    print '\n@@@STEP_%s@@@' % final
+
+
def _main():
  """Verifies the built content_shell's dynamic dependencies via ldd.

  Every library reported by ldd is classified as forbidden, built in-tree,
  blessed (allowed), or unexpected (warning). Returns 0 on success
  (possibly with warnings), non-zero on failure.
  """
  # Severity-keyed message sinks; replaced with buildbot-annotated versions
  # below when --annotate is passed.
  output = {
    'message': lambda x: stdmsg(None, x),
    'fail': lambda x: stdmsg('FAILED', x),
    'warn': lambda x: stdmsg('WARNING', x),
    'abend': lambda x: stdmsg('FAILED', x),
    'ok': lambda x: stdmsg('SUCCESS', x),
    'verbose': lambda x: None,
  }

  parser = optparse.OptionParser(
      "usage: %prog -b <dir> --target <Debug|Release>")
  parser.add_option("", "--annotate", dest='annotate', action='store_true',
                    default=False, help="include buildbot annotations in output")
  parser.add_option("", "--noannotate", dest='annotate', action='store_false')
  parser.add_option("-b", "--build-dir",
                    help="the location of the compiler output")
  parser.add_option("--target", help="Debug or Release")
  parser.add_option('-v', '--verbose', default=False, action='store_true')

  options, args = parser.parse_args()
  if args:
    parser.usage()
    return -1

  # Bake target into build_dir.
  if options.target and options.build_dir:
    assert (options.target !=
            os.path.basename(os.path.dirname(options.build_dir)))
    options.build_dir = os.path.join(os.path.abspath(options.build_dir),
                                     options.target)

  if options.build_dir != None:
    build_dir = os.path.abspath(options.build_dir)
  else:
    build_dir = os.getcwd()

  target = os.path.join(build_dir, binary_target)

  if options.annotate:
    output.update({
      'message': lambda x: bbmsg(None, x),
      'fail': lambda x: bbmsg('FAILURE', x),
      'warn': lambda x: bbmsg('WARNINGS', x),
      'abend': lambda x: bbmsg('EXCEPTIONS', x),
      'ok': lambda x: bbmsg(None, x),
    })

  if options.verbose:
    output['verbose'] = lambda x: stdmsg(None, x)

  # Pre-compiled matchers: forbidden library names, "lib => path" mapping
  # lines from ldd output, allowed library names (with version suffixes),
  # and libraries resolved from our own build directory.
  forbidden_regexp = re.compile(string.join(map(re.escape,
                                                kUndesiredLibraryList), '|'))
  mapping_regexp = re.compile(r"\s*([^/]*) => (.*)")
  blessed_regexp = re.compile(r"(%s)[-0-9.]*\.so" % string.join(map(re.escape,
                                                kAllowedLibraryList), '|'))
  built_regexp = re.compile(re.escape(build_dir + os.sep))

  success = 0
  warning = 0

  p = subprocess.Popen(['ldd', target], stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE)
  out, err = p.communicate()

  if err != '':
    output['abend']([
      'Failed to execute ldd to analyze dependencies for ' + target + ':',
      ' ' + err,
    ])
    return 1

  if out == '':
    output['abend']([
      'No output to scan for forbidden dependencies.'
    ])
    return 1

  success = 1
  deps = string.split(out, '\n')
  for d in deps:
    libmatch = mapping_regexp.match(d)
    if libmatch:
      lib = libmatch.group(1)
      source = libmatch.group(2)
      if forbidden_regexp.search(lib):
        success = 0
        output['message'](['Forbidden library: ' + lib])
      elif built_regexp.match(source):
        output['verbose'](['Built library: ' + lib])
      elif blessed_regexp.match(lib):
        output['verbose'](['Blessed library: ' + lib])
      else:
        # Unknown library: not fatal, but worth flagging.
        warning = 1
        output['message'](['Unexpected library: ' + lib])

  if success == 1:
    if warning == 1:
      output['warn'](None)
    else:
      output['ok'](None)
    return 0
  else:
    output['fail'](None)
    return 1

if __name__ == "__main__":
  # handle arguments...
  # do something reasonable if not run with one...
  sys.exit(_main())
diff --git a/chromium/tools/check_git_config.py b/chromium/tools/check_git_config.py
new file mode 100755
index 00000000000..17b0d1ef4d7
--- /dev/null
+++ b/chromium/tools/check_git_config.py
@@ -0,0 +1,540 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script that attempts to push to a special git repository to verify that git
+credentials are configured correctly. It also verifies that gclient solution is
+configured to use git checkout.
+
+It will be added as gclient hook shortly before Chromium switches to git and
+removed after the switch.
+
+When running as hook in *.corp.google.com network it will also report status
+of the push attempt to the server (on appengine), so that chrome-infra team can
+collect information about misconfigured Git accounts.
+"""
+
+import contextlib
+import datetime
+import errno
+import getpass
+import json
+import logging
+import netrc
+import optparse
+import os
+import pprint
+import shutil
+import socket
+import ssl
+import subprocess
+import sys
+import tempfile
+import time
+import urllib2
+import urlparse
+
+
+# Absolute path to src/ directory.
+REPO_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+# Absolute path to a file with gclient solutions.
+GCLIENT_CONFIG = os.path.join(os.path.dirname(REPO_ROOT), '.gclient')
+
+# Incremented whenever some changes to script logic are made. Change in version
+# will cause the check to be rerun on next gclient runhooks invocation.
+CHECKER_VERSION = 1
+
+# Do not attempt to upload a report after this date.
+UPLOAD_DISABLE_TS = datetime.datetime(2014, 10, 1)
+
+# URL to POST json with results to.
+MOTHERSHIP_URL = (
+    'https://chromium-git-access.appspot.com/'
+    'git_access/api/v1/reports/access_check')
+
+# Repository to push test commits to.
+TEST_REPO_URL = 'https://chromium.googlesource.com/a/playground/access_test'
+
+# Git-compatible gclient solution. Compared against the local .gclient
+# solution in check_gclient_config().
+GOOD_GCLIENT_SOLUTION = {
+  'name': 'src',
+  'deps_file': 'DEPS',
+  'managed': False,
+  'url': 'https://chromium.googlesource.com/chromium/src.git',
+}
+
+# Possible chunks of git push response in case .netrc is misconfigured.
+# Matching any of these in the push output stops the retry loop.
+BAD_ACL_ERRORS = (
+  '(prohibited by Gerrit)',
+  'does not match your user account',
+  'Git repository not found',
+  'Invalid user name or password',
+  'Please make sure you have the correct access rights',
+)
+
+# Git executable to call.
+GIT_EXE = 'git.bat' if sys.platform == 'win32' else 'git'
+
+
+def is_on_bot():
+  """True when running under buildbot."""
+  # CHROME_HEADLESS=1 is presumably exported by the bot environment — the
+  # hook uses it to skip the push check entirely on bots (see main()).
+  return os.environ.get('CHROME_HEADLESS') == '1'
+
+
+def is_in_google_corp():
+  """True when running in google corp network."""
+  try:
+    # getfqdn() may hit DNS and can raise socket.error; treat failures as
+    # "not in corp" rather than breaking the hook.
+    return socket.getfqdn().endswith('.corp.google.com')
+  except socket.error:
+    logging.exception('Failed to get FQDN')
+    return False
+
+
+def is_using_git():
+  """True if git checkout is used."""
+  # Checking for .git/objects (not just .git) filters out stray .git files.
+  return os.path.exists(os.path.join(REPO_ROOT, '.git', 'objects'))
+
+
+def is_using_svn():
+  """True if svn checkout is used."""
+  # Presence of the .svn metadata directory at the repo root.
+  return os.path.exists(os.path.join(REPO_ROOT, '.svn'))
+
+
+def read_git_config(prop):
+  """Reads git config property of src.git repo.
+
+  Returns empty string in case of errors.
+  """
+  try:
+    proc = subprocess.Popen(
+        [GIT_EXE, 'config', prop], stdout=subprocess.PIPE, cwd=REPO_ROOT)
+    out, _ = proc.communicate()
+    # Note: a non-zero git exit still returns whatever (possibly empty)
+    # stdout was produced; the exit code is deliberately ignored.
+    return out.strip().decode('utf-8')
+  except OSError as exc:
+    # ENOENT (git not installed) is expected and silent; anything else is
+    # logged but still mapped to ''.
+    if exc.errno != errno.ENOENT:
+      logging.exception('Unexpected error when calling git')
+    return ''
+
+
+def read_netrc_user(netrc_obj, host):
+  """Reads 'user' field of a host entry in netrc.
+
+  Returns empty string if netrc is missing, or host is not there.
+  """
+  if not netrc_obj:
+    return ''
+  # netrc.authenticators() returns (login, account, password) or None.
+  entry = netrc_obj.authenticators(host)
+  if not entry:
+    return ''
+  return entry[0]
+
+
+def get_git_version():
+  """Returns version string of git, or '' if git is not available.
+
+  (Every code path returns a string, never None: a non-zero exit, a missing
+  binary and an unexpected OSError all yield ''.)
+  """
+  try:
+    proc = subprocess.Popen([GIT_EXE, '--version'], stdout=subprocess.PIPE)
+    out, _ = proc.communicate()
+    return out.strip() if proc.returncode == 0 else ''
+  except OSError as exc:
+    # ENOENT simply means git is not installed; stay quiet in that case.
+    if exc.errno != errno.ENOENT:
+      logging.exception('Unexpected error when calling git')
+    return ''
+
+
+def read_gclient_solution():
+  """Read information about 'src' gclient solution from .gclient file.
+
+  Returns tuple:
+    (url, deps_file, managed)
+  or
+    (None, None, None) if no such solution.
+  """
+  try:
+    env = {}
+    # .gclient is a Python file; execute it and pick up its 'solutions' list.
+    execfile(GCLIENT_CONFIG, env, env)
+    for sol in (env.get('solutions') or []):
+      if sol.get('name') == 'src':
+        return sol.get('url'), sol.get('deps_file'), sol.get('managed')
+    return None, None, None
+  except Exception:
+    # Broad catch on purpose: a malformed .gclient must not break the hook.
+    logging.exception('Failed to read .gclient solution')
+    return None, None, None
+
+
+def read_git_insteadof(host):
+  """Reads relevant insteadOf config entries.
+
+  Returns the matching 'git config -l' lines (lower-cased) joined with
+  newlines, or '' on error.
+  """
+  try:
+    proc = subprocess.Popen([GIT_EXE, 'config', '-l'], stdout=subprocess.PIPE)
+    out, _ = proc.communicate()
+    lines = []
+    for line in out.strip().split('\n'):
+      line = line.lower()
+      # Keep only config lines that contain both 'insteadof=' and |host|.
+      if 'insteadof=' in line and host in line:
+        lines.append(line)
+    return '\n'.join(lines)
+  except OSError as exc:
+    if exc.errno != errno.ENOENT:
+      logging.exception('Unexpected error when calling git')
+    return ''
+
+
+def scan_configuration():
+  """Scans local environment for git related configuration values.
+
+  Returns a JSON-serializable dict; it is both uploaded in the report and
+  persisted via write_last_configuration() to detect config changes.
+  """
+  # Git checkout?
+  is_git = is_using_git()
+
+  # On Windows HOME should be set.
+  if 'HOME' in os.environ:
+    netrc_path = os.path.join(
+        os.environ['HOME'],
+        '_netrc' if sys.platform.startswith('win') else '.netrc')
+  else:
+    netrc_path = None
+
+  # Netrc exists?
+  is_using_netrc = netrc_path and os.path.exists(netrc_path)
+
+  # Read it.
+  netrc_obj = None
+  if is_using_netrc:
+    try:
+      netrc_obj = netrc.netrc(netrc_path)
+    except Exception:
+      # netrc.netrc can raise NetrcParseError (and IOError); report as
+      # "no netrc" rather than failing the scan.
+      logging.exception('Failed to read netrc from %s', netrc_path)
+      netrc_obj = None
+
+  # Read gclient 'src' solution.
+  gclient_url, gclient_deps, gclient_managed = read_gclient_solution()
+
+  return {
+    'checker_version': CHECKER_VERSION,
+    'is_git': is_git,
+    'is_home_set': 'HOME' in os.environ,
+    'is_using_netrc': is_using_netrc,
+    # File permission bits of the netrc file (0 when absent).
+    'netrc_file_mode': os.stat(netrc_path).st_mode if is_using_netrc else 0,
+    'git_version': get_git_version(),
+    'platform': sys.platform,
+    'username': getpass.getuser(),
+    'git_user_email': read_git_config('user.email') if is_git else '',
+    'git_user_name': read_git_config('user.name') if is_git else '',
+    'git_insteadof': read_git_insteadof('chromium.googlesource.com'),
+    'chromium_netrc_email':
+        read_netrc_user(netrc_obj, 'chromium.googlesource.com'),
+    'chrome_internal_netrc_email':
+        read_netrc_user(netrc_obj, 'chrome-internal.googlesource.com'),
+    'gclient_deps': gclient_deps,
+    'gclient_managed': gclient_managed,
+    'gclient_url': gclient_url,
+  }
+
+
+def last_configuration_path():
+  """Path to store last checked configuration.
+
+  The file lives inside the checkout's metadata directory (.git or .svn)
+  when one exists, so it is discarded together with the checkout.
+  """
+  if is_using_git():
+    return os.path.join(REPO_ROOT, '.git', 'check_git_push_access_conf.json')
+  elif is_using_svn():
+    return os.path.join(REPO_ROOT, '.svn', 'check_git_push_access_conf.json')
+  else:
+    return os.path.join(REPO_ROOT, '.check_git_push_access_conf.json')
+
+
+def read_last_configuration():
+  """Reads last checked configuration if it exists.
+
+  Returns the parsed dict, or None when the file is missing or corrupt
+  (which simply causes the check to run again).
+  """
+  try:
+    with open(last_configuration_path(), 'r') as f:
+      return json.load(f)
+  except (IOError, ValueError):
+    return None
+
+
+def write_last_configuration(conf):
+ """Writes last checked configuration to a file."""
+ try:
+ with open(last_configuration_path(), 'w') as f:
+ json.dump(conf, f, indent=2, sort_keys=True)
+ except IOError:
+ logging.exception('Failed to write JSON to %s', path)
+
+
+@contextlib.contextmanager
+def temp_directory():
+  """Creates a temp directory, then nukes it."""
+  tmp = tempfile.mkdtemp()
+  try:
+    yield tmp
+  finally:
+    try:
+      shutil.rmtree(tmp)
+    except (OSError, IOError):
+      # Failing to clean up a temp dir is logged but never propagated.
+      logging.exception('Failed to remove temp directory %s', tmp)
+
+
+class Runner(object):
+  """Runs a bunch of commands in some directory, collects logs from them."""
+
+  def __init__(self, cwd, verbose):
+    # Working directory every command is executed in.
+    self.cwd = cwd
+    # When True, every logged line is also echoed via logging.warning.
+    self.verbose = verbose
+    # Accumulated command lines and their combined stdout+stderr output.
+    self.log = []
+
+  def run(self, cmd):
+    """Runs |cmd| (an argv list), returns its exit code.
+
+    Combined stdout+stderr is appended to self.log. If the executable cannot
+    be launched, the OSError text is logged and -1 is returned.
+    """
+    self.append_to_log('> ' + ' '.join(cmd))
+    retcode = -1
+    try:
+      proc = subprocess.Popen(
+          cmd,
+          stdout=subprocess.PIPE,
+          stderr=subprocess.STDOUT,
+          cwd=self.cwd)
+      out, _ = proc.communicate()
+      out = out.strip()
+      retcode = proc.returncode
+    except OSError as exc:
+      out = str(exc)
+    # Any non-zero code (including the -1 OSError sentinel) is recorded in
+    # the log next to the output that produced it.
+    if retcode:
+      out += '\n(exit code: %d)' % retcode
+    self.append_to_log(out)
+    return retcode
+
+  def append_to_log(self, text):
+    """Appends non-empty |text| to the log, echoing it when verbose."""
+    if text:
+      self.log.append(text)
+      if self.verbose:
+        logging.warning(text)
+
+
+def check_git_config(conf, report_url, verbose):
+ """Attempts to push to a git repository, reports results to a server.
+
+ Returns True if the check finished without incidents (push itself may
+ have failed) and should NOT be retried on next invocation of the hook.
+ """
+ # Don't even try to push if netrc is not configured.
+ if not conf['chromium_netrc_email']:
+ return upload_report(
+ conf,
+ report_url,
+ verbose,
+ push_works=False,
+ push_log='',
+ push_duration_ms=0)
+
+ # Ref to push to, each user has its own ref.
+ ref = 'refs/push-test/%s' % conf['chromium_netrc_email']
+
+ push_works = False
+ flake = False
+ started = time.time()
+ try:
+ logging.warning('Checking push access to the git repository...')
+ with temp_directory() as tmp:
+ # Prepare a simple commit on a new timeline.
+ runner = Runner(tmp, verbose)
+ runner.run([GIT_EXE, 'init', '.'])
+ if conf['git_user_name']:
+ runner.run([GIT_EXE, 'config', 'user.name', conf['git_user_name']])
+ if conf['git_user_email']:
+ runner.run([GIT_EXE, 'config', 'user.email', conf['git_user_email']])
+ with open(os.path.join(tmp, 'timestamp'), 'w') as f:
+ f.write(str(int(time.time() * 1000)))
+ runner.run([GIT_EXE, 'add', 'timestamp'])
+ runner.run([GIT_EXE, 'commit', '-m', 'Push test.'])
+ # Try to push multiple times if it fails due to issues other than ACLs.
+ attempt = 0
+ while attempt < 5:
+ attempt += 1
+ logging.info('Pushing to %s %s', TEST_REPO_URL, ref)
+ ret = runner.run(
+ [GIT_EXE, 'push', TEST_REPO_URL, 'HEAD:%s' % ref, '-f'])
+ if not ret:
+ push_works = True
+ break
+ if any(x in runner.log[-1] for x in BAD_ACL_ERRORS):
+ push_works = False
+ break
+ except Exception:
+ logging.exception('Unexpected exception when pushing')
+ flake = True
+
+ if push_works:
+ logging.warning('Git push works!')
+ else:
+ logging.warning(
+ 'Git push doesn\'t work, which is fine if you are not a committer.')
+
+ uploaded = upload_report(
+ conf,
+ report_url,
+ verbose,
+ push_works=push_works,
+ push_log='\n'.join(runner.log),
+ push_duration_ms=int((time.time() - started) * 1000))
+ return uploaded and not flake
+
+
+def check_gclient_config(conf):
+  """Shows warning if gclient solution is not properly configured for git.
+
+  Purely advisory: prints warnings to stdout and returns nothing.
+  """
+  # Ignore configs that do not have 'src' solution at all.
+  if not conf['gclient_url']:
+    return
+  # Normalize the local solution so it can be compared field-by-field
+  # against GOOD_GCLIENT_SOLUTION.
+  current = {
+    'name': 'src',
+    'deps_file': conf['gclient_deps'] or 'DEPS',
+    'managed': conf['gclient_managed'] or False,
+    'url': conf['gclient_url'],
+  }
+  # After depot_tools r291592 both DEPS and .DEPS.git are valid.
+  good = GOOD_GCLIENT_SOLUTION.copy()
+  good['deps_file'] = current['deps_file']
+  if current == good:
+    return
+  # Show big warning if url or deps_file is wrong.
+  if current['url'] != good['url'] or current['deps_file'] != good['deps_file']:
+    print '-' * 80
+    print 'Your gclient solution is not set to use supported git workflow!'
+    print
+    print 'Your \'src\' solution (in %s):' % GCLIENT_CONFIG
+    print pprint.pformat(current, indent=2)
+    print
+    print 'Correct \'src\' solution to use git:'
+    print pprint.pformat(good, indent=2)
+    print
+    print 'Please update your .gclient file ASAP.'
+    print '-' * 80
+  # Show smaller (additional) warning about managed workflow.
+  if current['managed']:
+    print '-' * 80
+    print (
+        'You are using managed gclient mode with git, which was deprecated '
+        'on 8/22/13:')
+    print (
+        'https://groups.google.com/a/chromium.org/'
+        'forum/#!topic/chromium-dev/n9N5N3JL2_U')
+    print
+    print (
+        'It is strongly advised to switch to unmanaged mode. For more '
+        'information about managed mode and reasons for its deprecation see:')
+    print 'http://www.chromium.org/developers/how-tos/get-the-code/gclient-managed-mode'
+    print
+    print (
+        'There\'s also a large suite of tools to assist managing git '
+        'checkouts.\nSee \'man depot_tools\' (or read '
+        'depot_tools/man/html/depot_tools.html).')
+    print '-' * 80
+
+
+def upload_report(
+    conf, report_url, verbose, push_works, push_log, push_duration_ms):
+  """Posts report to the server, returns True if server accepted it.
+
+  Uploads the report only if script is running in Google corp network. Otherwise
+  just prints the report.
+  """
+  report = conf.copy()
+  report.update(
+      push_works=push_works,
+      push_log=push_log,
+      push_duration_ms=push_duration_ms)
+
+  # Despite the name, this is a JSON str (json.dumps returns str, not bytes,
+  # on Python 2).
+  as_bytes = json.dumps({'access_check': report}, indent=2, sort_keys=True)
+  if verbose:
+    print 'Status of git push attempt:'
+    print as_bytes
+
+  # Do not upload it outside of corp or if server side is already disabled.
+  if not is_in_google_corp() or datetime.datetime.now() > UPLOAD_DISABLE_TS:
+    if verbose:
+      print (
+          'You can send the above report to chrome-git-migration@google.com '
+          'if you need help to set up you committer git account.')
+    # Treated as success so the caller does not keep retrying the check.
+    return True
+
+  req = urllib2.Request(
+      url=report_url,
+      data=as_bytes,
+      headers={'Content-Type': 'application/json; charset=utf-8'})
+
+  # Retry the upload up to 10 times with a 5 second timeout per attempt.
+  attempt = 0
+  success = False
+  while not success and attempt < 10:
+    attempt += 1
+    try:
+      logging.warning(
+          'Attempting to upload the report to %s...',
+          urlparse.urlparse(report_url).netloc)
+      resp = urllib2.urlopen(req, timeout=5)
+      report_id = None
+      try:
+        # Response body is best-effort JSON; a missing/invalid id is fine.
+        report_id = json.load(resp)['report_id']
+      except (ValueError, TypeError, KeyError):
+        pass
+      logging.warning('Report uploaded: %s', report_id)
+      success = True
+    except (urllib2.URLError, socket.error, ssl.SSLError) as exc:
+      logging.warning('Failed to upload the report: %s', exc)
+  return success
+
+
+def main(args):
+ parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
+ parser.add_option(
+ '--running-as-hook',
+ action='store_true',
+ help='Set when invoked from gclient hook')
+ parser.add_option(
+ '--report-url',
+ default=MOTHERSHIP_URL,
+ help='URL to submit the report to')
+ parser.add_option(
+ '--verbose',
+ action='store_true',
+ help='More logging')
+ options, args = parser.parse_args()
+ if args:
+ parser.error('Unknown argument %s' % args)
+ logging.basicConfig(
+ format='%(message)s',
+ level=logging.INFO if options.verbose else logging.WARN)
+
+ # When invoked not as a hook, always run the check.
+ if not options.running_as_hook:
+ config = scan_configuration()
+ check_gclient_config(config)
+ check_git_config(config, options.report_url, True)
+ return 0
+
+ # Always do nothing on bots.
+ if is_on_bot():
+ return 0
+
+ # Read current config, verify gclient solution looks correct.
+ config = scan_configuration()
+ check_gclient_config(config)
+
+ # Do not attempt to push from non-google owned machines.
+ if not is_in_google_corp():
+ logging.info('Skipping git push check: non *.corp.google.com machine.')
+ return 0
+
+ # Skip git push check if current configuration was already checked.
+ if config == read_last_configuration():
+ logging.info('Check already performed, skipping.')
+ return 0
+
+ # Run the check. Mark configuration as checked only on success. Ignore any
+ # exceptions or errors. This check must not break gclient runhooks.
+ try:
+ ok = check_git_config(config, options.report_url, False)
+ if ok:
+ write_last_configuration(config)
+ else:
+ logging.warning('Check failed and will be retried on the next run')
+ except Exception:
+ logging.exception('Unexpected exception when performing git access check')
+ return 0
+
+
+if __name__ == '__main__':
+  # Run the checker against the real command line, propagating its exit code.
+  sys.exit(main(sys.argv[1:]))
diff --git a/chromium/tools/check_grd_for_unused_strings.py b/chromium/tools/check_grd_for_unused_strings.py
new file mode 100755
index 00000000000..8124b57604a
--- /dev/null
+++ b/chromium/tools/check_grd_for_unused_strings.py
@@ -0,0 +1,183 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Without any args, this simply loads the IDs out of a bunch of the Chrome GRD
+files, and then checks the subset of the code that loads the strings to try
+and figure out what isn't in use any more.
+You can give paths to GRD files and source directories to control what is
+checked instead.
+"""
+
+import os
+import re
+import sys
+import xml.sax
+
+# Extra messages along the way
+# 1 - Print ids that are found in sources but not in the found id set
+# 2 - Files that aren't processed (don't match the source name regex)
+DEBUG = 0
+
+
+class GrdIDExtractor(xml.sax.handler.ContentHandler):
+  """Extracts the IDs from messages in GRIT files"""
+  def __init__(self):
+    # Set of 'name' attributes collected from <message> elements.
+    self.id_set_ = set()
+
+  def startElement(self, name, attrs):
+    # SAX callback: record the id of every <message> element encountered.
+    if name == 'message':
+      self.id_set_.add(attrs['name'])
+
+  def allIDs(self):
+    """Return all the IDs found"""
+    # Copy so callers cannot mutate the extractor's internal state.
+    return self.id_set_.copy()
+
+
+def CheckForUnusedGrdIDsInSources(grd_files, src_dirs):
+ """Will collect the message ids out of the given GRD files and then scan
+ the source directories to try and figure out what ids are not currently
+ being used by any source.
+
+ grd_files:
+ A list of GRD files to collect the ids from.
+ src_dirs:
+ A list of directories to walk looking for source files.
+ """
+ # Collect all the ids into a large map
+ all_ids = set()
+ file_id_map = {}
+ for y in grd_files:
+ handler = GrdIDExtractor()
+ xml.sax.parse(y, handler)
+ files_ids = handler.allIDs()
+ file_id_map[y] = files_ids
+ all_ids |= files_ids
+
+
+ # The regex that will be used to check sources
+ id_regex = re.compile('IDS_[A-Z0-9_]+')
+
+ # Make sure the regex matches every id found.
+ got_err = False
+ for x in all_ids:
+ match = id_regex.search(x)
+ if match is None:
+ print 'ERROR: "%s" did not match our regex' % (x)
+ got_err = True
+ if not match.group(0) is x:
+ print 'ERROR: "%s" did not fully match our regex' % (x)
+ got_err = True
+ if got_err:
+ return 1
+
+ # The regex for deciding what is a source file
+ src_regex = re.compile('\.(([chm])|(mm)|(cc)|(cp)|(cpp)|(xib)|(py))$')
+
+ ids_left = all_ids.copy()
+
+ # Scanning time.
+ for src_dir in src_dirs:
+ for root, dirs, files in os.walk(src_dir):
+ # Remove svn directories from recursion
+ if '.svn' in dirs:
+ dirs.remove('.svn')
+ for file in files:
+ if src_regex.search(file.lower()):
+ full_path = os.path.join(root, file)
+ src_file_contents = open(full_path).read()
+ for match in sorted(set(id_regex.findall(src_file_contents))):
+ if match in ids_left:
+ ids_left.remove(match)
+ if DEBUG:
+ if not match in all_ids:
+ print '%s had "%s", which was not in the found IDs' % \
+ (full_path, match)
+ elif DEBUG > 1:
+ full_path = os.path.join(root, file)
+ print 'Skipping %s.' % (full_path)
+
+ # Anything left?
+ if len(ids_left) > 0:
+ print 'The following ids are in GRD files, but *appear* to be unused:'
+ for file_path, file_ids in file_id_map.iteritems():
+ missing = ids_left.intersection(file_ids)
+ if len(missing) > 0:
+ print ' %s:' % (file_path)
+ print '\n'.join(' %s' % (x) for x in sorted(missing))
+
+ return 0
+
+
+def main():
+  """Parses sys.argv into GRD files and source dirs, then runs the check.
+
+  Any argument ending in .grd is treated as a GRD file; everything else is a
+  source directory. Chromium defaults are substituted for either list when
+  it is empty. Returns the exit code from CheckForUnusedGrdIDsInSources.
+  """
+  # script lives in src/tools
+  tools_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
+  src_dir = os.path.dirname(tools_dir)
+
+  # Collect the args into the right buckets
+  src_dirs = []
+  grd_files = []
+  for arg in sys.argv[1:]:
+    if arg.lower().endswith('.grd'):
+      grd_files.append(arg)
+    else:
+      src_dirs.append(arg)
+
+  # If no GRD files were given, default them:
+  if len(grd_files) == 0:
+    ash_base_dir = os.path.join(src_dir, 'ash')
+    chrome_dir = os.path.join(src_dir, 'chrome')
+    chrome_app_dir = os.path.join(chrome_dir, 'app')
+    chrome_app_res_dir = os.path.join(chrome_app_dir, 'resources')
+    device_base_dir = os.path.join(src_dir, 'device')
+    ui_dir = os.path.join(src_dir, 'ui')
+    ui_strings_dir = os.path.join(ui_dir, 'strings')
+    ui_chromeos_dir = os.path.join(ui_dir, 'chromeos')
+    grd_files = [
+      os.path.join(ash_base_dir, 'ash_strings.grd'),
+      os.path.join(ash_base_dir, 'resources', 'ash_resources.grd'),
+      os.path.join(chrome_app_dir, 'chromium_strings.grd'),
+      os.path.join(chrome_app_dir, 'generated_resources.grd'),
+      os.path.join(chrome_app_dir, 'google_chrome_strings.grd'),
+      os.path.join(chrome_app_res_dir, 'locale_settings.grd'),
+      os.path.join(chrome_app_res_dir, 'locale_settings_chromiumos.grd'),
+      os.path.join(chrome_app_res_dir, 'locale_settings_google_chromeos.grd'),
+      os.path.join(chrome_app_res_dir, 'locale_settings_linux.grd'),
+      os.path.join(chrome_app_res_dir, 'locale_settings_mac.grd'),
+      os.path.join(chrome_app_res_dir, 'locale_settings_win.grd'),
+      os.path.join(chrome_app_dir, 'theme', 'theme_resources.grd'),
+      os.path.join(chrome_dir, 'browser', 'browser_resources.grd'),
+      os.path.join(chrome_dir, 'common', 'common_resources.grd'),
+      os.path.join(chrome_dir, 'renderer', 'resources',
+                   'renderer_resources.grd'),
+      os.path.join(device_base_dir, 'bluetooth', 'bluetooth_strings.grd'),
+      os.path.join(src_dir, 'extensions', 'extensions_strings.grd'),
+      os.path.join(src_dir, 'ui', 'resources', 'ui_resources.grd'),
+      os.path.join(src_dir, 'ui', 'webui', 'resources', 'webui_resources.grd'),
+      os.path.join(ui_strings_dir, 'app_locale_settings.grd'),
+      os.path.join(ui_strings_dir, 'ui_strings.grd'),
+      os.path.join(ui_chromeos_dir, 'ui_chromeos_strings.grd'),
+    ]
+
+  # If no source directories were given, default them:
+  if len(src_dirs) == 0:
+    src_dirs = [
+      os.path.join(src_dir, 'app'),
+      os.path.join(src_dir, 'ash'),
+      os.path.join(src_dir, 'chrome'),
+      os.path.join(src_dir, 'components'),
+      os.path.join(src_dir, 'content'),
+      os.path.join(src_dir, 'device'),
+      os.path.join(src_dir, 'extensions'),
+      os.path.join(src_dir, 'ui'),
+      # nsNSSCertHelper.cpp has a bunch of ids
+      os.path.join(src_dir, 'third_party', 'mozilla_security_manager'),
+      os.path.join(chrome_dir, 'installer'),
+    ]
+
+  return CheckForUnusedGrdIDsInSources(grd_files, src_dirs)
+
+
+if __name__ == '__main__':
+  # Exit with the checker's return code (1 when an id fails validation).
+  sys.exit(main())
diff --git a/chromium/tools/checkbins/checkbins.py b/chromium/tools/checkbins/checkbins.py
new file mode 100755
index 00000000000..74ed0ae821a
--- /dev/null
+++ b/chromium/tools/checkbins/checkbins.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Makes sure that all EXE and DLL files in the provided directory were built
+correctly.
+
+In essence it runs a subset of BinScope tests ensuring that binaries have
+/NXCOMPAT, /DYNAMICBASE and /SAFESEH.
+"""
+
+import json
+import os
+import optparse
+import sys
+
+# Find /third_party/pefile based on current directory and script path.
+sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..',
+ 'third_party', 'pefile'))
+import pefile
+
+PE_FILE_EXTENSIONS = ['.exe', '.dll']
+# Bits of the PE optional header DllCharacteristics field, and the machine
+# type for 64-bit (AMD64) images — compared against pefile's parsed headers.
+DYNAMICBASE_FLAG = 0x0040
+NXCOMPAT_FLAG = 0x0100
+NO_SEH_FLAG = 0x0400
+MACHINE_TYPE_AMD64 = 0x8664
+
+# Please do not add your file here without confirming that it indeed doesn't
+# require /NXCOMPAT and /DYNAMICBASE.  Contact cpu@chromium.org or your local
+# Windows guru for advice.
+EXCLUDED_FILES = ['chrome_frame_mini_installer.exe',
+                  'mini_installer.exe',
+                  'wow_helper.exe'
+                  ]
+
+def IsPEFile(path):
+  """True for on-disk .exe/.dll files that are not explicitly excluded."""
+  return (os.path.isfile(path) and
+          os.path.splitext(path)[1].lower() in PE_FILE_EXTENSIONS and
+          os.path.basename(path) not in EXCLUDED_FILES)
+
+def main(options, args):
+  """Checks every PE file in args[0] for /DYNAMICBASE, /NXCOMPAT, /SAFESEH
+  and (for 64-bit images) a >4GB ImageBase.
+
+  Prints a PASS/FAIL line per check, optionally writes failing paths to a
+  JSON file, and exits the process with status 1 if any file failed.
+  """
+  directory = args[0]
+  pe_total = 0
+  pe_passed = 0
+
+  # Paths of binaries that failed at least one check.
+  failures = []
+
+  # NOTE(review): 'file' shadows the builtin of the same name; harmless here
+  # but worth renaming in a follow-up.
+  for file in os.listdir(directory):
+    path = os.path.abspath(os.path.join(directory, file))
+    if not IsPEFile(path):
+      continue
+    # fast_load skips full parsing; only the LOAD_CONFIG directory (needed
+    # for the SEH table check) is parsed explicitly below.
+    pe = pefile.PE(path, fast_load=True)
+    pe.parse_data_directories(directories=[
+        pefile.DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG']])
+    pe_total = pe_total + 1
+    success = True
+
+    # Check for /DYNAMICBASE.
+    if pe.OPTIONAL_HEADER.DllCharacteristics & DYNAMICBASE_FLAG:
+      if options.verbose:
+        print "Checking %s for /DYNAMICBASE... PASS" % path
+    else:
+      success = False
+      print "Checking %s for /DYNAMICBASE... FAIL" % path
+
+    # Check for /NXCOMPAT.
+    if pe.OPTIONAL_HEADER.DllCharacteristics & NXCOMPAT_FLAG:
+      if options.verbose:
+        print "Checking %s for /NXCOMPAT... PASS" % path
+    else:
+      success = False
+      print "Checking %s for /NXCOMPAT... FAIL" % path
+
+    # Check for /SAFESEH. Binaries should meet one of the following
+    # criteria:
+    #   1) Have no SEH table as indicated by the DLL characteristics
+    #   2) Have a LOAD_CONFIG section containing a valid SEH table
+    #   3) Be a 64-bit binary, in which case /SAFESEH isn't required
+    #
+    # Refer to the following MSDN article for more information:
+    # http://msdn.microsoft.com/en-us/library/9a89h429.aspx
+    if (pe.OPTIONAL_HEADER.DllCharacteristics & NO_SEH_FLAG or
+        (hasattr(pe, "DIRECTORY_ENTRY_LOAD_CONFIG") and
+         pe.DIRECTORY_ENTRY_LOAD_CONFIG.struct.SEHandlerCount > 0 and
+         pe.DIRECTORY_ENTRY_LOAD_CONFIG.struct.SEHandlerTable != 0) or
+        pe.FILE_HEADER.Machine == MACHINE_TYPE_AMD64):
+      if options.verbose:
+        print "Checking %s for /SAFESEH... PASS" % path
+    else:
+      success = False
+      print "Checking %s for /SAFESEH... FAIL" % path
+
+    # ASLR is weakened on Windows 64-bit when the ImageBase is below 4GB
+    # (because the loader will never rebase the image above 4GB).
+    if pe.FILE_HEADER.Machine == MACHINE_TYPE_AMD64:
+      if pe.OPTIONAL_HEADER.ImageBase <= 0xFFFFFFFF:
+        print("Checking %s ImageBase (0x%X < 4GB)... FAIL" %
+              (path, pe.OPTIONAL_HEADER.ImageBase))
+        success = False
+      elif options.verbose:
+        print("Checking %s ImageBase (0x%X > 4GB)... PASS" %
+              (path, pe.OPTIONAL_HEADER.ImageBase))
+
+    # Update tally.
+    if success:
+      pe_passed = pe_passed + 1
+    else:
+      failures.append(path)
+
+  print "Result: %d files found, %d files passed" % (pe_total, pe_passed)
+
+  if options.json:
+    with open(options.json, 'w') as f:
+      json.dump(failures, f)
+
+  if pe_passed != pe_total:
+    sys.exit(1)
+
+if __name__ == '__main__':
+  usage = "Usage: %prog [options] DIRECTORY"
+  option_parser = optparse.OptionParser(usage=usage)
+  option_parser.add_option("-v", "--verbose", action="store_true",
+                           default=False, help="Print debug logging")
+  option_parser.add_option("--json", help="Path to JSON output file")
+  options, args = option_parser.parse_args()
+  if not args:
+    # No directory given: print help and exit 0 (not treated as a failure).
+    option_parser.print_help()
+    sys.exit(0)
+  main(options, args)
diff --git a/chromium/tools/checklicenses/OWNERS b/chromium/tools/checklicenses/OWNERS
new file mode 100644
index 00000000000..2abfa66e8a9
--- /dev/null
+++ b/chromium/tools/checklicenses/OWNERS
@@ -0,0 +1,3 @@
+set noparent
+phajdan.jr@chromium.org
+thestig@chromium.org
diff --git a/chromium/tools/checklicenses/checklicenses.py b/chromium/tools/checklicenses/checklicenses.py
new file mode 100755
index 00000000000..cfd418b2e7f
--- /dev/null
+++ b/chromium/tools/checklicenses/checklicenses.py
@@ -0,0 +1,764 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Makes sure that all files contain proper licensing information."""
+
+
+import json
+import optparse
+import os.path
+import subprocess
+import sys
+
+
def PrintUsage():
  """Prints command-line usage help for checklicenses.py."""
  # A parenthesized single argument is identical under the Python 2 print
  # statement, so this is behavior-preserving.
  print("""Usage: python checklicenses.py [--root <root>] [tocheck]
  --root Specifies the repository root. This defaults to "../.." relative
         to the script file. This will be correct given the normal location
         of the script in "<root>/tools/checklicenses".

  --ignore-suppressions Ignores path-specific license whitelist. Useful when
                        trying to remove a suppression/whitelist entry.

  tocheck Specifies the directory, relative to root, to check. This defaults
          to "." so it checks everything.

Examples:
  python checklicenses.py
  python checklicenses.py --root ~/chromium/src third_party""")
+
+
+WHITELISTED_LICENSES = [
+ 'APSL (v2) BSD (4 clause)',
+ 'APSL (v2)',
+ 'Anti-Grain Geometry',
+ 'Apache (v2.0) BSD (2 clause)',
+ 'Apache (v2.0) BSD-like',
+ 'Apache (v2.0) GPL (v2)',
+ 'Apache (v2.0)',
+ 'Apple MIT', # https://fedoraproject.org/wiki/Licensing/Apple_MIT_License
+ 'BSD (2 clause) ISC',
+ 'BSD (2 clause) MIT/X11 (BSD like)',
+ 'BSD (2 clause)',
+ 'BSD (3 clause) GPL (v2)',
+ 'BSD (3 clause) ISC',
+ 'BSD (3 clause) LGPL (v2 or later)',
+ 'BSD (3 clause) LGPL (v2.1 or later)',
+ 'BSD (3 clause) MIT/X11 (BSD like)',
+ 'BSD (3 clause)',
+ 'BSD (4 clause)',
+ 'BSD',
+ 'BSD-like',
+
+ # TODO(phajdan.jr): Make licensecheck not print BSD-like twice.
+ 'BSD MIT/X11 (BSD like)',
+ 'BSD-like MIT/X11 (BSD like)',
+
+ 'BSL (v1.0)',
+ 'BSL (v1) LGPL (v2.1 or later)',
+ 'FreeType (BSD like) with patent clause',
+ 'FreeType (BSD like)',
+ 'GPL (v2 or later) with Bison parser exception',
+ 'GPL (v2 or later) with libtool exception',
+ 'GPL (v2) LGPL (v2.1 or later)',
+ 'GPL (v3 or later) LGPL (v2.1 or later) with Bison parser exception',
+ 'GPL (v3 or later) with Bison parser exception',
+ 'GPL with Bison parser exception',
+ 'ISC',
+ 'Independent JPEG Group License',
+ 'LGPL (unversioned/unknown version)',
+ 'LGPL (v2 or later)',
+ 'LGPL (v2)',
+ 'LGPL (v2.1 or later)',
+ 'LGPL (v2.1)',
+ 'LGPL (v3 or later)',
+ 'MIT/X11 (BSD like) LGPL (v2.1 or later)',
+ 'MIT/X11 (BSD like)',
+ 'MPL (v1.0) LGPL (v2 or later)',
+ 'MPL (v1.1) BSD (3 clause) GPL (v2) LGPL (v2.1 or later)',
+ 'MPL (v1.1) BSD (3 clause) LGPL (v2.1 or later)',
+ 'MPL (v1.1) BSD-like GPL (unversioned/unknown version)',
+ 'MPL (v1.1) BSD-like GPL (v2) LGPL (v2.1 or later)',
+ 'MPL (v1.1) BSD-like',
+ 'MPL (v1.1) GPL (unversioned/unknown version)',
+ 'MPL (v1.1) GPL (v2) LGPL (v2 or later)',
+ 'MPL (v1.1) GPL (v2) LGPL (v2.1 or later)',
+ 'MPL (v1.1) GPL (v2)',
+ 'MPL (v1.1) LGPL (v2 or later)',
+ 'MPL (v1.1) LGPL (v2.1 or later)',
+ 'MPL (v1.1)',
+ 'MPL (v2.0)',
+ 'Ms-PL',
+ 'Public domain BSD (3 clause)',
+ 'Public domain BSD',
+ 'Public domain BSD-like',
+ 'Public domain LGPL (v2.1 or later)',
+ 'Public domain University of Illinois/NCSA Open Source License (BSD like)',
+ 'Public domain',
+ 'SGI Free Software License B',
+ 'SunSoft (BSD like)',
+ 'libpng',
+ 'zlib/libpng',
+ 'University of Illinois/NCSA Open Source License (BSD like)',
+ ('University of Illinois/NCSA Open Source License (BSD like) '
+ 'MIT/X11 (BSD like)'),
+]
+
+
+PATH_SPECIFIC_WHITELISTED_LICENSES = {
+ 'base/third_party/icu': [ # http://crbug.com/98087
+ 'UNKNOWN',
+ ],
+
+ 'base/third_party/libevent': [ # http://crbug.com/98309
+ 'UNKNOWN',
+ ],
+
+ # http://code.google.com/p/google-breakpad/issues/detail?id=450
+ 'breakpad/src': [
+ 'UNKNOWN',
+ ],
+
+ 'buildtools/third_party/libc++/trunk/test': [
+ # http://llvm.org/bugs/show_bug.cgi?id=25980
+ 'UNKNOWN',
+ ],
+ # http://llvm.org/bugs/show_bug.cgi?id=25976
+ 'buildtools/third_party/libc++/trunk/src/include/atomic_support.h': [
+ 'UNKNOWN'
+ ],
+ 'buildtools/third_party/libc++/trunk/utils/gen_link_script': [ 'UNKNOWN' ],
+ 'buildtools/third_party/libc++/trunk/utils/not': [ 'UNKNOWN' ],
+ 'buildtools/third_party/libc++/trunk/utils/sym_check': [ 'UNKNOWN' ],
+ 'buildtools/third_party/libc++abi/trunk/test': [ 'UNKNOWN' ],
+
+ 'chrome/common/extensions/docs/examples': [ # http://crbug.com/98092
+ 'UNKNOWN',
+ ],
+ # This contains files copied from elsewhere from the tree. Since the copied
+ # directories might have suppressions below (like simplejson), whitelist the
+ # whole directory. This is also not shipped code.
+ 'chrome/common/extensions/docs/server2/third_party': [
+ 'UNKNOWN',
+ ],
+ 'courgette/third_party/bsdiff_create.cc': [ # http://crbug.com/98095
+ 'UNKNOWN',
+ ],
+ 'courgette/third_party/qsufsort.h': [ # http://crbug.com/98095
+ 'UNKNOWN',
+ ],
+ 'native_client': [ # http://crbug.com/98099
+ 'UNKNOWN',
+ ],
+ 'native_client/toolchain': [
+ 'BSD GPL (v2 or later)',
+ 'BSD (2 clause) GPL (v2 or later)',
+ 'BSD (3 clause) GPL (v2 or later)',
+ 'BSD (4 clause) ISC',
+ 'BSL (v1.0) GPL',
+ 'BSL (v1.0) GPL (v3.1)',
+ 'GPL',
+ 'GPL (unversioned/unknown version)',
+ 'GPL (v2)',
+ 'GPL (v2 or later)',
+ 'GPL (v3.1)',
+ 'GPL (v3 or later)',
+ 'MPL (v1.1) LGPL (unversioned/unknown version)',
+ ],
+
+ # The project is BSD-licensed but the individual files do not have
+ # consistent license headers. Also, this is just used in a utility
+ # and not shipped. https://github.com/waylan/Python-Markdown/issues/435
+ 'third_party/Python-Markdown': [
+ 'UNKNOWN',
+ ],
+
+ 'third_party/WebKit': [
+ 'UNKNOWN',
+ ],
+
+ # http://code.google.com/p/angleproject/issues/detail?id=217
+ 'third_party/angle': [
+ 'UNKNOWN',
+ ],
+
+ # http://crbug.com/222828
+ # http://bugs.python.org/issue17514
+ 'third_party/chromite/third_party/argparse.py': [
+ 'UNKNOWN',
+ ],
+
+ # http://crbug.com/326117
+ # https://bitbucket.org/chrisatlee/poster/issue/21
+ 'third_party/chromite/third_party/poster': [
+ 'UNKNOWN',
+ ],
+
+ # http://crbug.com/333508
+ 'buildtools/clang_format/script': [
+ 'UNKNOWN',
+ ],
+
+ # https://mail.python.org/pipermail/cython-devel/2014-July/004062.html
+ 'third_party/cython': [
+ 'UNKNOWN',
+ ],
+
+ 'third_party/devscripts': [
+ 'GPL (v2 or later)',
+ ],
+ 'third_party/catapult/firefighter/default/tracing/third_party/devscripts': [
+ 'GPL (v2 or later)',
+ ],
+ 'third_party/catapult/tracing/third_party/devscripts': [
+ 'GPL (v2 or later)',
+ ],
+
+ # https://github.com/shazow/apiclient/issues/8
+ # MIT license.
+ 'third_party/catapult/third_party/apiclient': [
+ 'UNKNOWN',
+ ],
+
+ # https://bugs.launchpad.net/beautifulsoup/+bug/1481316
+ # MIT license.
+ 'third_party/catapult/third_party/beautifulsoup': [
+ 'UNKNOWN'
+ ],
+
+ # https://bitbucket.org/ned/coveragepy/issue/313/add-license-file-containing-2-3-or-4
+ # Apache (v2.0) license, not shipped
+ 'third_party/catapult/third_party/coverage': [
+ 'UNKNOWN'
+ ],
+
+ # https://code.google.com/p/graphy/issues/detail?id=6
+ # Apache (v2.0)
+ 'third_party/catapult/third_party/graphy': [
+ 'UNKNOWN',
+ ],
+
+ # https://github.com/GoogleCloudPlatform/gsutil/issues/305
+ ('third_party/catapult/third_party/gsutil/gslib/third_party/'
+ 'storage_apitools'): [
+ 'UNKNOWN',
+ ],
+
+ # https://github.com/google/apitools/issues/63
+ 'third_party/catapult/third_party/gsutil/third_party/apitools': [
+ 'UNKNOWN',
+ ],
+
+ # https://github.com/boto/boto/issues/3373
+ 'third_party/catapult/third_party/gsutil/third_party/boto': [
+ 'UNKNOWN',
+ ],
+
+ # https://bitbucket.org/cmcqueen1975/crcmod/issues/1/please-add-per-file-licenses
+ # Includes third_party/catapult/third_party/gsutil/third_party/crcmod_osx.
+ 'third_party/catapult/third_party/gsutil/third_party/crcmod': [
+ 'UNKNOWN',
+ ],
+
+ # https://github.com/jcgregorio/httplib2/issues/307
+ 'third_party/catapult/third_party/gsutil/third_party/httplib2': [
+ 'UNKNOWN',
+ ],
+
+ # https://github.com/google/oauth2client/issues/331
+ 'third_party/catapult/third_party/gsutil/third_party/oauth2client': [
+ 'UNKNOWN',
+ ],
+
+ # https://github.com/google/protorpc/issues/14
+ 'third_party/catapult/third_party/gsutil/third_party/protorpc': [
+ 'UNKNOWN',
+ ],
+
+ # https://sourceforge.net/p/pyasn1/tickets/4/
+ # Includes
+ # third_party/catapult/third_party/gsutil/third_party/pyasn1-modules.
+ 'third_party/catapult/third_party/gsutil/third_party/pyasn1': [
+ 'UNKNOWN',
+ ],
+
+ # https://github.com/pnpnpn/retry-decorator/issues/4
+ 'third_party/catapult/third_party/gsutil/third_party/retry-decorator': [
+ 'UNKNOWN',
+ ],
+
+ # https://bitbucket.org/sybren/python-rsa/issues/28/please-add-per-file-licenses
+ 'third_party/catapult/third_party/gsutil/third_party/rsa': [
+ 'UNKNOWN',
+ ],
+
+ # https://bitbucket.org/gutworth/six/issues/137/please-add-per-file-licenses
+ # Already fixed upstream. https://crbug.com/573341
+ 'third_party/catapult/third_party/gsutil/third_party/six': [
+ 'UNKNOWN',
+ ],
+
+ # https://github.com/html5lib/html5lib-python/issues/125
+ # MIT license.
+ 'third_party/catapult/third_party/html5lib-python': [
+ 'UNKNOWN',
+ ],
+
+ # https://github.com/GoogleCloudPlatform/appengine-mapreduce/issues/71
+ # Apache (v2.0)
+ 'third_party/catapult/third_party/mapreduce': [
+ 'UNKNOWN',
+ ],
+
+ # https://code.google.com/p/webapp-improved/issues/detail?id=103
+ # Apache (v2.0).
+ 'third_party/catapult/third_party/webapp2': [
+ 'UNKNOWN',
+ ],
+
+ # https://github.com/Pylons/webob/issues/211
+ # MIT license.
+ 'third_party/catapult/third_party/WebOb': [
+ 'UNKNOWN',
+ ],
+
+ # https://github.com/Pylons/webtest/issues/141
+ # MIT license.
+ 'third_party/catapult/third_party/webtest': [
+ 'UNKNOWN',
+ ],
+
+ # https://bitbucket.org/ianb/paste/issues/12/add-license-headers-to-source-files
+ # MIT license.
+ 'third_party/catapult/third_party/Paste': [
+ 'UNKNOWN',
+ ],
+
+ 'third_party/expat/files/lib': [ # http://crbug.com/98121
+ 'UNKNOWN',
+ ],
+ 'third_party/ffmpeg': [
+ 'GPL',
+ 'GPL (v2)',
+ 'GPL (v2 or later)',
+ 'GPL (v3 or later)',
+ 'UNKNOWN', # http://crbug.com/98123
+ ],
+ 'third_party/fontconfig': [
+ # https://bugs.freedesktop.org/show_bug.cgi?id=73401
+ 'UNKNOWN',
+ ],
+ 'third_party/freetype2': [ # http://crbug.com/177319
+ 'UNKNOWN',
+ ],
+ 'third_party/freetype-android': [ # http://crbug.com/177319
+ 'UNKNOWN',
+ ],
+ 'third_party/hunspell': [ # http://crbug.com/98134
+ 'UNKNOWN',
+ ],
+ 'third_party/iccjpeg': [ # http://crbug.com/98137
+ 'UNKNOWN',
+ ],
+ 'third_party/icu': [ # http://crbug.com/98301
+ 'UNKNOWN',
+ ],
+ 'third_party/jmake': [ # Used only at build time.
+ 'GPL (v2)',
+ ],
+ 'third_party/jsoncpp/source': [
+ # https://github.com/open-source-parsers/jsoncpp/issues/234
+ 'UNKNOWN',
+ ],
+ 'third_party/junit/src': [
+ # Pulled in via DEPS for Android only.
+ # Eclipse Public License / not shipped.
+ # Bug filed but upstream prefers not to fix.
+ # https://github.com/junit-team/junit/issues/1132
+ 'UNKNOWN',
+ ],
+ 'third_party/lcov': [ # http://crbug.com/98304
+ 'UNKNOWN',
+ ],
+ 'third_party/lcov/contrib/galaxy/genflat.pl': [
+ 'GPL (v2 or later)',
+ ],
+ 'third_party/libjingle/source/talk': [ # http://crbug.com/98310
+ 'UNKNOWN',
+ ],
+ 'third_party/libjpeg_turbo': [ # http://crbug.com/98314
+ 'UNKNOWN',
+ ],
+
+ # Many liblouis files are mirrored but not used in the NaCl module.
+ # They are not excluded from the mirror because of lack of infrastructure
+ # support. Getting license headers added to the files where missing is
+ # tracked in https://github.com/liblouis/liblouis/issues/22.
+ 'third_party/liblouis/src': [
+ 'GPL (v3 or later)',
+ 'UNKNOWN',
+ ],
+
+ # The following files lack license headers, but are trivial.
+ 'third_party/libusb/src/libusb/os/poll_posix.h': [
+ 'UNKNOWN',
+ ],
+
+ 'third_party/libvpx/source': [ # http://crbug.com/98319
+ 'UNKNOWN',
+ ],
+ 'third_party/libxml': [
+ 'UNKNOWN',
+ ],
+ 'third_party/libxslt': [
+ 'UNKNOWN',
+ ],
+ 'third_party/lzma_sdk': [
+ 'UNKNOWN',
+ ],
+ 'third_party/mesa/src': [
+ 'GPL (v2)',
+ 'GPL (v3 or later)',
+ 'MIT/X11 (BSD like) GPL (v3 or later) with Bison parser exception',
+ 'UNKNOWN', # http://crbug.com/98450
+ ],
+ 'third_party/modp_b64': [
+ 'UNKNOWN',
+ ],
+ # Missing license headers in openh264 sources: https://github.com/cisco/openh264/issues/2233
+ 'third_party/openh264/src': [
+ 'UNKNOWN',
+ ],
+ 'third_party/openmax_dl/dl' : [
+ 'Khronos Group',
+ ],
+ 'third_party/opus/src/autogen.sh' : [ # https://trac.xiph.org/ticket/2253#ticket
+ 'UNKNOWN',
+ ],
+ 'third_party/boringssl': [
+ # There are some files in BoringSSL which came from OpenSSL and have no
+ # license in them. We don't wish to add the license header ourselves
+ # thus we don't expect to pass license checks.
+ 'UNKNOWN',
+ ],
+ 'third_party/molokocacao': [ # http://crbug.com/98453
+ 'UNKNOWN',
+ ],
+ 'third_party/ocmock/OCMock': [ # http://crbug.com/98454
+ 'UNKNOWN',
+ ],
+ 'third_party/protobuf': [ # http://crbug.com/98455
+ 'UNKNOWN',
+ ],
+
+ # https://bitbucket.org/ned/coveragepy/issue/313/add-license-file-containing-2-3-or-4
+ # BSD 2-clause license.
+ 'third_party/pycoverage': [
+ 'UNKNOWN',
+ ],
+
+ 'third_party/pyelftools': [ # http://crbug.com/222831
+ 'UNKNOWN',
+ ],
+ 'third_party/scons-2.0.1/engine/SCons': [ # http://crbug.com/98462
+ 'UNKNOWN',
+ ],
+ 'third_party/sfntly/src/java': [ # Apache 2.0, not shipped.
+ 'UNKNOWN',
+ ],
+ 'third_party/simplejson': [
+ 'UNKNOWN',
+ ],
+ 'third_party/skia': [ # http://crbug.com/98463
+ 'UNKNOWN',
+ ],
+ 'third_party/snappy/src': [ # http://crbug.com/98464
+ 'UNKNOWN',
+ ],
+ 'third_party/smhasher/src': [ # http://crbug.com/98465
+ 'UNKNOWN',
+ ],
+ 'third_party/speech-dispatcher/libspeechd.h': [
+ 'GPL (v2 or later)',
+ ],
+ 'third_party/sqlite': [
+ 'UNKNOWN',
+ ],
+
+ # New BSD license. http://crbug.com/98455
+ 'tools/swarming_client/third_party/google': [
+ 'UNKNOWN',
+ ],
+
+ # Apache v2.0.
+ 'tools/swarming_client/third_party/googleapiclient': [
+ 'UNKNOWN',
+ ],
+
+ # http://crbug.com/334668
+ # MIT license.
+ 'tools/swarming_client/third_party/httplib2': [
+ 'UNKNOWN',
+ ],
+
+ # http://crbug.com/334668
+ # Apache v2.0.
+ 'tools/swarming_client/third_party/oauth2client': [
+ 'UNKNOWN',
+ ],
+
+ # http://crbug.com/471372
+ # BSD
+ 'tools/swarming_client/third_party/pyasn1': [
+ 'UNKNOWN',
+ ],
+
+ # http://crbug.com/471372
+ # Apache v2.0.
+ 'tools/swarming_client/third_party/rsa': [
+ 'UNKNOWN',
+ ],
+
+ # https://github.com/kennethreitz/requests/issues/1610
+ 'tools/swarming_client/third_party/requests': [
+ 'UNKNOWN',
+ ],
+
+ # BSD License. http://bugzilla.maptools.org/show_bug.cgi?id=2532
+ 'third_party/pdfium/third_party/libtiff/tif_ojpeg.c': [
+ 'UNKNOWN',
+ ],
+ 'third_party/pdfium/third_party/libtiff/tiffvers.h': [
+ 'UNKNOWN',
+ ],
+ 'third_party/pdfium/third_party/libtiff/uvcode.h': [
+ 'UNKNOWN',
+ ],
+
+ 'third_party/talloc': [
+ 'GPL (v3 or later)',
+ 'UNKNOWN', # http://crbug.com/98588
+ ],
+ 'third_party/tcmalloc': [
+ 'UNKNOWN', # http://crbug.com/98589
+ ],
+ 'third_party/tlslite': [
+ 'UNKNOWN',
+ ],
+ # MIT license but some files contain no licensing info. e.g. autogen.sh.
+ # Files missing licensing info are not shipped.
+ 'third_party/wayland': [ # http://crbug.com/553573
+ 'UNKNOWN',
+ ],
+ 'third_party/webdriver': [ # http://crbug.com/98590
+ 'UNKNOWN',
+ ],
+
+ # https://github.com/html5lib/html5lib-python/issues/125
+ # https://github.com/KhronosGroup/WebGL/issues/435
+ 'third_party/webgl/src': [
+ 'UNKNOWN',
+ ],
+
+ 'third_party/webrtc': [ # http://crbug.com/98592
+ 'UNKNOWN',
+ ],
+ 'third_party/xdg-utils': [ # http://crbug.com/98593
+ 'UNKNOWN',
+ ],
+ 'third_party/yasm/source': [ # http://crbug.com/98594
+ 'UNKNOWN',
+ ],
+ 'third_party/zlib/contrib/minizip': [
+ 'UNKNOWN',
+ ],
+ 'third_party/zlib/trees.h': [
+ 'UNKNOWN',
+ ],
+ 'tools/emacs': [ # http://crbug.com/98595
+ 'UNKNOWN',
+ ],
+ 'tools/gyp/test': [
+ 'UNKNOWN',
+ ],
+ 'tools/python/google/__init__.py': [
+ 'UNKNOWN',
+ ],
+ 'tools/stats_viewer/Properties/AssemblyInfo.cs': [
+ 'UNKNOWN',
+ ],
+ 'tools/symsrc/pefile.py': [
+ 'UNKNOWN',
+ ],
+ # Not shipped, MIT license but the header files contain no licensing info.
+ 'tools/telemetry/third_party/altgraph': [
+ 'UNKNOWN',
+ ],
+ # Not shipped, MIT license but the header files contain no licensing info.
+ 'tools/telemetry/third_party/modulegraph': [
+ 'UNKNOWN',
+ ],
+ 'tools/telemetry/third_party/pyserial': [
+ # https://sourceforge.net/p/pyserial/feature-requests/35/
+ 'UNKNOWN',
+ ],
+ # Not shipped, MIT license but the header files contain no licensing info.
+ 'third_party/catapult/telemetry/third_party/altgraph': [
+ 'UNKNOWN',
+ ],
+ # Not shipped, MIT license but the header files contain no licensing info.
+ 'third_party/catapult/telemetry/third_party/modulegraph': [
+ 'UNKNOWN',
+ ],
+ 'third_party/catapult/telemetry/third_party/pyserial': [
+ # https://sourceforge.net/p/pyserial/feature-requests/35/
+ 'UNKNOWN',
+ ],
+}
+
+EXCLUDED_PATHS = [
+ # Don't check generated files
+ 'out/',
+
+ # Don't check downloaded goma client binaries
+ 'build/goma/client',
+
+ # Don't check sysroot directories
+ 'build/linux/debian_wheezy_amd64-sysroot',
+ 'build/linux/debian_wheezy_arm-sysroot',
+ 'build/linux/debian_wheezy_i386-sysroot',
+ 'build/linux/debian_wheezy_mips-sysroot',
+]
+
+
def check_licenses(options, args):
  """Runs licensecheck.pl over the tree and reports non-whitelisted licenses.

  Args:
    options: optparse options object; base_directory, verbose,
        ignore_suppressions and json are read here.
    args: positional arguments; at most one, the directory (relative to the
        base directory) to check.

  Returns:
    0 when every file's license is whitelisted or suppressed, 1 otherwise
    (including when licensecheck.pl itself fails or too many arguments are
    given).
  """
  # Figure out which directory we have to check.
  if len(args) == 0:
    # No directory to check specified, use the repository root.
    start_dir = options.base_directory
  elif len(args) == 1:
    # Directory specified. Start here. It's supposed to be relative to the
    # base directory.
    start_dir = os.path.abspath(os.path.join(options.base_directory, args[0]))
  else:
    # More than one argument, we don't handle this.
    PrintUsage()
    return 1

  print "Using base directory:", options.base_directory
  print "Checking:", start_dir
  print

  # licensecheck.pl is vendored in the tree under third_party/devscripts.
  licensecheck_path = os.path.abspath(os.path.join(options.base_directory,
                                                   'third_party',
                                                   'devscripts',
                                                   'licensecheck.pl'))

  # -l 100: scan the first 100 lines of each file; -r: recurse into start_dir.
  licensecheck = subprocess.Popen([licensecheck_path,
                                   '-l', '100',
                                   '-r', start_dir],
                                  stdout=subprocess.PIPE,
                                  stderr=subprocess.PIPE)
  stdout, stderr = licensecheck.communicate()
  if options.verbose:
    print '----------- licensecheck stdout -----------'
    print stdout
    print '--------- end licensecheck stdout ---------'
  if licensecheck.returncode != 0 or stderr:
    print '----------- licensecheck stderr -----------'
    print stderr
    print '--------- end licensecheck stderr ---------'
    print "\nFAILED\n"
    return 1

  used_suppressions = set()
  errors = []

  # Each licensecheck output line has the form '<filename>: <license>'.
  for line in stdout.splitlines():
    filename, license = line.split(':', 1)
    filename = os.path.relpath(filename.strip(), options.base_directory)

    # Check if the file belongs to one of the excluded paths.
    if any((filename.startswith(path) for path in EXCLUDED_PATHS)):
      continue

    # For now we're just interested in the license.
    license = license.replace('*No copyright*', '').strip()

    # Skip generated files.
    if 'GENERATED FILE' in license:
      continue

    if license in WHITELISTED_LICENSES:
      continue

    # Path-specific suppressions apply only when the prefix matches AND the
    # reported license is listed for that prefix.
    if not options.ignore_suppressions:
      matched_prefixes = [
          prefix for prefix in PATH_SPECIFIC_WHITELISTED_LICENSES
          if filename.startswith(prefix) and
          license in PATH_SPECIFIC_WHITELISTED_LICENSES[prefix]]
      if matched_prefixes:
        used_suppressions.update(set(matched_prefixes))
        continue

    errors.append({'filename': filename, 'license': license})

  if options.json:
    with open(options.json, 'w') as f:
      json.dump(errors, f)

  if errors:
    for error in errors:
      print "'%s' has non-whitelisted license '%s'" % (
          error['filename'], error['license'])
    print "\nFAILED\n"
    print "Please read",
    print "http://www.chromium.org/developers/adding-3rd-party-libraries"
    print "for more info how to handle the failure."
    print
    print "Please respect OWNERS of checklicenses.py. Changes violating"
    print "this requirement may be reverted."

    # Do not print unused suppressions so that above message is clearly
    # visible and gets proper attention. Too much unrelated output
    # would be distracting and make the important points easier to miss.

    return 1

  print "\nSUCCESS\n"

  # Only report stale suppressions on a full-tree run; a partial run would
  # report suppressions for paths it never visited.
  if not len(args):
    # NOTE: iterkeys() is Python 2 only.
    unused_suppressions = set(
        PATH_SPECIFIC_WHITELISTED_LICENSES.iterkeys()).difference(
            used_suppressions)
    if unused_suppressions:
      print "\nNOTE: unused suppressions detected:\n"
      print '\n'.join(unused_suppressions)

  return 0
+
+
def main():
  """Parses command-line flags and runs the license check.

  Returns the exit status from check_licenses() (0 on success, non-zero on
  failure), suitable for passing to sys.exit().
  """
  # By default assume this script lives at <root>/tools/checklicenses.
  default_root = os.path.abspath(
      os.path.join(os.path.dirname(__file__), '..', '..'))
  parser = optparse.OptionParser()
  parser.add_option('--root', default=default_root,
                    dest='base_directory',
                    help='Specifies the repository root. This defaults '
                         'to "../.." relative to the script file, which '
                         'will normally be the repository root.')
  parser.add_option('-v', '--verbose', action='store_true',
                    default=False, help='Print debug logging')
  parser.add_option('--ignore-suppressions',
                    action='store_true',
                    default=False,
                    help='Ignore path-specific license whitelist.')
  parser.add_option('--json', help='Path to JSON output file')
  options, args = parser.parse_args()
  return check_licenses(options, args)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/checkperms/OWNERS b/chromium/tools/checkperms/OWNERS
new file mode 100644
index 00000000000..1967bf567e8
--- /dev/null
+++ b/chromium/tools/checkperms/OWNERS
@@ -0,0 +1 @@
+thestig@chromium.org
diff --git a/chromium/tools/checkperms/PRESUBMIT.py b/chromium/tools/checkperms/PRESUBMIT.py
new file mode 100644
index 00000000000..b7041c8864f
--- /dev/null
+++ b/chromium/tools/checkperms/PRESUBMIT.py
@@ -0,0 +1,27 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Top-level presubmit script for checkperms.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details on the presubmit API built into depot_tools.
+"""
+
+
+def CommonChecks(input_api, output_api):
+ output = []
+ output.extend(input_api.canned_checks.RunPylint(input_api, output_api))
+ # Run it like if it were a unit test.
+ output.extend(
+ input_api.canned_checks.RunUnitTests(
+ input_api, output_api, ['./checkperms.py']))
+ return output
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ return CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ return CommonChecks(input_api, output_api)
diff --git a/chromium/tools/checkperms/checkperms.py b/chromium/tools/checkperms/checkperms.py
new file mode 100755
index 00000000000..81b0f4f0ba9
--- /dev/null
+++ b/chromium/tools/checkperms/checkperms.py
@@ -0,0 +1,486 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Makes sure files have the right permissions.
+
+Some developers have broken SCM configurations that flip the executable
+permission on for no good reason. Unix developers who run ls --color will then
+see .cc files in green and get confused.
+
+- For file extensions that must be executable, add it to EXECUTABLE_EXTENSIONS.
+- For file extensions that must not be executable, add it to
+ NOT_EXECUTABLE_EXTENSIONS.
+- To ignore all the files inside a directory, add it to IGNORED_PATHS.
+- For file base names whose executable state is ambiguous and that should not
+  be checked for a shebang, add them to IGNORED_FILENAMES.
+
+Any file not matching the above will be opened and inspected for a shebang or
+an ELF header. If this does not match the executable bit on the file, the
+file will be flagged.
+
+Note that all directory separators must be slashes (Unix-style) and not
+backslashes. All directories should be relative to the source root and all
+file paths should be only lowercase.
+"""
+
+import json
+import logging
+import optparse
+import os
+import stat
+import string
+import subprocess
+import sys
+
+#### USER EDITABLE SECTION STARTS HERE ####
+
+# Files with these extensions must have executable bit set.
+#
+# Case-sensitive.
+EXECUTABLE_EXTENSIONS = (
+ 'bat',
+ 'dll',
+ 'exe',
+)
+
+# Files for which the executable bit may or may not be set.
+IGNORED_EXTENSIONS = (
+ 'dylib',
+)
+
+# These files must have executable bit set.
+#
+# Case-insensitive, lower-case only.
+EXECUTABLE_PATHS = (
+ 'chrome/test/data/app_shim/app_shim_32_bit.app/contents/'
+ 'macos/app_mode_loader',
+ 'chrome/test/data/extensions/uitest/plugins/plugin.plugin/contents/'
+ 'macos/testnetscapeplugin',
+ 'chrome/test/data/extensions/uitest/plugins_private/plugin.plugin/contents/'
+ 'macos/testnetscapeplugin',
+)
+
+# These files must not have the executable bit set. This is mainly a performance
+# optimization as these files are not checked for shebang. The list was
+# partially generated from:
+# git ls-files | grep "\\." | sed 's/.*\.//' | sort | uniq -c | sort -b -g
+#
+# Case-sensitive.
+NON_EXECUTABLE_EXTENSIONS = (
+ '1',
+ '3ds',
+ 'S',
+ 'am',
+ 'applescript',
+ 'asm',
+ 'c',
+ 'cc',
+ 'cfg',
+ 'chromium',
+ 'cpp',
+ 'crx',
+ 'cs',
+ 'css',
+ 'cur',
+ 'def',
+ 'der',
+ 'expected',
+ 'gif',
+ 'grd',
+ 'gyp',
+ 'gypi',
+ 'h',
+ 'hh',
+ 'htm',
+ 'html',
+ 'hyph',
+ 'ico',
+ 'idl',
+ 'java',
+ 'jpg',
+ 'js',
+ 'json',
+ 'm',
+ 'm4',
+ 'mm',
+ 'mms',
+ 'mock-http-headers',
+ 'nexe',
+ 'nmf',
+ 'onc',
+ 'pat',
+ 'patch',
+ 'pdf',
+ 'pem',
+ 'plist',
+ 'png',
+ 'proto',
+ 'rc',
+ 'rfx',
+ 'rgs',
+ 'rules',
+ 'spec',
+ 'sql',
+ 'srpc',
+ 'svg',
+ 'tcl',
+ 'test',
+ 'tga',
+ 'txt',
+ 'vcproj',
+ 'vsprops',
+ 'webm',
+ 'word',
+ 'xib',
+ 'xml',
+ 'xtb',
+ 'zip',
+)
+
+# These files must not have executable bit set.
+#
+# Case-insensitive, lower-case only.
+NON_EXECUTABLE_PATHS = (
+ 'build/android/tests/symbolize/liba.so',
+ 'build/android/tests/symbolize/libb.so',
+ 'chrome/installer/mac/sign_app.sh.in',
+ 'chrome/installer/mac/sign_versioned_dir.sh.in',
+ 'chrome/test/data/extensions/uitest/plugins/plugin32.so',
+ 'chrome/test/data/extensions/uitest/plugins/plugin64.so',
+ 'chrome/test/data/extensions/uitest/plugins_private/plugin32.so',
+ 'chrome/test/data/extensions/uitest/plugins_private/plugin64.so',
+ 'courgette/testdata/elf-32-1',
+ 'courgette/testdata/elf-32-2',
+ 'courgette/testdata/elf-64',
+)
+
+# File names that are always whitelisted. (These are mostly autoconf spew.)
+#
+# Case-sensitive.
+IGNORED_FILENAMES = (
+ 'config.guess',
+ 'config.sub',
+ 'configure',
+ 'depcomp',
+ 'install-sh',
+ 'missing',
+ 'mkinstalldirs',
+ 'naclsdk',
+ 'scons',
+)
+
+# File paths starting with one of these will be ignored as well.
+# Please consider fixing your file permissions, rather than adding to this list.
+#
+# Case-insensitive, lower-case only.
+IGNORED_PATHS = (
+ 'base/third_party/libevent/autogen.sh',
+ 'base/third_party/libevent/test/test.sh',
+ 'native_client_sdk/src/build_tools/sdk_tools/third_party/fancy_urllib/'
+ '__init__.py',
+ 'out/',
+ # TODO(maruel): Fix these.
+ 'third_party/bintrees/',
+ 'third_party/closure_linter/',
+ 'third_party/devscripts/licensecheck.pl.vanilla',
+ 'third_party/hyphen/',
+ 'third_party/lcov-1.9/contrib/galaxy/conglomerate_functions.pl',
+ 'third_party/lcov-1.9/contrib/galaxy/gen_makefile.sh',
+ 'third_party/lcov/contrib/galaxy/conglomerate_functions.pl',
+ 'third_party/lcov/contrib/galaxy/gen_makefile.sh',
+ 'third_party/libxml/linux/xml2-config',
+ 'third_party/libxml/src/ltmain.sh',
+ 'third_party/mesa/',
+ 'third_party/protobuf/',
+ 'third_party/python_gflags/gflags.py',
+ 'third_party/sqlite/',
+ 'third_party/talloc/script/mksyms.sh',
+ 'third_party/tcmalloc/',
+ 'third_party/tlslite/setup.py',
+ # TODO(nednguyen): Remove this when telemetry is moved to catapult
+ 'tools/telemetry/third_party/',
+)
+
+#### USER EDITABLE SECTION ENDS HERE ####
+
+assert (set(EXECUTABLE_EXTENSIONS) & set(IGNORED_EXTENSIONS) &
+ set(NON_EXECUTABLE_EXTENSIONS) == set())
+assert set(EXECUTABLE_PATHS) & set(NON_EXECUTABLE_PATHS) == set()
+
+VALID_CHARS = set(string.ascii_lowercase + string.digits + '/-_.')
+for paths in (EXECUTABLE_PATHS, NON_EXECUTABLE_PATHS, IGNORED_PATHS):
+ assert all([set(path).issubset(VALID_CHARS) for path in paths])
+
+
def capture(cmd, cwd):
  """Runs |cmd| in |cwd| and returns its stdout.

  The exit code and stderr are ignored.
  """
  logging.debug('%s; cwd=%s' % (' '.join(cmd), cwd))
  # Force English output so callers can parse tool messages reliably.
  child_env = dict(os.environ, LANGUAGE='en_US.UTF-8')
  proc = subprocess.Popen(
      cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd,
      env=child_env)
  stdout, _ = proc.communicate()
  return stdout
+
+
def get_git_root(dir_path):
  """Returns the top of the git checkout containing dir_path, or None."""
  top = capture(['git', 'rev-parse', '--show-toplevel'], dir_path).strip()
  # An empty string means dir_path is not inside a git checkout.
  return top if top else None
+
+
def is_ignored(rel_path):
  """Returns True if rel_path is in our whitelist of files to ignore."""
  # Comparisons are done on the lower-cased path; the whitelists are
  # lower-case only.
  lowered = rel_path.lower()
  if os.path.basename(lowered) in IGNORED_FILENAMES:
    return True
  return lowered.startswith(IGNORED_PATHS)
+
+
def must_be_executable(rel_path):
  """True when the file's extension or exact (lower-cased) path requires the
  executable bit to be set.
  """
  ext = os.path.splitext(rel_path)[1][1:]
  return ext in EXECUTABLE_EXTENSIONS or rel_path.lower() in EXECUTABLE_PATHS
+
+
def ignored_extension(rel_path):
  """True when the extension marks a file whose executable bit may be in
  either state.
  """
  ext = os.path.splitext(rel_path)[1][1:]
  return ext in IGNORED_EXTENSIONS
+
+
def must_not_be_executable(rel_path):
  """True when the file's extension or exact (lower-cased) path forbids the
  executable bit.
  """
  ext = os.path.splitext(rel_path)[1][1:]
  return (ext in NON_EXECUTABLE_EXTENSIONS or
          rel_path.lower() in NON_EXECUTABLE_PATHS)
+
+
def has_executable_bit(full_path):
  """Returns True if any of the user/group/other executable bits is set."""
  exec_mask = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
  mode = os.stat(full_path).st_mode
  return bool(mode & exec_mask)
+
+
def has_shebang_or_is_elf(full_path):
  """Returns a (has_shebang, is_elf) tuple for the file at |full_path|.

  has_shebang is True when the first bytes look like an interpreter line
  ('#!/' or '#! /'); is_elf is True when they are the ELF magic number.

  full_path is the absolute path to the file.
  """
  with open(full_path, 'rb') as f:
    data = f.read(4)
    # The file is opened in binary mode, so compare against bytes literals:
    # under Python 3 a str/bytes comparison is always False, which would make
    # both flags silently report False. The b'' prefix is a no-op on
    # Python 2.6+, so Python 2 behavior is unchanged.
    return (data[:3] == b'#!/' or data == b'#! /', data == b'\x7fELF')
+
+
def check_file(root_path, rel_path):
  """Checks the permissions of the file whose path is root_path + rel_path and
  returns an error if it is inconsistent. Returns None on success.

  It is assumed that the file is not ignored by is_ignored().

  If the file name is matched with must_be_executable() or
  must_not_be_executable(), only its executable bit is checked.
  Otherwise, the first few bytes of the file are read to verify if it has a
  shebang or ELF header and compares this with the executable bit on the file.
  """
  full_path = os.path.join(root_path, rel_path)
  # Builds the error record returned to the caller for this file.
  def result_dict(error):
    return {
      'error': error,
      'full_path': full_path,
      'rel_path': rel_path,
    }
  try:
    bit = has_executable_bit(full_path)
  except OSError:
    # It's faster to catch exception than call os.path.islink(). The Chromium
    # tree may have invalid symlinks.
    return None

  # In the branches below, a bare `return` (None) means the file passed the
  # check for its category.
  if must_be_executable(rel_path):
    if not bit:
      return result_dict('Must have executable bit set')
    return
  if must_not_be_executable(rel_path):
    if bit:
      return result_dict('Must not have executable bit set')
    return
  if ignored_extension(rel_path):
    return

  # For the others, it depends on the file header.
  (shebang, elf) = has_shebang_or_is_elf(full_path)
  if bit != (shebang or elf):
    if bit:
      return result_dict('Has executable bit but not shebang or ELF header')
    if shebang:
      return result_dict('Has shebang but not executable bit')
    return result_dict('Has ELF header but not executable bit')
+
+
def check_files(root, files):
  """Runs check_file() on every non-ignored file in |files| and returns the
  list of error dicts for files with inconsistent permissions.
  """
  results = (check_file(root, rel) for rel in files if not is_ignored(rel))
  # check_file() returns None on success; keep only the error records.
  return [error for error in results if error]
+
+
class ApiBase(object):
  """Base class for recursively checking file permissions under a root.

  Subclasses customize list_dir() to enumerate files; check() drives the
  traversal and collects error records from check_file().
  """

  def __init__(self, root_dir, bare_output):
    self.root_dir = root_dir
    self.bare_output = bare_output
    # Total number of files checked.
    self.count = 0
    # Number of files whose header had to be read (extension was not
    # decisive either way).
    self.count_read_header = 0

  def check_file(self, rel_path):
    # Wraps the module-level check_file(), updating the counters.
    logging.debug('check_file(%s)' % rel_path)
    self.count += 1

    if (not must_be_executable(rel_path) and
        not must_not_be_executable(rel_path)):
      self.count_read_header += 1

    return check_file(self.root_dir, rel_path)

  def check_dir(self, rel_path):
    return self.check(rel_path)

  def check(self, start_dir):
    """Check the files in start_dir, recursively check its subdirectories."""
    errors = []
    items = self.list_dir(start_dir)
    logging.info('check(%s) -> %d' % (start_dir, len(items)))
    for item in items:
      full_path = os.path.join(self.root_dir, start_dir, item)
      rel_path = full_path[len(self.root_dir) + 1:]
      if is_ignored(rel_path):
        continue
      if os.path.isdir(full_path):
        # Depth first.
        errors.extend(self.check_dir(rel_path))
      else:
        error = self.check_file(rel_path)
        if error:
          errors.append(error)
    return errors

  def list_dir(self, start_dir):
    """Lists all the files and directory inside start_dir."""
    # Dotfiles (e.g. .git) are skipped.
    return sorted(
      x for x in os.listdir(os.path.join(self.root_dir, start_dir))
      if not x.startswith('.')
    )
+
+
+class ApiAllFilesAtOnceBase(ApiBase):
+ _files = None
+
+ def list_dir(self, start_dir):
+ """Lists all the files and directory inside start_dir."""
+ if self._files is None:
+ self._files = sorted(self._get_all_files())
+ if not self.bare_output:
+ print 'Found %s files' % len(self._files)
+ start_dir = start_dir[len(self.root_dir) + 1:]
+ return [
+ x[len(start_dir):] for x in self._files if x.startswith(start_dir)
+ ]
+
+ def _get_all_files(self):
+ """Lists all the files and directory inside self._root_dir."""
+ raise NotImplementedError()
+
+
class ApiGit(ApiAllFilesAtOnceBase):
  """Checker that asks git for the list of tracked files."""

  def _get_all_files(self):
    listing = capture(['git', 'ls-files'], cwd=self.root_dir)
    return listing.splitlines()
+
+
def get_scm(dir_path, bare):
  """Returns a properly configured ApiBase instance."""
  cwd = os.getcwd()
  root = get_git_root(dir_path or cwd)
  if not root:
    # No git checkout found: fall back to a non-scm aware checker.
    if not bare:
      print('Failed to determine the SCM for %s' % dir_path)
    return ApiBase(dir_path or cwd, bare)
  if not bare:
    print('Found git repository at %s' % root)
  return ApiGit(dir_path or root, bare)
+
+
+def main():
+ usage = """Usage: python %prog [--root <root>] [tocheck]
+ tocheck Specifies the directory, relative to root, to check. This defaults
+ to "." so it checks everything.
+
+Examples:
+ python %prog
+ python %prog --root /path/to/source chrome"""
+
+ parser = optparse.OptionParser(usage=usage)
+ parser.add_option(
+ '--root',
+ help='Specifies the repository root. This defaults '
+ 'to the checkout repository root')
+ parser.add_option(
+ '-v', '--verbose', action='count', default=0, help='Print debug logging')
+ parser.add_option(
+ '--bare',
+ action='store_true',
+ default=False,
+ help='Prints the bare filename triggering the checks')
+ parser.add_option(
+ '--file', action='append', dest='files',
+ help='Specifics a list of files to check the permissions of. Only these '
+ 'files will be checked')
+ parser.add_option('--json', help='Path to JSON output file')
+ options, args = parser.parse_args()
+
+ levels = [logging.ERROR, logging.INFO, logging.DEBUG]
+ logging.basicConfig(level=levels[min(len(levels) - 1, options.verbose)])
+
+ if len(args) > 1:
+ parser.error('Too many arguments used')
+
+ if options.root:
+ options.root = os.path.abspath(options.root)
+
+ if options.files:
+ errors = check_files(options.root, options.files)
+ else:
+ api = get_scm(options.root, options.bare)
+ start_dir = args[0] if args else api.root_dir
+ errors = api.check(start_dir)
+
+ if not options.bare:
+ print('Processed %s files, %d files where tested for shebang/ELF '
+ 'header' % (api.count, api.count_read_header))
+
+ if options.json:
+ with open(options.json, 'w') as f:
+ json.dump(errors, f)
+
+ if errors:
+ if options.bare:
+ print '\n'.join(e['full_path'] for e in errors)
+ else:
+ print '\nFAILED\n'
+ print '\n'.join('%s: %s' % (e['full_path'], e['error']) for e in errors)
+ return 1
+ if not options.bare:
+ print '\nSUCCESS\n'
+ return 0
+
+
if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/chrome_extensions/chromium_code_coverage/js/app.js b/chromium/tools/chrome_extensions/chromium_code_coverage/js/app.js
new file mode 100644
index 00000000000..322c309857d
--- /dev/null
+++ b/chromium/tools/chrome_extensions/chromium_code_coverage/js/app.js
@@ -0,0 +1,420 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/**
+ * @fileoverview Main module for the Chromium Code Coverage extension. This
+ * extension adds incremental and absolute code coverage stats
+ * to the deprecated Rietveld UI. Stats are added inline with
+ * file names as percentage of lines covered.
+ */
+
// Namespace object for the extension; FIX: the declaration previously had a
// stray leading space, inconsistent with every other top-level statement.
var coverage = coverage || {};
+
+/**
+ * Contains all required configuration information.
+ *
+ * @type {Object}
+ * @const
+ */
+coverage.CONFIG = {};
+
+/**
+ * URLs necessary for each project. These are necessary because the Rietveld
+ * sites are used by other projects as well, and is is only possible to find
+ * coverage stats for the projects registered here.
+ *
+ * @type {Object}
+ * @const
+ */
+coverage.CONFIG.COVERAGE_REPORT_URLS = {
+ 'Android': {
+ prefix: 'https://build.chromium.org/p/tryserver.chromium.linux/builders/' +
+ 'android_coverage/builds/',
+ suffix: '/steps/Incremental%20coverage%20report/logs/json.output',
+ botUrl: 'http://build.chromium.org/p/tryserver.chromium.linux/builders/' +
+ 'android_coverage'
+ },
+ 'iOS': {
+ prefix: 'https://uberchromegw.corp.google.com/i/internal.bling.tryserver/' +
+ 'builders/coverage/builds/',
+ suffix: '/steps/coverage/logs/json.output',
+ botUrl: 'https://uberchromegw.corp.google.com/i/internal.bling.tryserver/' +
+ 'builders/coverage'
+ }
+};
+
+/**
+ * URLs where Rietveld apps are served. URLs should be escaped properly so that
+ * they are ready to be used in regular expressions.
+ *
+ * @type {Array.<string>}
+ */
+coverage.CONFIG.CODE_REVIEW_URLS = [
+ 'https:\\/\\/codereview\\.chromium\\.org',
+ 'https:\\/\\/chromereviews\\.googleplex\\.com'
+];
+
+/**
+ * String representing absolute coverage.
+ *
+ * @type {string}
+ * @const
+*/
+coverage.ABSOLUTE_COVERAGE = 'absolute';
+
+/**
+ * String representing incremental coverage.
+ *
+ * @type {string}
+ * @const
+*/
+coverage.INCREMENTAL_COVERAGE = 'incremental';
+
+/**
+ * String representing patch incremental coverage.
+ *
+ * @type {string}
+ * @const
+ */
+coverage.PATCH_COVERAGE = 'patch';
+
+/**
+ * Fetches detailed coverage stats for a given patch set and injects them into
+ * the code review page.
+ *
+ * @param {Element} patchElement Div containing a single patch set.
+ * @param {string} botUrl Location of the detailed coverage bot results.
+ * @param {string} projectName The name of project to which code was submitted.
+ */
coverage.injectCoverageStats = function(patchElement, botUrl, projectName) {
  // The build number is the final path segment of the bot URL.
  var buildNumber = botUrl.split('/').pop();
  var patch = new coverage.PatchSet(projectName, buildNumber);
  var onStats = function(patchStats) {
    coverage.updateUi(patchStats, patchElement, patch.getCoverageReportUrl());
  };
  patch.getCoverageData(onStats);
};
+
+/**
+ * Adds coverage stats to the table containing files changed for a given patch.
+ *
+ * @param {Object} patchStats Object containing stats for a given patch set.
+ * @param {Element} patchElement Div containing a patch single set.
+ * @param {string} reportUrl Location of the detailed coverage stats for this
+ * patch.
+ */
coverage.updateUi = function(patchStats, patchElement, reportUrl) {
  // Add absolute and incremental coverage column headers.
  var patchSetTableBody = patchElement.getElementsByTagName('tbody')[0];
  var headerRow = patchSetTableBody.firstElementChild;
  // NOTE(review): both headers are inserted at index 1, so the later insert
  // ('|Cov.|') ends up to the LEFT of '&Delta;Cov.' — presumably matching the
  // per-file cells inserted at index 2 below; confirm against the Rietveld
  // table layout.
  coverage.appendElementBeforeChild(headerRow, 'th', '&Delta;Cov.', 1);
  coverage.appendElementBeforeChild(headerRow, 'th', '|Cov.|', 1);

  // Add absolute and incremental coverage stats for each file.
  var fileRows = patchElement.querySelectorAll('[name=patch]');
  for (var i = 0; i < fileRows.length; i++) {
    var sourceFileRow = fileRows[i];
    var fileName = sourceFileRow.children[2].textContent.trim();

    // Files absent from patchStats render as '-' via formatPercent(null).
    var incrementalPercent = null;
    var absolutePercent = null;
    if (patchStats[fileName]) {
      incrementalPercent = patchStats[fileName][coverage.INCREMENTAL_COVERAGE];
      absolutePercent = patchStats[fileName][coverage.ABSOLUTE_COVERAGE];
    }

    coverage.appendElementBeforeChild(
        sourceFileRow, 'td', coverage.formatPercent(incrementalPercent), 2);

    coverage.appendElementBeforeChild(
        sourceFileRow, 'td', coverage.formatPercent(absolutePercent), 2);
  }
  // Add the overall coverage stats for the patch.
  coverage.addPatchSummaryStats(
      patchElement, patchStats[coverage.PATCH_COVERAGE], reportUrl);
};
+
+/**
+ * Formats percent for presentation on the page.
+ *
+ * @param {number} coveragePercent
+ * @return {string} Formatted string ready to be added to the the DOM.
+ */
coverage.formatPercent = function(coveragePercent) {
  // Falsy coverage (null/undefined/0) is rendered as a dash.
  return coveragePercent ? coveragePercent + '%' : '-';
};
+
+/**
+ * Adds summary line to a patch element: "Cov. for this patch: 45%. Details".
+ *
+ * @param {Element} patchElement Div containing a patch single patch set.
+ * @param {number} coveragePercent Incremental coverage for entire patch.
+ * @param {string} coverageReportUrl Location of detailed coverage report.
+ */
coverage.addPatchSummaryStats = function(
    patchElement, coveragePercent, coverageReportUrl) {
  // Build "ΔCov. for this patch: NN%. Details" as a single HTML string.
  var summaryElement = document.createElement('div');
  summaryElement.innerHTML = '&Delta;Cov. for this patch: ' +
      coverage.formatPercent(coveragePercent) + '.&nbsp;' + ' ' +
      '<a href="' + coverageReportUrl + '">Details</a>';

  // Place the summary immediately after the changed-files table.
  var tableElement = patchElement.getElementsByTagName('table')[0];
  tableElement.parentNode.insertBefore(
      summaryElement, tableElement.nextSibling);
};
+
+/**
+ * Creates and prepends an element before another.
+ *
+ * @param {Element} parentElement The parent of the element to prepend a new
+ * element to.
+ * @param {string} elementType The tag name for the new element.
+ * @param {string} innerHtml The value to set as the new element's innerHTML
+ * @param {number} childNumber The index of the child to prepend to.
+ */
coverage.appendElementBeforeChild = function(
    parentElement, elementType, innerHtml, childNumber) {
  // Create the new node, fill it, then splice it in front of the child at
  // index childNumber.
  var inserted = document.createElement(elementType);
  inserted.innerHTML = innerHtml;
  var anchor = parentElement.children[childNumber];
  parentElement.insertBefore(inserted, anchor);
};
+
+/**
+ * Checks if the given URL has been registered or not.
+ *
+ * @param {string} botUrl The URL to be verified.
+ * @return {boolean} Whether or not the provided URL was valid.
+ */
coverage.isValidBotUrl = function(botUrl) {
  if (!botUrl) {
    return false;
  }
  // The URL is valid when it contains any registered project's bot URL.
  var urls = coverage.CONFIG.COVERAGE_REPORT_URLS;
  return Object.keys(urls).some(function(project) {
    return botUrl.indexOf(urls[project]['botUrl']) > -1;
  });
};
+
+/**
+ * Returns the project name for the given bot URL. This function expects the bot
+ * URL to be valid.
+ *
+ * @param {botUrl} botUrl
+ * @return {string} The project name for the given bot URL.
+ * @throws {Error} If an invalid bot URL is supplied.
+ */
coverage.getProjectNameFromBotUrl = function(botUrl) {
  if (!botUrl) {
    throw Error(botUrl + ' is an invalid bot url.');
  }
  var urls = coverage.CONFIG.COVERAGE_REPORT_URLS;
  // Return the first registered project whose bot URL is a substring.
  for (var project in urls) {
    if (botUrl.indexOf(urls[project]['botUrl']) > -1) {
      return project;
    }
  }
  throw Error(botUrl + ' is not registered.');
};
+
+
+/**
+ * Finds the coverage bot URL.
+ *
+ * @param {Element} patchElement Div to search for bot URL.
+ * @return {string} Returns the URL to the bot details page.
+ */
coverage.getValidBotUrl = function(patchElement) {
  var bots = patchElement.getElementsByClassName('build-result');
  for (var i = 0; i < bots.length; i++) {
    var bot = bots[i];
    // Only successful runs of a registered coverage bot qualify.
    var succeeded = bot.getAttribute('status') === 'success';
    if (succeeded && coverage.isValidBotUrl(bot.href)) {
      return bot.href;
    }
  }
  return null;
};
+
+/**
+ * Checks to see if the URL points to a CL review and not another page on the
+ * code review site (i.e. settings).
+ *
+ * @param {string} url The URL to verify.
+ * @return {boolean} Whether or not the URL points to a CL review.
+ */
coverage.isValidReviewUrl = function(url) {
  // BUG FIX: 'baseUrls' was previously assigned without var, leaking an
  // implicit global (and a ReferenceError under 'use strict').
  var baseUrls = coverage.CONFIG.CODE_REVIEW_URLS.join('|');
  // Matches baseurl.com/numeric-digits and baseurl.com/numeric-digits/anything
  var re = new RegExp('(' + baseUrls + ')/[\\d]+(\\/|$)', 'i');
  return !!url.match(re);
};
+
+/**
+ * Verifies that the user is using the deprecated UI.
+ *
+ * @return {boolean} Whether or not the deprecated UI is being used.
+ */
coverage.isDeprecatedUi = function() {
  // The <cr-app> element exists only in the new UI.
  var crAppElements = document.getElementsByTagName('cr-app');
  return crAppElements.length == 0;
};
+
+/**
+ * Returns the newest patch set element.
+ *
+ * @return {Element} The main div for the last patch set.
+ */
coverage.getLastPatchElement = function() {
  // BUG FIX: the attribute selector was missing its closing ']'
  // ('div[id^="ps-"'); browsers only tolerated it by auto-closing the
  // unterminated selector at end of input.
  var patchElements = document.querySelectorAll('div[id^="ps-"]');
  return patchElements[patchElements.length - 1];
};
+
+/**
+ * Model that describes a patch set.
+ *
+ * @param {string} projectName The name of the project.
+ * @param {string} buildNumber The build number for the bot run corresponding to
+ * this patch set.
+ * @constructor
+ */
coverage.PatchSet = function(projectName, buildNumber) {
  /**
   * Location of the detailed coverage JSON report, computed eagerly here so
   * later calls to getCoverageReportUrl() need no arguments.
   * @type {string}
   * @private
   */
  this.coverageReportUrl_ = this.getCoverageReportUrl(projectName, buildNumber);
};
+
+/**
+ * Returns the coverage report URL.
+ *
+ * @param {string} projectName The name of the project.
+ * @param {string} buildNumber The build number for the bot run corresponding
+ * to this patch set.
+ * @return {string} The URL to the detailed coverage report.
+ */
coverage.PatchSet.prototype.getCoverageReportUrl = function(
    projectName, buildNumber) {
  // Dual-use: called with both arguments from the constructor to build and
  // cache the URL, and with no arguments afterwards as a plain getter (the
  // cached value short-circuits the lookup, so the undefined args are unused).
  if (!this.coverageReportUrl_) {
    var reportUrl = coverage.CONFIG.COVERAGE_REPORT_URLS[projectName];
    this.coverageReportUrl_ = reportUrl['prefix'] + buildNumber +
        reportUrl['suffix'];
  }
  return this.coverageReportUrl_;
};
+
+/**
+ * Returns the detailed coverage report. Caller must handle what happens
+ * when the report is received. No side effects if report isn't sent.
+ *
+ * @param {function} success The callback to be invoked when the report is
+ * received. Invoked with an object mapping file names to
+ * coverage stats as the only argument.
+ */
coverage.PatchSet.prototype.getCoverageData = function(success) {
  var client = new coverage.HttpClient();
  // Parse the raw report and hand per-file percentages to the caller.
  var onResponse = function(data) {
    var report = JSON.parse(data);
    success(this.getCoveragePercentForFiles(report));
  };
  client.get(this.coverageReportUrl_, onResponse.bind(this));
};
+
+/**
+ * Extracts the coverage percent for each file from the coverage report.
+ *
+ * @param {Object} reportDict The detailed coverage report.
+ * @return {Object} An object containing the coverage percent for each file and
+ * the patch coverage percent.
+ */
coverage.PatchSet.prototype.getCoveragePercentForFiles = function(reportDict) {
  var fileDict = reportDict['files'];
  var coveragePercentages = {};

  // Per-file entries: each maps to {absolute: %, incremental: %}.
  for (var fileName in fileDict) {
    if (fileDict.hasOwnProperty(fileName)) {
      coveragePercentages[fileName] = {};
      var coverageDict = fileDict[fileName];

      coveragePercentages[fileName][coverage.ABSOLUTE_COVERAGE] =
          this.getCoveragePercent(coverageDict, coverage.ABSOLUTE_COVERAGE);

      coveragePercentages[fileName][coverage.INCREMENTAL_COVERAGE] =
          this.getCoveragePercent(coverageDict, coverage.INCREMENTAL_COVERAGE);
    }
  }
  // The whole-patch summary lives under the 'patch' key and only has an
  // incremental figure.
  coveragePercentages[coverage.PATCH_COVERAGE] =
      this.getCoveragePercent(reportDict[coverage.PATCH_COVERAGE],
                              coverage.INCREMENTAL_COVERAGE);
  return coveragePercentages;
};
+
+/**
+ * Returns the coverage percent given the number of total and covered lines.
+ *
+ * @param {Object} coverageDict Object containing absolute and incremental
+ * number of lines covered.
+ * @param {string} coverageType Either 'incremental' or 'absolute'.
+ * @return {number} The coverage percent.
+ */
coverage.PatchSet.prototype.getCoveragePercent = function(
    coverageDict, coverageType) {
  if (!coverageDict ||
      (coverageType !== coverage.INCREMENTAL_COVERAGE &&
       coverageType !== coverage.ABSOLUTE_COVERAGE)) {
    return null;
  }
  var covered = coverageDict[coverageType]['covered'];
  var total = coverageDict[coverageType]['total'];
  // BUG FIX: 'total' was previously read in the guard above BEFORE its var
  // declaration, so hoisting made it undefined, parseFloat(undefined) is NaN,
  // and the zero-total check never fired — letting a divide-by-zero produce
  // Infinity/NaN below. Check it here, after the value is actually assigned.
  if (parseFloat(total) === 0) {
    return null;
  }
  return Math.round(
      (parseFloat(covered) / parseFloat(total)) * 100);
};
+
/**
 * Model describing a simple HTTP client. Only supports GET requests.
 *
 * @constructor
 */
coverage.HttpClient = function() {
};
+
+/**
+ * HTTP GET that only handles successful requests.
+ *
+ * @param {string} url The URL to make a GET request to.
+ * @param {function} success The callback invoked when the request is finished
+ * successfully. Callback is invoked with response text as
+ * the only argument.
+ */
coverage.HttpClient.prototype.get = function(url, success) {
  // TODO(estevenson): Handle failure when user isn't authenticated.
  var http = new XMLHttpRequest();
  http.onreadystatechange = function() {
    // Only invoke the callback for a completed, successful request.
    var done = http.readyState === 4;
    if (done && http.status === 200) {
      success(http.responseText);
    }
  };

  http.open('GET', url + '/text', true);
  http.send(null);
};
+
// Content-script entry point: only run on the deprecated UI and on an actual
// CL review page that might contain a patch set with a valid coverage bot.
if (coverage.isDeprecatedUi() &&
    coverage.isValidReviewUrl(window.location.href)) {
  var patchElement = coverage.getLastPatchElement();
  var botUrl = coverage.getValidBotUrl(patchElement);
  // Silently do nothing when no successful, registered coverage bot exists.
  if (botUrl) {
    var projectName = coverage.getProjectNameFromBotUrl(botUrl);
    coverage.injectCoverageStats(patchElement, botUrl, projectName);
  }
}
diff --git a/chromium/tools/chrome_extensions/chromium_code_coverage/manifest.json b/chromium/tools/chrome_extensions/chromium_code_coverage/manifest.json
new file mode 100644
index 00000000000..b8eb440c5df
--- /dev/null
+++ b/chromium/tools/chrome_extensions/chromium_code_coverage/manifest.json
@@ -0,0 +1,18 @@
+{
+ "manifest_version": 2,
+ "name": "Chromium code coverage: deprecated UI",
+ "description": "Adds coverage stats to Rietveld.",
+ "version": "1.0.0",
+ "permissions": [
+ "https://uberchromegw.corp.google.com/",
+ "https://build.chromium.org/"
+ ],
+ "content_scripts": [{
+ "matches": ["https://codereview.chromium.org/*",
+ "https://chromereviews.googleplex.com/*"],
+ "js": [
+ "js/app.js"
+ ],
+ "run_at": "document_end"
+ }]
+}
diff --git a/chromium/tools/chrome_proxy/OWNERS b/chromium/tools/chrome_proxy/OWNERS
new file mode 100644
index 00000000000..8dfefe6053f
--- /dev/null
+++ b/chromium/tools/chrome_proxy/OWNERS
@@ -0,0 +1,8 @@
+bengr@chromium.org
+bolian@chromium.org
+bustamante@chromium.org
+kundaji@chromium.org
+marq@chromium.org
+megjablon@chromium.org
+sclittle@chromium.org
+tbansal@chromium.org
diff --git a/chromium/tools/chrome_proxy/chrome_proxy_config.py b/chromium/tools/chrome_proxy/chrome_proxy_config.py
new file mode 100644
index 00000000000..d178890d634
--- /dev/null
+++ b/chromium/tools/chrome_proxy/chrome_proxy_config.py
@@ -0,0 +1,20 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'perf'))
+
+from chrome_telemetry_build import chromium_config
+
+TELEMETRY_DIR = chromium_config.GetTelemetryDir()
+
+_top_level_dir = os.path.dirname(os.path.realpath(__file__))
+
def Config(benchmark_subdirs):
  """Builds a ChromiumConfig whose benchmark dirs live under this directory."""
  benchmark_dirs = [
      os.path.join(_top_level_dir, subdir) for subdir in benchmark_subdirs]
  return chromium_config.ChromiumConfig(
      top_level_dir=_top_level_dir, benchmark_dirs=benchmark_dirs)
diff --git a/chromium/tools/chrome_proxy/common/__init__.py b/chromium/tools/chrome_proxy/common/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/chromium/tools/chrome_proxy/common/__init__.py
diff --git a/chromium/tools/chrome_proxy/common/chrome_proxy_benchmark.py b/chromium/tools/chrome_proxy/common/chrome_proxy_benchmark.py
new file mode 100644
index 00000000000..7c4aa826666
--- /dev/null
+++ b/chromium/tools/chrome_proxy/common/chrome_proxy_benchmark.py
@@ -0,0 +1,21 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from common import chrome_proxy_measurements as measurements
+from telemetry import benchmark
+
+
class ChromeProxyBenchmark(benchmark.Benchmark):
  """Benchmark base class wiring up --extra-chrome-proxy-via-header.

  The parsed flag value is stored on ChromeProxyValidation.extra_via_header so
  the shared measurement code can assert on the extra Via header.
  """

  @classmethod
  def AddCommandLineArgs(cls, parser):
    # Registers the proxy-specific flag on the telemetry option parser.
    parser.add_option(
        '--extra-chrome-proxy-via-header',
        type='string', dest="extra_header",
        help='Adds an expected Via header for the Chrome-Proxy tests.')

  @classmethod
  def ProcessCommandLineArgs(cls, parser, args):
    # Propagate the parsed flag (if any) to the measurement base class.
    if args.extra_header:
      measurements.ChromeProxyValidation.extra_via_header = args.extra_header
+
diff --git a/chromium/tools/chrome_proxy/common/chrome_proxy_measurements.py b/chromium/tools/chrome_proxy/common/chrome_proxy_measurements.py
new file mode 100644
index 00000000000..097c3273c1b
--- /dev/null
+++ b/chromium/tools/chrome_proxy/common/chrome_proxy_measurements.py
@@ -0,0 +1,104 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import logging
+
+from common import chrome_proxy_metrics as metrics
+from telemetry.core import exceptions
+from telemetry.page import page_test
+
+
def WaitForViaHeader(tab, url="http://check.googlezip.net/test.html"):
  """Wait until responses start coming back with the Chrome Proxy via header.

  Poll |url| in |tab| until the Chrome Proxy via header is present in a
  response.

  This function is useful when testing with the Data Saver API, since Chrome
  won't actually start sending requests to the Data Reduction Proxy until the
  Data Saver API fetch completes. This function can be used to wait for the Data
  Saver API fetch to complete.

  Args:
    tab: telemetry tab used to navigate and evaluate JavaScript.
    url: URL polled via XHR until its response carries the expected header.
  """

  # Load an inline data: URL page that defines PollDRPCheck(), which XHRs |url|
  # and records whether the wanted Via header was seen.
  tab.Navigate('data:text/html;base64,%s' % base64.b64encode(
      '<html><body><script>'
      'window.via_header_found = false;'
      'function PollDRPCheck(url, wanted_via) {'
        'if (via_header_found) { return true; }'
        'try {'
          'var xmlhttp = new XMLHttpRequest();'
          'xmlhttp.open("GET",url,true);'
          'xmlhttp.onload=function(e) {'
            # Store the last response received for debugging, this will be shown
            # in telemetry dumps if the request fails or times out.
            'window.last_xhr_response_headers = xmlhttp.getAllResponseHeaders();'
            'var via=xmlhttp.getResponseHeader("via");'
            'if (via && via.indexOf(wanted_via) != -1) {'
              'window.via_header_found = true;'
            '}'
          '};'
          'xmlhttp.timeout=30000;'
          'xmlhttp.send();'
        '} catch (err) {'
          '/* Return normally if the xhr request failed. */'
        '}'
        'return false;'
      '}'
      '</script>'
      'Waiting for Chrome to start using the DRP...'
      '</body></html>'))

  # Ensure the page has started loading before attempting the DRP check.
  tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 60)

  # An explicitly configured extra via header replaces the default
  # Chrome-Compression-Proxy value as the header to wait for.
  expected_via_header = metrics.CHROME_PROXY_VIA_HEADER
  if ChromeProxyValidation.extra_via_header:
    expected_via_header = ChromeProxyValidation.extra_via_header

  # Poll (up to 60s) until a response carrying the expected header is seen.
  tab.WaitForJavaScriptExpression(
      'PollDRPCheck("%s", "%s")' % (url, expected_via_header), 60)
+
+
class ChromeProxyValidation(page_test.PageTest):
  """Base class for all chrome proxy correctness measurements."""

  # Value of the extra via header. |None| if no extra via header is expected.
  extra_via_header = None

  def __init__(self, restart_after_each_page=False, metrics=None):
    """Args:
      restart_after_each_page: restart the browser between pages when True.
      metrics: object providing Start/Stop/AddResultsForExtraViaHeader hooks
          (presumably a chrome-proxy metric instance — confirm with callers).
    """
    super(ChromeProxyValidation, self).__init__(
        needs_browser_restart_after_each_page=restart_after_each_page)
    self._metrics = metrics
    self._page = None

  def CustomizeBrowserOptions(self, options):
    # Enable the chrome proxy (data reduction proxy).
    options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')

  def WillNavigateToPage(self, page, tab):
    # Wait for the proxy to be active before the measured navigation so the
    # page load itself is proxied.
    WaitForViaHeader(tab)

    tab.ClearCache(force=True)
    assert self._metrics
    self._metrics.Start(page, tab)

  def ValidateAndMeasurePage(self, page, tab, results):
    self._page = page
    # Wait for the load event.
    tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
    assert self._metrics
    self._metrics.Stop(page, tab)
    if ChromeProxyValidation.extra_via_header:
      self._metrics.AddResultsForExtraViaHeader(
          tab, results, ChromeProxyValidation.extra_via_header)
    self.AddResults(tab, results)

  def AddResults(self, tab, results):
    # Subclasses report their measurement-specific results here.
    raise NotImplementedError

  def StopBrowserAfterPage(self, browser, page): # pylint: disable=W0613
    # Pages may opt into a browser restart by setting a 'restart_after' flag.
    if hasattr(page, 'restart_after') and page.restart_after:
      return True
    return False
diff --git a/chromium/tools/chrome_proxy/common/chrome_proxy_metrics.py b/chromium/tools/chrome_proxy/common/chrome_proxy_metrics.py
new file mode 100644
index 00000000000..47a6d6b0673
--- /dev/null
+++ b/chromium/tools/chrome_proxy/common/chrome_proxy_metrics.py
@@ -0,0 +1,116 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+from common import network_metrics
+from telemetry.page import page_test
+from telemetry.value import scalar
+
+
+CHROME_PROXY_VIA_HEADER = 'Chrome-Compression-Proxy'
+
+
class ChromeProxyMetricException(page_test.MeasurementFailure):
  """Raised when a chrome-proxy metric measurement fails validation."""
  pass
+
+
class ChromeProxyResponse(network_metrics.HTTPResponse):
  """ Represents an HTTP response from a timeline event."""
  def __init__(self, event):
    super(ChromeProxyResponse, self).__init__(event)

  def ShouldHaveChromeProxyViaHeader(self):
    """Returns True when this response is expected to carry the proxy Via."""
    resp = self.response
    # Ignore https and data url
    if resp.url.startswith('https') or resp.url.startswith('data:'):
      return False
    # Ignore 304 Not Modified and cache hit.
    if resp.status == 304 or resp.served_from_cache:
      return False
    # Ignore invalid responses that don't have any header. Log a warning.
    if not resp.headers:
      logging.warning('response for %s does not any have header '
                      '(refer=%s, status=%s)',
                      resp.url, resp.GetHeader('Referer'), resp.status)
      return False
    return True

  def HasResponseHeader(self, key, value):
    """Returns True if header |key| lists |value| among its comma values."""
    response_header = self.response.GetHeader(key)
    if not response_header:
      return False
    values = [v.strip() for v in response_header.split(',')]
    return any(v == value for v in values)

  def HasRequestHeader(self, key, value):
    """Returns True if request header |key| lists |value| among its values."""
    if key not in self.response.request_headers:
      return False
    request_header = self.response.request_headers[key]
    values = [v.strip() for v in request_header.split(',')]
    return any(v == value for v in values)

  def HasChromeProxyViaHeader(self):
    """Returns True if the Via header names the compression proxy."""
    via_header = self.response.GetHeader('Via')
    if not via_header:
      return False
    vias = [v.strip(' ') for v in via_header.split(',')]
    # The Via header is valid if it has a 4-character version prefix followed by
    # the proxy name, for example, "1.1 Chrome-Compression-Proxy".
    return any(v[4:] == CHROME_PROXY_VIA_HEADER for v in vias)

  def HasExtraViaHeader(self, extra_header):
    """Returns True if the Via header contains |extra_header| as a value."""
    return self.HasResponseHeader('Via', extra_header)

  def IsValidByViaHeader(self):
    """True when the Via header is present whenever it should be."""
    return (not self.ShouldHaveChromeProxyViaHeader() or
            self.HasChromeProxyViaHeader())

  def GetChromeProxyRequestHeaderValue(self, key):
    """Get a specific Chrome-Proxy request header value.

    Returns:
      The value for a specific Chrome-Proxy request header value for a
      given key. Returns None if no such key is present.
    """
    if 'Chrome-Proxy' not in self.response.request_headers:
      return None

    chrome_proxy_request_header = self.response.request_headers['Chrome-Proxy']
    values = [v.strip() for v in chrome_proxy_request_header.split(',')]
    for value in values:
      # Directives look like "key=value"; split only on the first '='.
      kvp = value.split('=', 1)
      if len(kvp) == 2 and kvp[0].strip() == key:
        return kvp[1].strip()
    return None

  def GetChromeProxyClientType(self):
    """Get the client type directive from the Chrome-Proxy request header.

    Returns:
      The client type directive from the Chrome-Proxy request header for the
      request that lead to this response. For example, if the request header
      "Chrome-Proxy: c=android" is present, then this method would return
      "android". Returns None if no client type directive is present.
    """
    return self.GetChromeProxyRequestHeaderValue('c')

  def HasChromeProxyLoFiRequest(self):
    # "q=low" requests a low-fidelity (LoFi) version of the resource.
    return self.HasRequestHeader('Chrome-Proxy', "q=low")

  def HasChromeProxyLoFiResponse(self):
    # Server echoes "q=low" when it served a LoFi response.
    return self.HasResponseHeader('Chrome-Proxy', "q=low")

  def HasChromeProxyLoFiPreviewRequest(self):
    return self.HasRequestHeader('Chrome-Proxy', "q=preview")

  def HasChromeProxyLoFiPreviewExpRequest(self):
    return self.HasRequestHeader('Chrome-Proxy', "exp=ignore_preview_blacklist")

  def HasChromeProxyLoFiPreviewResponse(self):
    return self.HasResponseHeader('Chrome-Proxy', "q=preview")

  def HasChromeProxyPassThroughRequest(self):
    # "pass-through" asks the proxy not to transform the response.
    return self.HasRequestHeader('Chrome-Proxy', "pass-through")
diff --git a/chromium/tools/chrome_proxy/common/chrome_proxy_metrics_unittest.py b/chromium/tools/chrome_proxy/common/chrome_proxy_metrics_unittest.py
new file mode 100644
index 00000000000..35c9e1bfb91
--- /dev/null
+++ b/chromium/tools/chrome_proxy/common/chrome_proxy_metrics_unittest.py
@@ -0,0 +1,44 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import unittest
+
+from common import chrome_proxy_metrics as metrics
+from common import network_metrics_unittest as network_unittest
+
+
class ChromeProxyMetricTest(unittest.TestCase):
  """Unit tests for ChromeProxyResponse header classification."""

  def testChromeProxyResponse(self):
    # An https non-proxy response: https URLs are exempt from the Via check,
    # so a foreign Via value is still considered valid.
    resp = metrics.ChromeProxyResponse(
        network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
            url='https://test.url',
            response_headers={
                'Content-Type': 'text/html',
                'Content-Length': str(len(network_unittest.HTML_BODY)),
                'Via': 'some other via',
                },
            body=network_unittest.HTML_BODY))
    self.assertFalse(resp.ShouldHaveChromeProxyViaHeader())
    self.assertFalse(resp.HasChromeProxyViaHeader())
    self.assertTrue(resp.IsValidByViaHeader())

    # A proxied JPEG image response: http URL must carry the proxy Via header
    # and here it does ("1.1 Chrome-Compression-Proxy").
    resp = metrics.ChromeProxyResponse(
        network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
            url='http://test.image',
            response_headers={
                'Content-Type': 'image/jpeg',
                'Content-Encoding': 'gzip',
                'Via': '1.1 ' + metrics.CHROME_PROXY_VIA_HEADER,
                'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
                },
            body=base64.b64encode(network_unittest.IMAGE_BODY),
            base64_encoded_body=True))
    self.assertTrue(resp.ShouldHaveChromeProxyViaHeader())
    self.assertTrue(resp.HasChromeProxyViaHeader())
    self.assertTrue(resp.IsValidByViaHeader())
+
diff --git a/chromium/tools/chrome_proxy/common/inspector_network.py b/chromium/tools/chrome_proxy/common/inspector_network.py
new file mode 100644
index 00000000000..051a8fb909d
--- /dev/null
+++ b/chromium/tools/chrome_proxy/common/inspector_network.py
@@ -0,0 +1,293 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+import logging
+
+from telemetry.core import exceptions
+from telemetry.timeline import trace_data
+from telemetry.timeline import model
+
+
+class InspectorNetworkException(Exception):
+ pass
+
+
+class InspectorNetworkResponseData(object):
+ def __init__(self, inspector_network, params, initiator):
+ """Creates a new InspectorNetworkResponseData instance.
+
+ Args:
+ inspector_network: InspectorNetwork instance.
+ params: the 'params' field of the devtools Network.responseReceived event.
+ initiator: initiator of the request, as gathered from
+ Network.requestWillBeSent.
+ """
+ self._inspector_network = inspector_network
+ self._request_id = params['requestId']
+ self._timestamp = params['timestamp']
+ self._initiator = initiator
+
+ self._response = params['response']
+ if not self._response:
+ raise InspectorNetworkException('response must exist')
+
+ # Response headers.
+ headers = self._response['headers']
+ self._header_map = {}
+ for k, v in headers.iteritems():
+ # Camel-case header keys.
+ self._header_map[k.title()] = v
+
+ # Request headers.
+ self._request_header_map = {}
+ if 'requestHeaders' in self._response:
+ # Camel-case header keys.
+ for k, v in self._response['requestHeaders'].iteritems():
+ self._request_header_map[k.title()] = v
+
+ self._body = None
+ self._base64_encoded = False
+ if self._inspector_network:
+ self._served_from_cache = (
+ self._inspector_network.HTTPResponseServedFromCache(self._request_id))
+ else:
+ self._served_from_cache = False
+
+ # Whether constructed from a timeline event.
+ self._from_event = False
+
+ @property
+ def status(self):
+ return self._response['status']
+
+ @property
+ def status_text(self):
+ return self._response['status_text']
+
+ @property
+ def headers(self):
+ return self._header_map
+
+ @property
+ def request_headers(self):
+ return self._request_header_map
+
+ @property
+ def timestamp(self):
+ return self._timestamp
+
+ @property
+ def timing(self):
+ if 'timing' in self._response:
+ return self._response['timing']
+ return None
+
+ @property
+ def url(self):
+ return self._response['url']
+
+ @property
+ def request_id(self):
+ return self._request_id
+
+ @property
+ def served_from_cache(self):
+ return self._served_from_cache
+
+ @property
+ def initiator(self):
+ return self._initiator
+
+ def GetHeader(self, name):
+ if name in self.headers:
+ return self.headers[name]
+ return None
+
+ def GetBody(self, timeout=60):
+ if not self._body and not self._from_event:
+ self._body, self._base64_encoded = (
+ self._inspector_network.GetHTTPResponseBody(self._request_id, timeout))
+ return self._body, self._base64_encoded
+
+ def AsTimelineEvent(self):
+ event = {}
+ event['type'] = 'HTTPResponse'
+ event['startTime'] = self.timestamp
+ # There is no end time. Just return the timestamp instead.
+ event['endTime'] = self.timestamp
+ event['requestId'] = self.request_id
+ event['response'] = self._response
+ event['body'], event['base64_encoded_body'] = self.GetBody()
+ event['served_from_cache'] = self.served_from_cache
+ event['initiator'] = self._initiator
+ return event
+
+ @staticmethod
+ def FromTimelineEvent(event):
+ assert event.name == 'HTTPResponse'
+ params = {}
+ params['timestamp'] = event.start
+ params['requestId'] = event.args['requestId']
+ params['response'] = event.args['response']
+ recorded = InspectorNetworkResponseData(None, params, None)
+ # pylint: disable=protected-access
+ recorded._body = event.args['body']
+ recorded._base64_encoded = event.args['base64_encoded_body']
+ recorded._served_from_cache = event.args['served_from_cache']
+ recorded._initiator = event.args.get('initiator', None)
+ recorded._from_event = True
+ return recorded
+
+
+class InspectorNetwork(object):
+ def __init__(self, inspector_websocket):
+ self._inspector_websocket = inspector_websocket
+ self._http_responses = []
+ self._served_from_cache = set()
+ self._timeline_recorder = None
+ self._initiators = {}
+ self._finished = {}
+
+ def ClearCache(self, timeout=60):
+ """Clears the browser's disk and memory cache."""
+ res = self._inspector_websocket.SyncRequest({
+ 'method': 'Network.canClearBrowserCache'
+ }, timeout)
+ assert res['result'], 'Cache clearing is not supported by this browser.'
+ self._inspector_websocket.SyncRequest({
+ 'method': 'Network.clearBrowserCache'
+ }, timeout)
+
+ def StartMonitoringNetwork(self):
+ """Starts monitoring network notifications and recording HTTP responses."""
+ self.ClearResponseData()
+ self._inspector_websocket.RegisterDomain(
+ 'Network',
+ self._OnNetworkNotification)
+ request = {
+ 'method': 'Network.enable'
+ }
+ self._inspector_websocket.SyncRequest(request)
+
+ def StopMonitoringNetwork(self):
+ """Stops monitoring network notifications and recording HTTP responses."""
+ request = {
+ 'method': 'Network.disable'
+ }
+ self._inspector_websocket.SyncRequest(request)
+ # There may be queued messages that don't appear until the SyncRequest
+ # happens. Wait to unregister until after sending the disable command.
+ self._inspector_websocket.UnregisterDomain('Network')
+
+ def GetResponseData(self):
+ """Returns all recorded HTTP responses."""
+ return [self._AugmentResponse(rsp) for rsp in self._http_responses]
+
+ def ClearResponseData(self):
+ """Clears recorded HTTP responses."""
+ self._http_responses = []
+ self._served_from_cache.clear()
+ self._initiators.clear()
+
+ def _AugmentResponse(self, response):
+ """Augments an InspectorNetworkResponseData for final output.
+
+ Join the loadingFinished timing event to the response. This event is
+ timestamped with epoch seconds. In the response timing object, all timing
+ aside from requestTime is in millis relative to requestTime, so
+ loadingFinished is converted to be consistent.
+
+ Args:
+ response: an InspectorNetworkResponseData instance to augment.
+
+ Returns:
+      The same response, modified as described above.
+
+ """
+ if response.timing is None:
+ return response
+
+ if response.request_id not in self._finished:
+ response.timing['loadingFinished'] = -1
+ else:
+ delta_ms = 1000 * (self._finished[response.request_id] -
+ response.timing['requestTime'])
+ if delta_ms < 0:
+ delta_ms = -1
+ response.timing['loadingFinished'] = delta_ms
+ return response
+
+ def _OnNetworkNotification(self, msg):
+ if msg['method'] == 'Network.requestWillBeSent':
+ self._ProcessRequestWillBeSent(msg['params'])
+ if msg['method'] == 'Network.responseReceived':
+ self._RecordHTTPResponse(msg['params'])
+ elif msg['method'] == 'Network.requestServedFromCache':
+ self._served_from_cache.add(msg['params']['requestId'])
+ elif msg['method'] == 'Network.loadingFinished':
+ assert msg['params']['requestId'] not in self._finished
+ self._finished[msg['params']['requestId']] = msg['params']['timestamp']
+
+ def _ProcessRequestWillBeSent(self, params):
+ request_id = params['requestId']
+ self._initiators[request_id] = params['initiator']
+
+ def _RecordHTTPResponse(self, params):
+ required_fields = ['requestId', 'timestamp', 'response']
+ for field in required_fields:
+ if field not in params:
+ logging.warning('HTTP Response missing required field: %s', field)
+ return
+ request_id = params['requestId']
+ assert request_id in self._initiators
+ initiator = self._initiators[request_id]
+ self._http_responses.append(
+ InspectorNetworkResponseData(self, params, initiator))
+
+ def GetHTTPResponseBody(self, request_id, timeout=60):
+ try:
+ res = self._inspector_websocket.SyncRequest({
+ 'method': 'Network.getResponseBody',
+ 'params': {
+ 'requestId': request_id,
+ }
+ }, timeout)
+ except exceptions.TimeoutException:
+ logging.warning('Timeout during fetching body for %s' % request_id)
+ return None, False
+ if 'error' in res:
+ return None, False
+ return res['result']['body'], res['result']['base64Encoded']
+
+ def HTTPResponseServedFromCache(self, request_id):
+ return request_id and request_id in self._served_from_cache
+
+ @property
+ def timeline_recorder(self):
+ if not self._timeline_recorder:
+ self._timeline_recorder = TimelineRecorder(self)
+ return self._timeline_recorder
+
+
+class TimelineRecorder(object):
+ def __init__(self, inspector_network):
+ self._inspector_network = inspector_network
+ self._is_recording = False
+
+ def Start(self):
+ assert not self._is_recording, 'Start should only be called once.'
+ self._is_recording = True
+ self._inspector_network.StartMonitoringNetwork()
+
+ def Stop(self):
+ if not self._is_recording:
+ return None
+ responses = self._inspector_network.GetResponseData()
+ events = [r.AsTimelineEvent() for r in list(responses)]
+ self._inspector_network.StopMonitoringNetwork()
+ self._is_recording = False
+ if len(events) == 0:
+ return None
+ builder = trace_data.TraceDataBuilder()
+ builder.AddEventsTo(trace_data.INSPECTOR_TRACE_PART, events)
+ return model.TimelineModel(builder.AsData(), shift_world_to_zero=False)
diff --git a/chromium/tools/chrome_proxy/common/inspector_network_unittest.py b/chromium/tools/chrome_proxy/common/inspector_network_unittest.py
new file mode 100644
index 00000000000..cb5a72c57f0
--- /dev/null
+++ b/chromium/tools/chrome_proxy/common/inspector_network_unittest.py
@@ -0,0 +1,126 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+import urlparse
+
+from common import inspector_network
+from common import network_metrics
+from telemetry import decorators
+from telemetry.testing import tab_test_case
+
+
+class InspectorNetworkTabTest(tab_test_case.TabTestCase):
+ class TestCase(object):
+ def __init__(self, responses_count=0,
+ subresources=None):
+ # Number of responses expected for this case.
+ self.responses_count = responses_count
+ # List of subresource links for this case.
+ self.subresources = subresources
+
+ def __init__(self, *args):
+ super(InspectorNetworkTabTest, self).__init__(*args)
+
+ def _FilterFaviconEvents(self, events):
+ for event in events:
+ if 'favicon.ico' in event.args['response']['url']:
+ events.remove(event)
+ return events
+
+ def _NavigateAndGetHTTPResponseEvents(self, page):
+ network = inspector_network.InspectorNetwork(
+ self._tab._inspector_backend._websocket)
+ network.timeline_recorder.Start()
+ self.Navigate(page)
+ timeline_model = network.timeline_recorder.Stop()
+ self.assertTrue(timeline_model)
+ return timeline_model.GetAllEventsOfName('HTTPResponse')
+
+ def testHTTPResponseTimelineRecorder(self):
+ tests = {
+ 'blank.html': InspectorNetworkTabTest.TestCase(responses_count=1),
+ 'green_rect.html': InspectorNetworkTabTest.TestCase(responses_count=1),
+ 'image_decoding.html': InspectorNetworkTabTest.TestCase(
+ responses_count=2, subresources=['image.png']),
+ }
+ for page, test in tests.iteritems():
+ events = self._FilterFaviconEvents(
+ self._NavigateAndGetHTTPResponseEvents(page))
+ self.assertEqual(test.responses_count, len(events))
+
+ # Verify required event fields
+ for event in events:
+ self.assertEqual('HTTPResponse', event.name)
+ resp = inspector_network.InspectorNetworkResponseData.FromTimelineEvent(
+ event)
+ self.assertLess(0.0, resp.timestamp)
+
+ self.assertTrue(resp.headers)
+ self.assertTrue(resp.headers['Content-Length'])
+ body, base64_encoded = resp.GetBody()
+
+ link = resp.url[resp.url.rfind('/') + 1 :]
+ self.assertTrue(link == page or link in test.subresources)
+ if link == page:
+ self.assertEqual(resp.GetHeader('Content-Type'), 'text/html')
+ self.assertTrue('<!DOCTYPE HTML>' in body)
+ self.assertFalse(base64_encoded)
+ self.assertEqual('other', resp.initiator['type'])
+ else:
+ # We know this is the only subresource type in our setup.
+ self.assertEqual(resp.GetHeader('Content-Type'), 'image/png')
+ self.assertFalse('<!DOCTYPE HTML>' in body)
+ self.assertTrue(base64_encoded)
+ self.assertEqual('parser', resp.initiator['type'])
+
+ def testNetworkTiming(self):
+ test = (
+ 'image_decoding.html', InspectorNetworkTabTest.TestCase(
+ responses_count=2, subresources=['image.png'])
+ )
+
+ network = inspector_network.InspectorNetwork(
+ self._tab._inspector_backend._websocket)
+ network.StartMonitoringNetwork()
+ self.Navigate(test[0])
+ network.StopMonitoringNetwork()
+ response_data = network.GetResponseData()
+ path_to_response = {urlparse.urlparse(r.url).path: r
+ for r in response_data}
+ self.assertTrue('/image_decoding.html' in path_to_response)
+ self.assertTrue(
+ path_to_response['/image_decoding.html'].timing['requestTime'] > 0)
+ self.assertTrue(
+ path_to_response['/image_decoding.html'].timing['loadingFinished'] > 0)
+ self.assertTrue('/image.png' in path_to_response)
+ self.assertTrue(
+ path_to_response['/image.png'].timing['requestTime'] > 0)
+ self.assertTrue(
+ path_to_response['/image.png'].timing['loadingFinished'] > 0)
+
+ # Flaky on many platforms (at least Win, Linux, and Mac).
+ # http://crbug.com/424706
+ @decorators.Disabled('all')
+ def testCacheableHTTPResponse(self):
+    # We know this page has one PNG image and it's cacheable.
+ events = self._NavigateAndGetHTTPResponseEvents('image_decoding.html')
+ images_first = []
+ for event in events:
+ resp = inspector_network.InspectorNetworkResponseData.FromTimelineEvent(
+ event)
+ if resp.GetHeader('Content-Type') == 'image/png':
+ images_first.append(resp)
+
+ self.assertEqual(1, len(images_first))
+ self.assertFalse(images_first[0].served_from_cache)
+
+ events = self._NavigateAndGetHTTPResponseEvents('image_decoding.html')
+ images_second = []
+ for event in events:
+ resp = inspector_network.InspectorNetworkResponseData.FromTimelineEvent(
+ event)
+ if resp.GetHeader('Content-Type') == 'image/png':
+ images_second.append(resp)
+ self.assertEqual(1, len(images_second))
+ # On the second fetch, the image is served from cache.
+ self.assertTrue(images_second[0].served_from_cache)
diff --git a/chromium/tools/chrome_proxy/common/network_metrics.py b/chromium/tools/chrome_proxy/common/network_metrics.py
new file mode 100644
index 00000000000..9699456db5a
--- /dev/null
+++ b/chromium/tools/chrome_proxy/common/network_metrics.py
@@ -0,0 +1,219 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import gzip
+import hashlib
+import io
+import logging
+import os
+import sys
+import zlib
+
+from common import inspector_network
+from telemetry.timeline import model
+
+sys.path.append(
+ os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, 'perf'))
+from metrics import Metric
+
+from telemetry.page import page_test
+# All network metrics are Chrome only for now.
+from telemetry.value import scalar
+
+
+class NetworkMetricException(page_test.MeasurementFailure):
+ pass
+
+
+class HTTPResponse(object):
+ """ Represents an HTTP response from a timeline event."""
+ def __init__(self, event):
+ self._response = (
+ inspector_network.InspectorNetworkResponseData.FromTimelineEvent(event))
+ self._remote_port = None
+ if 'response' in event.args and 'remotePort' in event.args['response']:
+ self._remote_port = event.args['response']['remotePort']
+ self._content_length = None
+
+ @property
+ def response(self):
+ return self._response
+
+ @property
+ def remote_port(self):
+ return self._remote_port
+
+ @property
+ def url_signature(self):
+ return hashlib.md5(self.response.url).hexdigest()
+
+ @property
+ def content_length(self):
+ if self._content_length is None:
+ self._content_length = self.GetContentLength()
+ return self._content_length
+
+ @property
+ def has_original_content_length(self):
+ return 'X-Original-Content-Length' in self.response.headers
+
+ @property
+ def original_content_length(self):
+ if self.has_original_content_length:
+ return int(self.response.GetHeader('X-Original-Content-Length'))
+ return 0
+
+ @property
+ def data_saving_rate(self):
+ if (self.response.served_from_cache or
+ not self.has_original_content_length or
+ self.original_content_length <= 0):
+ return 0.0
+ return (float(self.original_content_length - self.content_length) /
+ self.original_content_length)
+
+ def GetContentLengthFromBody(self):
+ resp = self.response
+ body, base64_encoded = resp.GetBody()
+ if not body:
+ return 0
+ # The binary data like images, etc is base64_encoded. Decode it to get
+  # the actual content length.
+ if base64_encoded:
+ decoded = base64.b64decode(body)
+ return len(decoded)
+
+ encoding = resp.GetHeader('Content-Encoding')
+ if not encoding:
+ return len(body)
+ # The response body returned from a timeline event is always decompressed.
+ # So, we need to compress it to get the actual content length if headers
+ # say so.
+ encoding = encoding.lower()
+ if encoding == 'gzip':
+ return self.GetGizppedBodyLength(body)
+ elif encoding == 'deflate':
+ return len(zlib.compress(body, 9))
+ else:
+ raise NetworkMetricException, (
+ 'Unknown Content-Encoding %s for %s' % (encoding, resp.url))
+
+ def GetContentLength(self):
+ cl = 0
+ try:
+ cl = self.GetContentLengthFromBody()
+ except Exception, e:
+ logging.warning('Fail to get content length for %s from body: %s',
+ self.response.url[:100], e)
+ if cl == 0:
+ resp = self.response
+ cl_header = resp.GetHeader('Content-Length')
+ if cl_header:
+ cl = int(cl_header)
+ else:
+ body, _ = resp.GetBody()
+ if body:
+ cl = len(body)
+ return cl
+
+ @staticmethod
+ def GetGizppedBodyLength(body):
+ if not body:
+ return 0
+ bio = io.BytesIO()
+ try:
+ with gzip.GzipFile(fileobj=bio, mode="wb", compresslevel=9) as f:
+ f.write(body.encode('utf-8'))
+ except Exception, e:
+ logging.warning('Fail to gzip response body: %s', e)
+ raise e
+ return len(bio.getvalue())
+
+
+class NetworkMetric(Metric):
+ """A network metric based on timeline events."""
+
+ def __init__(self):
+ super(NetworkMetric, self).__init__()
+
+ # Whether to add detailed result for each sub-resource in a page.
+ self.add_result_for_resource = False
+ self.compute_data_saving = False
+ self._events = None
+
+ def Start(self, page, tab):
+ self._events = None
+ network = inspector_network.InspectorNetwork(
+ tab._inspector_backend._websocket)
+ self._timeline_recorder = network.timeline_recorder
+ self._timeline_recorder.Start()
+
+ def Stop(self, page, tab):
+ assert self._events is None
+ self._timeline_model = self._timeline_recorder.Stop()
+
+ def IterResponses(self, tab):
+ if self._events is None:
+ if self._timeline_model is None:
+ return
+ self._events = self._timeline_model.GetAllEventsOfName('HTTPResponse')
+ if len(self._events) == 0:
+ return
+ for e in self._events:
+ yield self.ResponseFromEvent(e)
+
+ def ResponseFromEvent(self, event):
+ return HTTPResponse(event)
+
+ def AddResults(self, tab, results):
+ content_length = 0
+ original_content_length = 0
+
+ for resp in self.IterResponses(tab):
+ # Ignore content length calculation for cache hit.
+ if resp.response.served_from_cache:
+ continue
+
+ resource = resp.response.url
+ resource_signature = resp.url_signature
+ cl = resp.content_length
+ if resp.has_original_content_length:
+ ocl = resp.original_content_length
+ if ocl < cl:
+ logging.warning('original content length (%d) is less than content '
+ 'length (%d) for resource %s', ocl, cl, resource)
+ if self.add_result_for_resource:
+ results.AddValue(scalar.ScalarValue(
+ results.current_page,
+ 'resource_data_saving_' + resource_signature, 'percent',
+ resp.data_saving_rate * 100))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page,
+ 'resource_original_content_length_' + resource_signature, 'bytes',
+ ocl))
+ original_content_length += ocl
+ else:
+ original_content_length += cl
+ if self.add_result_for_resource:
+ results.AddValue(scalar.ScalarValue(
+ results.current_page,
+ 'resource_content_length_' + resource_signature, 'bytes', cl))
+ content_length += cl
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'content_length', 'bytes', content_length))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'original_content_length', 'bytes',
+ original_content_length))
+ if self.compute_data_saving:
+ if (original_content_length > 0 and
+ original_content_length >= content_length):
+ saving = (float(original_content_length-content_length) * 100 /
+ original_content_length)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'data_saving', 'percent', saving))
+ else:
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'data_saving', 'percent', 0.0))
diff --git a/chromium/tools/chrome_proxy/common/network_metrics_unittest.py b/chromium/tools/chrome_proxy/common/network_metrics_unittest.py
new file mode 100644
index 00000000000..fe1e48f035f
--- /dev/null
+++ b/chromium/tools/chrome_proxy/common/network_metrics_unittest.py
@@ -0,0 +1,176 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import unittest
+
+from common import network_metrics
+from telemetry.testing import test_page_test_results
+from telemetry.timeline import event
+
+
+HTML_BODY = """<!DOCTYPE HTML>
+ <html>
+ <head> </head>
+ <body>
+ <div id="test"> TEST HTML</div>
+ </body>
+ </html>"""
+IMAGE_BODY = """fake image data"""
+GZIPPED_HTML_LEN = network_metrics.HTTPResponse.GetGizppedBodyLength(HTML_BODY)
+# Make up original content length for the image.
+IMAGE_OCL = 3 * len(IMAGE_BODY)
+
+
+class NetworkMetricTest(unittest.TestCase):
+ @staticmethod
+ def MakeNetworkTimelineEvent(
+ url, response_headers, body=None, base64_encoded_body=False,
+ served_from_cache=False, request_headers=None, status=200,
+ remote_port=None):
+ if not request_headers:
+ request_headers = {}
+ e = event.TimelineEvent('network', 'HTTPResponse', 0, 0)
+ e.args = {}
+ e.args['requestId'] = 0
+ e.args['response'] = {
+ 'status': status,
+ 'url': url,
+ 'headers': response_headers,
+ 'requestHeaders': request_headers,
+ 'remotePort': remote_port,
+ }
+ e.args['body'] = body
+ e.args['base64_encoded_body'] = base64_encoded_body
+ e.args['served_from_cache'] = served_from_cache
+ return e
+
+ def testHTTPResponse(self):
+ url = 'http://test.url'
+ self.assertLess(GZIPPED_HTML_LEN, len(HTML_BODY))
+
+ # A plain text HTML response
+ resp = network_metrics.HTTPResponse(self.MakeNetworkTimelineEvent(
+ url=url,
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Length': str(len(HTML_BODY)),
+ },
+ body=HTML_BODY))
+ self.assertEqual(url, resp.response.url)
+ body, base64_encoded = resp.response.GetBody()
+ self.assertEqual(HTML_BODY, body)
+ self.assertFalse(base64_encoded)
+ self.assertEqual('text/html', resp.response.GetHeader('Content-Type'))
+
+ self.assertEqual(len(HTML_BODY), resp.content_length)
+ self.assertEqual(None, resp.response.GetHeader('Content-Encoding'))
+ self.assertFalse(resp.has_original_content_length)
+ self.assertEqual(0.0, resp.data_saving_rate)
+
+ # A gzipped HTML response
+ resp = network_metrics.HTTPResponse(self.MakeNetworkTimelineEvent(
+ url=url,
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(len(HTML_BODY)),
+ },
+ body=HTML_BODY))
+ body, base64_encoded = resp.response.GetBody()
+ self.assertFalse(base64_encoded)
+ self.assertEqual(GZIPPED_HTML_LEN, resp.content_length)
+ self.assertEqual('gzip', resp.response.GetHeader('Content-Encoding'))
+ self.assertTrue(resp.has_original_content_length)
+ self.assertEqual(len(HTML_BODY), resp.original_content_length)
+ self.assertEqual(
+ float(len(HTML_BODY) - GZIPPED_HTML_LEN) / len(HTML_BODY),
+ resp.data_saving_rate)
+
+ # A JPEG image response.
+ resp = network_metrics.HTTPResponse(self.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(IMAGE_OCL),
+ },
+ body=base64.b64encode(IMAGE_BODY),
+ base64_encoded_body=True))
+ body, base64_encoded = resp.response.GetBody()
+ self.assertTrue(base64_encoded)
+ self.assertEqual(IMAGE_BODY, base64.b64decode(body))
+ self.assertEqual(len(IMAGE_BODY), resp.content_length)
+ self.assertTrue(resp.has_original_content_length)
+ self.assertEqual(IMAGE_OCL, resp.original_content_length)
+ self.assertFalse(resp.response.served_from_cache)
+ self.assertEqual(float(IMAGE_OCL - len(IMAGE_BODY)) / IMAGE_OCL,
+ resp.data_saving_rate)
+
+ # A JPEG image response from cache.
+ resp = network_metrics.HTTPResponse(self.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(IMAGE_OCL),
+ },
+ body=base64.b64encode(IMAGE_BODY),
+ base64_encoded_body=True,
+ served_from_cache=True))
+ self.assertEqual(len(IMAGE_BODY), resp.content_length)
+ self.assertTrue(resp.has_original_content_length)
+ self.assertEqual(IMAGE_OCL, resp.original_content_length)
+ # Cached resource has zero saving.
+ self.assertTrue(resp.response.served_from_cache)
+ self.assertEqual(0.0, resp.data_saving_rate)
+
+ def testNetworkMetricResults(self):
+ events = [
+ # A plain text HTML.
+ self.MakeNetworkTimelineEvent(
+ url='http://test.html1',
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Length': str(len(HTML_BODY)),
+ },
+ body=HTML_BODY),
+ # A compressed HTML.
+ self.MakeNetworkTimelineEvent(
+ url='http://test.html2',
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(len(HTML_BODY)),
+ },
+ body=HTML_BODY),
+ # A base64 encoded image.
+ self.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(IMAGE_OCL),
+ },
+ body=base64.b64encode(IMAGE_BODY),
+ base64_encoded_body=True),
+ ]
+ metric = network_metrics.NetworkMetric()
+ metric._events = events
+ metric.compute_data_saving = True
+
+ self.assertTrue(len(events), len(list(metric.IterResponses(None))))
+ results = test_page_test_results.TestPageTestResults(self)
+ metric.AddResults(None, results)
+
+ cl = len(HTML_BODY) + GZIPPED_HTML_LEN + len(IMAGE_BODY)
+ results.AssertHasPageSpecificScalarValue('content_length', 'bytes', cl)
+
+ ocl = len(HTML_BODY) + len(HTML_BODY) + IMAGE_OCL
+ results.AssertHasPageSpecificScalarValue(
+ 'original_content_length', 'bytes', ocl)
+
+ saving_percent = float(ocl - cl) * 100/ ocl
+ results.AssertHasPageSpecificScalarValue(
+ 'data_saving', 'percent', saving_percent)
diff --git a/chromium/tools/chrome_proxy/integration_tests/__init__.py b/chromium/tools/chrome_proxy/integration_tests/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/__init__.py
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py
new file mode 100644
index 00000000000..8129f4dc81e
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_benchmark.py
@@ -0,0 +1,288 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from common.chrome_proxy_benchmark import ChromeProxyBenchmark
+from integration_tests import chrome_proxy_measurements as measurements
+from integration_tests import chrome_proxy_pagesets as pagesets
+from telemetry import benchmark
+
+DESKTOP_PLATFORMS = ['mac', 'linux', 'win', 'chromeos']
+WEBVIEW_PLATFORMS = ['android-webview', 'android-webview-shell']
+
+class ChromeProxyClientType(ChromeProxyBenchmark):
+ tag = 'client_type'
+ test = measurements.ChromeProxyClientType
+ page_set = pagesets.ClientTypeStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.client_type.client_type'
+
+
+@benchmark.Disabled(*WEBVIEW_PLATFORMS)
+class ChromeProxyLoFi(ChromeProxyBenchmark):
+ tag = 'lo_fi'
+ test = measurements.ChromeProxyLoFi
+ page_set = pagesets.LoFiStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.lo_fi.lo_fi'
+
+
+@benchmark.Disabled(*WEBVIEW_PLATFORMS)
+class ChromeProxyPreviewLoFi(ChromeProxyBenchmark):
+ tag = 'lo_fi_preview'
+ test = measurements.ChromeProxyLoFiPreview
+ page_set = pagesets.LoFiPreviewStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.lo_fi_preview.lo_fi_preview'
+
+
+class ChromeProxyExpDirective(ChromeProxyBenchmark):
+ tag = 'exp_directive'
+ test = measurements.ChromeProxyExpDirective
+ page_set = pagesets.ExpDirectiveStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.exp_directive.exp_directive'
+
+
+class ChromeProxyPassThrough(ChromeProxyBenchmark):
+ tag = 'pass_through'
+ test = measurements.ChromeProxyPassThrough
+ page_set = pagesets.PassThroughStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.pass_through.pass_through'
+
+
+class ChromeProxyBypass(ChromeProxyBenchmark):
+ tag = 'bypass'
+ test = measurements.ChromeProxyBypass
+ page_set = pagesets.BypassStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.bypass.bypass'
+
+
+class ChromeProxyHTTPSBypass(ChromeProxyBenchmark):
+ tag = 'https_bypass'
+ test = measurements.ChromeProxyHTTPSBypass
+ page_set = pagesets.HTTPSBypassStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.https_bypass.https_bypass'
+
+
+class ChromeProxyHTML5Test(ChromeProxyBenchmark):
+ tag = 'html5test'
+ test = measurements.ChromeProxyHTML5Test
+ page_set = pagesets.HTML5TestStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.html5test.html5test'
+
+
+@benchmark.Enabled(*DESKTOP_PLATFORMS)
+class ChromeProxyYouTube(ChromeProxyBenchmark):
+ tag = 'youtube'
+ test = measurements.ChromeProxyYouTube
+ page_set = pagesets.YouTubeStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.youtube.youtube'
+
+
+class ChromeProxyCorsBypass(ChromeProxyBenchmark):
+ tag = 'bypass'
+ test = measurements.ChromeProxyCorsBypass
+ page_set = pagesets.CorsBypassStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.bypass.corsbypass'
+
+
+class ChromeProxyBlockOnce(ChromeProxyBenchmark):
+ tag = 'block_once'
+ test = measurements.ChromeProxyBlockOnce
+ page_set = pagesets.BlockOnceStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.block_once.block_once'
+
+
+@benchmark.Disabled(*(DESKTOP_PLATFORMS + WEBVIEW_PLATFORMS))
+# Safebrowsing is enabled for Android and iOS.
+class ChromeProxySafeBrowsingOn(ChromeProxyBenchmark):
+ tag = 'safebrowsing_on'
+ test = measurements.ChromeProxySafebrowsingOn
+
+ # Override CreateStorySet so that we can instantiate SafebrowsingStorySet
+ # with a non default param.
+ def CreateStorySet(self, options):
+ del options # unused
+ return pagesets.SafebrowsingStorySet(expect_timeout=True)
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.safebrowsing_on.safebrowsing'
+
+
+@benchmark.Enabled(*(DESKTOP_PLATFORMS + WEBVIEW_PLATFORMS))
+# Safebrowsing is switched off for Android Webview and all desktop platforms.
+class ChromeProxySafeBrowsingOff(ChromeProxyBenchmark):
+ tag = 'safebrowsing_off'
+ test = measurements.ChromeProxySafebrowsingOff
+ page_set = pagesets.SafebrowsingStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.safebrowsing_off.safebrowsing'
+
+
+class ChromeProxyHTTPFallbackProbeURL(ChromeProxyBenchmark):
+ tag = 'fallback_probe'
+ test = measurements.ChromeProxyHTTPFallbackProbeURL
+ page_set = pagesets.SyntheticStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.fallback_probe.synthetic'
+
+
+class ChromeProxyHTTPFallbackViaHeader(ChromeProxyBenchmark):
+ tag = 'fallback_viaheader'
+ test = measurements.ChromeProxyHTTPFallbackViaHeader
+ page_set = pagesets.FallbackViaHeaderStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.fallback_viaheader.fallback_viaheader'
+
+
+class ChromeProxyHTTPToDirectFallback(ChromeProxyBenchmark):
+ tag = 'http_to_direct_fallback'
+ test = measurements.ChromeProxyHTTPToDirectFallback
+ page_set = pagesets.HTTPToDirectFallbackStorySet
+
+ @classmethod
+ def Name(cls):
+ return ('chrome_proxy_benchmark.http_to_direct_fallback.'
+ 'http_to_direct_fallback')
+
+
+class ChromeProxyReenableAfterBypass(ChromeProxyBenchmark):
+ tag = 'reenable_after_bypass'
+ test = measurements.ChromeProxyReenableAfterBypass
+ page_set = pagesets.ReenableAfterBypassStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.reenable_after_bypass.reenable_after_bypass'
+
+
+class ChromeProxyReenableAfterSetBypass(ChromeProxyBenchmark):
+ tag = 'reenable_after_set_bypass'
+ test = measurements.ChromeProxyReenableAfterSetBypass
+ page_set = pagesets.ReenableAfterSetBypassStorySet
+
+ @classmethod
+ def Name(cls):
+ return ('chrome_proxy_benchmark.reenable_after_set_bypass' +
+ '.reenable_after_set_bypass')
+
+
+class ChromeProxySmoke(ChromeProxyBenchmark):
+ tag = 'smoke'
+ test = measurements.ChromeProxySmoke
+ page_set = pagesets.SmokeStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.smoke.smoke'
+
+
+class ChromeProxyClientConfig(ChromeProxyBenchmark):
+ tag = 'client_config'
+ test = measurements.ChromeProxyClientConfig
+ page_set = pagesets.SyntheticStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.client_config.synthetic'
+
+
+@benchmark.Enabled(*DESKTOP_PLATFORMS)
+class ChromeProxyVideoDirect(benchmark.Benchmark):
+ tag = 'video'
+ test = measurements.ChromeProxyVideoValidation
+ page_set = pagesets.VideoDirectStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.video.direct'
+
+
+@benchmark.Enabled(*DESKTOP_PLATFORMS)
+class ChromeProxyVideoProxied(benchmark.Benchmark):
+ tag = 'video'
+ test = measurements.ChromeProxyVideoValidation
+ page_set = pagesets.VideoProxiedStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.video.proxied'
+
+
+@benchmark.Enabled(*DESKTOP_PLATFORMS)
+class ChromeProxyVideoCompare(benchmark.Benchmark):
+ """Comparison of direct and proxied video fetches.
+
+ This benchmark runs the ChromeProxyVideoDirect and ChromeProxyVideoProxied
+ benchmarks, then compares their results.
+ """
+
+ tag = 'video'
+ test = measurements.ChromeProxyVideoValidation
+ page_set = pagesets.VideoCompareStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.video.compare'
+
+@benchmark.Enabled(*DESKTOP_PLATFORMS)
+class ChromeProxyVideoFrames(benchmark.Benchmark):
+ """Check for video frames similar to original video."""
+
+ tag = 'video'
+ test = measurements.ChromeProxyInstrumentedVideoValidation
+ page_set = pagesets.VideoFrameStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.video.frames'
+
+@benchmark.Enabled(*DESKTOP_PLATFORMS)
+class ChromeProxyVideoAudio(benchmark.Benchmark):
+ """Check that audio is similar to original video."""
+
+ tag = 'video'
+ test = measurements.ChromeProxyInstrumentedVideoValidation
+ page_set = pagesets.VideoAudioStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.video.audio'
+
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
new file mode 100644
index 00000000000..2af7ec70b37
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
@@ -0,0 +1,563 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import logging
+import urlparse
+
+from common import chrome_proxy_measurements as measurements
+from common.chrome_proxy_measurements import ChromeProxyValidation
+from integration_tests import chrome_proxy_metrics as metrics
+from metrics import loading
+from telemetry.core import exceptions
+from telemetry.page import page_test
+
+
class ChromeProxyDataSaving(page_test.PageTest):
  """Measures how much data the Chrome proxy saves on a page load."""

  def __init__(self, *args, **kwargs):
    super(ChromeProxyDataSaving, self).__init__(*args, **kwargs)
    self._enable_proxy = True
    self._metrics = metrics.ChromeProxyMetric()

  def CustomizeBrowserOptions(self, options):
    if not self._enable_proxy:
      return
    options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')

  def WillNavigateToPage(self, page, tab):
    if self._enable_proxy:
      measurements.WaitForViaHeader(tab)
    # Always load with a cold cache so byte counts are comparable.
    tab.ClearCache(force=True)
    self._metrics.Start(page, tab)

  def ValidateAndMeasurePage(self, page, tab, results):
    # Block until the page's load event has fired before collecting metrics.
    tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
    self._metrics.Stop(page, tab)
    self._metrics.AddResultsForDataSaving(tab, results)
+
+
class ChromeProxyHeaders(ChromeProxyValidation):
  """Checks that proxied responses carry valid proxy response headers."""

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyHeaders, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def AddResults(self, tab, results):
    # Header validation lives on the shared ChromeProxyMetric instance.
    self._metrics.AddResultsForHeaderValidation(tab, results)
+
+
class ChromeProxyBypass(ChromeProxyValidation):
  """Checks that responses which should bypass the proxy actually do."""

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyBypass, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def AddResults(self, tab, results):
    self._metrics.AddResultsForBypass(tab, results)
+
+
class ChromeProxyHTTPSBypass(ChromeProxyValidation):
  """Checks that HTTPS responses are not served through the proxy."""

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyHTTPSBypass, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def AddResults(self, tab, results):
    self._metrics.AddResultsForHTTPSBypass(tab, results)
+
+
class ChromeProxyYouTube(ChromeProxyValidation):
  """Correctness measurement for youtube video playback."""

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyYouTube, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def AddResults(self, tab, results):
    self._metrics.AddResultsForYouTube(tab, results)
+
+
class ChromeProxyHTML5Test(ChromeProxyValidation):
  """Correctness measurement for the html5test page."""

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyHTML5Test, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def AddResults(self, tab, results):
    self._metrics.AddResultsForHTML5Test(tab, results)
+
+
class ChromeProxyCorsBypass(ChromeProxyValidation):
  """Correctness measurement for bypass responses for CORS requests."""

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyCorsBypass, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def ValidateAndMeasurePage(self, page, tab, results):
    # The test page sets window.xhrRequestCompleted to true when the XHR
    # fetch finishes; wait for that before validating as usual.
    tab.WaitForJavaScriptExpression('window.xhrRequestCompleted', 300)
    super(ChromeProxyCorsBypass, self).ValidateAndMeasurePage(
        page, tab, results)

  def AddResults(self, tab, results):
    self._metrics.AddResultsForCorsBypass(tab, results)
+
+
class ChromeProxyBlockOnce(ChromeProxyValidation):
  """Correctness measurement for block-once responses."""

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyBlockOnce, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def AddResults(self, tab, results):
    self._metrics.AddResultsForBlockOnce(tab, results)
+
+
class ChromeProxySafebrowsingOn(ChromeProxyValidation):
  """Correctness measurement for safebrowsing (warning enabled)."""

  def __init__(self):
    # Note: no restart_after_each_page here, unlike most validations.
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxySafebrowsingOn, self).__init__(metrics=proxy_metric)

  def AddResults(self, tab, results):
    self._metrics.AddResultsForSafebrowsingOn(tab, results)
+
class ChromeProxySafebrowsingOff(ChromeProxyValidation):
  """Correctness measurement for safebrowsing (warning not shown)."""

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxySafebrowsingOff, self).__init__(metrics=proxy_metric)

  def AddResults(self, tab, results):
    self._metrics.AddResultsForSafebrowsingOff(tab, results)
+
+_FAKE_PROXY_AUTH_VALUE = 'aabbccdd3b7579186c1b0620614fdb1f0000ffff'
+_TEST_SERVER = 'chromeproxy-test.appspot.com'
+_TEST_SERVER_DEFAULT_URL = 'http://' + _TEST_SERVER + '/default'
+
+
+# We rely on the chromeproxy-test server to facilitate some of the tests.
+# The test server code is at <TBD location> and runs at _TEST_SERVER
+#
+# The test server allow request to override response status, headers, and
+# body through query parameters. See GetResponseOverrideURL.
+def GetResponseOverrideURL(url=_TEST_SERVER_DEFAULT_URL, respStatus=0,
+ respHeader="", respBody=""):
+ """ Compose the request URL with query parameters to override
+ the chromeproxy-test server response.
+ """
+
+ queries = []
+ if respStatus > 0:
+ queries.append('respStatus=%d' % respStatus)
+ if respHeader:
+ queries.append('respHeader=%s' % base64.b64encode(respHeader))
+ if respBody:
+ queries.append('respBody=%s' % base64.b64encode(respBody))
+ if len(queries) == 0:
+ return url
+ "&".join(queries)
+ # url has query already
+ if urlparse.urlparse(url).query:
+ return url + '&' + "&".join(queries)
+ else:
+ return url + '?' + "&".join(queries)
+
+
class ChromeProxyHTTPFallbackProbeURL(ChromeProxyValidation):
  """Correctness measurement for proxy fallback.

  In this test, the probe URL does not return 'OK'. Chrome is expected
  to use the fallback proxy.
  """

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyHTTPFallbackProbeURL, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def CustomizeBrowserOptions(self, options):
    super(ChromeProxyHTTPFallbackProbeURL,
          self).CustomizeBrowserOptions(options)
    # Point the secure proxy check at the google.com favicon: its body is
    # not "OK", so the check is treated as failed. The favicon is used
    # because it loads reliably fast, whereas chromeproxy-test.appspot.com
    # has been slow enough to make tests flake.
    options.AppendExtraBrowserArgs(
        '--data-reduction-proxy-secure-proxy-check-url='
        'http://www.google.com/favicon.ico')

  def AddResults(self, tab, results):
    self._metrics.AddResultsForHTTPFallback(tab, results)
+
+
class ChromeProxyHTTPFallbackViaHeader(ChromeProxyValidation):
  """Correctness measurement for proxy fallback.

  In this test, the configured proxy is the chromeproxy-test server which
  will send back a response without the expected Via header. Chrome is
  expected to use the fallback proxy and add the configured proxy to the
  bad proxy list.
  """

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyHTTPFallbackViaHeader, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def CustomizeBrowserOptions(self, options):
    super(ChromeProxyHTTPFallbackViaHeader,
          self).CustomizeBrowserOptions(options)
    options.AppendExtraBrowserArgs('--ignore-certificate-errors')
    # Make the test server the primary Data Reduction Proxy. The test
    # doesn't know whether Chrome configures the DRP through the Data Saver
    # API or not, so both flags are set.
    options.AppendExtraBrowserArgs(
        '--spdy-proxy-auth-origin=http://%s' % _TEST_SERVER)
    options.AppendExtraBrowserArgs(
        '--data-reduction-proxy-http-proxies='
        'http://%s;http://compress.googlezip.net' % _TEST_SERVER)

  def AddResults(self, tab, results):
    self._metrics.AddResultsForHTTPFallback(tab, results)
+
+
class ChromeProxyClientType(ChromeProxyValidation):
  """Correctness measurement for Chrome-Proxy header client type directives."""

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyClientType, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)
    self._chrome_proxy_client_type = None

  def AddResults(self, tab, results):
    # Remember the client type seen on the first page of the set; later
    # pages use it to decide which requests should have been bypassed.
    if not self._chrome_proxy_client_type:
      detected_type = self._metrics.GetClientTypeFromRequests(tab)
      if detected_type:
        self._chrome_proxy_client_type = detected_type

    self._metrics.AddResultsForClientType(
        tab, results, self._chrome_proxy_client_type,
        self._page.bypass_for_client_type)
+
+
class ChromeProxyLoFi(ChromeProxyValidation):
  """Correctness measurement for Lo-Fi in Chrome-Proxy header."""

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyLoFi, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def CustomizeBrowserOptions(self, options):
    super(ChromeProxyLoFi, self).CustomizeBrowserOptions(options)
    # Force Lo-Fi on regardless of network conditions.
    options.AppendExtraBrowserArgs('--data-reduction-proxy-lo-fi=always-on')

  def AddResults(self, tab, results):
    self._metrics.AddResultsForLoFi(tab, results)
+
class ChromeProxyLoFiPreview(ChromeProxyValidation):
  """Correctness measurement for Lo-Fi preview in Chrome-Proxy header."""

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyLoFiPreview, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def CustomizeBrowserOptions(self, options):
    super(ChromeProxyLoFiPreview, self).CustomizeBrowserOptions(options)
    # Force Lo-Fi on, and additionally enable the preview variant.
    options.AppendExtraBrowserArgs(
        '--data-reduction-proxy-lo-fi=always-on')
    options.AppendExtraBrowserArgs(
        '--enable-data-reduction-proxy-lo-fi-preview')

  def AddResults(self, tab, results):
    self._metrics.AddResultsForLoFiPreview(tab, results)
+
class ChromeProxyExpDirective(ChromeProxyValidation):
  """Correctness measurement for experiment directives in Chrome-Proxy header.

  This test verifies that "exp=test" in the Chrome-Proxy request header
  causes a bypass on the experiment test page.
  """

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyExpDirective, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def CustomizeBrowserOptions(self, options):
    super(ChromeProxyExpDirective, self).CustomizeBrowserOptions(options)
    # Inject the "exp=test" experiment directive into requests.
    options.AppendExtraBrowserArgs('--data-reduction-proxy-experiment=test')

  def AddResults(self, tab, results):
    # Only URLs under /exp/ are expected to be bypassed.
    self._metrics.AddResultsForBypass(tab, results, url_pattern='/exp/')
+
class ChromeProxyPassThrough(ChromeProxyValidation):
  """Correctness measurement for Chrome-Proxy pass-through directives.

  This test verifies that "pass-through" in the Chrome-Proxy request header
  causes a resource to be loaded without Data Reduction Proxy transformations.
  """

  def __init__(self):
    super(ChromeProxyPassThrough, self).__init__(
        restart_after_each_page=True,
        metrics=metrics.ChromeProxyMetric())

  # The previous CustomizeBrowserOptions override only called super() and
  # added nothing; it has been removed, so the inherited implementation is
  # used directly (identical behavior).

  def AddResults(self, tab, results):
    self._metrics.AddResultsForPassThrough(tab, results)
+
class ChromeProxyHTTPToDirectFallback(ChromeProxyValidation):
  """Correctness measurement for HTTP proxy fallback to direct."""

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyHTTPToDirectFallback, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def CustomizeBrowserOptions(self, options):
    super(ChromeProxyHTTPToDirectFallback,
          self).CustomizeBrowserOptions(options)
    # Configure an unresolvable primary proxy so the test runs over the
    # HTTP fallback proxy. The test doesn't know whether Chrome configures
    # the DRP through the Data Saver API or not, so both flags are set.
    options.AppendExtraBrowserArgs(
        '--spdy-proxy-auth-origin=http://nonexistent.googlezip.net')
    options.AppendExtraBrowserArgs(
        '--data-reduction-proxy-http-proxies='
        'http://nonexistent.googlezip.net;http://compress.googlezip.net')

  def WillNavigateToPage(self, page, tab):
    super(ChromeProxyHTTPToDirectFallback, self).WillNavigateToPage(page, tab)
    # Load one page through the nonexistent primary proxy first, forcing a
    # proxy fallback so the measured page starts on the HTTP fallback proxy.
    tab.Navigate(_TEST_SERVER_DEFAULT_URL)
    tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)

  def AddResults(self, tab, results):
    self._metrics.AddResultsForHTTPToDirectFallback(tab, results, _TEST_SERVER)
+
+
class ChromeProxyReenableAfterBypass(ChromeProxyValidation):
  """Correctness measurement for re-enabling proxies after bypasses.

  This test loads a page that causes all data reduction proxies to be bypassed
  for 1 to 5 minutes, then waits 5 minutes and verifies that the proxy is no
  longer bypassed.
  """

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyReenableAfterBypass, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def AddResults(self, tab, results):
    # The page object carries the expected bypass-duration window.
    self._metrics.AddResultsForReenableAfterBypass(
        tab, results, self._page.bypass_seconds_min,
        self._page.bypass_seconds_max)
+
+
class ChromeProxyReenableAfterSetBypass(ChromeProxyValidation):
  """Correctness test for re-enabling proxies after bypasses with set duration.

  This test loads a page that causes all data reduction proxies to be bypassed
  for 20 seconds.
  """

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyReenableAfterSetBypass, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def AddResults(self, tab, results):
    self._metrics.AddResultsForReenableAfterSetBypass(
        tab, results, self._page.BYPASS_SECONDS)
+
+
class ChromeProxySmoke(ChromeProxyValidation):
  """Smoke measurement for basic chrome proxy correctness."""

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxySmoke, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def AddResults(self, tab, results):
    """Runs every metric check registered for the current page's name."""
    # The three compression pages share the same pair of checks.
    header_and_saving_checks = [
        self._metrics.AddResultsForHeaderValidation,
        self._metrics.AddResultsForDataSaving,
    ]
    # Map a page name to its AddResults funcs.
    page_to_metrics = {
        'header validation': [self._metrics.AddResultsForHeaderValidation],
        'compression: image': header_and_saving_checks,
        'compression: javascript': header_and_saving_checks,
        'compression: css': header_and_saving_checks,
        'bypass': [self._metrics.AddResultsForBypass],
    }
    if self._page.name not in page_to_metrics:
      raise page_test.MeasurementFailure(
          'Invalid page name (%s) in smoke. Page name must be one of:\n%s' % (
              self._page.name, page_to_metrics.keys()))
    for add_result in page_to_metrics[self._page.name]:
      add_result(tab, results)
+
+
# Convenience aliases for the fetch-mode keys used by
# ChromeProxyVideoValidation._allMetrics below.
PROXIED = metrics.PROXIED
DIRECT = metrics.DIRECT
+
class ChromeProxyClientConfig(ChromeProxyValidation):
  """Chrome proxy client configuration service validation."""

  def __init__(self):
    proxy_metric = metrics.ChromeProxyMetric()
    super(ChromeProxyClientConfig, self).__init__(
        metrics=proxy_metric, restart_after_each_page=True)

  def CustomizeBrowserOptions(self, options):
    super(ChromeProxyClientConfig, self).CustomizeBrowserOptions(options)
    # Fetch the proxy configuration from the config client service.
    options.AppendExtraBrowserArgs(
        '--enable-data-reduction-proxy-config-client')

  def AddResults(self, tab, results):
    self._metrics.AddResultsForClientConfig(tab, results)
+
class ChromeProxyVideoValidation(page_test.PageTest):
  """Validation for video pages.

  Measures pages using metrics.ChromeProxyVideoMetric. Pages can be fetched
  either direct from the origin server or via the proxy. If a page is fetched
  both ways, then the PROXIED and DIRECT measurements are compared to ensure
  the same video was loaded in both cases.
  """

  def __init__(self):
    super(ChromeProxyVideoValidation, self).__init__(
        needs_browser_restart_after_each_page=True,
        clear_cache_before_each_run=True)
    # The type is _allMetrics[url][PROXIED,DIRECT][metricName] = value,
    # where (metricName,value) is a metric computed by videowrapper.js.
    self._allMetrics = {}

  def WillNavigateToPage(self, page, tab):
    if page.use_chrome_proxy:
      measurements.WaitForViaHeader(tab)
    super(ChromeProxyVideoValidation, self).WillNavigateToPage(page, tab)

  def DidNavigateToPage(self, page, tab):
    # A fresh metric object per page; stashed until validation below.
    self._currMetrics = metrics.ChromeProxyVideoMetric(tab)
    self._currMetrics.Start(page, tab)

  def ValidateAndMeasurePage(self, page, tab, results):
    assert self._currMetrics
    self._currMetrics.Stop(page, tab)
    if page.url not in self._allMetrics:
      self._allMetrics[page.url] = {}

    # Verify this page, recording its metrics under PROXIED or DIRECT.
    if page.use_chrome_proxy:
      self._currMetrics.AddResultsForProxied(tab, results)
      self._allMetrics[page.url][PROXIED] = self._currMetrics.videoMetrics
    else:
      self._currMetrics.AddResultsForDirect(tab, results)
      self._allMetrics[page.url][DIRECT] = self._currMetrics.videoMetrics
    self._currMetrics = None

    # Compare proxied and direct results for this url, if both exist.
    m = self._allMetrics[page.url]
    if PROXIED in m and DIRECT in m:
      self._CompareProxiedAndDirectMetrics(page.url, m[PROXIED], m[DIRECT])

  def _CompareProxiedAndDirectMetrics(self, url, pm, dm):
    """Compare metrics from PROXIED and DIRECT fetches.

    Compares video metrics computed by videowrapper.js for pages that were
    fetched both PROXIED and DIRECT.

    Args:
      url: The url for the page being tested.
      pm: Metrics when loaded by the Flywheel proxy.
      dm: Metrics when loaded directly from the origin server.

    Raises:
      ChromeProxyMetricException on failure.
    """
    def err(s):
      # Fixes two bugs in the original: the exception class was referenced
      # unqualified although this module never imports it (NameError), and
      # the Python-2-only `raise Exc, s` statement form was used.
      raise metrics.ChromeProxyMetricException(s)

    # Bug fix: the messages below referenced `page.url`, but there is no
    # `page` in this scope -- the parameter is `url`.
    if not pm['ready']:
      err('Proxied page did not load video: %s' % url)
    if not dm['ready']:
      err('Direct page did not load video: %s' % url)

    # Compare metrics that should match for PROXIED and DIRECT.
    for x in ('video_height', 'video_width', 'video_duration',
              'decoded_frames'):
      if x not in pm:
        err('Proxied page has no %s: %s' % (x, url))
      if x not in dm:
        err('Direct page has no %s: %s' % (x, url))
      if pm[x] != dm[x]:
        err('Mismatch for %s (proxied=%s direct=%s): %s' %
            (x, str(pm[x]), str(dm[x]), url))

    # Proxied XOCL should match direct CL.
    pxocl = pm['x_original_content_length_header']
    dcl = dm['content_length_header']
    if pxocl != dcl:
      err('Mismatch for content length (proxied=%s direct=%s): %s' %
          (str(pxocl), str(dcl), url))
+
class ChromeProxyInstrumentedVideoValidation(page_test.PageTest):
  """Tests a specially instrumented page for correct video transcoding."""

  def __init__(self):
    super(ChromeProxyInstrumentedVideoValidation, self).__init__(
        clear_cache_before_each_run=True,
        needs_browser_restart_after_each_page=True)
    self._metrics = metrics.ChromeProxyInstrumentedVideoMetric()

  def CustomizeBrowserOptions(self, options):
    # Run every page with the Data Reduction Proxy enabled.
    options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')

  def WillNavigateToPage(self, page, tab):
    measurements.WaitForViaHeader(tab)
    # Cold cache for every run, so transcoded bytes are actually fetched.
    tab.ClearCache(force=True)
    self._metrics.Start(page, tab)

  def ValidateAndMeasurePage(self, page, tab, results):
    self._metrics.Stop(page, tab)
    self._metrics.AddResults(tab, results)
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
new file mode 100644
index 00000000000..4dd4846f52e
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_metrics.py
@@ -0,0 +1,907 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import time
+
+from common import chrome_proxy_metrics
+from common import network_metrics
+from common.chrome_proxy_metrics import ChromeProxyMetricException
+from telemetry.page import page_test
+from telemetry.value import scalar
+from metrics import Metric
+
+class ChromeProxyMetric(network_metrics.NetworkMetric):
+ """A Chrome proxy timeline metric."""
+
  def __init__(self):
    super(ChromeProxyMetric, self).__init__()
    # Presumably consumed by the NetworkMetric base class to turn on
    # data-saving computation -- TODO confirm against network_metrics.
    self.compute_data_saving = True
+
  def SetEvents(self, events):
    """Used for unittest."""
    # Replaces the recorded network events with a canned list so tests can
    # drive IterResponses without a live browser.
    self._events = events
+
  def ResponseFromEvent(self, event):
    """Wraps a network event in a proxy-aware ChromeProxyResponse."""
    return chrome_proxy_metrics.ChromeProxyResponse(event)
+
  def AddResults(self, tab, results):
    # Callers must use one of the specific AddResultsFor* methods instead.
    raise NotImplementedError
+
+ def AddResultsForDataSaving(self, tab, results):
+ resources_via_proxy = 0
+ resources_from_cache = 0
+ resources_direct = 0
+
+ super(ChromeProxyMetric, self).AddResults(tab, results)
+ for resp in self.IterResponses(tab):
+ if resp.response.served_from_cache:
+ resources_from_cache += 1
+ if resp.HasChromeProxyViaHeader():
+ resources_via_proxy += 1
+ else:
+ resources_direct += 1
+
+ if resources_from_cache + resources_via_proxy + resources_direct == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one response, but zero responses were received.')
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'resources_via_proxy', 'count',
+ resources_via_proxy))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'resources_from_cache', 'count',
+ resources_from_cache))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'resources_direct', 'count', resources_direct))
+
+ def AddResultsForHeaderValidation(self, tab, results):
+ via_count = 0
+
+ for resp in self.IterResponses(tab):
+ if resp.IsValidByViaHeader():
+ via_count += 1
+ else:
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ '%s: Via header (%s) is not valid (refer=%s, status=%d)' % (
+ r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
+
+ if via_count == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one response through the proxy, but zero such '
+ 'responses were received.')
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'checked_via_header', 'count', via_count))
+
+ def AddResultsForLatency(self, tab, results):
+ # TODO(bustamante): This is a hack to workaround crbug.com/467174,
+ # once fixed just pull down window.performance.timing object and
+ # reference that everywhere.
+ load_event_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.loadEventStart')
+ navigation_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.navigationStart')
+ dom_content_loaded_event_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.domContentLoadedEventStart')
+ fetch_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.fetchStart')
+ request_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.requestStart')
+ domain_lookup_end = tab.EvaluateJavaScript(
+ 'window.performance.timing.domainLookupEnd')
+ domain_lookup_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.domainLookupStart')
+ connect_end = tab.EvaluateJavaScript(
+ 'window.performance.timing.connectEnd')
+ connect_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.connectStart')
+ response_end = tab.EvaluateJavaScript(
+ 'window.performance.timing.responseEnd')
+ response_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.responseStart')
+
+ # NavigationStart relative markers in milliseconds.
+ load_start = (float(load_event_start) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'load_start', 'ms', load_start))
+
+ dom_content_loaded_start = (
+ float(dom_content_loaded_event_start) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'dom_content_loaded_start', 'ms',
+ dom_content_loaded_start))
+
+ fetch_start = (float(fetch_start) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'fetch_start', 'ms', fetch_start,
+ important=False))
+
+ request_start = (float(request_start) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'request_start', 'ms', request_start,
+ important=False))
+
+ response_start = (float(response_start) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'response_start', 'ms', response_start,
+ important=False))
+
+ response_end = (float(response_end) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'response_end', 'ms', response_end,
+ important=False))
+
+ # Phase measurements in milliseconds.
+ domain_lookup_duration = (float(domain_lookup_end) - domain_lookup_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'domain_lookup_duration', 'ms',
+ domain_lookup_duration, important=False))
+
+ connect_duration = (float(connect_end) - connect_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'connect_duration', 'ms', connect_duration,
+ important=False))
+
+ request_duration = (float(response_start) - request_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'request_duration', 'ms', request_duration,
+ important=False))
+
+ response_duration = (float(response_end) - response_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'response_duration', 'ms', response_duration,
+ important=False))
+
+ def AddResultsForExtraViaHeader(self, tab, results, extra_via_header):
+ extra_via_count = 0
+
+ for resp in self.IterResponses(tab):
+ if resp.HasChromeProxyViaHeader():
+ if resp.HasExtraViaHeader(extra_via_header):
+ extra_via_count += 1
+ else:
+ raise ChromeProxyMetricException, (
+ '%s: Should have via header %s.' % (resp.response.url,
+ extra_via_header))
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'extra_via_header', 'count', extra_via_count))
+
+ def GetClientTypeFromRequests(self, tab):
+ """Get the Chrome-Proxy client type value from requests made in this tab.
+
+ Returns:
+ The client type value from the first request made in this tab that
+ specifies a client type in the Chrome-Proxy request header. See
+ ChromeProxyResponse.GetChromeProxyClientType for more details about the
+ Chrome-Proxy client type. Returns None if none of the requests made in
+ this tab specify a client type.
+ """
+ for resp in self.IterResponses(tab):
+ client_type = resp.GetChromeProxyClientType()
+ if client_type:
+ return client_type
+ return None
+
+ def AddResultsForClientType(self, tab, results, client_type,
+ bypass_for_client_type):
+ via_count = 0
+ bypass_count = 0
+
+ for resp in self.IterResponses(tab):
+ if resp.HasChromeProxyViaHeader():
+ via_count += 1
+ if client_type.lower() == bypass_for_client_type.lower():
+ raise ChromeProxyMetricException, (
+ '%s: Response for client of type "%s" has via header, but should '
+ 'be bypassed.' % (resp.response.url, bypass_for_client_type))
+ elif resp.ShouldHaveChromeProxyViaHeader():
+ bypass_count += 1
+ if client_type.lower() != bypass_for_client_type.lower():
+ raise ChromeProxyMetricException, (
+ '%s: Response missing via header. Only "%s" clients should '
+ 'bypass for this page, but this client is "%s".' % (
+ resp.response.url, bypass_for_client_type, client_type))
+
+ if via_count + bypass_count == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one response that was eligible to be proxied, but '
+ 'zero such responses were received.')
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'via', 'count', via_count))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'bypass', 'count', bypass_count))
+
+ def AddResultsForLoFi(self, tab, results):
+ lo_fi_request_count = 0
+ lo_fi_response_count = 0
+
+ for resp in self.IterResponses(tab):
+ if 'favicon.ico' in resp.response.url:
+ continue
+
+ if resp.HasChromeProxyLoFiRequest():
+ lo_fi_request_count += 1
+ else:
+ raise ChromeProxyMetricException, (
+ '%s: LoFi not in request header.' % (resp.response.url))
+
+ if resp.HasChromeProxyLoFiResponse():
+ lo_fi_response_count += 1
+ else:
+ raise ChromeProxyMetricException, (
+ '%s: LoFi not in response header.' % (resp.response.url))
+
+ if resp.content_length > 100:
+ raise ChromeProxyMetricException, (
+ 'Image %s is %d bytes. Expecting less than 100 bytes.' %
+ (resp.response.url, resp.content_length))
+
+ if lo_fi_request_count == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one LoFi request, but zero such requests were '
+ 'sent.')
+ if lo_fi_response_count == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one LoFi response, but zero such responses were '
+ 'received.')
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'lo_fi_request', 'count', lo_fi_request_count))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'lo_fi_response', 'count', lo_fi_response_count))
+ super(ChromeProxyMetric, self).AddResults(tab, results)
+
+ def AddResultsForLoFiPreview(self, tab, results):
+ lo_fi_preview_request_count = 0
+ lo_fi_preview_exp_request_count = 0
+ lo_fi_preview_response_count = 0
+
+ for resp in self.IterResponses(tab):
+ if '/csi?' in resp.response.url:
+ continue
+ if 'favicon.ico' in resp.response.url:
+ continue
+ if resp.response.url.startswith('data:'):
+ continue
+
+ if resp.HasChromeProxyLoFiPreviewRequest():
+ lo_fi_preview_request_count += 1
+
+ if resp.HasChromeProxyLoFiPreviewExpRequest():
+ lo_fi_preview_exp_request_count += 1
+
+ if resp.HasChromeProxyLoFiPreviewResponse():
+ lo_fi_preview_response_count += 1
+
+ if resp.HasChromeProxyLoFiRequest():
+ raise ChromeProxyMetricException, (
+ '%s: Lo-Fi directive should not be in preview request header.' %
+ (resp.response.url))
+
+ if lo_fi_preview_request_count == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one LoFi preview request, but zero such requests '
+ 'were sent.')
+ if lo_fi_preview_exp_request_count == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one LoFi preview exp=ignore_preview_blacklist '
+ 'request, but zero such requests were sent.')
+ if lo_fi_preview_response_count == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one LoFi preview response, but zero such '
+ 'responses were received.')
+
+ results.AddValue(
+ scalar.ScalarValue(
+ results.current_page, 'lo_fi_preview_request',
+ 'count', lo_fi_preview_request_count))
+ results.AddValue(
+ scalar.ScalarValue(
+ results.current_page, 'lo_fi_preview_exp_request',
+ 'count', lo_fi_preview_exp_request_count))
+ results.AddValue(
+ scalar.ScalarValue(
+ results.current_page, 'lo_fi_preview_response',
+ 'count', lo_fi_preview_response_count))
+ super(ChromeProxyMetric, self).AddResults(tab, results)
+
+ def AddResultsForPassThrough(self, tab, results):
+ compressed_count = 0
+ compressed_size = 0
+ pass_through_count = 0
+ pass_through_size = 0
+
+ for resp in self.IterResponses(tab):
+ if 'favicon.ico' in resp.response.url:
+ continue
+ if not resp.HasChromeProxyViaHeader():
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ '%s: Should have Via header (%s) (refer=%s, status=%d)' % (
+ r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
+ if resp.HasChromeProxyPassThroughRequest():
+ pass_through_count += 1
+ pass_through_size = resp.content_length
+ else:
+ compressed_count += 1
+ compressed_size = resp.content_length
+
+ if pass_through_count != 1:
+ raise ChromeProxyMetricException, (
+ 'Expected exactly one Chrome-Proxy pass-through request, but %d '
+ 'such requests were sent.' % (pass_through_count))
+
+ if compressed_count != 1:
+ raise ChromeProxyMetricException, (
+ 'Expected exactly one compressed request, but %d such requests were '
+ 'received.' % (compressed_count))
+
+ if compressed_size >= pass_through_size:
+ raise ChromeProxyMetricException, (
+ 'Compressed image is %d bytes and pass-through image is %d. '
+ 'Expecting compressed image size to be less than pass-through '
+ 'image.' % (compressed_size, pass_through_size))
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'compressed', 'count', compressed_count))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'compressed_size', 'bytes', compressed_size))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'pass_through', 'count', pass_through_count))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'pass_through_size', 'bytes', pass_through_size))
+
+ def AddResultsForHTTPSBypass(self, tab, results):
+ bypass_count = 0
+
+ for resp in self.IterResponses(tab):
+      # Only check HTTPS URLs.
+ if "https://" not in resp.response.url:
+ continue
+
+      # If a Chrome-Proxy Via header appears, fail the test.
+ if resp.HasChromeProxyViaHeader():
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
+ r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
+ bypass_count += 1
+
+ if bypass_count == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one https response was expected, but zero such '
+ 'responses were received.')
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'bypass', 'count', bypass_count))
+
+ def AddResultsForHTML5Test(self, tab, results):
+ # Wait for the number of "points" of HTML5 compatibility to appear to verify
+ # the HTML5 elements have loaded successfully.
+ tab.WaitForJavaScriptExpression(
+ 'document.getElementsByClassName("pointsPanel")', 15)
+
+ def AddResultsForYouTube(self, tab, results):
+ # Wait for the video to begin playing.
+ tab.WaitForJavaScriptExpression(
+ 'window.playerState == YT.PlayerState.PLAYING', 30)
+
+ def AddResultsForBypass(self, tab, results, url_pattern=""):
+ bypass_count = 0
+ skipped_count = 0
+
+ for resp in self.IterResponses(tab):
+      # Only check the URLs that contain the specified pattern.
+ if url_pattern and url_pattern not in resp.response.url:
+ skipped_count += 1
+ continue
+
+ if resp.HasChromeProxyViaHeader():
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ '%s: Should not have Via header (%s) (refer=%s, status=%d)' % (
+ r.url, r.GetHeader('Via'), r.GetHeader('Referer'), r.status))
+ bypass_count += 1
+
+ if bypass_count == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one response to be bypassed, but zero such '
+ 'responses were received.')
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'bypass', 'count', bypass_count))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'skipped', 'count', skipped_count))
+
+ def AddResultsForCorsBypass(self, tab, results):
+ eligible_response_count = 0
+ bypass_count = 0
+ bypasses = {}
+ for resp in self.IterResponses(tab):
+ logging.warn('got a resource %s' % (resp.response.url))
+
+ for resp in self.IterResponses(tab):
+ if resp.ShouldHaveChromeProxyViaHeader():
+ eligible_response_count += 1
+ if not resp.HasChromeProxyViaHeader():
+ bypass_count += 1
+ elif resp.response.status == 502:
+ bypasses[resp.response.url] = 0
+
+ for resp in self.IterResponses(tab):
+ if resp.ShouldHaveChromeProxyViaHeader():
+ if not resp.HasChromeProxyViaHeader():
+ if resp.response.status == 200:
+ if (bypasses.has_key(resp.response.url)):
+ bypasses[resp.response.url] = bypasses[resp.response.url] + 1
+
+ for url in bypasses:
+ if bypasses[url] == 0:
+ raise ChromeProxyMetricException, (
+ '%s: Got a 502 without a subsequent 200' % (url))
+ elif bypasses[url] > 1:
+ raise ChromeProxyMetricException, (
+ '%s: Got a 502 and multiple 200s: %d' % (url, bypasses[url]))
+ if bypass_count == 0:
+ raise ChromeProxyMetricException, (
+ 'At least one response should be bypassed. '
+ '(eligible_response_count=%d, bypass_count=%d)\n' % (
+ eligible_response_count, bypass_count))
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'cors_bypass', 'count', bypass_count))
+
+ def AddResultsForBlockOnce(self, tab, results):
+ eligible_response_count = 0
+ via_proxy = 0
+ visited_urls = []
+
+ for resp in self.IterResponses(tab):
+ # Add debug information in case of failure
+ visited_urls.append(resp.response.url)
+
+ # Block-once test URLs (Data Reduction Proxy always returns
+ # block-once) should not have the Chrome-Compression-Proxy Via header.
+ if (IsTestUrlForBlockOnce(resp.response.url)):
+ eligible_response_count += 1
+ if resp.HasChromeProxyViaHeader():
+ raise ChromeProxyMetricException, (
+ 'Response has a Chrome-Compression-Proxy Via header: ' +
+ resp.response.url)
+ elif resp.ShouldHaveChromeProxyViaHeader():
+ via_proxy += 1
+ if not resp.HasChromeProxyViaHeader():
+ # For all other URLs, confirm that via header is present if expected.
+ raise ChromeProxyMetricException, (
+ 'Missing Chrome-Compression-Proxy Via header.' +
+ resp.response.url)
+
+ if via_proxy == 0:
+ raise ChromeProxyMetricException, (
+ 'None of the requests went via data reduction proxy')
+
+ if (eligible_response_count != 2):
+ raise ChromeProxyMetricException, (
+ 'Did not make expected number of requests to whitelisted block-once'
+ ' test URLs. Expected: 2, Actual: %s, Visited URLs: %s' %
+ (eligible_response_count, visited_urls))
+
+ results.AddValue(scalar.ScalarValue(results.current_page,
+ 'eligible_responses', 'count', 2))
+ results.AddValue(scalar.ScalarValue(results.current_page,
+ 'via_proxy', 'count', via_proxy))
+
+ def AddResultsForSafebrowsingOn(self, tab, results):
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'safebrowsing', 'timeout responses', 1))
+
+ def AddResultsForSafebrowsingOff(self, tab, results):
+ response_count = 0
+ for resp in self.IterResponses(tab):
+ # Data reduction proxy should return the real response for sites with
+ # malware.
+ response_count += 1
+ if not resp.HasChromeProxyViaHeader():
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ '%s: Safebrowsing feature should be off for desktop and webview.\n'
+ 'Reponse: status=(%d, %s)\nHeaders:\n %s' % (
+ r.url, r.status, r.status_text, r.headers))
+
+ if response_count == 0:
+ raise ChromeProxyMetricException, (
+ 'Safebrowsing test failed: No valid responses received')
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'safebrowsing', 'responses', response_count))
+
+ def AddResultsForHTTPFallback(self, tab, results):
+ via_fallback_count = 0
+
+ for resp in self.IterResponses(tab):
+ if resp.ShouldHaveChromeProxyViaHeader():
+ # All responses should have come through the HTTP fallback proxy, which
+ # means that they should have the via header, and if a remote port is
+ # defined, it should be port 80.
+ if (not resp.HasChromeProxyViaHeader() or
+ (resp.remote_port and resp.remote_port != 80)):
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ '%s: Should have come through the fallback proxy.\n'
+ 'Reponse: remote_port=%s status=(%d, %s)\nHeaders:\n %s' % (
+ r.url, str(resp.remote_port), r.status, r.status_text,
+ r.headers))
+ via_fallback_count += 1
+
+ if via_fallback_count == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one response through the fallback proxy, but zero '
+ 'such responses were received.')
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'via_fallback', 'count', via_fallback_count))
+
+ def AddResultsForHTTPToDirectFallback(self, tab, results,
+ fallback_response_host):
+ via_fallback_count = 0
+ bypass_count = 0
+ responses = self.IterResponses(tab)
+
+ # The first response(s) coming from fallback_response_host should be
+ # through the HTTP fallback proxy.
+ resp = next(responses, None)
+ while resp and fallback_response_host in resp.response.url:
+ if fallback_response_host in resp.response.url:
+ if (not resp.HasChromeProxyViaHeader() or resp.remote_port != 80):
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ 'Response for %s should have come through the fallback proxy.\n'
+ 'Response: remote_port=%s status=(%d, %s)\nHeaders:\n %s' % (
+ r.url, str(resp.remote_port), r.status, r.status_text,
+ r.headers))
+ else:
+ via_fallback_count += 1
+ resp = next(responses, None)
+
+ # All other responses should be bypassed.
+ while resp:
+ if resp.HasChromeProxyViaHeader():
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ 'Response for %s should not have via header.\n'
+ 'Response: status=(%d, %s)\nHeaders:\n %s' % (
+ r.url, r.status, r.status_text, r.headers))
+ else:
+ bypass_count += 1
+ resp = next(responses, None)
+
+ # At least one response should go through the http proxy and be bypassed.
+ if via_fallback_count == 0 or bypass_count == 0:
+ raise ChromeProxyMetricException(
+ 'There should be at least one response through the fallback proxy '
+ '(actual %s) and at least one bypassed response (actual %s)' %
+ (via_fallback_count, bypass_count))
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'via_fallback', 'count', via_fallback_count))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'bypass', 'count', bypass_count))
+
+ def AddResultsForReenableAfterBypass(
+ self, tab, results, bypass_seconds_min, bypass_seconds_max):
+ """Verify results for a re-enable after bypass test.
+
+ Args:
+ tab: the tab for the test.
+ results: the results object to add the results values to.
+ bypass_seconds_min: the minimum duration of the bypass.
+ bypass_seconds_max: the maximum duration of the bypass.
+ """
+ bypass_count = 0
+ via_count = 0
+
+ for resp in self.IterResponses(tab):
+ if resp.HasChromeProxyViaHeader():
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ 'Response for %s should not have via header.\n'
+ 'Reponse: status=(%d, %s)\nHeaders:\n %s' % (
+ r.url, r.status, r.status_text, r.headers))
+ else:
+ bypass_count += 1
+
+ # Wait until 30 seconds before the bypass should expire, and fetch a page.
+ # It should not have the via header because the proxy should still be
+ # bypassed.
+ time.sleep(bypass_seconds_min - 30)
+
+ tab.ClearCache(force=True)
+ before_metrics = ChromeProxyMetric()
+ before_metrics.Start(results.current_page, tab)
+ tab.Navigate('http://chromeproxy-test.appspot.com/default')
+ tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10)
+ before_metrics.Stop(results.current_page, tab)
+
+ for resp in before_metrics.IterResponses(tab):
+ if resp.HasChromeProxyViaHeader():
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ 'Response for %s should not have via header; proxy should still '
+ 'be bypassed.\nReponse: status=(%d, %s)\nHeaders:\n %s' % (
+ r.url, r.status, r.status_text, r.headers))
+ else:
+ bypass_count += 1
+ if bypass_count == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one response to be bypassed before the bypass '
+ 'expired, but zero such responses were received.')
+
+ # Wait until 30 seconds after the bypass should expire, and fetch a page. It
+ # should have the via header since the proxy should no longer be bypassed.
+ time.sleep((bypass_seconds_max + 30) - (bypass_seconds_min - 30))
+
+ tab.ClearCache(force=True)
+ after_metrics = ChromeProxyMetric()
+ after_metrics.Start(results.current_page, tab)
+ tab.Navigate('http://chromeproxy-test.appspot.com/default')
+ tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10)
+ after_metrics.Stop(results.current_page, tab)
+
+ for resp in after_metrics.IterResponses(tab):
+ if not resp.HasChromeProxyViaHeader():
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ 'Response for %s should have via header; proxy should no longer '
+ 'be bypassed.\nReponse: status=(%d, %s)\nHeaders:\n %s' % (
+ r.url, r.status, r.status_text, r.headers))
+ else:
+ via_count += 1
+ if via_count == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one response through the proxy after the bypass '
+ 'expired, but zero such responses were received.')
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'bypass', 'count', bypass_count))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'via', 'count', via_count))
+
+ def AddResultsForReenableAfterSetBypass(
+ self, tab, results, bypass_seconds):
+ """Verify results for a re-enable after bypass test.
+
+ Args:
+ tab: the tab for the test.
+ results: the results object to add the results values to.
+ bypass_seconds: the duration of the bypass
+ """
+ bypass_count = 0
+ via_count = 0
+
+ # Verify the bypass url was bypassed.
+ for resp in self.IterResponses(tab):
+ if resp.HasChromeProxyViaHeader():
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ 'Response for %s should not have via header.\n'
+ 'Reponse: status=(%d, %s)\nHeaders:\n %s' % (
+ r.url, r.status, r.status_text, r.headers))
+ else:
+ bypass_count += 1
+
+ # Navigate to a test page and verify it's being bypassed.
+ tab.ClearCache(force=True)
+ before_metrics = ChromeProxyMetric()
+ before_metrics.Start(results.current_page, tab)
+ tab.Navigate('http://chromeproxy-test.appspot.com/default')
+ tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10)
+ before_metrics.Stop(results.current_page, tab)
+
+ for resp in before_metrics.IterResponses(tab):
+ if resp.HasChromeProxyViaHeader():
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ 'Response for %s should not have via header; proxy should still '
+ 'be bypassed.\nReponse: status=(%d, %s)\nHeaders:\n %s' % (
+ r.url, r.status, r.status_text, r.headers))
+ else:
+ bypass_count += 1
+ if bypass_count == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one response to be bypassed before the bypass '
+ 'expired, but zero such responses were received.')
+
+    # Wait for the bypass to expire; combined with the overhead of the
+    # previous steps, the bypass duration will have elapsed after this delay.
+ time.sleep(bypass_seconds)
+
+    # Navigate to the test page again and verify data saver is no longer
+    # bypassed.
+ tab.ClearCache(force=True)
+ after_metrics = ChromeProxyMetric()
+ after_metrics.Start(results.current_page, tab)
+ tab.Navigate('http://chromeproxy-test.appspot.com/default')
+ tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 10)
+ after_metrics.Stop(results.current_page, tab)
+
+ for resp in after_metrics.IterResponses(tab):
+ if not resp.HasChromeProxyViaHeader():
+ r = resp.response
+ raise ChromeProxyMetricException, (
+ 'Response for %s should have via header; proxy should no longer '
+ 'be bypassed.\nReponse: status=(%d, %s)\nHeaders:\n %s' % (
+ r.url, r.status, r.status_text, r.headers))
+ else:
+ via_count += 1
+ if via_count == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one response through the proxy after the bypass '
+ 'expired, but zero such responses were received.')
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'bypass', 'count', bypass_count))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'via', 'count', via_count))
+
+ def AddResultsForClientConfig(self, tab, results):
+ resources_with_old_auth = 0
+ resources_with_new_auth = 0
+
+ super(ChromeProxyMetric, self).AddResults(tab, results)
+ for resp in self.IterResponses(tab):
+ if resp.GetChromeProxyRequestHeaderValue('s') != None:
+ resources_with_new_auth += 1
+ if resp.GetChromeProxyRequestHeaderValue('ps') != None:
+ resources_with_old_auth += 1
+
+ if resources_with_old_auth != 0:
+ raise ChromeProxyMetricException, (
+ 'Expected zero responses with the old authentication scheme but '
+ 'received %d.' % resources_with_old_auth)
+
+ if resources_with_new_auth == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one response with the new authentication scheme, '
+ 'but zero such responses were received.')
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'new_auth', 'count', resources_with_new_auth))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'old_auth', 'count', resources_with_old_auth))
+
+PROXIED = 'proxied'
+DIRECT = 'direct'
+
+
+class ChromeProxyVideoMetric(network_metrics.NetworkMetric):
+ """Metrics for video pages.
+
+ Wraps the video metrics produced by videowrapper.js, such as the video
+ duration and size in pixels. Also checks a few basic HTTP response headers
+ such as Content-Type and Content-Length in the video responses.
+ """
+
+ def __init__(self, tab):
+ super(ChromeProxyVideoMetric, self).__init__()
+ with open(os.path.join(os.path.dirname(__file__), 'videowrapper.js')) as f:
+ js = f.read()
+ tab.ExecuteJavaScript(js)
+
+ def Start(self, page, tab):
+ tab.ExecuteJavaScript('window.__chromeProxyCreateVideoWrappers()')
+ self.videoMetrics = None
+ super(ChromeProxyVideoMetric, self).Start(page, tab)
+
+ def Stop(self, page, tab):
+ tab.WaitForJavaScriptExpression('window.__chromeProxyVideoLoaded', 30)
+ m = tab.EvaluateJavaScript('window.__chromeProxyVideoMetrics')
+
+ # Now wait for the video to stop playing.
+ # Give it 2x the total duration to account for buffering.
+ waitTime = 2 * m['video_duration']
+ tab.WaitForJavaScriptExpression('window.__chromeProxyVideoEnded', waitTime)
+
+ # Load the final metrics.
+ m = tab.EvaluateJavaScript('window.__chromeProxyVideoMetrics')
+ self.videoMetrics = m
+ # Cast this to an integer as it is often approximate (for an unknown reason)
+ m['video_duration'] = int(m['video_duration'])
+ super(ChromeProxyVideoMetric, self).Stop(page, tab)
+
+ def ResponseFromEvent(self, event):
+ return chrome_proxy_metrics.ChromeProxyResponse(event)
+
+ def AddResults(self, tab, results):
+ raise NotImplementedError
+
+ def AddResultsForProxied(self, tab, results):
+ return self._AddResultsShared(PROXIED, tab, results)
+
+ def AddResultsForDirect(self, tab, results):
+ return self._AddResultsShared(DIRECT, tab, results)
+
+ def _AddResultsShared(self, kind, tab, results):
+ def err(s):
+ raise ChromeProxyMetricException, s
+
+ # Should have played the video.
+ if not self.videoMetrics['ready']:
+ err('%s: video not played' % kind)
+
+ # Should have an HTTP response for the video.
+ wantContentType = 'video/webm' if kind == PROXIED else 'video/mp4'
+ found = False
+ for r in self.IterResponses(tab):
+ resp = r.response
+ if kind == DIRECT and r.HasChromeProxyViaHeader():
+ err('%s: page has proxied Via header' % kind)
+ if resp.GetHeader('Content-Type') != wantContentType:
+ continue
+ if found:
+ err('%s: multiple video responses' % kind)
+ found = True
+
+ cl = resp.GetHeader('Content-Length')
+ xocl = resp.GetHeader('X-Original-Content-Length')
+ if cl != None:
+ self.videoMetrics['content_length_header'] = int(cl)
+ if xocl != None:
+ self.videoMetrics['x_original_content_length_header'] = int(xocl)
+
+ # Should have CL always.
+ if cl == None:
+ err('%s: missing ContentLength' % kind)
+ # Proxied: should have CL < XOCL
+ # Direct: should not have XOCL
+ if kind == PROXIED:
+ if xocl == None or int(cl) >= int(xocl):
+ err('%s: bigger response (%s > %s)' % (kind, str(cl), str(xocl)))
+ else:
+ if xocl != None:
+ err('%s: has XOriginalContentLength' % kind)
+
+ if not found:
+ err('%s: missing video response' % kind)
+
+ # Finally, add all the metrics to the results.
+ for (k, v) in self.videoMetrics.iteritems():
+ k = "%s_%s" % (k, kind)
+ results.AddValue(scalar.ScalarValue(results.current_page, k, "", v))
+
+
+class ChromeProxyInstrumentedVideoMetric(Metric):
+ """Metric for pages instrumented to evaluate video transcoding."""
+
+ def __init__(self):
+ super(ChromeProxyInstrumentedVideoMetric, self).__init__()
+
+ def Stop(self, page, tab):
+ waitTime = tab.EvaluateJavaScript('test.waitTime')
+ tab.WaitForJavaScriptExpression('test.metrics.complete', waitTime)
+ super(ChromeProxyInstrumentedVideoMetric, self).Stop(page, tab)
+
+ def AddResults(self, tab, results):
+ metrics = tab.EvaluateJavaScript('test.metrics')
+ for (k, v) in metrics.iteritems():
+ results.AddValue(scalar.ScalarValue(results.current_page, k, '', v))
+ try:
+ complete = metrics['complete']
+ failed = metrics['failed']
+ if not complete:
+ raise ChromeProxyMetricException, 'Test not complete'
+ if failed:
+ raise ChromeProxyMetricException, 'failed'
+ except KeyError:
+ raise ChromeProxyMetricException, 'No metrics found'
+
+# Returns whether |url| is a block-once test URL. Data Reduction Proxy has been
+# configured to always return block-once for these URLs.
+def IsTestUrlForBlockOnce(url):
+ return (url == 'http://check.googlezip.net/blocksingle/' or
+ url == ('http://chromeproxy-test.appspot.com/default?respBody=T0s='
+ '&respHeader=eyJBY2Nlc3MtQ29udHJvbC1BbGxvdy1PcmlnaW4iOlsiKiJ'
+ 'dfQ==&respStatus=200&flywheelAction=block-once'))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_metrics_unittest.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_metrics_unittest.py
new file mode 100644
index 00000000000..16ed6404b9b
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_metrics_unittest.py
@@ -0,0 +1,398 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import unittest
+
+from common import chrome_proxy_metrics as common_metrics
+from common import network_metrics_unittest as network_unittest
+from integration_tests import chrome_proxy_metrics as metrics
+from telemetry.testing import test_page_test_results
+
+TEST_EXTRA_VIA_HEADER = '1.1 EXTRA_VIA_HEADER'
+
+# Timeline events used in tests.
+# An HTML not via proxy.
+EVENT_HTML_DIRECT = network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.html1',
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Length': str(len(network_unittest.HTML_BODY)),
+ },
+ body=network_unittest.HTML_BODY)
+
+# A BlockOnce response not via proxy.
+EVENT_HTML_BLOCKONCE = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://check.googlezip.net/blocksingle/',
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Length': str(len(network_unittest.HTML_BODY)),
+ },
+ body=network_unittest.HTML_BODY))
+
+# An HTML via proxy.
+EVENT_HTML_PROXY_VIA = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.html2',
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(len(network_unittest.HTML_BODY)),
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ body=network_unittest.HTML_BODY,
+ remote_port=443))
+
+# An HTML via proxy with extra header.
+EVENT_HTML_PROXY_EXTRA_VIA = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.html2',
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(len(network_unittest.HTML_BODY)),
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER + ", " +
+ TEST_EXTRA_VIA_HEADER,
+ },
+ body=network_unittest.HTML_BODY,
+ remote_port=443))
+
+# An HTML via the HTTP fallback proxy.
+EVENT_HTML_PROXY_VIA_HTTP_FALLBACK = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.html2',
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(len(network_unittest.HTML_BODY)),
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ body=network_unittest.HTML_BODY,
+ remote_port=80))
+
+# An image via proxy with Via header.
+EVENT_IMAGE_PROXY_VIA = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ body=base64.b64encode(network_unittest.IMAGE_BODY),
+ base64_encoded_body=True,
+ remote_port=443))
+
+# An image via the HTTP fallback proxy.
+EVENT_IMAGE_PROXY_VIA_HTTP_FALLBACK = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ body=base64.b64encode(network_unittest.IMAGE_BODY),
+ base64_encoded_body=True,
+ remote_port=80))
+
+# An image via proxy with Via header and it is cached.
+EVENT_IMAGE_PROXY_CACHED = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ body=base64.b64encode(network_unittest.IMAGE_BODY),
+ base64_encoded_body=True,
+ served_from_cache=True))
+
+# An image fetched directly.
+EVENT_IMAGE_DIRECT = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ },
+ body=base64.b64encode(network_unittest.IMAGE_BODY),
+ base64_encoded_body=True))
+
+# A safe-browsing malware response.
+EVENT_MALWARE_PROXY = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.malware',
+ response_headers={
+ 'X-Malware-Url': '1',
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
+ 'Location': 'http://test.malware',
+ },
+ status=307))
+
+# An image response with a Chrome-Proxy bypass directive (status 502).
+EVENT_IMAGE_BYPASS = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Chrome-Proxy': 'bypass=1',
+ 'Content-Type': 'text/html',
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ status=502))
+
+# An image fetched directly. NOTE(review): exact duplicate of the earlier
+EVENT_IMAGE_DIRECT = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ },
+ body=base64.b64encode(network_unittest.IMAGE_BODY),
+ base64_encoded_body=True))
+
+
+class ChromeProxyMetricTest(unittest.TestCase):
+
+ _test_proxy_info = {}
+
+ def _StubGetProxyInfo(self, info):
+ def stub(unused_tab, unused_url=''): # pylint: disable=W0613
+ return ChromeProxyMetricTest._test_proxy_info
+ metrics.GetProxyInfoFromNetworkInternals = stub
+ ChromeProxyMetricTest._test_proxy_info = info
+
+ def testChromeProxyMetricForHeaderValidation(self):
+ metric = metrics.ChromeProxyMetric()
+ metric.SetEvents([
+ EVENT_HTML_DIRECT,
+ EVENT_HTML_PROXY_VIA,
+ EVENT_IMAGE_PROXY_CACHED,
+ EVENT_IMAGE_DIRECT])
+
+ results = test_page_test_results.TestPageTestResults(self)
+
+ missing_via_exception = False
+ try:
+ metric.AddResultsForHeaderValidation(None, results)
+ except common_metrics.ChromeProxyMetricException:
+ missing_via_exception = True
+    # The direct HTML and image responses lack a valid Via header.
+ self.assertTrue(missing_via_exception)
+
+ # Two events with valid Via headers.
+ metric.SetEvents([
+ EVENT_HTML_PROXY_VIA,
+ EVENT_IMAGE_PROXY_CACHED])
+ metric.AddResultsForHeaderValidation(None, results)
+ results.AssertHasPageSpecificScalarValue('checked_via_header', 'count', 2)
+
+ # Passing in zero responses should cause a failure.
+ metric.SetEvents([])
+ no_responses_exception = False
+ try:
+ metric.AddResultsForHeaderValidation(None, results)
+ except common_metrics.ChromeProxyMetricException:
+ no_responses_exception = True
+ self.assertTrue(no_responses_exception)
+
+ def testChromeProxyMetricForExtraViaHeader(self):
+ metric = metrics.ChromeProxyMetric()
+ metric.SetEvents([EVENT_HTML_DIRECT,
+ EVENT_HTML_PROXY_EXTRA_VIA])
+ results = test_page_test_results.TestPageTestResults(self)
+ metric.AddResultsForExtraViaHeader(None, results, TEST_EXTRA_VIA_HEADER)
+ # The direct page should not count an extra via header, but should also not
+ # throw an exception.
+ results.AssertHasPageSpecificScalarValue('extra_via_header', 'count', 1)
+
+ metric.SetEvents([EVENT_HTML_PROXY_VIA])
+ exception_occurred = False
+ try:
+ metric.AddResultsForExtraViaHeader(None, results, TEST_EXTRA_VIA_HEADER)
+ except common_metrics.ChromeProxyMetricException:
+ exception_occurred = True
+ # The response had the chrome proxy via header, but not the extra expected
+ # via header.
+ self.assertTrue(exception_occurred)
+
+ def testChromeProxyMetricForBypass(self):
+ metric = metrics.ChromeProxyMetric()
+ metric.SetEvents([
+ EVENT_HTML_DIRECT,
+ EVENT_HTML_PROXY_VIA,
+ EVENT_IMAGE_PROXY_CACHED,
+ EVENT_IMAGE_DIRECT])
+ results = test_page_test_results.TestPageTestResults(self)
+
+ bypass_exception = False
+ try:
+ metric.AddResultsForBypass(None, results)
+ except common_metrics.ChromeProxyMetricException:
+ bypass_exception = True
+ # Two of the first three events have Via headers.
+ self.assertTrue(bypass_exception)
+
+ # Use directly fetched image only. It is treated as bypassed.
+ metric.SetEvents([EVENT_IMAGE_DIRECT])
+ metric.AddResultsForBypass(None, results)
+ results.AssertHasPageSpecificScalarValue('bypass', 'count', 1)
+
+ # Passing in zero responses should cause a failure.
+ metric.SetEvents([])
+ no_responses_exception = False
+ try:
+ metric.AddResultsForBypass(None, results)
+ except common_metrics.ChromeProxyMetricException:
+ no_responses_exception = True
+ self.assertTrue(no_responses_exception)
+
+ def testChromeProxyMetricForCorsBypass(self):
+ metric = metrics.ChromeProxyMetric()
+ metric.SetEvents([EVENT_HTML_PROXY_VIA,
+ EVENT_IMAGE_BYPASS,
+ EVENT_IMAGE_DIRECT])
+ results = test_page_test_results.TestPageTestResults(self)
+ metric.AddResultsForCorsBypass(None, results)
+ results.AssertHasPageSpecificScalarValue('cors_bypass', 'count', 1)
+
+ # Passing in zero responses should cause a failure.
+ metric.SetEvents([])
+ no_responses_exception = False
+ try:
+ metric.AddResultsForCorsBypass(None, results)
+ except common_metrics.ChromeProxyMetricException:
+ no_responses_exception = True
+ self.assertTrue(no_responses_exception)
+
+ def testChromeProxyMetricForBlockOnce(self):
+ metric = metrics.ChromeProxyMetric()
+ metric.SetEvents([EVENT_HTML_BLOCKONCE,
+ EVENT_HTML_BLOCKONCE,
+ EVENT_IMAGE_PROXY_VIA])
+ results = test_page_test_results.TestPageTestResults(self)
+ metric.AddResultsForBlockOnce(None, results)
+ results.AssertHasPageSpecificScalarValue('eligible_responses', 'count', 2)
+
+ metric.SetEvents([EVENT_HTML_BLOCKONCE,
+ EVENT_HTML_BLOCKONCE,
+ EVENT_IMAGE_DIRECT])
+ exception_occurred = False
+ try:
+ metric.AddResultsForBlockOnce(None, results)
+ except common_metrics.ChromeProxyMetricException:
+ exception_occurred = True
+ # The second response was over direct, but was expected via proxy.
+ self.assertTrue(exception_occurred)
+
+ # Passing in zero responses should cause a failure.
+ metric.SetEvents([])
+ no_responses_exception = False
+ try:
+ metric.AddResultsForBlockOnce(None, results)
+ except common_metrics.ChromeProxyMetricException:
+ no_responses_exception = True
+ self.assertTrue(no_responses_exception)
+
+ def testChromeProxyMetricForSafebrowsingOn(self):
+ metric = metrics.ChromeProxyMetric()
+ metric.SetEvents([EVENT_MALWARE_PROXY])
+ results = test_page_test_results.TestPageTestResults(self)
+
+ metric.AddResultsForSafebrowsingOn(None, results)
+ results.AssertHasPageSpecificScalarValue(
+ 'safebrowsing', 'timeout responses', 1)
+
+ # Clear results and metrics to test no response for safebrowsing
+ results = test_page_test_results.TestPageTestResults(self)
+ metric.SetEvents([])
+ metric.AddResultsForSafebrowsingOn(None, results)
+ results.AssertHasPageSpecificScalarValue(
+ 'safebrowsing', 'timeout responses', 1)
+
+ def testChromeProxyMetricForHTTPFallback(self):
+ metric = metrics.ChromeProxyMetric()
+ metric.SetEvents([EVENT_HTML_PROXY_VIA_HTTP_FALLBACK,
+ EVENT_IMAGE_PROXY_VIA_HTTP_FALLBACK])
+ results = test_page_test_results.TestPageTestResults(self)
+ metric.AddResultsForHTTPFallback(None, results)
+ results.AssertHasPageSpecificScalarValue('via_fallback', 'count', 2)
+
+ metric.SetEvents([EVENT_HTML_PROXY_VIA,
+ EVENT_IMAGE_PROXY_VIA])
+ exception_occurred = False
+ try:
+ metric.AddResultsForHTTPFallback(None, results)
+ except common_metrics.ChromeProxyMetricException:
+ exception_occurred = True
+ # The responses came through the SPDY proxy, but were expected through the
+ # HTTP fallback proxy.
+ self.assertTrue(exception_occurred)
+
+ # Passing in zero responses should cause a failure.
+ metric.SetEvents([])
+ no_responses_exception = False
+ try:
+ metric.AddResultsForHTTPFallback(None, results)
+ except common_metrics.ChromeProxyMetricException:
+ no_responses_exception = True
+ self.assertTrue(no_responses_exception)
+
+ def testChromeProxyMetricForHTTPToDirectFallback(self):
+ metric = metrics.ChromeProxyMetric()
+ metric.SetEvents([EVENT_HTML_PROXY_VIA_HTTP_FALLBACK,
+ EVENT_HTML_DIRECT,
+ EVENT_IMAGE_DIRECT])
+ results = test_page_test_results.TestPageTestResults(self)
+ metric.AddResultsForHTTPToDirectFallback(None, results, 'test.html2')
+ results.AssertHasPageSpecificScalarValue('via_fallback', 'count', 1)
+ results.AssertHasPageSpecificScalarValue('bypass', 'count', 2)
+
+ metric.SetEvents([EVENT_HTML_PROXY_VIA,
+ EVENT_HTML_DIRECT])
+ exception_occurred = False
+ try:
+ metric.AddResultsForHTTPToDirectFallback(None, results, 'test.html2')
+ except common_metrics.ChromeProxyMetricException:
+ exception_occurred = True
+ # The first response was expected through the HTTP fallback proxy.
+ self.assertTrue(exception_occurred)
+
+ metric.SetEvents([EVENT_HTML_PROXY_VIA_HTTP_FALLBACK,
+ EVENT_HTML_PROXY_VIA_HTTP_FALLBACK,
+ EVENT_IMAGE_PROXY_VIA_HTTP_FALLBACK])
+ exception_occurred = False
+ try:
+ metric.AddResultsForHTTPToDirectFallback(None, results, 'test.html2')
+ except common_metrics.ChromeProxyMetricException:
+ exception_occurred = True
+ # All but the first response were expected to be over direct.
+ self.assertTrue(exception_occurred)
+
+ metric.SetEvents([EVENT_HTML_DIRECT,
+ EVENT_HTML_DIRECT,
+ EVENT_IMAGE_DIRECT])
+ exception_occurred = False
+ try:
+ metric.AddResultsForHTTPToDirectFallback(None, results, 'test.html2')
+ except common_metrics.ChromeProxyMetricException:
+ exception_occurred = True
+ # The first response was expected through the HTTP fallback proxy.
+ self.assertTrue(exception_occurred)
+
+ # Passing in zero responses should cause a failure.
+ metric.SetEvents([])
+ no_responses_exception = False
+ try:
+ metric.AddResultsForHTTPToDirectFallback(None, results, 'test.html2')
+ except common_metrics.ChromeProxyMetricException:
+ no_responses_exception = True
+ self.assertTrue(no_responses_exception)
+
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/__init__.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/__init__.py
new file mode 100644
index 00000000000..799c2462114
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/__init__.py
@@ -0,0 +1,27 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import inspect
+import os
+import sys
+
+from telemetry.core import discover
+from telemetry import story
+import video
+
+
+# Import all submodules' StorySet classes.
+start_dir = os.path.dirname(os.path.abspath(__file__))
+top_level_dir = os.path.abspath(os.path.join(start_dir, os.pardir, os.pardir))
+base_class = story.StorySet
+for cls in discover.DiscoverClasses(
+ start_dir, top_level_dir, base_class).values():
+ setattr(sys.modules[__name__], cls.__name__, cls)
+
+# DiscoverClasses makes the assumption that there is exactly one matching
+# class per file, however the following are declared in the same file.
+for cls in (video.VideoDirectStorySet,
+ video.VideoProxiedStorySet,
+ video.VideoCompareStorySet):
+ setattr(sys.modules[__name__], cls.__name__, cls)
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/block_once.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/block_once.py
new file mode 100644
index 00000000000..fc1327a7492
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/block_once.py
@@ -0,0 +1,52 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class BlockOncePage(page_module.Page):
+
+ def __init__(self, url, page_set):
+ super(BlockOncePage, self).__init__(url=url, page_set=page_set)
+
+ def RunNavigateSteps(self, action_runner):
+ super(BlockOncePage, self).RunNavigateSteps(action_runner)
+ # Test block-once on a POST request.
+ # Ensure that a subsequent request uses the data reduction proxy.
+ action_runner.ExecuteJavaScript('''
+ (function() {
+ window.post_request_completed = false;
+ var request = new XMLHttpRequest();
+ request.open("POST",
+ "http://chromeproxy-test.appspot.com/default?" +
+ "respBody=T0s=&respHeader=eyJBY2Nlc3MtQ29udHJvbC1BbGxvdy1Pcml" +
+ "naW4iOlsiKiJdfQ==&respStatus=200&flywheelAction=block-once");
+ request.onload = function() {
+ window.post_request_completed = true;
+ var viaProxyRequest = new XMLHttpRequest();
+ viaProxyRequest.open("GET",
+ "http://check.googlezip.net/image.png");
+ viaProxyRequest.send();
+ };
+ request.send();
+ })();
+ ''')
+ action_runner.WaitForJavaScriptCondition(
+ "window.post_request_completed == true", 30)
+
+class BlockOnceStorySet(story.StorySet):
+
+ """ Chrome proxy test sites """
+
+ def __init__(self):
+ super(BlockOnceStorySet, self).__init__()
+
+ # Test block-once for a GET request.
+ urls_list = [
+ 'http://check.googlezip.net/blocksingle/',
+ ]
+
+ for url in urls_list:
+ self.AddStory(BlockOncePage(url, self))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/bypass.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/bypass.py
new file mode 100644
index 00000000000..1958bb284ab
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/bypass.py
@@ -0,0 +1,27 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class BypassPage(page_module.Page):
+
+ def __init__(self, url, page_set):
+ super(BypassPage, self).__init__(url=url, page_set=page_set)
+
+
+class BypassStorySet(story.StorySet):
+
+ """ Chrome proxy test sites """
+
+ def __init__(self):
+ super(BypassStorySet, self).__init__()
+
+ urls_list = [
+ 'http://check.googlezip.net/block/',
+ ]
+
+ for url in urls_list:
+ self.AddStory(BypassPage(url, self))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/client_type.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/client_type.py
new file mode 100644
index 00000000000..0b91897beb5
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/client_type.py
@@ -0,0 +1,74 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class ClientTypePage(page_module.Page):
+ """A test page for the chrome proxy client type tests.
+
+ Attributes:
+ bypass_for_client_type: The client type Chrome-Proxy header directive that
+ would get a bypass when this page is fetched through the data
+ reduction proxy. For example, a value of "android" means that this
+ page would cause a bypass when fetched from a client that sets
+ "Chrome-Proxy: c=android".
+ """
+
+ def __init__(self, url, page_set, bypass_for_client_type):
+ super(ClientTypePage, self).__init__(url=url, page_set=page_set)
+ self.bypass_for_client_type = bypass_for_client_type
+
+
+class ClientTypeStorySet(story.StorySet):
+ """Chrome proxy test sites"""
+
+ def __init__(self):
+ super(ClientTypeStorySet, self).__init__()
+
+ # Page that should not bypass for any client types. This page is here in
+ # order to determine the Chrome-Proxy client type value before running any
+ # of the following pages, since there's no way to get the client type value
+ # from a request that was bypassed.
+ self.AddStory(ClientTypePage(
+ url='http://check.googlezip.net/test.html',
+ page_set=self,
+ bypass_for_client_type='none'))
+
+ # Page that should cause a bypass for android chrome clients.
+ self.AddStory(ClientTypePage(
+ url='http://check.googlezip.net/chrome-proxy-header/c_android/',
+ page_set=self,
+ bypass_for_client_type='android'))
+
+ # Page that should cause a bypass for android webview clients.
+ self.AddStory(ClientTypePage(
+ url='http://check.googlezip.net/chrome-proxy-header/c_webview/',
+ page_set=self,
+ bypass_for_client_type='webview'))
+
+ # Page that should cause a bypass for iOS clients.
+ self.AddStory(ClientTypePage(
+ url='http://check.googlezip.net/chrome-proxy-header/c_ios/',
+ page_set=self,
+ bypass_for_client_type='ios'))
+
+ # Page that should cause a bypass for Linux clients.
+ self.AddStory(ClientTypePage(
+ url='http://check.googlezip.net/chrome-proxy-header/c_linux/',
+ page_set=self,
+ bypass_for_client_type='linux'))
+
+ # Page that should cause a bypass for Windows clients.
+ self.AddStory(ClientTypePage(
+ url='http://check.googlezip.net/chrome-proxy-header/c_win/',
+ page_set=self,
+ bypass_for_client_type='win'))
+
+ # Page that should cause a bypass for ChromeOS clients.
+ self.AddStory(ClientTypePage(
+ url='http://check.googlezip.net/chrome-proxy-header/c_chromeos/',
+ page_set=self,
+ bypass_for_client_type='chromeos'))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/corsbypass.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/corsbypass.py
new file mode 100644
index 00000000000..04c56f1611b
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/corsbypass.py
@@ -0,0 +1,27 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class CorsBypassPage(page_module.Page):
+
+ def __init__(self, url, page_set):
+ super(CorsBypassPage, self).__init__(url=url, page_set=page_set)
+
+
+class CorsBypassStorySet(story.StorySet):
+
+ """ Chrome proxy test sites """
+
+ def __init__(self):
+ super(CorsBypassStorySet, self).__init__()
+
+ urls_list = [
+ 'http://www.gstatic.com/chrome/googlezip/cors/',
+ ]
+
+ for url in urls_list:
+ self.AddStory(CorsBypassPage(url, self))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/exp_directive.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/exp_directive.py
new file mode 100644
index 00000000000..450ea83c650
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/exp_directive.py
@@ -0,0 +1,27 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class ExpDirectivePage(page_module.Page):
+ """A test page for the experiment Chrome-Proxy directive tests."""
+
+ def __init__(self, url, page_set):
+ super(ExpDirectivePage, self).__init__(url=url, page_set=page_set)
+
+
+class ExpDirectiveStorySet(story.StorySet):
+ """ Chrome proxy test sites """
+
+ def __init__(self):
+ super(ExpDirectiveStorySet, self).__init__()
+
+ urls_list = [
+ 'http://check.googlezip.net/exp/',
+ ]
+
+ for url in urls_list:
+ self.AddStory(ExpDirectivePage(url, self))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/fallback_viaheader.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/fallback_viaheader.py
new file mode 100644
index 00000000000..8b3dc56acf8
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/fallback_viaheader.py
@@ -0,0 +1,27 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class FallbackViaHeaderPage(page_module.Page):
+
+ def __init__(self, url, page_set):
+ super(FallbackViaHeaderPage, self).__init__(url=url, page_set=page_set)
+
+
+class FallbackViaHeaderStorySet(story.StorySet):
+ """ Chrome proxy test sites """
+
+ def __init__(self):
+ super(FallbackViaHeaderStorySet, self).__init__()
+
+ urls_list = [
+ 'http://chromeproxy-test.appspot.com/default?respStatus=200',
+ 'http://chromeproxy-test.appspot.com/default?respStatus=413',
+ ]
+
+ for url in urls_list:
+ self.AddStory(FallbackViaHeaderPage(url, self))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/html5test.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/html5test.py
new file mode 100644
index 00000000000..00139cf062e
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/html5test.py
@@ -0,0 +1,27 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class HTML5TestPage(page_module.Page):
+
+ def __init__(self, url, page_set):
+ super(HTML5TestPage, self).__init__(url=url, page_set=page_set)
+
+
+class HTML5TestStorySet(story.StorySet):
+
+  """ Chrome proxy test page for HTML5 feature support. """
+
+ def __init__(self):
+ super(HTML5TestStorySet, self).__init__()
+
+ urls_list = [
+ 'http://html5test.com/',
+ ]
+
+ for url in urls_list:
+ self.AddStory(HTML5TestPage(url, self))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/http_to_direct_fallback.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/http_to_direct_fallback.py
new file mode 100644
index 00000000000..c43dc6d07b4
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/http_to_direct_fallback.py
@@ -0,0 +1,26 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class HTTPToDirectFallbackPage(page_module.Page):
+ """Page that tests falling back from the HTTP proxy to a direct connection."""
+ def __init__(self, url, page_set):
+ super(HTTPToDirectFallbackPage, self).__init__(url=url, page_set=page_set)
+
+
+class HTTPToDirectFallbackStorySet(story.StorySet):
+ """Chrome proxy test sites"""
+ def __init__(self):
+ super(HTTPToDirectFallbackStorySet, self).__init__()
+
+ urls_list = [
+ 'http://check.googlezip.net/fallback/',
+ 'http://check.googlezip.net/block/',
+ ]
+
+ for url in urls_list:
+ self.AddStory(HTTPToDirectFallbackPage(url, self))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/https_bypass.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/https_bypass.py
new file mode 100644
index 00000000000..69235d883f3
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/https_bypass.py
@@ -0,0 +1,27 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class HTTPSBypassPage(page_module.Page):
+
+ def __init__(self, url, page_set):
+ super(HTTPSBypassPage, self).__init__(url=url, page_set=page_set)
+
+
+class HTTPSBypassStorySet(story.StorySet):
+
+ """ Chrome proxy test page for traffic over https. """
+
+ def __init__(self):
+ super(HTTPSBypassStorySet, self).__init__()
+
+ urls_list = [
+ 'https://check.googlezip.net/test.html',
+ ]
+
+ for url in urls_list:
+ self.AddStory(HTTPSBypassPage(url, self))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/lo_fi.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/lo_fi.py
new file mode 100644
index 00000000000..31355c01b86
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/lo_fi.py
@@ -0,0 +1,30 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class LoFiPage(page_module.Page):
+ """
+ A test page for the chrome proxy Lo-Fi tests.
+ Checks that the compressed image is below a certain threshold.
+ """
+
+ def __init__(self, url, page_set):
+ super(LoFiPage, self).__init__(url=url, page_set=page_set)
+
+
+class LoFiStorySet(story.StorySet):
+ """ Chrome proxy test sites """
+
+ def __init__(self):
+ super(LoFiStorySet, self).__init__()
+
+ urls_list = [
+ 'http://check.googlezip.net/lofi.png',
+ ]
+
+ for url in urls_list:
+ self.AddStory(LoFiPage(url, self))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/lo_fi_preview.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/lo_fi_preview.py
new file mode 100644
index 00000000000..61b00710115
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/lo_fi_preview.py
@@ -0,0 +1,30 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class LoFiPreviewPage(page_module.Page):
+ """
+ A test page for the chrome proxy Lo-Fi preview tests.
+ Checks that a LoFi preview page is served.
+ """
+
+ def __init__(self, url, page_set):
+ super(LoFiPreviewPage, self).__init__(url=url, page_set=page_set)
+
+
+class LoFiPreviewStorySet(story.StorySet):
+ """ Chrome proxy test sites """
+
+ def __init__(self):
+ super(LoFiPreviewStorySet, self).__init__()
+
+ urls_list = [
+ 'http://check.googlezip.net/test.html',
+ ]
+
+ for url in urls_list:
+ self.AddStory(LoFiPreviewPage(url, self))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/pass_through.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/pass_through.py
new file mode 100644
index 00000000000..259dc70f11b
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/pass_through.py
@@ -0,0 +1,39 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+class PassThroughPage(page_module.Page):
+ """
+ A test page for the chrome proxy pass-through tests.
+ """
+
+ def __init__(self, url, page_set):
+ super(PassThroughPage, self).__init__(url=url, page_set=page_set)
+
+ def RunNavigateSteps(self, action_runner):
+ super(PassThroughPage, self).RunNavigateSteps(action_runner)
+ action_runner.ExecuteJavaScript('''
+ (function() {
+ var request = new XMLHttpRequest();
+ request.open("GET", "%s");
+ request.setRequestHeader("Chrome-Proxy", "pass-through");
+ request.send(null);
+ })();''' % (self.url))
+ action_runner.Wait(1)
+
+
+class PassThroughStorySet(story.StorySet):
+ """ Chrome proxy test sites """
+
+ def __init__(self):
+ super(PassThroughStorySet, self).__init__()
+
+ urls_list = [
+ 'http://check.googlezip.net/image.png',
+ ]
+
+ for url in urls_list:
+ self.AddStory(PassThroughPage(url, self)) \ No newline at end of file
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/reenable_after_bypass.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/reenable_after_bypass.py
new file mode 100644
index 00000000000..8b35428fdaf
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/reenable_after_bypass.py
@@ -0,0 +1,41 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class ReenableAfterBypassPage(page_module.Page):
+ """A test page for the re-enable after bypass tests.
+
+ Attributes:
+ bypass_seconds_min: The minimum number of seconds that the bypass
+ triggered by loading this page should last.
+ bypass_seconds_max: The maximum number of seconds that the bypass
+ triggered by loading this page should last.
+ """
+
+ def __init__(self,
+ url,
+ page_set,
+ bypass_seconds_min,
+ bypass_seconds_max):
+ super(ReenableAfterBypassPage, self).__init__(url=url, page_set=page_set)
+ self.bypass_seconds_min = bypass_seconds_min
+ self.bypass_seconds_max = bypass_seconds_max
+
+
+class ReenableAfterBypassStorySet(story.StorySet):
+ """ Chrome proxy test sites """
+
+ def __init__(self):
+ super(ReenableAfterBypassStorySet, self).__init__()
+
+ # Test page for "Chrome-Proxy: block=0". Loading this page should cause all
+ # data reduction proxies to be bypassed for one to five minutes.
+ self.AddStory(ReenableAfterBypassPage(
+ url="http://check.googlezip.net/block/",
+ page_set=self,
+ bypass_seconds_min=60,
+ bypass_seconds_max=300))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/reenable_after_set_bypass.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/reenable_after_set_bypass.py
new file mode 100644
index 00000000000..d9597f6871e
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/reenable_after_set_bypass.py
@@ -0,0 +1,29 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class ReenableAfterSetBypassPage(page_module.Page):
+ """A test page for the re-enable after bypass tests with set duration."""
+
+ BYPASS_SECONDS = 20
+
+ def __init__(self, url, page_set):
+ super(ReenableAfterSetBypassPage, self).__init__(
+ url=url, page_set=page_set)
+
+
+class ReenableAfterSetBypassStorySet(story.StorySet):
+ """ Chrome proxy test sites """
+
+ def __init__(self):
+ super(ReenableAfterSetBypassStorySet, self).__init__()
+
+    # Test page for "Chrome-Proxy: block=20". Loading this page should cause
+    # all data reduction proxies to be bypassed for twenty seconds.
+ self.AddStory(ReenableAfterSetBypassPage(
+ url="http://check.googlezip.net/block20/",
+ page_set=self))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/safebrowsing.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/safebrowsing.py
new file mode 100644
index 00000000000..5c47806c12d
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/safebrowsing.py
@@ -0,0 +1,40 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+import logging
+
+from telemetry.core import exceptions
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class SafebrowsingPage(page_module.Page):
+
+ """
+ Why: Expect 'malware ahead' page. Use a short navigation timeout because no
+ response will be received.
+ """
+
+ def __init__(self, url, page_set, expect_timeout):
+ super(SafebrowsingPage, self).__init__(url=url, page_set=page_set)
+ self._expect_timeout = expect_timeout
+
+ def RunNavigateSteps(self, action_runner):
+ try:
+ action_runner.Navigate(self.url, timeout_in_seconds=5)
+ except exceptions.TimeoutException as e:
+ if self._expect_timeout:
+ logging.warning('Navigation timeout on page %s', self.url)
+ else:
+ raise e
+
+
+class SafebrowsingStorySet(story.StorySet):
+
+ """ Chrome proxy test sites """
+
+ def __init__(self, expect_timeout=False):
+ super(SafebrowsingStorySet, self).__init__()
+
+ self.AddStory(
+ SafebrowsingPage('http://www.ianfette.org/', self, expect_timeout))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/smoke.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/smoke.py
new file mode 100644
index 00000000000..92431143246
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/smoke.py
@@ -0,0 +1,92 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class SmokePage(page_module.Page):
+
+ def __init__(self, url, page_set, name=''):
+ super(SmokePage, self).__init__(url=url, page_set=page_set, name=name)
+
+
+class Page1(SmokePage):
+
+ """
+ Why: Check chrome proxy response headers.
+ """
+
+ def __init__(self, page_set):
+ super(Page1, self).__init__(
+ url='http://check.googlezip.net/test.html',
+ page_set=page_set,
+ name='header validation')
+
+
+class Page2(SmokePage):
+
+ """
+ Why: Check data compression
+ """
+
+ def __init__(self, page_set):
+ super(Page2, self).__init__(
+ url='http://check.googlezip.net/static/',
+ page_set=page_set,
+ name='compression: image')
+
+
+class Page3(SmokePage):
+
+ """
+ Why: Check bypass
+ """
+
+ def __init__(self, page_set):
+ super(Page3, self).__init__(
+ url='http://check.googlezip.net/block/',
+ page_set=page_set,
+ name='bypass')
+
+
+class Page4(SmokePage):
+
+ """
+ Why: Check data compression
+ """
+
+ def __init__(self, page_set):
+ super(Page4, self).__init__(
+ url='http://check.googlezip.net/static/',
+ page_set=page_set,
+ name='compression: javascript')
+
+
+class Page5(SmokePage):
+
+ """
+ Why: Check data compression
+ """
+
+ def __init__(self, page_set):
+ super(Page5, self).__init__(
+ url='http://check.googlezip.net/static/',
+ page_set=page_set,
+ name='compression: css')
+
+
+
+class SmokeStorySet(story.StorySet):
+
+ """ Chrome proxy test sites """
+
+ def __init__(self):
+ super(SmokeStorySet, self).__init__()
+
+ self.AddStory(Page1(self))
+ self.AddStory(Page2(self))
+ self.AddStory(Page3(self))
+ self.AddStory(Page4(self))
+ self.AddStory(Page5(self))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/synthetic.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/synthetic.py
new file mode 100644
index 00000000000..2840d9a3bd3
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/synthetic.py
@@ -0,0 +1,28 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class SyntheticPage(page_module.Page):
+
+ def __init__(self, url, page_set):
+ super(SyntheticPage, self).__init__(url=url, page_set=page_set)
+
+
+class SyntheticStorySet(story.StorySet):
+
+ """ Chrome proxy synthetic test pages. """
+
+ def __init__(self):
+ super(SyntheticStorySet, self).__init__()
+
+ urls_list = [
+ 'http://check.googlezip.net/test.html',
+ 'http://check.googlezip.net/static/'
+ ]
+
+ for url in urls_list:
+ self.AddStory(SyntheticPage(url, self))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/video.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/video.py
new file mode 100644
index 00000000000..9ba03946fc5
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/video.py
@@ -0,0 +1,73 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry.page import shared_page_state
+from telemetry import story
+
+
+class ControllableProxySharedState(shared_page_state.SharedPageState):
+
+ def WillRunStory(self, page):
+ if page.use_chrome_proxy:
+ self._finder_options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
+ super(ControllableProxySharedState, self).WillRunStory(page)
+
+
+class VideoPage(page_module.Page):
+ """A test page containing a video.
+
+ Attributes:
+ use_chrome_proxy: If true, fetches use the data reduction proxy.
+ Otherwise, fetches are sent directly to the origin.
+ """
+
+ def __init__(self, url, page_set, use_chrome_proxy):
+ super(VideoPage, self).__init__(
+ url=url, page_set=page_set,
+ shared_page_state_class=ControllableProxySharedState)
+ self.use_chrome_proxy = use_chrome_proxy
+
+
+class VideoStorySet(story.StorySet):
+ """Base class for Chrome proxy video tests."""
+
+ def __init__(self, mode):
+ super(VideoStorySet, self).__init__()
+ urls_list = [
+ 'http://check.googlezip.net/cacheable/video/buck_bunny_tiny.html',
+ ]
+ for url in urls_list:
+ self._AddStoryForURL(url)
+
+ def _AddStoryForURL(self, url):
+ raise NotImplementedError
+
+
+class VideoDirectStorySet(VideoStorySet):
+ """Chrome proxy video tests: direct fetch."""
+ def __init__(self):
+ super(VideoDirectStorySet, self).__init__('direct')
+
+ def _AddStoryForURL(self, url):
+ self.AddStory(VideoPage(url, self, False))
+
+
+class VideoProxiedStorySet(VideoStorySet):
+ """Chrome proxy video tests: proxied fetch."""
+ def __init__(self):
+ super(VideoProxiedStorySet, self).__init__('proxied')
+
+ def _AddStoryForURL(self, url):
+ self.AddStory(VideoPage(url, self, True))
+
+
+class VideoCompareStorySet(VideoStorySet):
+ """Chrome proxy video tests: compare direct and proxied fetches."""
+ def __init__(self):
+ super(VideoCompareStorySet, self).__init__('compare')
+
+ def _AddStoryForURL(self, url):
+ self.AddStory(VideoPage(url, self, False))
+ self.AddStory(VideoPage(url, self, True))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/video_instrumented.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/video_instrumented.py
new file mode 100644
index 00000000000..8cd993b2b5a
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/video_instrumented.py
@@ -0,0 +1,25 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+class VideoFrameStorySet(story.StorySet):
+ """Chrome proxy video tests: verify frames of transcoded videos"""
+ def __init__(self):
+ super(VideoFrameStorySet, self).__init__()
+ for url in [
+ 'http://check.googlezip.net/cacheable/video/buck_bunny_640x360_24fps_video.html',
+ 'http://check.googlezip.net/cacheable/video/buck_bunny_60fps_video.html',
+ ]:
+ self.AddStory(page_module.Page(url, self))
+
+class VideoAudioStorySet(story.StorySet):
+ """Chrome proxy video tests: verify audio of transcoded videos"""
+ def __init__(self):
+ super(VideoAudioStorySet, self).__init__()
+ for url in [
+ 'http://check.googlezip.net/cacheable/video/buck_bunny_640x360_24fps_audio.html',
+ ]:
+ self.AddStory(page_module.Page(url, self))
diff --git a/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/youtube.py b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/youtube.py
new file mode 100644
index 00000000000..e4397bf2616
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/youtube.py
@@ -0,0 +1,27 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class YouTubePage(page_module.Page):
+
+ def __init__(self, url, page_set):
+ super(YouTubePage, self).__init__(url=url, page_set=page_set)
+
+
+class YouTubeStorySet(story.StorySet):
+
+ """ Chrome proxy test site to verify YouTube functionality. """
+
+ def __init__(self):
+ super(YouTubeStorySet, self).__init__()
+
+ urls_list = [
+ 'http://data-saver-test.appspot.com/youtube',
+ ]
+
+ for url in urls_list:
+ self.AddStory(YouTubePage(url, self))
diff --git a/chromium/tools/chrome_proxy/integration_tests/videowrapper.js b/chromium/tools/chrome_proxy/integration_tests/videowrapper.js
new file mode 100644
index 00000000000..242b7764565
--- /dev/null
+++ b/chromium/tools/chrome_proxy/integration_tests/videowrapper.js
@@ -0,0 +1,100 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This script finds the first video element on a page and collects metrics
+// for that element. This is based on src/tools/perf/metrics/media.js.
+
+(function() {
+ // VideoWrapper attaches event listeners to collect metrics.
+ // The constructor starts playing the video.
+ function VideoWrapper(element) {
+ if (!(element instanceof HTMLVideoElement))
+ throw new Error('Unrecognized video element type ' + element);
+ metrics['ready'] = false;
+ this.element = element;
+ element.loop = false;
+ // Set the basic event handlers for this HTML5 video element.
+ this.element.addEventListener('loadedmetadata', this.onLoaded.bind(this));
+ this.element.addEventListener('canplay', this.onCanplay.bind(this));
+ this.element.addEventListener('ended', this.onEnded.bind(this));
+ this.playbackTimer = new Timer();
+ element.play()
+ }
+
+ VideoWrapper.prototype.onLoaded = function(e) {
+ if (this.element.readyState == HTMLMediaElement.HAVE_NOTHING) {
+ return
+ }
+ metrics['ready'] = true;
+ metrics['video_height'] = this.element.videoHeight;
+ metrics['video_width'] = this.element.videoWidth;
+ metrics['video_duration'] = this.element.duration;
+ window.__chromeProxyVideoLoaded = true;
+ };
+
+ VideoWrapper.prototype.onCanplay = function(event) {
+ metrics['time_to_play_ms'] = this.playbackTimer.stop();
+ };
+
+ VideoWrapper.prototype.onEnded = function(event) {
+ var time_to_end = this.playbackTimer.stop() - metrics['time_to_play_ms'];
+ metrics['buffering_time_ms'] = time_to_end - this.element.duration * 1000;
+ metrics['decoded_audio_bytes'] = this.element.webkitAudioDecodedByteCount;
+ metrics['decoded_video_bytes'] = this.element.webkitVideoDecodedByteCount;
+ metrics['decoded_frames'] = this.element.webkitDecodedFrameCount;
+ metrics['dropped_frames'] = this.element.webkitDroppedFrameCount;
+ window.__chromeProxyVideoEnded = true;
+ };
+
+ function MediaMetric(element) {
+ if (element instanceof HTMLMediaElement)
+ return new VideoWrapper(element);
+ throw new Error('Unrecognized media element type.');
+ }
+
+ function Timer() {
+ this.start();
+ }
+
+ Timer.prototype = {
+ start: function() {
+ this.start_ = getCurrentTime();
+ },
+
+ stop: function() {
+ // Return delta time since start in millisecs.
+ return Math.round((getCurrentTime() - this.start_) * 1000) / 1000;
+ }
+ };
+
+ function getCurrentTime() {
+ if (window.performance)
+ return (performance.now ||
+ performance.mozNow ||
+ performance.msNow ||
+ performance.oNow ||
+ performance.webkitNow).call(window.performance);
+ else
+ return Date.now();
+ }
+
+ function createVideoWrappersForDocument() {
+ var videos = document.querySelectorAll('video');
+ switch (videos.length) {
+ case 0:
+ throw new Error('Page has no videos.');
+ case 1:
+ break;
+ default:
+ throw new Error('Page too many videos: ' + videos.length.toString());
+ }
+ new VideoWrapper(videos[0])
+ }
+
+ metrics = {};
+ window.__chromeProxyCreateVideoWrappers = createVideoWrappersForDocument;
+ window.__chromeProxyVideoMetrics = metrics;
+ window.__chromeProxyVideoLoaded = false;
+ window.__chromeProxyVideoEnded = false;
+})();
diff --git a/chromium/tools/chrome_proxy/live_tests/__init__.py b/chromium/tools/chrome_proxy/live_tests/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/chromium/tools/chrome_proxy/live_tests/__init__.py
diff --git a/chromium/tools/chrome_proxy/live_tests/chrome_proxy_benchmark.py b/chromium/tools/chrome_proxy/live_tests/chrome_proxy_benchmark.py
new file mode 100644
index 00000000000..68e75c13bbe
--- /dev/null
+++ b/chromium/tools/chrome_proxy/live_tests/chrome_proxy_benchmark.py
@@ -0,0 +1,67 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from live_tests import chrome_proxy_measurements as measurements
+from live_tests import pagesets
+from telemetry import benchmark
+
+
+class ChromeProxyLatency(benchmark.Benchmark):
+ tag = 'latency'
+ test = measurements.ChromeProxyLatency
+ page_set = pagesets.Top20StorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.latency.top_20'
+
+
+class ChromeProxyLatencyDirect(benchmark.Benchmark):
+ tag = 'latency_direct'
+ test = measurements.ChromeProxyLatencyDirect
+ page_set = pagesets.Top20StorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.latency_direct.top_20'
+
+
+class ChromeProxyLatencyMetrics(benchmark.Benchmark):
+ tag = 'latency_metrics'
+ test = measurements.ChromeProxyLatencyDirect
+ page_set = pagesets.MetricsStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.latency_metrics.metrics'
+
+
+class ChromeProxyDataSaving(benchmark.Benchmark):
+ tag = 'data_saving'
+ test = measurements.ChromeProxyDataSaving
+ page_set = pagesets.Top20StorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.data_saving.top_20'
+
+
+class ChromeProxyDataSavingDirect(benchmark.Benchmark):
+ tag = 'data_saving_direct'
+ test = measurements.ChromeProxyDataSavingDirect
+ page_set = pagesets.Top20StorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.data_saving_direct.top_20'
+
+class ChromeProxyDataSavingMetrics(benchmark.Benchmark):
+ tag = 'data_saving_metrics'
+ test = measurements.ChromeProxyDataSavingDirect
+ page_set = pagesets.MetricsStorySet
+
+ @classmethod
+ def Name(cls):
+ return 'chrome_proxy_benchmark.data_saving_metrics.metrics'
+
diff --git a/chromium/tools/chrome_proxy/live_tests/chrome_proxy_measurements.py b/chromium/tools/chrome_proxy/live_tests/chrome_proxy_measurements.py
new file mode 100644
index 00000000000..96b5a4b4031
--- /dev/null
+++ b/chromium/tools/chrome_proxy/live_tests/chrome_proxy_measurements.py
@@ -0,0 +1,87 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+import chrome_proxy_metrics as metrics
+from common import chrome_proxy_measurements as measurements
+from telemetry.core import exceptions
+from telemetry.page import page_test
+
+class ChromeProxyLatencyBase(page_test.PageTest):
+ """Chrome latency measurement."""
+
+ def __init__(self, *args, **kwargs):
+ super(ChromeProxyLatencyBase, self).__init__(*args, **kwargs)
+ self._metrics = metrics.ChromeProxyMetric()
+
+ def WillNavigateToPage(self, page, tab):
+ tab.ClearCache(force=True)
+ self._metrics.Start(page, tab)
+
+ def ValidateAndMeasurePage(self, page, tab, results):
+ # Wait for the load event.
+ tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
+ self._metrics.Stop(page, tab)
+ self._metrics.AddResultsForLatency(tab, results)
+
+
+class ChromeProxyLatency(ChromeProxyLatencyBase):
+ """Chrome proxy latency measurement."""
+
+ def __init__(self, *args, **kwargs):
+ super(ChromeProxyLatency, self).__init__(*args, **kwargs)
+
+ def CustomizeBrowserOptions(self, options):
+ # NOTE: When using the Data Saver API, the first few requests for this test
+ # could go over direct instead of through the Data Reduction Proxy if the
+ # Data Saver API fetch is slow to finish. This test can't just use
+ # measurements.WaitForViaHeader(tab) since that would affect the results of
+ # the latency measurement, e.g. Chrome would have a hot proxy connection.
+ options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
+
+
+class ChromeProxyLatencyDirect(ChromeProxyLatencyBase):
+ """Direct connection latency measurement."""
+
+ def __init__(self, *args, **kwargs):
+ super(ChromeProxyLatencyDirect, self).__init__(*args, **kwargs)
+
+
+class ChromeProxyDataSavingBase(page_test.PageTest):
+ """Chrome data saving measurement."""
+ def __init__(self, *args, **kwargs):
+ super(ChromeProxyDataSavingBase, self).__init__(*args, **kwargs)
+ self._metrics = metrics.ChromeProxyMetric()
+
+ def WillNavigateToPage(self, page, tab):
+ tab.ClearCache(force=True)
+ self._metrics.Start(page, tab)
+
+ def ValidateAndMeasurePage(self, page, tab, results):
+ # Wait for the load event.
+ tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300)
+ self._metrics.Stop(page, tab)
+ self._metrics.AddResultsForDataSaving(tab, results)
+
+
+class ChromeProxyDataSaving(ChromeProxyDataSavingBase):
+ """Chrome proxy data saving measurement."""
+
+ def __init__(self, *args, **kwargs):
+ super(ChromeProxyDataSaving, self).__init__(*args, **kwargs)
+
+ def CustomizeBrowserOptions(self, options):
+ options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth')
+
+ def WillNavigateToPage(self, page, tab):
+ measurements.WaitForViaHeader(tab)
+ super(ChromeProxyDataSaving, self).WillNavigateToPage(page, tab)
+
+
+class ChromeProxyDataSavingDirect(ChromeProxyDataSavingBase):
+ """Direct connection data saving measurement."""
+
+ def __init__(self, *args, **kwargs):
+ super(ChromeProxyDataSavingDirect, self).__init__(*args, **kwargs)
diff --git a/chromium/tools/chrome_proxy/live_tests/chrome_proxy_metrics.py b/chromium/tools/chrome_proxy/live_tests/chrome_proxy_metrics.py
new file mode 100644
index 00000000000..e456a00f364
--- /dev/null
+++ b/chromium/tools/chrome_proxy/live_tests/chrome_proxy_metrics.py
@@ -0,0 +1,136 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+from common import chrome_proxy_metrics
+from common import network_metrics
+from common.chrome_proxy_metrics import ChromeProxyMetricException
+from telemetry.page import page_test
+from telemetry.value import scalar
+
+
+class ChromeProxyMetric(network_metrics.NetworkMetric):
+ """A Chrome proxy timeline metric."""
+
+ def __init__(self):
+ super(ChromeProxyMetric, self).__init__()
+ self.compute_data_saving = True
+
+ def SetEvents(self, events):
+ """Used for unittest."""
+ self._events = events
+
+ def ResponseFromEvent(self, event):
+ return chrome_proxy_metrics.ChromeProxyResponse(event)
+
+ def AddResults(self, tab, results):
+ raise NotImplementedError
+
+ def AddResultsForDataSaving(self, tab, results):
+ resources_via_proxy = 0
+ resources_from_cache = 0
+ resources_direct = 0
+
+ super(ChromeProxyMetric, self).AddResults(tab, results)
+ for resp in self.IterResponses(tab):
+ if resp.response.served_from_cache:
+ resources_from_cache += 1
+ if resp.HasChromeProxyViaHeader():
+ resources_via_proxy += 1
+ else:
+ resources_direct += 1
+
+ if resources_from_cache + resources_via_proxy + resources_direct == 0:
+ raise ChromeProxyMetricException, (
+ 'Expected at least one response, but zero responses were received.')
+
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'resources_via_proxy', 'count',
+ resources_via_proxy))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'resources_from_cache', 'count',
+ resources_from_cache))
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'resources_direct', 'count', resources_direct))
+
+ def AddResultsForLatency(self, tab, results):
+    # TODO(bustamante): This is a hack to work around crbug.com/467174,
+ # once fixed just pull down window.performance.timing object and
+ # reference that everywhere.
+ load_event_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.loadEventStart')
+ navigation_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.navigationStart')
+ dom_content_loaded_event_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.domContentLoadedEventStart')
+ fetch_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.fetchStart')
+ request_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.requestStart')
+ domain_lookup_end = tab.EvaluateJavaScript(
+ 'window.performance.timing.domainLookupEnd')
+ domain_lookup_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.domainLookupStart')
+ connect_end = tab.EvaluateJavaScript(
+ 'window.performance.timing.connectEnd')
+ connect_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.connectStart')
+ response_end = tab.EvaluateJavaScript(
+ 'window.performance.timing.responseEnd')
+ response_start = tab.EvaluateJavaScript(
+ 'window.performance.timing.responseStart')
+
+ # NavigationStart relative markers in milliseconds.
+ load_start = (float(load_event_start) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'load_start', 'ms', load_start))
+
+ dom_content_loaded_start = (
+ float(dom_content_loaded_event_start) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'dom_content_loaded_start', 'ms',
+ dom_content_loaded_start))
+
+ fetch_start = (float(fetch_start) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'fetch_start', 'ms', fetch_start,
+ important=False))
+
+ request_start = (float(request_start) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'request_start', 'ms', request_start,
+ important=False))
+
+ response_start = (float(response_start) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'response_start', 'ms', response_start,
+ important=False))
+
+ response_end = (float(response_end) - navigation_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'response_end', 'ms', response_end,
+ important=False))
+
+ # Phase measurements in milliseconds.
+ domain_lookup_duration = (float(domain_lookup_end) - domain_lookup_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'domain_lookup_duration', 'ms',
+ domain_lookup_duration, important=False))
+
+ connect_duration = (float(connect_end) - connect_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'connect_duration', 'ms', connect_duration,
+ important=False))
+
+ request_duration = (float(response_start) - request_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'request_duration', 'ms', request_duration,
+ important=False))
+
+ response_duration = (float(response_end) - response_start)
+ results.AddValue(scalar.ScalarValue(
+ results.current_page, 'response_duration', 'ms', response_duration,
+ important=False))
diff --git a/chromium/tools/chrome_proxy/live_tests/chrome_proxy_metrics_unittest.py b/chromium/tools/chrome_proxy/live_tests/chrome_proxy_metrics_unittest.py
new file mode 100644
index 00000000000..1bb9c4afb81
--- /dev/null
+++ b/chromium/tools/chrome_proxy/live_tests/chrome_proxy_metrics_unittest.py
@@ -0,0 +1,106 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import unittest
+
+from common import chrome_proxy_metrics as common_metrics
+from common import network_metrics_unittest as network_unittest
+from live_tests import chrome_proxy_metrics as metrics
+from telemetry.testing import test_page_test_results
+
+TEST_EXTRA_VIA_HEADER = '1.1 EXTRA_VIA_HEADER'
+
+# Timeline events used in tests.
+# An HTML not via proxy.
+EVENT_HTML_DIRECT = network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.html1',
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Length': str(len(network_unittest.HTML_BODY)),
+ },
+ body=network_unittest.HTML_BODY)
+
+# An HTML via proxy.
+EVENT_HTML_PROXY_VIA = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.html2',
+ response_headers={
+ 'Content-Type': 'text/html',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(len(network_unittest.HTML_BODY)),
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ body=network_unittest.HTML_BODY,
+ remote_port=443))
+
+# An image via proxy with Via header.
+EVENT_IMAGE_PROXY_VIA = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ body=base64.b64encode(network_unittest.IMAGE_BODY),
+ base64_encoded_body=True,
+ remote_port=443))
+
+# An image via proxy with Via header and it is cached.
+EVENT_IMAGE_PROXY_CACHED = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ 'X-Original-Content-Length': str(network_unittest.IMAGE_OCL),
+ 'Via': '1.1 ' + common_metrics.CHROME_PROXY_VIA_HEADER,
+ },
+ body=base64.b64encode(network_unittest.IMAGE_BODY),
+ base64_encoded_body=True,
+ served_from_cache=True))
+
+
+# An image fetched directly.
+EVENT_IMAGE_DIRECT = (
+ network_unittest.NetworkMetricTest.MakeNetworkTimelineEvent(
+ url='http://test.image',
+ response_headers={
+ 'Content-Type': 'image/jpeg',
+ 'Content-Encoding': 'gzip',
+ },
+ body=base64.b64encode(network_unittest.IMAGE_BODY),
+ base64_encoded_body=True))
+
+
+class ChromeProxyMetricTest(unittest.TestCase):
+
+ def testChromeProxyMetricForDataSaving(self):
+ metric = metrics.ChromeProxyMetric()
+ events = [
+ EVENT_HTML_DIRECT,
+ EVENT_HTML_PROXY_VIA,
+ EVENT_IMAGE_PROXY_CACHED,
+ EVENT_IMAGE_DIRECT]
+ metric.SetEvents(events)
+
+ self.assertTrue(len(events), len(list(metric.IterResponses(None))))
+ results = test_page_test_results.TestPageTestResults(self)
+
+ metric.AddResultsForDataSaving(None, results)
+ results.AssertHasPageSpecificScalarValue('resources_via_proxy', 'count', 2)
+ results.AssertHasPageSpecificScalarValue('resources_from_cache', 'count', 1)
+ results.AssertHasPageSpecificScalarValue('resources_direct', 'count', 2)
+
+ # Passing in zero responses should cause a failure.
+ metric.SetEvents([])
+ no_responses_exception = False
+ try:
+ metric.AddResultsForDataSaving(None, results)
+ except common_metrics.ChromeProxyMetricException:
+ no_responses_exception = True
+ self.assertTrue(no_responses_exception)
+
diff --git a/chromium/tools/chrome_proxy/live_tests/pagesets/__init__.py b/chromium/tools/chrome_proxy/live_tests/pagesets/__init__.py
new file mode 100644
index 00000000000..45ce4af9df8
--- /dev/null
+++ b/chromium/tools/chrome_proxy/live_tests/pagesets/__init__.py
@@ -0,0 +1,20 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import inspect
+import os
+import sys
+
+from telemetry.core import discover
+from telemetry import story
+
+
+# Import all submodules' StorySet classes.
+start_dir = os.path.dirname(os.path.abspath(__file__))
+top_level_dir = os.path.abspath(os.path.join(start_dir, os.pardir, os.pardir))
+base_class = story.StorySet
+for cls in discover.DiscoverClasses(
+ start_dir, top_level_dir, base_class).values():
+ setattr(sys.modules[__name__], cls.__name__, cls)
+
diff --git a/chromium/tools/chrome_proxy/live_tests/pagesets/data/chrome_proxy_top_20.json b/chromium/tools/chrome_proxy/live_tests/pagesets/data/chrome_proxy_top_20.json
new file mode 100644
index 00000000000..7fdb5572f97
--- /dev/null
+++ b/chromium/tools/chrome_proxy/live_tests/pagesets/data/chrome_proxy_top_20.json
@@ -0,0 +1,27 @@
+{
+ "description": "Describes the Web Page Replay archives for a page set. Don't edit by hand! Use record_wpr for updating.",
+ "archives": {
+ "chrome_proxy_top_20_000.wpr": [
+ "https://www.google.com/#hl=en&q=barack+obama",
+ "http://www.youtube.com",
+ "Wordpress",
+ "Facebook",
+ "LinkedIn",
+ "Wikipedia (1 tab)",
+ "Twitter",
+ "Pinterest",
+ "ESPN",
+ "http://news.yahoo.com",
+ "http://www.cnn.com",
+ "Weather.com",
+ "http://www.amazon.com",
+ "http://www.ebay.com",
+ "http://games.yahoo.com",
+ "http://booking.com",
+ "http://answers.yahoo.com",
+ "http://sports.yahoo.com/",
+ "http://techcrunch.com",
+ "http://www.nytimes.com"
+ ]
+ }
+} \ No newline at end of file
diff --git a/chromium/tools/chrome_proxy/live_tests/pagesets/data/chrome_proxy_top_20_000.wpr.sha1 b/chromium/tools/chrome_proxy/live_tests/pagesets/data/chrome_proxy_top_20_000.wpr.sha1
new file mode 100644
index 00000000000..b9a9f39ac34
--- /dev/null
+++ b/chromium/tools/chrome_proxy/live_tests/pagesets/data/chrome_proxy_top_20_000.wpr.sha1
@@ -0,0 +1 @@
+ff4418fccf62dd0f9ca0b738917d836964dde801 \ No newline at end of file
diff --git a/chromium/tools/chrome_proxy/live_tests/pagesets/metrics.py b/chromium/tools/chrome_proxy/live_tests/pagesets/metrics.py
new file mode 100644
index 00000000000..0bdf6cd3f75
--- /dev/null
+++ b/chromium/tools/chrome_proxy/live_tests/pagesets/metrics.py
@@ -0,0 +1,27 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class MetricsPage(page_module.Page):
+
+ def __init__(self, url, page_set):
+ super(MetricsPage, self).__init__(url=url, page_set=page_set)
+
+
+class MetricsStorySet(story.StorySet):
+
+ """ Chrome proxy test sites for measuring data savings """
+
+ def __init__(self):
+ super(MetricsStorySet, self).__init__()
+
+ urls_list = [
+ 'http://check.googlezip.net/metrics/',
+ ]
+
+ for url in urls_list:
+ self.AddStory(MetricsPage(url, self))
diff --git a/chromium/tools/chrome_proxy/live_tests/pagesets/top_20.py b/chromium/tools/chrome_proxy/live_tests/pagesets/top_20.py
new file mode 100644
index 00000000000..3e5897dd1ab
--- /dev/null
+++ b/chromium/tools/chrome_proxy/live_tests/pagesets/top_20.py
@@ -0,0 +1,91 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from telemetry.page import page as page_module
+from telemetry import story
+
+
+class Top20Page(page_module.Page):
+
+ def __init__(self, url, page_set, name=''):
+ super(Top20Page, self).__init__(url=url, page_set=page_set, name=name)
+ self.archive_data_file = '../data/chrome_proxy_top_20.json'
+
+class Top20StorySet(story.StorySet):
+
+ """ Pages hand-picked for Chrome Proxy tests. """
+
+ def __init__(self):
+ super(Top20StorySet, self).__init__(
+ archive_data_file='../data/chrome_proxy_top_20.json')
+
+ # Why: top google property; a google tab is often open
+ self.AddStory(Top20Page('https://www.google.com/#hl=en&q=barack+obama',
+ self))
+
+ # Why: #3 (Alexa global)
+ self.AddStory(Top20Page('http://www.youtube.com', self))
+
+ # Why: #18 (Alexa global), Picked an interesting post
+ self.AddStory(Top20Page(
+ # pylint: disable=C0301
+ 'http://en.blog.wordpress.com/2012/09/04/freshly-pressed-editors-picks-for-august-2012/',
+ self, 'Wordpress'))
+
+ # Why: top social,Public profile
+ self.AddStory(Top20Page('http://www.facebook.com/barackobama', self,
+ 'Facebook'))
+
+ # Why: #12 (Alexa global),Public profile
+ self.AddStory(Top20Page('http://www.linkedin.com/in/linustorvalds',
+ self, 'LinkedIn'))
+
+ # Why: #6 (Alexa) most visited worldwide,Picked an interesting page
+ self.AddStory(Top20Page('http://en.wikipedia.org/wiki/Wikipedia', self,
+ 'Wikipedia (1 tab)'))
+
+ # Why: #8 (Alexa global),Picked an interesting page
+ self.AddStory(Top20Page('https://twitter.com/katyperry', self,
+ 'Twitter'))
+
+ # Why: #37 (Alexa global)
+ self.AddStory(Top20Page('http://pinterest.com', self, 'Pinterest'))
+
+ # Why: #1 sports
+ self.AddStory(Top20Page('http://espn.go.com', self, 'ESPN'))
+
+ # Why: #1 news worldwide (Alexa global)
+ self.AddStory(Top20Page('http://news.yahoo.com', self))
+
+ # Why: #2 news worldwide
+ self.AddStory(Top20Page('http://www.cnn.com', self))
+
+ # Why: #7 (Alexa news); #27 total time spent,Picked interesting page
+ self.AddStory(Top20Page(
+ 'http://www.weather.com/weather/right-now/Mountain+View+CA+94043',
+ self, 'Weather.com'))
+
+ # Why: #1 world commerce website by visits; #3 commerce in the US by time
+ # spent
+ self.AddStory(Top20Page('http://www.amazon.com', self))
+
+ # Why: #1 commerce website by time spent by users in US
+ self.AddStory(Top20Page('http://www.ebay.com', self))
+
+ # Why: #1 games according to Alexa (with actual games in it)
+ self.AddStory(Top20Page('http://games.yahoo.com', self))
+
+ # Why: #1 Alexa recreation
+ self.AddStory(Top20Page('http://booking.com', self))
+
+ # Why: #1 Alexa reference
+ self.AddStory(Top20Page('http://answers.yahoo.com', self))
+
+ # Why: #1 Alexa sports
+ self.AddStory(Top20Page('http://sports.yahoo.com/', self))
+
+ # Why: top tech blog
+ self.AddStory(Top20Page('http://techcrunch.com', self))
+
+ self.AddStory(Top20Page('http://www.nytimes.com', self))
diff --git a/chromium/tools/chrome_proxy/run_benchmark b/chromium/tools/chrome_proxy/run_benchmark
new file mode 100755
index 00000000000..c71db43d849
--- /dev/null
+++ b/chromium/tools/chrome_proxy/run_benchmark
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+import chrome_proxy_config
+
+sys.path.insert(1, chrome_proxy_config.TELEMETRY_DIR)
+
+from telemetry import benchmark_runner
+
+
+if __name__ == '__main__':
+ sys.exit(benchmark_runner.main(chrome_proxy_config.Config(
+ ['integration_tests'])))
diff --git a/chromium/tools/chrome_proxy/run_livetests b/chromium/tools/chrome_proxy/run_livetests
new file mode 100755
index 00000000000..e0c627eb0d4
--- /dev/null
+++ b/chromium/tools/chrome_proxy/run_livetests
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+import chrome_proxy_config
+
+sys.path.insert(1, chrome_proxy_config.TELEMETRY_DIR)
+
+from telemetry import benchmark_runner
+
+
+if __name__ == '__main__':
+ sys.exit(benchmark_runner.main(chrome_proxy_config.Config(
+ ['live_tests'])))
diff --git a/chromium/tools/chrome_proxy/run_tests b/chromium/tools/chrome_proxy/run_tests
new file mode 100755
index 00000000000..12e41ddc181
--- /dev/null
+++ b/chromium/tools/chrome_proxy/run_tests
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script runs unit tests of the code in the perf directory.
+
+This script DOES NOT run benchmarks. run_benchmark does that.
+"""
+
+import os
+import sys
+
+import chrome_proxy_config
+
+sys.path.insert(1, chrome_proxy_config.TELEMETRY_DIR)
+
+from telemetry.testing import unittest_runner
+
+
+if __name__ == '__main__':
+ sys.exit(unittest_runner.Run(chrome_proxy_config.Config(['integration_tests',
+ 'live_tests'])))
diff --git a/chromium/tools/chrome_proxy/testserver/app.yaml b/chromium/tools/chrome_proxy/testserver/app.yaml
new file mode 100644
index 00000000000..a73e1ee96d3
--- /dev/null
+++ b/chromium/tools/chrome_proxy/testserver/app.yaml
@@ -0,0 +1,14 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+application: chromeproxy-test
+version: 1
+runtime: go
+api_version: go1
+
+handlers:
+- url: /image
+ static_dir: image
+- url: /.*
+ script: _go_app
diff --git a/chromium/tools/chrome_proxy/testserver/data/image1.png b/chromium/tools/chrome_proxy/testserver/data/image1.png
new file mode 100644
index 00000000000..0e96b29e12e
--- /dev/null
+++ b/chromium/tools/chrome_proxy/testserver/data/image1.png
Binary files differ
diff --git a/chromium/tools/chrome_proxy/testserver/image/image1.png b/chromium/tools/chrome_proxy/testserver/image/image1.png
new file mode 100644
index 00000000000..0e96b29e12e
--- /dev/null
+++ b/chromium/tools/chrome_proxy/testserver/image/image1.png
Binary files differ
diff --git a/chromium/tools/chrome_proxy/testserver/server.go b/chromium/tools/chrome_proxy/testserver/server.go
new file mode 100644
index 00000000000..698e45ce46d
--- /dev/null
+++ b/chromium/tools/chrome_proxy/testserver/server.go
@@ -0,0 +1,163 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// Test server to facilitate the data reduction proxy Telemetry tests.
+//
+// The server runs at http://chromeproxy-test.appspot.com/. Please contact
+// people in OWNERS for server issues.
+//
+// For running an AppEngine Go server, see:
+// https://developers.google.com/appengine/docs/go/gettingstarted/introduction.
+//
+// The goal is to keep the test logic on the client side (Telemetry)
+// as much as possible. This server will only return a resource
+// and/or override the response as specified by the data encoded
+// in the request URL queries.
+//
+// For example, on receiving the query
+// /default?respBody=bmV3IGJvZHk=&respHeader=eyJWaWEiOlsiVmlhMSIsIlZpYTIiXX0%3D&respStatus=204
+// the server sends back a response with
+// Status code: 204
+// Additional response headers: "Via: Via1" and "Via: Via2"
+// Response body: "new body"
+// where the overriding headers and body are base64 encoded in the request query.
+
+package server
+
+import (
+ "bytes"
+ "encoding/base64"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "os"
+ "strconv"
+)
+
+func init() {
+ http.HandleFunc("/requestHeader", requestHeader)
+ http.HandleFunc("/resource", resource)
+ http.HandleFunc("/default", defaultResponse)
+}
+
+// requestHeader returns request headers in response body as text.
+func requestHeader(w http.ResponseWriter, r *http.Request) {
+ r.Header.Write(w)
+}
+
+// resource returns the content of a data file specified by "r=" query as the response body.
+// The response could be overridden by request queries.
+// See parseOverrideQuery.
+func resource(w http.ResponseWriter, r *http.Request) {
+ wroteBody, err := applyOverride(w, r)
+ if err != nil || wroteBody {
+ return
+ }
+ path, ok := r.URL.Query()["r"]
+ if !ok || len(path) != 1 {
+ w.WriteHeader(http.StatusBadRequest)
+ w.Write([]byte("no resource in query"))
+ return
+ }
+ if _, err := writeFromFile(w, path[0]); err != nil {
+ w.WriteHeader(http.StatusBadRequest)
+ w.Write([]byte(fmt.Sprintf("Failed to get %s: %v", path[0], err)))
+ return
+ }
+}
+
+// defaultResponse returns "ok" as response body, if the body is not overridden.
+// The response could be overridden by request queries.
+// See parseOverrideQuery.
+func defaultResponse(w http.ResponseWriter, r *http.Request) {
+ wroteBody, err := applyOverride(w, r)
+ if err != nil {
+ return
+ }
+ if !wroteBody {
+ w.Write([]byte("ok"))
+ }
+}
+
+type override struct {
+ status int
+ header http.Header
+ body io.Reader
+}
+
+// parseOverrideQuery parses the queries in r and returns an override.
+// It supports the following queries:
+// "respStatus": an integer to override response status code;
+// "respHeader": base64 encoded JSON data to override the response headers;
+// "respBody": base64 encoded JSON data to override the response body.
+func parseOverrideQuery(r *http.Request) (*override, error) {
+ q := r.URL.Query()
+ resp := &override{0, nil, nil}
+ if v, ok := q["respStatus"]; ok && len(v) == 1 && len(v[0]) > 0 {
+ status, err := strconv.ParseInt(v[0], 10, 0)
+ if err != nil {
+ return nil, errors.New(fmt.Sprintf("respStatus: %v", err))
+ }
+ resp.status = int(status)
+ }
+ if v, ok := q["respHeader"]; ok && len(v) == 1 && len(v[0]) > 0 {
+ // Example header after base64 decoding:
+ // {"Via": ["Telemetry Test", "Test2"], "Name": ["XYZ"], "Cache-Control": ["public"]}
+ headerValue, err := base64.URLEncoding.DecodeString(v[0])
+ if err != nil {
+ return nil, errors.New(fmt.Sprintf("Decoding respHeader: %v", err))
+ }
+ var header http.Header
+ err = json.Unmarshal(headerValue, &header)
+ if err != nil {
+ return nil, errors.New(
+ fmt.Sprintf("Unmarlshal (%s) error: %v", string(headerValue), err))
+ }
+ resp.header = header
+ }
+ if v, ok := q["respBody"]; ok && len(v) == 1 && len(v[0]) > 0 {
+ body, err := base64.URLEncoding.DecodeString(v[0])
+ if err != nil {
+ return nil, errors.New(
+ fmt.Sprintf("Decoding respBody error: %v", err))
+ }
+ resp.body = bytes.NewBuffer(body)
+ }
+ return resp, nil
+}
+
+// applyOverride applies the override queries in r to w and returns whether the response
+// body is overridden.
+func applyOverride(w http.ResponseWriter, r *http.Request) (wroteBody bool, err error) {
+ resp, err := parseOverrideQuery(r)
+ if err != nil {
+ w.WriteHeader(http.StatusBadRequest)
+ w.Write([]byte(err.Error()))
+ return false, err
+ }
+ headers := w.Header()
+ if resp.header != nil {
+ for k, v := range resp.header {
+ headers[k] = v
+ }
+ }
+ if resp.status > 0 {
+ w.WriteHeader(resp.status)
+ }
+ if resp.body != nil {
+ _, err := io.Copy(w, resp.body)
+ return true, err
+ }
+ return false, nil
+}
+
+func writeFromFile(w io.Writer, filename string) (int64, error) {
+ f, err := os.Open(filename)
+ if err != nil {
+ return 0, err
+ }
+ return io.Copy(w, f)
+}
diff --git a/chromium/tools/chrome_proxy/testserver/server_test.go b/chromium/tools/chrome_proxy/testserver/server_test.go
new file mode 100644
index 00000000000..d369b5515f7
--- /dev/null
+++ b/chromium/tools/chrome_proxy/testserver/server_test.go
@@ -0,0 +1,103 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package server
+
+import (
+ "encoding/base64"
+ "encoding/json"
+ "net/http"
+ "net/http/httptest"
+ "net/url"
+ "reflect"
+ "strconv"
+ "testing"
+)
+
+func composeQuery(path string, code int, headers http.Header, body []byte) (string, error) {
+ u, err := url.Parse(path)
+ if err != nil {
+ return "", err
+ }
+ q := u.Query()
+ if code > 0 {
+ q.Set("respStatus", strconv.Itoa(code))
+ }
+ if headers != nil {
+ h, err := json.Marshal(headers)
+ if err != nil {
+ return "", err
+ }
+ q.Set("respHeader", base64.URLEncoding.EncodeToString(h))
+ }
+ if len(body) > 0 {
+ q.Set("respBody", base64.URLEncoding.EncodeToString(body))
+ }
+ u.RawQuery = q.Encode()
+ return u.String(), nil
+}
+
+func TestResponseOverride(t *testing.T) {
+ tests := []struct {
+ name string
+ code int
+ headers http.Header
+ body []byte
+ }{
+ {name: "code", code: 204},
+ {name: "body", body: []byte("new body")},
+ {
+ name: "headers",
+ headers: http.Header{
+ "Via": []string{"Via1", "Via2"},
+ "Content-Type": []string{"random content"},
+ },
+ },
+ {
+ name: "everything",
+ code: 204,
+ body: []byte("new body"),
+ headers: http.Header{
+ "Via": []string{"Via1", "Via2"},
+ "Content-Type": []string{"random content"},
+ },
+ },
+ }
+
+ for _, test := range tests {
+ u, err := composeQuery("http://test.com/override", test.code, test.headers, test.body)
+ if err != nil {
+ t.Errorf("%s: composeQuery: %v", test.name, err)
+ return
+ }
+ req, err := http.NewRequest("GET", u, nil)
+ if err != nil {
+ t.Errorf("%s: http.NewRequest: %v", test.name, err)
+ return
+ }
+ w := httptest.NewRecorder()
+ defaultResponse(w, req)
+ if test.code > 0 {
+ if got, want := w.Code, test.code; got != want {
+ t.Errorf("%s: response code: got %d want %d", test.name, got, want)
+ return
+ }
+ }
+ if test.headers != nil {
+ for k, want := range test.headers {
+ got, ok := w.HeaderMap[k]
+ if !ok || !reflect.DeepEqual(got, want) {
+				t.Errorf("%s: header %s: got %v want %v", test.name, k, got, want)
+ return
+ }
+ }
+ }
+ if test.body != nil {
+ if got, want := string(w.Body.Bytes()), string(test.body); got != want {
+ t.Errorf("%s: body: got %s want %s", test.name, got, want)
+ return
+ }
+ }
+ }
+}
diff --git a/chromium/tools/clang/CMakeLists.txt b/chromium/tools/clang/CMakeLists.txt
index 21b5a9fcea7..f7c93c383a9 100644
--- a/chromium/tools/clang/CMakeLists.txt
+++ b/chromium/tools/clang/CMakeLists.txt
@@ -28,19 +28,21 @@ link_directories("${CMAKE_SOURCE_DIR}/lib"
"${CMAKE_BINARY_DIR}/lib"
"${CMAKE_BINARY_DIR}/tools/clang/lib")
+# Tests for all enabled tools can be run by building this target.
+add_custom_target(cr-check-all COMMAND ${CMAKE_CTEST_COMMAND} -V)
+
# cr_add_test(
# name
# testprog
# arguments...
# )
function(cr_add_test name testprog)
- add_test(NAME ${name} COMMAND ${testprog} ${ARGN})
+ add_custom_target(
+ ${name} COMMAND ${testprog} ${ARGN}
+ WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}")
add_dependencies(cr-check-all ${name})
endfunction(cr_add_test)
-# Tests for all enabled tools can be run by building this target.
-add_custom_target(cr-check-all COMMAND ${CMAKE_CTEST_COMMAND} -V)
-
function(cr_install)
install(${ARGN} COMPONENT chrome-tools OPTIONAL)
endfunction(cr_install)
diff --git a/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp b/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp
index c90510e93d7..7c90ddd3504 100644
--- a/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp
+++ b/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.cpp
@@ -325,19 +325,11 @@ void BlinkGCPluginConsumer::HandleTranslationUnit(ASTContext& context) {
}
}
- for (CollectVisitor::RecordVector::iterator it =
- visitor.record_decls().begin();
- it != visitor.record_decls().end();
- ++it) {
- CheckRecord(cache_.Lookup(*it));
- }
+ for (const auto& record : visitor.record_decls())
+ CheckRecord(cache_.Lookup(record));
- for (CollectVisitor::MethodVector::iterator it =
- visitor.trace_decls().begin();
- it != visitor.trace_decls().end();
- ++it) {
- CheckTracingMethod(*it);
- }
+ for (const auto& method : visitor.trace_decls())
+ CheckTracingMethod(method);
if (json_) {
json_->CloseList();
@@ -401,12 +393,9 @@ void BlinkGCPluginConsumer::CheckClass(RecordInfo* info) {
// Check consistency of stack-allocated hierarchies.
if (info->IsStackAllocated()) {
- for (RecordInfo::Bases::iterator it = info->GetBases().begin();
- it != info->GetBases().end();
- ++it) {
- if (!it->second.info()->IsStackAllocated())
- ReportDerivesNonStackAllocated(info, &it->second);
- }
+ for (auto& base : info->GetBases())
+ if (!base.second.info()->IsStackAllocated())
+ ReportDerivesNonStackAllocated(info, &base.second);
}
if (CXXMethodDecl* trace = info->GetTraceMethod()) {
@@ -427,7 +416,7 @@ void BlinkGCPluginConsumer::CheckClass(RecordInfo* info) {
{
CheckFieldsVisitor visitor(options_);
if (visitor.ContainsInvalidFields(info))
- ReportClassContainsInvalidFields(info, &visitor.invalid_fields());
+ ReportClassContainsInvalidFields(info, visitor.invalid_fields());
}
if (info->IsGCDerived()) {
@@ -442,7 +431,7 @@ void BlinkGCPluginConsumer::CheckClass(RecordInfo* info) {
{
CheckGCRootsVisitor visitor;
if (visitor.ContainsGCRoots(info))
- ReportClassContainsGCRoots(info, &visitor.gc_roots());
+ ReportClassContainsGCRoots(info, visitor.gc_roots());
}
if (info->NeedsFinalization())
@@ -645,7 +634,7 @@ void BlinkGCPluginConsumer::CheckFinalization(RecordInfo* info) {
visitor.TraverseCXXMethodDecl(dtor);
if (!visitor.finalized_fields().empty()) {
ReportFinalizerAccessesFinalizedFields(
- dtor, &visitor.finalized_fields());
+ dtor, visitor.finalized_fields());
}
}
return;
@@ -662,19 +651,13 @@ void BlinkGCPluginConsumer::CheckFinalization(RecordInfo* info) {
if (dtor && dtor->isUserProvided())
NoteUserDeclaredDestructor(dtor);
- for (RecordInfo::Bases::iterator it = info->GetBases().begin();
- it != info->GetBases().end();
- ++it) {
- if (it->second.info()->NeedsFinalization())
- NoteBaseRequiresFinalization(&it->second);
- }
+ for (auto& base : info->GetBases())
+ if (base.second.info()->NeedsFinalization())
+ NoteBaseRequiresFinalization(&base.second);
- for (RecordInfo::Fields::iterator it = info->GetFields().begin();
- it != info->GetFields().end();
- ++it) {
- if (it->second.edge()->NeedsFinalization())
- NoteField(&it->second, diag_field_requires_finalization_note_);
- }
+ for (auto& field : info->GetFields())
+ if (field.second.edge()->NeedsFinalization())
+ NoteField(&field.second, diag_field_requires_finalization_note_);
}
void BlinkGCPluginConsumer::CheckUnneededFinalization(RecordInfo* info) {
@@ -688,18 +671,14 @@ bool BlinkGCPluginConsumer::HasNonEmptyFinalizer(RecordInfo* info) {
if (!dtor->hasBody() || !EmptyStmtVisitor::isEmpty(dtor->getBody()))
return true;
}
- for (RecordInfo::Bases::iterator it = info->GetBases().begin();
- it != info->GetBases().end();
- ++it) {
- if (HasNonEmptyFinalizer(it->second.info()))
+ for (auto& base : info->GetBases())
+ if (HasNonEmptyFinalizer(base.second.info()))
return true;
- }
- for (RecordInfo::Fields::iterator it = info->GetFields().begin();
- it != info->GetFields().end();
- ++it) {
- if (it->second.edge()->NeedsFinalization())
+
+ for (auto& field : info->GetFields())
+ if (field.second.edge()->NeedsFinalization())
return true;
- }
+
return false;
}
@@ -743,13 +722,9 @@ void BlinkGCPluginConsumer::CheckTraceMethod(
Config::TraceMethodType trace_type) {
// A trace method must not override any non-virtual trace methods.
if (trace_type == Config::TRACE_METHOD) {
- for (RecordInfo::Bases::iterator it = parent->GetBases().begin();
- it != parent->GetBases().end();
- ++it) {
- RecordInfo* base = it->second.info();
- if (CXXMethodDecl* other = base->InheritsNonVirtualTrace())
+ for (auto& base : parent->GetBases())
+ if (CXXMethodDecl* other = base.second.info()->InheritsNonVirtualTrace())
ReportOverriddenNonVirtualTrace(parent, trace, other);
- }
}
CheckTraceVisitor visitor(trace, parent, &cache_);
@@ -761,17 +736,12 @@ void BlinkGCPluginConsumer::CheckTraceMethod(
if (visitor.delegates_to_traceimpl())
return;
- for (RecordInfo::Bases::iterator it = parent->GetBases().begin();
- it != parent->GetBases().end();
- ++it) {
- if (!it->second.IsProperlyTraced())
- ReportBaseRequiresTracing(parent, trace, it->first);
- }
+ for (auto& base : parent->GetBases())
+ if (!base.second.IsProperlyTraced())
+ ReportBaseRequiresTracing(parent, trace, base.first);
- for (RecordInfo::Fields::iterator it = parent->GetFields().begin();
- it != parent->GetFields().end();
- ++it) {
- if (!it->second.IsProperlyTraced()) {
+ for (auto& field : parent->GetFields()) {
+ if (!field.second.IsProperlyTraced()) {
// Discontinue once an untraced-field error is found.
ReportFieldsRequireTracing(parent, trace);
break;
@@ -851,25 +821,17 @@ void BlinkGCPluginConsumer::DumpClass(RecordInfo* info) {
DumpEdgeVisitor visitor(json_);
- RecordInfo::Bases& bases = info->GetBases();
- for (RecordInfo::Bases::iterator it = bases.begin();
- it != bases.end();
- ++it) {
+ for (auto& base : info->GetBases())
visitor.DumpEdge(info,
- it->second.info(),
+ base.second.info(),
"<super>",
Edge::kStrong,
- GetLocString(it->second.spec().getLocStart()));
- }
+ GetLocString(base.second.spec().getLocStart()));
- RecordInfo::Fields& fields = info->GetFields();
- for (RecordInfo::Fields::iterator it = fields.begin();
- it != fields.end();
- ++it) {
+ for (auto& field : info->GetFields())
visitor.DumpField(info,
- &it->second,
- GetLocString(it->second.field()->getLocStart()));
- }
+ &field.second,
+ GetLocString(field.second.field()->getLocStart()));
}
DiagnosticsEngine::Level BlinkGCPluginConsumer::getErrorLevel() {
@@ -914,9 +876,8 @@ bool BlinkGCPluginConsumer::InIgnoredDirectory(RecordInfo* info) {
#if defined(LLVM_ON_WIN32)
std::replace(filename.begin(), filename.end(), '\\', '/');
#endif
- std::vector<std::string>::iterator it = options_.ignored_directories.begin();
- for (; it != options_.ignored_directories.end(); ++it)
- if (filename.find(*it) != std::string::npos)
+ for (const auto& dir : options_.ignored_directories)
+ if (filename.find(dir) != std::string::npos)
return true;
return false;
}
@@ -973,19 +934,13 @@ void BlinkGCPluginConsumer::ReportClassRequiresTraceMethod(RecordInfo* info) {
diag_class_requires_trace_method_)
<< info->record();
- for (RecordInfo::Bases::iterator it = info->GetBases().begin();
- it != info->GetBases().end();
- ++it) {
- if (it->second.NeedsTracing().IsNeeded())
- NoteBaseRequiresTracing(&it->second);
- }
+ for (auto& base : info->GetBases())
+ if (base.second.NeedsTracing().IsNeeded())
+ NoteBaseRequiresTracing(&base.second);
- for (RecordInfo::Fields::iterator it = info->GetFields().begin();
- it != info->GetFields().end();
- ++it) {
- if (!it->second.IsProperlyTraced())
- NoteFieldRequiresTracing(info, it->first);
- }
+ for (auto& field : info->GetFields())
+ if (!field.second.IsProperlyTraced())
+ NoteFieldRequiresTracing(info, field.first);
}
void BlinkGCPluginConsumer::ReportBaseRequiresTracing(
@@ -1001,71 +956,65 @@ void BlinkGCPluginConsumer::ReportFieldsRequireTracing(
CXXMethodDecl* trace) {
ReportDiagnostic(trace->getLocStart(), diag_fields_require_tracing_)
<< info->record();
- for (RecordInfo::Fields::iterator it = info->GetFields().begin();
- it != info->GetFields().end();
- ++it) {
- if (!it->second.IsProperlyTraced())
- NoteFieldRequiresTracing(info, it->first);
- }
+ for (auto& field : info->GetFields())
+ if (!field.second.IsProperlyTraced())
+ NoteFieldRequiresTracing(info, field.first);
}
void BlinkGCPluginConsumer::ReportClassContainsInvalidFields(
RecordInfo* info,
- CheckFieldsVisitor::Errors* errors) {
+ const CheckFieldsVisitor::Errors& errors) {
bool only_warnings = options_.warn_raw_ptr;
- for (CheckFieldsVisitor::Errors::iterator it = errors->begin();
- only_warnings && it != errors->end();
- ++it) {
- if (!CheckFieldsVisitor::IsWarning(it->second))
+ for (auto& error : errors)
+ if (!CheckFieldsVisitor::IsWarning(error.second))
only_warnings = false;
- }
+
ReportDiagnostic(info->record()->getLocStart(),
only_warnings ?
diag_class_contains_invalid_fields_warning_ :
diag_class_contains_invalid_fields_)
<< info->record();
- for (CheckFieldsVisitor::Errors::iterator it = errors->begin();
- it != errors->end();
- ++it) {
- unsigned error;
- if (CheckFieldsVisitor::IsRawPtrError(it->second)) {
- error = diag_raw_ptr_to_gc_managed_class_note_;
- } else if (CheckFieldsVisitor::IsReferencePtrError(it->second)) {
- error = diag_reference_ptr_to_gc_managed_class_note_;
- } else if (it->second == CheckFieldsVisitor::kRefPtrToGCManaged) {
- error = diag_ref_ptr_to_gc_managed_class_note_;
- } else if (it->second == CheckFieldsVisitor::kOwnPtrToGCManaged) {
- error = diag_own_ptr_to_gc_managed_class_note_;
- } else if (it->second == CheckFieldsVisitor::kMemberToGCUnmanaged) {
- error = diag_member_to_gc_unmanaged_class_note_;
- } else if (it->second == CheckFieldsVisitor::kMemberInUnmanaged) {
- error = diag_member_in_unmanaged_class_note_;
- } else if (it->second == CheckFieldsVisitor::kPtrFromHeapToStack) {
- error = diag_stack_allocated_field_note_;
- } else if (it->second == CheckFieldsVisitor::kGCDerivedPartObject) {
- error = diag_part_object_to_gc_derived_class_note_;
+ for (auto& error : errors) {
+ unsigned note;
+ if (CheckFieldsVisitor::IsRawPtrError(error.second)) {
+ note = diag_raw_ptr_to_gc_managed_class_note_;
+ } else if (CheckFieldsVisitor::IsReferencePtrError(error.second)) {
+ note = diag_reference_ptr_to_gc_managed_class_note_;
+ } else if (error.second == CheckFieldsVisitor::kRefPtrToGCManaged) {
+ note = diag_ref_ptr_to_gc_managed_class_note_;
+ } else if (error.second == CheckFieldsVisitor::kOwnPtrToGCManaged) {
+ note = diag_own_ptr_to_gc_managed_class_note_;
+ } else if (error.second == CheckFieldsVisitor::kMemberToGCUnmanaged) {
+ note = diag_member_to_gc_unmanaged_class_note_;
+ } else if (error.second == CheckFieldsVisitor::kMemberInUnmanaged) {
+ note = diag_member_in_unmanaged_class_note_;
+ } else if (error.second == CheckFieldsVisitor::kPtrFromHeapToStack) {
+ note = diag_stack_allocated_field_note_;
+ } else if (error.second == CheckFieldsVisitor::kGCDerivedPartObject) {
+ note = diag_part_object_to_gc_derived_class_note_;
} else {
assert(false && "Unknown field error");
}
- NoteField(it->first, error);
+ NoteField(error.first, note);
}
}
void BlinkGCPluginConsumer::ReportClassContainsGCRoots(
RecordInfo* info,
- CheckGCRootsVisitor::Errors* errors) {
- for (CheckGCRootsVisitor::Errors::iterator it = errors->begin();
- it != errors->end();
- ++it) {
- CheckGCRootsVisitor::RootPath::iterator path = it->begin();
- FieldPoint* point = *path;
- ReportDiagnostic(info->record()->getLocStart(),
- diag_class_contains_gc_root_)
- << info->record() << point->field();
- while (++path != it->end()) {
+ const CheckGCRootsVisitor::Errors& errors) {
+ for (auto& error : errors) {
+ FieldPoint* point = nullptr;
+ for (FieldPoint* path : error) {
+ if (!point) {
+ point = path;
+ ReportDiagnostic(info->record()->getLocStart(),
+ diag_class_contains_gc_root_)
+ << info->record() << point->field();
+ continue;
+ }
NotePartObjectContainsGCRoot(point);
- point = *path;
+ point = path;
}
NoteFieldContainsGCRoot(point);
}
@@ -1073,20 +1022,18 @@ void BlinkGCPluginConsumer::ReportClassContainsGCRoots(
void BlinkGCPluginConsumer::ReportFinalizerAccessesFinalizedFields(
CXXMethodDecl* dtor,
- CheckFinalizerVisitor::Errors* fields) {
- for (CheckFinalizerVisitor::Errors::iterator it = fields->begin();
- it != fields->end();
- ++it) {
- bool as_eagerly_finalized = it->as_eagerly_finalized;
+ const CheckFinalizerVisitor::Errors& errors) {
+ for (auto& error : errors) {
+ bool as_eagerly_finalized = error.as_eagerly_finalized;
unsigned diag_error = as_eagerly_finalized ?
diag_finalizer_eagerly_finalized_field_ :
diag_finalizer_accesses_finalized_field_;
unsigned diag_note = as_eagerly_finalized ?
diag_eagerly_finalized_field_note_ :
diag_finalized_field_note_;
- ReportDiagnostic(it->member->getLocStart(), diag_error)
- << dtor << it->field->field();
- NoteField(it->field, diag_note);
+ ReportDiagnostic(error.member->getLocStart(), diag_error)
+ << dtor << error.field->field();
+ NoteField(error.field, diag_note);
}
}
diff --git a/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.h b/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.h
index b1e35ce0695..d0b49d89bf2 100644
--- a/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.h
+++ b/chromium/tools/clang/blink_gc_plugin/BlinkGCPluginConsumer.h
@@ -96,13 +96,14 @@ class BlinkGCPluginConsumer : public clang::ASTConsumer {
clang::CXXRecordDecl* base);
void ReportFieldsRequireTracing(RecordInfo* info,
clang::CXXMethodDecl* trace);
- void ReportClassContainsInvalidFields(RecordInfo* info,
- CheckFieldsVisitor::Errors* errors);
+ void ReportClassContainsInvalidFields(
+ RecordInfo* info,
+ const CheckFieldsVisitor::Errors& errors);
void ReportClassContainsGCRoots(RecordInfo* info,
- CheckGCRootsVisitor::Errors* errors);
+ const CheckGCRootsVisitor::Errors& errors);
void ReportFinalizerAccessesFinalizedFields(
clang::CXXMethodDecl* dtor,
- CheckFinalizerVisitor::Errors* fields);
+ const CheckFinalizerVisitor::Errors& errors);
void ReportClassRequiresFinalization(RecordInfo* info);
void ReportClassDoesNotRequireFinalization(RecordInfo* info);
void ReportClassMustDeclareGCMixinTraceMethod(RecordInfo* info);
diff --git a/chromium/tools/clang/blink_gc_plugin/CMakeLists.txt b/chromium/tools/clang/blink_gc_plugin/CMakeLists.txt
index 560bd0fd751..60ad1e52a74 100644
--- a/chromium/tools/clang/blink_gc_plugin/CMakeLists.txt
+++ b/chromium/tools/clang/blink_gc_plugin/CMakeLists.txt
@@ -30,8 +30,7 @@ if(WIN32)
set_property(TARGET clang APPEND PROPERTY SOURCES ${absolute_sources})
cr_add_test(blink_gc_plugin_test
- python
- ${CMAKE_CURRENT_SOURCE_DIR}/tests/test.py
+ python tests/test.py
${CMAKE_BINARY_DIR}/bin/clang
)
else()
@@ -41,8 +40,7 @@ else()
cr_install(TARGETS "lib${LIBRARYNAME}" LIBRARY DESTINATION lib)
cr_add_test(blink_gc_plugin_test
- python
- ${CMAKE_CURRENT_SOURCE_DIR}/tests/test.py
+ python tests/test.py
${CMAKE_BINARY_DIR}/bin/clang
$<TARGET_FILE:lib${LIBRARYNAME}>
)
diff --git a/chromium/tools/clang/blink_gc_plugin/RecordInfo.cpp b/chromium/tools/clang/blink_gc_plugin/RecordInfo.cpp
index 3c166dd4c43..b4937abd20f 100644
--- a/chromium/tools/clang/blink_gc_plugin/RecordInfo.cpp
+++ b/chromium/tools/clang/blink_gc_plugin/RecordInfo.cpp
@@ -399,7 +399,7 @@ RecordInfo::Bases* RecordInfo::CollectBases() {
TracingStatus status = info->InheritsTrace()
? TracingStatus::Needed()
: TracingStatus::Unneeded();
- bases->insert(std::make_pair(base, BasePoint(spec, info, status)));
+ bases->push_back(std::make_pair(base, BasePoint(spec, info, status)));
}
return bases;
}
diff --git a/chromium/tools/clang/blink_gc_plugin/RecordInfo.h b/chromium/tools/clang/blink_gc_plugin/RecordInfo.h
index 1a25d8342b8..b294ffe03b6 100644
--- a/chromium/tools/clang/blink_gc_plugin/RecordInfo.h
+++ b/chromium/tools/clang/blink_gc_plugin/RecordInfo.h
@@ -69,8 +69,15 @@ class FieldPoint : public GraphPoint {
// Wrapper class to lazily collect information about a C++ record.
class RecordInfo {
public:
- typedef std::map<clang::CXXRecordDecl*, BasePoint> Bases;
- typedef std::map<clang::FieldDecl*, FieldPoint> Fields;
+ typedef std::vector<std::pair<clang::CXXRecordDecl*, BasePoint>> Bases;
+
+ struct FieldDeclCmp {
+ bool operator()(clang::FieldDecl* a, clang::FieldDecl *b) const {
+ return a->getLocStart() < b->getLocStart();
+ }
+ };
+ typedef std::map<clang::FieldDecl*, FieldPoint, FieldDeclCmp> Fields;
+
typedef std::vector<const clang::Type*> TemplateArgs;
~RecordInfo();
diff --git a/chromium/tools/clang/pass_to_move/CMakeLists.txt b/chromium/tools/clang/pass_to_move/CMakeLists.txt
index b3ceee1bc5e..f600dd00164 100644
--- a/chromium/tools/clang/pass_to_move/CMakeLists.txt
+++ b/chromium/tools/clang/pass_to_move/CMakeLists.txt
@@ -3,6 +3,7 @@ set(LLVM_LINK_COMPONENTS
MCParser
Option
X86AsmParser
+ X86CodeGen
)
add_llvm_executable(pass_to_move
diff --git a/chromium/tools/clang/plugins/CMakeLists.txt b/chromium/tools/clang/plugins/CMakeLists.txt
index e4eea86c7bf..6be8b2eb699 100644
--- a/chromium/tools/clang/plugins/CMakeLists.txt
+++ b/chromium/tools/clang/plugins/CMakeLists.txt
@@ -1,7 +1,8 @@
set(plugin_sources
ChromeClassTester.cpp
FindBadConstructsAction.cpp
- FindBadConstructsConsumer.cpp)
+ FindBadConstructsConsumer.cpp
+ CheckIPCVisitor.cpp)
if(WIN32)
# Clang doesn't support loadable modules on Windows. Unfortunately, building
@@ -18,15 +19,19 @@ if(WIN32)
list(APPEND absolute_sources ${CMAKE_CURRENT_SOURCE_DIR}/${source})
endforeach()
set_property(TARGET clang APPEND PROPERTY SOURCES ${absolute_sources})
+
+ cr_add_test(plugins_test
+ python tests/test.py
+ ${CMAKE_BINARY_DIR}/bin/clang
+ )
else()
add_llvm_loadable_module(libFindBadConstructs ${plugin_sources})
add_dependencies(libFindBadConstructs clang)
cr_install(TARGETS libFindBadConstructs LIBRARY DESTINATION lib)
- # TODO(dcheng): Rewrite the test framework so it works on Windows too.
cr_add_test(plugins_test
- ${CMAKE_CURRENT_SOURCE_DIR}/tests/test.sh
+ python tests/test.py
${CMAKE_BINARY_DIR}/bin/clang
$<TARGET_FILE:libFindBadConstructs>
)
diff --git a/chromium/tools/clang/plugins/CheckIPCVisitor.cpp b/chromium/tools/clang/plugins/CheckIPCVisitor.cpp
new file mode 100644
index 00000000000..b123b0130ed
--- /dev/null
+++ b/chromium/tools/clang/plugins/CheckIPCVisitor.cpp
@@ -0,0 +1,288 @@
+// Copyright (c) 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "CheckIPCVisitor.h"
+
+using namespace clang;
+
+namespace chrome_checker {
+
+namespace {
+
+const char kWriteParamBadType[] =
+ "[chromium-ipc] IPC::WriteParam() is called on blacklisted type '%0'%1.";
+
+const char kTupleBadType[] =
+ "[chromium-ipc] IPC tuple references banned type '%0'%1.";
+
+const char kWriteParamBadSignature[] =
+ "[chromium-ipc] IPC::WriteParam() is expected to have two arguments.";
+
+const char kNoteSeeHere[] =
+ "see here";
+
+} // namespace
+
+CheckIPCVisitor::CheckIPCVisitor(CompilerInstance& compiler)
+ : compiler_(compiler), context_(nullptr) {
+ auto& diagnostics = compiler_.getDiagnostics();
+ error_write_param_bad_type_ = diagnostics.getCustomDiagID(
+ DiagnosticsEngine::Error, kWriteParamBadType);
+ error_tuple_bad_type_ = diagnostics.getCustomDiagID(
+ DiagnosticsEngine::Error, kTupleBadType);
+ error_write_param_bad_signature_ = diagnostics.getCustomDiagID(
+ DiagnosticsEngine::Error, kWriteParamBadSignature);
+ note_see_here_ = diagnostics.getCustomDiagID(
+ DiagnosticsEngine::Note, kNoteSeeHere);
+
+ blacklisted_typedefs_ = llvm::StringSet<>({
+ "intmax_t",
+ "uintmax_t",
+ "intptr_t",
+ "uintptr_t",
+ "wint_t",
+ "size_t",
+ "rsize_t",
+ "ssize_t",
+ "ptrdiff_t",
+ "dev_t",
+ "off_t",
+ "clock_t",
+ "time_t",
+ "suseconds_t"
+ });
+}
+
+void CheckIPCVisitor::BeginDecl(Decl* decl) {
+ decl_stack_.push_back(decl);
+}
+
+void CheckIPCVisitor::EndDecl() {
+ decl_stack_.pop_back();
+}
+
+void CheckIPCVisitor::VisitTemplateSpecializationType(
+ TemplateSpecializationType* spec) {
+ ValidateCheckedTuple(spec);
+}
+
+void CheckIPCVisitor::VisitCallExpr(CallExpr* call_expr) {
+ ValidateWriteParam(call_expr);
+}
+
+bool CheckIPCVisitor::ValidateWriteParam(const CallExpr* call_expr) {
+ const FunctionDecl* callee_decl = call_expr->getDirectCallee();
+ if (!callee_decl ||
+ callee_decl->getQualifiedNameAsString() != "IPC::WriteParam") {
+ return true;
+ }
+
+ return ValidateWriteParamSignature(call_expr) &&
+ ValidateWriteParamArgument(call_expr->getArg(1));
+}
+
+// Checks that IPC::WriteParam() has expected signature.
+bool CheckIPCVisitor::ValidateWriteParamSignature(
+ const CallExpr* call_expr) {
+ if (call_expr->getNumArgs() != 2) {
+ compiler_.getDiagnostics().Report(
+ call_expr->getExprLoc(), error_write_param_bad_signature_);
+ return false;
+ }
+ return true;
+}
+
+// Checks that IPC::WriteParam() argument type is allowed.
+// See CheckType() for specifics.
+bool CheckIPCVisitor::ValidateWriteParamArgument(const Expr* arg_expr) {
+ if (auto* parent_fn_decl = GetParentDecl<FunctionDecl>()) {
+ auto template_kind = parent_fn_decl->getTemplatedKind();
+ if (template_kind != FunctionDecl::TK_NonTemplate &&
+ template_kind != FunctionDecl::TK_FunctionTemplate) {
+ // Skip all specializations - we don't check WriteParam() on dependent
+ // types (typedef info gets lost), and we checked all non-dependent uses
+ // earlier (when we checked the template itself).
+ return true;
+ }
+ }
+
+ QualType arg_type;
+
+ arg_expr = arg_expr->IgnoreImplicit();
+ if (auto* cast_expr = dyn_cast<ExplicitCastExpr>(arg_expr)) {
+ arg_type = cast_expr->getTypeAsWritten();
+ } else {
+ arg_type = arg_expr->getType();
+ }
+
+ CheckDetails details;
+ if (CheckType(arg_type, &details)) {
+ return true;
+ }
+
+ ReportCheckError(details,
+ arg_expr->getExprLoc(),
+ error_write_param_bad_type_);
+
+ return false;
+}
+
+// Checks that IPC::CheckedTuple<> is specialized with allowed types.
+// See CheckType() for specifics.
+bool CheckIPCVisitor::ValidateCheckedTuple(
+ const TemplateSpecializationType* spec) {
+ TemplateDecl* decl = spec->getTemplateName().getAsTemplateDecl();
+ if (!decl || decl->getQualifiedNameAsString() != "IPC::CheckedTuple") {
+ return true;
+ }
+
+ bool valid = true;
+ for (unsigned i = 0; i != spec->getNumArgs(); ++i) {
+ const TemplateArgument& arg = spec->getArg(i);
+ CheckDetails details;
+ if (CheckTemplateArgument(arg, &details)) {
+ continue;
+ }
+
+ valid = false;
+
+ auto* parent_decl = GetParentDecl<Decl>();
+ ReportCheckError(
+ details,
+ parent_decl ? parent_decl->getLocStart() : SourceLocation(),
+ error_tuple_bad_type_);
+ }
+
+ return valid;
+}
+
+template <typename T>
+const T* CheckIPCVisitor::GetParentDecl() const {
+ for (auto i = decl_stack_.rbegin(); i != decl_stack_.rend(); ++i) {
+ if (auto* parent = dyn_cast_or_null<T>(*i)) {
+ return parent;
+ }
+ }
+ return nullptr;
+}
+
+
+bool CheckIPCVisitor::IsBlacklistedType(QualType type) const {
+ return context_->hasSameUnqualifiedType(type, context_->LongTy) ||
+ context_->hasSameUnqualifiedType(type, context_->UnsignedLongTy);
+}
+
+bool CheckIPCVisitor::IsBlacklistedTypedef(const TypedefNameDecl* tdef) const {
+ return blacklisted_typedefs_.find(tdef->getName()) !=
+ blacklisted_typedefs_.end();
+}
+
+// Checks that integer type is allowed (not blacklisted).
+bool CheckIPCVisitor::CheckIntegerType(QualType type,
+ CheckDetails* details) const {
+ bool seen_typedef = false;
+ while (true) {
+ details->exit_type = type;
+
+ if (auto* tdef = dyn_cast<TypedefType>(type)) {
+ if (IsBlacklistedTypedef(tdef->getDecl())) {
+ return false;
+ }
+ details->typedefs.push_back(tdef);
+ seen_typedef = true;
+ }
+
+ QualType desugared_type =
+ type->getLocallyUnqualifiedSingleStepDesugaredType();
+ if (desugared_type == type) {
+ break;
+ }
+
+ type = desugared_type;
+ }
+
+ return seen_typedef || !IsBlacklistedType(type);
+}
+
+// Checks that |type| is allowed (not blacklisted), recursively visiting
+// template specializations.
+bool CheckIPCVisitor::CheckType(QualType type, CheckDetails* details) const {
+ if (type->isReferenceType()) {
+ type = type->getPointeeType();
+ }
+ type = type.getLocalUnqualifiedType();
+
+ if (details->entry_type.isNull()) {
+ details->entry_type = type;
+ }
+
+ if (type->isIntegerType()) {
+ return CheckIntegerType(type, details);
+ }
+
+ while (true) {
+ if (auto* spec = dyn_cast<TemplateSpecializationType>(type)) {
+ for (const TemplateArgument& arg: *spec) {
+ if (!CheckTemplateArgument(arg, details)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ if (auto* record = dyn_cast<RecordType>(type)) {
+ if (auto* spec = dyn_cast<ClassTemplateSpecializationDecl>(
+ record->getDecl())) {
+ const TemplateArgumentList& args = spec->getTemplateArgs();
+ for (unsigned i = 0; i != args.size(); ++i) {
+ if (!CheckTemplateArgument(args[i], details)) {
+ return false;
+ }
+ }
+ }
+ return true;
+ }
+
+ if (auto* tdef = dyn_cast<TypedefType>(type)) {
+ details->typedefs.push_back(tdef);
+ }
+
+ QualType desugared_type =
+ type->getLocallyUnqualifiedSingleStepDesugaredType();
+ if (desugared_type == type) {
+ break;
+ }
+
+ type = desugared_type;
+ }
+
+ return true;
+}
+
+bool CheckIPCVisitor::CheckTemplateArgument(const TemplateArgument& arg,
+ CheckDetails* details) const {
+ return arg.getKind() != TemplateArgument::Type ||
+ CheckType(arg.getAsType(), details);
+}
+
+void CheckIPCVisitor::ReportCheckError(const CheckDetails& details,
+ SourceLocation loc,
+ unsigned error) {
+ DiagnosticsEngine& diagnostics = compiler_.getDiagnostics();
+
+ std::string entry_type = details.entry_type.getAsString();
+ std::string exit_type = details.exit_type.getAsString();
+
+ std::string via;
+ if (entry_type != exit_type) {
+ via = " via '" + entry_type + "'";
+ }
+ diagnostics.Report(loc, error) << exit_type << via;
+
+ for (const TypedefType* tdef: details.typedefs) {
+ diagnostics.Report(tdef->getDecl()->getLocation(), note_see_here_);
+ }
+}
+
+} // namespace chrome_checker
diff --git a/chromium/tools/clang/plugins/CheckIPCVisitor.h b/chromium/tools/clang/plugins/CheckIPCVisitor.h
new file mode 100644
index 00000000000..2d88e6b931a
--- /dev/null
+++ b/chromium/tools/clang/plugins/CheckIPCVisitor.h
@@ -0,0 +1,99 @@
+// Copyright (c) 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This check ensures that 32/64-bit unstable types are not used in IPC.
+//
+// A type (or typedef) is unstable if it changes size between 32/64-bit
+// platforms. However, it's impossible to accurately identify unstable
+// typedefs, because their definitions rely on the preprocessor. For
+// example uintptr_t is either unsigned int or unsigned long.
+//
+// So we're not trying to be accurate, and just blacklisting some types
+// that are known to be unstable:
+// 1. Types: long / unsigned long (but not typedefs to)
+// 2. Typedefs: intmax_t, uintmax_t, intptr_t, uintptr_t, wint_t,
+// size_t, rsize_t, ssize_t, ptrdiff_t, dev_t, off_t, clock_t,
+// time_t, suseconds_t (including typedefs to)
+//
+// Additionally, templates referencing blacklisted types (e.g. vector<long>)
+// are also blacklisted.
+//
+// Blacklisted types are checked in:
+// 1. IPC::WriteParam() calls
+// 2. IPC::CheckedTuple<> specializations
+//
+
+#ifndef TOOLS_CLANG_PLUGINS_CHECKIPC_VISITOR_H_
+#define TOOLS_CLANG_PLUGINS_CHECKIPC_VISITOR_H_
+
+#include <vector>
+
+#include "clang/AST/AST.h"
+#include "clang/AST/ASTConsumer.h"
+#include "clang/AST/RecursiveASTVisitor.h"
+#include "clang/Frontend/CompilerInstance.h"
+#include "llvm/ADT/StringSet.h"
+
+namespace chrome_checker {
+
+class CheckIPCVisitor {
+ public:
+ explicit CheckIPCVisitor(clang::CompilerInstance& compiler);
+
+ void set_context(clang::ASTContext* context) { context_ = context; }
+
+ void BeginDecl(clang::Decl* decl);
+ void EndDecl();
+ void VisitTemplateSpecializationType(
+ clang::TemplateSpecializationType* spec);
+ void VisitCallExpr(clang::CallExpr* call_expr);
+
+ private:
+ // ValidateXXX functions return false if validation failed and diagnostic
+ // was reported. They return true otherwise (not applicable / validation
+ // succeeded).
+
+ bool ValidateWriteParam(const clang::CallExpr* call_expr);
+ bool ValidateWriteParamSignature(const clang::CallExpr* call_expr);
+ bool ValidateWriteParamArgument(const clang::Expr* arg_expr);
+ bool ValidateCheckedTuple(
+ const clang::TemplateSpecializationType* spec);
+
+ template <typename T>
+ const T* GetParentDecl() const;
+
+ bool IsBlacklistedType(clang::QualType type) const;
+ bool IsBlacklistedTypedef(const clang::TypedefNameDecl* tdef) const;
+
+ struct CheckDetails {
+ clang::QualType entry_type;
+ clang::QualType exit_type;
+ llvm::SmallVector<const clang::TypedefType*, 5> typedefs;
+ };
+
+ bool CheckType(clang::QualType type, CheckDetails* details) const;
+ bool CheckIntegerType(clang::QualType type, CheckDetails* details) const;
+ bool CheckTemplateArgument(const clang::TemplateArgument& arg,
+ CheckDetails* details) const;
+
+ void ReportCheckError(const CheckDetails& details,
+ clang::SourceLocation loc,
+ unsigned error);
+
+ clang::CompilerInstance& compiler_;
+ clang::ASTContext* context_;
+
+ unsigned error_write_param_bad_type_;
+ unsigned error_tuple_bad_type_;
+ unsigned error_write_param_bad_signature_;
+ unsigned note_see_here_;
+
+ std::vector<const clang::Decl*> decl_stack_;
+
+ llvm::StringSet<> blacklisted_typedefs_;
+};
+
+} // namespace chrome_checker
+
+#endif // TOOLS_CLANG_PLUGINS_CHECKIPC_VISITOR_H_
diff --git a/chromium/tools/clang/plugins/ChromeClassTester.cpp b/chromium/tools/clang/plugins/ChromeClassTester.cpp
index a6f9fff9a29..08e0d40d0a9 100644
--- a/chromium/tools/clang/plugins/ChromeClassTester.cpp
+++ b/chromium/tools/clang/plugins/ChromeClassTester.cpp
@@ -41,18 +41,6 @@ ChromeClassTester::ChromeClassTester(CompilerInstance& instance,
ChromeClassTester::~ChromeClassTester() {}
-void ChromeClassTester::HandleTagDeclDefinition(TagDecl* tag) {
- pending_class_decls_.push_back(tag);
-}
-
-bool ChromeClassTester::HandleTopLevelDecl(DeclGroupRef group_ref) {
- for (size_t i = 0; i < pending_class_decls_.size(); ++i)
- CheckTag(pending_class_decls_[i]);
- pending_class_decls_.clear();
-
- return true; // true means continue parsing.
-}
-
void ChromeClassTester::CheckTag(TagDecl* tag) {
// We handle class types here where we have semantic information. We can only
// check structs/classes/enums here, but we get a bunch of nice semantic
@@ -132,7 +120,11 @@ bool ChromeClassTester::InBannedDirectory(SourceLocation loc) {
#if defined(LLVM_ON_UNIX)
// Resolve the symlinktastic relative path and make it absolute.
char resolvedPath[MAXPATHLEN];
- if (realpath(filename.c_str(), resolvedPath)) {
+ if (options_.no_realpath) {
+ // Same reason as windows below, but we don't need to do
+ // the '\\' manipulation on linux.
+ filename.insert(filename.begin(), '/');
+ } else if (realpath(filename.c_str(), resolvedPath)) {
filename = resolvedPath;
}
#endif
@@ -187,6 +179,21 @@ std::string ChromeClassTester::GetNamespace(const Decl* record) {
return GetNamespaceImpl(record->getDeclContext(), "");
}
+bool ChromeClassTester::HasIgnoredBases(const CXXRecordDecl* record) {
+ for (const auto& base : record->bases()) {
+ CXXRecordDecl* base_record = base.getType()->getAsCXXRecordDecl();
+ if (!base_record)
+ continue;
+
+ const std::string& base_name = base_record->getQualifiedNameAsString();
+ if (ignored_base_classes_.count(base_name) > 0)
+ return true;
+ if (HasIgnoredBases(base_record))
+ return true;
+ }
+ return false;
+}
+
bool ChromeClassTester::InImplementationFile(SourceLocation record_location) {
std::string filename;
@@ -236,15 +243,9 @@ void ChromeClassTester::BuildBannedLists() {
banned_directories_.emplace("/breakpad/");
banned_directories_.emplace("/courgette/");
banned_directories_.emplace("/ppapi/");
- banned_directories_.emplace("/usr/include/");
- banned_directories_.emplace("/usr/lib/");
- banned_directories_.emplace("/usr/local/include/");
- banned_directories_.emplace("/usr/local/lib/");
banned_directories_.emplace("/testing/");
banned_directories_.emplace("/v8/");
- banned_directories_.emplace("/dart/");
banned_directories_.emplace("/sdch/");
- banned_directories_.emplace("/icu4c/");
banned_directories_.emplace("/frameworks/");
// Don't check autogenerated headers.
@@ -291,6 +292,10 @@ void ChromeClassTester::BuildBannedLists() {
// Enum type with _LAST members where _LAST doesn't mean last enum value.
ignored_record_names_.emplace("ViewID");
+
+ // Ignore IPC::NoParams bases, since these structs are generated via
+ // macros and it makes it difficult to add explicit ctors.
+ ignored_base_classes_.emplace("IPC::NoParams");
}
std::string ChromeClassTester::GetNamespaceImpl(const DeclContext* context,
diff --git a/chromium/tools/clang/plugins/ChromeClassTester.h b/chromium/tools/clang/plugins/ChromeClassTester.h
index e3d7e3caab2..22af1586936 100644
--- a/chromium/tools/clang/plugins/ChromeClassTester.h
+++ b/chromium/tools/clang/plugins/ChromeClassTester.h
@@ -15,16 +15,13 @@
// A class on top of ASTConsumer that forwards classes defined in Chromium
// headers to subclasses which implement CheckChromeClass().
-class ChromeClassTester : public clang::ASTConsumer {
+// TODO(vmpstr): Fold this class into FindBadConstructsConsumer.
+class ChromeClassTester {
public:
ChromeClassTester(clang::CompilerInstance& instance,
const chrome_checker::Options& options);
virtual ~ChromeClassTester();
- // clang::ASTConsumer:
- virtual void HandleTagDeclDefinition(clang::TagDecl* tag);
- virtual bool HandleTopLevelDecl(clang::DeclGroupRef group_ref);
-
void CheckTag(clang::TagDecl*);
clang::DiagnosticsEngine::Level getErrorLevel();
@@ -50,6 +47,10 @@ class ChromeClassTester : public clang::ASTConsumer {
// "<anonymous namespace>".
std::string GetNamespace(const clang::Decl* record);
+ // Utility method to check whether the given record has any of the ignored
+ // base classes.
+ bool HasIgnoredBases(const clang::CXXRecordDecl* record);
+
// Utility method for subclasses to check if this class is within an
// implementation (.cc, .cpp, .mm) file.
bool InImplementationFile(clang::SourceLocation location);
@@ -97,8 +98,8 @@ class ChromeClassTester : public clang::ASTConsumer {
// List of types that we don't check.
std::set<std::string> ignored_record_names_;
- // List of decls to check once the current top-level decl is parsed.
- std::vector<clang::TagDecl*> pending_class_decls_;
+ // List of base classes that we skip when checking complex class ctors/dtors.
+ std::set<std::string> ignored_base_classes_;
};
#endif // TOOLS_CLANG_PLUGINS_CHROMECLASSTESTER_H_
diff --git a/chromium/tools/clang/plugins/FindBadConstructsAction.cpp b/chromium/tools/clang/plugins/FindBadConstructsAction.cpp
index 67622970519..f857ac2bbe8 100644
--- a/chromium/tools/clang/plugins/FindBadConstructsAction.cpp
+++ b/chromium/tools/clang/plugins/FindBadConstructsAction.cpp
@@ -21,7 +21,7 @@ class PluginConsumer : public ASTConsumer {
: visitor_(*instance, options) {}
void HandleTranslationUnit(clang::ASTContext& context) override {
- visitor_.TraverseDecl(context.getTranslationUnitDecl());
+ visitor_.Traverse(context);
}
private:
@@ -36,9 +36,7 @@ FindBadConstructsAction::FindBadConstructsAction() {
std::unique_ptr<ASTConsumer> FindBadConstructsAction::CreateASTConsumer(
CompilerInstance& instance,
llvm::StringRef ref) {
- if (options_.with_ast_visitor)
- return llvm::make_unique<PluginConsumer>(&instance, options_);
- return llvm::make_unique<FindBadConstructsConsumer>(instance, options_);
+ return llvm::make_unique<PluginConsumer>(&instance, options_);
}
bool FindBadConstructsAction::ParseArgs(const CompilerInstance& instance,
@@ -57,12 +55,16 @@ bool FindBadConstructsAction::ParseArgs(const CompilerInstance& instance,
// TODO(tsepez): Enable this by default once http://crbug.com/356815
// and http://crbug.com/356816 are fixed.
options_.check_enum_last_value = true;
- } else if (args[i] == "with-ast-visitor") {
- options_.with_ast_visitor = true;
} else if (args[i] == "check-templates") {
options_.check_templates = true;
} else if (args[i] == "follow-macro-expansion") {
options_.follow_macro_expansion = true;
+ } else if (args[i] == "check-implicit-copy-ctors") {
+ options_.check_implicit_copy_ctors = true;
+ } else if (args[i] == "no-realpath") {
+ options_.no_realpath = true;
+ } else if (args[i] == "check-ipc") {
+ options_.check_ipc = true;
} else {
parsed = false;
llvm::errs() << "Unknown clang plugin argument: " << args[i] << "\n";
diff --git a/chromium/tools/clang/plugins/FindBadConstructsConsumer.cpp b/chromium/tools/clang/plugins/FindBadConstructsConsumer.cpp
index c79a764eb95..f37770fc251 100644
--- a/chromium/tools/clang/plugins/FindBadConstructsConsumer.cpp
+++ b/chromium/tools/clang/plugins/FindBadConstructsConsumer.cpp
@@ -7,6 +7,7 @@
#include "clang/Frontend/CompilerInstance.h"
#include "clang/AST/Attr.h"
#include "clang/Lex/Lexer.h"
+#include "clang/Sema/Sema.h"
#include "llvm/Support/raw_ostream.h"
using namespace clang;
@@ -48,13 +49,6 @@ const char kNotePublicDtor[] =
const char kNoteProtectedNonVirtualDtor[] =
"[chromium-style] Protected non-virtual destructor declared here";
-bool TypeHasNonTrivialDtor(const Type* type) {
- if (const CXXRecordDecl* cxx_r = type->getAsCXXRecordDecl())
- return !cxx_r->hasTrivialDestructor();
-
- return false;
-}
-
// Returns the underlying Type for |type| by expanding typedefs and removing
// any namespace qualifiers. This is similar to desugaring, except that for
// ElaboratedTypes, desugar will unwrap too much.
@@ -98,11 +92,32 @@ bool IsPodOrTemplateType(const CXXRecordDecl& record) {
record.isDependentType();
}
+// Use a local RAV implementation to simply collect all FunctionDecls marked for
+// late template parsing. This happens with the flag -fdelayed-template-parsing,
+// which is on by default in MSVC-compatible mode.
+std::set<FunctionDecl*> GetLateParsedFunctionDecls(TranslationUnitDecl* decl) {
+ struct Visitor : public RecursiveASTVisitor<Visitor> {
+ bool VisitFunctionDecl(FunctionDecl* function_decl) {
+ if (function_decl->isLateTemplateParsed())
+ late_parsed_decls.insert(function_decl);
+ return true;
+ }
+
+ std::set<FunctionDecl*> late_parsed_decls;
+ } v;
+ v.TraverseDecl(decl);
+ return v.late_parsed_decls;
+}
+
} // namespace
FindBadConstructsConsumer::FindBadConstructsConsumer(CompilerInstance& instance,
const Options& options)
: ChromeClassTester(instance, options) {
+ if (options.check_ipc) {
+ ipc_visitor_.reset(new CheckIPCVisitor(instance));
+ }
+
// Messages for virtual method specifiers.
diag_method_requires_override_ =
diagnostic().getCustomDiagID(getErrorLevel(), kMethodRequiresOverride);
@@ -136,6 +151,22 @@ FindBadConstructsConsumer::FindBadConstructsConsumer(CompilerInstance& instance,
DiagnosticsEngine::Note, kNoteProtectedNonVirtualDtor);
}
+void FindBadConstructsConsumer::Traverse(ASTContext& context) {
+ if (ipc_visitor_) {
+ ipc_visitor_->set_context(&context);
+ ParseFunctionTemplates(context.getTranslationUnitDecl());
+ }
+ RecursiveASTVisitor::TraverseDecl(context.getTranslationUnitDecl());
+ if (ipc_visitor_) ipc_visitor_->set_context(nullptr);
+}
+
+bool FindBadConstructsConsumer::TraverseDecl(Decl* decl) {
+ if (ipc_visitor_) ipc_visitor_->BeginDecl(decl);
+ bool result = RecursiveASTVisitor::TraverseDecl(decl);
+ if (ipc_visitor_) ipc_visitor_->EndDecl();
+ return result;
+}
+
bool FindBadConstructsConsumer::VisitDecl(clang::Decl* decl) {
clang::TagDecl* tag_decl = dyn_cast<clang::TagDecl>(decl);
if (tag_decl && tag_decl->isCompleteDefinition())
@@ -143,6 +174,17 @@ bool FindBadConstructsConsumer::VisitDecl(clang::Decl* decl) {
return true;
}
+bool FindBadConstructsConsumer::VisitTemplateSpecializationType(
+ TemplateSpecializationType* spec) {
+ if (ipc_visitor_) ipc_visitor_->VisitTemplateSpecializationType(spec);
+ return true;
+}
+
+bool FindBadConstructsConsumer::VisitCallExpr(CallExpr* call_expr) {
+ if (ipc_visitor_) ipc_visitor_->VisitCallExpr(call_expr);
+ return true;
+}
+
void FindBadConstructsConsumer::CheckChromeClass(SourceLocation record_location,
CXXRecordDecl* record) {
// By default, the clang checker doesn't check some types (templates, etc).
@@ -230,6 +272,14 @@ void FindBadConstructsConsumer::CheckCtorDtorWeight(
if (record->getIdentifier() == NULL)
return;
+ // We don't handle unions.
+ if (record->isUnion())
+ return;
+
+ // Skip records that derive from ignored base classes.
+ if (HasIgnoredBases(record))
+ return;
+
// Count the number of templated base classes as a feature of whether the
// destructor can be inlined.
int templated_base_classes = 0;
@@ -288,7 +338,7 @@ void FindBadConstructsConsumer::CheckCtorDtorWeight(
// The current check is buggy. An implicit copy constructor does not
// have an inline body, so this check never fires for classes with a
// user-declared out-of-line constructor.
- if (it->hasInlineBody()) {
+ if (it->hasInlineBody() && options_.check_implicit_copy_ctors) {
if (it->isCopyConstructor() &&
!record->hasUserDeclaredCopyConstructor()) {
// In general, implicit constructors are generated on demand. But
@@ -560,12 +610,17 @@ void FindBadConstructsConsumer::CountType(const Type* type,
int* templated_non_trivial_member) {
switch (type->getTypeClass()) {
case Type::Record: {
+ auto* record_decl = type->getAsCXXRecordDecl();
// Simplifying; the whole class isn't trivial if the dtor is, but
// we use this as a signal about complexity.
- if (TypeHasNonTrivialDtor(type))
- (*non_trivial_member)++;
- else
+ // Note that if a record doesn't have a definition, it doesn't matter how
+ // it's counted, since the translation unit will fail to build. In that
+ // case, just count it as a trivial member to avoid emitting warnings that
+ // might be spurious.
+ if (!record_decl->hasDefinition() || record_decl->hasTrivialDestructor())
(*trivial_member)++;
+ else
+ (*non_trivial_member)++;
break;
}
case Type::TemplateSpecialization: {
@@ -720,7 +775,7 @@ unsigned FindBadConstructsConsumer::DiagnosticForIssue(RefcountIssue issue) {
// ref-counting classes (base::RefCounted / base::RefCountedThreadSafe),
// ensure that there are no public destructors in the class hierarchy. This
// is to guard against accidentally stack-allocating a RefCounted class or
-// sticking it in a non-ref-counted container (like scoped_ptr<>).
+// sticking it in a non-ref-counted container (like std::unique_ptr<>).
void FindBadConstructsConsumer::CheckRefCountedDtors(
SourceLocation record_location,
CXXRecordDecl* record) {
@@ -872,4 +927,26 @@ void FindBadConstructsConsumer::CheckWeakPtrFactoryMembers(
}
}
+// Copied from BlinkGCPlugin, see crrev.com/1135333007
+void FindBadConstructsConsumer::ParseFunctionTemplates(
+ TranslationUnitDecl* decl) {
+ if (!instance().getLangOpts().DelayedTemplateParsing)
+ return; // Nothing to do.
+
+ std::set<FunctionDecl*> late_parsed_decls = GetLateParsedFunctionDecls(decl);
+ clang::Sema& sema = instance().getSema();
+
+ for (const FunctionDecl* fd : late_parsed_decls) {
+ assert(fd->isLateTemplateParsed());
+
+ if (instance().getSourceManager().isInSystemHeader(
+ instance().getSourceManager().getSpellingLoc(fd->getLocation())))
+ continue;
+
+ // Parse and build AST for yet-uninstantiated template functions.
+ clang::LateParsedTemplate* lpt = sema.LateParsedTemplateMap[fd];
+ sema.LateTemplateParser(sema.OpaqueParser, *lpt);
+ }
+}
+
} // namespace chrome_checker
diff --git a/chromium/tools/clang/plugins/FindBadConstructsConsumer.h b/chromium/tools/clang/plugins/FindBadConstructsConsumer.h
index 8f8fc870466..62bf9cf0e68 100644
--- a/chromium/tools/clang/plugins/FindBadConstructsConsumer.h
+++ b/chromium/tools/clang/plugins/FindBadConstructsConsumer.h
@@ -20,6 +20,8 @@
#ifndef TOOLS_CLANG_PLUGINS_FINDBADCONSTRUCTSCONSUMER_H_
#define TOOLS_CLANG_PLUGINS_FINDBADCONSTRUCTSCONSUMER_H_
+#include <memory>
+
#include "clang/AST/AST.h"
#include "clang/AST/ASTConsumer.h"
#include "clang/AST/Attr.h"
@@ -29,6 +31,7 @@
#include "clang/Basic/SourceManager.h"
#include "clang/Basic/SourceLocation.h"
+#include "CheckIPCVisitor.h"
#include "ChromeClassTester.h"
#include "Options.h"
#include "SuppressibleDiagnosticBuilder.h"
@@ -43,8 +46,13 @@ class FindBadConstructsConsumer
FindBadConstructsConsumer(clang::CompilerInstance& instance,
const Options& options);
+ void Traverse(clang::ASTContext& context);
+
// RecursiveASTVisitor:
+ bool TraverseDecl(clang::Decl* decl);
bool VisitDecl(clang::Decl* decl);
+ bool VisitTemplateSpecializationType(clang::TemplateSpecializationType* spec);
+ bool VisitCallExpr(clang::CallExpr* call_expr);
// ChromeClassTester overrides:
void CheckChromeClass(clang::SourceLocation record_location,
@@ -98,6 +106,8 @@ class FindBadConstructsConsumer
void CheckWeakPtrFactoryMembers(clang::SourceLocation record_location,
clang::CXXRecordDecl* record);
+ void ParseFunctionTemplates(clang::TranslationUnitDecl* decl);
+
unsigned diag_method_requires_override_;
unsigned diag_redundant_virtual_specifier_;
unsigned diag_base_method_virtual_and_final_;
@@ -110,6 +120,8 @@ class FindBadConstructsConsumer
unsigned diag_note_implicit_dtor_;
unsigned diag_note_public_dtor_;
unsigned diag_note_protected_non_virtual_dtor_;
+
+ std::unique_ptr<CheckIPCVisitor> ipc_visitor_;
};
} // namespace chrome_checker
diff --git a/chromium/tools/clang/plugins/Options.h b/chromium/tools/clang/plugins/Options.h
index bb5857f0d74..684dab52bbb 100644
--- a/chromium/tools/clang/plugins/Options.h
+++ b/chromium/tools/clang/plugins/Options.h
@@ -8,21 +8,19 @@
namespace chrome_checker {
struct Options {
- Options()
- : check_base_classes(false),
- enforce_in_pdf(false),
- enforce_in_thirdparty_webkit(false),
- check_enum_last_value(false),
- with_ast_visitor(false),
- check_templates(false) {}
-
- bool check_base_classes;
- bool enforce_in_pdf;
- bool enforce_in_thirdparty_webkit; // Use in Blink code itself
- bool check_enum_last_value;
- bool with_ast_visitor;
- bool check_templates;
+ bool check_base_classes = false;
+ bool enforce_in_pdf = false;
+ bool enforce_in_thirdparty_webkit = false; // Use in Blink code itself
+ bool check_enum_last_value = false;
+ bool check_templates = false;
bool follow_macro_expansion = false;
+ // This is needed during the migration from ASTConsumer approach to the
+ // RecursiveASTVisitor approach. See https://crbug.com/436357 for details.
+ bool check_implicit_copy_ctors = false;
+  // This is needed for some distributed build-systems to respect banned
+ // paths. See https://crbug.com/583454 for details.
+ bool no_realpath = false;
+ bool check_ipc = false;
};
} // namespace chrome_checker
diff --git a/chromium/tools/clang/pylib/__init__.py b/chromium/tools/clang/pylib/__init__.py
new file mode 100644
index 00000000000..ca3e206fdd8
--- /dev/null
+++ b/chromium/tools/clang/pylib/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/chromium/tools/clang/pylib/clang/__init__.py b/chromium/tools/clang/pylib/clang/__init__.py
new file mode 100644
index 00000000000..ca3e206fdd8
--- /dev/null
+++ b/chromium/tools/clang/pylib/clang/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/chromium/tools/clang/pylib/clang/compile_db.py b/chromium/tools/clang/pylib/clang/compile_db.py
new file mode 100755
index 00000000000..8bae555ec81
--- /dev/null
+++ b/chromium/tools/clang/pylib/clang/compile_db.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import subprocess
+
+
+def GenerateWithNinja(path):
+ """Generates a compile database using ninja.
+
+ Args:
+ path: The build directory to generate a compile database for.
+ """
+ # TODO(dcheng): Incorporate Windows-specific compile DB munging from
+ # https://codereview.chromium.org/718873004
+ print 'Generating compile database in %s...' % path
+ args = ['ninja', '-C', path, '-t', 'compdb', 'cc', 'cxx', 'objc', 'objcxx']
+ output = subprocess.check_output(args)
+ with file(os.path.join(path, 'compile_commands.json'), 'w') as f:
+ f.write(output)
+
+
+def Read(path):
+ """Reads a compile database into memory.
+
+ Args:
+ path: Directory that contains the compile database.
+ """
+ with open(os.path.join(path, 'compile_commands.json'), 'rb') as db:
+ return json.load(db)
diff --git a/chromium/tools/clang/pylib/clang/plugin_testing.py b/chromium/tools/clang/pylib/clang/plugin_testing.py
new file mode 100755
index 00000000000..bede36e29a0
--- /dev/null
+++ b/chromium/tools/clang/pylib/clang/plugin_testing.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import os
+import subprocess
+import sys
+
+
+class ClangPluginTest(object):
+ """Test harness for clang plugins."""
+
+ def __init__(self, test_base, clang_path, plugin_path, plugin_name,
+ reset_results):
+ """Constructor.
+
+ Args:
+ test_base: Path to the directory containing the tests.
+ clang_path: Path to the clang binary.
+ plugin_path: Optional path to the plugin binary. May be None, such as on
+ Windows, where the plugin is built directly into the clang
+ binary.
+ plugin_name: Name of the plugin.
+ reset_results: If true, resets expected results to the actual test output.
+ """
+ self._test_base = test_base
+ self._clang_path = clang_path
+ self._plugin_path = plugin_path
+ self._plugin_name = plugin_name
+ self._reset_results = reset_results
+
+ def AddPluginArg(self, clang_cmd, plugin_arg):
+ """Helper to add an argument for the tested plugin."""
+ clang_cmd.extend(['-Xclang', '-plugin-arg-%s' % self._plugin_name,
+ '-Xclang', plugin_arg])
+
+ def AdjustClangArguments(self, clang_cmd):
+ """Tests can override this to customize the command line for clang."""
+ pass
+
+ def Run(self):
+ """Runs the tests.
+
+ The working directory is temporarily changed to self._test_base while
+ running the tests.
+
+ Returns: the number of failing tests.
+ """
+ print 'Using clang %s...' % self._clang_path
+ print 'Using plugin %s...' % self._plugin_path
+
+ os.chdir(self._test_base)
+
+ clang_cmd = [self._clang_path, '-c', '-std=c++11']
+ if self._plugin_path:
+ clang_cmd.extend(['-Xclang', '-load', '-Xclang', self._plugin_path])
+ clang_cmd.extend(['-Xclang', '-add-plugin', '-Xclang', self._plugin_name])
+ self.AdjustClangArguments(clang_cmd)
+
+ passing = []
+ failing = []
+ tests = glob.glob('*.cpp')
+ for test in tests:
+ sys.stdout.write('Testing %s... ' % test)
+ test_name, _ = os.path.splitext(test)
+
+ cmd = clang_cmd[:]
+ try:
+ # Some tests need to run with extra flags.
+ cmd.extend(file('%s.flags' % test_name).read().split())
+ except IOError:
+ pass
+ cmd.append(test)
+
+ failure_message = self.RunOneTest(test_name, cmd)
+ if failure_message:
+ print 'failed: %s' % failure_message
+ failing.append(test_name)
+ else:
+ print 'passed!'
+ passing.append(test_name)
+
+ print 'Ran %d tests: %d succeeded, %d failed' % (
+ len(passing) + len(failing), len(passing), len(failing))
+ for test in failing:
+ print ' %s' % test
+ return len(failing)
+
+ def RunOneTest(self, test_name, cmd):
+ try:
+ actual = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
+ except subprocess.CalledProcessError as e:
+ # Some plugin tests intentionally trigger compile errors, so just ignore
+ # an exit code that indicates failure.
+ actual = e.output
+ except Exception as e:
+ return 'could not execute %s (%s)' % (cmd, e)
+
+ return self.ProcessOneResult(test_name, actual)
+
+ def ProcessOneResult(self, test_name, actual):
+ """Tests can override this for custom result processing."""
+ # On Windows, clang emits CRLF as the end of line marker. Normalize it to LF
+ # to match posix systems.
+ actual = actual.replace('\r\n', '\n')
+
+ result_file = '%s.txt%s' % (test_name, '' if self._reset_results else
+ '.actual')
+ try:
+ expected = open('%s.txt' % test_name).read()
+ except IOError:
+ open(result_file, 'w').write(actual)
+ return 'no expected file found'
+
+ if expected != actual:
+ open(result_file, 'w').write(actual)
+ error = 'expected and actual differed\n'
+ error += 'Actual:\n' + actual
+ error += 'Expected:\n' + expected
+ return error
diff --git a/chromium/tools/clang/rewrite_scoped_refptr/CMakeLists.txt b/chromium/tools/clang/rewrite_scoped_refptr/CMakeLists.txt
index aa47400ce3d..b61fd653dc9 100644
--- a/chromium/tools/clang/rewrite_scoped_refptr/CMakeLists.txt
+++ b/chromium/tools/clang/rewrite_scoped_refptr/CMakeLists.txt
@@ -3,6 +3,7 @@ set(LLVM_LINK_COMPONENTS
MCParser
Option
X86AsmParser
+ X86CodeGen
)
add_llvm_executable(rewrite_scoped_refptr
diff --git a/chromium/tools/clang/rewrite_scoped_refptr/RewriteScopedRefptr.cpp b/chromium/tools/clang/rewrite_scoped_refptr/RewriteScopedRefptr.cpp
index e11d5f9299e..d6a3d042ac2 100644
--- a/chromium/tools/clang/rewrite_scoped_refptr/RewriteScopedRefptr.cpp
+++ b/chromium/tools/clang/rewrite_scoped_refptr/RewriteScopedRefptr.cpp
@@ -22,7 +22,7 @@
#include "clang/Tooling/Refactoring.h"
#include "clang/Tooling/Tooling.h"
#include "llvm/Support/CommandLine.h"
-#include "llvm/support/TargetSelect.h"
+#include "llvm/Support/TargetSelect.h"
using namespace clang::ast_matchers;
using clang::tooling::CommonOptionsParser;
@@ -268,14 +268,14 @@ int main(int argc, const char* argv[]) {
MatchFinder match_finder;
Replacements replacements;
- auto is_scoped_refptr = recordDecl(isSameOrDerivedFrom("::scoped_refptr"),
- isTemplateInstantiation());
+ auto is_scoped_refptr = cxxRecordDecl(isSameOrDerivedFrom("::scoped_refptr"),
+ isTemplateInstantiation());
// Finds all calls to conversion operator member function. This catches calls
// to "operator T*", "operator Testable", and "operator bool" equally.
- auto base_matcher = memberCallExpr(thisPointerType(is_scoped_refptr),
- callee(conversionDecl()),
- on(id("arg", expr())));
+ auto base_matcher =
+ cxxMemberCallExpr(thisPointerType(is_scoped_refptr),
+ callee(conversionDecl()), on(id("arg", expr())));
// The heuristic for whether or not converting a temporary is 'unsafe'. An
// unsafe conversion is one where a temporary scoped_refptr<T> is converted to
@@ -285,7 +285,7 @@ int main(int argc, const char* argv[]) {
// retains the necessary reference, since this is a common idiom to see in
// loop bodies.
auto is_unsafe_temporary_conversion =
- on(bindTemporaryExpr(unless(has(operatorCallExpr()))));
+ on(cxxBindTemporaryExpr(unless(has(cxxOperatorCallExpr()))));
// Returning a scoped_refptr<T> as a T* is considered unsafe if either are
// true:
@@ -322,12 +322,13 @@ int main(int argc, const char* argv[]) {
auto is_logging_helper =
functionDecl(anyOf(hasName("CheckEQImpl"), hasName("CheckNEImpl")));
auto is_gtest_helper = functionDecl(
- anyOf(methodDecl(ofClass(recordDecl(isSameOrDerivedFrom(
- hasName("::testing::internal::EqHelper")))),
- hasName("Compare")),
+ anyOf(cxxMethodDecl(ofClass(cxxRecordDecl(isSameOrDerivedFrom(
+ hasName("::testing::internal::EqHelper")))),
+ hasName("Compare")),
hasName("::testing::internal::CmpHelperNE")));
- auto is_gtest_assertion_result_ctor = constructorDecl(ofClass(
- recordDecl(isSameOrDerivedFrom(hasName("::testing::AssertionResult")))));
+ auto is_gtest_assertion_result_ctor =
+ cxxConstructorDecl(ofClass(cxxRecordDecl(
+ isSameOrDerivedFrom(hasName("::testing::AssertionResult")))));
// Find all calls to an operator overload that are 'safe'.
//
@@ -336,7 +337,7 @@ int main(int argc, const char* argv[]) {
// the call ambiguous.
GetRewriterCallback get_callback(&replacements);
match_finder.addMatcher(
- memberCallExpr(
+ cxxMemberCallExpr(
base_matcher,
// Excluded since the conversion may be unsafe.
unless(anyOf(is_unsafe_temporary_conversion, is_unsafe_return)),
@@ -345,21 +346,20 @@ int main(int argc, const char* argv[]) {
// result in an incorrect replacement that changes the helper function
// itself. Instead, the right replacement is to rewrite the macro's
// arguments.
- unless(hasAncestor(decl(anyOf(is_logging_helper,
- is_gtest_helper,
+ unless(hasAncestor(decl(anyOf(is_logging_helper, is_gtest_helper,
is_gtest_assertion_result_ctor))))),
&get_callback);
// Find temporary scoped_refptr<T>'s being unsafely assigned to a T*.
VarRewriterCallback var_callback(&replacements);
auto initialized_with_temporary = ignoringImpCasts(exprWithCleanups(
- has(memberCallExpr(base_matcher, is_unsafe_temporary_conversion))));
+ has(cxxMemberCallExpr(base_matcher, is_unsafe_temporary_conversion))));
match_finder.addMatcher(id("var",
varDecl(hasInitializer(initialized_with_temporary),
hasType(pointerType()))),
&var_callback);
match_finder.addMatcher(
- constructorDecl(forEachConstructorInitializer(
+ cxxConstructorDecl(forEachConstructorInitializer(
allOf(withInitializer(initialized_with_temporary),
forField(id("var", fieldDecl(hasType(pointerType()))))))),
&var_callback);
@@ -367,7 +367,7 @@ int main(int argc, const char* argv[]) {
// Rewrite functions that unsafely turn a scoped_refptr<T> into a T* when
// returning a value.
FunctionRewriterCallback fn_callback(&replacements);
- match_finder.addMatcher(memberCallExpr(base_matcher, is_unsafe_return),
+ match_finder.addMatcher(cxxMemberCallExpr(base_matcher, is_unsafe_return),
&fn_callback);
// Rewrite logging / gtest expressions that result in an implicit conversion.
@@ -407,7 +407,7 @@ int main(int argc, const char* argv[]) {
// However, the tool does need to handle the _TRUE counterparts, since the
// conversion occurs inside the constructor in those cases.
match_finder.addMatcher(
- constructExpr(
+ cxxConstructExpr(
argumentCountIs(2),
hasArgument(0, id("expr", expr(hasType(is_scoped_refptr)))),
hasDeclaration(is_gtest_assertion_result_ctor)),
diff --git a/chromium/tools/clang/rewrite_to_chrome_style/CMakeLists.txt b/chromium/tools/clang/rewrite_to_chrome_style/CMakeLists.txt
index 10219e8ad6d..8fa96efc994 100644
--- a/chromium/tools/clang/rewrite_to_chrome_style/CMakeLists.txt
+++ b/chromium/tools/clang/rewrite_to_chrome_style/CMakeLists.txt
@@ -3,6 +3,7 @@ set(LLVM_LINK_COMPONENTS
MCParser
Option
X86AsmParser
+ X86CodeGen
)
add_llvm_executable(rewrite_to_chrome_style
diff --git a/chromium/tools/clang/rewrite_to_chrome_style/RewriteToChromeStyle.cpp b/chromium/tools/clang/rewrite_to_chrome_style/RewriteToChromeStyle.cpp
index 41e3e5cb143..acd6bd34ebb 100644
--- a/chromium/tools/clang/rewrite_to_chrome_style/RewriteToChromeStyle.cpp
+++ b/chromium/tools/clang/rewrite_to_chrome_style/RewriteToChromeStyle.cpp
@@ -14,9 +14,10 @@
#include <assert.h>
#include <algorithm>
+#include <fstream>
#include <memory>
#include <string>
-#include <unordered_set>
+#include <unordered_map>
#include "clang/AST/ASTContext.h"
#include "clang/ASTMatchers/ASTMatchFinder.h"
@@ -32,6 +33,13 @@
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/TargetSelect.h"
+#if defined(_WIN32)
+#include <windows.h>
+#else
+#include <sys/file.h>
+#include <unistd.h>
+#endif
+
using namespace clang::ast_matchers;
using clang::tooling::CommonOptionsParser;
using clang::tooling::Replacement;
@@ -40,19 +48,175 @@ using llvm::StringRef;
namespace {
+const char kBlinkFieldPrefix[] = "m_";
+const char kBlinkStaticMemberPrefix[] = "s_";
+const char kGeneratedFileRegex[] = "^gen/|/gen/";
+
+const clang::ast_matchers::internal::
+ VariadicDynCastAllOfMatcher<clang::Expr, clang::UnresolvedLookupExpr>
+ unresolvedLookupExpr;
+
+const clang::ast_matchers::internal::
+ VariadicDynCastAllOfMatcher<clang::Expr, clang::UnresolvedMemberExpr>
+ unresolvedMemberExpr;
+
AST_MATCHER(clang::FunctionDecl, isOverloadedOperator) {
return Node.isOverloadedOperator();
}
-constexpr char kBlinkFieldPrefix[] = "m_";
-constexpr char kBlinkStaticMemberPrefix[] = "s_";
+AST_MATCHER_P(clang::FunctionTemplateDecl,
+ templatedDecl,
+ clang::ast_matchers::internal::Matcher<clang::FunctionDecl>,
+ InnerMatcher) {
+ return InnerMatcher.matches(*Node.getTemplatedDecl(), Finder, Builder);
+}
+
+// This will narrow CXXCtorInitializers down for both FieldDecls and
+// IndirectFieldDecls (ie. anonymous unions and such). In both cases
+// getAnyMember() will return a FieldDecl which we can match against.
+AST_MATCHER_P(clang::CXXCtorInitializer,
+ forAnyField,
+ clang::ast_matchers::internal::Matcher<clang::FieldDecl>,
+ InnerMatcher) {
+ const clang::FieldDecl* NodeAsDecl = Node.getAnyMember();
+ return (NodeAsDecl != nullptr &&
+ InnerMatcher.matches(*NodeAsDecl, Finder, Builder));
+}
-bool GetNameForDecl(const clang::FunctionDecl& decl, std::string& name) {
- name = decl.getNameAsString();
- name[0] = clang::toUppercase(name[0]);
+// Matches if all the overloads in the lookup set match the provided matcher.
+AST_MATCHER_P(clang::OverloadExpr,
+ allOverloadsMatch,
+ clang::ast_matchers::internal::Matcher<clang::NamedDecl>,
+ InnerMatcher) {
+ if (Node.getNumDecls() == 0)
+ return false;
+
+ for (clang::NamedDecl* decl : Node.decls()) {
+ if (!InnerMatcher.matches(*decl, Finder, Builder))
+ return false;
+ }
return true;
}
+bool IsDeclContextInWTF(const clang::DeclContext* decl_context) {
+ auto* namespace_decl = clang::dyn_cast_or_null<clang::NamespaceDecl>(
+ decl_context->getEnclosingNamespaceContext());
+ if (!namespace_decl)
+ return false;
+ if (namespace_decl->getParent()->isTranslationUnit() &&
+ namespace_decl->getName() == "WTF")
+ return true;
+ return IsDeclContextInWTF(namespace_decl->getParent());
+}
+
+template <typename T>
+bool MatchAllOverriddenMethods(
+ const clang::CXXMethodDecl& decl,
+ T&& inner_matcher,
+ clang::ast_matchers::internal::ASTMatchFinder* finder,
+ clang::ast_matchers::internal::BoundNodesTreeBuilder* builder) {
+ bool override_matches = false;
+ bool override_not_matches = false;
+
+ for (auto it = decl.begin_overridden_methods();
+ it != decl.end_overridden_methods(); ++it) {
+ if (MatchAllOverriddenMethods(**it, inner_matcher, finder, builder))
+ override_matches = true;
+ else
+ override_not_matches = true;
+ }
+
+ // If this fires we have a class overriding a method that matches, and a
+ // method that does not match the inner matcher. In that case we will match
+ // one ancestor method but not the other. If we rename one of the and not the
+ // other it will break what this class overrides, disconnecting it from the
+ // one we did not rename which creates a behaviour change. So assert and
+ // demand the user to fix the code first (or add the method to our
+ // blacklist T_T).
+ if (override_matches || override_not_matches)
+ assert(override_matches != override_not_matches);
+
+ // If the method overrides something that doesn't match, so the method itself
+ // doesn't match.
+ if (override_not_matches)
+ return false;
+ // If the method overrides something that matches, so the method ifself
+ // matches.
+ if (override_matches)
+ return true;
+
+ return inner_matcher.matches(decl, finder, builder);
+}
+
+AST_MATCHER_P(clang::CXXMethodDecl,
+ includeAllOverriddenMethods,
+ clang::ast_matchers::internal::Matcher<clang::CXXMethodDecl>,
+ InnerMatcher) {
+ return MatchAllOverriddenMethods(Node, InnerMatcher, Finder, Builder);
+}
+
+bool IsMethodOverrideOf(const clang::CXXMethodDecl& decl,
+ const char* class_name) {
+ if (decl.getParent()->getQualifiedNameAsString() == class_name)
+ return true;
+ for (auto it = decl.begin_overridden_methods();
+ it != decl.end_overridden_methods(); ++it) {
+ if (IsMethodOverrideOf(**it, class_name))
+ return true;
+ }
+ return false;
+}
+
+bool IsBlacklistedFunction(const clang::FunctionDecl& decl) {
+ // swap() functions should match the signature of std::swap for ADL tricks.
+ return decl.getName() == "swap";
+}
+
+bool IsBlacklistedMethod(const clang::CXXMethodDecl& decl) {
+ if (decl.isStatic())
+ return false;
+
+ clang::StringRef name = decl.getName();
+
+ // These methods should never be renamed.
+ static const char* kBlacklistMethods[] = {"trace", "lock", "unlock",
+ "try_lock"};
+ for (const auto& b : kBlacklistMethods) {
+ if (name == b)
+ return true;
+ }
+
+ // Iterator methods shouldn't be renamed to work with stl and range-for
+ // loops.
+ std::string ret_type = decl.getReturnType().getAsString();
+ if (ret_type.find("iterator") != std::string::npos ||
+ ret_type.find("Iterator") != std::string::npos) {
+ static const char* kIteratorBlacklist[] = {"begin", "end", "rbegin",
+ "rend"};
+ for (const auto& b : kIteratorBlacklist) {
+ if (name == b)
+ return true;
+ }
+ }
+
+ // Subclasses of InspectorAgent will subclass "disable()" from both blink and
+ // from gen/, which is problematic, but DevTools folks don't want to rename
+ // it or split this up. So don't rename it at all.
+ if (name.equals("disable") &&
+ IsMethodOverrideOf(decl, "blink::InspectorAgent"))
+ return true;
+
+ return false;
+}
+
+AST_MATCHER(clang::FunctionDecl, isBlacklistedFunction) {
+ return IsBlacklistedFunction(Node);
+}
+
+AST_MATCHER(clang::CXXMethodDecl, isBlacklistedMethod) {
+ return IsBlacklistedMethod(Node);
+}
+
// Helper to convert from a camelCaseName to camel_case_name. It uses some
// heuristics to try to handle acronyms in camel case names correctly.
std::string CamelCaseToUnderscoreCase(StringRef input) {
@@ -60,6 +224,7 @@ std::string CamelCaseToUnderscoreCase(StringRef input) {
bool needs_underscore = false;
bool was_lowercase = false;
bool was_uppercase = false;
+ bool first_char = true;
// Iterate in reverse to minimize the amount of backtracking.
for (const unsigned char* i = input.bytes_end() - 1; i >= input.bytes_begin();
--i) {
@@ -70,7 +235,9 @@ std::string CamelCaseToUnderscoreCase(StringRef input) {
// Transitioning from upper to lower case requires an underscore. This is
// needed to handle names with acronyms, e.g. handledHTTPRequest needs a '_'
// in 'dH'. This is a complement to the non-acronym case further down.
- if (needs_underscore || (was_uppercase && is_lowercase)) {
+ if (was_uppercase && is_lowercase)
+ needs_underscore = true;
+ if (needs_underscore) {
output += '_';
needs_underscore = false;
}
@@ -78,32 +245,18 @@ std::string CamelCaseToUnderscoreCase(StringRef input) {
// Handles the non-acronym case: transitioning from lower to upper case
// requires an underscore when emitting the next character, e.g. didLoad
// needs a '_' in 'dL'.
- if (i != input.bytes_end() - 1 && was_lowercase && is_uppercase)
+ if (!first_char && was_lowercase && is_uppercase)
needs_underscore = true;
was_lowercase = is_lowercase;
was_uppercase = is_uppercase;
+ first_char = false;
}
std::reverse(output.begin(), output.end());
return output;
}
-bool GetNameForDecl(const clang::FieldDecl& decl, std::string& name) {
- StringRef original_name = decl.getName();
- // Blink style field names are prefixed with `m_`. If this prefix isn't
- // present, assume it's already been converted to Google style.
- if (original_name.size() < strlen(kBlinkFieldPrefix) ||
- !original_name.startswith(kBlinkFieldPrefix))
- return false;
- name = CamelCaseToUnderscoreCase(
- original_name.substr(strlen(kBlinkFieldPrefix)));
- // The few examples I could find used struct-style naming with no `_` suffix
- // for unions.
- if (decl.getParent()->isClass())
- name += '_';
- return true;
-}
-
-bool IsProbablyConst(const clang::VarDecl& decl) {
+bool IsProbablyConst(const clang::VarDecl& decl,
+ const clang::ASTContext& context) {
clang::QualType type = decl.getType();
if (!type.isConstQualified())
return false;
@@ -116,30 +269,81 @@ bool IsProbablyConst(const clang::VarDecl& decl) {
if (decl.getStorageDuration() == clang::SD_Static)
return true;
- // Otherwise, use a simple heuristic: if it's initialized with a literal of
- // some sort, also use kConstantStyle naming.
const clang::Expr* initializer = decl.getInit();
if (!initializer)
return false;
- // Ignore implicit casts, so the literal check below still matches on
- // array-to-pointer decay, e.g.
- // const char* const kConst = "...";
- if (const clang::ImplicitCastExpr* cast_expr =
- clang::dyn_cast<clang::ImplicitCastExpr>(initializer))
- initializer = cast_expr->getSubExprAsWritten();
-
- return clang::isa<clang::CharacterLiteral>(initializer) ||
- clang::isa<clang::CompoundLiteralExpr>(initializer) ||
- clang::isa<clang::CXXBoolLiteralExpr>(initializer) ||
- clang::isa<clang::CXXNullPtrLiteralExpr>(initializer) ||
- clang::isa<clang::FloatingLiteral>(initializer) ||
- clang::isa<clang::IntegerLiteral>(initializer) ||
- clang::isa<clang::StringLiteral>(initializer) ||
- clang::isa<clang::UserDefinedLiteral>(initializer);
+ // If the expression is dependent on a template input, then we are not
+ // sure if it can be compile-time generated as calling isEvaluatable() is
+ // not valid on |initializer|.
+ // TODO(crbug.com/581218): We could probably look at each compiled
+ // instantiation of the template and see if they are all compile-time
+ // isEvaluable().
+ if (initializer->isInstantiationDependent())
+ return false;
+
+ // If the expression can be evaluated at compile time, then it should have a
+ // kFoo style name. Otherwise, not.
+ return initializer->isEvaluatable(context);
+}
+
+bool GetNameForDecl(const clang::FunctionDecl& decl,
+ const clang::ASTContext& context,
+ std::string& name) {
+ name = decl.getName().str();
+ name[0] = clang::toUppercase(name[0]);
+ return true;
+}
+
+bool GetNameForDecl(const clang::EnumConstantDecl& decl,
+ const clang::ASTContext& context,
+ std::string& name) {
+ StringRef original_name = decl.getName();
+
+ // If it's already correct leave it alone.
+ if (original_name.size() >= 2 && original_name[0] == 'k' &&
+ clang::isUppercase(original_name[1]))
+ return false;
+
+ bool is_shouty = true;
+ for (char c : original_name) {
+ if (!clang::isUppercase(c) && !clang::isDigit(c) && c != '_') {
+ is_shouty = false;
+ break;
+ }
+ }
+
+ if (is_shouty)
+ return false;
+
+ name = 'k'; // k prefix on enum values.
+ name += original_name;
+ name[1] = clang::toUppercase(name[1]);
+ return true;
}
-bool GetNameForDecl(const clang::VarDecl& decl, std::string& name) {
+bool GetNameForDecl(const clang::FieldDecl& decl,
+ const clang::ASTContext& context,
+ std::string& name) {
+ StringRef original_name = decl.getName();
+ bool member_prefix = original_name.startswith(kBlinkFieldPrefix);
+
+ StringRef rename_part = !member_prefix
+ ? original_name
+ : original_name.substr(strlen(kBlinkFieldPrefix));
+ name = CamelCaseToUnderscoreCase(rename_part);
+
+ // Assume that prefix of m_ was intentional and always replace it with a
+ // suffix _.
+ if (member_prefix && name.back() != '_')
+ name += '_';
+
+ return true;
+}
+
+bool GetNameForDecl(const clang::VarDecl& decl,
+ const clang::ASTContext& context,
+ std::string& name) {
StringRef original_name = decl.getName();
// Nothing to do for unnamed parameters.
@@ -154,64 +358,139 @@ bool GetNameForDecl(const clang::VarDecl& decl, std::string& name) {
else if (original_name.startswith(kBlinkFieldPrefix))
original_name = original_name.substr(strlen(kBlinkFieldPrefix));
- if (IsProbablyConst(decl)) {
+ bool is_const = IsProbablyConst(decl, context);
+ if (is_const) {
// Don't try to rename constants that already conform to Chrome style.
if (original_name.size() >= 2 && original_name[0] == 'k' &&
clang::isUppercase(original_name[1]))
return false;
+
+ // Struct consts in WTF do not become kFoo cuz stuff like type traits
+ // should stay as lowercase.
+ const clang::DeclContext* decl_context = decl.getDeclContext();
+ bool is_in_wtf = IsDeclContextInWTF(decl_context);
+ const clang::CXXRecordDecl* parent =
+ clang::dyn_cast_or_null<clang::CXXRecordDecl>(decl_context);
+ if (is_in_wtf && parent && parent->isStruct())
+ return false;
+
name = 'k';
name.append(original_name.data(), original_name.size());
name[1] = clang::toUppercase(name[1]);
} else {
name = CamelCaseToUnderscoreCase(original_name);
+
+ // Non-const variables with static storage duration at namespace scope are
+ // prefixed with `g_' to reduce the likelihood of a naming collision.
+ const clang::DeclContext* decl_context = decl.getDeclContext();
+ if (name.find("g_") != 0 && decl.hasGlobalStorage() &&
+ decl_context->isNamespace())
+ name.insert(0, "g_");
}
- if (decl.isStaticDataMember()) {
+ // Static members end with _ just like other members, but constants should
+ // not.
+ if (!is_const && decl.isStaticDataMember()) {
name += '_';
}
return true;
}
+bool GetNameForDecl(const clang::FunctionTemplateDecl& decl,
+ const clang::ASTContext& context,
+ std::string& name) {
+ clang::FunctionDecl* templated_function = decl.getTemplatedDecl();
+ return GetNameForDecl(*templated_function, context, name);
+}
+
+bool GetNameForDecl(const clang::NamedDecl& decl,
+ const clang::ASTContext& context,
+ std::string& name) {
+ if (auto* function = clang::dyn_cast<clang::FunctionDecl>(&decl))
+ return GetNameForDecl(*function, context, name);
+ if (auto* var = clang::dyn_cast<clang::VarDecl>(&decl))
+ return GetNameForDecl(*var, context, name);
+ if (auto* field = clang::dyn_cast<clang::FieldDecl>(&decl))
+ return GetNameForDecl(*field, context, name);
+ if (auto* function_template =
+ clang::dyn_cast<clang::FunctionTemplateDecl>(&decl))
+ return GetNameForDecl(*function_template, context, name);
+ if (auto* enumc = clang::dyn_cast<clang::EnumConstantDecl>(&decl))
+ return GetNameForDecl(*enumc, context, name);
+
+ return false;
+}
+
+bool GetNameForDecl(const clang::UsingDecl& decl,
+ const clang::ASTContext& context,
+ std::string& name) {
+ assert(decl.shadow_size() > 0);
+
+ // If a using declaration's targeted declaration is a set of overloaded
+ // functions, it can introduce multiple shadowed declarations. Just using the
+ // first one is OK, since overloaded functions have the same name, by
+ // definition.
+ return GetNameForDecl(*decl.shadow_begin()->getTargetDecl(), context, name);
+}
+
template <typename Type>
struct TargetNodeTraits;
template <>
struct TargetNodeTraits<clang::NamedDecl> {
- static constexpr char kName[] = "decl";
- static clang::CharSourceRange GetRange(const clang::NamedDecl& decl) {
- return clang::CharSourceRange::getTokenRange(decl.getLocation());
+ static clang::SourceLocation GetLoc(const clang::NamedDecl& decl) {
+ return decl.getLocation();
}
+ static const char* GetName() { return "decl"; }
+ static const char* GetType() { return "NamedDecl"; }
};
-constexpr char TargetNodeTraits<clang::NamedDecl>::kName[];
template <>
struct TargetNodeTraits<clang::MemberExpr> {
- static constexpr char kName[] = "expr";
- static clang::CharSourceRange GetRange(const clang::MemberExpr& expr) {
- return clang::CharSourceRange::getTokenRange(expr.getMemberLoc());
+ static clang::SourceLocation GetLoc(const clang::MemberExpr& expr) {
+ return expr.getMemberLoc();
}
+ static const char* GetName() { return "expr"; }
+ static const char* GetType() { return "MemberExpr"; }
};
-constexpr char TargetNodeTraits<clang::MemberExpr>::kName[];
template <>
struct TargetNodeTraits<clang::DeclRefExpr> {
- static constexpr char kName[] = "expr";
- static clang::CharSourceRange GetRange(const clang::DeclRefExpr& expr) {
- return clang::CharSourceRange::getTokenRange(expr.getLocation());
+ static clang::SourceLocation GetLoc(const clang::DeclRefExpr& expr) {
+ return expr.getLocation();
}
+ static const char* GetName() { return "expr"; }
+ static const char* GetType() { return "DeclRefExpr"; }
};
-constexpr char TargetNodeTraits<clang::DeclRefExpr>::kName[];
template <>
struct TargetNodeTraits<clang::CXXCtorInitializer> {
- static constexpr char kName[] = "initializer";
- static clang::CharSourceRange GetRange(
- const clang::CXXCtorInitializer& init) {
- return clang::CharSourceRange::getTokenRange(init.getSourceLocation());
+ static clang::SourceLocation GetLoc(const clang::CXXCtorInitializer& init) {
+ assert(init.isWritten());
+ return init.getSourceLocation();
+ }
+ static const char* GetName() { return "initializer"; }
+ static const char* GetType() { return "CXXCtorInitializer"; }
+};
+
+template <>
+struct TargetNodeTraits<clang::UnresolvedLookupExpr> {
+ static clang::SourceLocation GetLoc(const clang::UnresolvedLookupExpr& expr) {
+ return expr.getNameLoc();
+ }
+ static const char* GetName() { return "expr"; }
+ static const char* GetType() { return "UnresolvedLookupExpr"; }
+};
+
+template <>
+struct TargetNodeTraits<clang::UnresolvedMemberExpr> {
+ static clang::SourceLocation GetLoc(const clang::UnresolvedMemberExpr& expr) {
+ return expr.getMemberLoc();
}
+ static const char* GetName() { return "expr"; }
+ static const char* GetType() { return "UnresolvedMemberExpr"; }
};
-constexpr char TargetNodeTraits<clang::CXXCtorInitializer>::kName[];
template <typename DeclNode, typename TargetNode>
class RewriterBase : public MatchFinder::MatchCallback {
@@ -220,24 +499,53 @@ class RewriterBase : public MatchFinder::MatchCallback {
: replacements_(replacements) {}
void run(const MatchFinder::MatchResult& result) override {
- std::string name;
- if (!GetNameForDecl(*result.Nodes.getNodeAs<DeclNode>("decl"), name))
+ const DeclNode* decl = result.Nodes.getNodeAs<DeclNode>("decl");
+ // If false, there's no name to be renamed.
+ if (!decl->getIdentifier())
return;
- replacements_->emplace(*result.SourceManager,
- TargetNodeTraits<TargetNode>::GetRange(
- *result.Nodes.getNodeAs<TargetNode>(
- TargetNodeTraits<TargetNode>::kName)),
- name);
+ clang::SourceLocation decl_loc =
+ TargetNodeTraits<clang::NamedDecl>::GetLoc(*decl);
+ if (decl_loc.isMacroID()) {
+ // Get the location of the spelling of the declaration. If token pasting
+ // was used this will be in "scratch space" and we don't know how to get
+ // from there back to/ the actual macro with the foo##bar text. So just
+ // don't replace in that case.
+ clang::SourceLocation spell =
+ result.SourceManager->getSpellingLoc(decl_loc);
+ if (strcmp(result.SourceManager->getBufferName(spell),
+ "<scratch space>") == 0)
+ return;
+ }
+ clang::ASTContext* context = result.Context;
+ std::string new_name;
+ if (!GetNameForDecl(*decl, *context, new_name))
+ return; // If false, the name was not suitable for renaming.
+ llvm::StringRef old_name = decl->getName();
+ if (old_name == new_name)
+ return;
+ clang::SourceLocation loc = TargetNodeTraits<TargetNode>::GetLoc(
+ *result.Nodes.getNodeAs<TargetNode>(
+ TargetNodeTraits<TargetNode>::GetName()));
+ clang::CharSourceRange range = clang::CharSourceRange::getTokenRange(loc);
+ replacements_->emplace(*result.SourceManager, range, new_name);
+ replacement_names_.emplace(old_name.str(), std::move(new_name));
+ }
+
+ const std::unordered_map<std::string, std::string>& replacement_names()
+ const {
+ return replacement_names_;
}
private:
Replacements* const replacements_;
+ std::unordered_map<std::string, std::string> replacement_names_;
};
using FieldDeclRewriter = RewriterBase<clang::FieldDecl, clang::NamedDecl>;
using VarDeclRewriter = RewriterBase<clang::VarDecl, clang::NamedDecl>;
using MemberRewriter = RewriterBase<clang::FieldDecl, clang::MemberExpr>;
using DeclRefRewriter = RewriterBase<clang::VarDecl, clang::DeclRefExpr>;
+using FieldDeclRefRewriter = RewriterBase<clang::FieldDecl, clang::DeclRefExpr>;
using FunctionDeclRewriter =
RewriterBase<clang::FunctionDecl, clang::NamedDecl>;
using FunctionRefRewriter =
@@ -245,86 +553,23 @@ using FunctionRefRewriter =
using ConstructorInitializerRewriter =
RewriterBase<clang::FieldDecl, clang::CXXCtorInitializer>;
-// Helpers for rewriting methods. The tool needs to detect overrides of Blink
-// methods, and uses two matchers to help accomplish this goal:
-// - The first matcher matches all method declarations in Blink. When the
-// callback rewrites the declaration, it also stores a pointer to the
-// canonical declaration, to record it as a Blink method.
-// - The second matcher matches all method declarations that are overrides. When
-// the callback processes the match, it checks if its overriding a method that
-// was marked as a Blink method. If so, it rewrites the declaration.
-// - Because an override is determined based on inclusion in the set of Blink
-// methods, the overridden methods matcher does not need to filter out special
-// member functions: they get filtered out by virtue of the first matcher.
-//
-// This works because per the documentation on MatchFinder:
-// The order of matches is guaranteed to be equivalent to doing a pre-order
-// traversal on the AST, and applying the matchers in the order in which they
-// were added to the MatchFinder.
-//
-// Since classes cannot forward declare their base classes, it is guaranteed
-// that the base class methods will be seen before processing the overridden
-// methods.
-class MethodDeclRewriter
- : public RewriterBase<clang::CXXMethodDecl, clang::NamedDecl> {
- public:
- explicit MethodDeclRewriter(Replacements* replacements)
- : RewriterBase(replacements) {}
-
- void run(const MatchFinder::MatchResult& result) override {
- const clang::CXXMethodDecl* method_decl =
- result.Nodes.getNodeAs<clang::CXXMethodDecl>("decl");
- // TODO(dcheng): Does this need to check for the override attribute, or is
- // this good enough?
- if (method_decl->size_overridden_methods() > 0) {
- if (!IsBlinkOverride(method_decl))
- return;
- } else {
- blink_methods_.emplace(method_decl->getCanonicalDecl());
- }
-
- RewriterBase::run(result);
- }
-
- bool IsBlinkOverride(const clang::CXXMethodDecl* decl) const {
- assert(decl->size_overridden_methods() > 0);
- for (auto it = decl->begin_overridden_methods();
- it != decl->end_overridden_methods(); ++it) {
- if (blink_methods_.find((*it)->getCanonicalDecl()) !=
- blink_methods_.end())
- return true;
- }
- return false;
- }
+using MethodDeclRewriter = RewriterBase<clang::CXXMethodDecl, clang::NamedDecl>;
+using MethodRefRewriter =
+ RewriterBase<clang::CXXMethodDecl, clang::DeclRefExpr>;
+using MethodMemberRewriter =
+ RewriterBase<clang::CXXMethodDecl, clang::MemberExpr>;
- private:
- std::unordered_set<const clang::CXXMethodDecl*> blink_methods_;
-};
-
-template <typename Base>
-class FilteringMethodRewriter : public Base {
- public:
- FilteringMethodRewriter(const MethodDeclRewriter& decl_rewriter,
- Replacements* replacements)
- : Base(replacements), decl_rewriter_(decl_rewriter) {}
-
- void run(const MatchFinder::MatchResult& result) override {
- const clang::CXXMethodDecl* method_decl =
- result.Nodes.getNodeAs<clang::CXXMethodDecl>("decl");
- if (method_decl->size_overridden_methods() > 0 &&
- !decl_rewriter_.IsBlinkOverride(method_decl))
- return;
- Base::run(result);
- }
+using EnumConstantDeclRewriter =
+ RewriterBase<clang::EnumConstantDecl, clang::NamedDecl>;
+using EnumConstantDeclRefRewriter =
+ RewriterBase<clang::EnumConstantDecl, clang::DeclRefExpr>;
- private:
- const MethodDeclRewriter& decl_rewriter_;
-};
+using UnresolvedLookupRewriter =
+ RewriterBase<clang::NamedDecl, clang::UnresolvedLookupExpr>;
+using UnresolvedMemberRewriter =
+ RewriterBase<clang::NamedDecl, clang::UnresolvedMemberExpr>;
-using MethodRefRewriter = FilteringMethodRewriter<
- RewriterBase<clang::CXXMethodDecl, clang::DeclRefExpr>>;
-using MethodMemberRewriter = FilteringMethodRewriter<
- RewriterBase<clang::CXXMethodDecl, clang::MemberExpr>>;
+using UsingDeclRewriter = RewriterBase<clang::UsingDecl, clang::NamedDecl>;
} // namespace
@@ -345,17 +590,22 @@ int main(int argc, const char* argv[]) {
Replacements replacements;
auto in_blink_namespace =
- decl(hasAncestor(namespaceDecl(anyOf(hasName("blink"), hasName("WTF")))));
+ decl(hasAncestor(namespaceDecl(anyOf(hasName("blink"), hasName("WTF")),
+ hasParent(translationUnitDecl()))),
+ unless(isExpansionInFileMatching(kGeneratedFileRegex)));
- // Field and variable declarations ========
+ // Field, variable, and enum declarations ========
// Given
// int x;
// struct S {
// int y;
+ // enum { VALUE };
// };
- // matches |x| and |y|.
+ // matches |x|, |y|, and |VALUE|.
auto field_decl_matcher = id("decl", fieldDecl(in_blink_namespace));
auto var_decl_matcher = id("decl", varDecl(in_blink_namespace));
+ auto enum_member_decl_matcher =
+ id("decl", enumConstantDecl(in_blink_namespace));
FieldDeclRewriter field_decl_rewriter(&replacements);
match_finder.addMatcher(field_decl_matcher, &field_decl_rewriter);
@@ -363,15 +613,29 @@ int main(int argc, const char* argv[]) {
VarDeclRewriter var_decl_rewriter(&replacements);
match_finder.addMatcher(var_decl_matcher, &var_decl_rewriter);
- // Field and variable references ========
+ EnumConstantDeclRewriter enum_member_decl_rewriter(&replacements);
+ match_finder.addMatcher(enum_member_decl_matcher, &enum_member_decl_rewriter);
+
+ // Field, variable, and enum references ========
// Given
// bool x = true;
// if (x) {
// ...
// }
// matches |x| in if (x).
- auto member_matcher = id("expr", memberExpr(member(field_decl_matcher)));
+ auto member_matcher = id(
+ "expr",
+ memberExpr(
+ member(field_decl_matcher),
+ // Needed to avoid matching member references in functions (which will
+ // be an ancestor of the member reference) synthesized by the
+ // compiler, such as a synthesized copy constructor.
+ // This skips explicitly defaulted functions as well, but that's OK:
+ // there's nothing interesting to rewrite in those either.
+ unless(hasAncestor(functionDecl(isDefaulted())))));
auto decl_ref_matcher = id("expr", declRefExpr(to(var_decl_matcher)));
+ auto enum_member_ref_matcher =
+ id("expr", declRefExpr(to(enum_member_decl_matcher)));
MemberRewriter member_rewriter(&replacements);
match_finder.addMatcher(member_matcher, &member_rewriter);
@@ -379,6 +643,22 @@ int main(int argc, const char* argv[]) {
DeclRefRewriter decl_ref_rewriter(&replacements);
match_finder.addMatcher(decl_ref_matcher, &decl_ref_rewriter);
+ EnumConstantDeclRefRewriter enum_member_ref_rewriter(&replacements);
+ match_finder.addMatcher(enum_member_ref_matcher, &enum_member_ref_rewriter);
+
+ // Member references in a non-member context ========
+ // Given
+ // struct S {
+ // typedef int U::*UnspecifiedBoolType;
+ // operator UnspecifiedBoolType() { return s_ ? &U::s_ : 0; }
+ // int s_;
+ // };
+ // matches |&U::s_| but not |s_|.
+ auto member_ref_matcher = id("expr", declRefExpr(to(field_decl_matcher)));
+
+ FieldDeclRefRewriter member_ref_rewriter(&replacements);
+ match_finder.addMatcher(member_ref_matcher, &member_ref_rewriter);
+
// Non-method function declarations ========
// Given
// void f();
@@ -386,8 +666,19 @@ int main(int argc, const char* argv[]) {
// void g();
// };
// matches |f| but not |g|.
- auto function_decl_matcher =
- id("decl", functionDecl(unless(cxxMethodDecl()), in_blink_namespace));
+ auto function_decl_matcher = id(
+ "decl",
+ functionDecl(
+ unless(anyOf(
+ // Methods are covered by the method matchers.
+ cxxMethodDecl(),
+ // Out-of-line overloaded operators have special names and should
+ // never be renamed.
+ isOverloadedOperator(),
+ // Must be checked after filtering out overloaded operators to
+ // prevent asserts about the identifier not being a simple name.
+ isBlacklistedFunction())),
+ in_blink_namespace));
FunctionDeclRewriter function_decl_rewriter(&replacements);
match_finder.addMatcher(function_decl_matcher, &function_decl_rewriter);
@@ -396,8 +687,10 @@ int main(int argc, const char* argv[]) {
// f();
// void (*p)() = &f;
// matches |f()| and |&f|.
- auto function_ref_matcher =
- id("expr", declRefExpr(to(function_decl_matcher)));
+ auto function_ref_matcher = id(
+ "expr", declRefExpr(to(function_decl_matcher),
+ // Ignore template substitutions.
+ unless(hasAncestor(substNonTypeTemplateParmExpr()))));
FunctionRefRewriter function_ref_rewriter(&replacements);
match_finder.addMatcher(function_ref_matcher, &function_ref_rewriter);
@@ -407,31 +700,29 @@ int main(int argc, const char* argv[]) {
// void g();
// };
// matches |g|.
- //
- // Note: the AST matchers don't provide a good way to match against an
- // override from a given base class. Instead, the rewriter uses two matchers:
- // one that matches all method declarations in the Blink namespace, and
- // another which matches all overridden methods not in the Blink namespace.
- // The second list is filtered against the first list to determine which
- // methods are inherited from Blink classes and need to be rewritten.
- auto blink_method_decl_matcher =
- id("decl", cxxMethodDecl(unless(anyOf(
- // Overloaded operators have special names
- // and should never be renamed.
- isOverloadedOperator(),
- // Similarly, constructors and destructors
- // should not be considered for renaming.
- cxxConstructorDecl(), cxxDestructorDecl())),
- in_blink_namespace));
- // Note that the matcher for overridden methods doesn't need to filter for
- // special member functions: see implementation of FunctionDeclRewriter for
- // the full explanation.
- auto non_blink_overridden_method_decl_matcher =
- id("decl", cxxMethodDecl(isOverride(), unless(in_blink_namespace)));
+ // For a method to be considered for rewrite, it must not override something
+ // that we're not rewriting. Any methods that we would not normally consider
+ // but that override something we are rewriting should also be rewritten. So
+ // we use includeAllOverriddenMethods() to check these rules not just for the
+ // method being matched but for the methods it overrides also.
+ auto is_blink_method = includeAllOverriddenMethods(
+ allOf(in_blink_namespace, unless(isBlacklistedMethod())));
+ auto method_decl_matcher = id(
+ "decl",
+ cxxMethodDecl(
+ unless(anyOf(
+ // Overloaded operators have special names and should never be
+ // renamed.
+ isOverloadedOperator(),
+ // Similarly, constructors, destructors, and conversion
+ // functions should not be considered for renaming.
+ cxxConstructorDecl(), cxxDestructorDecl(), cxxConversionDecl())),
+ // Check this last after excluding things, to avoid
+ // asserts about overriding non-blink and blink for the
+ // same method.
+ is_blink_method));
MethodDeclRewriter method_decl_rewriter(&replacements);
- match_finder.addMatcher(blink_method_decl_matcher, &method_decl_rewriter);
- match_finder.addMatcher(non_blink_overridden_method_decl_matcher,
- &method_decl_rewriter);
+ match_finder.addMatcher(method_decl_matcher, &method_decl_rewriter);
// Method references in a non-member context ========
// Given
@@ -439,15 +730,13 @@ int main(int argc, const char* argv[]) {
// s.g();
// void (S::*p)() = &S::g;
// matches |&S::g| but not |s.g()|.
- auto blink_method_ref_matcher =
- id("expr", declRefExpr(to(blink_method_decl_matcher)));
- auto non_blink_overridden_method_ref_matcher =
- id("expr", declRefExpr(to(non_blink_overridden_method_decl_matcher)));
+ auto method_ref_matcher = id(
+ "expr", declRefExpr(to(method_decl_matcher),
+ // Ignore template substitutions.
+ unless(hasAncestor(substNonTypeTemplateParmExpr()))));
- MethodRefRewriter method_ref_rewriter(method_decl_rewriter, &replacements);
- match_finder.addMatcher(blink_method_ref_matcher, &method_ref_rewriter);
- match_finder.addMatcher(non_blink_overridden_method_ref_matcher,
- &method_ref_rewriter);
+ MethodRefRewriter method_ref_rewriter(&replacements);
+ match_finder.addMatcher(method_ref_matcher, &method_ref_rewriter);
// Method references in a member context ========
// Given
@@ -455,16 +744,11 @@ int main(int argc, const char* argv[]) {
// s.g();
// void (S::*p)() = &S::g;
// matches |s.g()| but not |&S::g|.
- auto blink_method_member_matcher =
- id("expr", memberExpr(member(blink_method_decl_matcher)));
- auto non_blink_overridden_method_member_matcher =
- id("expr", memberExpr(member(non_blink_overridden_method_decl_matcher)));
+ auto method_member_matcher =
+ id("expr", memberExpr(member(method_decl_matcher)));
- MethodMemberRewriter method_member_rewriter(method_decl_rewriter,
- &replacements);
- match_finder.addMatcher(blink_method_member_matcher, &method_member_rewriter);
- match_finder.addMatcher(non_blink_overridden_method_member_matcher,
- &method_member_rewriter);
+ MethodMemberRewriter method_member_rewriter(&replacements);
+ match_finder.addMatcher(method_member_matcher, &method_member_rewriter);
// Initializers ========
// Given
@@ -474,20 +758,127 @@ int main(int argc, const char* argv[]) {
// };
// matches each initializer in the constructor for S.
auto constructor_initializer_matcher =
- cxxConstructorDecl(forEachConstructorInitializer(
- id("initializer", cxxCtorInitializer(forField(field_decl_matcher)))));
+ cxxConstructorDecl(forEachConstructorInitializer(id(
+ "initializer",
+ cxxCtorInitializer(forAnyField(field_decl_matcher), isWritten()))));
ConstructorInitializerRewriter constructor_initializer_rewriter(
&replacements);
match_finder.addMatcher(constructor_initializer_matcher,
&constructor_initializer_rewriter);
+ // Unresolved lookup expressions ========
+ // Given
+ // template<typename T> void F(T) { }
+ // template<void G(T)> H(T) { }
+ // H<F<int>>(...);
+ // matches |F| in |H<F<int>>|.
+ //
+ // UnresolvedLookupExprs are similar to DeclRefExprs that reference a
+ // FunctionDecl, but are used when a candidate FunctionDecl can't be selected.
+ // This commonly happens inside uninstantiated template definitions for one of
+ // two reasons:
+ //
+ // 1. If the candidate declaration is a dependent FunctionTemplateDecl, the
+ // actual overload can't be selected until template instantiation time.
+ // 2. Alternatively, there might be multiple declarations in the candidate set
+ // if the candidate function has overloads. If any of the function
+ // arguments has a dependent type, then the actual overload can't be
+ // selected until instantiation time either.
+ //
+ // Another instance where UnresolvedLookupExprs can appear is in a template
+ // argument list, like the provided example.
+ auto function_template_decl_matcher =
+ id("decl", functionTemplateDecl(templatedDecl(function_decl_matcher)));
+ auto method_template_decl_matcher =
+ id("decl", functionTemplateDecl(templatedDecl(method_decl_matcher)));
+ auto unresolved_lookup_matcher = expr(id(
+ "expr",
+ unresolvedLookupExpr(
+ // In order to automatically rename an unresolved lookup, the lookup
+ // candidates must either all be Blink functions/function templates or
+ // all be Blink methods/method templates. Otherwise, we might end up
+ // in a situation where the naming could change depending on the
+ // selected candidate.
+ anyOf(allOverloadsMatch(anyOf(function_decl_matcher,
+ function_template_decl_matcher)),
+ // Note: this matches references to methods in a non-member
+ // context, e.g. Template<&Class::Method>. This and the
+ // UnresolvedMemberExpr matcher below are analogous to how the
+ // rewriter has both a MemberRefRewriter matcher to rewrite
+ // &T::method and a MethodMemberRewriter matcher to rewriter
+ // t.method().
+ allOverloadsMatch(anyOf(method_decl_matcher,
+ method_template_decl_matcher))))));
+ UnresolvedLookupRewriter unresolved_lookup_rewriter(&replacements);
+ match_finder.addMatcher(unresolved_lookup_matcher,
+ &unresolved_lookup_rewriter);
+
+ // Unresolved member expressions ========
+ // Similar to unresolved lookup expressions, but for methods in a member
+ // context, e.g. var_with_templated_type.Method().
+ auto unresolved_member_matcher = expr(id(
+ "expr",
+ unresolvedMemberExpr(
+ // Similar to UnresolvedLookupExprs, all the candidate methods must be
+ // Blink methods/method templates.
+ allOverloadsMatch(
+ anyOf(method_decl_matcher, method_template_decl_matcher)))));
+ UnresolvedMemberRewriter unresolved_member_rewriter(&replacements);
+ match_finder.addMatcher(unresolved_member_matcher,
+ &unresolved_member_rewriter);
+
+ // Using declarations ========
+ // Given
+ // using blink::X;
+ // matches |using blink::X|.
+ auto using_decl_matcher = id(
+ "decl", usingDecl(hasAnyUsingShadowDecl(hasTargetDecl(anyOf(
+ var_decl_matcher, field_decl_matcher, function_decl_matcher,
+ method_decl_matcher, function_template_decl_matcher,
+ method_template_decl_matcher, enum_member_decl_matcher)))));
+ UsingDeclRewriter using_decl_rewriter(&replacements);
+ match_finder.addMatcher(using_decl_matcher, &using_decl_rewriter);
+
std::unique_ptr<clang::tooling::FrontendActionFactory> factory =
clang::tooling::newFrontendActionFactory(&match_finder);
int result = tool.run(factory.get());
if (result != 0)
return result;
+#if defined(_WIN32)
+ HANDLE lockfd = CreateFile("rewrite-sym.lock", GENERIC_READ, FILE_SHARE_READ,
+ NULL, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL);
+ OVERLAPPED overlapped = {};
+ LockFileEx(lockfd, LOCKFILE_EXCLUSIVE_LOCK, 0, 1, 0, &overlapped);
+#else
+ int lockfd = open("rewrite-sym.lock", O_RDWR | O_CREAT, 0666);
+ while (flock(lockfd, LOCK_EX)) { // :D
+ }
+#endif
+
+ std::ofstream replacement_db_file("rewrite-sym.txt",
+ std::ios_base::out | std::ios_base::app);
+ for (const auto& p : field_decl_rewriter.replacement_names())
+ replacement_db_file << "var:" << p.first << ":" << p.second << "\n";
+ for (const auto& p : var_decl_rewriter.replacement_names())
+ replacement_db_file << "var:" << p.first << ":" << p.second << "\n";
+ for (const auto& p : enum_member_decl_rewriter.replacement_names())
+ replacement_db_file << "enu:" << p.first << ":" << p.second << "\n";
+ for (const auto& p : function_decl_rewriter.replacement_names())
+ replacement_db_file << "fun:" << p.first << ":" << p.second << "\n";
+ for (const auto& p : method_decl_rewriter.replacement_names())
+ replacement_db_file << "fun:" << p.first << ":" << p.second << "\n";
+ replacement_db_file.close();
+
+#if defined(_WIN32)
+ UnlockFileEx(lockfd, 0, 1, 0, &overlapped);
+ CloseHandle(lockfd);
+#else
+ flock(lockfd, LOCK_UN);
+ close(lockfd);
+#endif
+
// Serialization format is documented in tools/clang/scripts/run_tool.py
llvm::outs() << "==== BEGIN EDITS ====\n";
for (const auto& r : replacements) {
diff --git a/chromium/tools/clang/scripts/build_file.py b/chromium/tools/clang/scripts/build_file.py
new file mode 100755
index 00000000000..85f872312dc
--- /dev/null
+++ b/chromium/tools/clang/scripts/build_file.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import json
+import os
+import re
+import shlex
+import sys
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+tool_dir = os.path.abspath(os.path.join(script_dir, '../pylib'))
+sys.path.insert(0, tool_dir)
+
+from clang import compile_db
+
+_PROBABLY_CLANG_RE = re.compile(r'clang(?:\+\+)?$')
+
+
+def ParseArgs():
+ parser = argparse.ArgumentParser(
+ description='Utility to build one Chromium file for debugging clang')
+ parser.add_argument('-p', default='.', help='path to the compile database')
+ parser.add_argument('--generate-compdb',
+ help='regenerate the compile database')
+ parser.add_argument('--prefix',
+ help='optional prefix to prepend, e.g. --prefix=lldb')
+ parser.add_argument(
+ '--compiler',
+ help='compiler to override the compiler specied in the compile db')
+ parser.add_argument('--suffix',
+ help='optional suffix to append, e.g.' +
+ ' --suffix="-Xclang -ast-dump -fsyntax-only"')
+ parser.add_argument('target_file', help='file to build')
+ return parser.parse_args()
+
+
+def BuildIt(record, prefix, compiler, suffix):
+ """Builds the file in the provided compile DB record.
+
+ Args:
+ prefix: Optional prefix to prepend to the build command.
+ compiler: Optional compiler to override the compiler specified the record.
+ suffix: Optional suffix to append to the build command.
+ """
+ raw_args = shlex.split(record['command'])
+ # The compile command might have some goop in front of it, e.g. if the build
+ # is using goma, so shift arguments off the front until raw_args[0] looks like
+ # a clang invocation.
+ while raw_args:
+ if _PROBABLY_CLANG_RE.search(raw_args[0]):
+ break
+ raw_args = raw_args[1:]
+ if not raw_args:
+ print 'error: command %s does not appear to invoke clang!' % record[
+ 'command']
+ return 2
+ args = []
+ if prefix:
+ args.extend(shlex.split(prefix))
+ if compiler:
+ raw_args[0] = compiler
+ args.extend(raw_args)
+ if suffix:
+ args.extend(shlex.split(suffix))
+ print 'Running %s' % ' '.join(args)
+ os.execv(args[0], args)
+
+
+def main():
+ args = ParseArgs()
+ os.chdir(args.p)
+ if args.generate_compdb:
+ compile_db.GenerateWithNinja('.')
+ db = compile_db.Read('.')
+ for record in db:
+ if os.path.normpath(os.path.join(args.p, record[
+ 'file'])) == args.target_file:
+ return BuildIt(record, args.prefix, args.compiler, args.suffix)
+ print 'error: could not find %s in compile DB!' % args.target_file
+ return 1
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/clang/scripts/package.py b/chromium/tools/clang/scripts/package.py
index 554bda460bc..cc4415bffa5 100755
--- a/chromium/tools/clang/scripts/package.py
+++ b/chromium/tools/clang/scripts/package.py
@@ -17,6 +17,7 @@ import tarfile
# Path constants.
THIS_DIR = os.path.dirname(__file__)
+CHROMIUM_DIR = os.path.abspath(os.path.join(THIS_DIR, '..', '..', '..'))
THIRD_PARTY_DIR = os.path.join(THIS_DIR, '..', '..', '..', 'third_party')
LLVM_DIR = os.path.join(THIRD_PARTY_DIR, 'llvm')
LLVM_BOOTSTRAP_DIR = os.path.join(THIRD_PARTY_DIR, 'llvm-bootstrap')
@@ -24,6 +25,7 @@ LLVM_BOOTSTRAP_INSTALL_DIR = os.path.join(THIRD_PARTY_DIR,
'llvm-bootstrap-install')
LLVM_BUILD_DIR = os.path.join(THIRD_PARTY_DIR, 'llvm-build')
LLVM_RELEASE_DIR = os.path.join(LLVM_BUILD_DIR, 'Release+Asserts')
+LLVM_LTO_GOLD_PLUGIN_DIR = os.path.join(THIRD_PARTY_DIR, 'llvm-lto-gold-plugin')
STAMP_FILE = os.path.join(LLVM_BUILD_DIR, 'cr_build_revision')
@@ -57,17 +59,93 @@ def PrintTarProgress(tarinfo):
return tarinfo
-def main():
- if sys.platform == 'win32':
- try:
- subprocess.check_output(['grep', '--help'], shell=True)
- except subprocess.CalledProcessError:
- print 'Add gnuwin32 to your PATH, then try again.'
- return 1
+def GetExpectedStamp():
+ rev_cmd = [sys.executable, os.path.join(THIS_DIR, 'update.py'),
+ '--print-revision']
+ return subprocess.check_output(rev_cmd).rstrip()
+
+
+def GetGsutilPath():
+ if not 'find_depot_tools' in sys.modules:
+ sys.path.insert(0, os.path.join(CHROMIUM_DIR, 'build'))
+ global find_depot_tools
+ import find_depot_tools
+ depot_path = find_depot_tools.add_depot_tools_to_path()
+ if depot_path is None:
+ print ('depot_tools are not found in PATH. '
+ 'Follow the instructions in this document '
+ 'http://dev.chromium.org/developers/how-tos/install-depot-tools'
+ ' to install depot_tools and then try again.')
+ sys.exit(1)
+ gsutil_path = os.path.join(depot_path, 'gsutil.py')
+ return gsutil_path
+
+
+def RunGsutil(args):
+ return subprocess.call([sys.executable, GetGsutilPath()] + args)
+
+
+def GsutilArchiveExists(archive_name, platform):
+ gsutil_args = ['-q', 'stat',
+ 'gs://chromium-browser-clang/%s/%s.tgz' %
+ (platform, archive_name)]
+ return RunGsutil(gsutil_args) == 0
+
+def MaybeUpload(args, archive_name, platform):
+ # We don't want to rewrite the file, if it already exists on the server,
+ # so -n option to gsutil is used. It will warn, if the upload was aborted.
+ gsutil_args = ['cp', '-n', '-a', 'public-read',
+ '%s.tgz' % archive_name,
+ 'gs://chromium-browser-clang/%s/%s.tgz' %
+ (platform, archive_name)]
+ if args.upload:
+ print 'Uploading %s to Google Cloud Storage...' % archive_name
+ exit_code = RunGsutil(gsutil_args)
+ if exit_code != 0:
+ print "gsutil failed, exit_code: %s" % exit_code
+ os.exit(exit_code)
+ else:
+ print 'To upload, run:'
+ print ('gsutil %s' % ' '.join(gsutil_args))
+
+
+def main():
parser = argparse.ArgumentParser(description='build and package clang')
+ parser.add_argument('--upload', action='store_true',
+ help='Upload the target archive to Google Cloud Storage.')
args = parser.parse_args()
+ # Check that the script is not going to upload a toolchain built from HEAD.
+ use_head_revision = 'LLVM_FORCE_HEAD_REVISION' in os.environ
+ if args.upload and use_head_revision:
+ print ("--upload and LLVM_FORCE_HEAD_REVISION could not be used "
+ "at the same time.")
+ return 1
+
+ expected_stamp = GetExpectedStamp()
+ pdir = 'clang-' + expected_stamp
+ golddir = 'llvmgold-' + expected_stamp
+ print pdir
+
+ if sys.platform == 'darwin':
+ platform = 'Mac'
+ elif sys.platform == 'win32':
+ platform = 'Win'
+ else:
+ platform = 'Linux_x64'
+
+ # Check if Google Cloud Storage already has the artifacts we want to build.
+ if (args.upload and GsutilArchiveExists(pdir, platform) and
+ not sys.platform.startswith('linux') or
+ GsutilArchiveExists(golddir, platform)):
+ print ('Desired toolchain revision %s is already available '
+ 'in Google Cloud Storage:') % expected_stamp
+ print 'gs://chromium-browser-clang/%s/%s.tgz' % (platform, pdir)
+ if sys.platform.startswith('linux'):
+ print 'gs://chromium-browser-clang/%s/%s.tgz' % (platform, golddir)
+ return 0
+
with open('buildlog.txt', 'w') as log:
Tee('Diff in llvm:\n', log)
TeeCmd(['svn', 'stat', LLVM_DIR], log, fail_hard=False)
@@ -90,11 +168,6 @@ def main():
log, fail_hard=False)
TeeCmd(['svn', 'diff', os.path.join(LLVM_DIR, 'projects', 'libcxx')],
log, fail_hard=False)
- Tee('Diff in llvm/projects/libcxxabi:\n', log)
- TeeCmd(['svn', 'stat', os.path.join(LLVM_DIR, 'projects', 'libcxxabi')],
- log, fail_hard=False)
- TeeCmd(['svn', 'diff', os.path.join(LLVM_DIR, 'projects', 'libcxxabi')],
- log, fail_hard=False)
Tee('Starting build\n', log)
@@ -103,13 +176,19 @@ def main():
shutil.rmtree(LLVM_BOOTSTRAP_INSTALL_DIR, ignore_errors=True)
shutil.rmtree(LLVM_BUILD_DIR, ignore_errors=True)
+ opt_flags = []
+ if sys.platform.startswith('linux'):
+ opt_flags += ['--lto-gold-plugin']
build_cmd = [sys.executable, os.path.join(THIS_DIR, 'update.py'),
- '--bootstrap', '--force-local-build', '--run-tests']
+ '--bootstrap', '--force-local-build',
+ '--run-tests'] + opt_flags
TeeCmd(build_cmd, log)
stamp = open(STAMP_FILE).read().rstrip()
- pdir = 'clang-' + stamp
- print pdir
+ if stamp != expected_stamp:
+ print 'Actual stamp (%s) != expected stamp (%s).' % (stamp, expected_stamp)
+ return 1
+
shutil.rmtree(pdir, ignore_errors=True)
# Copy a whitelist of files to the directory we're going to tar up.
@@ -131,15 +210,14 @@ def main():
'lib/libBlinkGCPlugin.' + so_ext,
])
if sys.platform == 'darwin':
- want.extend(['bin/libc++.1.dylib',
- # Copy only the OSX (ASan and profile) and iossim (ASan)
+ want.extend([# Copy only the OSX (ASan and profile) and iossim (ASan)
# runtime libraries:
'lib/clang/*/lib/darwin/*asan_osx*',
'lib/clang/*/lib/darwin/*asan_iossim*',
'lib/clang/*/lib/darwin/*profile_osx*',
])
elif sys.platform.startswith('linux'):
- # Copy the stdlibc++.so.6 we linked Clang against so it can run.
+ # Copy the libstdc++.so.6 we linked Clang against so it can run.
want.append('lib/libstdc++.so.6')
# Copy only
# lib/clang/*/lib/linux/libclang_rt.{[atm]san,san,ubsan,profile}-*.a ,
@@ -180,9 +258,9 @@ def main():
if sys.platform != 'win32':
os.symlink('clang', os.path.join(pdir, 'bin', 'clang++'))
os.symlink('clang', os.path.join(pdir, 'bin', 'clang-cl'))
+
+ # Copy libc++ headers.
if sys.platform == 'darwin':
- os.symlink('libc++.1.dylib', os.path.join(pdir, 'bin', 'libc++.dylib'))
- # Also copy libc++ headers.
shutil.copytree(os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR, 'include', 'c++'),
os.path.join(pdir, 'include', 'c++'))
@@ -197,30 +275,18 @@ def main():
for entry in tar_entries:
tar.add(os.path.join(pdir, entry), arcname=entry, filter=PrintTarProgress)
- if sys.platform == 'darwin':
- platform = 'Mac'
- elif sys.platform == 'win32':
- platform = 'Win'
- else:
- platform = 'Linux_x64'
-
- print 'To upload, run:'
- print ('gsutil cp -a public-read %s.tgz '
- 'gs://chromium-browser-clang/%s/%s.tgz') % (pdir, platform, pdir)
+ MaybeUpload(args, pdir, platform)
# Zip up gold plugin on Linux.
if sys.platform.startswith('linux'):
- golddir = 'llvmgold-' + stamp
shutil.rmtree(golddir, ignore_errors=True)
os.makedirs(os.path.join(golddir, 'lib'))
- shutil.copy(os.path.join(LLVM_RELEASE_DIR, 'lib', 'LLVMgold.so'),
+ shutil.copy(os.path.join(LLVM_LTO_GOLD_PLUGIN_DIR, 'lib', 'LLVMgold.so'),
os.path.join(golddir, 'lib'))
with tarfile.open(golddir + '.tgz', 'w:gz') as tar:
tar.add(os.path.join(golddir, 'lib'), arcname='lib',
filter=PrintTarProgress)
- print ('gsutil cp -a public-read %s.tgz '
- 'gs://chromium-browser-clang/%s/%s.tgz') % (golddir, platform,
- golddir)
+ MaybeUpload(args, golddir, platform)
# FIXME: Warn if the file already exists on the server.
diff --git a/chromium/tools/clang/scripts/run_tool.py b/chromium/tools/clang/scripts/run_tool.py
index a1d29d09fb8..68f12e98039 100755
--- a/chromium/tools/clang/scripts/run_tool.py
+++ b/chromium/tools/clang/scripts/run_tool.py
@@ -14,7 +14,7 @@ run_tool.py <tool> <path/to/compiledb> --all
If you only want to run the tool across just chrome/browser and content/browser:
run_tool.py <tool> <path/to/compiledb> chrome/browser content/browser
-Please see https://code.google.com/p/chromium/wiki/ClangToolRefactoring for more
+Please see https://chromium.googlesource.com/chromium/src/+/master/docs/clang_tool_refactoring.md for more
information, which documents the entire automated refactoring flow in Chromium.
Why use this tool:
@@ -40,31 +40,20 @@ across Chromium, regardless of whether some instances failed or not.
import argparse
import collections
import functools
-import json
import multiprocessing
+import os
import os.path
import subprocess
import sys
-Edit = collections.namedtuple('Edit',
- ('edit_type', 'offset', 'length', 'replacement'))
-
+script_dir = os.path.dirname(os.path.realpath(__file__))
+tool_dir = os.path.abspath(os.path.join(script_dir, '../pylib'))
+sys.path.insert(0, tool_dir)
-def _GenerateCompileDatabase(path):
- """Generates a compile database.
+from clang import compile_db
- Note: requires ninja.
-
- Args:
- path: The build directory to generate a compile database for.
- """
- # TODO(dcheng): Incorporate Windows-specific compile DB munging from
- # https://codereview.chromium.org/718873004
- print 'Generating compile database in %s...' % path
- args = ['ninja', '-C', path, '-t', 'compdb', 'cc', 'cxx', 'objc', 'objcxx']
- output = subprocess.check_output(args)
- with file(os.path.join(path, 'compile_commands.json'), 'w') as f:
- f.write(output)
+Edit = collections.namedtuple('Edit',
+ ('edit_type', 'offset', 'length', 'replacement'))
def _GetFilesFromGit(paths=None):
@@ -92,12 +81,8 @@ def _GetFilesFromCompileDB(build_directory):
Args:
build_directory: Directory that contains the compile database.
"""
- compiledb_path = os.path.join(build_directory, 'compile_commands.json')
- with open(compiledb_path, 'rb') as compiledb_file:
- json_commands = json.load(compiledb_file)
-
return [os.path.join(entry['directory'], entry['file'])
- for entry in json_commands]
+ for entry in compile_db.Read(build_directory)]
def _ExtractEditsFromStdout(build_directory, stdout):
@@ -316,8 +301,15 @@ def main():
help='optional paths to filter what files the tool is run on')
args = parser.parse_args()
+ os.environ['PATH'] = '%s%s%s' % (
+ os.path.abspath(os.path.join(
+ os.path.dirname(__file__),
+ '../../../third_party/llvm-build/Release+Asserts/bin')),
+ os.pathsep,
+ os.environ['PATH'])
+
if args.generate_compdb:
- _GenerateCompileDatabase(args.compile_database)
+ compile_db.GenerateWithNinja(args.compile_database)
if args.all:
filenames = set(_GetFilesFromCompileDB(args.compile_database))
diff --git a/chromium/tools/clang/scripts/test_tool.py b/chromium/tools/clang/scripts/test_tool.py
index 986216c33de..728db477c15 100755
--- a/chromium/tools/clang/scripts/test_tool.py
+++ b/chromium/tools/clang/scripts/test_tool.py
@@ -10,15 +10,27 @@ import glob
import json
import os
import os.path
-import subprocess
import shutil
+import subprocess
import sys
+def _RunGit(args):
+ if sys.platform == 'win32':
+ args = ['git.bat'] + args
+ else:
+ args = ['git'] + args
+ subprocess.check_call(args)
+
+
def _GenerateCompileCommands(files, include_paths):
"""Returns a JSON string containing a compilation database for the input."""
- include_path_flags = ' '.join('-I %s' % include_path
- for include_path in include_paths)
+ # Note: in theory, backslashes in the compile DB should work but the tools
+ # that write compile DBs and the tools that read them don't agree on the
+ # escaping convention: https://llvm.org/bugs/show_bug.cgi?id=19687
+ files = [f.replace('\\', '/') for f in files]
+ include_path_flags = ' '.join('-I %s' % include_path.replace('\\', '/')
+ for include_path in include_paths)
return json.dumps([{'directory': '.',
'command': 'clang++ -std=c++11 -fsyntax-only %s -c %s' % (
include_path_flags, f),
@@ -27,7 +39,7 @@ def _GenerateCompileCommands(files, include_paths):
def _NumberOfTestsToString(tests):
"""Returns an English describing the number of tests."""
- return "%d test%s" % (tests, 's' if tests != 1 else '')
+ return '%d test%s' % (tests, 's' if tests != 1 else '')
def main(argv):
@@ -65,9 +77,9 @@ def main(argv):
shutil.copyfile(source, actual)
# Stage the test files in the git index. If they aren't staged, then
# run_tools.py will skip them when applying replacements.
- args = ['git', 'add']
+ args = ['add']
args.extend(actual_files)
- subprocess.check_call(args)
+ _RunGit(args)
# Generate a temporary compilation database to run the tool over.
with open(compile_database, 'w') as f:
f.write(_GenerateCompileCommands(actual_files, include_paths))
@@ -83,9 +95,9 @@ def main(argv):
print 'run_tool failed:\n%s' % stdout
sys.exit(1)
- args = ['git', 'cl', 'format']
+ args = ['cl', 'format']
args.extend(actual_files)
- subprocess.check_call(args)
+ _RunGit(args)
passed = 0
failed = 0
@@ -121,9 +133,9 @@ def main(argv):
finally:
# No matter what, unstage the git changes we made earlier to avoid polluting
# the index.
- args = ['git', 'reset', '--quiet', 'HEAD']
+ args = ['reset', '--quiet', 'HEAD']
args.extend(actual_files)
- subprocess.call(args)
+ _RunGit(args)
if __name__ == '__main__':
diff --git a/chromium/tools/clang/scripts/update.py b/chromium/tools/clang/scripts/update.py
index ef6ca07af8c..0abb42af058 100755
--- a/chromium/tools/clang/scripts/update.py
+++ b/chromium/tools/clang/scripts/update.py
@@ -24,9 +24,9 @@ import urllib2
import zipfile
# Do NOT CHANGE this if you don't know what you're doing -- see
-# https://code.google.com/p/chromium/wiki/UpdatingClang
+# https://chromium.googlesource.com/chromium/src/+/master/docs/updating_clang.md
# Reverting problematic clang rolls is safe, though.
-CLANG_REVISION = '255169'
+CLANG_REVISION = '264915'
use_head_revision = 'LLVM_FORCE_HEAD_REVISION' in os.environ
if use_head_revision:
@@ -45,6 +45,7 @@ LLVM_DIR = os.path.join(THIRD_PARTY_DIR, 'llvm')
LLVM_BOOTSTRAP_DIR = os.path.join(THIRD_PARTY_DIR, 'llvm-bootstrap')
LLVM_BOOTSTRAP_INSTALL_DIR = os.path.join(THIRD_PARTY_DIR,
'llvm-bootstrap-install')
+LLVM_LTO_GOLD_PLUGIN_DIR = os.path.join(THIRD_PARTY_DIR, 'llvm-lto-gold-plugin')
CHROME_TOOLS_SHIM_DIR = os.path.join(LLVM_DIR, 'tools', 'chrometools')
LLVM_BUILD_DIR = os.path.join(CHROMIUM_DIR, 'third_party', 'llvm-build',
'Release+Asserts')
@@ -65,12 +66,17 @@ LLVM_BUILD_TOOLS_DIR = os.path.abspath(
STAMP_FILE = os.path.normpath(
os.path.join(LLVM_DIR, '..', 'llvm-build', 'cr_build_revision'))
BINUTILS_DIR = os.path.join(THIRD_PARTY_DIR, 'binutils')
-VERSION = '3.8.0'
+BINUTILS_BIN_DIR = os.path.join(BINUTILS_DIR, BINUTILS_DIR,
+ 'Linux_x64', 'Release', 'bin')
+BFD_PLUGINS_DIR = os.path.join(BINUTILS_DIR, 'Linux_x64', 'Release',
+ 'lib', 'bfd-plugins')
+VERSION = '3.9.0'
ANDROID_NDK_DIR = os.path.join(
CHROMIUM_DIR, 'third_party', 'android_tools', 'ndk')
# URL for pre-built binaries.
-CDS_URL = 'https://commondatastorage.googleapis.com/chromium-browser-clang'
+CDS_URL = os.environ.get('CDS_CLANG_BUCKET_OVERRIDE',
+ 'https://commondatastorage.googleapis.com/chromium-browser-clang')
LLVM_REPO_URL='https://llvm.org/svn/llvm-project'
if 'LLVM_REPO_URL' in os.environ:
@@ -135,19 +141,19 @@ def DownloadAndUnpack(url, output_dir):
tarfile.open(mode='r:gz', fileobj=f).extractall(path=output_dir)
-def ReadStampFile():
+def ReadStampFile(path=STAMP_FILE):
"""Return the contents of the stamp file, or '' if it doesn't exist."""
try:
- with open(STAMP_FILE, 'r') as f:
+ with open(path, 'r') as f:
return f.read().rstrip()
except IOError:
return ''
-def WriteStampFile(s):
+def WriteStampFile(s, path=STAMP_FILE):
"""Write s to the stamp file."""
- EnsureDirExists(os.path.dirname(STAMP_FILE))
- with open(STAMP_FILE, 'w') as f:
+ EnsureDirExists(os.path.dirname(path))
+ with open(path, 'w') as f:
f.write(s)
f.write('\n')
@@ -172,10 +178,12 @@ def RmTree(dir):
def RmCmakeCache(dir):
- """Delete CMakeCache.txt files under dir recursively."""
- for dirpath, _, files in os.walk(dir):
+ """Delete CMake cache related files from dir."""
+ for dirpath, dirs, files in os.walk(dir):
if 'CMakeCache.txt' in files:
os.remove(os.path.join(dirpath, 'CMakeCache.txt'))
+ if 'CMakeFiles' in dirs:
+ RmTree(os.path.join(dirpath, 'CMakeFiles'))
def RunCommand(command, msvc_arch=None, env=None, fail_hard=True):
@@ -275,23 +283,18 @@ def CreateChromeToolsShim():
f.write('endif (CHROMIUM_TOOLS_SRC)\n')
-def MaybeDownloadHostGcc(args):
- """Downloads gcc 4.8.2 if needed and makes sure args.gcc_toolchain is set."""
+def DownloadHostGcc(args):
+ """Downloads gcc 4.8.2 and makes sure args.gcc_toolchain is set."""
if not sys.platform.startswith('linux') or args.gcc_toolchain:
return
-
- if subprocess.check_output(['gcc', '-dumpversion']).rstrip() < '4.7.0':
- # We need a newer gcc version.
- gcc_dir = os.path.join(LLVM_BUILD_TOOLS_DIR, 'gcc482precise')
- if not os.path.exists(gcc_dir):
- print 'Downloading pre-built GCC 4.8.2...'
- DownloadAndUnpack(
- CDS_URL + '/tools/gcc482precise.tgz', LLVM_BUILD_TOOLS_DIR)
- args.gcc_toolchain = gcc_dir
- else:
- # Always set gcc_toolchain; llvm-symbolizer needs the bundled libstdc++.
- args.gcc_toolchain = \
- os.path.dirname(os.path.dirname(distutils.spawn.find_executable('gcc')))
+ # Unconditionally download a prebuilt gcc to guarantee the included libstdc++
+ # works on Ubuntu Precise.
+ gcc_dir = os.path.join(LLVM_BUILD_TOOLS_DIR, 'gcc482precise')
+ if not os.path.exists(gcc_dir):
+ print 'Downloading pre-built GCC 4.8.2...'
+ DownloadAndUnpack(
+ CDS_URL + '/tools/gcc482precise.tgz', LLVM_BUILD_TOOLS_DIR)
+ args.gcc_toolchain = gcc_dir
def AddCMakeToPath():
@@ -308,6 +311,25 @@ def AddCMakeToPath():
DownloadAndUnpack(CDS_URL + '/tools/' + zip_name, LLVM_BUILD_TOOLS_DIR)
os.environ['PATH'] = cmake_dir + os.pathsep + os.environ.get('PATH', '')
+
+def AddGnuWinToPath():
+ """Download some GNU win tools and add them to PATH."""
+ if sys.platform != 'win32':
+ return
+
+ gnuwin_dir = os.path.join(LLVM_BUILD_TOOLS_DIR, 'gnuwin')
+ GNUWIN_VERSION = '1'
+ GNUWIN_STAMP = os.path.join(gnuwin_dir, 'stamp')
+ if ReadStampFile(GNUWIN_STAMP) == GNUWIN_VERSION:
+ print 'GNU Win tools already up to date.'
+ else:
+ zip_name = 'gnuwin-%s.zip' % GNUWIN_VERSION
+ DownloadAndUnpack(CDS_URL + '/tools/' + zip_name, LLVM_BUILD_TOOLS_DIR)
+ WriteStampFile(GNUWIN_VERSION, GNUWIN_STAMP)
+
+ os.environ['PATH'] = gnuwin_dir + os.pathsep + os.environ.get('PATH', '')
+
+
vs_version = None
def GetVSVersion():
global vs_version
@@ -324,7 +346,8 @@ def GetVSVersion():
# or a system-wide installation otherwise.
sys.path.append(os.path.join(CHROMIUM_DIR, 'tools', 'gyp', 'pylib'))
import gyp.MSVSVersion
- vs_version = gyp.MSVSVersion.SelectVisualStudioVersion('2013')
+ vs_version = gyp.MSVSVersion.SelectVisualStudioVersion(
+ vs_toolchain.GetVisualStudioVersion())
return vs_version
@@ -347,7 +370,7 @@ def UpdateClang(args):
if not args.force_local_build:
cds_file = "clang-%s.tgz" % PACKAGE_VERSION
- if sys.platform == 'win32':
+ if sys.platform == 'win32' or sys.platform == 'cygwin':
cds_full_url = CDS_URL + '/Win/' + cds_file
elif sys.platform == 'darwin':
cds_full_url = CDS_URL + '/Mac/' + cds_file
@@ -378,12 +401,13 @@ def UpdateClang(args):
print 'Android NDK not found at ' + ANDROID_NDK_DIR
print 'The Android NDK is needed to build a Clang whose -fsanitize=address'
print 'works on Android. See '
- print 'http://code.google.com/p/chromium/wiki/AndroidBuildInstructions'
+ print 'https://www.chromium.org/developers/how-tos/android-build-instructions'
print 'for how to install the NDK, or pass --without-android.'
return 1
- MaybeDownloadHostGcc(args)
+ DownloadHostGcc(args)
AddCMakeToPath()
+ AddGnuWinToPath()
DeleteChromeToolsShim()
@@ -396,9 +420,9 @@ def UpdateClang(args):
# clang needs a libc++ checkout, else -stdlib=libc++ won't find includes
# (i.e. this is needed for bootstrap builds).
Checkout('libcxx', LLVM_REPO_URL + '/libcxx/trunk', LIBCXX_DIR)
- # While we're bundling our own libc++ on OS X, we need to compile libc++abi
- # into it too (since OS X 10.6 doesn't have libc++abi.dylib either).
- Checkout('libcxxabi', LLVM_REPO_URL + '/libcxxabi/trunk', LIBCXXABI_DIR)
+ # We used to check out libcxxabi on OS X; we no longer need that.
+ if os.path.exists(LIBCXXABI_DIR):
+ RmTree(LIBCXXABI_DIR)
cc, cxx = None, None
libstdcpp = None
@@ -418,38 +442,29 @@ def UpdateClang(args):
[cxx, '-print-file-name=libstdc++.so.6']).rstrip()
os.environ['LD_LIBRARY_PATH'] = os.path.dirname(libstdcpp)
- cflags = cxxflags = ldflags = []
-
- # LLVM uses C++11 starting in llvm 3.5. On Linux, this means libstdc++4.7+ is
- # needed, on OS X it requires libc++. clang only automatically links to libc++
- # when targeting OS X 10.9+, so add stdlib=libc++ explicitly so clang can run
- # on OS X versions as old as 10.7.
- # TODO(thakis): Some bots are still on 10.6 (nacl...), so for now bundle
- # libc++.dylib. Remove this once all bots are on 10.7+, then use
- # -DLLVM_ENABLE_LIBCXX=ON and change deployment_target to 10.7.
- deployment_target = ''
-
- if sys.platform == 'darwin':
- # When building on 10.9, /usr/include usually doesn't exist, and while
- # Xcode's clang automatically sets a sysroot, self-built clangs don't.
- cflags = ['-isysroot', subprocess.check_output(
- ['xcrun', '--show-sdk-path']).rstrip()]
- cxxflags = ['-stdlib=libc++', '-nostdinc++',
- '-I' + os.path.join(LIBCXX_DIR, 'include')] + cflags
- if args.bootstrap:
- deployment_target = '10.6'
+ cflags = []
+ cxxflags = []
+ ldflags = []
base_cmake_args = ['-GNinja',
'-DCMAKE_BUILD_TYPE=Release',
'-DLLVM_ENABLE_ASSERTIONS=ON',
'-DLLVM_ENABLE_THREADS=OFF',
+ '-DLLVM_ENABLE_TIMESTAMPS=OFF',
+ # Statically link MSVCRT to avoid DLL dependencies.
+ '-DLLVM_USE_CRT_RELEASE=MT',
]
+ binutils_incdir = ''
+ if sys.platform.startswith('linux'):
+ binutils_incdir = os.path.join(BINUTILS_DIR, 'Linux_x64/Release/include')
+
if args.bootstrap:
print 'Building bootstrap compiler'
EnsureDirExists(LLVM_BOOTSTRAP_DIR)
os.chdir(LLVM_BOOTSTRAP_DIR)
bootstrap_args = base_cmake_args + [
+ '-DLLVM_BINUTILS_INCDIR=' + binutils_incdir,
'-DLLVM_TARGETS_TO_BUILD=host',
'-DCMAKE_INSTALL_PREFIX=' + LLVM_BOOTSTRAP_INSTALL_DIR,
'-DCMAKE_C_FLAGS=' + ' '.join(cflags),
@@ -486,56 +501,67 @@ def UpdateClang(args):
cxxflags = ['--gcc-toolchain=' + args.gcc_toolchain]
print 'Building final compiler'
- if sys.platform == 'darwin':
- # Build libc++.dylib while some bots are still on OS X 10.6.
- libcxxbuild = os.path.join(LLVM_BUILD_DIR, 'libcxxbuild')
- if os.path.isdir(libcxxbuild):
- RmTree(libcxxbuild)
- libcxxflags = ['-O3', '-std=c++11', '-fstrict-aliasing']
-
- # libcxx and libcxxabi both have a file stdexcept.cpp, so put their .o files
- # into different subdirectories.
- os.makedirs(os.path.join(libcxxbuild, 'libcxx'))
- os.chdir(os.path.join(libcxxbuild, 'libcxx'))
- RunCommand(['c++', '-c'] + cxxflags + libcxxflags +
- glob.glob(os.path.join(LIBCXX_DIR, 'src', '*.cpp')))
-
- os.makedirs(os.path.join(libcxxbuild, 'libcxxabi'))
- os.chdir(os.path.join(libcxxbuild, 'libcxxabi'))
- RunCommand(['c++', '-c'] + cxxflags + libcxxflags +
- glob.glob(os.path.join(LIBCXXABI_DIR, 'src', '*.cpp')) +
- ['-I' + os.path.join(LIBCXXABI_DIR, 'include')])
-
- os.chdir(libcxxbuild)
- libdir = os.path.join(LIBCXX_DIR, 'lib')
- RunCommand(['cc'] + glob.glob('libcxx/*.o') + glob.glob('libcxxabi/*.o') +
- ['-o', 'libc++.1.dylib', '-dynamiclib', '-nodefaultlibs',
- '-current_version', '1', '-compatibility_version', '1', '-lSystem',
- '-install_name', '@executable_path/libc++.dylib',
- '-Wl,-unexported_symbols_list,' + libdir + '/libc++unexp.exp',
- '-Wl,-force_symbols_not_weak_list,' + libdir + '/notweak.exp',
- '-Wl,-force_symbols_weak_list,' + libdir + '/weak.exp'])
- if os.path.exists('libc++.dylib'):
- os.remove('libc++.dylib')
- os.symlink('libc++.1.dylib', 'libc++.dylib')
- ldflags += ['-stdlib=libc++', '-L' + libcxxbuild]
-
- if args.bootstrap:
- # Now that the libc++ headers have been installed and libc++.dylib is
- # built, delete the libc++ checkout again so that it's not part of the
- # main build below -- the libc++(abi) tests don't pass on OS X in
- # bootstrap builds (http://llvm.org/PR24068)
- RmTree(LIBCXX_DIR)
- RmTree(LIBCXXABI_DIR)
- cxxflags = ['-stdlib=libc++', '-nostdinc++',
- '-I' + os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR,
- 'include/c++/v1')
- ] + cflags
+ # Build LLVM gold plugin with LTO. That speeds up the linker by ~10%.
+ # We only use LTO for Linux now.
+ if args.bootstrap and args.lto_gold_plugin:
+ print 'Building LTO LLVM Gold plugin'
+ if os.path.exists(LLVM_LTO_GOLD_PLUGIN_DIR):
+ RmTree(LLVM_LTO_GOLD_PLUGIN_DIR)
+ EnsureDirExists(LLVM_LTO_GOLD_PLUGIN_DIR)
+ os.chdir(LLVM_LTO_GOLD_PLUGIN_DIR)
+
+    # Create a symlink to the LLVMgold.so built in the previous step so that
+    # ar and ranlib can find it while linking LLVMgold.so with LTO.
+ EnsureDirExists(BFD_PLUGINS_DIR)
+ RunCommand(['ln', '-sf',
+ os.path.join(LLVM_BOOTSTRAP_INSTALL_DIR, 'lib', 'LLVMgold.so'),
+ os.path.join(BFD_PLUGINS_DIR, 'LLVMgold.so')])
+
+ lto_cflags = ['-flto']
+ lto_ldflags = ['-fuse-ld=gold']
+ if args.gcc_toolchain:
+ # Tell the bootstrap compiler to use a specific gcc prefix to search
+ # for standard library headers and shared object files.
+ lto_cflags += ['--gcc-toolchain=' + args.gcc_toolchain]
+ lto_cmake_args = base_cmake_args + [
+ '-DLLVM_BINUTILS_INCDIR=' + binutils_incdir,
+ '-DCMAKE_C_COMPILER=' + cc,
+ '-DCMAKE_CXX_COMPILER=' + cxx,
+ '-DCMAKE_C_FLAGS=' + ' '.join(lto_cflags),
+ '-DCMAKE_CXX_FLAGS=' + ' '.join(lto_cflags),
+ '-DCMAKE_EXE_LINKER_FLAGS=' + ' '.join(lto_ldflags),
+ '-DCMAKE_SHARED_LINKER_FLAGS=' + ' '.join(lto_ldflags),
+ '-DCMAKE_MODULE_LINKER_FLAGS=' + ' '.join(lto_ldflags)]
+
+ # We need to use the proper binutils which support LLVM Gold plugin.
+ lto_env = os.environ.copy()
+ lto_env['PATH'] = BINUTILS_BIN_DIR + os.pathsep + lto_env.get('PATH', '')
+
+ RmCmakeCache('.')
+ RunCommand(['cmake'] + lto_cmake_args + [LLVM_DIR], env=lto_env)
+ RunCommand(['ninja', 'LLVMgold'], env=lto_env)
+
+
+ # LLVM uses C++11 starting in llvm 3.5. On Linux, this means libstdc++4.7+ is
+ # needed, on OS X it requires libc++. clang only automatically links to libc++
+ # when targeting OS X 10.9+, so add stdlib=libc++ explicitly so clang can run
+ # on OS X versions as old as 10.7.
+ deployment_target = ''
+
+ if sys.platform == 'darwin' and args.bootstrap:
+ # When building on 10.9, /usr/include usually doesn't exist, and while
+ # Xcode's clang automatically sets a sysroot, self-built clangs don't.
+ cflags = ['-isysroot', subprocess.check_output(
+ ['xcrun', '--show-sdk-path']).rstrip()]
+ cxxflags = ['-stdlib=libc++'] + cflags
+ ldflags += ['-stdlib=libc++']
+ deployment_target = '10.7'
+  # Running libc++ tests takes a long time. Since it was only needed for
+  # the install step above, don't build it as part of the main build.
+  # This makes running package.py over 10% faster (30 min instead of 34 min).
+ RmTree(LIBCXX_DIR)
# Build clang.
- binutils_incdir = ''
- if sys.platform.startswith('linux'):
- binutils_incdir = os.path.join(BINUTILS_DIR, 'Linux_x64/Release/include')
# If building at head, define a macro that plugins can use for #ifdefing
# out code that builds at head, but not at CLANG_REVISION or vice versa.
@@ -590,13 +616,13 @@ def UpdateClang(args):
RunCommand(['ninja', 'cr-install'], msvc_arch='x64')
if sys.platform == 'darwin':
- CopyFile(os.path.join(libcxxbuild, 'libc++.1.dylib'),
- os.path.join(LLVM_BUILD_DIR, 'bin'))
# See http://crbug.com/256342
RunCommand(['strip', '-x', os.path.join(LLVM_BUILD_DIR, 'bin', 'clang')])
elif sys.platform.startswith('linux'):
RunCommand(['strip', os.path.join(LLVM_BUILD_DIR, 'bin', 'clang')])
+ # TODO(thakis): Check that `clang --version` matches VERSION.
+
# Do an out-of-tree build of compiler-rt.
# On Windows, this is used to get the 32-bit ASan run-time.
# TODO(hans): Remove once the regular build above produces this.
@@ -753,6 +779,8 @@ def main():
parser.add_argument('--gcc-toolchain', help='set the version for which gcc '
'version be used for building; --gcc-toolchain=/opt/foo '
'picks /opt/foo/bin/gcc')
+ parser.add_argument('--lto-gold-plugin', action='store_true',
+ help='build LLVM Gold plugin with LTO')
parser.add_argument('--print-revision', action='store_true',
help='print current clang revision and exit.')
parser.add_argument('--print-clang-version', action='store_true',
@@ -768,6 +796,13 @@ def main():
default=sys.platform.startswith('linux'))
args = parser.parse_args()
+ if args.lto_gold_plugin and not args.bootstrap:
+ print '--lto-gold-plugin requires --bootstrap'
+ return 1
+ if args.lto_gold_plugin and not sys.platform.startswith('linux'):
+ print '--lto-gold-plugin is only effective on Linux. Ignoring the option.'
+ args.lto_gold_plugin = False
+
if args.if_needed:
is_clang_required = False
# clang is always used on Mac and Linux.
@@ -787,11 +822,6 @@ def main():
print 'Skipping Clang update (make_clang_dir= was set in GYP_DEFINES).'
return 0
- if use_head_revision:
- # TODO(hans): Remove after the next roll.
- global VERSION
- VERSION = '3.9.0'
-
global CLANG_REVISION, PACKAGE_VERSION
if args.print_revision:
if use_head_revision:
diff --git a/chromium/tools/clang/scripts/upload_revision.py b/chromium/tools/clang/scripts/upload_revision.py
new file mode 100755
index 00000000000..b6331df6cdb
--- /dev/null
+++ b/chromium/tools/clang/scripts/upload_revision.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script takes a Clang revision as an argument. It then
+creates a feature branch, puts this revision into update.py, uploads
+a CL, triggers the Clang Upload try bots, and tells you what to do next."""
+
+import argparse
+import fnmatch
+import itertools
+import os
+import re
+import shutil
+import subprocess
+import sys
+
+# Path constants.
+THIS_DIR = os.path.dirname(__file__)
+UPDATE_PY_PATH = os.path.join(THIS_DIR, "update.py")
+CHROMIUM_DIR = os.path.abspath(os.path.join(THIS_DIR, '..', '..', '..'))
+
+
+def PatchRevision(clang_revision, clang_sub_revision):
+ with open(UPDATE_PY_PATH, 'r') as f:
+ content = f.read()
+ m = re.search("CLANG_REVISION = '([0-9]+)'", content)
+ clang_old_revision = m.group(1)
+ content = re.sub("CLANG_REVISION = '[0-9]+'",
+ "CLANG_REVISION = '{}'".format(clang_revision),
+ content, count=1)
+ content = re.sub("CLANG_SUB_REVISION=[0-9]+",
+ "CLANG_SUB_REVISION={}".format(clang_sub_revision),
+ content, count=1)
+ with open(UPDATE_PY_PATH, 'w') as f:
+ f.write(content)
+ return clang_old_revision
+
+
+def Git(args):
+ subprocess.check_call(["git"] + args)
+
+def main():
+ parser = argparse.ArgumentParser(description='upload new clang revision')
+ parser.add_argument('clang_revision', metavar='CLANG_REVISION',
+ type=int, nargs=1,
+ help='Clang revision to build the toolchain for.')
+ parser.add_argument('clang_sub_revision', metavar='CLANG_SUB_REVISION',
+ type=int, nargs='?', default=1,
+ help='Clang sub-revision to build the toolchain for.')
+
+ args = parser.parse_args()
+
+ clang_revision = args.clang_revision[0]
+ clang_sub_revision = args.clang_sub_revision
+ git_revision = subprocess.check_output(
+ ["git", "rev-parse", "origin/master"]).strip()
+ print "Making a patch for Clang revision r{}-{}".format(
+ clang_revision, clang_sub_revision)
+ print "Chrome revision: {}".format(git_revision)
+ clang_old_revision = PatchRevision(clang_revision, clang_sub_revision)
+
+ Git(["checkout", "-b", "clang-{}-{}".format(
+ clang_revision, clang_sub_revision)])
+ Git(["add", UPDATE_PY_PATH])
+
+ commit_message = 'Ran `{}`.'.format(' '.join(sys.argv))
+ Git(["commit", "-m", "Roll clang {}:{}.\n\n{}".format(
+ clang_old_revision, clang_revision, commit_message)])
+
+ Git(["cl", "upload"])
+ Git(["cl", "try", "-b", "linux_upload_clang", "-r", git_revision])
+ Git(["cl", "try", "-b", "mac_upload_clang", "-r", git_revision])
+ Git(["cl", "try", "-b", "win_upload_clang", "-r", git_revision])
+
+  print ("Please wait until the try bots succeed "
+         "and then push the binaries to goma.")
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/clang/translation_unit/TranslationUnitGenerator.cpp b/chromium/tools/clang/translation_unit/TranslationUnitGenerator.cpp
index 5db8d640b74..3590b0c4ea3 100644
--- a/chromium/tools/clang/translation_unit/TranslationUnitGenerator.cpp
+++ b/chromium/tools/clang/translation_unit/TranslationUnitGenerator.cpp
@@ -23,6 +23,7 @@
#include "clang/Basic/SourceManager.h"
#include "clang/Frontend/CompilerInstance.h"
#include "clang/Frontend/FrontendActions.h"
+#include "clang/Lex/HeaderSearchOptions.h"
#include "clang/Lex/PPCallbacks.h"
#include "clang/Lex/Preprocessor.h"
#include "clang/Tooling/CommonOptionsParser.h"
@@ -31,6 +32,7 @@
#include "clang/Tooling/Tooling.h"
#include "llvm/Support/CommandLine.h"
+using clang::HeaderSearchOptions;
using clang::tooling::CommonOptionsParser;
using std::set;
using std::stack;
@@ -43,10 +45,8 @@ class IncludeFinderPPCallbacks : public clang::PPCallbacks {
public:
IncludeFinderPPCallbacks(clang::SourceManager* source_manager,
string* main_source_file,
- set<string>* source_file_paths)
- : source_manager_(source_manager),
- main_source_file_(main_source_file),
- source_file_paths_(source_file_paths) {}
+ set<string>* source_file_paths,
+ const HeaderSearchOptions* header_search_options);
void FileChanged(clang::SourceLocation /*loc*/,
clang::PPCallbacks::FileChangeReason reason,
clang::SrcMgr::CharacteristicKind /*file_type*/,
@@ -64,9 +64,13 @@ class IncludeFinderPPCallbacks : public clang::PPCallbacks {
void EndOfMainFile() override;
private:
+ string DoubleSlashSystemHeaders(const string& search_path,
+ const string& relative_path) const;
+
clang::SourceManager* const source_manager_;
string* const main_source_file_;
set<string>* const source_file_paths_;
+ set<string> system_header_prefixes_;
// The path of the file that was last referenced by an inclusion directive,
// normalized for includes that are relative to a different source file.
string last_inclusion_directive_;
@@ -74,6 +78,39 @@ class IncludeFinderPPCallbacks : public clang::PPCallbacks {
stack<string> current_files_;
};
+IncludeFinderPPCallbacks::IncludeFinderPPCallbacks(
+ clang::SourceManager* source_manager,
+ string* main_source_file,
+ set<string>* source_file_paths,
+ const HeaderSearchOptions* header_search_options)
+ : source_manager_(source_manager),
+ main_source_file_(main_source_file),
+ source_file_paths_(source_file_paths) {
+ // In practice this list seems to be empty, but add it anyway just in case.
+ for (const auto& prefix : header_search_options->SystemHeaderPrefixes) {
+ system_header_prefixes_.insert(prefix.Prefix);
+ }
+
+ // This list contains all the include directories of different type. We add
+ // all system headers to the set - excluding the Quoted and Angled groups
+ // which are from -iquote and -I flags.
+ for (const auto& entry : header_search_options->UserEntries) {
+ switch (entry.Group) {
+ case clang::frontend::System:
+ case clang::frontend::ExternCSystem:
+ case clang::frontend::CSystem:
+ case clang::frontend::CXXSystem:
+ case clang::frontend::ObjCSystem:
+ case clang::frontend::ObjCXXSystem:
+ case clang::frontend::After:
+ system_header_prefixes_.insert(entry.Path);
+ break;
+ default:
+ break;
+ }
+ }
+}
+
void IncludeFinderPPCallbacks::FileChanged(
clang::SourceLocation /*loc*/,
clang::PPCallbacks::FileChangeReason reason,
@@ -135,21 +172,30 @@ void IncludeFinderPPCallbacks::InclusionDirective(
// Otherwise we take the literal path as we stored it for the current
// file, and append the relative path.
- last_inclusion_directive_ = parent + "/" + relative_path.str();
+ last_inclusion_directive_ =
+ DoubleSlashSystemHeaders(parent, relative_path.str());
} else if (!search_path.empty()) {
- // We want to be able to extract the search path relative to which the
- // include statement is defined. Therefore if search_path is an absolute
- // path (indicating it is most likely a system header) we use "//" as a
- // separator between the search path and the relative path.
- last_inclusion_directive_ = search_path.str() +
- (llvm::sys::path::is_absolute(search_path) ? "//" : "/") +
- relative_path.str();
+ last_inclusion_directive_ =
+ DoubleSlashSystemHeaders(search_path.str(), relative_path.str());
} else {
last_inclusion_directive_ = file_name.str();
}
AddFile(last_inclusion_directive_);
}
+string IncludeFinderPPCallbacks::DoubleSlashSystemHeaders(
+ const string& search_path,
+ const string& relative_path) const {
+ // We want to be able to extract the search path relative to which the
+ // include statement is defined. Therefore if search_path is a system header
+ // we use "//" as a separator between the search path and the relative path.
+ const bool is_system_header =
+ system_header_prefixes_.find(search_path) !=
+ system_header_prefixes_.end();
+
+ return search_path + (is_system_header ? "//" : "/") + relative_path;
+}
+
void IncludeFinderPPCallbacks::EndOfMainFile() {
const clang::FileEntry* main_file =
source_manager_->getFileEntryForID(source_manager_->getMainFileID());
@@ -190,7 +236,8 @@ void CompilationIndexerAction::Preprocess() {
preprocessor.addPPCallbacks(llvm::make_unique<IncludeFinderPPCallbacks>(
&getCompilerInstance().getSourceManager(),
&main_source_file_,
- &source_file_paths_));
+ &source_file_paths_,
+ &getCompilerInstance().getHeaderSearchOpts()));
preprocessor.getDiagnostics().setIgnoreAllWarnings(true);
preprocessor.SetSuppressIncludeNotFoundError(true);
preprocessor.EnterMainSourceFile();
@@ -202,7 +249,7 @@ void CompilationIndexerAction::Preprocess() {
void CompilationIndexerAction::EndSourceFileAction() {
std::ofstream out(main_source_file_ + ".filepaths");
- for (string path : source_file_paths_) {
+ for (const string& path : source_file_paths_) {
out << path << std::endl;
}
}
diff --git a/chromium/tools/clang/translation_unit/test_files/compile_commands.json.template b/chromium/tools/clang/translation_unit/test_files/compile_commands.json.template
new file mode 100644
index 00000000000..f7710877fb1
--- /dev/null
+++ b/chromium/tools/clang/translation_unit/test_files/compile_commands.json.template
@@ -0,0 +1,12 @@
+[
+ {
+ "directory": "$test_files_dir",
+ "command": "clang++ -fsyntax-only -std=c++11 -c test.cc",
+ "file": "test.cc"
+ },
+ {
+ "directory": "$test_files_dir",
+ "command": "clang++ -fsyntax-only -std=c++11 --sysroot ./sysroot -c test_relative_sysroot.cc",
+ "file": "test_relative_sysroot.cc"
+ }
+] \ No newline at end of file
diff --git a/chromium/tools/clang/translation_unit/test_files/sysroot/README b/chromium/tools/clang/translation_unit/test_files/sysroot/README
new file mode 100644
index 00000000000..932835881f2
--- /dev/null
+++ b/chromium/tools/clang/translation_unit/test_files/sysroot/README
@@ -0,0 +1,19 @@
+Clang looks for the presence of crtbegin.o to decide whether to add GCC standard
+C++ library directories to the system header lookup path.
+
+The presence of usr/lib/gcc/x86_64-linux-gnu/4.6/crtbegin.o and
+usr/include/c++/4.6 inside this sysroot will cause clang to add the following
+directories to the path:
+ - $SYSROOT/usr/lib/gcc/x86_64-linux-gnu/4.6/../../../../include/c++/4.6
+ - $SYSROOT/usr/lib/gcc/x86_64-linux-gnu/4.6/../../../../include/x86_64-linux-gnu/c++/4.6
+ - $SYSROOT/usr/lib/gcc/x86_64-linux-gnu/4.6/../../../../include/x86_64-unknown-linux-gnu/c++/4.6
+ - $SYSROOT/usr/lib/gcc/x86_64-linux-gnu/4.6/../../../../include/c++/4.6/backward
+
+As well as the standard paths:
+ - $SYSROOT/usr/local/include
+ - $SYSROOT/include
+ - $SYSROOT/usr/include
+
+See clang's lib/Driver/ToolChains.cpp:
+ - FilterNonExistent: checks for crtbegin.o
+ - Linux::AddClangCXXStdlibIncludeArgs: looks for various c++ directories
diff --git a/chromium/tools/clang/translation_unit/test_files/sysroot/usr/include/c++/4.6/string b/chromium/tools/clang/translation_unit/test_files/sysroot/usr/include/c++/4.6/string
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/chromium/tools/clang/translation_unit/test_files/sysroot/usr/include/c++/4.6/string
diff --git a/chromium/tools/clang/translation_unit/test_files/sysroot/usr/lib/gcc/x86_64-linux-gnu/4.6/crtbegin.o b/chromium/tools/clang/translation_unit/test_files/sysroot/usr/lib/gcc/x86_64-linux-gnu/4.6/crtbegin.o
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/chromium/tools/clang/translation_unit/test_files/sysroot/usr/lib/gcc/x86_64-linux-gnu/4.6/crtbegin.o
diff --git a/chromium/tools/clang/translation_unit/test_files/test.cc.filepaths.expected b/chromium/tools/clang/translation_unit/test_files/test.cc.filepaths.expected
index 5933c1f5973..adb9e6514e9 100644
--- a/chromium/tools/clang/translation_unit/test_files/test.cc.filepaths.expected
+++ b/chromium/tools/clang/translation_unit/test_files/test.cc.filepaths.expected
@@ -1,4 +1,4 @@
-bits/wchar.h
./binomial.h
-./test.cc
./test.h
+//bits/wchar.h
+test.cc
diff --git a/chromium/tools/clang/translation_unit/test_files/test_relative_sysroot.cc b/chromium/tools/clang/translation_unit/test_files/test_relative_sysroot.cc
new file mode 100644
index 00000000000..c6beb1aca01
--- /dev/null
+++ b/chromium/tools/clang/translation_unit/test_files/test_relative_sysroot.cc
@@ -0,0 +1,5 @@
+// Copyright (c) 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <string>
diff --git a/chromium/tools/clang/translation_unit/test_files/test_relative_sysroot.cc.filepaths.expected b/chromium/tools/clang/translation_unit/test_files/test_relative_sysroot.cc.filepaths.expected
new file mode 100644
index 00000000000..706a05e98ae
--- /dev/null
+++ b/chromium/tools/clang/translation_unit/test_files/test_relative_sysroot.cc.filepaths.expected
@@ -0,0 +1,2 @@
+//string
+test_relative_sysroot.cc
diff --git a/chromium/tools/clang/translation_unit/test_translation_unit.py b/chromium/tools/clang/translation_unit/test_translation_unit.py
index a8e8f6865af..0482a7873f6 100755
--- a/chromium/tools/clang/translation_unit/test_translation_unit.py
+++ b/chromium/tools/clang/translation_unit/test_translation_unit.py
@@ -15,11 +15,10 @@ import subprocess
import sys
-def _GenerateCompileCommands(files):
+def _GenerateCompileCommands(template_path, test_files_dir):
"""Returns a JSON string containing a compilation database for the input."""
- return json.dumps([{'directory': '.',
- 'command': 'clang++ -fsyntax-only -std=c++11 -c %s' % f,
- 'file': f} for f in files], indent=2)
+ with open(template_path) as fh:
+ return fh.read().replace('$test_files_dir', test_files_dir)
def _NumberOfTestsToString(tests):
@@ -29,7 +28,7 @@ def _NumberOfTestsToString(tests):
# Before running this test script, please build the translation_unit clang tool
# first. This is explained here:
-# https://code.google.com/p/chromium/wiki/ClangToolRefactoring
+# https://chromium.googlesource.com/chromium/src/+/master/docs/clang_tool_refactoring.md
def main():
tools_clang_directory = os.path.dirname(os.path.dirname(
os.path.realpath(__file__)))
@@ -38,11 +37,13 @@ def main():
tools_clang_directory, 'translation_unit', 'test_files')
compile_database = os.path.join(test_directory_for_tool,
'compile_commands.json')
+ compile_database_template = compile_database + '.template'
source_files = glob.glob(os.path.join(test_directory_for_tool, '*.cc'))
# Generate a temporary compilation database to run the tool over.
with open(compile_database, 'w') as f:
- f.write(_GenerateCompileCommands(source_files))
+ f.write(_GenerateCompileCommands(compile_database_template,
+ test_directory_for_tool))
args = ['python',
os.path.join(tools_clang_scripts_directory, 'run_tool.py'),
@@ -68,13 +69,9 @@ def main():
actual_output = f.readlines()
has_same_filepaths = True
for expected_line, actual_line in zip(expected_output, actual_output):
- if '//' in actual_output:
- if actual_output.split('//')[1] != expected_output:
- sys.stdout.write('expected: %s' % expected_output)
- sys.stdout.write('actual: %s' % actual_output.split('//')[1])
- break
- else:
- continue
+ if '//' in actual_line:
+ actual_line = '//' + actual_line.split('//')[1]
+
if ntpath.basename(expected_line) != ntpath.basename(actual_line):
sys.stdout.write('expected: %s' % ntpath.basename(expected_line))
sys.stdout.write('actual: %s' % ntpath.basename(actual_line))
diff --git a/chromium/tools/code_coverage/croc.css b/chromium/tools/code_coverage/croc.css
new file mode 100644
index 00000000000..071822dafad
--- /dev/null
+++ b/chromium/tools/code_coverage/croc.css
@@ -0,0 +1,102 @@
+/*
+ * Copyright (c) 2012 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+/*
+ * croc.css - styles for croc HTML output
+ */
+
+body {
+ font-family:arial;
+}
+
+table {
+ border-collapse:collapse;
+ border-width:0px;
+ border-style:solid;
+}
+
+thead {
+ background-color:#C0C0E0;
+ text-align:center;
+}
+
+td {
+ padding-right:10px;
+ padding-left:10px;
+ font-size:small;
+ border-width:1px;
+ border-style:solid;
+ border-color:black;
+}
+
+td.secdesc {
+ text-align:center;
+ font-size:medium;
+ font-weight:bold;
+ border-width:0px;
+ border-style:none;
+ padding-top:10px;
+ padding-bottom:5px;
+}
+
+td.section {
+ background-color:#D0D0F0;
+ text-align:center;
+}
+
+td.stat {
+ text-align:center;
+}
+
+td.number {
+ text-align:right;
+}
+
+td.graph {
+ /* Hide the dummy character */
+ color:#FFFFFF;
+ padding-left:6px;
+}
+
+td.high_pct {
+ text-align:right;
+ background-color:#B0FFB0;
+}
+td.mid_pct {
+ text-align:right;
+ background-color:#FFFF90;
+}
+td.low_pct {
+ text-align:right;
+ background-color:#FFB0B0;
+}
+
+
+span.missing {
+ background-color:#FFB0B0;
+}
+span.instr {
+ background-color:#FFFF90;
+}
+span.covered {
+ background-color:#B0FFB0;
+}
+
+span.g_missing {
+ background-color:#FF4040;
+}
+span.g_instr {
+ background-color:#FFFF00;
+}
+span.g_covered {
+ background-color:#40FF40;
+}
+
+p.time {
+ padding-top:10px;
+ font-size:small;
+ font-style:italic;
+}
diff --git a/chromium/tools/code_coverage/croc.py b/chromium/tools/code_coverage/croc.py
new file mode 100755
index 00000000000..1b9908a5f89
--- /dev/null
+++ b/chromium/tools/code_coverage/croc.py
@@ -0,0 +1,722 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Crocodile - compute coverage numbers for Chrome coverage dashboard."""
+
+import optparse
+import os
+import platform
+import re
+import sys
+import croc_html
+import croc_scan
+
+
+class CrocError(Exception):
+ """Coverage error."""
+
+
+class CrocStatError(CrocError):
+ """Error evaluating coverage stat."""
+
+#------------------------------------------------------------------------------
+
+
+class CoverageStats(dict):
+ """Coverage statistics."""
+
+ # Default dictionary values for this stat.
+ DEFAULTS = { 'files_covered': 0,
+ 'files_instrumented': 0,
+ 'files_executable': 0,
+ 'lines_covered': 0,
+ 'lines_instrumented': 0,
+ 'lines_executable': 0 }
+
+ def Add(self, coverage_stats):
+ """Adds a contribution from another coverage stats dict.
+
+ Args:
+ coverage_stats: Statistics to add to this one.
+ """
+ for k, v in coverage_stats.iteritems():
+ if k in self:
+ self[k] += v
+ else:
+ self[k] = v
+
+ def AddDefaults(self):
+ """Add some default stats which might be assumed present.
+
+ Do not clobber if already present. Adds resilience when evaling a
+ croc file which expects certain stats to exist."""
+ for k, v in self.DEFAULTS.iteritems():
+ if not k in self:
+ self[k] = v
+
+#------------------------------------------------------------------------------
+
+
+class CoveredFile(object):
+ """Information about a single covered file."""
+
+ def __init__(self, filename, **kwargs):
+ """Constructor.
+
+ Args:
+ filename: Full path to file, '/'-delimited.
+ kwargs: Keyword args are attributes for file.
+ """
+ self.filename = filename
+ self.attrs = dict(kwargs)
+
+ # Move these to attrs?
+ self.local_path = None # Local path to file
+ self.in_lcov = False # Is file instrumented?
+
+ # No coverage data for file yet
+ self.lines = {} # line_no -> None=executable, 0=instrumented, 1=covered
+ self.stats = CoverageStats()
+
+ def UpdateCoverage(self):
+ """Updates the coverage summary based on covered lines."""
+ exe = instr = cov = 0
+ for l in self.lines.itervalues():
+ exe += 1
+ if l is not None:
+ instr += 1
+ if l == 1:
+ cov += 1
+
+ # Add stats that always exist
+ self.stats = CoverageStats(lines_executable=exe,
+ lines_instrumented=instr,
+ lines_covered=cov,
+ files_executable=1)
+
+ # Add conditional stats
+ if cov:
+ self.stats['files_covered'] = 1
+ if instr or self.in_lcov:
+ self.stats['files_instrumented'] = 1
+
+#------------------------------------------------------------------------------
+
+
+class CoveredDir(object):
+ """Information about a directory containing covered files."""
+
+ def __init__(self, dirpath):
+ """Constructor.
+
+ Args:
+ dirpath: Full path of directory, '/'-delimited.
+ """
+ self.dirpath = dirpath
+
+ # List of covered files directly in this dir, indexed by filename (not
+ # full path)
+ self.files = {}
+
+ # List of subdirs, indexed by filename (not full path)
+ self.subdirs = {}
+
+ # Dict of CoverageStats objects summarizing all children, indexed by group
+ self.stats_by_group = {'all': CoverageStats()}
+ # TODO: by language
+
+ def GetTree(self, indent=''):
+ """Recursively gets stats for the directory and its children.
+
+ Args:
+ indent: indent prefix string.
+
+ Returns:
+ The tree as a string.
+ """
+ dest = []
+
+ # Compile all groupstats
+ groupstats = []
+ for group in sorted(self.stats_by_group):
+ s = self.stats_by_group[group]
+ if not s.get('lines_executable'):
+ continue # Skip groups with no executable lines
+ groupstats.append('%s:%d/%d/%d' % (
+ group, s.get('lines_covered', 0),
+ s.get('lines_instrumented', 0),
+ s.get('lines_executable', 0)))
+
+ outline = '%s%-30s %s' % (indent,
+ os.path.split(self.dirpath)[1] + '/',
+ ' '.join(groupstats))
+ dest.append(outline.rstrip())
+
+ for d in sorted(self.subdirs):
+ dest.append(self.subdirs[d].GetTree(indent=indent + ' '))
+
+ return '\n'.join(dest)
+
+#------------------------------------------------------------------------------
+
+
+class Coverage(object):
+ """Code coverage for a group of files."""
+
+ def __init__(self):
+ """Constructor."""
+ self.files = {} # Map filename --> CoverageFile
+ self.root_dirs = [] # (root, altname)
+ self.rules = [] # (regexp, dict of RHS attrs)
+ self.tree = CoveredDir('')
+ self.print_stats = [] # Dicts of args to PrintStat()
+
+ # Functions which need to be replaced for unit testing
+ self.add_files_walk = os.walk # Walk function for AddFiles()
+ self.scan_file = croc_scan.ScanFile # Source scanner for AddFiles()
+
+ def CleanupFilename(self, filename):
+ """Cleans up a filename.
+
+ Args:
+ filename: Input filename.
+
+ Returns:
+ The cleaned up filename.
+
+ Changes all path separators to '/'.
+    Makes relative paths (those starting with '../' or './') absolute.
+ Replaces all instances of root dirs with alternate names.
+ """
+ # Change path separators
+ filename = filename.replace('\\', '/')
+
+ # Windows doesn't care about case sensitivity.
+ if platform.system() in ['Windows', 'Microsoft']:
+ filename = filename.lower()
+
+ # If path is relative, make it absolute
+ # TODO: Perhaps we should default to relative instead, and only understand
+ # absolute to be files starting with '\', '/', or '[A-Za-z]:'?
+ if filename.split('/')[0] in ('.', '..'):
+ filename = os.path.abspath(filename).replace('\\', '/')
+
+ # Replace alternate roots
+ for root, alt_name in self.root_dirs:
+ # Windows doesn't care about case sensitivity.
+ if platform.system() in ['Windows', 'Microsoft']:
+ root = root.lower()
+ filename = re.sub('^' + re.escape(root) + '(?=(/|$))',
+ alt_name, filename)
+ return filename
+
+ def ClassifyFile(self, filename):
+ """Applies rules to a filename, to see if we care about it.
+
+ Args:
+ filename: Input filename.
+
+ Returns:
+ A dict of attributes for the file, accumulated from the right hand sides
+ of rules which fired.
+ """
+ attrs = {}
+
+ # Process all rules
+ for regexp, rhs_dict in self.rules:
+ if regexp.match(filename):
+ attrs.update(rhs_dict)
+
+ return attrs
+ # TODO: Files can belong to multiple groups?
+ # (test/source)
+ # (mac/pc/win)
+ # (media_test/all_tests)
+ # (small/med/large)
+ # How to handle that?
+
+ def AddRoot(self, root_path, alt_name='_'):
+ """Adds a root directory.
+
+ Args:
+ root_path: Root directory to add.
+ alt_name: If specified, name of root dir. Otherwise, defaults to '_'.
+
+ Raises:
+ ValueError: alt_name was blank.
+ """
+ # Alt name must not be blank. If it were, there wouldn't be a way to
+ # reverse-resolve from a root-replaced path back to the local path, since
+ # '' would always match the beginning of the candidate filename, resulting
+ # in an infinite loop.
+ if not alt_name:
+ raise ValueError('AddRoot alt_name must not be blank.')
+
+ # Clean up root path based on existing rules
+ self.root_dirs.append([self.CleanupFilename(root_path), alt_name])
+
+ def AddRule(self, path_regexp, **kwargs):
+ """Adds a rule.
+
+ Args:
+ path_regexp: Regular expression to match for filenames. These are
+ matched after root directory replacement.
+ kwargs: Keyword arguments are attributes to set if the rule applies.
+
+ Keyword arguments currently supported:
+ include: If True, includes matches; if False, excludes matches. Ignored
+ if None.
+ group: If not None, sets group to apply to matches.
+ language: If not None, sets file language to apply to matches.
+ """
+
+ # Compile regexp ahead of time
+ self.rules.append([re.compile(path_regexp), dict(kwargs)])
+
def GetCoveredFile(self, filename, add=False):
  """Looks up (and optionally creates) the CoveredFile for a filename.

  Args:
    filename: Name of file to find.
    add: If True, will add the file if it's not present.  This applies the
        transformations from AddRoot() and AddRule(), and only adds the file
        if a rule includes it, and it has a group and language.

  Returns:
    The matching CoveredFile object, or None if not present.
  """
  filename = self.CleanupFilename(filename)

  # Fast path: file already tracked.
  if filename in self.files:
    return self.files[filename]

  if not add:
    # Unknown file and we may not add it.
    return None

  # Only admit files that a rule explicitly includes and that carry both a
  # group and a language.
  attrs = self.ClassifyFile(filename)
  if not (attrs.get('include') and attrs.get('group')
          and attrs.get('language')):
    return None

  new_file = CoveredFile(filename, **attrs)
  self.files[filename] = new_file
  return new_file
+
def RemoveCoveredFile(self, cov_file):
  """Drops a file from the covered-file map.

  Args:
    cov_file: A file object returned by GetCoveredFile().
  """
  del self.files[cov_file.filename]
+
def ParseLcovData(self, lcov_data):
  """Adds coverage from LCOV-formatted data.

  Args:
    lcov_data: An iterable returning lines of data in LCOV format.  For
        example, a file or list of strings.
  """
  # cov_file is the CoveredFile for the record currently being read, or None
  # while inside a record for a file we don't track.  cov_lines aliases
  # cov_file.lines for the duration of the record.
  cov_file = None
  cov_lines = None
  for line in lcov_data:
    line = line.strip()
    if line.startswith('SF:'):
      # Start of data for a new file; payload is filename
      cov_file = self.GetCoveredFile(line[3:], add=True)
      if cov_file:
        cov_lines = cov_file.lines
        cov_file.in_lcov = True  # File was instrumented
    elif not cov_file:
      # Inside data for a file we don't care about - so skip it
      pass
    elif line.startswith('DA:'):
      # Data point - that is, an executable line in current file
      line_no, is_covered = map(int, line[3:].split(','))
      if is_covered:
        # Line is covered
        cov_lines[line_no] = 1
      elif cov_lines.get(line_no) != 1:
        # Line is not covered, so track it as uncovered - but a 'covered'
        # result from an earlier record is never downgraded to uncovered.
        cov_lines[line_no] = 0
    elif line == 'end_of_record':
      # Record complete; fold the accumulated lines into the file's stats.
      cov_file.UpdateCoverage()
      cov_file = None
    # (else ignore other line types)
+
def ParseLcovFile(self, input_filename):
  """Adds coverage data from a .lcov file.

  Args:
    input_filename: Input filename.

  Raises:
    IOError: The file could not be opened or read.
  """
  # TODO: All manner of error checking
  # Use a context manager so the file is closed even if parsing raises,
  # replacing the original manual open/close with try/finally bookkeeping.
  with open(input_filename, 'rt') as lcov_file:
    self.ParseLcovData(lcov_file)
+
+ def GetStat(self, stat, group='all', default=None):
+ """Gets a statistic from the coverage object.
+
+ Args:
+ stat: Statistic to get. May also be an evaluatable python expression,
+ using the stats. For example, 'stat1 - stat2'.
+ group: File group to match; if 'all', matches all groups.
+ default: Value to return if there was an error evaluating the stat. For
+ example, if the stat does not exist. If None, raises
+ CrocStatError.
+
+ Returns:
+ The evaluated stat, or None if error.
+
+ Raises:
+ CrocStatError: Error evaluating stat.
+ """
+ # TODO: specify a subdir to get the stat from, then walk the tree to
+ # print the stats from just that subdir
+
+ # Make sure the group exists
+ if group not in self.tree.stats_by_group:
+ if default is None:
+ raise CrocStatError('Group %r not found.' % group)
+ else:
+ return default
+
+ stats = self.tree.stats_by_group[group]
+ # Unit tests use real dicts, not CoverageStats objects,
+ # so we can't AddDefaults() on them.
+ if group == 'all' and hasattr(stats, 'AddDefaults'):
+ stats.AddDefaults()
+ try:
+ return eval(stat, {'__builtins__': {'S': self.GetStat}}, stats)
+ except Exception, e:
+ if default is None:
+ raise CrocStatError('Error evaluating stat %r: %s' % (stat, e))
+ else:
+ return default
+
def PrintStat(self, stat, format=None, outfile=sys.stdout, **kwargs):
  """Prints a statistic from the coverage object.

  Args:
    stat: Statistic to get.  May also be an evaluatable python expression,
        using the stats.  For example, 'stat1 - stat2'.
    format: Format string to use when printing stat.  If None, prints the
        stat and its evaluation.
    outfile: File stream to output stat to; defaults to stdout.
    kwargs: Additional args to pass to GetStat().
  """
  value = self.GetStat(stat, **kwargs)
  if format is None:
    text = 'GetStat(%r) = %s' % (stat, value)
  else:
    text = format % value
  outfile.write(text + '\n')
+
def AddFiles(self, src_dir):
  """Adds files to coverage information.

  LCOV files only contains files which are compiled and instrumented as part
  of running coverage.  This function finds missing files and adds them.

  Args:
    src_dir: Directory on disk at which to start search.  May be a relative
        path on disk starting with '.' or '..', or an absolute path, or a
        path relative to an alt_name for one of the roots
        (for example, '_/src').  If the alt_name matches more than one root,
        all matches will be attempted.

  Note that dirs not underneath one of the root dirs and covered by an
  inclusion rule will be ignored.
  """
  # Check for root dir alt_names in the path and replace with the actual
  # root dirs, then recurse.
  found_root = False
  for root, alt_name in self.root_dirs:
    # The lookahead restricts substitution to a complete leading path
    # component (alt_name followed by '/' or end of string), not a mere
    # string prefix.
    replaced_root = re.sub('^' + re.escape(alt_name) + '(?=(/|$))', root,
                           src_dir)
    if replaced_root != src_dir:
      found_root = True
      self.AddFiles(replaced_root)
  if found_root:
    return  # Replaced an alt_name with a root_dir, so already recursed.

  for (dirpath, dirnames, filenames) in self.add_files_walk(src_dir):
    # Make a copy of the dirnames list so we can modify the original to
    # prune subdirs we don't need to walk.
    for d in list(dirnames):
      # Add trailing '/' to directory names so dir-based regexps can match
      # '/' instead of needing to specify '(/|$)'.
      dpath = self.CleanupFilename(dirpath + '/' + d) + '/'
      attrs = self.ClassifyFile(dpath)
      if not attrs.get('include'):
        # Directory has been excluded, so don't traverse it
        # TODO: Document the slight weirdness caused by this: If you
        # AddFiles('./A'), and the rules include 'A/B/C/D' but not 'A/B',
        # then it won't recurse into './A/B' so won't find './A/B/C/D'.
        # Workarounds are to AddFiles('./A/B/C/D') or AddFiles('./A/B/C').
        # The latter works because it explicitly walks the contents of the
        # path passed to AddFiles(), so it finds './A/B/C/D'.
        dirnames.remove(d)

    for f in filenames:
      local_path = dirpath + '/' + f

      covf = self.GetCoveredFile(local_path, add=True)
      if not covf:
        continue

      # Save where we found the file, for generating line-by-line HTML output
      covf.local_path = local_path

      if covf.in_lcov:
        # File already instrumented and doesn't need to be scanned
        continue

      if not covf.attrs.get('add_if_missing', 1):
        # Not allowed to add the file
        self.RemoveCoveredFile(covf)
        continue

      # Scan file to find potentially-executable lines
      lines = self.scan_file(covf.local_path, covf.attrs.get('language'))
      if lines:
        # None marks the line as executable but not instrumented.
        for l in lines:
          covf.lines[l] = None
        covf.UpdateCoverage()
      else:
        # File has no executable lines, so don't count it
        self.RemoveCoveredFile(covf)
+
def AddConfig(self, config_data, lcov_queue=None, addfiles_queue=None):
  """Adds JSON-ish config data.

  Args:
    config_data: Config data string.
    lcov_queue: If not None, object to append lcov_files to instead of
        parsing them immediately.
    addfiles_queue: If not None, object to append add_files to instead of
        processing them immediately.
  """
  # TODO: All manner of error checking
  # NOTE(review): the config is eval()'d with builtins stripped; this still
  # assumes config files are trusted input -- do not feed untrusted data
  # through here.
  cfg = eval(config_data, {'__builtins__': {}}, {})

  for rootdict in cfg.get('roots', []):
    self.AddRoot(rootdict['root'], alt_name=rootdict.get('altname', '_'))

  for ruledict in cfg.get('rules', []):
    # 'regexp' is consumed here; all remaining keys become rule attributes.
    regexp = ruledict.pop('regexp')
    self.AddRule(regexp, **ruledict)

  for add_lcov in cfg.get('lcov_files', []):
    if lcov_queue is not None:
      lcov_queue.append(add_lcov)
    else:
      self.ParseLcovFile(add_lcov)

  for add_path in cfg.get('add_files', []):
    if addfiles_queue is not None:
      addfiles_queue.append(add_path)
    else:
      self.AddFiles(add_path)

  self.print_stats += cfg.get('print_stats', [])
+
def ParseConfig(self, filename, **kwargs):
  """Parses a configuration file.

  Args:
    filename: Config filename.
    kwargs: Additional parameters to pass to AddConfig().

  Raises:
    IOError: The file could not be opened or read.
  """
  # TODO: All manner of error checking
  # Use a context manager so the file is closed even on error, replacing the
  # original manual open/close with try/finally bookkeeping.
  with open(filename, 'rt') as f:
    # Need to strip CR's from CRLF-terminated lines or posix systems can't
    # eval the data.
    config_data = f.read().replace('\r\n', '\n')
  # TODO: some sort of include syntax.
  #
  # Needs to be done at string-time rather than at eval()-time, so that
  # it's possible to include parts of dicts.  Path from a file to its
  # include should be relative to the dir containing the file.
  #
  # Or perhaps it could be done after eval.  In that case, there'd be an
  # 'include' section with a list of files to include.  Those would be
  # eval()'d and recursively pre- or post-merged with the including file.
  #
  # Or maybe just don't worry about it, since multiple configs can be
  # specified on the command line.
  self.AddConfig(config_data, **kwargs)
+
def UpdateTreeStats(self):
  """Recalculates the tree stats from the currently covered files.

  Also calculates coverage summary for files.
  """
  # Rebuild the directory tree from scratch on every call.
  self.tree = CoveredDir('')
  for cov_file in self.files.itervalues():
    # Add the file to the tree
    fdirs = cov_file.filename.split('/')
    parent = self.tree
    ancestors = [parent]
    # Walk (and lazily create) a CoveredDir node per path component.
    for d in fdirs[:-1]:
      if d not in parent.subdirs:
        if parent.dirpath:
          parent.subdirs[d] = CoveredDir(parent.dirpath + '/' + d)
        else:
          # Child of the root: no leading '/' on the dirpath.
          parent.subdirs[d] = CoveredDir(d)
      parent = parent.subdirs[d]
      ancestors.append(parent)
    # Final subdir actually contains the file
    parent.files[fdirs[-1]] = cov_file

    # Now add file's contribution to coverage by dir
    for a in ancestors:
      # Add to 'all' group
      a.stats_by_group['all'].Add(cov_file.stats)

      # Add to group file belongs to
      group = cov_file.attrs.get('group')
      if group not in a.stats_by_group:
        a.stats_by_group[group] = CoverageStats()
      cbyg = a.stats_by_group[group]
      cbyg.Add(cov_file.stats)
+
def PrintTree(self):
  """Prints the per-directory line coverage tree to stdout."""
  # Print the tree
  print 'Lines of code coverage by directory:'
  print self.tree.GetTree()
+
+#------------------------------------------------------------------------------
+
+
def Main(argv):
  """Main routine.

  Args:
    argv: list of arguments

  Returns:
    exit code, 0 for normal exit.
  """
  # Parse args
  parser = optparse.OptionParser()
  parser.add_option(
      '-i', '--input', dest='inputs', type='string', action='append',
      metavar='FILE',
      help='read LCOV input from FILE')
  parser.add_option(
      '-r', '--root', dest='roots', type='string', action='append',
      metavar='ROOT[=ALTNAME]',
      help='add ROOT directory, optionally map in coverage results as ALTNAME')
  parser.add_option(
      '-c', '--config', dest='configs', type='string', action='append',
      metavar='FILE',
      help='read settings from configuration FILE')
  parser.add_option(
      '-a', '--addfiles', dest='addfiles', type='string', action='append',
      metavar='PATH',
      help='add files from PATH to coverage data')
  parser.add_option(
      '-t', '--tree', dest='tree', action='store_true',
      help='print tree of code coverage by group')
  parser.add_option(
      '-u', '--uninstrumented', dest='uninstrumented', action='store_true',
      help='list uninstrumented files')
  parser.add_option(
      '-m', '--html', dest='html_out', type='string', metavar='PATH',
      help='write HTML output to PATH')
  parser.add_option(
      '-b', '--base_url', dest='base_url', type='string', metavar='URL',
      help='include URL in base tag of HTML output')

  parser.set_defaults(
      inputs=[],
      roots=[],
      configs=[],
      addfiles=[],
      tree=False,
      html_out=None,
      )

  options = parser.parse_args(args=argv)[0]

  cov = Coverage()

  # Set root directories for coverage
  for root_opt in options.roots:
    if '=' in root_opt:
      # 'ROOT=ALTNAME' form: pass both pieces to AddRoot().
      cov.AddRoot(*root_opt.split('='))
    else:
      cov.AddRoot(root_opt)

  # Read config files.  Config-listed lcov and addfiles entries are queued
  # onto the command-line lists so both sources are processed together below.
  for config_file in options.configs:
    cov.ParseConfig(config_file, lcov_queue=options.inputs,
                    addfiles_queue=options.addfiles)

  # Parse lcov files
  for input_filename in options.inputs:
    cov.ParseLcovFile(input_filename)

  # Add missing files
  for add_path in options.addfiles:
    cov.AddFiles(add_path)

  # Print help if no files specified
  if not cov.files:
    print 'No covered files found.'
    parser.print_help()
    return 1

  # Update tree stats
  cov.UpdateTreeStats()

  # Print uninstrumented filenames
  if options.uninstrumented:
    print 'Uninstrumented files:'
    for f in sorted(cov.files):
      covf = cov.files[f]
      if not covf.in_lcov:
        print ' %-6s %-6s %s' % (covf.attrs.get('group'),
                                 covf.attrs.get('language'), f)

  # Print tree stats
  if options.tree:
    cov.PrintTree()

  # Print stats
  for ps_args in cov.print_stats:
    cov.PrintStat(**ps_args)

  # Generate HTML
  if options.html_out:
    html = croc_html.CrocHtml(cov, options.html_out, options.base_url)
    html.Write()

  # Normal exit
  return 0
diff --git a/chromium/tools/code_coverage/croc_html.py b/chromium/tools/code_coverage/croc_html.py
new file mode 100644
index 00000000000..7866f472f7b
--- /dev/null
+++ b/chromium/tools/code_coverage/croc_html.py
@@ -0,0 +1,451 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Crocodile HTML output."""
+
+import os
+import shutil
+import time
+import xml.dom
+
+
class CrocHtmlError(Exception):
  """Error raised while generating coverage HTML output."""
+
+
class HtmlElement(object):
  """A thin wrapper around one node in an HTML (XML DOM) document."""

  def __init__(self, doc, element):
    """Constructor.

    Args:
      doc: XML document object.
      element: XML element.
    """
    self.doc = doc
    self.element = element

  def E(self, name, **kwargs):
    """Creates and appends a child element.

    Args:
      name: Name of element.
      kwargs: Attributes for element.  To use an attribute which is a python
          reserved word (i.e. 'class'), prefix the attribute name with 'e_'.

    Returns:
      The child element.
    """
    child = HtmlElement(self.doc, self.doc.createElement(name))
    self.element.appendChild(child.element)

    for key, value in kwargs.iteritems():
      # Strip the 'e_' escape prefix, if present, to recover the real
      # attribute name.
      attr_name = key[2:] if key.startswith('e_') else key
      child.element.setAttribute(attr_name, str(value))

    return child

  def Text(self, text):
    """Appends a text node.

    Args:
      text: Text to add.

    Returns:
      self.
    """
    self.element.appendChild(self.doc.createTextNode(str(text)))
    return self
+
+
class HtmlFile(object):
  """An HTML file being assembled as an XML DOM document."""

  def __init__(self, xml_impl, filename):
    """Constructor.

    Args:
      xml_impl: DOMImplementation to use to create document.
      filename: Path to file.
    """
    self.xml_impl = xml_impl
    # Emit an HTML 4.01 strict doctype.
    doctype = xml_impl.createDocumentType(
        'HTML', '-//W3C//DTD HTML 4.01//EN',
        'http://www.w3.org/TR/html4/strict.dtd')
    self.doc = xml_impl.createDocument(None, 'html', doctype)
    self.filename = filename

    # Create head and body elements
    root = HtmlElement(self.doc, self.doc.documentElement)
    self.head = root.E('head')
    self.body = root.E('body')

  def Write(self, cleanup=True):
    """Writes the file.

    Args:
      cleanup: If True, calls unlink() on the internal xml document.  This
          frees up memory, but means that you can't use this file for
          anything else.
    """
    # Use a context manager so the file handle is closed even if writexml()
    # raises; the original leaked the handle on error.
    with open(self.filename, 'wt') as f:
      self.doc.writexml(f, encoding='UTF-8')

    if cleanup:
      self.doc.unlink()
      # Prevent future uses of the doc now that we've unlinked it
      self.doc = None
+
#------------------------------------------------------------------------------

# Per-line marker character for the line-by-line HTML view, keyed by the line
# state stored in CoveredFile.lines: None = executable but missing from the
# lcov data, 0 = instrumented but not covered, 1 = covered, 2 = the default
# for lines absent from the map (non-executable).
COV_TYPE_STRING = {None: 'm', 0: 'i', 1: 'E', 2: ' '}
# CSS class for each line state; same keys as COV_TYPE_STRING.
COV_TYPE_CLASS = {None: 'missing', 0: 'instr', 1: 'covered', 2: ''}
+
+
class CrocHtml(object):
  """Crocodile HTML output class."""

  def __init__(self, cov, output_root, base_url=None):
    """Constructor.

    Args:
      cov: Coverage object whose tree will be rendered.
      output_root: Directory to write the HTML output into.
      base_url: If not None, URL prefix inserted via <base> tags so pages
          can reference shared assets from one location.
    """
    self.cov = cov
    self.output_root = output_root
    self.base_url = base_url
    self.xml_impl = xml.dom.getDOMImplementation()
    # Footer timestamp shared by every generated page.
    self.time_string = 'Coverage information generated %s.' % time.asctime()

  def CreateHtmlDoc(self, filename, title):
    """Creates a new HTML document.

    Args:
      filename: Filename to write to, relative to self.output_root.
      title: Title of page

    Returns:
      The document.
    """
    f = HtmlFile(self.xml_impl, self.output_root + '/' + filename)

    f.head.E('title').Text(title)

    if self.base_url:
      css_href = self.base_url + 'croc.css'
      base_href = self.base_url + os.path.dirname(filename)
      if not base_href.endswith('/'):
        base_href += '/'
      f.head.E('base', href=base_href)
    else:
      # Without a base URL, reference croc.css relative to this page's depth
      # in the output tree.
      css_href = '../' * (len(filename.split('/')) - 1) + 'croc.css'

    f.head.E('link', rel='stylesheet', type='text/css', href=css_href)

    return f

  def AddCaptionForFile(self, body, path):
    """Adds a caption for the file, with links to each parent dir.

    Args:
      body: Body element.
      path: Path to file.
    """
    # This is slightly different than for subdir, because it needs to have a
    # link to the current directory's index.html.
    hdr = body.E('h2')
    hdr.Text('Coverage for ')
    dirs = [''] + path.split('/')
    num_dirs = len(dirs)
    for i in range(num_dirs - 1):
      hdr.E('a', href=(
          '../' * (num_dirs - i - 2) + 'index.html')).Text(dirs[i] + '/')
    hdr.Text(dirs[-1])

  def AddCaptionForSubdir(self, body, path):
    """Adds a caption for the subdir, with links to each parent dir.

    Args:
      body: Body element.
      path: Path to subdir.
    """
    # Link to parent dirs
    hdr = body.E('h2')
    hdr.Text('Coverage for ')
    dirs = [''] + path.split('/')
    num_dirs = len(dirs)
    for i in range(num_dirs - 1):
      hdr.E('a', href=(
          '../' * (num_dirs - i - 1) + 'index.html')).Text(dirs[i] + '/')
    hdr.Text(dirs[-1] + '/')

  def AddSectionHeader(self, table, caption, itemtype, is_file=False):
    """Adds a section header to the coverage table.

    Args:
      table: Table to add rows to.
      caption: Caption for section, if not None.
      itemtype: Type of items in this section, if not None.
      is_file: Are items in this section files?
    """

    if caption is not None:
      table.E('tr').E('th', e_class='secdesc', colspan=8).Text(caption)

    sec_hdr = table.E('tr')

    if itemtype is not None:
      sec_hdr.E('th', e_class='section').Text(itemtype)

    sec_hdr.E('th', e_class='section').Text('Coverage')
    sec_hdr.E('th', e_class='section', colspan=3).Text(
        'Lines executed / instrumented / missing')

    graph = sec_hdr.E('th', e_class='section')
    graph.E('span', style='color:#00FF00').Text('exe')
    graph.Text(' / ')
    graph.E('span', style='color:#FFFF00').Text('inst')
    graph.Text(' / ')
    graph.E('span', style='color:#FF0000').Text('miss')

    if is_file:
      sec_hdr.E('th', e_class='section').Text('Language')
      sec_hdr.E('th', e_class='section').Text('Group')
    else:
      # Empty filler cell keeps column counts aligned with the file layout.
      sec_hdr.E('th', e_class='section', colspan=2)

  def AddItem(self, table, itemname, stats, attrs, link=None):
    """Adds a bar graph to the element.  This is a series of <td> elements.

    Args:
      table: Table to add item to.
      itemname: Name of item.
      stats: Stats object.
      attrs: Attributes dictionary; if None, no attributes will be printed.
      link: Destination for itemname hyperlink, if not None.
    """
    row = table.E('tr')

    # Add item name
    if itemname is not None:
      item_elem = row.E('td')
      if link is not None:
        item_elem = item_elem.E('a', href=link)
      item_elem.Text(itemname)

    # Get stats
    stat_exe = stats.get('lines_executable', 0)
    stat_ins = stats.get('lines_instrumented', 0)
    stat_cov = stats.get('lines_covered', 0)

    # Percent cell is created now but only filled in if stat_exe is nonzero.
    percent = row.E('td')

    # Add text
    row.E('td', e_class='number').Text(stat_cov)
    row.E('td', e_class='number').Text(stat_ins)
    row.E('td', e_class='number').Text(stat_exe - stat_ins)

    # Add percent and graph; only fill in if there's something in there
    graph = row.E('td', e_class='graph', width=100)
    if stat_exe:
      percent_cov = 100.0 * stat_cov / stat_exe
      percent_ins = 100.0 * stat_ins / stat_exe

      # Color percent based on thresholds
      percent.Text('%.1f%%' % percent_cov)
      if percent_cov >= 80:
        percent.element.setAttribute('class', 'high_pct')
      elif percent_cov >= 60:
        percent.element.setAttribute('class', 'mid_pct')
      else:
        percent.element.setAttribute('class', 'low_pct')

      # Graphs use integer values
      percent_cov = int(percent_cov)
      percent_ins = int(percent_ins)

      # Three spans whose left padding sums to 100px: covered, instrumented
      # but uncovered, and missing.
      graph.Text('.')
      graph.E('span', style='padding-left:%dpx' % percent_cov,
              e_class='g_covered')
      graph.E('span', style='padding-left:%dpx' % (percent_ins - percent_cov),
              e_class='g_instr')
      graph.E('span', style='padding-left:%dpx' % (100 - percent_ins),
              e_class='g_missing')

    if attrs:
      row.E('td', e_class='stat').Text(attrs.get('language'))
      row.E('td', e_class='stat').Text(attrs.get('group'))
    else:
      row.E('td', colspan=2)

  def WriteFile(self, cov_file):
    """Writes the HTML for a file.

    Args:
      cov_file: croc.CoveredFile to write.
    """
    print ' ' + cov_file.filename
    title = 'Coverage for ' + cov_file.filename

    f = self.CreateHtmlDoc(cov_file.filename + '.html', title)
    body = f.body

    # Write header section
    self.AddCaptionForFile(body, cov_file.filename)

    # Summary for this file
    table = body.E('table')
    self.AddSectionHeader(table, None, None, is_file=True)
    self.AddItem(table, None, cov_file.stats, cov_file.attrs)

    body.E('h2').Text('Line-by-line coverage:')

    # Print line-by-line coverage
    if cov_file.local_path:
      code_table = body.E('table').E('tr').E('td').E('pre')

      # NOTE(review): this file handle is never closed explicitly; it relies
      # on garbage collection to release it.
      flines = open(cov_file.local_path, 'rt')
      lineno = 0

      for line in flines:
        lineno += 1
        # Lines absent from cov_file.lines default to state 2
        # (non-executable).
        line_cov = cov_file.lines.get(lineno, 2)
        e_class = COV_TYPE_CLASS.get(line_cov)

        code_table.E('span', e_class=e_class).Text('%4d %s : %s\n' % (
            lineno,
            COV_TYPE_STRING.get(line_cov),
            line.rstrip()
        ))

    else:
      body.Text('Line-by-line coverage not available. Make sure the directory'
                ' containing this file has been scanned via ')
      body.E('B').Text('add_files')
      body.Text(' in a configuration file, or the ')
      body.E('B').Text('--addfiles')
      body.Text(' command line option.')

    # TODO: if file doesn't have a local path, try to find it by
    # reverse-mapping roots and searching for the file.

    body.E('p', e_class='time').Text(self.time_string)
    f.Write()

  def WriteSubdir(self, cov_dir):
    """Writes the index.html for a subdirectory.

    Args:
      cov_dir: croc.CoveredDir to write.
    """
    print ' ' + cov_dir.dirpath + '/'

    # Create the subdir if it doesn't already exist
    subdir = self.output_root + '/' + cov_dir.dirpath
    if not os.path.exists(subdir):
      os.mkdir(subdir)

    if cov_dir.dirpath:
      title = 'Coverage for ' + cov_dir.dirpath + '/'
      f = self.CreateHtmlDoc(cov_dir.dirpath + '/index.html', title)
    else:
      title = 'Coverage summary'
      f = self.CreateHtmlDoc('index.html', title)

    body = f.body

    dirs = [''] + cov_dir.dirpath.split('/')
    num_dirs = len(dirs)
    sort_jsfile = '../' * (num_dirs - 1) + 'sorttable.js'
    # NOTE(review): the fake '/script' element appears to force an explicit
    # closing tag into the serialized output; 'script' itself is otherwise
    # unused -- confirm before changing.
    script = body.E('script', src=sort_jsfile)
    body.E('/script')

    # Write header section
    if cov_dir.dirpath:
      self.AddCaptionForSubdir(body, cov_dir.dirpath)
    else:
      body.E('h2').Text(title)

    table = body.E('table', e_class='sortable')
    table.E('h3').Text('Coverage by Group')
    # Coverage by group
    self.AddSectionHeader(table, None, 'Group')

    for group in sorted(cov_dir.stats_by_group):
      self.AddItem(table, group, cov_dir.stats_by_group[group], None)

    # List subdirs
    if cov_dir.subdirs:
      table = body.E('table', e_class='sortable')
      table.E('h3').Text('Subdirectories')
      self.AddSectionHeader(table, None, 'Subdirectory')

      for d in sorted(cov_dir.subdirs):
        self.AddItem(table, d + '/', cov_dir.subdirs[d].stats_by_group['all'],
                     None, link=d + '/index.html')

    # List files
    if cov_dir.files:
      table = body.E('table', e_class='sortable')
      table.E('h3').Text('Files in This Directory')
      self.AddSectionHeader(table, None, 'Filename',
                            is_file=True)

      for filename in sorted(cov_dir.files):
        cov_file = cov_dir.files[filename]
        self.AddItem(table, filename, cov_file.stats, cov_file.attrs,
                     link=filename + '.html')

    body.E('p', e_class='time').Text(self.time_string)
    f.Write()

  def WriteRoot(self):
    """Writes the files in the output root."""
    # Find ourselves
    # NOTE(review): func_code is Python 2 only (renamed __code__ in
    # Python 3).
    src_dir = os.path.split(self.WriteRoot.func_code.co_filename)[0]

    # Files to copy into output root
    copy_files = ['croc.css']
    # Third_party files to copy into output root
    third_party_files = ['sorttable.js']

    # Copy files from our directory into the output directory
    for copy_file in copy_files:
      print ' Copying %s' % copy_file
      shutil.copyfile(os.path.join(src_dir, copy_file),
                      os.path.join(self.output_root, copy_file))
    # Copy third party files from third_party directory into
    # the output directory
    src_dir = os.path.join(src_dir, 'third_party')
    for third_party_file in third_party_files:
      print ' Copying %s' % third_party_file
      shutil.copyfile(os.path.join(src_dir, third_party_file),
                      os.path.join(self.output_root, third_party_file))

  def Write(self):
    """Writes HTML output."""

    print 'Writing HTML to %s...' % self.output_root

    # Loop through the tree and write subdirs, breadth-first
    # TODO: switch to depth-first and sort values - makes nicer output?
    todo = [self.cov.tree]
    while todo:
      cov_dir = todo.pop(0)

      # Append subdirs to todo list
      todo += cov_dir.subdirs.values()

      # Write this subdir
      self.WriteSubdir(cov_dir)

      # Write files in this subdir
      for cov_file in cov_dir.files.itervalues():
        self.WriteFile(cov_file)

    # Write files in root directory
    self.WriteRoot()
diff --git a/chromium/tools/code_coverage/croc_scan.py b/chromium/tools/code_coverage/croc_scan.py
new file mode 100644
index 00000000000..8d0e2e8df2a
--- /dev/null
+++ b/chromium/tools/code_coverage/croc_scan.py
@@ -0,0 +1,164 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Crocodile source scanners."""
+
+
+import re
+
+
class Scanner(object):
  """Generic source scanner."""

  def __init__(self):
    """Constructor."""
    # NOTE(review): ScanLines() reads t.groups()[0], which requires the
    # token pattern to contain a capturing group; this default ('#') has
    # none, so base-class instances rely on callers overriding re_token (as
    # the subclasses and unit tests do) -- confirm before using Scanner
    # directly on input containing '#'.
    self.re_token = re.compile('#')
    self.comment_to_eol = ['#']
    self.comment_start = None
    self.comment_end = None

  def ScanLines(self, lines):
    """Scans the lines for executable statements.

    Args:
      lines: Iterator returning source lines.

    Returns:
      An array of line numbers which are executable.
    """
    exe_lines = []
    lineno = 0

    # Scanner state carried across lines: the string delimiter we're inside
    # (or None), the comment-end token being sought (or None), and the
    # column where the current comment started.
    in_string = None
    in_comment = None
    comment_index = None

    for line in lines:
      lineno += 1
      in_string_at_start = in_string

      for t in self.re_token.finditer(line):
        tokenstr = t.groups()[0]

        if in_comment:
          # Inside a multi-line comment, so look for end token
          if tokenstr == in_comment:
            in_comment = None
            # Replace comment with spaces
            line = (line[:comment_index]
                    + ' ' * (t.end(0) - comment_index)
                    + line[t.end(0):])

        elif in_string:
          # Inside a string, so look for end token
          if tokenstr == in_string:
            in_string = None

        elif tokenstr in self.comment_to_eol:
          # Single-line comment, so truncate line at start of token
          line = line[:t.start(0)]
          break

        elif tokenstr == self.comment_start:
          # Multi-line comment start - end token is comment_end
          in_comment = self.comment_end
          comment_index = t.start(0)

        else:
          # Starting a string - end token is same as start
          in_string = tokenstr

      # If still in comment at end of line, remove comment
      if in_comment:
        line = line[:comment_index]
        # Next line, delete from the beginning
        comment_index = 0

      # If line-sans-comments is not empty, claim it may be executable
      if line.strip() or in_string_at_start:
        exe_lines.append(lineno)

    # Return executable lines
    return exe_lines

  def Scan(self, filename):
    """Reads the file and scans its lines.

    Args:
      filename: Path to file to scan.

    Returns:
      An array of line numbers which are executable.
    """
    # TODO: All manner of error checking
    # Context manager guarantees the file is closed even if scanning raises,
    # replacing the original manual try/finally bookkeeping.
    with open(filename, 'rt') as f:
      return self.ScanLines(f)
+
+
class PythonScanner(Scanner):
  """Python source scanner."""

  def __init__(self):
    """Constructor."""
    Scanner.__init__(self)

    self.comment_to_eol = ['#']
    self.comment_start = None
    self.comment_end = None
    # TODO: This breaks for strings ending in more than 2 backslashes.  Need
    # a pattern which counts only an odd number of backslashes, so the last
    # one thus escapes the quote.
    self.re_token = re.compile(r'(#|\'\'\'|"""|(?<!(?<!\\)\\)["\'])')
+
+
class CppScanner(Scanner):
  """C / C++ / ObjC / ObjC++ source scanner."""

  def __init__(self):
    """Constructor."""
    Scanner.__init__(self)

    # '#' only counts as a comment token via the '^\s*#' alternative below,
    # so preprocessor directives are treated as comments and ignored.
    self.comment_to_eol = ['//', '#']
    self.comment_start = '/*'
    self.comment_end = '*/'

    # TODO: This breaks for strings ending in more than 2 backslashes.  Need
    # a pattern which counts only an odd number of backslashes, so the last
    # one thus escapes the quote.
    #
    # TODO: Treat '\' at EOL as a token, and handle it as continuing the
    # previous line.  That is, if in a comment-to-eol, this line is a comment
    # too.
    self.re_token = re.compile(r'(^\s*#|//|/\*|\*/|(?<!(?<!\\)\\)["\'])')
+
+
def ScanFile(filename, language):
  """Scans a file for executable lines.

  Args:
    filename: Path to file to scan.
    language: Language for file ('C', 'C++', 'python', 'ObjC', 'ObjC++')

  Returns:
    A list of executable lines, or an empty list if the file was not a handled
    language.
  """
  if language == 'python':
    return PythonScanner().Scan(filename)
  if language in ('C', 'C++', 'ObjC', 'ObjC++'):
    return CppScanner().Scan(filename)
  # Something we don't handle
  return []
diff --git a/chromium/tools/code_coverage/croc_scan_test.py b/chromium/tools/code_coverage/croc_scan_test.py
new file mode 100755
index 00000000000..a69b28aac5c
--- /dev/null
+++ b/chromium/tools/code_coverage/croc_scan_test.py
@@ -0,0 +1,187 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for croc_scan.py."""
+
+import re
+import unittest
+import croc_scan
+
+
+class TestScanner(unittest.TestCase):
+ """Tests for croc_scan.Scanner."""
+
+ def testInit(self):
+ """Test __init()__."""
+ s = croc_scan.Scanner()
+
+ self.assertEqual(s.re_token.pattern, '#')
+ self.assertEqual(s.comment_to_eol, ['#'])
+ self.assertEqual(s.comment_start, None)
+ self.assertEqual(s.comment_end, None)
+
+ def testScanLines(self):
+ """Test ScanLines()."""
+ s = croc_scan.Scanner()
+ # Set up imaginary language:
+ # ':' = comment to EOL
+ # '"' = string start/end
+ # '(' = comment start
+ # ')' = comment end
+ s.re_token = re.compile(r'([\:\"\(\)])')
+ s.comment_to_eol = [':']
+ s.comment_start = '('
+ s.comment_end = ')'
+
+ # No input file = no output lines
+ self.assertEqual(s.ScanLines([]), [])
+
+ # Empty lines and lines with only whitespace are ignored
+ self.assertEqual(s.ScanLines([
+ '', # 1
+ 'line', # 2 exe
+ ' \t ', # 3
+ ]), [2])
+
+ # Comments to EOL are stripped, but not inside strings
+ self.assertEqual(s.ScanLines([
+ 'test', # 1 exe
+ ' : A comment', # 2
+ '"a : in a string"', # 3 exe
+ 'test2 : with comment to EOL', # 4 exe
+ 'foo = "a multiline string with an empty line', # 5 exe
+ '', # 6 exe
+ ': and a comment-to-EOL character"', # 7 exe
+ ': done', # 8
+ ]), [1, 3, 4, 5, 6, 7])
+
+ # Test Comment start/stop detection
+ self.assertEqual(s.ScanLines([
+ '( a comment on one line)', # 1
+ 'text (with a comment)', # 2 exe
+ '( a comment with a : in the middle)', # 3
+ '( a multi-line', # 4
+ ' comment)', # 5
+ 'a string "with a ( in it"', # 6 exe
+ 'not in a multi-line comment', # 7 exe
+ '(a comment with a " in it)', # 8
+ ': not in a string, so this gets stripped', # 9
+ 'more text "with an uninteresting string"', # 10 exe
+ ]), [2, 6, 7, 10])
+
+ # TODO: Test Scan(). Low priority, since it just wraps ScanLines().
+
+
+class TestPythonScanner(unittest.TestCase):
+ """Tests for croc_scan.PythonScanner."""
+
+ def testScanLines(self):
+ """Test ScanLines()."""
+ s = croc_scan.PythonScanner()
+
+ # No input file = no output lines
+ self.assertEqual(s.ScanLines([]), [])
+
+ self.assertEqual(s.ScanLines([
+ '# a comment', # 1
+ '', # 2
+ '"""multi-line string', # 3 exe
+ '# not a comment', # 4 exe
+ 'end of multi-line string"""', # 5 exe
+ ' ', # 6
+ '"single string with #comment"', # 7 exe
+ '', # 8
+ '\'\'\'multi-line string, single-quote', # 9 exe
+ '# not a comment', # 10 exe
+ 'end of multi-line string\'\'\'', # 11 exe
+ '', # 12
+ '"string with embedded \\" is handled"', # 13 exe
+ '# quoted "', # 14
+ '"\\""', # 15 exe
+ '# quoted backslash', # 16
+ '"\\\\"', # 17 exe
+ 'main()', # 18 exe
+ '# end', # 19
+ ]), [3, 4, 5, 7, 9, 10, 11, 13, 15, 17, 18])
+
+
+class TestCppScanner(unittest.TestCase):
+ """Tests for croc_scan.CppScanner."""
+
+ def testScanLines(self):
+ """Test ScanLines()."""
+ s = croc_scan.CppScanner()
+
+ # No input file = no output lines
+ self.assertEqual(s.ScanLines([]), [])
+
+ self.assertEqual(s.ScanLines([
+ '// a comment', # 1
+ '# a preprocessor define', # 2
+ '', # 3
+ '\'#\', \'"\'', # 4 exe
+ '', # 5
+ '/* a multi-line comment', # 6
+ 'with a " in it', # 7
+ '*/', # 8
+ '', # 9
+ '"a string with /* and \' in it"', # 10 exe
+ '', # 11
+ '"a multi-line string\\', # 12 exe
+ '// not a comment\\', # 13 exe
+ 'ending here"', # 14 exe
+ '', # 15
+ '"string with embedded \\" is handled"', # 16 exe
+ '', # 17
+ 'main()', # 18 exe
+ '// end', # 19
+ ]), [4, 10, 12, 13, 14, 16, 18])
+
+
+class TestScanFile(unittest.TestCase):
+ """Tests for croc_scan.ScanFile()."""
+
+ class MockScanner(object):
+ """Mock scanner."""
+
+ def __init__(self, language):
+ """Constructor."""
+ self.language = language
+
+ def Scan(self, filename):
+ """Mock Scan() method."""
+ return 'scan %s %s' % (self.language, filename)
+
+ def MockPythonScanner(self):
+ return self.MockScanner('py')
+
+ def MockCppScanner(self):
+ return self.MockScanner('cpp')
+
+ def setUp(self):
+ """Per-test setup."""
+ # Hook scanners
+ self.old_python_scanner = croc_scan.PythonScanner
+ self.old_cpp_scanner = croc_scan.CppScanner
+ croc_scan.PythonScanner = self.MockPythonScanner
+ croc_scan.CppScanner = self.MockCppScanner
+
+ def tearDown(self):
+ """Per-test cleanup."""
+ croc_scan.PythonScanner = self.old_python_scanner
+ croc_scan.CppScanner = self.old_cpp_scanner
+
+ def testScanFile(self):
+ """Test ScanFile()."""
+ self.assertEqual(croc_scan.ScanFile('foo', 'python'), 'scan py foo')
+ self.assertEqual(croc_scan.ScanFile('bar1', 'C'), 'scan cpp bar1')
+ self.assertEqual(croc_scan.ScanFile('bar2', 'C++'), 'scan cpp bar2')
+ self.assertEqual(croc_scan.ScanFile('bar3', 'ObjC'), 'scan cpp bar3')
+ self.assertEqual(croc_scan.ScanFile('bar4', 'ObjC++'), 'scan cpp bar4')
+ self.assertEqual(croc_scan.ScanFile('bar', 'fortran'), [])
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/code_coverage/croc_test.py b/chromium/tools/code_coverage/croc_test.py
new file mode 100755
index 00000000000..7c2521ca3df
--- /dev/null
+++ b/chromium/tools/code_coverage/croc_test.py
@@ -0,0 +1,758 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for Crocodile."""
+
+import os
+import StringIO
+import unittest
+import croc
+
+
+class TestCoverageStats(unittest.TestCase):
+ """Tests for croc.CoverageStats."""
+
+ def testAdd(self):
+ """Test Add()."""
+ c = croc.CoverageStats()
+
+ # Initially empty
+ self.assertEqual(c, {})
+
+ # Add items
+ c['a'] = 1
+ c['b'] = 0
+ self.assertEqual(c, {'a': 1, 'b': 0})
+
+ # Add dict with non-overlapping items
+ c.Add({'c': 5})
+ self.assertEqual(c, {'a': 1, 'b': 0, 'c': 5})
+
+ # Add dict with overlapping items
+ c.Add({'a': 4, 'd': 3})
+ self.assertEqual(c, {'a': 5, 'b': 0, 'c': 5, 'd': 3})
+
+
+class TestCoveredFile(unittest.TestCase):
+ """Tests for croc.CoveredFile."""
+
+ def setUp(self):
+ self.cov_file = croc.CoveredFile('bob.cc', group='source', language='C++')
+
+ def testInit(self):
+ """Test init."""
+ f = self.cov_file
+
+ # Check initial values
+ self.assertEqual(f.filename, 'bob.cc')
+ self.assertEqual(f.attrs, {'group': 'source', 'language': 'C++'})
+ self.assertEqual(f.lines, {})
+ self.assertEqual(f.stats, {})
+ self.assertEqual(f.local_path, None)
+ self.assertEqual(f.in_lcov, False)
+
+ def testUpdateCoverageEmpty(self):
+ """Test updating coverage when empty."""
+ f = self.cov_file
+ f.UpdateCoverage()
+ self.assertEqual(f.stats, {
+ 'lines_executable': 0,
+ 'lines_instrumented': 0,
+ 'lines_covered': 0,
+ 'files_executable': 1,
+ })
+
+ def testUpdateCoverageExeOnly(self):
+ """Test updating coverage when no lines are instrumented."""
+ f = self.cov_file
+ f.lines = {1: None, 2: None, 4: None}
+ f.UpdateCoverage()
+ self.assertEqual(f.stats, {
+ 'lines_executable': 3,
+ 'lines_instrumented': 0,
+ 'lines_covered': 0,
+ 'files_executable': 1,
+ })
+
+ # Now mark the file instrumented via in_lcov
+ f.in_lcov = True
+ f.UpdateCoverage()
+ self.assertEqual(f.stats, {
+ 'lines_executable': 3,
+ 'lines_instrumented': 0,
+ 'lines_covered': 0,
+ 'files_executable': 1,
+ 'files_instrumented': 1,
+ })
+
+ def testUpdateCoverageExeAndInstr(self):
+ """Test updating coverage when no lines are covered."""
+ f = self.cov_file
+ f.lines = {1: None, 2: None, 4: 0, 5: 0, 7: None}
+ f.UpdateCoverage()
+ self.assertEqual(f.stats, {
+ 'lines_executable': 5,
+ 'lines_instrumented': 2,
+ 'lines_covered': 0,
+ 'files_executable': 1,
+ 'files_instrumented': 1,
+ })
+
+ def testUpdateCoverageWhenCovered(self):
+ """Test updating coverage when lines are covered."""
+ f = self.cov_file
+ f.lines = {1: None, 2: None, 3: 1, 4: 0, 5: 0, 6: 1, 7: None}
+ f.UpdateCoverage()
+ self.assertEqual(f.stats, {
+ 'lines_executable': 7,
+ 'lines_instrumented': 4,
+ 'lines_covered': 2,
+ 'files_executable': 1,
+ 'files_instrumented': 1,
+ 'files_covered': 1,
+ })
+
+
+class TestCoveredDir(unittest.TestCase):
+ """Tests for croc.CoveredDir."""
+
+ def setUp(self):
+ self.cov_dir = croc.CoveredDir('/a/b/c')
+
+ def testInit(self):
+ """Test init."""
+ d = self.cov_dir
+
+ # Check initial values
+ self.assertEqual(d.dirpath, '/a/b/c')
+ self.assertEqual(d.files, {})
+ self.assertEqual(d.subdirs, {})
+ self.assertEqual(d.stats_by_group, {'all': {}})
+
+ def testGetTreeEmpty(self):
+ """Test getting empty tree."""
+ d = self.cov_dir
+ self.assertEqual(d.GetTree(), 'c/')
+
+ def testGetTreeStats(self):
+ """Test getting tree with stats."""
+ d = self.cov_dir
+ d.stats_by_group['all'] = croc.CoverageStats(
+ lines_executable=50, lines_instrumented=30, lines_covered=20)
+ d.stats_by_group['bar'] = croc.CoverageStats(
+ lines_executable=0, lines_instrumented=0, lines_covered=0)
+ d.stats_by_group['foo'] = croc.CoverageStats(
+ lines_executable=33, lines_instrumented=22, lines_covered=11)
+ # 'bar' group is skipped because it has no executable lines
+ self.assertEqual(
+ d.GetTree(),
+ 'c/ all:20/30/50 foo:11/22/33')
+
+ def testGetTreeSubdir(self):
+ """Test getting tree with subdirs."""
+ d1 = self.cov_dir = croc.CoveredDir('/a')
+ d2 = self.cov_dir = croc.CoveredDir('/a/b')
+ d3 = self.cov_dir = croc.CoveredDir('/a/c')
+ d4 = self.cov_dir = croc.CoveredDir('/a/b/d')
+ d5 = self.cov_dir = croc.CoveredDir('/a/b/e')
+ d1.subdirs = {'/a/b': d2, '/a/c': d3}
+ d2.subdirs = {'/a/b/d': d4, '/a/b/e': d5}
+ self.assertEqual(d1.GetTree(), 'a/\n b/\n d/\n e/\n c/')
+
+
+class TestCoverage(unittest.TestCase):
+ """Tests for croc.Coverage."""
+
+ def MockWalk(self, src_dir):
+ """Mock for os.walk().
+
+ Args:
+ src_dir: Source directory to walk.
+
+ Returns:
+ A list of (dirpath, dirnames, filenames) tuples.
+ """
+ self.mock_walk_calls.append(src_dir)
+ return self.mock_walk_return
+
+ def MockScanFile(self, filename, language):
+ """Mock for croc_scan.ScanFile().
+
+ Args:
+ filename: Path to file to scan.
+ language: Language for file.
+
+ Returns:
+ A list of executable lines.
+ """
+ self.mock_scan_calls.append([filename, language])
+ if filename in self.mock_scan_return:
+ return self.mock_scan_return[filename]
+ else:
+ return self.mock_scan_return['default']
+
+ def setUp(self):
+ """Per-test setup."""
+
+ # Empty coverage object
+ self.cov = croc.Coverage()
+
+ # Coverage object with minimal setup
+ self.cov_minimal = croc.Coverage()
+ self.cov_minimal.AddRoot('/src')
+ self.cov_minimal.AddRoot('c:\\source')
+ self.cov_minimal.AddRule('^_/', include=1, group='my')
+ self.cov_minimal.AddRule('.*\\.c$', language='C')
+ self.cov_minimal.AddRule('.*\\.c##$', language='C##') # sharper than thou
+
+ # Data for MockWalk()
+ self.mock_walk_calls = []
+ self.mock_walk_return = []
+
+ # Data for MockScanFile()
+ self.mock_scan_calls = []
+ self.mock_scan_return = {'default': [1]}
+
+ def testInit(self):
+ """Test init."""
+ c = self.cov
+ self.assertEqual(c.files, {})
+ self.assertEqual(c.root_dirs, [])
+ self.assertEqual(c.print_stats, [])
+ self.assertEqual(c.rules, [])
+
+ def testAddRoot(self):
+ """Test AddRoot() and CleanupFilename()."""
+ c = self.cov
+
+ # Check for identity on already-clean filenames
+ self.assertEqual(c.CleanupFilename(''), '')
+ self.assertEqual(c.CleanupFilename('a'), 'a')
+ self.assertEqual(c.CleanupFilename('.a'), '.a')
+ self.assertEqual(c.CleanupFilename('..a'), '..a')
+ self.assertEqual(c.CleanupFilename('a.b'), 'a.b')
+ self.assertEqual(c.CleanupFilename('a/b/c'), 'a/b/c')
+ self.assertEqual(c.CleanupFilename('a/b/c/'), 'a/b/c/')
+
+ # Backslash to forward slash
+ self.assertEqual(c.CleanupFilename('a\\b\\c'), 'a/b/c')
+
+ # Handle relative paths
+ self.assertEqual(c.CleanupFilename('.'),
+ c.CleanupFilename(os.path.abspath('.')))
+ self.assertEqual(c.CleanupFilename('..'),
+ c.CleanupFilename(os.path.abspath('..')))
+ self.assertEqual(c.CleanupFilename('./foo/bar'),
+ c.CleanupFilename(os.path.abspath('./foo/bar')))
+ self.assertEqual(c.CleanupFilename('../../a/b/c'),
+ c.CleanupFilename(os.path.abspath('../../a/b/c')))
+
+ # Replace alt roots
+ c.AddRoot('foo')
+ self.assertEqual(c.CleanupFilename('foo'), '_')
+ self.assertEqual(c.CleanupFilename('foo/bar/baz'), '_/bar/baz')
+ self.assertEqual(c.CleanupFilename('aaa/foo'), 'aaa/foo')
+
+ # Alt root replacement is applied for all roots
+ c.AddRoot('foo/bar', '_B')
+ self.assertEqual(c.CleanupFilename('foo/bar/baz'), '_B/baz')
+
+ # Can use previously defined roots in cleanup
+ c.AddRoot('_/nom/nom/nom', '_CANHAS')
+ self.assertEqual(c.CleanupFilename('foo/nom/nom/nom/cheezburger'),
+ '_CANHAS/cheezburger')
+
+ # Verify roots starting with UNC paths or drive letters work, and that
+ # more than one root can point to the same alt_name
+ c.AddRoot('/usr/local/foo', '_FOO')
+ c.AddRoot('D:\\my\\foo', '_FOO')
+ self.assertEqual(c.CleanupFilename('/usr/local/foo/a/b'), '_FOO/a/b')
+ self.assertEqual(c.CleanupFilename('D:\\my\\foo\\c\\d'), '_FOO/c/d')
+
+ # Cannot specify a blank alt_name
+ self.assertRaises(ValueError, c.AddRoot, 'some_dir', '')
+
+ def testAddRule(self):
+ """Test AddRule() and ClassifyFile()."""
+ c = self.cov
+
+ # With only the default rule, nothing gets kept
+ self.assertEqual(c.ClassifyFile('_/src/'), {})
+ self.assertEqual(c.ClassifyFile('_/src/a.c'), {})
+
+ # Add rules to include a tree and set a default group
+ c.AddRule('^_/src/', include=1, group='source')
+ self.assertEqual(c.ClassifyFile('_/src/'),
+ {'include': 1, 'group': 'source'})
+ self.assertEqual(c.ClassifyFile('_/notsrc/'), {})
+ self.assertEqual(c.ClassifyFile('_/src/a.c'),
+ {'include': 1, 'group': 'source'})
+
+ # Define some languages and groups
+ c.AddRule('.*\\.(c|h)$', language='C')
+ c.AddRule('.*\\.py$', language='Python')
+ c.AddRule('.*_test\\.', group='test')
+ self.assertEqual(c.ClassifyFile('_/src/a.c'),
+ {'include': 1, 'group': 'source', 'language': 'C'})
+ self.assertEqual(c.ClassifyFile('_/src/a.h'),
+ {'include': 1, 'group': 'source', 'language': 'C'})
+ self.assertEqual(c.ClassifyFile('_/src/a.cpp'),
+ {'include': 1, 'group': 'source'})
+ self.assertEqual(c.ClassifyFile('_/src/a_test.c'),
+ {'include': 1, 'group': 'test', 'language': 'C'})
+ self.assertEqual(c.ClassifyFile('_/src/test_a.c'),
+ {'include': 1, 'group': 'source', 'language': 'C'})
+ self.assertEqual(c.ClassifyFile('_/src/foo/bar.py'),
+ {'include': 1, 'group': 'source', 'language': 'Python'})
+ self.assertEqual(c.ClassifyFile('_/src/test.py'),
+ {'include': 1, 'group': 'source', 'language': 'Python'})
+
+ # Exclude a path (for example, anything in a build output dir)
+ c.AddRule('.*/build/', include=0)
+ # But add back in a dir which matched the above rule but isn't a build
+ # output dir
+ c.AddRule('_/src/tools/build/', include=1)
+ self.assertEqual(c.ClassifyFile('_/src/build.c').get('include'), 1)
+ self.assertEqual(c.ClassifyFile('_/src/build/').get('include'), 0)
+ self.assertEqual(c.ClassifyFile('_/src/build/a.c').get('include'), 0)
+ self.assertEqual(c.ClassifyFile('_/src/tools/build/').get('include'), 1)
+ self.assertEqual(c.ClassifyFile('_/src/tools/build/t.c').get('include'), 1)
+
+ def testGetCoveredFile(self):
+ """Test GetCoveredFile()."""
+ c = self.cov_minimal
+
+ # Not currently any covered files
+ self.assertEqual(c.GetCoveredFile('_/a.c'), None)
+
+ # Add some files
+ a_c = c.GetCoveredFile('_/a.c', add=True)
+ b_c = c.GetCoveredFile('_/b.c##', add=True)
+ self.assertEqual(a_c.filename, '_/a.c')
+ self.assertEqual(a_c.attrs, {'include': 1, 'group': 'my', 'language': 'C'})
+ self.assertEqual(b_c.filename, '_/b.c##')
+ self.assertEqual(b_c.attrs,
+ {'include': 1, 'group': 'my', 'language': 'C##'})
+
+ # Specifying the same filename should return the existing object
+ self.assertEqual(c.GetCoveredFile('_/a.c'), a_c)
+ self.assertEqual(c.GetCoveredFile('_/a.c', add=True), a_c)
+
+ # Filenames get cleaned on the way in, as do root paths
+ self.assertEqual(c.GetCoveredFile('/src/a.c'), a_c)
+ self.assertEqual(c.GetCoveredFile('c:\\source\\a.c'), a_c)
+
+ # TODO: Make sure that covered files require language, group, and include
+ # (since that checking is now done in GetCoveredFile() rather than
+ # ClassifyFile())
+
+ def testRemoveCoveredFile(self):
+ """Test RemoveCoveredFile()."""
+ # TODO: TEST ME!
+
+ def testParseLcov(self):
+ """Test ParseLcovData()."""
+ c = self.cov_minimal
+
+ c.ParseLcovData([
+ '# Ignore unknown lines',
+        # File we should include
+ 'SF:/src/a.c',
+ 'DA:10,1',
+ 'DA:11,0',
+ 'DA:12,1 \n', # Trailing whitespace should get stripped
+ 'end_of_record',
+ # File we should ignore
+ 'SF:/not_src/a.c',
+ 'DA:20,1',
+ 'end_of_record',
+ # Same as first source file, but alternate root
+ 'SF:c:\\source\\a.c',
+ 'DA:30,1',
+ 'end_of_record',
+ # Ignore extra end of record
+ 'end_of_record',
+ # Ignore data points after end of record
+ 'DA:40,1',
+ # Instrumented but uncovered file
+ 'SF:/src/b.c',
+ 'DA:50,0',
+ 'end_of_record',
+ # Empty file (instrumented but no executable lines)
+ 'SF:c:\\source\\c.c',
+ 'end_of_record',
+ ])
+
+ # We should know about three files
+ self.assertEqual(sorted(c.files), ['_/a.c', '_/b.c', '_/c.c'])
+
+ # Check expected contents
+ a_c = c.GetCoveredFile('_/a.c')
+ self.assertEqual(a_c.lines, {10: 1, 11: 0, 12: 1, 30: 1})
+ self.assertEqual(a_c.stats, {
+ 'files_executable': 1,
+ 'files_instrumented': 1,
+ 'files_covered': 1,
+ 'lines_instrumented': 4,
+ 'lines_executable': 4,
+ 'lines_covered': 3,
+ })
+ self.assertEqual(a_c.in_lcov, True)
+
+ b_c = c.GetCoveredFile('_/b.c')
+ self.assertEqual(b_c.lines, {50: 0})
+ self.assertEqual(b_c.stats, {
+ 'files_executable': 1,
+ 'files_instrumented': 1,
+ 'lines_instrumented': 1,
+ 'lines_executable': 1,
+ 'lines_covered': 0,
+ })
+ self.assertEqual(b_c.in_lcov, True)
+
+ c_c = c.GetCoveredFile('_/c.c')
+ self.assertEqual(c_c.lines, {})
+ self.assertEqual(c_c.stats, {
+ 'files_executable': 1,
+ 'files_instrumented': 1,
+ 'lines_instrumented': 0,
+ 'lines_executable': 0,
+ 'lines_covered': 0,
+ })
+ self.assertEqual(c_c.in_lcov, True)
+
+ # TODO: Test that files are marked as instrumented if they come from lcov,
+ # even if they don't have any instrumented lines. (and that in_lcov is set
+ # for those files - probably should set that via some method rather than
+ # directly...)
+
+ def testGetStat(self):
+ """Test GetStat() and PrintStat()."""
+ c = self.cov
+
+ # Add some stats, so there's something to report
+ c.tree.stats_by_group = {
+ 'all': {
+ 'count_a': 10,
+ 'count_b': 4,
+ 'foo': 'bar',
+ },
+ 'tests': {
+ 'count_a': 2,
+ 'count_b': 5,
+ 'baz': 'bob',
+ },
+ }
+
+ # Test missing stats and groups
+ self.assertRaises(croc.CrocStatError, c.GetStat, 'nosuch')
+ self.assertRaises(croc.CrocStatError, c.GetStat, 'baz')
+ self.assertRaises(croc.CrocStatError, c.GetStat, 'foo', group='tests')
+ self.assertRaises(croc.CrocStatError, c.GetStat, 'foo', group='nosuch')
+
+ # Test returning defaults
+ self.assertEqual(c.GetStat('nosuch', default=13), 13)
+ self.assertEqual(c.GetStat('baz', default='aaa'), 'aaa')
+ self.assertEqual(c.GetStat('foo', group='tests', default=0), 0)
+ self.assertEqual(c.GetStat('foo', group='nosuch', default=''), '')
+
+ # Test getting stats
+ self.assertEqual(c.GetStat('count_a'), 10)
+ self.assertEqual(c.GetStat('count_a', group='tests'), 2)
+ self.assertEqual(c.GetStat('foo', default='baz'), 'bar')
+
+ # Test stat math (eval)
+ self.assertEqual(c.GetStat('count_a - count_b'), 6)
+ self.assertEqual(c.GetStat('100.0 * count_a / count_b', group='tests'),
+ 40.0)
+ # Should catch eval errors
+ self.assertRaises(croc.CrocStatError, c.GetStat, '100 / 0')
+ self.assertRaises(croc.CrocStatError, c.GetStat, 'count_a -')
+
+ # Test nested stats via S()
+ self.assertEqual(c.GetStat('count_a - S("count_a", group="tests")'), 8)
+ self.assertRaises(croc.CrocStatError, c.GetStat, 'S()')
+ self.assertRaises(croc.CrocStatError, c.GetStat, 'S("nosuch")')
+
+ # Test PrintStat()
+ # We won't see the first print, but at least verify it doesn't assert
+ c.PrintStat('count_a', format='(test to stdout: %s)')
+ # Send subsequent prints to a file
+ f = StringIO.StringIO()
+ c.PrintStat('count_b', outfile=f)
+ # Test specifying output format
+ c.PrintStat('count_a', format='Count A = %05d', outfile=f)
+    # Test specifying additional keyword args
+ c.PrintStat('count_a', group='tests', outfile=f)
+ c.PrintStat('nosuch', default=42, outfile=f)
+ self.assertEqual(f.getvalue(), ("""\
+GetStat('count_b') = 4
+Count A = 00010
+GetStat('count_a') = 2
+GetStat('nosuch') = 42
+"""))
+ f.close()
+
+ def testAddConfigEmpty(self):
+ """Test AddConfig() with empty config."""
+ c = self.cov
+ # Most minimal config is an empty dict; should do nothing
+ c.AddConfig('{} # And we ignore comments')
+
+ def testAddConfig(self):
+ """Test AddConfig()."""
+ c = self.cov
+ lcov_queue = []
+ addfiles_queue = []
+
+ c.AddConfig("""{
+ 'roots' : [
+ {'root' : '/foo'},
+ {'root' : '/bar', 'altname' : 'BAR'},
+ ],
+ 'rules' : [
+ {'regexp' : '^_/', 'group' : 'apple'},
+ {'regexp' : 're2', 'include' : 1, 'language' : 'elvish'},
+ ],
+ 'lcov_files' : ['a.lcov', 'b.lcov'],
+ 'add_files' : ['/src', 'BAR/doo'],
+ 'print_stats' : [
+ {'stat' : 'count_a'},
+ {'stat' : 'count_b', 'group' : 'tests'},
+ ],
+ 'extra_key' : 'is ignored',
+ }""", lcov_queue=lcov_queue, addfiles_queue=addfiles_queue)
+
+ self.assertEqual(lcov_queue, ['a.lcov', 'b.lcov'])
+ self.assertEqual(addfiles_queue, ['/src', 'BAR/doo'])
+ self.assertEqual(c.root_dirs, [['/foo', '_'], ['/bar', 'BAR']])
+ self.assertEqual(c.print_stats, [
+ {'stat': 'count_a'},
+ {'stat': 'count_b', 'group': 'tests'},
+ ])
+ # Convert compiled re's back to patterns for comparison
+ rules = [[r[0].pattern] + r[1:] for r in c.rules]
+ self.assertEqual(rules, [
+ ['^_/', {'group': 'apple'}],
+ ['re2', {'include': 1, 'language': 'elvish'}],
+ ])
+
+ def testAddFilesSimple(self):
+ """Test AddFiles() simple call."""
+ c = self.cov_minimal
+ c.add_files_walk = self.MockWalk
+ c.scan_file = self.MockScanFile
+
+ c.AddFiles('/a/b/c')
+ self.assertEqual(self.mock_walk_calls, ['/a/b/c'])
+ self.assertEqual(self.mock_scan_calls, [])
+ self.assertEqual(c.files, {})
+
+ def testAddFilesRootMap(self):
+ """Test AddFiles() with root mappings."""
+ c = self.cov_minimal
+ c.add_files_walk = self.MockWalk
+ c.scan_file = self.MockScanFile
+
+ c.AddRoot('_/subdir', 'SUBDIR')
+
+ # AddFiles() should replace the 'SUBDIR' alt_name, then match both
+ # possible roots for the '_' alt_name.
+ c.AddFiles('SUBDIR/foo')
+ self.assertEqual(self.mock_walk_calls,
+ ['/src/subdir/foo', 'c:/source/subdir/foo'])
+ self.assertEqual(self.mock_scan_calls, [])
+ self.assertEqual(c.files, {})
+
+ def testAddFilesNonEmpty(self):
+ """Test AddFiles() where files are returned."""
+
+ c = self.cov_minimal
+ c.add_files_walk = self.MockWalk
+ c.scan_file = self.MockScanFile
+
+ # Add a rule to exclude a subdir
+ c.AddRule('^_/proj1/excluded/', include=0)
+
+    # Add a rule to exclude adding some files
+ c.AddRule('.*noscan.c$', add_if_missing=0)
+
+ # Set data for mock walk and scan
+ self.mock_walk_return = [
+ [
+ '/src/proj1',
+ ['excluded', 'subdir'],
+ ['a.c', 'no.f', 'yes.c', 'noexe.c', 'bob_noscan.c'],
+ ],
+ [
+ '/src/proj1/subdir',
+ [],
+ ['cherry.c'],
+ ],
+ ]
+
+ # Add a file with no executable lines; it should be scanned but not added
+ self.mock_scan_return['/src/proj1/noexe.c'] = []
+
+ c.AddFiles('/src/proj1')
+
+ self.assertEqual(self.mock_walk_calls, ['/src/proj1'])
+ self.assertEqual(self.mock_scan_calls, [
+ ['/src/proj1/a.c', 'C'],
+ ['/src/proj1/yes.c', 'C'],
+ ['/src/proj1/noexe.c', 'C'],
+ ['/src/proj1/subdir/cherry.c', 'C'],
+ ])
+
+ # Include files from the main dir and subdir
+ self.assertEqual(sorted(c.files), [
+ '_/proj1/a.c',
+ '_/proj1/subdir/cherry.c',
+ '_/proj1/yes.c'])
+
+ # Excluded dir should have been pruned from the mock walk data dirnames.
+ # In the real os.walk() call this prunes the walk.
+ self.assertEqual(self.mock_walk_return[0][1], ['subdir'])
+
+
+ def testEmptyTreeStats(self):
+ """Make sure we don't choke when absolutely nothing happened.
+
+ How we might hit this: bot compile error."""
+ c = self.cov_minimal
+ t = c.tree
+ t.stats_by_group['all'].AddDefaults()
+ self.assertEqual(t.stats_by_group, {
+ 'all': { 'files_covered': 0,
+ 'files_instrumented': 0,
+ 'files_executable': 0,
+ 'lines_covered': 0,
+ 'lines_instrumented': 0,
+ 'lines_executable': 0 }})
+
+ def testUpdateTreeStats(self):
+ """Test UpdateTreeStats()."""
+
+ c = self.cov_minimal
+ c.AddRule('.*_test', group='test')
+
+ # Fill the files list
+ c.ParseLcovData([
+ 'SF:/src/a.c',
+ 'DA:10,1', 'DA:11,1', 'DA:20,0',
+ 'end_of_record',
+ 'SF:/src/a_test.c',
+ 'DA:10,1', 'DA:11,1', 'DA:12,1',
+ 'end_of_record',
+ 'SF:/src/foo/b.c',
+ 'DA:10,1', 'DA:11,1', 'DA:20,0', 'DA:21,0', 'DA:30,0',
+ 'end_of_record',
+ 'SF:/src/foo/b_test.c',
+ 'DA:20,0', 'DA:21,0', 'DA:22,0',
+ 'end_of_record',
+ ])
+ c.UpdateTreeStats()
+
+ t = c.tree
+ self.assertEqual(t.dirpath, '')
+ self.assertEqual(sorted(t.files), [])
+ self.assertEqual(sorted(t.subdirs), ['_'])
+ self.assertEqual(t.stats_by_group, {
+ 'all': {
+ 'files_covered': 3,
+ 'files_executable': 4,
+ 'lines_executable': 14,
+ 'lines_covered': 7,
+ 'lines_instrumented': 14,
+ 'files_instrumented': 4,
+ },
+ 'my': {
+ 'files_covered': 2,
+ 'files_executable': 2,
+ 'lines_executable': 8,
+ 'lines_covered': 4,
+ 'lines_instrumented': 8,
+ 'files_instrumented': 2,
+ },
+ 'test': {
+ 'files_covered': 1,
+ 'files_executable': 2,
+ 'lines_executable': 6,
+ 'lines_covered': 3,
+ 'lines_instrumented': 6,
+ 'files_instrumented': 2,
+ },
+ })
+
+ t = t.subdirs['_']
+ self.assertEqual(t.dirpath, '_')
+ self.assertEqual(sorted(t.files), ['a.c', 'a_test.c'])
+ self.assertEqual(sorted(t.subdirs), ['foo'])
+ self.assertEqual(t.stats_by_group, {
+ 'all': {
+ 'files_covered': 3,
+ 'files_executable': 4,
+ 'lines_executable': 14,
+ 'lines_covered': 7,
+ 'lines_instrumented': 14,
+ 'files_instrumented': 4,
+ },
+ 'my': {
+ 'files_covered': 2,
+ 'files_executable': 2,
+ 'lines_executable': 8,
+ 'lines_covered': 4,
+ 'lines_instrumented': 8,
+ 'files_instrumented': 2,
+ },
+ 'test': {
+ 'files_covered': 1,
+ 'files_executable': 2,
+ 'lines_executable': 6,
+ 'lines_covered': 3,
+ 'lines_instrumented': 6,
+ 'files_instrumented': 2,
+ },
+ })
+
+ t = t.subdirs['foo']
+ self.assertEqual(t.dirpath, '_/foo')
+ self.assertEqual(sorted(t.files), ['b.c', 'b_test.c'])
+ self.assertEqual(sorted(t.subdirs), [])
+ self.assertEqual(t.stats_by_group, {
+ 'test': {
+ 'files_executable': 1,
+ 'files_instrumented': 1,
+ 'lines_executable': 3,
+ 'lines_instrumented': 3,
+ 'lines_covered': 0,
+ },
+ 'all': {
+ 'files_covered': 1,
+ 'files_executable': 2,
+ 'lines_executable': 8,
+ 'lines_covered': 2,
+ 'lines_instrumented': 8,
+ 'files_instrumented': 2,
+ },
+ 'my': {
+ 'files_covered': 1,
+ 'files_executable': 1,
+ 'lines_executable': 5,
+ 'lines_covered': 2,
+ 'lines_instrumented': 5,
+ 'files_instrumented': 1,
+ }
+ })
+
+ # TODO: test: less important, since these are thin wrappers around other
+ # tested methods.
+ # ParseConfig()
+ # ParseLcovFile()
+ # PrintTree()
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/code_coverage/example.croc b/chromium/tools/code_coverage/example.croc
new file mode 100644
index 00000000000..f2fc2ce5c16
--- /dev/null
+++ b/chromium/tools/code_coverage/example.croc
@@ -0,0 +1,197 @@
+# -*- python -*-
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Example configuration file for Croc
+
+# Basic formatting rules:
+# * It looks like JSON.
+# * It's really python.
+# * Dictionaries are wrapped in {}. Order does not matter. Entries are of
+# the form:
+# 'key':value,
+# Note the trailing comma, which will help save you from python's built-in
+# string concatenation.
+# * Lists are wrapped in []. Order does matter. Entries should be followed
+# with a trailing comma, which will help save you from python's built-in
+# string concatenation.
+# * Comments start with # and extend to end of line.
+# * Strings are wrapped in ''. Backslashes must be escaped ('foo\\bar', not
+# 'foo\bar') - this is particularly important in rule regular expressions.
+
+
+# What follows is the main configuration dictionary.
+{
+ # List of root directories, applied in order.
+ #
+ # Typically, coverage data files contain absolute paths to the sources.
+ # What you care about is usually a relative path from the top of your source
+ # tree (referred to here as a 'source root') to the sources.
+ #
+ # Roots may also be specified on the command line via the --root option.
+ # Roots specified by --root are applied before those specified in config
+ # files.
+ 'roots' : [
+ # Each entry is a dict.
+ # * It must contain a 'root' entry, which is the start of a path.
+ # * Root entries may be absolute paths
+ # * Root entries starting with './' or '../' are relative paths, and
+ # are taken relative to the current directory where you run croc.
+ # * Root entries may start with previously defined altnames.
+ # * Use '/' as a path separator, even on Windows.
+ # * It may contain a 'altname' entry. If the root matches the start of
+ # a filename, that start is replaced with the 'altname', or with '_'
+ # if no default is specified.
+ # * Multiple root entries may share the same altname. This is commonly
+ # used when combining LCOV files from different platforms into one
+ # coverage report, when each platform checks out source code into a
+ # different source tree.
+ {'root' : 'c:/P4/EarthHammer'},
+ {'root' : 'd:/pulse/recipes/330137642/base'},
+ {'root' : '/Volumes/BuildData/PulseData/data/recipes/330137640/base'},
+ {'root' : '/usr/local/google/builder/.pulse-agent/data/recipes/330137641/base'},
+
+ # Sub-paths we specifically care about and want to call out. Note that
+ # these are relative to the default '_' altname.
+ {
+ 'root' : '_/googleclient/third_party/software_construction_toolkit/files',
+ 'altname' : 'SCT',
+ },
+ {
+ 'root' : '_/googleclient/tools/hammer',
+ 'altname' : 'HAMMER',
+ },
+ ],
+
+ # List of rules, applied in order.
+ 'rules' : [
+ # Each rule is a dict.
+    # * It must contain a 'regexp' entry. Filenames which match this
+ # regular expression (after applying mappings from 'roots') are
+ # affected by the rule.
+ #
+ # * Other entries in the dict are attributes to apply to matching files.
+ #
+ # Allowed attributes:
+ #
+ # 'include' : If 1, the file will be included in coverage reports. If 0,
+ # it won't be included in coverage reports.
+ #
+ # 'group' : Name of the group the file belongs to. The most common
+ # group names are 'source' and 'test'. Files must belong to
+ # a group to be included in coverage reports.
+ #
+ # 'language' : Programming language for the file. The most common
+ # languages are 'C', 'C++', 'python', 'ObjC', 'ObjC++'.
+ # Files must have a language to be included in coverage
+ # reports.
+ #
+ # 'add_if_missing' : If 1, and the file was not referenced by any LCOV
+    #                     files, it will be scanned for executable lines
+ # and added to the coverage report. If 0, if the
+ # file is not referenced by any LCOV files, it will
+ # simply be ignored and not present in coverage
+ # reports.
+
+ # Files/paths to include
+ {
+ 'regexp' : '^(SCT|HAMMER)/',
+ 'include' : 1,
+ 'add_if_missing': 1,
+ },
+ {
+ 'regexp' : '.*/(\\.svn|\\.hg)/',
+ 'include' : 0,
+ },
+
+ # Groups
+ {
+ 'regexp' : '',
+ 'group' : 'source',
+ },
+ {
+ 'regexp' : '.*_(test|test_mac|unittest)\\.',
+ 'group' : 'test',
+ },
+
+ # Languages
+ {
+ 'regexp' : '.*\\.py$',
+ 'language' : 'python',
+ },
+ ],
+
+ # List of paths to add source from.
+ #
+ # Each entry is a path. It may be a local path, or one relative to a root
+ # altname (see 'roots' above).
+ #
+ # If more than one root's altname matches the start of this path, all matches
+ # will be attempted; matches where the candidate directory doesn't exist will
+ # be ignored. For example, if you're combining data from multiple platforms'
+ # LCOV files, you probably defined at least one root per LCOV, but only have
+ # one copy of the source on your local platform. That's fine; Croc will use
+ # the source it can find and not worry about the source it can't.
+ #
+ # Source files must be added via 'add_files' to generate line-by-line HTML
+ # output (via the --html option) and/or to scan for missing executable lines
+ # (if 'add_if_missing' is 1).
+ 'add_files' : [
+ 'SCT',
+ 'HAMMER',
+ ],
+
+ # Statistics to print.
+ #
+ 'print_stats' : [
+ # Each entry is a dict.
+ #
+ # It must have a 'stat' entry, which is the statistic to print. This may
+ # be one of the following stats:
+ #
+ # * files_executable
+ # * files_instrumented
+ # * files_covered
+ # * lines_executable
+ # * lines_instrumented
+ # * lines_covered
+ #
+ # or an expression using those stats.
+ #
+ # It may have a 'format' entry, which is a python formatting string (very
+ # printf-like) for the statistic.
+ #
+ # It may have a 'group' entry. If this is specified, only files from the
+ # matching group will be included in the statistic. If not specified, the
+ # group defaults to 'all', which means all groups.
+ {
+ 'stat' : 'files_executable',
+ 'format' : '*RESULT FilesKnown: files_executable= %d files',
+ },
+ {
+ 'stat' : 'files_instrumented',
+ 'format' : '*RESULT FilesInstrumented: files_instrumented= %d files',
+ },
+ {
+ 'stat' : '100.0 * files_instrumented / files_executable',
+ 'format' : '*RESULT FilesInstrumentedPercent: files_instrumented_percent= %g',
+ },
+ {
+ 'stat' : 'lines_instrumented',
+ 'format' : '*RESULT LinesInstrumented: lines_instrumented= %d lines',
+ },
+ {
+ 'stat' : 'lines_covered',
+ 'format' : '*RESULT LinesCoveredSource: lines_covered_source= %d lines',
+ 'group' : 'source',
+ },
+ {
+ 'stat' : 'lines_covered',
+ 'format' : '*RESULT LinesCoveredTest: lines_covered_test= %d lines',
+ 'group' : 'test',
+ },
+ ],
+
+}
diff --git a/chromium/tools/code_coverage/third_party/README.chromium b/chromium/tools/code_coverage/third_party/README.chromium
new file mode 100644
index 00000000000..a4925525c89
--- /dev/null
+++ b/chromium/tools/code_coverage/third_party/README.chromium
@@ -0,0 +1,11 @@
+Name: SortTable
+Short Name: sorttable.js
+URL: http://www.kryogenix.org/code/browser/sorttable/
+Version: 2
+Date: 7th April 2007
+License: Licenced as X11: http://www.kryogenix.org/code/browser/licence.html
+
+Description:
+Add <script src="sorttable.js"></script> to your HTML
+Add class="sortable" to any table you'd like to make sortable
+Click on the headers to sort
diff --git a/chromium/tools/code_coverage/third_party/sorttable.js b/chromium/tools/code_coverage/third_party/sorttable.js
new file mode 100644
index 00000000000..16ef551497b
--- /dev/null
+++ b/chromium/tools/code_coverage/third_party/sorttable.js
@@ -0,0 +1,494 @@
+/*
+ SortTable
+ version 2
+ 7th April 2007
+ Stuart Langridge, http://www.kryogenix.org/code/browser/sorttable/
+
+ Instructions:
+ Download this file
+ Add <script src="sorttable.js"></script> to your HTML
+ Add class="sortable" to any table you'd like to make sortable
+ Click on the headers to sort
+
+ Thanks to many, many people for contributions and suggestions.
+ Licenced as X11: http://www.kryogenix.org/code/browser/licence.html
+ This basically means: do what you want with it.
+*/
+
+
+var stIsIE = /*@cc_on!@*/false;
+
+sorttable = {
+ init: function() {
+ // quit if this function has already been called
+ if (arguments.callee.done) return;
+ // flag this function so we don't do the same thing twice
+ arguments.callee.done = true;
+ // kill the timer
+ if (_timer) clearInterval(_timer);
+
+ if (!document.createElement || !document.getElementsByTagName) return;
+
+ sorttable.DATE_RE = /^(\d\d?)[\/\.-](\d\d?)[\/\.-]((\d\d)?\d\d)$/;
+
+ forEach(document.getElementsByTagName('table'), function(table) {
+ if (table.className.search(/\bsortable\b/) != -1) {
+ sorttable.makeSortable(table);
+ }
+ });
+
+ },
+
+ makeSortable: function(table) {
+ if (table.getElementsByTagName('thead').length == 0) {
+ // table doesn't have a tHead. Since it should have, create one and
+ // put the first table row in it.
+ the = document.createElement('thead');
+ the.appendChild(table.rows[0]);
+ table.insertBefore(the,table.firstChild);
+ }
+ // Safari doesn't support table.tHead, sigh
+ if (table.tHead == null) table.tHead = table.getElementsByTagName('thead')[0];
+
+ if (table.tHead.rows.length != 1) return; // can't cope with two header rows
+
+ // Sorttable v1 put rows with a class of "sortbottom" at the bottom (as
+ // "total" rows, for example). This is B&R, since what you're supposed
+ // to do is put them in a tfoot. So, if there are sortbottom rows,
+ // for backwards compatibility, move them to tfoot (creating it if needed).
+ sortbottomrows = [];
+ for (var i=0; i<table.rows.length; i++) {
+ if (table.rows[i].className.search(/\bsortbottom\b/) != -1) {
+ sortbottomrows[sortbottomrows.length] = table.rows[i];
+ }
+ }
+ if (sortbottomrows) {
+ if (table.tFoot == null) {
+ // table doesn't have a tfoot. Create one.
+ tfo = document.createElement('tfoot');
+ table.appendChild(tfo);
+ }
+ for (var i=0; i<sortbottomrows.length; i++) {
+ tfo.appendChild(sortbottomrows[i]);
+ }
+ delete sortbottomrows;
+ }
+
+ // work through each column and calculate its type
+ headrow = table.tHead.rows[0].cells;
+ for (var i=0; i<headrow.length; i++) {
+ // manually override the type with a sorttable_type attribute
+ if (!headrow[i].className.match(/\bsorttable_nosort\b/)) { // skip this col
+ mtch = headrow[i].className.match(/\bsorttable_([a-z0-9]+)\b/);
+ if (mtch) { override = mtch[1]; }
+ if (mtch && typeof sorttable["sort_"+override] == 'function') {
+ headrow[i].sorttable_sortfunction = sorttable["sort_"+override];
+ } else {
+ headrow[i].sorttable_sortfunction = sorttable.guessType(table,i);
+ }
+ // make it clickable to sort
+ headrow[i].sorttable_columnindex = i;
+ headrow[i].sorttable_tbody = table.tBodies[0];
+ dean_addEvent(headrow[i],"click", function(e) {
+
+ if (this.className.search(/\bsorttable_sorted\b/) != -1) {
+ // if we're already sorted by this column, just
+ // reverse the table, which is quicker
+ sorttable.reverse(this.sorttable_tbody);
+ this.className = this.className.replace('sorttable_sorted',
+ 'sorttable_sorted_reverse');
+ this.removeChild(document.getElementById('sorttable_sortfwdind'));
+ sortrevind = document.createElement('span');
+ sortrevind.id = "sorttable_sortrevind";
+ sortrevind.innerHTML = stIsIE ? '&nbsp<font face="webdings">5</font>' : '&nbsp;&#x25B4;';
+ this.appendChild(sortrevind);
+ return;
+ }
+ if (this.className.search(/\bsorttable_sorted_reverse\b/) != -1) {
+ // if we're already sorted by this column in reverse, just
+ // re-reverse the table, which is quicker
+ sorttable.reverse(this.sorttable_tbody);
+ this.className = this.className.replace('sorttable_sorted_reverse',
+ 'sorttable_sorted');
+ this.removeChild(document.getElementById('sorttable_sortrevind'));
+ sortfwdind = document.createElement('span');
+ sortfwdind.id = "sorttable_sortfwdind";
+ sortfwdind.innerHTML = stIsIE ? '&nbsp<font face="webdings">6</font>' : '&nbsp;&#x25BE;';
+ this.appendChild(sortfwdind);
+ return;
+ }
+
+ // remove sorttable_sorted classes
+ theadrow = this.parentNode;
+ forEach(theadrow.childNodes, function(cell) {
+ if (cell.nodeType == 1) { // an element
+ cell.className = cell.className.replace('sorttable_sorted_reverse','');
+ cell.className = cell.className.replace('sorttable_sorted','');
+ }
+ });
+ sortfwdind = document.getElementById('sorttable_sortfwdind');
+ if (sortfwdind) { sortfwdind.parentNode.removeChild(sortfwdind); }
+ sortrevind = document.getElementById('sorttable_sortrevind');
+ if (sortrevind) { sortrevind.parentNode.removeChild(sortrevind); }
+
+ this.className += ' sorttable_sorted';
+ sortfwdind = document.createElement('span');
+ sortfwdind.id = "sorttable_sortfwdind";
+ sortfwdind.innerHTML = stIsIE ? '&nbsp<font face="webdings">6</font>' : '&nbsp;&#x25BE;';
+ this.appendChild(sortfwdind);
+
+ // build an array to sort. This is a Schwartzian transform thing,
+ // i.e., we "decorate" each row with the actual sort key,
+ // sort based on the sort keys, and then put the rows back in order
+ // which is a lot faster because you only do getInnerText once per row
+ row_array = [];
+ col = this.sorttable_columnindex;
+ rows = this.sorttable_tbody.rows;
+ for (var j=0; j<rows.length; j++) {
+ row_array[row_array.length] = [sorttable.getInnerText(rows[j].cells[col]), rows[j]];
+ }
+ /* If you want a stable sort, uncomment the following line */
+ //sorttable.shaker_sort(row_array, this.sorttable_sortfunction);
+ /* and comment out this one */
+ row_array.sort(this.sorttable_sortfunction);
+
+ tb = this.sorttable_tbody;
+ for (var j=0; j<row_array.length; j++) {
+ tb.appendChild(row_array[j][1]);
+ }
+
+ delete row_array;
+ });
+ }
+ }
+ },
+
+ guessType: function(table, column) {
+ // guess the type of a column based on its first non-blank row
+ sortfn = sorttable.sort_alpha;
+ for (var i=0; i<table.tBodies[0].rows.length; i++) {
+ text = sorttable.getInnerText(table.tBodies[0].rows[i].cells[column]);
+ if (text != '') {
+ if (text.match(/^-?[£$¤]?[\d,.]+%?$/)) {
+ return sorttable.sort_numeric;
+ }
+ // check for a date: dd/mm/yyyy or dd/mm/yy
+ // can have / or . or - as separator
+ // can be mm/dd as well
+ possdate = text.match(sorttable.DATE_RE)
+ if (possdate) {
+ // looks like a date
+ first = parseInt(possdate[1]);
+ second = parseInt(possdate[2]);
+ if (first > 12) {
+ // definitely dd/mm
+ return sorttable.sort_ddmm;
+ } else if (second > 12) {
+ return sorttable.sort_mmdd;
+ } else {
+ // looks like a date, but we can't tell which, so assume
+ // that it's dd/mm (English imperialism!) and keep looking
+ sortfn = sorttable.sort_ddmm;
+ }
+ }
+ }
+ }
+ return sortfn;
+ },
+
+ getInnerText: function(node) {
+ // gets the text we want to use for sorting for a cell.
+ // strips leading and trailing whitespace.
+ // this is *not* a generic getInnerText function; it's special to sorttable.
+ // for example, you can override the cell text with a customkey attribute.
+ // it also gets .value for <input> fields.
+
+ if (!node) return "";
+
+ hasInputs = (typeof node.getElementsByTagName == 'function') &&
+ node.getElementsByTagName('input').length;
+
+ if (node.getAttribute("sorttable_customkey") != null) {
+ return node.getAttribute("sorttable_customkey");
+ }
+ else if (typeof node.textContent != 'undefined' && !hasInputs) {
+ return node.textContent.replace(/^\s+|\s+$/g, '');
+ }
+ else if (typeof node.innerText != 'undefined' && !hasInputs) {
+ return node.innerText.replace(/^\s+|\s+$/g, '');
+ }
+ else if (typeof node.text != 'undefined' && !hasInputs) {
+ return node.text.replace(/^\s+|\s+$/g, '');
+ }
+ else {
+ switch (node.nodeType) {
+ case 3:
+ if (node.nodeName.toLowerCase() == 'input') {
+ return node.value.replace(/^\s+|\s+$/g, '');
+ }
+ case 4:
+ return node.nodeValue.replace(/^\s+|\s+$/g, '');
+ break;
+ case 1:
+ case 11:
+ var innerText = '';
+ for (var i = 0; i < node.childNodes.length; i++) {
+ innerText += sorttable.getInnerText(node.childNodes[i]);
+ }
+ return innerText.replace(/^\s+|\s+$/g, '');
+ break;
+ default:
+ return '';
+ }
+ }
+ },
+
+ reverse: function(tbody) {
+ // reverse the rows in a tbody
+ newrows = [];
+ for (var i=0; i<tbody.rows.length; i++) {
+ newrows[newrows.length] = tbody.rows[i];
+ }
+ for (var i=newrows.length-1; i>=0; i--) {
+ tbody.appendChild(newrows[i]);
+ }
+ delete newrows;
+ },
+
+ /* sort functions
+ each sort function takes two parameters, a and b
+ you are comparing a[0] and b[0] */
+ sort_numeric: function(a,b) {
+ aa = parseFloat(a[0].replace(/[^0-9.-]/g,''));
+ if (isNaN(aa)) aa = 0;
+ bb = parseFloat(b[0].replace(/[^0-9.-]/g,''));
+ if (isNaN(bb)) bb = 0;
+ return aa-bb;
+ },
+ sort_alpha: function(a,b) {
+ if (a[0]==b[0]) return 0;
+ if (a[0]<b[0]) return -1;
+ return 1;
+ },
+ sort_ddmm: function(a,b) {
+ mtch = a[0].match(sorttable.DATE_RE);
+ y = mtch[3]; m = mtch[2]; d = mtch[1];
+ if (m.length == 1) m = '0'+m;
+ if (d.length == 1) d = '0'+d;
+ dt1 = y+m+d;
+ mtch = b[0].match(sorttable.DATE_RE);
+ y = mtch[3]; m = mtch[2]; d = mtch[1];
+ if (m.length == 1) m = '0'+m;
+ if (d.length == 1) d = '0'+d;
+ dt2 = y+m+d;
+ if (dt1==dt2) return 0;
+ if (dt1<dt2) return -1;
+ return 1;
+ },
+ sort_mmdd: function(a,b) {
+ mtch = a[0].match(sorttable.DATE_RE);
+ y = mtch[3]; d = mtch[2]; m = mtch[1];
+ if (m.length == 1) m = '0'+m;
+ if (d.length == 1) d = '0'+d;
+ dt1 = y+m+d;
+ mtch = b[0].match(sorttable.DATE_RE);
+ y = mtch[3]; d = mtch[2]; m = mtch[1];
+ if (m.length == 1) m = '0'+m;
+ if (d.length == 1) d = '0'+d;
+ dt2 = y+m+d;
+ if (dt1==dt2) return 0;
+ if (dt1<dt2) return -1;
+ return 1;
+ },
+
+ shaker_sort: function(list, comp_func) {
+ // A stable sort function to allow multi-level sorting of data
+ // see: http://en.wikipedia.org/wiki/Cocktail_sort
+ // thanks to Joseph Nahmias
+ var b = 0;
+ var t = list.length - 1;
+ var swap = true;
+
+ while(swap) {
+ swap = false;
+ for(var i = b; i < t; ++i) {
+ if ( comp_func(list[i], list[i+1]) > 0 ) {
+ var q = list[i]; list[i] = list[i+1]; list[i+1] = q;
+ swap = true;
+ }
+ } // for
+ t--;
+
+ if (!swap) break;
+
+ for(var i = t; i > b; --i) {
+ if ( comp_func(list[i], list[i-1]) < 0 ) {
+ var q = list[i]; list[i] = list[i-1]; list[i-1] = q;
+ swap = true;
+ }
+ } // for
+ b++;
+
+ } // while(swap)
+ }
+}
+
+/* ******************************************************************
+ Supporting functions: bundled here to avoid depending on a library
+ ****************************************************************** */
+
+// Dean Edwards/Matthias Miller/John Resig
+
+/* for Mozilla/Opera9 */
+if (document.addEventListener) {
+ document.addEventListener("DOMContentLoaded", sorttable.init, false);
+}
+
+/* for Internet Explorer */
+/*@cc_on @*/
+/*@if (@_win32)
+ document.write("<script id=__ie_onload defer src=javascript:void(0)><\/script>");
+ var script = document.getElementById("__ie_onload");
+ script.onreadystatechange = function() {
+ if (this.readyState == "complete") {
+ sorttable.init(); // call the onload handler
+ }
+ };
+/*@end @*/
+
+/* for Safari */
+if (/WebKit/i.test(navigator.userAgent)) { // sniff
+ var _timer = setInterval(function() {
+ if (/loaded|complete/.test(document.readyState)) {
+ sorttable.init(); // call the onload handler
+ }
+ }, 10);
+}
+
+/* for other browsers */
+window.onload = sorttable.init;
+
+// written by Dean Edwards, 2005
+// with input from Tino Zijdel, Matthias Miller, Diego Perini
+
+// http://dean.edwards.name/weblog/2005/10/add-event/
+
+function dean_addEvent(element, type, handler) {
+ if (element.addEventListener) {
+ element.addEventListener(type, handler, false);
+ } else {
+ // assign each event handler a unique ID
+ if (!handler.$$guid) handler.$$guid = dean_addEvent.guid++;
+ // create a hash table of event types for the element
+ if (!element.events) element.events = {};
+ // create a hash table of event handlers for each element/event pair
+ var handlers = element.events[type];
+ if (!handlers) {
+ handlers = element.events[type] = {};
+ // store the existing event handler (if there is one)
+ if (element["on" + type]) {
+ handlers[0] = element["on" + type];
+ }
+ }
+ // store the event handler in the hash table
+ handlers[handler.$$guid] = handler;
+ // assign a global event handler to do all the work
+ element["on" + type] = handleEvent;
+ }
+};
+// a counter used to create unique IDs
+dean_addEvent.guid = 1;
+
+function removeEvent(element, type, handler) {
+ if (element.removeEventListener) {
+ element.removeEventListener(type, handler, false);
+ } else {
+ // delete the event handler from the hash table
+ if (element.events && element.events[type]) {
+ delete element.events[type][handler.$$guid];
+ }
+ }
+};
+
+function handleEvent(event) {
+ var returnValue = true;
+ // grab the event object (IE uses a global event object)
+ event = event || fixEvent(((this.ownerDocument || this.document || this).parentWindow || window).event);
+ // get a reference to the hash table of event handlers
+ var handlers = this.events[event.type];
+ // execute each event handler
+ for (var i in handlers) {
+ this.$$handleEvent = handlers[i];
+ if (this.$$handleEvent(event) === false) {
+ returnValue = false;
+ }
+ }
+ return returnValue;
+};
+
+function fixEvent(event) {
+ // add W3C standard event methods
+ event.preventDefault = fixEvent.preventDefault;
+ event.stopPropagation = fixEvent.stopPropagation;
+ return event;
+};
+fixEvent.preventDefault = function() {
+ this.returnValue = false;
+};
+fixEvent.stopPropagation = function() {
+ this.cancelBubble = true;
+}
+
+// Dean's forEach: http://dean.edwards.name/base/forEach.js
+/*
+ forEach, version 1.0
+ Copyright 2006, Dean Edwards
+ License: http://www.opensource.org/licenses/mit-license.php
+*/
+
+// array-like enumeration
+if (!Array.forEach) { // mozilla already supports this
+ Array.forEach = function(array, block, context) {
+ for (var i = 0; i < array.length; i++) {
+ block.call(context, array[i], i, array);
+ }
+ };
+}
+
+// generic enumeration
+Function.prototype.forEach = function(object, block, context) {
+ for (var key in object) {
+ if (typeof this.prototype[key] == "undefined") {
+ block.call(context, object[key], key, object);
+ }
+ }
+};
+
+// character enumeration
+String.forEach = function(string, block, context) {
+ Array.forEach(string.split(""), function(chr, index) {
+ block.call(context, chr, index, string);
+ });
+};
+
+// globally resolve forEach enumeration
+var forEach = function(object, block, context) {
+ if (object) {
+ var resolve = Object; // default
+ if (object instanceof Function) {
+ // functions have a "length" property
+ resolve = Function;
+ } else if (object.forEach instanceof Function) {
+ // the object implements a custom forEach method so use that
+ object.forEach(block, context);
+ return;
+ } else if (typeof object == "string") {
+ // the object is a string
+ resolve = String;
+ } else if (typeof object.length == "number") {
+ // the object is array-like
+ resolve = Array;
+ }
+ resolve.forEach(object, block, context);
+ }
+};
diff --git a/chromium/tools/copyright_scanner/OWNERS b/chromium/tools/copyright_scanner/OWNERS
new file mode 100644
index 00000000000..90a3cbe2336
--- /dev/null
+++ b/chromium/tools/copyright_scanner/OWNERS
@@ -0,0 +1,3 @@
+phajdan.jr@chromium.org
+sgurun@chromium.org
+torne@chromium.org
diff --git a/chromium/tools/copyright_scanner/PRESUBMIT.py b/chromium/tools/copyright_scanner/PRESUBMIT.py
new file mode 100644
index 00000000000..455701ec27c
--- /dev/null
+++ b/chromium/tools/copyright_scanner/PRESUBMIT.py
@@ -0,0 +1,29 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+def CheckChangeOnUpload(input_api, output_api):
+ return _CommonChecks(input_api, output_api)
+
+def CheckChangeOnCommit(input_api, output_api):
+ return _CommonChecks(input_api, output_api)
+
+def _CommonChecks(input_api, output_api):
+ """Checks common to both upload and commit."""
+ results = []
+
+ would_affect_tests = [
+ 'PRESUBMIT.py',
+ 'copyright_scanner.py',
+ 'copyright_scanner_unittest.py'
+ ]
+ need_to_run_unittests = False
+ for f in input_api.AffectedFiles():
+ if any(t for t in would_affect_tests if f.LocalPath().endswith(t)):
+ need_to_run_unittests = True
+ break
+ tests = [input_api.os_path.join(
+ input_api.PresubmitLocalPath(), 'copyright_scanner_unittest.py')]
+ results.extend(
+ input_api.canned_checks.RunUnitTests(input_api, output_api, tests))
+ return results
diff --git a/chromium/tools/copyright_scanner/__init__.py b/chromium/tools/copyright_scanner/__init__.py
new file mode 100755
index 00000000000..e6b8d8e5bb1
--- /dev/null
+++ b/chromium/tools/copyright_scanner/__init__.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+__all__ = ['copyright_scanner']
diff --git a/chromium/tools/copyright_scanner/copyright_scanner.py b/chromium/tools/copyright_scanner/copyright_scanner.py
new file mode 100644
index 00000000000..1e532a9b0da
--- /dev/null
+++ b/chromium/tools/copyright_scanner/copyright_scanner.py
@@ -0,0 +1,401 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for scanning source files to determine code authorship.
+"""
+
+import itertools
+
+def ForwardSlashesToOsPathSeps(input_api, path):
+ """Converts forward slashes ('/') in the input path to OS-specific
+ path separators. Used when the paths come from outside and are using
+ UNIX path separators. Only works for relative paths!
+ Args:
+ input_api: InputAPI, as in presubmit scripts.
+ path: The path to convert.
+ Returns:
+ Converted path.
+ """
+ return input_api.os_path.join(*path.split('/'))
+
+def FindFiles(input_api, root_dir, start_paths_list, excluded_dirs_list):
+ """Similar to UNIX utility find(1), searches for files in the directories.
+ Automatically leaves out only source code files and excludes third_party
+ directories.
+ Args:
+ input_api: InputAPI, as in presubmit scripts.
+ root_dir: The root directory, to which all other paths are relative.
+ start_paths_list: The list of paths to start search from. Each path can
+ be a file or a directory.
+ excluded_dirs_list: The list of directories to skip.
+ Returns:
+ The list of source code files found, relative to |root_dir|.
+ """
+ excluded_dirs_list = [d for d in excluded_dirs_list if not 'third_party' in d]
+ # Using a common pattern for third-partyies makes the ignore regexp shorter
+ excluded_dirs_list.append('third_party')
+
+ path_join = input_api.os_path.join
+ EXTRA_EXCLUDED_DIRS = [
+ # VCS dirs
+ path_join('.git'),
+ path_join('.svn'),
+ # Build output
+ path_join('out', 'Debug'),
+ path_join('out', 'Release'),
+ # 'Copyright' appears in license agreements
+ path_join('chrome', 'app', 'resources'),
+ # Quickoffice js files from internal src used on buildbots.
+ # crbug.com/350472.
+ path_join('chrome', 'browser', 'resources', 'chromeos', 'quickoffice'),
+ # blink style copy right headers.
+ path_join('content', 'shell', 'renderer', 'test_runner'),
+ # blink style copy right headers.
+ path_join('content', 'shell', 'tools', 'plugin'),
+ # This is tests directory, doesn't exist in the snapshot
+ path_join('content', 'test', 'data'),
+ # This is a tests directory that doesn't exist in the shipped product.
+ path_join('gin', 'test'),
+ # This is a test output directory
+ path_join('data', 'dom_perf'),
+ # This is a tests directory that doesn't exist in the shipped product.
+ path_join('tools', 'perf', 'page_sets'),
+ path_join('tools', 'perf', 'page_sets', 'tough_animation_cases'),
+ # Histogram tools, doesn't exist in the snapshot
+ path_join('tools', 'histograms'),
+ # Swarming tools, doesn't exist in the snapshot
+ path_join('tools', 'swarming_client'),
+ # Don't check downloaded goma client binaries.
+ path_join('build', 'goma', 'client'),
+ # Ignore sysroots.
+ path_join('build', 'linux', 'debian_wheezy_amd64-sysroot'),
+ path_join('build', 'linux', 'debian_wheezy_arm-sysroot'),
+ path_join('build', 'linux', 'debian_wheezy_mips-sysroot'),
+ path_join('build', 'linux', 'debian_wheezy_i386-sysroot'),
+ # Old location (TODO(sbc): Remove this once it no longer exists on any bots)
+ path_join('chrome', 'installer', 'linux', 'debian_wheezy_arm-sysroot'),
+ # Data is not part of open source chromium, but are included on some bots.
+ path_join('data'),
+ # This is not part of open source chromium, but are included on some bots.
+ path_join('skia', 'tools', 'clusterfuzz-data'),
+ # Not shipped, only relates to Chrome for Android, but not to WebView
+ path_join('clank'),
+ # Internal-only repository.
+ path_join('remoting', 'android', 'internal'),
+ ]
+ excluded_dirs_list.extend(EXTRA_EXCLUDED_DIRS)
+
+ # Surround the directory names with OS path separators.
+ dirs_blacklist = [path_join('.', d, '')[1:] for d in excluded_dirs_list if d]
+ def IsBlacklistedDir(d):
+ for item in dirs_blacklist:
+ if item in d:
+ return True
+ return False
+
+ files_whitelist_re = input_api.re.compile(
+ r'\.(asm|c(c|pp|xx)?|h(h|pp|xx)?|p(l|m)|xs|sh|php|py(|x)'
+ '|rb|idl|java|el|sc(i|e)|cs|pas|inc|js|pac|html|dtd|xsl|mod|mm?'
+ '|tex|mli?)$')
+ files = []
+
+ base_path_len = len(root_dir)
+ for path in start_paths_list:
+ full_path = path_join(root_dir, path)
+ if input_api.os_path.isfile(full_path):
+ if files_whitelist_re.search(path) and \
+ not IsBlacklistedDir(full_path[base_path_len:]): # Keep '/' prefix.
+ files.append(path)
+ else:
+ for dirpath, dirnames, filenames in input_api.os_walk(full_path):
+ # Remove excluded subdirs for faster scanning.
+ for item in dirnames[:]:
+ if IsBlacklistedDir(
+ path_join(dirpath, item)[base_path_len + 1:]):
+ dirnames.remove(item)
+ for filename in filenames:
+ filepath = \
+ path_join(dirpath, filename)[base_path_len + 1:]
+ if files_whitelist_re.search(filepath) and \
+ not IsBlacklistedDir(filepath):
+ files.append(filepath)
+ return files
+
+
+class _GeneratedFilesDetector(object):
+ GENERATED_FILE = 'GENERATED FILE'
+ NO_COPYRIGHT = '*No copyright*'
+
+ def __init__(self, input_api):
+ self.python_multiline_string_double_re = \
+ input_api.re.compile(r'"""[^"]*(?:"""|$)', flags=input_api.re.MULTILINE)
+ self.python_multiline_string_single_re = \
+ input_api.re.compile(r"'''[^']*(?:'''|$)", flags=input_api.re.MULTILINE)
+ self.automatically_generated_re = input_api.re.compile(
+ r'(All changes made in this file will be lost'
+ '|DO NOT (EDIT|delete this file)'
+ '|Generated (at|automatically|data)'
+ '|Automatically generated'
+ '|\Wgenerated\s+(?:\w+\s+)*file\W)', flags=input_api.re.IGNORECASE)
+
+ def IsGeneratedFile(self, header):
+ header = header.upper()
+ if '"""' in header:
+ header = self.python_multiline_string_double_re.sub('', header)
+ if "'''" in header:
+ header = self.python_multiline_string_single_re.sub('', header)
+ # First do simple strings lookup to save time.
+ if 'ALL CHANGES MADE IN THIS FILE WILL BE LOST' in header:
+ return True
+ if 'DO NOT EDIT' in header or 'DO NOT DELETE' in header or \
+ 'GENERATED' in header:
+ return self.automatically_generated_re.search(header)
+ return False
+
+
+class _CopyrightsScanner(object):
+ @staticmethod
+ def StaticInit(input_api):
+ _CopyrightsScanner._c_comment_re = \
+ input_api.re.compile(r'''"[^"\\]*(?:\\.[^"\\]*)*"''')
+ _CopyrightsScanner._copyright_indicator = \
+ r'(?:copyright|copr\.|\xc2\xa9|\(c\))'
+ _CopyrightsScanner._full_copyright_indicator_re = input_api.re.compile(
+ r'(?:\W|^)' + _CopyrightsScanner._copyright_indicator + \
+ r'(?::\s*|\s+)(\w.*)$', input_api.re.IGNORECASE)
+ _CopyrightsScanner._copyright_disindicator_re = input_api.re.compile(
+ r'\s*\b(?:info(?:rmation)?|notice|and|or)\b', input_api.re.IGNORECASE)
+
+ def __init__(self, input_api):
+ self.max_line_numbers_proximity = 3
+ self.last_a_item_line_number = -200
+ self.last_b_item_line_number = -100
+ self.re = input_api.re
+
+ def _CloseLineNumbers(self, a, b):
+ return 0 <= a - b <= self.max_line_numbers_proximity
+
+ def MatchLine(self, line_number, line):
+ if '"' in line:
+ line = _CopyrightsScanner._c_comment_re.sub('', line)
+ upcase_line = line.upper()
+ # Record '(a)' and '(b)' last occurences in C++ comments.
+ # This is to filter out '(c)' used as a list item inside C++ comments.
+ # E.g. "// blah-blah (a) blah\n// blah-blah (b) and (c) blah"
+ cpp_comment_idx = upcase_line.find('//')
+ if cpp_comment_idx != -1:
+ if upcase_line.find('(A)') > cpp_comment_idx:
+ self.last_a_item_line_number = line_number
+ if upcase_line.find('(B)') > cpp_comment_idx:
+ self.last_b_item_line_number = line_number
+ # Fast bailout, uses the same patterns as _copyright_indicator regexp.
+ if not 'COPYRIGHT' in upcase_line and not 'COPR.' in upcase_line \
+ and not '\xc2\xa9' in upcase_line:
+ c_item_index = upcase_line.find('(C)')
+ if c_item_index == -1:
+ return None
+ if c_item_index > cpp_comment_idx and \
+ self._CloseLineNumbers(line_number,
+ self.last_b_item_line_number) and \
+ self._CloseLineNumbers(self.last_b_item_line_number,
+ self.last_a_item_line_number):
+ return None
+ copyr = None
+ m = _CopyrightsScanner._full_copyright_indicator_re.search(line)
+ if m and \
+ not _CopyrightsScanner._copyright_disindicator_re.match(m.group(1)):
+ copyr = m.group(0)
+ # Prettify the authorship string.
+ copyr = self.re.sub(r'([,.])?\s*$/', '', copyr)
+ copyr = self.re.sub(
+ _CopyrightsScanner._copyright_indicator, '', copyr, \
+ flags=self.re.IGNORECASE)
+ copyr = self.re.sub(r'^\s+', '', copyr)
+ copyr = self.re.sub(r'\s{2,}', ' ', copyr)
+ copyr = self.re.sub(r'\\@', '@', copyr)
+ return copyr
+
+
+def FindCopyrights(input_api, root_dir, files_to_scan):
+ """Determines code autorship, and finds generated files.
+ Args:
+ input_api: InputAPI, as in presubmit scripts.
+ root_dir: The root directory, to which all other paths are relative.
+ files_to_scan: The list of file names to scan.
+ Returns:
+ The list of copyrights associated with each of the files given.
+ If the certain file is generated, the corresponding list consists a single
+ entry -- 'GENERATED_FILE' string. If the file has no copyright info,
+ the corresponding list contains 'NO_COPYRIGHT' string.
+ """
+ generated_files_detector = _GeneratedFilesDetector(input_api)
+ _CopyrightsScanner.StaticInit(input_api)
+ copyrights = []
+ for file_name in files_to_scan:
+ linenum = 0
+ header = []
+ file_copyrights = []
+ scanner = _CopyrightsScanner(input_api)
+ contents = input_api.ReadFile(
+ input_api.os_path.join(root_dir, file_name), 'r')
+ for l in contents.split('\n'):
+ linenum += 1
+ if linenum <= 25:
+ header.append(l)
+ c = scanner.MatchLine(linenum, l)
+ if c:
+ file_copyrights.append(c)
+ if generated_files_detector.IsGeneratedFile('\n'.join(header)):
+ copyrights.append([_GeneratedFilesDetector.GENERATED_FILE])
+ elif file_copyrights:
+ copyrights.append(file_copyrights)
+ else:
+ copyrights.append([_GeneratedFilesDetector.NO_COPYRIGHT])
+ return copyrights
+
+
+def FindCopyrightViolations(input_api, root_dir, files_to_scan):
+ """Looks for files that are not belong exlusively to the Chromium Authors.
+ Args:
+ input_api: InputAPI, as in presubmit scripts.
+ root_dir: The root directory, to which all other paths are relative.
+ files_to_scan: The list of file names to scan.
+ Returns:
+ The list of file names that contain non-Chromium copyrights.
+ """
+ copyrights = FindCopyrights(input_api, root_dir, files_to_scan)
+ offending_files = []
+ allowed_copyrights_re = input_api.re.compile(
+ r'^(?:20[0-9][0-9](?:-20[0-9][0-9])? The Chromium Authors\. '
+ 'All rights reserved.*)$')
+ for f, cs in itertools.izip(files_to_scan, copyrights):
+ if cs[0] == _GeneratedFilesDetector.GENERATED_FILE or \
+ cs[0] == _GeneratedFilesDetector.NO_COPYRIGHT:
+ continue
+ for c in cs:
+ if not allowed_copyrights_re.match(c):
+ offending_files.append(input_api.os_path.normpath(f))
+ break
+ return offending_files
+
+
+def _GetWhitelistFileName(input_api):
+ return input_api.os_path.join(
+ 'tools', 'copyright_scanner', 'third_party_files_whitelist.txt')
+
+def _ProcessWhitelistedFilesList(input_api, lines):
+ whitelisted_files = []
+ for line in lines:
+ match = input_api.re.match(r'([^#\s]+)', line)
+ if match:
+ whitelisted_files.append(
+ ForwardSlashesToOsPathSeps(input_api, match.group(1)))
+ return whitelisted_files
+
+
+def LoadWhitelistedFilesList(input_api):
+ """Loads and parses the 3rd party code whitelist file.
+ input_api: InputAPI of presubmit scripts.
+ Returns:
+ The list of files.
+ """
+ full_file_name = input_api.os_path.join(
+ input_api.change.RepositoryRoot(), _GetWhitelistFileName(input_api))
+ file_data = input_api.ReadFile(full_file_name, 'rb')
+ return _ProcessWhitelistedFilesList(input_api, file_data.splitlines())
+
+
+def AnalyzeScanResults(input_api, whitelisted_files, offending_files):
+ """Compares whitelist contents with the results of file scanning.
+ input_api: InputAPI of presubmit scripts.
+ whitelisted_files: Whitelisted files list.
+ offending_files: Files that contain 3rd party code.
+ Returns:
+ A triplet of "unknown", "missing", and "stale" file lists.
+ "Unknown" are files that contain 3rd party code but not whitelisted.
+ "Missing" are files that are whitelisted but doesn't really exist.
+ "Stale" are files that are whitelisted unnecessarily.
+ """
+ unknown = set(offending_files) - set(whitelisted_files)
+ missing = [f for f in whitelisted_files if not input_api.os_path.isfile(
+ input_api.os_path.join(input_api.change.RepositoryRoot(), f))]
+ stale = set(whitelisted_files) - set(offending_files) - set(missing)
+ return (list(unknown), missing, list(stale))
+
+
+def _GetDeletedContents(affected_file):
+ """Returns a list of all deleted lines.
+ AffectedFile class from presubmit_support is lacking this functionality.
+ """
+ deleted_lines = []
+ for line in affected_file.GenerateScmDiff().splitlines():
+ if line.startswith('-') and not line.startswith('--'):
+ deleted_lines.append(line[1:])
+ return deleted_lines
+
+def _DoScanAtPresubmit(input_api, whitelisted_files, files_to_check):
+ # We pass empty 'known third-party' dirs list here. Since this is a patch
+ # for the Chromium's src tree, it must contain properly licensed Chromium
+ # code. Any third-party code must be put into a directory named 'third_party',
+ # and such dirs are automatically excluded by FindFiles.
+ files_to_scan = FindFiles(
+ input_api, input_api.change.RepositoryRoot(), files_to_check, [])
+ offending_files = FindCopyrightViolations(
+ input_api, input_api.change.RepositoryRoot(), files_to_scan)
+ return AnalyzeScanResults(
+ input_api, whitelisted_files, offending_files)
+
+def ScanAtPresubmit(input_api, output_api):
+ """Invoked at change presubmit time. Verifies that updated non third-party
+ code doesn't contain external copyrighted code.
+ input_api: InputAPI of presubmit scripts.
+ output_api: OutputAPI of presubmit scripts.
+ """
+ files_to_check = set([])
+ deleted_files = set([])
+ whitelist_contents_changed = False
+ for f in input_api.AffectedFiles():
+ if f.LocalPath() == _GetWhitelistFileName(input_api):
+ whitelist_contents_changed = True
+ deleted_files |= set(_ProcessWhitelistedFilesList(
+ input_api, _GetDeletedContents(f)))
+ continue
+ if f.Action() != 'D':
+ files_to_check.add(f.LocalPath())
+ else:
+ deleted_files.add(f.LocalPath())
+ whitelisted_files = set(LoadWhitelistedFilesList(input_api))
+ if not whitelist_contents_changed:
+ whitelisted_files &= files_to_check | deleted_files
+ else:
+ # Need to re-check the entire contents of the whitelist file.
+ # Also add files removed from the whitelist. If the file has indeed been
+ # deleted, the scanner will not complain.
+ files_to_check |= whitelisted_files | deleted_files
+
+ (unknown_files, missing_files, stale_files) = _DoScanAtPresubmit(
+ input_api, list(whitelisted_files), list(files_to_check))
+ results = []
+ if unknown_files:
+ results.append(output_api.PresubmitError(
+ 'The following files contain a third-party license but are not in ' \
+ 'a listed third-party directory and are not whitelisted. You must ' \
+ 'add the following files to the whitelist file %s\n' \
+ '(Note that if the code you are adding does not actually contain ' \
+ 'any third-party code, it may contain the word "copyright", which ' \
+ 'should be masked out, e.g. by writing it as "copy-right"):' \
+ '' % _GetWhitelistFileName(input_api),
+ sorted(unknown_files)))
+ if missing_files:
+ results.append(output_api.PresubmitPromptWarning(
+ 'The following files are whitelisted in %s, ' \
+ 'but do not exist or not files:' % _GetWhitelistFileName(input_api),
+ sorted(missing_files)))
+ if stale_files:
+ results.append(output_api.PresubmitPromptWarning(
+ 'The following files are whitelisted unnecessarily. You must ' \
+ 'remove the following files from the whitelist file ' \
+ '%s:' % _GetWhitelistFileName(input_api),
+ sorted(stale_files)))
+ return results
diff --git a/chromium/tools/copyright_scanner/copyright_scanner_unittest.py b/chromium/tools/copyright_scanner/copyright_scanner_unittest.py
new file mode 100755
index 00000000000..339abde00b2
--- /dev/null
+++ b/chromium/tools/copyright_scanner/copyright_scanner_unittest.py
@@ -0,0 +1,308 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for Copyright Scanner utilities."""
+
+import os
+import re
+import sys
+import unittest
+
+test_dir = os.path.dirname(os.path.abspath(__file__))
+sys.path.extend([
+ os.path.normpath(os.path.join(test_dir, '..', '..', 'build')),
+ os.path.join(test_dir),
+])
+
+import find_depot_tools
+from testing_support.super_mox import SuperMoxTestBase
+
+import copyright_scanner
+
class FindCopyrightsTest(SuperMoxTestBase):
  """Checks FindCopyrights' classification of sample file contents."""

  def setUp(self):
    SuperMoxTestBase.setUp(self)
    api = self.mox.CreateMockAnything()
    api.re = re
    api.os_path = os.path
    api.os_walk = os.walk
    self.input_api = api

  def ShouldMatchReferenceOutput(self, test_data, expected_output):
    """Feeds each sample through FindCopyrights and checks the result."""
    for contents in test_data:
      self.input_api.ReadFile = lambda _1, _2: contents
      actual_output = copyright_scanner.FindCopyrights(
          self.input_api, '', [''])
      failure_message = (
          'Input """\n%s""", expected output: "%s", actual: "%s"'
          % (contents, expected_output, actual_output))
      self.assertEqual(expected_output, actual_output, failure_message)

  def testCopyrightedFiles(self):
    test_data = [
      '// (c) 2014 Google Inc.\n//\n// (a) One\n//\n// (b) Two\n//\n',
      'Copyright 2014 Google Inc.\n',
      'Copr. 2014 Google Inc.',
      '\xc2\xa9 2014 Google Inc.',
      'Copyright 2014 Google Inc.'
    ]
    self.ShouldMatchReferenceOutput(test_data, [['2014 Google Inc.']])

  def testGeneratedFiles(self):
    test_data = [
      'ALL CHANGES MADE IN THIS FILE WILL BE LOST\nCopyright 2014 Google\n',
      'GENERATED FILE. DO NOT EDIT\nCopyright 2014 Google\n',
      'GENERATED. DO NOT DELETE THIS FILE.\nCopyright 2014 Google\n',
      'DO NOT EDIT\nCopyright 2014 Google\n',
      'DO NOT DELETE THIS FILE\nCopyright 2014 Google\n',
      'All changes made in this file will be lost\nCopyright 2014 Google\n',
      'Automatically generated file\nCopyright 2014 Google\n',
      'Synthetically generated dummy file\nCopyright 2014 Google\n',
      'Generated data (by gnugnu)\nCopyright 2014 Google\n'
    ]
    self.ShouldMatchReferenceOutput(test_data, [['GENERATED FILE']])

  def testNonCopyrightedFiles(self):
    test_data = [
      'std::cout << "Copyright 2014 Google"\n',
      '// Several points can be made:\n//\n// (a) One\n//\n// (b) Two\n'
      '//\n// (c) Three\n//\n',
      'See \'foo\' for copyright information.\n',
      'See \'foo\' for the copyright notice.\n',
      'See \'foo\' for the copyright and other things.\n'
    ]
    self.ShouldMatchReferenceOutput(test_data, [['*No copyright*']])

  def testNonGeneratedFiles(self):
    test_data = [
      'This file was prohibited from being generated.\n',
      'Please do not delete our files! They are valuable to us.\n',
      'Manually generated from dice rolls.\n',
      '"""This Python script produces generated data\n"""\n',
      '\'\'\'This Python script produces generated data\n\'\'\'\n'
    ]
    self.ShouldMatchReferenceOutput(test_data, [['*No copyright*']])
+
+
class FindFilesTest(SuperMoxTestBase):
  """Checks FindFiles' filtering of start paths and excluded directories."""

  def setUp(self):
    SuperMoxTestBase.setUp(self)
    self.input_api = self.mox.CreateMockAnything()
    self.input_api.re = re
    self.input_api.os_path = os.path

  def testFilesAsStartPaths(self):
    join = self.input_api.os_path.join
    # Pretend every start path is an existing file.
    self.input_api.os_path.isfile = lambda _: True
    # Same name set in the root, under foo/, third_party/ and
    # foo/third_party/.
    input_files = []
    for prefix in ['', 'foo', 'third_party', join('foo', 'third_party')]:
      for name in ['a', 'a.cc', 'a.txt']:
        input_files.append(join(prefix, name) if prefix else name)
    root_dir = os.path.sep + 'src'
    # (excluded dirs, expected result) pairs.
    cases = [
      ([''], ['a.cc', join('foo', 'a.cc')]),
      (['third_party'], ['a.cc', join('foo', 'a.cc')]),
      (['foo'], ['a.cc']),
      (['foo', 'third_party'], ['a.cc']),
      ([join('foo', 'third_party')], ['a.cc', join('foo', 'a.cc')]),
    ]
    for excluded_dirs, expected in cases:
      actual = copyright_scanner.FindFiles(
          self.input_api, root_dir, input_files, excluded_dirs)
      self.assertEqual(expected, actual)

  def testDirAsStartPath(self):
    # Pretend the start path is a directory, so os_walk gets used.
    self.input_api.os_path.isfile = lambda _: False
    join = self.input_api.os_path.join
    normpath = self.input_api.os_path.normpath
    root_dir = os.path.sep + 'src'
    scan_from = '.'
    base_path = join(root_dir, scan_from)

    def mock_os_walk(path):
      return lambda _: [(join(base_path, path), [''], ['a', 'a.cc', 'a.txt'])]

    # (walked subdirectory, excluded dirs, expected result) triples.
    cases = [
      ('', [''], ['a.cc']),
      ('third_party', [''], []),
      ('foo', [''], [join('foo', 'a.cc')]),
      ('foo', ['foo'], []),
      (join('foo', 'bar'), ['foo'], []),
      (join('foo', 'third_party'), [''], []),
      (join('foo', 'third_party'), [join('foo', 'third_party')], []),
    ]
    for walked_dir, excluded_dirs, expected in cases:
      self.input_api.os_walk = mock_os_walk(walked_dir)
      actual = map(normpath, copyright_scanner.FindFiles(
          self.input_api, root_dir, [scan_from], excluded_dirs))
      self.assertEqual(expected, actual)
+
+
class AnalyzeScanResultsTest(SuperMoxTestBase):
  """Checks AnalyzeScanResults' classification against the whitelist."""

  def setUp(self):
    SuperMoxTestBase.setUp(self)
    api = self.mox.CreateMockAnything()
    api.os_path = os.path
    api.change = self.mox.CreateMockAnything()
    api.change.RepositoryRoot = lambda: ''
    self.input_api = api

  def testAnalyzeScanResults(self):
    # Exercises whitelisted vs. current files state logic:
    #   Whitelisted -- in whitelist, and contains 3rd party code => OK
    #   Missing     -- in whitelist, but doesn't exist
    #   Stale       -- in whitelist, but is clean
    #   Unknown     -- not in whitelist, but contains 3rd party code
    self.input_api.os_path.isfile = lambda x: x != 'Missing'
    whitelisted_files = ['Whitelisted', 'Missing', 'Stale']
    offending_files = ['Whitelisted', 'Unknown']
    actual = copyright_scanner.AnalyzeScanResults(
        self.input_api, whitelisted_files, offending_files)
    self.assertEqual((['Unknown'], ['Missing'], ['Stale']), actual)
+
+
class ScanAtPresubmitTest(SuperMoxTestBase):
  """Tests ScanAtPresubmit's selection of files to scan and to whitelist.

  _GetWhitelistFileName, LoadWhitelistedFilesList, _DoScanAtPresubmit (and,
  where needed, _GetDeletedContents) are stubbed out, so these tests exercise
  only the file-selection logic, not the scanner itself.
  """
  def setUp(self):
    SuperMoxTestBase.setUp(self)
    self.input_api = self.mox.CreateMockAnything()
    self.input_api.re = re
    self.input_api.os_path = os.path
    self.output_api = self.mox.CreateMockAnything()
  def tearDown(self):
    # Restore the copyright_scanner functions replaced via StubOutWithMock.
    self.mox.UnsetStubs()
    SuperMoxTestBase.tearDown(self)

  class AffectedFileMock(object):
    # Minimal stand-in for presubmit's AffectedFile: a local path plus an
    # 'A'dded / 'D'eleted / 'M'odified action code.
    def __init__(self, local_path, action):
      self._local_path = local_path
      self._action = action
    def LocalPath(self):
      return self._local_path
    def Action(self):
      return self._action

  def CreateAffectedFilesFunc(self, paths_and_actions):
    """Builds an AffectedFiles() stub from a flat [path, action, ...] list."""
    result = []
    for i in range(0, len(paths_and_actions), 2):
      result.append(ScanAtPresubmitTest.AffectedFileMock(
        paths_and_actions[i], paths_and_actions[i + 1]))
    return lambda: result

  def CreateDoScanAtPresubmitFunc(self):
    """Returns a _DoScanAtPresubmit stub that records its two file lists."""
    self._whitelisted_files = None
    self._files_to_check = None
    def ScanAtPresubmitStub(_, whitelisted, to_check):
      self._whitelisted_files = whitelisted
      self._files_to_check = to_check
      return ([], [], [])
    return ScanAtPresubmitStub

  def GetWhitelistedFiles(self):
    # Sorted so assertions are independent of set iteration order.
    return sorted(self._whitelisted_files)

  def GetFilesToCheck(self):
    # Sorted so assertions are independent of set iteration order.
    return sorted(self._files_to_check)

  def testWhitelistedUntouched(self):
    # When a change doesn't touch the whitelist file, any updated files
    # (except deleted) must be checked. The whitelist used for analysis
    # must be trimmed to the changed files subset.
    #
    # A_NW.cc - added, not whitelisted => check
    # A_W.cc - added, whitelisted => check, remain on the trimmed whitelist
    # D_NW.cc - deleted, not whitelisted => ignore
    # D_W.cc - deleted and whitelisted => remain on w/l
    # M_NW.cc - modified, not whitelisted => check
    # M_W.cc - modified and whitelisted => check, remain on w/l
    # NM_W.cc - not modified, whitelisted => trim from w/l
    # W - the whitelist file

    self.input_api.AffectedFiles = self.CreateAffectedFilesFunc(
        ['A_NW.cc', 'A', 'A_W.cc', 'A', 'D_NW.cc', 'D', 'D_W.cc', 'D',
         'M_NW.cc', 'M', 'M_W.cc', 'M'])
    self.mox.StubOutWithMock(copyright_scanner, '_GetWhitelistFileName')
    copyright_scanner._GetWhitelistFileName = lambda _: 'W'
    self.mox.StubOutWithMock(copyright_scanner, 'LoadWhitelistedFilesList')
    copyright_scanner.LoadWhitelistedFilesList = \
      lambda _: ['A_W.cc', 'D_W.cc', 'M_W.cc', 'NM_W.cc']
    self.mox.StubOutWithMock(copyright_scanner, '_DoScanAtPresubmit')
    copyright_scanner._DoScanAtPresubmit = self.CreateDoScanAtPresubmitFunc()
    self.mox.ReplayAll()
    copyright_scanner.ScanAtPresubmit(self.input_api, self.output_api)
    self.assertEqual(
      ['A_W.cc', 'D_W.cc', 'M_W.cc'], self.GetWhitelistedFiles())
    self.assertEqual(
      ['A_NW.cc', 'A_W.cc', 'M_NW.cc', 'M_W.cc'], self.GetFilesToCheck())

  def testWhitelistTouched(self):
    # When the whitelist file is touched by the change, all the files listed in
    # it, including deleted entries, must be re-checked. All modified files
    # (including the deleted ones) must be checked as well. The current contents
    # of the whitelist are used for analysis.
    # Whitelist addition or deletion are not considered.
    #
    # All the files from names testWhitelistedUntouched are re-used, but now
    # action for all of them is 'check' (except for the w/l file itself).
    # A_DW.cc - added, deleted from w/l => check
    # D_DW.cc - deleted from repo and w/l => check
    # M_DW.cc - modified, deleted from w/l => check
    self.input_api.AffectedFiles = self.CreateAffectedFilesFunc(
        ['A_DW.cc', 'A', 'A_NW.cc', 'A', 'A_W.cc', 'A',
         'D_DW.cc', 'D', 'D_NW.cc', 'D', 'D_W.cc', 'D',
         'M_DW.cc', 'M', 'M_NW.cc', 'M', 'M_W.cc', 'M', 'W', 'M'])
    self.mox.StubOutWithMock(copyright_scanner, '_GetWhitelistFileName')
    copyright_scanner._GetWhitelistFileName = lambda _: 'W'
    self.mox.StubOutWithMock(copyright_scanner, '_GetDeletedContents')
    def GetDeletedContentsStub(affected_file):
      self.assertEqual('W', affected_file.LocalPath())
      return ['A_DW.cc', 'D_DW.cc', 'M_DW.cc']
    copyright_scanner._GetDeletedContents = GetDeletedContentsStub
    self.mox.StubOutWithMock(copyright_scanner, 'LoadWhitelistedFilesList')
    copyright_scanner.LoadWhitelistedFilesList = \
      lambda _: ['A_W.cc', 'D_W.cc', 'M_W.cc', 'NM_W.cc']
    self.mox.StubOutWithMock(copyright_scanner, '_DoScanAtPresubmit')
    copyright_scanner._DoScanAtPresubmit = self.CreateDoScanAtPresubmitFunc()
    self.mox.ReplayAll()
    copyright_scanner.ScanAtPresubmit(self.input_api, self.output_api)
    self.assertEqual(
      ['A_W.cc', 'D_W.cc', 'M_W.cc', 'NM_W.cc'], self.GetWhitelistedFiles())
    self.assertEqual(
      ['A_DW.cc', 'A_NW.cc', 'A_W.cc', 'D_DW.cc', 'D_NW.cc', 'D_W.cc',
       'M_DW.cc', 'M_NW.cc', 'M_W.cc', 'NM_W.cc' ], self.GetFilesToCheck())
+
# Allow running this suite directly: python copyright_scanner_unittest.py
if __name__ == '__main__':
  unittest.main()
diff --git a/chromium/tools/copyright_scanner/third_party_files_whitelist.txt b/chromium/tools/copyright_scanner/third_party_files_whitelist.txt
new file mode 100644
index 00000000000..43fc3311682
--- /dev/null
+++ b/chromium/tools/copyright_scanner/third_party_files_whitelist.txt
@@ -0,0 +1,251 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file records third-party licensing information for the purposes of the
+# Android WebView build. See webview_licenses.py for details.
+#
+# New third-party code should be added under a directory named 'third_party',
+# so additions to this file should be rare. See
+# http://www.chromium.org/developers/adding-3rd-party-libraries.
+#
+# Please always use forward slashes '/' as path separators, even if you are
+# on Windows.
+
+# Copyright IBM; MIT license. This third-party code is taken from ICU, the
+# license for which we already pick up from third_party/icu/.
+base/i18n/icu_string_conversions.cc
+# Contains '(c)' in comments
+base/logging.h
+# Copyright Ron Rivest, public domain.
+base/md5.cc
+# Copyright Apple Inc; BSD license. Moved from third_party/WebKit/.
+cc/input/scroll_elasticity_helper.h
+# Copyright Netscape Communications Corporation; MPL, GPL v2 or LGPL v2
+# license. Not used on Android.
+chrome/browser/importer/firefox_profile_lock.cc
+# Copyright Netscape Communications Corporation; MPL, GPL v2 or LGPL v2
+# license. Not used on Android.
+chrome/browser/importer/firefox_profile_lock.h
+# Copyright Netscape Communications Corporation; MPL, GPL v2 or LGPL v2
+# license. Not used on Android.
+chrome/browser/importer/firefox_profile_lock_posix.cc
+# Copyright Netscape Communications Corporation; MPL, GPL v2 or LGPL v2
+# license. Not used on Android.
+chrome/browser/importer/firefox_profile_lock_win.cc
+# String 'copyright' used in code.
+chrome/common/importer/firefox_importer_utils.cc
+# Copyright Netscape Communications Corporation; MPL, GPL v2 or LGPL v2
+# license. Not used on Android.
+chrome/utility/importer/nss_decryptor.cc
+# Copyright Netscape Communications Corporation; MPL, GPL v2 or LGPL v2
+# license. Not used on Android.
+chrome/utility/importer/nss_decryptor_mac.h
+# Copyright Netscape Communications Corporation; MPL, GPL v2 or LGPL v2
+# license. Not used on Android.
+chrome/utility/importer/nss_decryptor_system_nss.cc
+# Copyright Netscape Communications Corporation; MPL, GPL v2 or LGPL v2
+# license. Not used on Android.
+chrome/utility/importer/nss_decryptor_win.h
+# Copyright Google Inc; BSD license. Test code only.
+chrome/tools/test/generate_mime_tests.pl
+# String 'copyright' used in the text presented to the user as part of
+# Google Chrome terms of service.
+components/resources/terms/chromeos/terms_en.html
+components/resources/terms/terms_am.html
+components/resources/terms/terms_ca.html
+components/resources/terms/terms_cs.html
+components/resources/terms/terms_da.html
+components/resources/terms/terms_de.html
+components/resources/terms/terms_en-GB.html
+components/resources/terms/terms_en.html
+components/resources/terms/terms_es-419.html
+components/resources/terms/terms_es.html
+components/resources/terms/terms_et.html
+components/resources/terms/terms_fi.html
+components/resources/terms/terms_fil.html
+components/resources/terms/terms_fr.html
+components/resources/terms/terms_he.html
+components/resources/terms/terms_hr.html
+components/resources/terms/terms_hu.html
+components/resources/terms/terms_id.html
+components/resources/terms/terms_it.html
+components/resources/terms/terms_ja.html
+components/resources/terms/terms_kn.html
+components/resources/terms/terms_ko.html
+components/resources/terms/terms_lt.html
+components/resources/terms/terms_lv.html
+components/resources/terms/terms_ml.html
+components/resources/terms/terms_nb.html
+components/resources/terms/terms_nl.html
+components/resources/terms/terms_pl.html
+components/resources/terms/terms_pt-BR.html
+components/resources/terms/terms_pt-PT.html
+components/resources/terms/terms_ro.html
+components/resources/terms/terms_sk.html
+components/resources/terms/terms_sl.html
+components/resources/terms/terms_sr.html
+components/resources/terms/terms_sv.html
+components/resources/terms/terms_sw.html
+components/resources/terms/terms_ta.html
+components/resources/terms/terms_te.html
+components/resources/terms/terms_th.html
+components/resources/terms/terms_tr.html
+components/resources/terms/terms_vi.html
+# Copyright Apple Inc, Nokia Corporation and Torch Mobile Inc; BSD license.
+# Contains code moved from third_party/WebKit/.
+content/browser/frame_host/navigation_controller_impl.cc
+# Copyright Apple, Inc, Google Inc; BSD license. Not used on Android.
+# Moved from third_party/WebKit/.
+content/browser/renderer_host/input/web_input_event_builders_mac.mm
+# Copyright Apple Inc and Torch Mobile Inc; BSD license. Moved from
+# third_party/WebKit/.
+content/renderer/history_controller.h
+# Copyright Apple Inc, Nokia Corporation and Torch Mobile Inc; BSD license.
+# Moved from third_party/WebKit/.
+content/renderer/history_controller.cc
+# Copyright Apple Inc and Torch Mobile Inc; BSD license. Moved from
+# third_party/WebKit/.
+content/renderer/history_entry.h
+# Copyright Apple Inc, Nokia Corporation and Torch Mobile Inc; BSD license.
+# Moved from third_party/WebKit/.
+content/renderer/history_entry.cc
+# Copyright Google Inc, no license. Not used on Android.
+google_update/google_update_idl.idl
+# Copyright WebM Project authors; BSD license. Copied and modified from
+# third_party/libvpx. Not used on Android.
+media/filters/vp8_bool_decoder.h
+media/filters/vp8_bool_decoder.cc
+# Native client not used in Android. Contains the word "Copyright"
+native_client_sdk/doc_generated/rest-devsite-examples.html
+# String '(c)' used in certificates organization names
+net/cert/x509_certificate_known_roots_win.h
+net/quic/crypto/common_cert_set_1a.inc
+net/quic/crypto/common_cert_set_1b.inc
+net/quic/crypto/common_cert_set_2a.inc
+net/quic/crypto/common_cert_set_2b.inc
+# String '(c)' used in certificates organization names
+net/test/test_certificate_data.h
+# Copyright The Chromium Authors and Netscape Communications Corporation; BSD
+# and (MPL, GPL v2 or LGPL v2) licenses. This third-party code is taken from
+# Mozilla, the license for which we already pick up from third_party/npapi/.
+net/cookies/cookie_monster.cc
+# Copyright The Chromium Authors and Netscape Communications Corporation; BSD
+# and (MPL, GPL v2 or LGPL v2) licenses. This third-party code is taken from
+# Mozilla, the license for which we already pick up from third_party/npapi/.
+net/cookies/canonical_cookie.cc
+# Copyright The Chromium Authors and Netscape Communications Corporation; BSD
+# and (MPL, GPL v2 or LGPL v2) licenses. This third-party code is taken from
+# Mozilla, the license for which we already pick up from third_party/npapi/.
+net/cookies/parsed_cookie.cc
+# Copyright The Chromium Authors and Google Inc; BSD and (MPL, GPL v2 or LGPL
+# v2) licenses. This third-party code is taken from Mozilla, the license for
+# which we already pick up from third_party/npapi/.
+net/base/registry_controlled_domains/registry_controlled_domain.cc
+# Copyright The Chromium Authors and Google Inc; BSD and (MPL, GPL v2 or LGPL
+# v2) licenses. This third-party code is taken from Mozilla, the license for
+# which we already pick up from third_party/npapi/.
+net/base/registry_controlled_domains/registry_controlled_domain.h
+# Copyright The Chromium Authors and IBM Corporation; BSD and (MPL, GPL v2 or
+# LGPL v2) licenses. This third-party code is taken from Mozilla, the license
+# for which we already pick up from third_party/npapi/.
+net/http/des.cc
+# Copyright The Chromium Authors and IBM Corporation; BSD and (MPL, GPL v2 or
+# LGPL v2) licenses. This third-party code is taken from Mozilla, the license
+# for which we already pick up from third_party/npapi/.
+net/http/http_auth_handler_ntlm_portable.cc
+# Copyright The Chromium Authors and Netscape Communications; BSD and (MPL, GPL
+# v2 or LGPL v2) licenses. This third-party code is taken from Mozilla, the
+# license for which we already pick up from third_party/npapi/.
+net/http/http_chunked_decoder.cc
+# Copyright The Chromium Authors and Netscape Communications; BSD and (MPL, GPL
+# v2 or LGPL v2) licenses. This third-party code is taken from Mozilla, the
+# license for which we already pick up from third_party/npapi/.
+net/http/http_chunked_decoder.h
+# Copyright IBM Corporation; MPL, GPL v2 or LGPL v2 license. This third-party
+# code is taken from Mozilla, the license for which we already pick up from
+# third_party/npapi/.
+net/http/md4.cc
+# Copyright IBM Corporation; MPL, GPL v2 or LGPL v2 license. This third-party
+# code is taken from Mozilla, the license for which we already pick up from
+# third_party/npapi/.
+net/http/md4.h
+# Netscape Communications Corporation; MPL, GPL v2 or LGPL v2 license. This
+# third-party code is taken from Mozilla, the license for which we already pick
+# up from third_party/npapi/.
+net/proxy/proxy_resolver_script.h
+# Copyright The Chromium Authors and Netscape Communications Corporation; BSD
+# and (MPL, GPL v2 or LGPL v2) licenses. Not used on Android.
+net/socket/ssl_client_socket_nss.cc
+# Contains the word 'Copyright' in comments
+ppapi/generators/idl_c_proto.py
+ppapi/generators/idl_outfile.py
+# Copyright (c) 2007-2009 The Khronos Group Inc. Not used on Android
+ppapi/lib/gl/include/EGL/egl.h
+ppapi/lib/gl/include/EGL/eglext.h
+ppapi/lib/gl/include/EGL/eglplatform.h
+ppapi/lib/gl/include/KHR/khrplatform.h
+# Copyright The Android Open Source Project; ASL v2 license.
+skia/config/SkUserConfig.h
+# Generates copyright headers for Chromium.
+tools/boilerplate.py
+# Contains test strings that look like copyrights.
+tools/copyright_scanner/copyright_scanner_unittest.py
+# Contains word 'copyright' in comments.
+tools/gen_keyboard_overlay_data/gen_keyboard_overlay_data.py
+# This third-party code is taken from Mozilla, but is copyright Google and has
+# been re-licensed under the Chromium license.
+tools/imagediff/image_diff_png.cc
+# Copyright Ero Carrera; BSD license. Tool only.
+tools/symsrc/pefile.py
+# Copyright The Chromium Authors, Sun Microsystems Inc, the V8 project authors;
+# BSD license. Tool only.
+tools/traceline/traceline/assembler.h
+# Copyright Google Inc; BSD license. Tool only.
+tools/traceline/traceline/sidestep/mini_disassembler.cc
+# Copyright Marijn Haverbeke. MIT license. Tool only, not used on Android.
+tools/win/sizeviewer/clike.js
+# Copyright Marijn Haverbeke. MIT license. Tool only, not used on Android.
+tools/win/sizeviewer/codemirror.js
+# Copyright The Chromium Authors, Apple Inc; BSD license. Not used on Android.
+ui/base/clipboard/clipboard_util_win.cc
+# Copyright The Chromium Authors, Apple Inc and Graham Dennis; BSD license. Not
+# used on Android.
+ui/base/cocoa/tool_tip_base_view.mm
+# Copyright The Chromium Authors, Apple Inc; BSD license. Not used on Android.
+ui/base/dragdrop/os_exchange_data_provider_win.cc
+# Copyright Apple Inc; BSD license. Moved from third_party/WebKit/.
+ui/events/blink/input_scroll_elasticity_controller.cc
+ui/events/blink/input_scroll_elasticity_controller.h
+# Copyright The Chromium Authors, Michael Emmel, Google Inc; BSD license. This
+# third-party code is taken from WebKit, the license for which we already pick
+# up from webkit/.
+ui/events/keycodes/keyboard_codes_posix.h
+# String 'copyright' used in code.
+ui/file_manager/file_manager/foreground/js/main_scripts.js
+# String 'copyright' used in code.
+ui/file_manager/gallery/js/gallery_scripts.js
+# String 'copyright' used in code.
+ui/file_manager/video_player/js/video_player_scripts.js
+# This third-party code is taken from Mozilla, but is copyright Google and has
+# been re-licensed under the Chromium license.
+ui/gfx/codec/jpeg_codec.cc
+# This third-party code is taken from Mozilla, but is copyright Google and has
+# been re-licensed under the Chromium license.
+ui/gfx/codec/png_codec.cc
+# Copyright The Chromium Authors and Apple Inc; BSD license. This third-party
+# code is taken from WebKit, the license for which we already pick up from
+# webkit/.
+content/browser/appcache/appcache_manifest_parser.cc
+# Copyright The Chromium Authors and Apple Inc; BSD license. This third-party
+# code is taken from WebKit, the license for which we already pick up from
+# webkit/.
+content/browser/appcache/appcache_manifest_parser.h
+# String 'copyright' used in code.
+ui/webui/resources/js/cr/ui/array_data_model.js
+# Copyright The Chromium Authors and Apple Inc; BSD license. This third-party
+# code is taken from WebKit, the license for which we already pick up from
+# webkit/.
+components/test_runner/helper/layout_test_helper_mac.mm
+# Bundles of existing code.
+chrome/browser/resources/md_downloads/vulcanized.html
diff --git a/chromium/tools/coverity/coverity.py b/chromium/tools/coverity/coverity.py
new file mode 100755
index 00000000000..6fed7ca8cfa
--- /dev/null
+++ b/chromium/tools/coverity/coverity.py
@@ -0,0 +1,308 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Runs Coverity Prevent on a build of Chromium.
+
+This script should be run in a Visual Studio Command Prompt, so that the
+INCLUDE, LIB, and PATH environment variables are set properly for Visual
+Studio.
+
+Usage examples:
+ coverity.py
+ coverity.py --dry-run
+ coverity.py --target=debug
+ %comspec% /c ""C:\Program Files\Microsoft Visual Studio 8\VC\vcvarsall.bat"
+ x86 && C:\Python24\python.exe C:\coverity.py"
+
+For a full list of options, pass the '--help' switch.
+
+See http://support.microsoft.com/kb/308569 for running this script as a
+Scheduled Task on Windows XP.
+
+"""
+
+import optparse
+import os
+import os.path
+import shutil
+import subprocess
+import sys
+import time
+
+# These constants provide default values, but are exposed as command-line
+# flags. See the --help for more info. Note that for historical reasons
+# (the script started out as Windows-only and has legacy usages which pre-date
+# these switches), the constants are all tuned for Windows.
+# Usage of this script on Linux pretty much requires explicit
+# --source-dir, --coverity-bin-dir, --coverity-intermediate-dir, and
+# --coverity-target command line flags.
+
# Checkout root: the directory that contains 'src'.
CHROMIUM_SOURCE_DIR = 'C:\\chromium.latest'

# Relative to CHROMIUM_SOURCE_DIR.
CHROMIUM_SOLUTION_FILE = 'src\\chrome\\chrome.sln'

# Relative to CHROMIUM_SOURCE_DIR.
CHROMIUM_SOLUTION_DIR = 'src\\chrome'

# Location of the cov-build/cov-analyze/cov-commit-defects binaries.
COVERITY_BIN_DIR = 'C:\\coverity\\prevent-win32-4.5.1\\bin'

# Where cov-build writes its instrumented build output.
COVERITY_INTERMEDIATE_DIR = 'C:\\coverity\\cvbuild\\cr_int'

# Flags passed verbatim to cov-analyze (checkers to enable and tune).
COVERITY_ANALYZE_OPTIONS = ('--cxx --security --concurrency '
                            '--enable ATOMICITY '
                            '--enable MISSING_LOCK '
                            '--enable DELETE_VOID '
                            '--checker-option PASS_BY_VALUE:size_threshold:16 '
                            '--checker-option '
                            'USE_AFTER_FREE:allow_simple_use:false '
                            '--enable-constraint-fpp '
                            '--enable-callgraph-metrics')

# Might need to be changed to FQDN
COVERITY_REMOTE = 'chromecoverity-linux1'

# Kept as a string because it is spliced into the command line.
COVERITY_PORT = '5467'

COVERITY_PRODUCT = 'Chromium'

COVERITY_TARGET = 'Windows'

COVERITY_USER = 'admin'
# looking for a PASSWORD constant? Look at --coverity-password-file instead.

# Relative to CHROMIUM_SOURCE_DIR. Contains the pid of this script.
LOCK_FILE = 'coverity.lock'
+
+
def _ReadPassword(pwfilename):
  """Reads the Coverity password from the file where it was stashed.

  Args:
    pwfilename: path to the password stash file.

  Returns:
    The first line of the file with trailing whitespace (including the
    newline) stripped.
  """
  # 'with' guarantees the handle is closed even if readline() raises,
  # unlike the previous open()/readline()/close() sequence.
  with open(pwfilename, 'r') as pwfile:
    return pwfile.readline().rstrip()
+
+
def _RunCommand(cmd, dry_run, shell=False, echo_cmd=True):
  """Runs the command if dry_run is false, otherwise just prints the command.

  Args:
    cmd: command to run, as a string or argument list (see subprocess.call).
    dry_run: when true, only echo; never execute.
    shell: passed through to subprocess.call.
    echo_cmd: when false, do not print the command (used to keep the
        Coverity password out of the log).

  Returns:
    The child's exit status, or 0 when dry_run is set.
  """
  if echo_cmd:
    # print(x) with a single argument behaves the same under Python 2
    # (parenthesized expression) and Python 3 (function call), unlike the
    # former 'print cmd' statement which was Python-2-only.
    print(cmd)
  if dry_run:
    return 0
  return subprocess.call(cmd, shell=shell)
+
+
def _ReleaseLock(lock_file, lock_filename):
  """Drops the run lock: closes the descriptor, then unlinks the file.

  Split out into a helper so every early-exit path can release the lock.
  """
  os.close(lock_file)
  os.unlink(lock_filename)
+
+
def run_coverity(options, args):
  """Syncs, cleanly rebuilds Chromium under cov-build, analyzes and commits.

  Args:
    options: parsed command line options (see main()).
    args: positional command line arguments; currently unused.

  Returns:
    0 on success; 1 when the lock is already held, gclient fails, or the
    platform is unrecognized.
  """
  # Create the lock file to prevent another instance of this script from
  # running.
  lock_filename = os.path.join(options.source_dir, LOCK_FILE)
  try:
    # O_EXCL makes the open fail if another run already holds the lock.
    lock_file = os.open(lock_filename,
                        os.O_CREAT | os.O_EXCL | os.O_TRUNC | os.O_RDWR)
  except OSError, err:
    print 'Failed to open lock file:\n ' + str(err)
    return 1

  # Write the pid of this script (the python.exe process) to the lock file.
  os.write(lock_file, str(os.getpid()))

  # Coverity build-type names are capitalized ('Debug'/'Release').
  options.target = options.target.title()

  start_time = time.time()

  print 'Change directory to ' + options.source_dir
  os.chdir(options.source_dir)

  # The coverity-password filename may have been a relative path.
  # If so, assume it's relative to the source directory, which means
  # the time to read the password is after we do the chdir().
  coverity_password = _ReadPassword(options.coverity_password_file)

  cmd = 'gclient sync'
  gclient_exit = _RunCommand(cmd, options.dry_run, shell=True)
  if gclient_exit != 0:
    print 'gclient aborted with status %s' % gclient_exit
    _ReleaseLock(lock_file, lock_filename)
    return 1

  print 'Elapsed time: %ds' % (time.time() - start_time)

  # Do a clean build. Remove the build output directory first.
  if sys.platform.startswith('linux'):
    rm_path = os.path.join(options.source_dir,'src','out',options.target)
  elif sys.platform == 'win32':
    rm_path = os.path.join(options.source_dir,options.solution_dir,
                           options.target)
  elif sys.platform == 'darwin':
    rm_path = os.path.join(options.source_dir,'src','xcodebuild')
  else:
    print 'Platform "%s" unrecognized, aborting' % sys.platform
    _ReleaseLock(lock_file, lock_filename)
    return 1

  if options.dry_run:
    print 'shutil.rmtree(%s)' % repr(rm_path)
  else:
    # ignore_errors=True: a missing output directory is fine on a fresh run.
    shutil.rmtree(rm_path,True)

  if options.preserve_intermediate_dir:
    print 'Preserving intermediate directory.'
  else:
    if options.dry_run:
      print 'shutil.rmtree(%s)' % repr(options.coverity_intermediate_dir)
      print 'os.mkdir(%s)' % repr(options.coverity_intermediate_dir)
    else:
      shutil.rmtree(options.coverity_intermediate_dir,True)
      os.mkdir(options.coverity_intermediate_dir)

  print 'Elapsed time: %ds' % (time.time() - start_time)

  # Wrap the platform's native build in cov-build so Coverity can observe
  # the compilation. Note the Linux/Mac branches chdir into the tree and do
  # not chdir back; subsequent relative paths rely on this.
  use_shell_during_make = False
  if sys.platform.startswith('linux'):
    use_shell_during_make = True
    os.chdir('src')
    _RunCommand('pwd', options.dry_run, shell=True)
    cmd = '%s/cov-build --dir %s make BUILDTYPE=%s chrome' % (
        options.coverity_bin_dir, options.coverity_intermediate_dir,
        options.target)
  elif sys.platform == 'win32':
    cmd = ('%s\\cov-build.exe --dir %s devenv.com %s\\%s /build %s '
           '/project chrome.vcproj') % (
               options.coverity_bin_dir, options.coverity_intermediate_dir,
               options.source_dir, options.solution_file, options.target)
  elif sys.platform == 'darwin':
    use_shell_during_make = True
    os.chdir('src/chrome')
    _RunCommand('pwd', options.dry_run, shell=True)
    cmd = ('%s/cov-build --dir %s xcodebuild -project chrome.xcodeproj '
           '-configuration %s -target chrome') % (
               options.coverity_bin_dir, options.coverity_intermediate_dir,
               options.target)


  _RunCommand(cmd, options.dry_run, shell=use_shell_during_make)
  print 'Elapsed time: %ds' % (time.time() - start_time)

  cov_analyze_exe = os.path.join(options.coverity_bin_dir,'cov-analyze')
  cmd = '%s --dir %s %s' % (cov_analyze_exe,
                            options.coverity_intermediate_dir,
                            options.coverity_analyze_options)
  _RunCommand(cmd, options.dry_run, shell=use_shell_during_make)
  print 'Elapsed time: %ds' % (time.time() - start_time)

  cov_commit_exe = os.path.join(options.coverity_bin_dir,'cov-commit-defects')

  # On Linux we have started using a Target with a space in it, so we want
  # to quote it. On the other hand, Windows quoting doesn't work quite the
  # same way. To be conservative, I'd like to avoid quoting an argument
  # that doesn't need quoting and which we haven't historically been quoting
  # on that platform. So, only quote the target if we have to.
  coverity_target = options.coverity_target
  if sys.platform != 'win32':
    coverity_target = '"%s"' % coverity_target

  cmd = ('%s --dir %s --remote %s --port %s '
         '--product %s '
         '--target %s '
         '--user %s '
         '--password %s') % (cov_commit_exe,
                             options.coverity_intermediate_dir,
                             options.coverity_dbhost,
                             options.coverity_port,
                             options.coverity_product,
                             coverity_target,
                             options.coverity_user,
                             coverity_password)
  # Avoid echoing the Commit command because it has a password in it
  _RunCommand(cmd, options.dry_run, shell=use_shell_during_make, echo_cmd=False)

  print 'Total time: %ds' % (time.time() - start_time)

  _ReleaseLock(lock_file, lock_filename)

  return 0
+
+
def main():
  """Parses the command line and launches a Coverity run.

  Returns:
    run_coverity()'s exit status: 0 on success, 1 on failure.
  """
  parser = optparse.OptionParser()
  parser.add_option(
      '', '--dry-run', action='store_true', default=False,
      help='print but don\'t run the commands')
  parser.add_option(
      '', '--target', default='Release',
      help='build target (Debug or Release)')
  parser.add_option(
      '', '--source-dir', dest='source_dir',
      help='full path to directory ABOVE "src"',
      default=CHROMIUM_SOURCE_DIR)
  parser.add_option(
      '', '--solution-file', dest='solution_file',
      default=CHROMIUM_SOLUTION_FILE)
  parser.add_option(
      '', '--solution-dir', dest='solution_dir',
      default=CHROMIUM_SOLUTION_DIR)
  parser.add_option(
      '', '--coverity-bin-dir', dest='coverity_bin_dir',
      default=COVERITY_BIN_DIR)
  parser.add_option(
      '', '--coverity-intermediate-dir', dest='coverity_intermediate_dir',
      default=COVERITY_INTERMEDIATE_DIR)
  parser.add_option(
      '', '--coverity-analyze-options', dest='coverity_analyze_options',
      help=('all cov-analyze options, e.g. "%s"' % COVERITY_ANALYZE_OPTIONS),
      default=COVERITY_ANALYZE_OPTIONS)
  parser.add_option(
      '', '--coverity-db-host', dest='coverity_dbhost',
      help=('coverity defect db server hostname, e.g. %s' % COVERITY_REMOTE),
      default=COVERITY_REMOTE)
  parser.add_option(
      '', '--coverity-db-port', dest='coverity_port',
      help=('port # of coverity web/db server, e.g. %s' % COVERITY_PORT),
      default=COVERITY_PORT)
  parser.add_option(
      '', '--coverity-product', dest='coverity_product',
      help=('Product name reported to coverity, e.g. %s' % COVERITY_PRODUCT),
      default=COVERITY_PRODUCT)
  parser.add_option(
      '', '--coverity-target', dest='coverity_target',
      help='Platform Target reported to coverity',
      default=COVERITY_TARGET)
  parser.add_option(
      '', '--coverity-user', dest='coverity_user',
      help='Username used to log into coverity',
      default=COVERITY_USER)
  parser.add_option(
      '', '--coverity-password-file', dest='coverity_password_file',
      help='file containing the coverity password',
      default='coverity-password')
  parser.add_option(
      '', '--preserve-intermediate-dir', action='store_true', default=False,
      help=('By default, the intermediate dir is emptied before analysis. '
            'This switch disables that behavior.'))

  options, args = parser.parse_args()
  return run_coverity(options, args)
+
+
# Exit with run_coverity()'s status when invoked as a script.
if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/cr/OWNERS b/chromium/tools/cr/OWNERS
new file mode 100644
index 00000000000..fa6851a195a
--- /dev/null
+++ b/chromium/tools/cr/OWNERS
@@ -0,0 +1,3 @@
+miguelg@chromium.org
+petrcermak@chromium.org
+skyostil@chromium.org
diff --git a/chromium/tools/cr/README b/chromium/tools/cr/README
new file mode 100644
index 00000000000..6f7bdc21f07
--- /dev/null
+++ b/chromium/tools/cr/README
@@ -0,0 +1,14 @@
+cr is a unified interface to dealing with output directories, and all the
+things you do with them.
+
+
+It offers the advantages of dealing with multiple output directories in a
+single client, and unifying the command lines needed to deal with the various
+flavours and targets.
+This means it will be very easy to build, run and test multiple platforms in a
+single checkout.
+It also makes the commands more discoverable by being a single starting point
+with built in help.
+
+If you are using bash, source cr-bash-helpers.sh and you will get the cr
+function in your shell, along with tab completion.
diff --git a/chromium/tools/cr/cr-bash-helpers.sh b/chromium/tools/cr/cr-bash-helpers.sh
new file mode 100755
index 00000000000..3fa6a8aa93b
--- /dev/null
+++ b/chromium/tools/cr/cr-bash-helpers.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Source this file into your shell to gain the cr function and tab completion
+# for it
+
+# Make sure we're being sourced (possibly by another script). Check for bash
+# since zsh sets $0 when sourcing.
+if [[ -n "$BASH_VERSION" && "${BASH_SOURCE:-$0}" == "$0" ]]; then
+ echo "ERROR: cr-bash-helpers.sh must be sourced."
+ exit 1
+fi
+
+READLINK_e=("readlink" "-e")
+if [[ -x `which greadlink` ]]; then
+ READLINK_e=("greadlink" "-e")
+fi
+
+if [[ $(uname) == "Darwin" ]]; then
+ cr_base_dir=$(dirname "${BASH_SOURCE:-$0}")
+else
+ cr_base_dir=$(dirname $(${READLINK_e[@]} "${BASH_SOURCE:-$0}"))
+fi
+
+cr_main="${cr_base_dir}/main.py"
+cr_exec=("PYTHONDONTWRITEBYTECODE=1" "python" "${cr_main}")
+
+# The main entry point to the cr tool.
+# Invokes the python script with pyc files turned off.
+function cr() {
+ env ${cr_exec[@]} "$@"
+}
+
+# Attempts to cd to the root/src of the current client.
+function crcd() {
+ cd $(cr info -s CR_SRC)
+}
+
+# Add to your PS1 to have the current selected output directory in your prompt
+function _cr_ps1() {
+ cr info -s CR_OUT_FULL
+}
+
+# The tab completion handler, delegates into the python script.
+function _cr_complete() {
+  COMPREPLY=()
+  local cur="${COMP_WORDS[COMP_CWORD]}"
+  # (an unused 'main' local with a stray ')' — a bash syntax error — was here)
+  local completions="$(env COMP_CWORD=${COMP_CWORD} \
+       COMP_WORD=${cur} \
+       ${cr_exec[@]})"
+  COMPREPLY=( $(compgen -W "${completions}" -- ${cur}) )
+}
+
+# Setup the bash auto complete
+complete -F _cr_complete cr
diff --git a/chromium/tools/cr/cr.sh b/chromium/tools/cr/cr.sh
new file mode 100755
index 00000000000..5d1c13ade4a
--- /dev/null
+++ b/chromium/tools/cr/cr.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+echo "*************************************************"
+echo "* Do not source cr.sh ***************************"
+echo "*************************************************"
+echo "* You need to source cr-bash-helpers.sh instead *"
+echo "* This file will stop working and be removed *"
+echo "* soon. *"
+echo "*************************************************"
+source $(dirname $(realpath "${BASH_SOURCE:-$0}"))/cr-bash-helpers.sh
diff --git a/chromium/tools/cr/cr/__init__.py b/chromium/tools/cr/cr/__init__.py
new file mode 100644
index 00000000000..3c515753af3
--- /dev/null
+++ b/chromium/tools/cr/cr/__init__.py
@@ -0,0 +1,21 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Chromium cr tool module.
+
+This is the root module of all the cr code.
+Commonly accessed elements, including all plugins, are promoted into this
+module.
+"""
+
+import cr.loader
+from cr.loader import Import
+
+Import(__name__, 'auto.user')
+Import(__name__, 'autocomplete')
+Import(__name__, 'config')
+Import(__name__, 'plugin')
+Import(__name__, 'base')
+Import(__name__, 'commands')
+Import(__name__, 'actions')
diff --git a/chromium/tools/cr/cr/actions/__init__.py b/chromium/tools/cr/cr/actions/__init__.py
new file mode 100644
index 00000000000..63ca9b09644
--- /dev/null
+++ b/chromium/tools/cr/cr/actions/__init__.py
@@ -0,0 +1,17 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A package to hold all the actions for the cr tool.
+
+This package holds the standard actions used by the commands in the cr tool.
+These actions are the things that actually perform the work, they are generally
+run in sequences by commands.
+"""
+
+import cr
+
+cr.Import(__name__, 'action')
+cr.Import(__name__, 'runner')
+cr.Import(__name__, 'builder')
+cr.Import(__name__, 'installer')
diff --git a/chromium/tools/cr/cr/actions/action.py b/chromium/tools/cr/cr/actions/action.py
new file mode 100644
index 00000000000..d187094a26b
--- /dev/null
+++ b/chromium/tools/cr/cr/actions/action.py
@@ -0,0 +1,47 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the Action plugin base class."""
+
+import cr
+
+
+class Action(cr.Plugin):
+ """Base class for cr actions.
+
+ This provides the standard interface used to add actions to commands,
+ including support for selecting the right implementation of an action and
+ handling command line arguments for the action.
+ """
+
+ @classmethod
+ def AddArguments(cls, command, parser):
+ cls.AddSelectorArg(command, parser)
+
+ @classmethod
+ def AddSelectorArg(cls, command, parser):
+ parser.add_argument(
+ cls.SELECTOR_ARG, dest=cls.SELECTOR,
+ choices=cls.Choices(),
+ default=None,
+ help=cls.SELECTOR_HELP + ' Overrides ' + cls.SELECTOR
+ )
+
+ @cr.Plugin.activemethod
+ def Skipping(self):
+ """A method that is used to detect void or skip implementations.
+
+ Most actions have a skip version that you can select to indicate that you
+ want to not perform the action at all.
+ It is important that commands can detect this so they can modify the action
+ sequence if there are other changes that depend on it (for instance not
+ performing actions that were only there to produce the inputs of an action
+ that is being skipped).
+
+ Returns:
+ True if this implementation is a skip action.
+ """
+ return self.name == 'skip'
+
+
diff --git a/chromium/tools/cr/cr/actions/adb.py b/chromium/tools/cr/cr/actions/adb.py
new file mode 100644
index 00000000000..ab706861798
--- /dev/null
+++ b/chromium/tools/cr/cr/actions/adb.py
@@ -0,0 +1,150 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module to hold adb specific action implementations."""
+
+import re
+
+import cr
+
+
+class Adb(object):
+ """Exposes the functionality of the adb tool to the rest of cr.
+
+ This is intended as the only class in the cr that needs to understand the
+ adb command line, and expose it in neutral form to the rest of the code.
+ """
+
+ # Tracks the set of killed target names, so we don't keep issuing kill
+ # commands that are not going to have any effect.
+ _kills = {}
+
+ @classmethod
+ def GetPids(cls, target):
+ """Gets the set of running PIDs that match the specified target."""
+ pids = []
+ with target:
+ output = cr.Host.Capture('{CR_ADB}', 'shell', 'ps')
+ pattern = re.compile(r'\S+\s+(\d+)\s+.*{CR_PROCESS}')
+ for line in output.split('\n'):
+ match = re.match(pattern, line)
+ if match:
+ pids.append(match.group(1))
+ return pids
+
+ @classmethod
+ def Run(cls, target, arguments):
+ """Invoke a target binary on the device."""
+ with target:
+ cr.Host.Execute(
+ '{CR_ADB}', 'shell', 'am', 'start',
+ '-a', '{CR_ACTION}',
+ '-n', '{CR_INTENT}',
+ '{CR_RUN_ARGUMENTS}',
+ *arguments
+ )
+
+ @classmethod
+ def Kill(cls, target, _):
+ """Kill all running processes for a target."""
+ target_name = target.build_target
+ if target_name in cls._kills:
+ # already killed this target, do nothing
+ return
+ pids = cls.GetPids(target)
+ if pids:
+ with target:
+ cr.Host.Execute('{CR_ADB}', 'shell', 'kill', *pids)
+ elif target.verbose:
+ print target.Substitute('{CR_TARGET_NAME} not running')
+ cls._kills[target_name] = True
+
+ @classmethod
+ def Uninstall(cls, target, arguments):
+ with target:
+ cr.Host.Execute(
+ '{CR_ADB}', 'uninstall',
+ '{CR_PACKAGE}',
+ *arguments
+ )
+
+ @classmethod
+ def Install(cls, target, arguments):
+ with target:
+ cr.Host.Execute(
+ '{CR_ADB}', 'install',
+ '{CR_BINARY}',
+ *arguments
+ )
+
+ @classmethod
+ def Reinstall(cls, target, arguments):
+ with target:
+ cr.Host.Execute(
+ '{CR_ADB}', 'install',
+ '-r',
+ '{CR_BINARY}',
+ *arguments
+ )
+
+ @classmethod
+ def AttachGdb(cls, target, arguments):
+ with target:
+ cr.Host.Execute(
+ '{CR_ADB_GDB}',
+ '--adb={CR_ADB}',
+ '--symbol-dir=${CR_BUILD_DIR}/lib',
+ '--program-name={CR_TARGET_NAME}',
+ '--package-name={CR_PACKAGE}',
+ *arguments
+ )
+
+
+class AdbRunner(cr.Runner):
+ """An implementation of cr.Runner for the android platform."""
+
+ @property
+ def enabled(self):
+ return cr.AndroidPlatform.GetInstance().is_active
+
+ def Kill(self, targets, arguments):
+ for target in targets:
+ Adb.Kill(target, arguments)
+
+ def Run(self, target, arguments):
+ Adb.Run(target, arguments)
+
+ def Test(self, target, arguments):
+ with target:
+ test_type = cr.context.Get('CR_TEST_TYPE')
+ if test_type == cr.Target.INSTRUMENTATION_TEST:
+ target_name_flag = '--test-apk'
+ else:
+ target_name_flag = '-s'
+ cr.Host.Execute(
+ '{CR_TEST_RUNNER}', test_type,
+ target_name_flag, '{CR_TARGET_NAME}',
+ '--{CR_TEST_MODE}',
+ *arguments
+ )
+
+
+class AdbInstaller(cr.Installer):
+ """An implementation of cr.Installer for the android platform."""
+
+ @property
+ def enabled(self):
+ return cr.AndroidPlatform.GetInstance().is_active
+
+ def Uninstall(self, targets, arguments):
+ for target in targets:
+ Adb.Uninstall(target, arguments)
+
+ def Install(self, targets, arguments):
+ for target in targets:
+ Adb.Install(target, arguments)
+
+ def Reinstall(self, targets, arguments):
+ for target in targets:
+ Adb.Reinstall(target, arguments)
diff --git a/chromium/tools/cr/cr/actions/builder.py b/chromium/tools/cr/cr/actions/builder.py
new file mode 100644
index 00000000000..015e6e5ace3
--- /dev/null
+++ b/chromium/tools/cr/cr/actions/builder.py
@@ -0,0 +1,82 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the Builder base class."""
+
+import difflib
+
+import cr
+
+
+class Builder(cr.Action, cr.Plugin.Type):
+ """Base class for implementing builders.
+
+ Builder implementations must override the Build and Clean methods at a
+ minimum to build a target and clean up back to a pristine state respectively.
+ They can also override Rebuild if they are able to handle it in a more
+ efficient way that a Clean Build sequence.
+ They should override the GetTargets method to return the set of valid targets
+ the build system knows about, and override IsTarget if they can implement it
+  more efficiently than checking for presence in the result of GetTargets.
+ """
+
+ SELECTOR_ARG = '--builder'
+ SELECTOR = 'CR_BUILDER'
+ SELECTOR_HELP = 'Sets the builder to use to update dependencies.'
+
+ @cr.Plugin.activemethod
+ def Build(self, targets, arguments):
+ raise NotImplementedError('Must be overridden.')
+
+ @cr.Plugin.activemethod
+ def Clean(self, targets, arguments):
+ """Clean temporary files built by a target."""
+ raise NotImplementedError('Must be overridden.')
+
+ @cr.Plugin.activemethod
+ def Rebuild(self, targets, arguments):
+ """Make a target build even if it is up to date.
+
+ Default implementation is to do a Clean and Build sequence.
+ Do not call the base version if you implement a more efficient one.
+ """
+ self.Clean(targets, [])
+ self.Build(targets, arguments)
+
+ @cr.Plugin.activemethod
+ def GetTargets(self):
+ """Gets the full set of targets supported by this builder.
+
+ Used in automatic target name transformations, and also in offering the
+ user choices.
+ """
+ return []
+
+ @cr.Plugin.activemethod
+ def IsTarget(self, target_name):
+ """Check if a target name is on the builder knows about."""
+ return target_name in self.GetTargets()
+
+ @cr.Plugin.activemethod
+ def GuessTargets(self, target_name):
+ """Returns a list of closest matching targets for a named target."""
+ return difflib.get_close_matches(target_name, self.GetTargets(), 10, 0.4)
+
+
+class SkipBuilder(Builder):
+ """The "skip" version of a Builder, causes the build step to be skipped."""
+
+ @property
+ def priority(self):
+ return super(SkipBuilder, self).priority - 1
+
+ def Build(self, targets, arguments):
+ pass
+
+ def Clean(self, targets, arguments):
+ pass
+
+ def IsTarget(self, target_name):
+ return True
+
diff --git a/chromium/tools/cr/cr/actions/debugger.py b/chromium/tools/cr/cr/actions/debugger.py
new file mode 100644
index 00000000000..1db3d8103b8
--- /dev/null
+++ b/chromium/tools/cr/cr/actions/debugger.py
@@ -0,0 +1,51 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the Debugger base class."""
+
+import cr
+
+
+class Debugger(cr.Action, cr.Plugin.Type):
+ """Base class for implementing debuggers.
+
+ Implementations must override the Invoke and Attach methods.
+ """
+
+ SELECTOR_ARG = '--debugger'
+ SELECTOR = 'CR_DEBUGGER'
+ SELECTOR_HELP = 'Sets the debugger to use for debug commands.'
+
+ @classmethod
+ def AddArguments(cls, command, parser):
+ cr.Runner.AddSelectorArg(command, parser)
+
+ @classmethod
+ def ShouldInvoke(cls):
+ """Checks if the debugger is attaching or launching."""
+ return not cr.Runner.Skipping()
+
+ @cr.Plugin.activemethod
+ def Restart(self, targets, arguments):
+ """Ask the debugger to restart.
+
+ Defaults to a Kill Invoke sequence.
+ """
+ self.Kill(targets, [])
+ self.Invoke(targets, arguments)
+
+ @cr.Plugin.activemethod
+ def Kill(self, targets, arguments):
+ """Kill the running debugger."""
+ cr.Runner.Kill(targets, arguments)
+
+ @cr.Plugin.activemethod
+ def Invoke(self, targets, arguments):
+ """Invoke the program within a debugger."""
+ raise NotImplementedError('Must be overridden.')
+
+ @cr.Plugin.activemethod
+ def Attach(self, targets, arguments):
+ """Attach a debugger to a running program."""
+ raise NotImplementedError('Must be overridden.')
diff --git a/chromium/tools/cr/cr/actions/gdb.py b/chromium/tools/cr/cr/actions/gdb.py
new file mode 100644
index 00000000000..cdbb1bf733e
--- /dev/null
+++ b/chromium/tools/cr/cr/actions/gdb.py
@@ -0,0 +1,39 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+import cr
+
+
+class GdbDebugger(cr.Debugger):
+ """An implementation of cr.Debugger that launches gdb."""
+
+ DETECTED = cr.Config('DETECTED')
+
+ @property
+ def enabled(self):
+ return (cr.LinuxPlatform.GetInstance().is_active and
+ self.DETECTED.Find('CR_GDB'))
+
+ def Invoke(self, targets, arguments):
+ for target in targets:
+ with target:
+ cr.Host.Execute(
+ '{CR_GDB}', '--eval-command=run', '--args',
+ '{CR_BINARY}',
+ '{CR_RUN_ARGUMENTS}',
+ *arguments
+ )
+
+ def Attach(self, targets, arguments):
+ raise NotImplementedError('Attach not currently supported for gdb.')
+
+ @classmethod
+ def ClassInit(cls):
+ # Attempt to find a valid gdb on the path.
+ gdb_binaries = cr.Host.SearchPath('gdb')
+ if gdb_binaries:
+ cls.DETECTED.Set(CR_GDB=gdb_binaries[0])
+
diff --git a/chromium/tools/cr/cr/actions/gn.py b/chromium/tools/cr/cr/actions/gn.py
new file mode 100644
index 00000000000..34b045bf5ae
--- /dev/null
+++ b/chromium/tools/cr/cr/actions/gn.py
@@ -0,0 +1,88 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module to add gn support to cr."""
+
+import cr
+import os
+import re
+
+GN_ARG_PREFIX = 'GN_ARG_'
+
+
+class GnPrepareOut(cr.PrepareOut):
+ """A prepare action that runs gn whenever you select an output directory."""
+
+ ACTIVE = cr.Config.From(
+ GN_ARG_is_component_build='true',
+ )
+
+ @property
+ def priority(self):
+ return -1
+
+ def UpdateContext(self):
+ # Collapse GN_ARGS from all GN_ARG prefixes.
+ gn_args = cr.context.Find('GN_ARGS') or ''
+ for key, value in cr.context.exported.items():
+ if key.startswith(GN_ARG_PREFIX):
+ gn_args += ' %s=%s' % (key[len(GN_ARG_PREFIX):], value)
+
+ gn_args += (' is_debug=%s' %
+ ('true' if cr.context['CR_BUILDTYPE'] == 'Debug' else 'false'))
+
+ arch = cr.context.Find('CR_ENVSETUP_ARCH') or ''
+ if arch:
+ gn_args += ' target_cpu="%s"' % ('x86' if arch == 'ia32' else arch)
+
+ # Detect goma.
+ goma_binaries = cr.Host.SearchPath('gomacc', [
+ '{GOMA_DIR}',
+ '/usr/local/google/code/goma',
+ os.path.expanduser('~/goma')
+ ])
+ if goma_binaries:
+ gn_args += ' use_goma=true'
+ gn_args += ' goma_dir="%s"' % os.path.dirname(goma_binaries[0])
+
+ cr.context['GN_ARGS'] = gn_args.strip()
+ if cr.context.verbose >= 1:
+ print cr.context.Substitute('GN_ARGS = {GN_ARGS}')
+
+ def Prepare(self):
+ if cr.context.verbose >= 1:
+ print cr.context.Substitute('Invoking gn with {GN_ARGS}')
+
+ out_path = os.path.join(cr.context['CR_SRC'], cr.context['CR_OUT_FULL'])
+ args_file = os.path.join(out_path, 'args.gn')
+ args = {}
+ # Split the argument list while preserving quotes,
+ # e.g., a="b c" becomes ('a', '"b c"').
+ split_re = r'(?:[^\s,"]|"(?:\\.|[^"])*")+'
+ for arg in re.findall(split_re, cr.context['GN_ARGS']):
+ key, value = arg.split('=', 1)
+ args[key] = value
+
+ # Override any existing settings.
+ arg_lines = []
+ if os.path.exists(args_file):
+ with open(args_file) as f:
+ for line in f:
+ key = line.split('=', 1)[0].strip()
+ if key not in args:
+ arg_lines.append(line.strip())
+
+ # Append new settings.
+ for key, value in args.items():
+ arg_lines.append('%s = %s' % (key, value))
+
+ try:
+ os.makedirs(out_path)
+ except OSError:
+ if not os.path.isdir(out_path):
+ raise
+ with open(args_file, 'w') as f:
+ f.write('\n'.join(arg_lines) + '\n')
+
+ cr.Host.Execute('gn', 'gen', out_path)
diff --git a/chromium/tools/cr/cr/actions/gyp.py b/chromium/tools/cr/cr/actions/gyp.py
new file mode 100644
index 00000000000..752846be0ca
--- /dev/null
+++ b/chromium/tools/cr/cr/actions/gyp.py
@@ -0,0 +1,39 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module to add gyp support to cr."""
+
+import cr
+import os
+
+GYP_DEFINE_PREFIX = 'GYP_DEF_'
+
+class GypPrepareOut(cr.PrepareOut):
+ """A prepare action that runs gyp whenever you select an output directory."""
+
+ ACTIVE = cr.Config.From(
+ GYP_GENERATORS='ninja',
+ GYP_GENERATOR_FLAGS='output_dir={CR_OUT_BASE} config={CR_BUILDTYPE}',
+ GYP_DEF_target_arch='{CR_ENVSETUP_ARCH}',
+ )
+
+ def UpdateContext(self):
+ # Collapse GYP_DEFINES from all GYP_DEF prefixes
+ gyp_defines = cr.context.Find('GYP_DEFINES') or ''
+ for key, value in cr.context.exported.items():
+ if key.startswith(GYP_DEFINE_PREFIX):
+ gyp_defines += ' %s=%s' % (key[len(GYP_DEFINE_PREFIX):], value)
+ cr.context['GYP_DEFINES'] = gyp_defines.strip()
+ if cr.context.verbose >= 1:
+ print cr.context.Substitute('GYP_DEFINES = {GYP_DEFINES}')
+
+ def Prepare(self):
+ if cr.context.verbose >= 1:
+ print cr.context.Substitute('Invoking gyp with {GYP_GENERATOR_FLAGS}')
+
+ cr.Host.Execute(
+ '{CR_SRC}/build/gyp_chromium',
+ '--depth={CR_SRC}',
+ '--check'
+ )
diff --git a/chromium/tools/cr/cr/actions/installer.py b/chromium/tools/cr/cr/actions/installer.py
new file mode 100644
index 00000000000..064b4d0b8dd
--- /dev/null
+++ b/chromium/tools/cr/cr/actions/installer.py
@@ -0,0 +1,55 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the Installer base class."""
+
+import cr
+
+
+class Installer(cr.Action, cr.Plugin.Type):
+ """Base class for implementing installers.
+
+ Installer implementations must implement the Uninstall and Install methods.
+  If the location into which targets are built is fine for running them, then
+ they do not actually have to do anything.
+ """
+
+ SELECTOR_ARG = '--installer'
+ SELECTOR = 'CR_INSTALLER'
+ SELECTOR_HELP = 'Sets the installer to use.'
+
+ @cr.Plugin.activemethod
+ def Uninstall(self, targets, arguments):
+ """Removes a target from it's installed location."""
+
+ raise NotImplementedError('Must be overridden.')
+
+ @cr.Plugin.activemethod
+ def Install(self, targets, arguments):
+ """Installs a target somewhere so that it is ready to run."""
+ raise NotImplementedError('Must be overridden.')
+
+ @cr.Plugin.activemethod
+ def Reinstall(self, targets, arguments):
+ """Force a target to install even if already installed.
+
+ Default implementation is to do an Uninstall Install sequence.
+ Do not call the base version if you implement a more efficient one.
+ """
+ self.Uninstall(targets, [])
+ self.Install(targets, arguments)
+
+
+class SkipInstaller(Installer):
+ """An Installer the user chooses to bypass the install step of a command."""
+
+ @property
+ def priority(self):
+ return super(SkipInstaller, self).priority - 1
+
+ def Uninstall(self, targets, arguments):
+ pass
+
+ def Install(self, targets, arguments):
+ pass
diff --git a/chromium/tools/cr/cr/actions/linux.py b/chromium/tools/cr/cr/actions/linux.py
new file mode 100644
index 00000000000..8e670e3bfea
--- /dev/null
+++ b/chromium/tools/cr/cr/actions/linux.py
@@ -0,0 +1,52 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module to hold linux specific action implementations."""
+
+import cr
+
+
+class LinuxRunner(cr.Runner):
+ """An implementation of cr.Runner for the linux platform.
+
+ This supports directly executing the binaries from the output directory.
+ """
+
+ @property
+ def enabled(self):
+ return cr.LinuxPlatform.GetInstance().is_active
+
+ def Kill(self, targets, arguments):
+ # Not needed on Linux because the target generally runs in the same shell
+ # and can be killed using Ctrl-C.
+ pass
+
+ def Run(self, target, arguments):
+ with target:
+ cr.Host.Execute('{CR_BINARY}', '{CR_RUN_ARGUMENTS}', *arguments)
+
+ def Test(self, target, arguments):
+ self.Run(target, arguments)
+
+
+class LinuxInstaller(cr.Installer):
+ """An implementation of cr.Installer for the linux platform.
+
+ This does nothing, the linux runner works from the output directory, there
+ is no need to install anywhere.
+ """
+
+ @property
+ def enabled(self):
+ return cr.LinuxPlatform.GetInstance().is_active
+
+ def Uninstall(self, targets, arguments):
+ pass
+
+ def Install(self, targets, arguments):
+ pass
+
+ def Reinstall(self, targets, arguments):
+ pass
+
diff --git a/chromium/tools/cr/cr/actions/linuxchromeos.py b/chromium/tools/cr/cr/actions/linuxchromeos.py
new file mode 100644
index 00000000000..b8faa3b3c1d
--- /dev/null
+++ b/chromium/tools/cr/cr/actions/linuxchromeos.py
@@ -0,0 +1,30 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Linux ChromeOS specific implementations."""
+
+import cr
+
+
+class LinuxChromeOSRunner(cr.LinuxRunner):
+ """A version of cr.LinuxRunner for LinuxChromeOS.
+
+ Running ChromeOS in Linux is the same as a normal linux Chrome build -- just
+ executing the output binary.
+ """
+
+ @property
+ def enabled(self):
+ return cr.LinuxChromeOSPlatform.GetInstance().is_active
+
+
+class LinuxChromeOSInstaller(cr.LinuxInstaller):
+ """A version of cr.LinuxInstaller for LinuxChromeOS.
+
+ This does nothing, as there is nothing to be installed.
+ """
+
+ @property
+ def enabled(self):
+ return cr.LinuxChromeOSPlatform.GetInstance().is_active
diff --git a/chromium/tools/cr/cr/actions/ninja.py b/chromium/tools/cr/cr/actions/ninja.py
new file mode 100644
index 00000000000..db3b3b2966f
--- /dev/null
+++ b/chromium/tools/cr/cr/actions/ninja.py
@@ -0,0 +1,120 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module to add ninja support to cr."""
+
+import multiprocessing
+import os
+
+import cr
+
+_PHONY_SUFFIX = ': phony'
+_LINK_SUFFIX = ': link'
+
+
+DEFAULT = cr.Config.From(
+ GOMA_DIR=os.path.expanduser('~/goma'),
+)
+
+class NinjaBuilder(cr.Builder):
+ """An implementation of Builder that uses ninja to do the actual build."""
+
+ # Some basic configuration installed if we are enabled.
+ EXTRA_FOR_IO_BOUND_JOBS = 2
+ ENABLED = cr.Config.From(
+ NINJA_BINARY=os.path.join('{DEPOT_TOOLS}', 'ninja'),
+ NINJA_JOBS=multiprocessing.cpu_count() + EXTRA_FOR_IO_BOUND_JOBS,
+ NINJA_PROCESSORS=multiprocessing.cpu_count(),
+ NINJA_BUILD_FILE=os.path.join('{CR_BUILD_DIR}', 'build.ninja'),
+ # Don't rename to GOMA_* or Goma will complain: "unkown GOMA_ parameter".
+ NINJA_GOMA_LINE='cc = {CR_GOMA_CC} $',
+ )
+ # A config block only included if goma is detected.
+ GOMA = cr.Config.From(
+ CR_GOMA_CC=os.path.join('{GOMA_DIR}', 'gomacc'),
+ CR_GOMA_CTL=os.path.join('{GOMA_DIR}', 'goma_ctl.py'),
+ GOMA_DIR='{CR_GOMA_DIR}',
+ GYP_DEF_gomadir='{CR_GOMA_DIR}',
+ GYP_DEF_use_goma=1,
+ NINJA_JOBS=multiprocessing.cpu_count() * 10,
+ )
+ # A placeholder for the system detected configuration
+ DETECTED = cr.Config('DETECTED')
+
+ def __init__(self):
+ super(NinjaBuilder, self).__init__()
+ self._targets = []
+
+ def Build(self, targets, arguments):
+ # Make sure Goma is started if Ninja is set to use it.
+ # This may be redundant, but it currently improves reliability.
+ try:
+ with open(cr.context.Get('NINJA_BUILD_FILE'), 'r') as f:
+ if f.readline().rstrip('\n') == cr.context.Get('NINJA_GOMA_LINE'):
+ # Goma is active, so make sure it's started.
+ cr.Host.ExecuteSilently(
+ '{CR_GOMA_CTL}',
+ 'ensure_start'
+ )
+ except IOError:
+ pass
+
+ build_arguments = [target.build_target for target in targets]
+ build_arguments.extend(arguments)
+ cr.Host.Execute(
+ '{NINJA_BINARY}',
+ '-C{CR_BUILD_DIR}',
+ '-j{NINJA_JOBS}',
+ '-l{NINJA_PROCESSORS}',
+ *build_arguments
+ )
+
+ def Clean(self, targets, arguments):
+ build_arguments = [target.build_target for target in targets]
+ build_arguments.extend(arguments)
+ cr.Host.Execute(
+ '{NINJA_BINARY}',
+ '-C{CR_BUILD_DIR}',
+ '-tclean',
+ *build_arguments
+ )
+
+ def GetTargets(self):
+ """Overridden from Builder.GetTargets."""
+ if not self._targets:
+ try:
+ cr.context.Get('CR_BUILD_DIR', raise_errors=True)
+ except KeyError:
+ return self._targets
+ output = cr.Host.Capture(
+ '{NINJA_BINARY}',
+ '-C{CR_BUILD_DIR}',
+ '-ttargets',
+ 'all'
+ )
+ for line in output.split('\n'):
+ line = line.strip()
+ if line.endswith(_PHONY_SUFFIX):
+ target = line[:-len(_PHONY_SUFFIX)].strip()
+ self._targets.append(target)
+ elif line.endswith(_LINK_SUFFIX):
+ target = line[:-len(_LINK_SUFFIX)].strip()
+ self._targets.append(target)
+ return self._targets
+
+ @classmethod
+ def ClassInit(cls):
+ # TODO(iancottrell): If we can't detect ninja, we should be disabled.
+ ninja_binaries = cr.Host.SearchPath('ninja')
+ if ninja_binaries:
+ cls.DETECTED.Set(NINJA_BINARY=ninja_binaries[0])
+
+ goma_binaries = cr.Host.SearchPath('gomacc', [
+ '{GOMA_DIR}',
+ '/usr/local/google/code/goma',
+ os.path.expanduser('~/goma')
+ ])
+ if goma_binaries:
+ cls.DETECTED.Set(CR_GOMA_DIR=os.path.dirname(goma_binaries[0]))
+ cls.DETECTED.AddChildren(cls.GOMA)
diff --git a/chromium/tools/cr/cr/actions/runner.py b/chromium/tools/cr/cr/actions/runner.py
new file mode 100644
index 00000000000..0b7a03d6cc9
--- /dev/null
+++ b/chromium/tools/cr/cr/actions/runner.py
@@ -0,0 +1,88 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the Runner base class."""
+
+import cr
+
+
+class Runner(cr.Action, cr.Plugin.Type):
+ """Base class for implementing target runners.
+
+ Runner implementations must implement the Kill, Run and Test methods.
+
+ """
+
+ SELECTOR_ARG = '--runner'
+ SELECTOR = 'CR_RUNNER'
+ SELECTOR_HELP = 'Sets the runner to use to execute the target.'
+
+ @classmethod
+ def AddArguments(cls, command, parser):
+ parser.add_argument(
+ '--test', dest='CR_TEST_TYPE',
+ choices=cr.Target.TEST_TYPES,
+ default=None,
+ help="""
+ Sets the test type to use,
+ defaults to choosing based on the target.
+ Set to 'no' to force it to not be a test.
+ """
+ )
+ cls.AddSelectorArg(command, parser)
+
+ @cr.Plugin.activemethod
+ def Kill(self, targets, arguments):
+ """Stops all running processes that match a target."""
+ raise NotImplementedError('Must be overridden.')
+
+ @cr.Plugin.activemethod
+ def Run(self, target, arguments):
+ """Run a new copy of a runnable target."""
+ raise NotImplementedError('Must be overridden.')
+
+ @cr.Plugin.activemethod
+ def Test(self, target, arguments):
+ """Run a test target."""
+ raise NotImplementedError('Must be overridden.')
+
+ @cr.Plugin.activemethod
+ def Invoke(self, targets, arguments):
+ """Invoke a target.
+
+ This dispatches to either Test or Run depending on the target type.
+ """
+ for target in targets:
+ if target.is_test:
+ self.Test(target, arguments)
+ else:
+ self.Run(target, arguments)
+
+ @cr.Plugin.activemethod
+ def Restart(self, targets, arguments):
+ """Force a target to restart if it is already running.
+
+ Default implementation is to do a Kill Invoke sequence.
+ Do not call the base version if you implement a more efficient one.
+ """
+ self.Kill(targets, [])
+ self.Invoke(targets, arguments)
+
+
+class SkipRunner(Runner):
+ """A Runner the user chooses to bypass the run step of a command."""
+
+ @property
+ def priority(self):
+ return super(SkipRunner, self).priority - 1
+
+ def Kill(self, targets, arguments):
+ pass
+
+ def Run(self, target, arguments):
+ pass
+
+ def Test(self, target, arguments):
+ pass
+
diff --git a/chromium/tools/cr/cr/auto/__init__.py b/chromium/tools/cr/cr/auto/__init__.py
new file mode 100644
index 00000000000..698aee36359
--- /dev/null
+++ b/chromium/tools/cr/cr/auto/__init__.py
@@ -0,0 +1,10 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A root package for all the automatically loaded modules.
+
+There are no built in packages below this, it holds a set of packages that
+have their search paths modified in order to pick up plugins from directories
+that are not known until run-time.
+"""
diff --git a/chromium/tools/cr/cr/auto/build/__init__.py b/chromium/tools/cr/cr/auto/build/__init__.py
new file mode 100644
index 00000000000..4f2dbd94674
--- /dev/null
+++ b/chromium/tools/cr/cr/auto/build/__init__.py
@@ -0,0 +1,5 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A package that holds the modules loaded from the output directory.""" \ No newline at end of file
diff --git a/chromium/tools/cr/cr/auto/client/__init__.py b/chromium/tools/cr/cr/auto/client/__init__.py
new file mode 100644
index 00000000000..90647c658e4
--- /dev/null
+++ b/chromium/tools/cr/cr/auto/client/__init__.py
@@ -0,0 +1,5 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A package to hold the modules auto loaded from the client directory.""" \ No newline at end of file
diff --git a/chromium/tools/cr/cr/auto/user/__init__.py b/chromium/tools/cr/cr/auto/user/__init__.py
new file mode 100644
index 00000000000..9f93de8df98
--- /dev/null
+++ b/chromium/tools/cr/cr/auto/user/__init__.py
@@ -0,0 +1,5 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A package to hold the modules loaded from the users home directory.""" \ No newline at end of file
diff --git a/chromium/tools/cr/cr/autocomplete.py b/chromium/tools/cr/cr/autocomplete.py
new file mode 100644
index 00000000000..f03b821c065
--- /dev/null
+++ b/chromium/tools/cr/cr/autocomplete.py
@@ -0,0 +1,23 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Bash auto completion support.
+
+Contains the special mode that returns lists of possible completions for the
+current command line.
+"""
+
+import cr
+
+
+def Complete():
+ """Attempts to build a completion list for the current command line.
+
+ COMP_WORD contains the word that is being completed, and COMP_CWORD has
+ the index of that word on the command line.
+ """
+
+ # TODO(iancottrell): support auto complete of more than just the command
+ # try to parse the command line using parser
+ print ' '.join(command.name for command in cr.Command.Plugins())
diff --git a/chromium/tools/cr/cr/base/__init__.py b/chromium/tools/cr/cr/base/__init__.py
new file mode 100644
index 00000000000..81cdefedaf1
--- /dev/null
+++ b/chromium/tools/cr/cr/base/__init__.py
@@ -0,0 +1,11 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A package for the base supporting classes of the cr tool."""
+
+import cr
+
+cr.Import(__name__, 'platform')
+cr.Import(__name__, 'buildtype')
+cr.Import(__name__, 'client')
diff --git a/chromium/tools/cr/cr/base/android.py b/chromium/tools/cr/cr/base/android.py
new file mode 100644
index 00000000000..676cb498c1c
--- /dev/null
+++ b/chromium/tools/cr/cr/base/android.py
@@ -0,0 +1,130 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""The android specific platform implementation module."""
+
+import os
+import subprocess
+
+import cr
+
+# This is the set of environment variables that are not automatically
+# copied back from the envsetup shell
+_IGNORE_ENV = [
+ 'SHLVL', # Because it's nothing to do with envsetup
+    'GYP_GENERATOR_FLAGS',  # because we set them in the gyp handler
+    'GYP_GENERATORS',  # because we set them in the gyp handler
+ 'PATH', # Because it gets a special merge handler
+ 'GYP_DEFINES', # Because it gets a special merge handler
+]
+
+
+class AndroidPlatform(cr.Platform):
+ """The implementation of Platform for the android target."""
+
+ ACTIVE = cr.Config.From(
+ CR_ENVSETUP=os.path.join('{CR_SRC}', 'build', 'android', 'envsetup.sh'),
+ CR_ADB=os.path.join('{CR_SRC}', 'third_party', 'android_tools', 'sdk',
+ 'platform-tools', 'adb'),
+ CR_TARGET_SUFFIX='_apk',
+ CR_BINARY=os.path.join('{CR_BUILD_DIR}', 'apks', '{CR_TARGET_NAME}.apk'),
+ CR_ACTION='android.intent.action.VIEW',
+ CR_PACKAGE='com.google.android.apps.{CR_TARGET}',
+ CR_PROCESS='{CR_PACKAGE}',
+ CR_ACTIVITY='.Main',
+ CR_INTENT='{CR_PACKAGE}/{CR_ACTIVITY}',
+ CR_TEST_RUNNER=os.path.join(
+ '{CR_SRC}', 'build', 'android', 'test_runner.py'),
+ CR_ADB_GDB=os.path.join('{CR_SRC}', 'build', 'android', 'adb_gdb'),
+ CR_DEFAULT_TARGET='chrome_public',
+ GYP_DEF_OS='android',
+ GN_ARG_target_os='"android"'
+ )
+
+ def __init__(self):
+ super(AndroidPlatform, self).__init__()
+ self._env = cr.Config('android-env', literal=True, export=True)
+ self.detected_config.AddChild(self._env)
+ self._env_ready = False
+ self._env_paths = []
+
+ @property
+ def priority(self):
+ return super(AndroidPlatform, self).priority + 1
+
+ def Prepare(self):
+ """Override Prepare from cr.Platform."""
+ super(AndroidPlatform, self).Prepare()
+ try:
+ # capture the result of env setup if we have not already done so
+ if not self._env_ready:
+ # See what the env would be without env setup
+ before = cr.context.exported
+ # Run env setup and capture/parse its output
+ envsetup = 'source {CR_ENVSETUP}'
+ output = cr.Host.CaptureShell(envsetup + ' > /dev/null && env')
+ env_setup = cr.Config('envsetup', literal=True, export=True)
+ for line in output.split('\n'):
+ (key, op, value) = line.partition('=')
+ if op:
+ key = key.strip()
+ if key not in _IGNORE_ENV:
+ env_setup[key] = env_setup.ParseValue(value.strip())
+ if key == 'PATH':
+ self._env_paths = value.strip().split(os.path.pathsep)
+ items = env_setup.exported.items()
+ if not items:
+ # Because of the way envsetup is run, the exit code does not make it
+ # back to us. Instead, we assume if we got no environment at all, it
+ # must have failed.
+ print 'Envsetup failed!'
+ exit(1)
+ # Find all the things that envsetup changed
+ for key, value in env_setup.exported.items():
+ if str(value) != str(before.get(key, None)):
+ self._env[key] = value
+ self._env_ready = True
+ except subprocess.CalledProcessError, e:
+ exit(e.returncode)
+
+ @property
+ def paths(self):
+ return self._env_paths
+
+
+class AndroidInitHook(cr.InitHook):
+ """Android output directory init hook.
+
+ This makes sure that your client is android capable when you try
+  to make an android output directory.
+ """
+
+ @property
+ def enabled(self):
+ return cr.AndroidPlatform.GetInstance().is_active
+
+ def Run(self, old_version, config):
+ _ = old_version, config # unused
+ # Check we are an android capable client
+ target_os = cr.context.gclient.get('target_os', [])
+ if 'android' in target_os:
+ return
+ url = cr.context.gclient.get('solutions', [{}])[0].get('url')
+ if (url.startswith('https://chrome-internal.googlesource.com/') and
+ url.endswith('/internal/apps.git')):
+ return
+ print 'This client is not android capable.'
+ print 'It can be made capable by adding android to the target_os list'
+ print 'in the .gclient file, and then syncing again.'
+ if not cr.Host.YesNo('Would you like to upgrade this client?'):
+ print 'Abandoning the creation of and android output directory.'
+ exit(1)
+ target_os.append('android')
+ cr.context.gclient['target_os'] = target_os
+ cr.base.client.WriteGClient()
+ print 'Client updated.'
+ print 'You may need to sync before an output directory can be made.'
+ if cr.Host.YesNo('Would you like to sync this client now?'):
+ cr.SyncCommand.Sync(["--nohooks"])
+
diff --git a/chromium/tools/cr/cr/base/arch.py b/chromium/tools/cr/cr/base/arch.py
new file mode 100644
index 00000000000..4973b4433c3
--- /dev/null
+++ b/chromium/tools/cr/cr/base/arch.py
@@ -0,0 +1,81 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the basic architectures supported by cr."""
+
+import cr
+
+DEFAULT = cr.Config.From(
+ CR_ENVSETUP_ARCH='{CR_ARCH}',
+)
+
+
+class Arch(cr.Plugin, cr.Plugin.Type):
+ """Base class for implementing cr architecture targets."""
+
+ SELECTOR = 'CR_ARCH'
+
+ @classmethod
+ def AddArguments(cls, parser):
+ parser.add_argument(
+ '--architecture', dest=cls.SELECTOR,
+ choices=cls.Choices(),
+ default=None,
+ help='Sets the target architecture to use. Overrides ' + cls.SELECTOR
+ )
+
+
+class IA32Arch(Arch):
+
+ ACTIVE = cr.Config.From(
+ CR_ENVSETUP_ARCH='ia32',
+ )
+
+
+class Mips32Arch(Arch):
+
+ ACTIVE = cr.Config.From(
+ CR_ENVSETUP_ARCH='mipsel',
+ )
+
+ @property
+ def enabled(self):
+ return cr.AndroidPlatform.GetInstance().is_active
+
+
+class X64Arch(Arch):
+
+ ACTIVE = cr.Config.From(
+ CR_ENVSETUP_ARCH='x64',
+ )
+
+ @property
+ def priority(self):
+ return super(X64Arch, self).priority + 1
+
+
+class Arm32Arch(Arch):
+
+ ACTIVE = cr.Config.From(
+ CR_ENVSETUP_ARCH='arm',
+ )
+
+ @property
+ def priority(self):
+ return super(Arm32Arch, self).priority + 2
+
+ @property
+ def enabled(self):
+ return cr.AndroidPlatform.GetInstance().is_active
+
+
+class Arm64Arch(Arch):
+
+ ACTIVE = cr.Config.From(
+ CR_ENVSETUP_ARCH='arm64',
+ )
+
+ @property
+ def enabled(self):
+ return cr.AndroidPlatform.GetInstance().is_active
diff --git a/chromium/tools/cr/cr/base/buildtype.py b/chromium/tools/cr/cr/base/buildtype.py
new file mode 100644
index 00000000000..17558128c1b
--- /dev/null
+++ b/chromium/tools/cr/cr/base/buildtype.py
@@ -0,0 +1,58 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the basic build type support in cr."""
+
+import cr
+
+
+class BuildType(cr.Plugin, cr.Plugin.Type):
+ """Base class for implementing cr build types.
+
+ A build type corresponds to the second directory level in the standard output
+ directory format, and the BUILDTYPE environment variable used by chromium
+ tools.
+ """
+
+ SELECTOR = 'CR_BUILDTYPE'
+
+ DEFAULT = cr.Config.From(
+ BUILDTYPE='{CR_BUILDTYPE}',
+ )
+
+ def __init__(self):
+ super(BuildType, self).__init__()
+ self.active_config.Set(
+ CR_TEST_MODE=self.name,
+ )
+
+ @classmethod
+ def AddArguments(cls, parser):
+ parser.add_argument(
+ '--type', dest=cls.SELECTOR,
+ choices=cls.Choices(),
+ default=None,
+ help='Sets the build type to use. Overrides ' + cls.SELECTOR
+ )
+
+
+class DebugBuildType(BuildType):
+ """A concrete implementation of BuildType for Debug builds."""
+
+ def __init__(self):
+ super(DebugBuildType, self).__init__()
+ self._name = 'Debug'
+
+
+class ReleaseBuildType(BuildType):
+ """A concrete implementation of BuildType for Release builds."""
+
+ def __init__(self):
+ super(ReleaseBuildType, self).__init__()
+ self._name = 'Release'
+
+ @property
+ def priority(self):
+ return BuildType.GetPlugin('Debug').priority + 1
+
diff --git a/chromium/tools/cr/cr/base/client.py b/chromium/tools/cr/cr/base/client.py
new file mode 100644
index 00000000000..4c89c464472
--- /dev/null
+++ b/chromium/tools/cr/cr/base/client.py
@@ -0,0 +1,284 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Client configuration management.
+
+This module holds the code for detecting and configuring the current client and
+its output directories.
+It is responsible for writing out the client specific plugins that tell the
+rest of the cr tool what the client is capable of.
+"""
+
+import os
+import pprint
+import sys
+
+import cr
+import cr.auto.build
+import cr.auto.client
+
+# The config version currently supported.
+VERSION = 0.5
+# The default directory name to store configs inside.
+CONFIG_PATH = '.cr'
+# The filename of the config file inside a config directory.
+CONFIG_FILE = 'config.py'
+# The directory inside the config directory which contains the client config.
+CLIENT_CONFIG_DIR = 'client'
+# The directory inside the config directory which contains build configs.
+BUILD_CONFIG_DIR = 'builds'
+# The format string for the header of a config file.
+CONFIG_FILE_PREFIX = """
+# This is an autogenerated file
+# it *will* be overwritten, and changes may lost
+# The system will autoload any other python file in the same folder.
+
+import cr
+
+OVERRIDES = cr.Config.From("""
+# The format string for each value in a config file.
+CONFIG_VAR_LINE = '\n {0} = {1!r},'
+# The format string for the tail of a config file.
+CONFIG_FILE_SUFFIX = '\n)\n'
+# The name of the gclient config file
+GCLIENT_FILENAME = '.gclient'
+
+# The default config values installed by this module.
+DEFAULT = cr.Config.From(
+ CR_ROOT_PATH=os.path.join('{GOOGLE_CODE}'),
+ CR_CLIENT_NAME='chromium',
+ CR_CLIENT_PATH=os.path.join('{CR_ROOT_PATH}', '{CR_CLIENT_NAME}'),
+ CR_SRC=os.path.join('{CR_CLIENT_PATH}', 'src'),
+ CR_BUILD_DIR=os.path.join('{CR_SRC}', '{CR_OUT_FULL}'),
+)
+
+
+def DetectClient():
+ # Attempt to detect the current client from the cwd
+ # See if we can detect the source tree root
+ client_path = os.getcwd()
+ while (client_path and
+ not os.path.exists(os.path.join(client_path, GCLIENT_FILENAME))):
+ old = client_path
+ client_path = os.path.dirname(client_path)
+ if client_path == old:
+ client_path = None
+ if client_path is not None:
+ dirname, basename = os.path.split(client_path)
+ if basename == 'src':
+ # we have the src path, base is one level up
+ client_path = dirname
+ if client_path is not None:
+ cr.context.derived['CR_CLIENT_PATH'] = client_path
+    # now get the value back from the context; it may be different
+ client_path = cr.context.Get('CR_CLIENT_PATH')
+ if client_path is not None:
+ cr.context.derived['CR_CLIENT_NAME'] = os.path.basename(client_path)
+
+
+def _GetConfigDir(use_build_dir):
+ base_path = os.path.join(cr.context.Get('CR_CLIENT_PATH'), CONFIG_PATH)
+ if use_build_dir:
+ path_suffix = os.path.join(BUILD_CONFIG_DIR, cr.context.Get('CR_OUT_FULL'))
+ else:
+ path_suffix = CLIENT_CONFIG_DIR
+ return os.path.realpath(os.path.join(base_path, path_suffix))
+
+
+def _GetDeprecatedConfigDir(use_build_dir):
+ if use_build_dir:
+ path = cr.context.Get('CR_BUILD_DIR')
+ else:
+ path = cr.context.Get('CR_CLIENT_PATH')
+ return os.path.realpath(os.path.join(path, CONFIG_PATH))
+
+
+def _GetConfigFile(config_dir):
+ return os.path.join(config_dir, CONFIG_FILE)
+
+
+def _MigrateAndGetConfigDir(use_build_dir):
+ new_config_dir = _GetConfigDir(use_build_dir)
+ new_config_file = _GetConfigFile(new_config_dir)
+ new_config_exists = os.path.exists(new_config_file)
+
+ old_config_dir = _GetDeprecatedConfigDir(use_build_dir)
+ old_config_file = _GetConfigFile(old_config_dir)
+ old_config_exists = os.path.exists(old_config_file)
+
+ if old_config_exists:
+ if new_config_exists:
+ print 'Warning: Old config file %s superseded by new config file %s' % (
+ old_config_file, new_config_file)
+ else:
+ print 'Migrating config file from %s to %s...' % (
+ old_config_file, new_config_file)
+ if not cr.context.dry_run:
+ # Make the new config directory (if necessary).
+ try:
+ os.makedirs(new_config_dir)
+ except OSError:
+ if not os.path.isdir(new_config_dir):
+ raise
+ # Move the config file.
+ os.rename(old_config_file, new_config_file)
+ # Delete the old config directory (only applies to the build config).
+ if use_build_dir:
+ try:
+ os.removedirs(old_config_dir)
+ except OSError:
+ print 'Warning: Old config directory %s could not be removed' % (
+ old_config_dir)
+
+ return new_config_dir
+
+
+def _WriteConfig(writer, data):
+ writer.write(CONFIG_FILE_PREFIX)
+ for key, value in data.items():
+ writer.write(CONFIG_VAR_LINE.format(key, value))
+ writer.write(CONFIG_FILE_SUFFIX)
+
+
+def AddArguments(parser):
+ parser.add_argument(
+ '-o', '--out', dest='_out', metavar='name',
+ default=None,
+ help='The name of the out directory to use. Overrides CR_OUT.'
+ )
+
+
+def GetOutArgument():
+ return getattr(cr.context.args, '_out', None)
+
+
+def ApplyOutArgument():
+ # TODO(iancottrell): be flexible, allow out to do approximate match...
+ out = GetOutArgument()
+ if out:
+ cr.context.derived.Set(CR_OUT_FULL=out)
+
+
+def ReadGClient():
+ """Loads the .gclient configuration for the current client.
+
+ This will load from CR_CLIENT_PATH.
+
+ Returns:
+ The dict of values set in the .gclient file.
+
+ """
+ # Now attempt to load and parse the .gclient file
+ result = {}
+ try:
+ gclient_file = cr.context.Substitute(
+ os.path.join('{CR_CLIENT_PATH}', GCLIENT_FILENAME))
+ with open(gclient_file, 'r') as spec_file:
+ # matching the behaviour of gclient, so pylint: disable=exec-used
+ exec(spec_file.read(), {}, result)
+ except IOError:
+ # no .gclient file, skip it
+ pass
+ return result
+
+
+def WriteGClient():
+ """Writes the .gclient configuration for the current client.
+
+ This will write to CR_CLIENT_PATH.
+
+ """
+ gclient_file = cr.context.Substitute(
+ os.path.join('{CR_CLIENT_PATH}', GCLIENT_FILENAME))
+ spec = '\n'.join('%s = %s' % (key, pprint.pformat(value))
+ for key,value in cr.context.gclient.items())
+ if cr.context.dry_run:
+ print 'Write the following spec to', gclient_file
+ print spec
+ else:
+ with open(gclient_file, 'w') as spec_file:
+ spec_file.write(spec)
+
+def LoadConfig():
+ """Loads the client configuration for the given context.
+
+ This will load configuration if present from CR_CLIENT_PATH and then
+ CR_BUILD_DIR.
+
+ Returns:
+ True if configuration was fully loaded.
+
+ """
+ # Load the root config, will help set default build dir
+ client_config_dir = _MigrateAndGetConfigDir(use_build_dir=False)
+ cr.auto.client.__path__.append(client_config_dir)
+ cr.loader.Scan()
+ # Now load build dir config
+ build_config_dir = _MigrateAndGetConfigDir(use_build_dir=True)
+ cr.auto.build.__path__.append(build_config_dir)
+ cr.loader.Scan()
+
+ if not hasattr(cr.auto.build, 'config'):
+ return False
+
+ cr.context.derived.Set(CR_BUILD_CONFIG_PATH=_GetConfigFile(build_config_dir))
+ return True
+
+
+def WriteConfig(use_build_dir, data):
+ """Writes a configuration out to a file.
+
+ This writes all the key value pairs in data out to a config file.
+
+ Args:
+ use_build_dir: True if the config file should be written to the build
+ directory. Otherwise it will be written to the root config directory.
+ data: The key value pairs to write.
+ """
+ config_dir = _GetConfigDir(use_build_dir)
+ filename = _GetConfigFile(config_dir)
+ if cr.context.dry_run:
+ print 'makedirs', config_dir
+ print 'Write config to', filename
+ _WriteConfig(sys.stdout, data)
+ else:
+ try:
+ os.makedirs(config_dir)
+ except OSError:
+ if not os.path.isdir(config_dir):
+ raise
+ with open(filename, 'w') as writer:
+ _WriteConfig(writer, data)
+
+
+def PrintInfo():
+ print 'Selected output directory is', cr.context.Find('CR_BUILD_DIR')
+ print 'Build config file is', _GetConfigFile(_GetConfigDir(
+ use_build_dir=True))
+ try:
+ for name in cr.auto.build.config.OVERRIDES.exported.keys():
+ print ' ', name, '=', cr.context.Get(name)
+ except AttributeError:
+ pass
+
+
+class InitHook(cr.Plugin, cr.Plugin.Type):
+ """Base class for output directory initialization hooks.
+
+ Implementations used to fix from old version to new ones live in the
+ cr.fixups package.
+ """
+
+ def Run(self, old_version, config):
+ """Run the initialization hook.
+
+ This is invoked once per init invocation.
+ Args:
+ old_version: The old version,
+ 0.0 if the old version was bad or missing,
+          None if building a new output directory.
+ config: The mutable config that will be written.
+ """
+ raise NotImplementedError('Must be overridden.')
+
diff --git a/chromium/tools/cr/cr/base/context.py b/chromium/tools/cr/cr/base/context.py
new file mode 100644
index 00000000000..1081fd15b8f
--- /dev/null
+++ b/chromium/tools/cr/cr/base/context.py
@@ -0,0 +1,245 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Application context management for the cr tool.
+
+Contains all the support code to enable the shared context used by the cr tool.
+This includes the configuration variables and command line handling.
+"""
+
+import argparse
+import os
+import cr
+
+class _DumpVisitor(cr.visitor.ExportVisitor):
+ """A visitor that prints all variables in a config hierarchy."""
+
+ def __init__(self, with_source):
+ super(_DumpVisitor, self).__init__({})
+ self.to_dump = {}
+ self.with_source = with_source
+
+ def StartNode(self):
+ if self.with_source:
+ self._DumpNow()
+ super(_DumpVisitor, self).StartNode()
+
+ def EndNode(self):
+ if self.with_source or not self.stack:
+ self._DumpNow()
+ super(_DumpVisitor, self).EndNode()
+ if not self.stack:
+ self._DumpNow()
+
+ def Visit(self, key, value):
+ super(_DumpVisitor, self).Visit(key, value)
+ if key in self.store:
+ str_value = str(self.store[key])
+ if str_value != str(os.environ.get(key, None)):
+ self.to_dump[key] = str_value
+
+ def _DumpNow(self):
+ if self.to_dump:
+ if self.with_source:
+ print 'From', self.Where()
+ for key in sorted(self.to_dump.keys()):
+ print ' ', key, '=', self.to_dump[key]
+ self.to_dump = {}
+
+
+class _ShowHelp(argparse.Action):
+ """An argparse action to print the help text.
+
+ This is like the built in help text printing action, except it knows to do
+ nothing when we are just doing the early speculative parse of the args.
+ """
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ if cr.context.speculative:
+ return
+ command = cr.Command.GetActivePlugin()
+ if command:
+ command.parser.print_help()
+ else:
+ parser.print_help()
+ exit(1)
+
+
+class _ArgumentParser(argparse.ArgumentParser):
+ """An extension of an ArgumentParser to enable speculative parsing.
+
+ It supports doing an early parse that never produces errors or output, to do
+ early collection of arguments that may affect what other arguments are
+ allowed.
+ """
+
+ def error(self, message):
+ if cr.context.speculative:
+ return
+ super(_ArgumentParser, self).error(message)
+
+ def parse_args(self):
+ if cr.context.speculative:
+ result = self.parse_known_args()
+ if result:
+ return result[0]
+ return None
+ return super(_ArgumentParser, self).parse_args()
+
+ def parse_known_args(self, args=None, namespace=None):
+ result = super(_ArgumentParser, self).parse_known_args(args, namespace)
+ if result is None:
+ return namespace, None
+ return result
+
+
+# The context stack
+_stack = []
+
+
+class _ContextData:
+ pass
+
+
+class Context(cr.config.Config):
+ """The base context holder for the cr system.
+
+ This holds the common context shared throughout cr.
+ Mostly this is stored in the Config structure of variables.
+ """
+
+ def __init__(self, name='Context'):
+ super(Context, self).__init__(name)
+ self._data = _ContextData()
+
+ def CreateData(self, description='', epilog=''):
+ self._data.args = None
+ self._data.arguments = cr.config.Config('ARGS')
+ self._data.derived = cr.config.Config('DERIVED')
+ self.AddChildren(*cr.config.GLOBALS)
+ self.AddChildren(
+ cr.config.Config('ENVIRONMENT', literal=True, export=True).Set(
+ {k: self.ParseValue(v) for k, v in os.environ.items()}),
+ self._data.arguments,
+ self._data.derived,
+ )
+ # Build the command line argument parser
+ self._data.parser = _ArgumentParser(add_help=False, description=description,
+ epilog=epilog)
+ self._data.subparsers = self.parser.add_subparsers()
+ # Add the global arguments
+ self.AddCommonArguments(self._data.parser)
+ self._data.gclient = {}
+
+ @property
+ def data(self):
+ return self._data
+
+ def __enter__(self):
+ """ To support using 'with cr.base.context.Create():'"""
+ _stack.append(self)
+ cr.context = self
+ return self
+
+ def __exit__(self, *_):
+ _stack.pop()
+ if _stack:
+ cr.context = _stack[-1]
+ return False
+
+ def AddSubParser(self, source):
+ parser = source.AddArguments(self._data.subparsers)
+
+ @classmethod
+ def AddCommonArguments(cls, parser):
+ """Adds the command line arguments common to all commands in cr."""
+ parser.add_argument(
+ '-h', '--help',
+ action=_ShowHelp, nargs=0,
+ help='show the help message and exit.'
+ )
+ parser.add_argument(
+ '--dry-run', dest='CR_DRY_RUN',
+ action='store_true', default=None,
+ help="""
+ Don't execute commands, just print them. Implies verbose.
+ Overrides CR_DRY_RUN
+ """
+ )
+ parser.add_argument(
+ '-v', '--verbose', dest='CR_VERBOSE',
+ action='count', default=None,
+ help="""
+ Print information about commands being performed.
+ Repeating multiple times increases the verbosity level.
+ Overrides CR_VERBOSE
+ """
+ )
+
+ @property
+ def args(self):
+ return self._data.args
+
+ @property
+ def arguments(self):
+ return self._data.arguments
+
+ @property
+ def speculative(self):
+ return self._data.speculative
+
+ @property
+ def derived(self):
+ return self._data.derived
+
+ @property
+ def parser(self):
+ return self._data.parser
+
+ @property
+ def remains(self):
+ remains = getattr(self._data.args, '_remains', None)
+ if remains and remains[0] == '--':
+ remains = remains[1:]
+ return remains
+
+ @property
+ def verbose(self):
+ if self.autocompleting:
+ return False
+ return self.Find('CR_VERBOSE') or self.dry_run
+
+ @property
+ def dry_run(self):
+ if self.autocompleting:
+ return True
+ return self.Find('CR_DRY_RUN')
+
+ @property
+ def autocompleting(self):
+ return 'COMP_WORD' in os.environ
+
+ @property
+ def gclient(self):
+ if not self._data.gclient:
+ self._data.gclient = cr.base.client.ReadGClient()
+ return self._data.gclient
+
+ def ParseArgs(self, speculative=False):
+ cr.plugin.DynamicChoices.only_active = not speculative
+ self._data.speculative = speculative
+ self._data.args = self._data.parser.parse_args()
+ self._data.arguments.Wipe()
+ if self._data.args:
+ self._data.arguments.Set(
+ {k: v for k, v in vars(self._data.args).items() if v is not None})
+
+ def DumpValues(self, with_source):
+ _DumpVisitor(with_source).VisitNode(self)
+
+
+def Create(description='', epilog=''):
+ context = Context()
+ context.CreateData(description=description, epilog=epilog)
+ return context
diff --git a/chromium/tools/cr/cr/base/host.py b/chromium/tools/cr/cr/base/host.py
new file mode 100644
index 00000000000..a9d5a6f0b15
--- /dev/null
+++ b/chromium/tools/cr/cr/base/host.py
@@ -0,0 +1,185 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module for build host support."""
+
+import os
+import pipes
+import signal
+import subprocess
+
+import cr
+
+# Controls what verbosity level turns on command trail logging
+_TRAIL_VERBOSITY = 2
+
+def PrintTrail(trail):
+ print 'Command expanded the following variables:'
+ for key, value in trail:
+ if value == None:
+ value = ''
+ print ' ', key, '=', value
+
+
+class Host(cr.Plugin, cr.Plugin.Type):
+ """Base class for implementing cr hosts.
+
+ The host is the main access point to services provided by the machine cr
+ is running on. It exposes information about the machine, and runs external
+ commands on behalf of the actions.
+ """
+
+ def __init__(self):
+ super(Host, self).__init__()
+
+ def Matches(self):
+ """Detects whether this is the correct host implementation.
+
+ This method is overridden by the concrete implementations.
+ Returns:
+ true if the plugin matches the machine it is running on.
+ """
+ return False
+
+ @classmethod
+ def Select(cls):
+ for host in cls.Plugins():
+ if host.Matches():
+ return host
+
+ def _Execute(self, command,
+ shell=False, capture=False, silent=False,
+ ignore_dry_run=False, return_status=False,
+ ignore_interrupt_signal=False):
+ """This is the only method that launches external programs.
+
+ It is a thin wrapper around subprocess.Popen that handles cr specific
+ issues. The command is expanded in the active context so that variables
+ are substituted.
+ Args:
+ command: the command to run.
+ shell: whether to run the command using the shell.
+      capture: controls whether the output of the command is captured.
+ ignore_dry_run: Normally, if the context is in dry run mode the command is
+ printed but not executed. This flag overrides that behaviour, causing
+ the command to be run anyway.
+ return_status: switches the function to returning the status code rather
+ the output.
+ ignore_interrupt_signal: Ignore the interrupt signal (i.e., Ctrl-C) while
+ the command is running. Useful for letting interactive programs manage
+ Ctrl-C by themselves.
+ Returns:
+ the status if return_status is true, or the output if capture is true,
+ otherwise nothing.
+ """
+ with cr.context.Trace():
+ command = [cr.context.Substitute(arg) for arg in command if arg]
+ command = filter(bool, command)
+ trail = cr.context.trail
+ if not command:
+ print 'Empty command passed to execute'
+ exit(1)
+ if cr.context.verbose:
+ print ' '.join(command)
+ if cr.context.verbose >= _TRAIL_VERBOSITY:
+ PrintTrail(trail)
+ if ignore_dry_run or not cr.context.dry_run:
+ out = None
+ if capture:
+ out = subprocess.PIPE
+ elif silent:
+ out = open(os.devnull, "w")
+ try:
+ p = subprocess.Popen(
+ command, shell=shell,
+ env={k: str(v) for k, v in cr.context.exported.items()},
+ stdout=out)
+ except OSError:
+ print 'Failed to exec', command
+ # Don't log the trail if we already have
+ if cr.context.verbose < _TRAIL_VERBOSITY:
+ PrintTrail(trail)
+ exit(1)
+ try:
+ if ignore_interrupt_signal:
+ signal.signal(signal.SIGINT, signal.SIG_IGN)
+ output, _ = p.communicate()
+ finally:
+ if ignore_interrupt_signal:
+ signal.signal(signal.SIGINT, signal.SIG_DFL)
+ if silent:
+ out.close()
+ if return_status:
+ return p.returncode
+ if p.returncode != 0:
+ print 'Error {0} executing command {1}'.format(p.returncode, command)
+ exit(p.returncode)
+ return output or ''
+ return ''
+
+ @cr.Plugin.activemethod
+ def Shell(self, *command):
+ command = ' '.join([pipes.quote(arg) for arg in command])
+ return self._Execute([command], shell=True, ignore_interrupt_signal=True)
+
+ @cr.Plugin.activemethod
+ def Execute(self, *command):
+ return self._Execute(command, shell=False)
+
+ @cr.Plugin.activemethod
+ def ExecuteSilently(self, *command):
+ return self._Execute(command, shell=False, silent=True)
+
+ @cr.Plugin.activemethod
+ def CaptureShell(self, *command):
+ return self._Execute(command,
+ shell=True, capture=True, ignore_dry_run=True)
+
+ @cr.Plugin.activemethod
+ def Capture(self, *command):
+ return self._Execute(command, capture=True, ignore_dry_run=True)
+
+ @cr.Plugin.activemethod
+ def ExecuteStatus(self, *command):
+ return self._Execute(command,
+ ignore_dry_run=True, return_status=True)
+
+ @cr.Plugin.activemethod
+ def YesNo(self, question, default=True):
+ """Ask the user a yes no question
+
+ This blocks until the user responds.
+ Args:
+ question: The question string to show the user
+ default: True if the default response is Yes
+ Returns:
+ True if the response was yes.
+ """
+ options = 'Y/n' if default else 'y/N'
+ result = raw_input(question + ' [' + options + '] ').lower()
+ if result == '':
+ return default
+ return result in ['y', 'yes']
+
+ @classmethod
+ def SearchPath(cls, name, paths=[]):
+ """Searches the PATH for an executable.
+
+ Args:
+ name: the name of the binary to search for.
+ Returns:
+ the set of executables found, or an empty list if none.
+ """
+ result = []
+ extensions = ['']
+ extensions.extend(os.environ.get('PATHEXT', '').split(os.pathsep))
+ paths = [cr.context.Substitute(path) for path in paths if path]
+ paths = paths + os.environ.get('PATH', '').split(os.pathsep)
+ for path in paths:
+ partial = os.path.join(path, name)
+ for extension in extensions:
+ filename = partial + extension
+ if os.path.exists(filename) and filename not in result:
+ result.append(filename)
+ return result
diff --git a/chromium/tools/cr/cr/base/linux.py b/chromium/tools/cr/cr/base/linux.py
new file mode 100644
index 00000000000..407c48efe3f
--- /dev/null
+++ b/chromium/tools/cr/cr/base/linux.py
@@ -0,0 +1,44 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""The linux specific host and platform implementation module."""
+
+import os
+
+import cr
+
+
class LinuxHost(cr.Host):
  """The implementation of Host for linux."""

  ACTIVE = cr.Config.From(
      GOOGLE_CODE='/usr/local/google/code',
  )

  def __init__(self):
    super(LinuxHost, self).__init__()

  def Matches(self):
    """Reports whether this host plugin applies to the running OS."""
    system = cr.Platform.System()
    return system == 'Linux'
+
+
class LinuxPlatform(cr.Platform):
  """The implementation of Platform for the linux target."""

  ACTIVE = cr.Config.From(
      CR_BINARY=os.path.join('{CR_BUILD_DIR}', '{CR_BUILD_TARGET}'),
      CHROME_DEVEL_SANDBOX='/usr/local/sbin/chrome-devel-sandbox',
  )

  @property
  def enabled(self):
    """Usable only when the build host itself runs Linux."""
    system = cr.Platform.System()
    return system == 'Linux'

  @property
  def priority(self):
    # Ranks above the default platform priority when enabled.
    return 2

  @property
  def paths(self):
    # Extra PATH entries to prepend for this platform.
    return ['{GOMA_DIR}']
diff --git a/chromium/tools/cr/cr/base/linux_chromeos.py b/chromium/tools/cr/cr/base/linux_chromeos.py
new file mode 100644
index 00000000000..213744ca3c5
--- /dev/null
+++ b/chromium/tools/cr/cr/base/linux_chromeos.py
@@ -0,0 +1,31 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Linux Chrome OS platform."""
+
+import os
+
+import cr
+
class LinuxChromeOSPlatform(cr.Platform):
  """Platform for Linux Chrome OS target"""

  ACTIVE = cr.Config.From(
      CR_BINARY=os.path.join('{CR_BUILD_DIR}', '{CR_BUILD_TARGET}'),
      CHROME_DEVEL_SANDBOX='/usr/local/sbin/chrome-devel-sandbox',
      # Build-system switches that select the Chrome OS flavor.
      GYP_DEF_chromeos=1,
      GN_ARG_target_os='"chromeos"',
  )

  @property
  def enabled(self):
    """Usable only when the build host runs Linux."""
    system = cr.Platform.System()
    return system == 'Linux'

  @property
  def priority(self):
    # Same rank as the plain Linux platform.
    return 2

  @property
  def paths(self):
    # Extra PATH entries to prepend for this platform.
    return ['{GOMA_DIR}']
diff --git a/chromium/tools/cr/cr/base/mac.py b/chromium/tools/cr/cr/base/mac.py
new file mode 100644
index 00000000000..5bf22761dc3
--- /dev/null
+++ b/chromium/tools/cr/cr/base/mac.py
@@ -0,0 +1,44 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""The mac specific host and platform implementation module."""
+
+import os
+
+import cr
+
+
class MacHost(cr.Host):
  """The implementation of Host for mac."""

  ACTIVE = cr.Config.From(
      GOOGLE_CODE='/usr/local/google/code',
  )

  def __init__(self):
    super(MacHost, self).__init__()

  def Matches(self):
    """Reports whether this host plugin applies to the running OS."""
    system = cr.Platform.System()
    return system == 'Darwin'
+
+
class MacPlatform(cr.Platform):
  """The implementation of Platform for the mac target."""

  ACTIVE = cr.Config.From(
      CR_BINARY=os.path.join('{CR_BUILD_DIR}', '{CR_BUILD_TARGET}'),
      CHROME_DEVEL_SANDBOX='/usr/local/sbin/chrome-devel-sandbox',
  )

  @property
  def enabled(self):
    """Usable only when the build host runs macOS (Darwin)."""
    system = cr.Platform.System()
    return system == 'Darwin'

  @property
  def priority(self):
    # Ranks above the default platform priority when enabled.
    return 2

  @property
  def paths(self):
    # Extra PATH entries to prepend for this platform.
    return ['{GOMA_DIR}']
diff --git a/chromium/tools/cr/cr/base/platform.py b/chromium/tools/cr/cr/base/platform.py
new file mode 100644
index 00000000000..31269521f1e
--- /dev/null
+++ b/chromium/tools/cr/cr/base/platform.py
@@ -0,0 +1,70 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module for the target platform support."""
+
+from importlib import import_module
+import os
+
+import cr
+
# Configuration shared by all platforms: depot_tools is expected inside the
# GOOGLE_CODE checkout, and the output directory defaults to CR_OUT_BASE.
DEFAULT = cr.Config.From(
    DEPOT_TOOLS=os.path.join('{GOOGLE_CODE}', 'depot_tools'),
    CHROMIUM_OUT_DIR='{CR_OUT_BASE}',)
+
+
class Platform(cr.Plugin, cr.Plugin.Type):
  """Base class for implementing cr platforms.

  A platform is the target operating system being compiled for (linux android).
  """

  # Import the stdlib 'platform' module via importlib under a private name;
  # a plain `import platform` would clash with this module's own name.
  _platform_module = import_module('platform', None)
  SELECTOR = 'CR_PLATFORM'

  @classmethod
  def AddArguments(cls, parser):
    """Adds the --platform selection argument to the given parser."""
    parser.add_argument(
        '--platform', dest=cls.SELECTOR,
        choices=cls.Choices(),
        default=None,
        help='Sets the target platform to use. Overrides ' + cls.SELECTOR
    )

  @classmethod
  def System(cls):
    """Returns the host OS name, e.g. 'Linux' or 'Darwin'."""
    return cls._platform_module.system()

  def __init__(self):
    super(Platform, self).__init__()

  def Activate(self):
    """Overridden from cr.Plugin: also installs the PATH fixup hook.

    Registered at most once, even if platforms are re-activated.
    """
    super(Platform, self).Activate()
    if _PathFixup not in cr.context.fixup_hooks:
      cr.context.fixup_hooks.append(_PathFixup)

  @cr.Plugin.activemethod
  def Prepare(self):
    """Hook for platform-specific preparation; default does nothing."""
    pass

  @property
  def paths(self):
    """Extra PATH entries for this platform; default is none."""
    return []
+
+
def _PathFixup(base, key, value):
  """A context fixup that does platform specific modifications to the PATH.

  Prepends the active platform's extra paths (after substitution), drops
  stale goma entries from the incoming value, and removes duplicates while
  preserving order. Values for keys other than PATH pass through untouched.
  """
  if key != 'PATH':
    return value
  goma_suffix = os.path.sep + 'goma'
  paths = []
  for entry in Platform.GetActivePlugin().paths:
    substituted = base.Substitute(entry)
    if substituted not in paths:
      paths.append(substituted)
  for entry in value.split(os.path.pathsep):
    if entry.endswith(goma_suffix):
      continue
    if entry not in paths:
      paths.append(entry)
  return os.path.pathsep.join(paths)
diff --git a/chromium/tools/cr/cr/commands/__init__.py b/chromium/tools/cr/cr/commands/__init__.py
new file mode 100644
index 00000000000..1617a307bc5
--- /dev/null
+++ b/chromium/tools/cr/cr/commands/__init__.py
@@ -0,0 +1,15 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A package for all the built in commands.
+
+This package has all the standard commands built in to the cr tool.
+Most commands use actions to perform the real work.
+"""
+
import cr

# NOTE(review): these three are imported explicitly while the package
# docstring says most commands use actions — presumably 'command' (the
# Command plugin base) must be registered before 'prepare' and 'init';
# confirm before reordering.
cr.Import(__name__, 'command')
cr.Import(__name__, 'prepare')
cr.Import(__name__, 'init')
diff --git a/chromium/tools/cr/cr/commands/args.py b/chromium/tools/cr/cr/commands/args.py
new file mode 100644
index 00000000000..64276422f8f
--- /dev/null
+++ b/chromium/tools/cr/cr/commands/args.py
@@ -0,0 +1,32 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the args command."""
+
+import os
+
+import cr
+
+class ArgsCommand(cr.Command):
+ """The implementation of the args command.
+
+ The args command is meant for editing the current build configuration
+ in a text editor.
+ """
+
+ def __init__(self):
+ super(ArgsCommand, self).__init__()
+ self.help = 'Edit build configuration in a text editor'
+ self.description = ("""
+ Opens the configuration for the currently selected out directory in
+ a text editor.
+ """)
+
+ def Run(self):
+ build_config_path = cr.context.Get('CR_BUILD_CONFIG_PATH')
+ editor = os.environ.get('EDITOR', 'vi')
+ print 'Opening %s in a text editor (%s)...' % (build_config_path, editor)
+ cr.Host.Execute(editor, build_config_path)
+ # TODO(petrcermak): Figure out a way to do this automatically.
+ print 'Please run \'cr prepare\' if you modified the file'
diff --git a/chromium/tools/cr/cr/commands/build.py b/chromium/tools/cr/cr/commands/build.py
new file mode 100644
index 00000000000..1d33130f2d0
--- /dev/null
+++ b/chromium/tools/cr/cr/commands/build.py
@@ -0,0 +1,81 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the build commands."""
+
+import cr
+
+
class BuildCommand(cr.Command):
  """The implementation of the build command.

  This is a thin shell over the Builder.Build method of the selected builder.
  """

  def __init__(self):
    super(BuildCommand, self).__init__()
    self.help = 'Build a target'
    self.description = ("""
        Uses the specified builder for the platform to bring the target
        up to date.
        """)

  def AddArguments(self, subparsers):
    """Registers builder and target options plus a trailing-args consumer."""
    parser = super(BuildCommand, self).AddArguments(subparsers)
    cr.Builder.AddArguments(self, parser)
    cr.Target.AddArguments(self, parser, allow_multiple=True)
    self.ConsumeArgs(parser, 'the builder')
    return parser

  def Run(self):
    """Delegates to the active builder with the selected targets."""
    targets = cr.Target.GetTargets()
    return cr.Builder.Build(targets, cr.context.remains)
+
+
class CleanCommand(cr.Command):
  """The implementation of the clean command.

  This is a thin shell over the Builder.Clean method of the selected builder.
  """

  def __init__(self):
    super(CleanCommand, self).__init__()
    self.help = 'Clean a target'
    self.description = (
        'Uses the specified builder to clean out built files for the target.')

  def AddArguments(self, subparsers):
    """Registers builder and target options plus a trailing-args consumer."""
    parser = super(CleanCommand, self).AddArguments(subparsers)
    cr.Builder.AddArguments(self, parser)
    cr.Target.AddArguments(self, parser, allow_multiple=True)
    self.ConsumeArgs(parser, 'the builder')
    return parser

  def Run(self):
    """Delegates to the active builder with the selected targets."""
    targets = cr.Target.GetTargets()
    return cr.Builder.Clean(targets, cr.context.remains)
+
+
class RebuildCommand(cr.Command):
  """The implementation of the rebuild command.

  This is a thin shell over the Builder.Rebuild method of the selected builder.
  """

  def __init__(self):
    super(RebuildCommand, self).__init__()
    self.help = 'Rebuild a target'
    self.description = (
        'Uses the specified builder for the platform to rebuild a target.')

  def AddArguments(self, subparsers):
    """Registers builder and target options plus a trailing-args consumer."""
    parser = super(RebuildCommand, self).AddArguments(subparsers)
    cr.Builder.AddArguments(self, parser)
    cr.Target.AddArguments(self, parser, allow_multiple=True)
    self.ConsumeArgs(parser, 'the builder')
    return parser

  def Run(self):
    """Delegates to the active builder with the selected targets."""
    targets = cr.Target.GetTargets()
    return cr.Builder.Rebuild(targets, cr.context.remains)
diff --git a/chromium/tools/cr/cr/commands/clobber.py b/chromium/tools/cr/cr/commands/clobber.py
new file mode 100644
index 00000000000..ac371e3aa69
--- /dev/null
+++ b/chromium/tools/cr/cr/commands/clobber.py
@@ -0,0 +1,35 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the clobber command."""
+
+import os
+
+import cr
+
+
+class ClobberCommand(cr.Command):
+ """The implementation of the clobber command.
+
+ The clobber command removes all generated files from the output directory.
+ """
+
+ def __init__(self):
+ super(ClobberCommand, self).__init__()
+ self.help = 'Clobber the current output directory'
+ self.description = ("""
+ This deletes all generated files from the output directory.
+ """)
+
+ def Run(self):
+ self.Clobber()
+
+ @classmethod
+ def Clobber(cls):
+ """Performs the clobber."""
+ build_dir = cr.context.Get('CR_BUILD_DIR')
+ clobber_path = os.path.join('{CR_SRC}', 'build', 'clobber.py')
+ print 'Clobbering output directory %s...' % build_dir
+ cr.Host.Execute(clobber_path, build_dir)
+ print 'Done'
diff --git a/chromium/tools/cr/cr/commands/command.py b/chromium/tools/cr/cr/commands/command.py
new file mode 100644
index 00000000000..d42a688c7a9
--- /dev/null
+++ b/chromium/tools/cr/cr/commands/command.py
@@ -0,0 +1,96 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Module to hold the Command plugin."""
+
+import argparse
+
+import cr
+
+
class Command(cr.Plugin, cr.Plugin.Type):
  """Base class for implementing cr commands.

  These are the sub-commands on the command line, and modify the
  accepted remaining arguments.
  Commands in general do not implement the functionality directly, instead they
  run a sequence of actions.
  """

  @classmethod
  def Select(cls):
    """Called to select which command is active.

    This picks a command based on the first non - argument on the command
    line.
    Returns:
      the selected command, or None if not specified on the command line.
    """
    # '_command' is stashed on the parsed args by AddArguments below via
    # set_defaults, so the chosen subparser identifies its command.
    if cr.context.args:
      return getattr(cr.context.args, '_command', None)
    return None

  def __init__(self):
    super(Command, self).__init__()
    # Placeholder help so an undocumented command is visibly flagged.
    self.help = 'Missing help: {0}'.format(self.__class__.__name__)
    self.description = None
    self.epilog = None
    self.parser = None
    # Most commands need a configured output directory; commands like
    # 'init' override this to False.
    self.requires_build_dir = True

  def AddArguments(self, subparsers):
    """Add arguments to the command line parser.

    Called by the main function to add the command to the command line parser.
    Commands that override this function to add more arguments must invoke
    this method.
    Args:
      subparsers: The argparse subparser manager to add this command to.
    Returns:
      the parser that was built for the command.
    """
    # NOTE(review): add_help=False — presumably help is provided by common
    # arguments added below; confirm.
    self.parser = subparsers.add_parser(
        self.name,
        add_help=False,
        help=self.help,
        description=self.description or self.help,
        epilog=self.epilog,
    )
    # Record this instance so Select() can recover it from the parsed args.
    self.parser.set_defaults(_command=self)
    cr.context.AddCommonArguments(self.parser)
    cr.base.client.AddArguments(self.parser)
    return self.parser

  def ConsumeArgs(self, parser, reason):
    """Adds a remaining argument consumer to the parser.

    A helper method that commands can use to consume all remaining arguments.
    Use for things like lists of targets.
    Args:
      parser: The parser to consume remains for.
      reason: The reason to give the user in the help text.
    """
    # argparse.REMAINDER swallows everything after the known options.
    parser.add_argument(
        '_remains', metavar='arguments',
        nargs=argparse.REMAINDER,
        help='The additional arguments to {0}.'.format(reason)
    )

  def EarlyArgProcessing(self):
    """Called to make decisions based on speculative argument parsing.

    When this method is called, enough of the command line parsing has been
    done that the command is selected. This allows the command to make any
    modifications needed before the final argument parsing is done.
    """
    cr.base.client.ApplyOutArgument()

  @cr.Plugin.activemethod
  def Run(self):
    """The main method of the command.

    This is the only thing that a command has to implement, and it should not
    call this base version.
    """
    raise NotImplementedError('Must be overridden.')
+
diff --git a/chromium/tools/cr/cr/commands/debug.py b/chromium/tools/cr/cr/commands/debug.py
new file mode 100644
index 00000000000..f113691833c
--- /dev/null
+++ b/chromium/tools/cr/cr/commands/debug.py
@@ -0,0 +1,40 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the run command."""
+
+import cr
+
+
class DebugCommand(cr.Command):
  """The implementation of the debug command.

  This is much like the run command except it launches the program under
  a debugger instead.
  """

  def __init__(self):
    super(DebugCommand, self).__init__()
    self.help = 'Debug a binary'

  def AddArguments(self, subparsers):
    """Registers builder, installer, debugger and target options."""
    parser = super(DebugCommand, self).AddArguments(subparsers)
    cr.Builder.AddArguments(self, parser)
    cr.Installer.AddArguments(self, parser)
    cr.Debugger.AddArguments(self, parser)
    cr.Target.AddArguments(self, parser)
    self.ConsumeArgs(parser, 'the binary')
    return parser

  def Run(self):
    """Attaches, restarts, or builds+installs+invokes under the debugger."""
    targets = cr.Target.GetTargets()
    extra_args = cr.context.remains
    if not cr.Debugger.ShouldInvoke():
      cr.Debugger.Attach(targets, extra_args)
      return
    if cr.Installer.Skipping():
      # Nothing to reinstall, so a restart under the debugger is enough.
      cr.Debugger.Restart(targets, extra_args)
      return
    cr.Builder.Build(targets, [])
    cr.Debugger.Kill(targets, [])
    cr.Installer.Reinstall(targets, [])
    cr.Debugger.Invoke(targets, extra_args)
diff --git a/chromium/tools/cr/cr/commands/gn.py b/chromium/tools/cr/cr/commands/gn.py
new file mode 100644
index 00000000000..70d2d06eb18
--- /dev/null
+++ b/chromium/tools/cr/cr/commands/gn.py
@@ -0,0 +1,39 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the gn command."""
+
+import os
+
+import cr
+
+
class GnCommand(cr.Command):
  """The implementation of the gn command.

  The gn command is meant for running the gn tool without having to manually
  specify an out directory.
  """

  def __init__(self):
    super(GnCommand, self).__init__()
    self.help = 'Run gn with the currently selected out directory'
    self.description = ("""
        Runs the gn command with the currently selected out directory as the
        second argument.
        """)

  def AddArguments(self, subparsers):
    """Registers a trailing-args consumer for the gn command line."""
    parser = super(GnCommand, self).AddArguments(subparsers)
    self.ConsumeArgs(parser, 'gn')
    return parser

  def Run(self):
    """Invokes gn, inserting the selected out directory after the verb."""
    out_path = os.path.join(cr.context['CR_SRC'],
                            cr.context['CR_OUT_FULL'])
    args = cr.context.remains
    if not args:
      cr.Host.Execute('gn')
      return
    subcommand, rest = args[0], args[1:]
    cr.Host.Execute('gn', subcommand, out_path, *rest)
diff --git a/chromium/tools/cr/cr/commands/info.py b/chromium/tools/cr/cr/commands/info.py
new file mode 100644
index 00000000000..4e5c2be13cd
--- /dev/null
+++ b/chromium/tools/cr/cr/commands/info.py
@@ -0,0 +1,44 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the info implementation of Command."""
+
+import cr
+
+
+class InfoCommand(cr.Command):
+ """The cr info command implementation."""
+
+ def __init__(self):
+ super(InfoCommand, self).__init__()
+ self.help = 'Print information about the cr environment'
+
+ def AddArguments(self, subparsers):
+ parser = super(InfoCommand, self).AddArguments(subparsers)
+ parser.add_argument(
+ '-s', '--short', dest='_short',
+ action='store_true', default=False,
+ help='Short form results, useful for scripting.'
+ )
+ self.ConsumeArgs(parser, 'the environment')
+ return parser
+
+ def EarlyArgProcessing(self):
+ if getattr(cr.context.args, '_short', False):
+ self.requires_build_dir = False
+ cr.Command.EarlyArgProcessing(self)
+
+ def Run(self):
+ if cr.context.remains:
+ for var in cr.context.remains:
+ if getattr(cr.context.args, '_short', False):
+ val = cr.context.Find(var)
+ if val is None:
+ val = ''
+ print val
+ else:
+ print var, '=', cr.context.Find(var)
+ else:
+ cr.base.client.PrintInfo()
+
diff --git a/chromium/tools/cr/cr/commands/init.py b/chromium/tools/cr/cr/commands/init.py
new file mode 100644
index 00000000000..7ea7b98dc75
--- /dev/null
+++ b/chromium/tools/cr/cr/commands/init.py
@@ -0,0 +1,171 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the init command."""
+
+import os
+
+import cr
+
+# The set of variables to store in the per output configuration.
OUT_CONFIG_VARS = [
    'CR_VERSION',  # Read back on re-init to detect version changes.
    cr.Platform.SELECTOR,
    cr.BuildType.SELECTOR,
    cr.Arch.SELECTOR,
    cr.PrepareOut.SELECTOR,
    'CR_OUT_BASE',
    'CR_OUT_FULL',
]
+
+
+class InitCommand(cr.Command):
+ """The implementation of the init command.
+
+ The init command builds or updates an output directory.
+ It then uses the Prepare and Select commands to get that directory
+ ready to use.
+ """
+
+ def __init__(self):
+ super(InitCommand, self).__init__()
+ self.requires_build_dir = False
+ self.help = 'Create and configure an output directory'
+ self.description = ("""
+ If the .cr directory is not present, build it and add
+ the specified configuration.
+ If the file already exists, update the configuration with any
+ additional settings.
+ """)
+ self._settings = []
+
+ def AddArguments(self, subparsers):
+ """Overridden from cr.Command."""
+ parser = super(InitCommand, self).AddArguments(subparsers)
+ cr.Platform.AddArguments(parser)
+ cr.BuildType.AddArguments(parser)
+ cr.Arch.AddArguments(parser)
+ cr.SelectCommand.AddPrepareArguments(parser)
+ cr.PrepareOut.AddArguments(parser)
+ parser.add_argument(
+ '-s', '--set', dest='_settings', metavar='settings',
+ action='append',
+ help='Configuration overrides.'
+ )
+ return parser
+
+ def EarlyArgProcessing(self):
+ base_settings = getattr(cr.context.args, '_settings', None)
+ if base_settings:
+ self._settings.extend(base_settings)
+ # Do not call super early processing, we do not want to apply
+ # the output arg...
+ out = cr.base.client.GetOutArgument()
+ if out:
+ # Output directory is fully specified
+ # We need to deduce other settings from it's name
+ base, buildtype = os.path.split(out)
+ if not (base and buildtype):
+ print 'Specified output directory must be two levels'
+ exit(1)
+ if not cr.BuildType.FindPlugin(buildtype):
+ print 'Specified build type', buildtype, 'is not valid'
+ print 'Must be one of', ','.join(p.name for p in cr.BuildType.Plugins())
+ exit(1)
+ if (cr.context.args.CR_BUILDTYPE and
+ cr.context.args.CR_BUILDTYPE != buildtype):
+ print 'If --type and --out are both specified, they must match'
+ print 'Got', cr.context.args.CR_BUILDTYPE, 'and', buildtype
+ exit(1)
+ platform = cr.context.args.CR_PLATFORM
+ if not platform:
+ # Try to guess platform based on output name
+ platforms = [p.name for p in cr.Platform.AllPlugins()]
+ matches = [p for p in platforms if p in base]
+ # Get the longest matching string and check if the others are
+ # substrings. This is done to support "linuxchromeos" and "linux".
+ platform = max(matches, key=len)
+ all_matches_are_substrings = all(p in platform for p in matches)
+ if not all_matches_are_substrings or not matches:
+ print 'Platform is not set, and could not be guessed from', base
+ print 'Should be one of', ','.join(platforms)
+ if len(matches) > 1:
+ print 'Matched all of', ','.join(matches)
+ exit(1)
+ generator = cr.context.args.CR_GENERATOR
+ if not generator:
+ generator = 'gn'
+ cr.context.derived.Set(
+ CR_OUT_FULL=out,
+ CR_OUT_BASE=base,
+ CR_PLATFORM=platform,
+ CR_BUILDTYPE=buildtype,
+ CR_GENERATOR=generator
+ )
+ if not 'CR_OUT_BASE' in cr.context:
+ cr.context.derived['CR_OUT_BASE'] = 'out_{CR_PLATFORM}'
+ if not 'CR_OUT_FULL' in cr.context:
+ cr.context.derived['CR_OUT_FULL'] = os.path.join(
+ '{CR_OUT_BASE}', '{CR_BUILDTYPE}')
+
+ def Run(self):
+ """Overridden from cr.Command."""
+ src_path = cr.context.Get('CR_SRC')
+ if not os.path.isdir(src_path):
+ print cr.context.Substitute('Path {CR_SRC} is not a valid client')
+ exit(1)
+
+ # Ensure we have an output directory override ready to fill in
+ # This will only be missing if we are creating a brand new output
+ # directory
+ build_package = cr.auto.build
+
+ # Collect the old version (and float convert)
+ old_version = cr.context.Find('CR_VERSION')
+ try:
+ old_version = float(old_version)
+ except (ValueError, TypeError):
+ old_version = 0.0
+ is_new = not hasattr(build_package, 'config')
+ if is_new:
+
+ class FakeModule(object):
+ OVERRIDES = cr.Config('OVERRIDES')
+
+ def __init__(self):
+ self.__name__ = 'config'
+
+ old_version = None
+ config = FakeModule()
+ setattr(build_package, 'config', config)
+ cr.plugin.ChainModuleConfigs(config)
+
+ # Force override the version
+ build_package.config.OVERRIDES.Set(CR_VERSION=cr.base.client.VERSION)
+ # Add all the variables that we always want to have
+ for name in OUT_CONFIG_VARS:
+ value = cr.context.Find(name)
+ build_package.config.OVERRIDES[name] = value
+ # Apply the settings from the command line
+ for setting in self._settings:
+ name, separator, value = setting.partition('=')
+ name = name.strip()
+ if not separator:
+ value = True
+ else:
+ value = cr.Config.ParseValue(value.strip())
+ build_package.config.OVERRIDES[name] = value
+
+ # Run all the output directory init hooks
+ for hook in cr.InitHook.Plugins():
+ hook.Run(old_version, build_package.config)
+ # Redo activations, they might have changed
+ cr.plugin.Activate()
+
+ # Write out the new configuration, and select it as the default
+ cr.base.client.WriteConfig(
+ use_build_dir=True, data=build_package.config.OVERRIDES.exported)
+ # Prepare the platform in here, using the updated config
+ cr.Platform.Prepare()
+ cr.SelectCommand.Select()
diff --git a/chromium/tools/cr/cr/commands/install.py b/chromium/tools/cr/cr/commands/install.py
new file mode 100644
index 00000000000..20e1dff2b46
--- /dev/null
+++ b/chromium/tools/cr/cr/commands/install.py
@@ -0,0 +1,36 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the install command."""
+
+import cr
+
+
class InstallCommand(cr.Command):
  """The implementation of the install command.

  This first uses Builder.Build to bring the target up to date, and then
  installs it using Installer.Reinstall.
  The builder installs its command line arguments, and you can use those to
  select which builder is used. Selecting the skip builder
  (using --builder=skip) bypasses the build stage.
  """

  def __init__(self):
    super(InstallCommand, self).__init__()
    self.help = 'Install a binary'

  def AddArguments(self, subparsers):
    """Registers builder, installer and target options."""
    parser = super(InstallCommand, self).AddArguments(subparsers)
    cr.Builder.AddArguments(self, parser)
    cr.Installer.AddArguments(self, parser)
    cr.Target.AddArguments(self, parser, allow_multiple=True)
    self.ConsumeArgs(parser, 'the installer')
    return parser

  def Run(self):
    """Builds (unless skipped) and reinstalls the selected targets."""
    targets = cr.Target.GetTargets()
    skipping = cr.Installer.Skipping()
    if not skipping:
      cr.Builder.Build(targets, [])
    cr.Installer.Reinstall(targets, cr.context.remains)
diff --git a/chromium/tools/cr/cr/commands/prepare.py b/chromium/tools/cr/cr/commands/prepare.py
new file mode 100644
index 00000000000..7a70bcb8515
--- /dev/null
+++ b/chromium/tools/cr/cr/commands/prepare.py
@@ -0,0 +1,69 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the prepare command."""
+
+import cr
+
+
class PrepareCommand(cr.Command):
  """The implementation of the prepare command.

  The prepare command is used to perform the steps needed to get an output
  directory ready to use. These should not be the kind of things that need to
  happen every time you build something, but the rarer things that you re-do
  only when you get or add new source files, or change your build options.
  This delegates all it's behavior to implementations of PrepareOut. These will
  (mostly) be in the cr.actions package.
  """

  def __init__(self):
    super(PrepareCommand, self).__init__()
    self.help = 'Prepares an output directory'
    self.description = ("""
        This does any preparation needed for the output directory, such as
        running gyp.
        """)

  def Run(self):
    self.Prepare()

  @classmethod
  def UpdateContext(cls):
    """Lets the active generator update the context without preparing."""
    generator = PrepareOut.GetActivePlugin()
    generator.UpdateContext()

  @classmethod
  def Prepare(cls):
    """Updates the context, then runs the active generator's Prepare."""
    cls.UpdateContext()
    generator = PrepareOut.GetActivePlugin()
    generator.Prepare()
+
+
class PrepareOut(cr.Plugin, cr.Plugin.Type):
  """Base class for output directory preparation plugins.

  See PrepareCommand for details.
  """

  SELECTOR = 'CR_GENERATOR'

  @classmethod
  def AddArguments(cls, parser):
    """Adds the --generator selection argument to the given parser."""
    help_text = ('Sets the build file generator to use. ' +
                 'Overrides %s.' % cls.SELECTOR)
    parser.add_argument(
        '--generator', dest=cls.SELECTOR,
        choices=cls.Choices(),
        default=None,
        help=help_text
    )

  def UpdateContext(self):
    """Update the context if needed.

    This is also used by commands that want the environment setup correctly, but
    are not going to call Prepare directly (such as sync)."""

  def Prepare(self):
    """All PrepareOut plugins must override this method to do their work."""
    raise NotImplementedError('Must be overridden.')
+
diff --git a/chromium/tools/cr/cr/commands/run.py b/chromium/tools/cr/cr/commands/run.py
new file mode 100644
index 00000000000..15d7db06ff1
--- /dev/null
+++ b/chromium/tools/cr/cr/commands/run.py
@@ -0,0 +1,53 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the run command."""
+
+import cr
+
+
class RunCommand(cr.Command):
  """The implementation of the run command.

  This first uses Builder to bring the target up to date.
  It then uses Installer to install the target (if needed), and
  finally it uses Runner to run the target.
  You can use skip version to not perform any of these steps.
  """

  def __init__(self):
    super(RunCommand, self).__init__()
    self.help = 'Invoke a target'

  def AddArguments(self, subparsers):
    """Registers builder, installer, runner and target options."""
    parser = super(RunCommand, self).AddArguments(subparsers)
    cr.Builder.AddArguments(self, parser)
    cr.Installer.AddArguments(self, parser)
    cr.Runner.AddArguments(self, parser)
    cr.Target.AddArguments(self, parser, allow_multiple=False)
    self.ConsumeArgs(parser, 'the binary')
    return parser

  def Run(self):
    """Builds, (re)installs as needed, then launches the selected target."""
    original_targets = cr.Target.GetTargets()
    # Expand the target set with each target's run-time dependencies.
    targets = list(original_targets)
    for target in original_targets:
      targets.extend(target.GetRunDependencies())
    test_targets = [target for target in targets if target.is_test]
    run_targets = [target for target in targets if not target.is_test]
    if cr.Installer.Skipping():
      # No installer, only build test targets.
      build_targets = test_targets
    else:
      build_targets = targets
    if build_targets:
      cr.Builder.Build(build_targets, [])
    if cr.Installer.Skipping():
      # Not installing, so a restart is sufficient.
      cr.Runner.Restart(targets, cr.context.remains)
      return
    cr.Runner.Kill(run_targets, [])
    cr.Installer.Reinstall(run_targets, [])
    cr.Runner.Invoke(original_targets, cr.context.remains)
+
diff --git a/chromium/tools/cr/cr/commands/select.py b/chromium/tools/cr/cr/commands/select.py
new file mode 100644
index 00000000000..cff8e00a065
--- /dev/null
+++ b/chromium/tools/cr/cr/commands/select.py
@@ -0,0 +1,59 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the select command."""
+
+import cr
+
# The set of variables SELECT writes into the client plugin to control the
# active output directory.
# NOTE(review): not referenced elsewhere in this module — Select() builds its
# data dict inline. Presumably kept as public API for other modules; confirm
# before removing.
SELECT_OUT_VARS = ['CR_OUT_FULL']
+
+
class SelectCommand(cr.Command):
  """The implementation of the select command.

  The select command is used to set the default output directory used by all
  other commands. It does this by writing out a plugin into the client root
  that sets the active output path.
  """

  def __init__(self):
    super(SelectCommand, self).__init__()
    self.help = 'Select an output directory'
    self.description = ("""
        This makes the specified output directory the default for all future
        operations. It also invokes prepare on that directory.
        """)

  def AddArguments(self, subparsers):
    """Registers the shared --no-prepare option."""
    parser = super(SelectCommand, self).AddArguments(subparsers)
    self.AddPrepareArguments(parser)
    return parser

  @classmethod
  def AddPrepareArguments(cls, parser):
    """Adds --no-prepare; shared with the init command."""
    parser.add_argument(
        '--no-prepare', dest='_no_prepare',
        action='store_true', default=False,
        help='Don\'t prepare the output directory.'
    )

  def Run(self):
    self.Select()

  @classmethod
  def Select(cls):
    """Performs the select.

    This is also called by the init command to auto select the new output
    directory.
    """
    data = dict(CR_OUT_FULL=cr.context.Get('CR_OUT_FULL'))
    cr.base.client.WriteConfig(use_build_dir=False, data=data)
    cr.base.client.PrintInfo()
    # Run the post-select actions unless --no-prepare was given.
    if not getattr(cr.context.args, '_no_prepare', None):
      cr.PrepareCommand.Prepare()
diff --git a/chromium/tools/cr/cr/commands/shell.py b/chromium/tools/cr/cr/commands/shell.py
new file mode 100644
index 00000000000..2cd338deb5e
--- /dev/null
+++ b/chromium/tools/cr/cr/commands/shell.py
@@ -0,0 +1,53 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the shell command."""
+
+import os
+import tempfile
+
+import cr
+
+
+class ShellCommand(cr.Command):
+ """The implementation of the shell command.
+
+ The shell command is the escape hatch that lets user run any program in the
+ same environment that cr would use if it were running it.
+ """
+
+ def __init__(self):
+ super(ShellCommand, self).__init__()
+ self.help = 'Launch a shell'
+ self.description = ("""
+ If no arguments are present, this launches an interactive system
+ shell (ie bash) with the environment modified to that used for the
+ build systems.
+ If any arguments are present, they are used as a command line to run
+ in that shell.
+ This allows you to run commands that are not yet available natively
+ in cr.
+ """)
+
+ def AddArguments(self, subparsers):
+ parser = super(ShellCommand, self).AddArguments(subparsers)
+ self.ConsumeArgs(parser, 'the shell')
+ return parser
+
+ def Run(self):
+ if cr.context.remains:
+ cr.Host.Shell(*cr.context.remains)
+ return
+ # If we get here, we are trying to launch an interactive shell
+ shell = os.environ.get('SHELL', None)
+ if shell is None:
+ print 'Don\'t know how to run a shell on this system'
+ elif shell.endswith('bash'):
+ ps1 = '[CR] ' + os.environ.get('PS1', '')
+ with tempfile.NamedTemporaryFile() as rcfile:
+ rcfile.write('source ~/.bashrc\nPS1="'+ps1+'"')
+ rcfile.flush()
+ cr.Host.Execute(shell, '--rcfile', rcfile.name)
+ else:
+ cr.Host.Execute(shell)
diff --git a/chromium/tools/cr/cr/commands/sync.py b/chromium/tools/cr/cr/commands/sync.py
new file mode 100644
index 00000000000..90f6abe42a1
--- /dev/null
+++ b/chromium/tools/cr/cr/commands/sync.py
@@ -0,0 +1,57 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for the sync command."""
+
+import os.path
+
+import cr
+
+
+class SyncCommand(cr.Command):
+ """The implementation of the sync command.
+
+ This command is a very thin shim over the gclient sync, and should remain so.
+ The only significant thing it adds is that the environment is set up so that
+ the run-hooks will do their work in the selected output directory.
+ """
+
+ # The configuration loaded to support this command.
+ DEFAULT = cr.Config.From(
+ GCLIENT_BINARY=os.path.join('{DEPOT_TOOLS}', 'gclient'),
+ )
+
+ # A placeholder for the detected gclient environment
+ DETECTED = cr.Config('DETECTED')
+
+ def __init__(self):
+ super(SyncCommand, self).__init__()
+ self.help = 'Sync the source tree'
+ self.description = 'Run gclient sync with the right environment.'
+
+ def AddArguments(self, subparsers):
+ parser = super(SyncCommand, self).AddArguments(subparsers)
+ self.ConsumeArgs(parser, 'gclient')
+ # TODO(iancottrell): clean no-hooks support would be nice.
+ return parser
+
+ def Run(self):
+ self.Sync(cr.context.remains)
+
+ @staticmethod
+ def Sync(args):
+ cr.PrepareCommand.UpdateContext()
+ # TODO(iancottrell): we should probably run the python directly,
+ # rather than the shell wrapper
+ # TODO(iancottrell): try to help out when the local state is not a good
+ # one to do a sync in
+ cr.Host.Execute('{GCLIENT_BINARY}', 'sync', *args)
+
+ @classmethod
+ def ClassInit(cls):
+ # Attempt to detect gclient and it's parent repository.
+ gclient_binaries = cr.Host.SearchPath('gclient')
+ if gclient_binaries:
+ cls.DETECTED.Set(GCLIENT_BINARY=gclient_binaries[0])
+ cls.DETECTED.Set(DEPOT_TOOLS=os.path.dirname(gclient_binaries[0]))
diff --git a/chromium/tools/cr/cr/config.py b/chromium/tools/cr/cr/config.py
new file mode 100644
index 00000000000..7c235b6a372
--- /dev/null
+++ b/chromium/tools/cr/cr/config.py
@@ -0,0 +1,244 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configuration variable management for the cr tool.
+
+This holds the classes that support the hierarchical variable management used
+in the cr tool to provide all the command configuration controls.
+"""
+
+import string
+
+import cr.visitor
+
+_PARSE_CONSTANT_VALUES = [None, True, False]
+_PARSE_CONSTANTS = dict((str(value), value) for value in _PARSE_CONSTANT_VALUES)
+
+# GLOBALS is the singleton used to tie static global configuration objects
+# together.
+GLOBALS = []
+
+
+class _MissingToErrorFormatter(string.Formatter):
+ """A string formatter used in value resolve.
+
+ The main extra it adds is a new conversion specifier 'e' that throws a
+ KeyError if it could not find the value.
+ This allows a string value to use {A_KEY!e} to indicate that it is a
+ formatting error if A_KEY is not present.
+ """
+
+ def convert_field(self, value, conversion):
+ if conversion == 'e':
+ result = str(value)
+ if not result:
+ raise KeyError('unknown')
+ return result
+ return super(_MissingToErrorFormatter, self).convert_field(
+ value, conversion)
+
+
+class _Tracer(object):
+ """Traces variable lookups.
+
+ This adds a hook to a config object, and uses it to track all variable
+ lookups that happen and add them to a trail. When done, it removes the hook
+ again. This is used to provide debugging information about what variables are
+ used in an operation.
+ """
+
+ def __init__(self, config):
+ self.config = config
+ self.trail = []
+
+ def __enter__(self):
+ self.config.fixup_hooks.append(self._Trace)
+ return self
+
+ def __exit__(self, *_):
+ self.config.fixup_hooks.remove(self._Trace)
+ self.config.trail = self.trail
+ return False
+
+ def _Trace(self, _, key, value):
+ self.trail.append((key, value))
+ return value
+
+
+class Config(cr.visitor.Node, cr.loader.AutoExport):
+ """The main variable holding class.
+
+ This holds a set of unresolved key value pairs, and the set of child Config
+ objects that should be referenced when looking up a key.
+ Key search is one in a pre-order traversal, and new children are prepended.
+ This means parents override children, and the most recently added child
+ overrides the rest.
+
+ Values can be simple python types, callable dynamic values, or strings.
+ If the value is a string, it is assumed to be a standard python format string
+ where the root config object is used to resolve the keys. This allows values
+ to refer to variables that are overriden in another part of the hierarchy.
+ """
+
+ @classmethod
+ def From(cls, *args, **kwargs):
+ """Builds an unnamed config object from a set of key,value args."""
+ return Config('??').Apply(args, kwargs)
+
+ @classmethod
+ def If(cls, condition, true_value, false_value=''):
+ """Returns a config value that selects a value based on the condition.
+
+ Args:
+ condition: The variable name to select a value on.
+ true_value: The value to use if the variable is True.
+ false_value: The value to use if the resolved variable is False.
+ Returns:
+ A dynamic value.
+ """
+ def Resolve(base):
+ test = base.Get(condition)
+ if test:
+ value = true_value
+ else:
+ value = false_value
+ return base.Substitute(value)
+ return Resolve
+
+ @classmethod
+ def Optional(cls, value, alternate=''):
+ """Returns a dynamic value that defaults to an alternate.
+
+ Args:
+ value: The main value to resolve.
+ alternate: The value to use if the main value does not resolve.
+ Returns:
+ value if it resolves, alternate otherwise.
+ """
+ def Resolve(base):
+ try:
+ return base.Substitute(value)
+ except KeyError:
+ return base.Substitute(alternate)
+ return Resolve
+
+ def __init__(self, name='--', literal=False, export=None, enabled=True):
+ super(Config, self).__init__(name=name, enabled=enabled, export=export)
+ self._literal = literal
+ self._formatter = _MissingToErrorFormatter()
+ self.fixup_hooks = []
+ self.trail = []
+
+ @property
+ def literal(self):
+ return self._literal
+
+ def Substitute(self, value):
+ return self._formatter.vformat(str(value), (), self)
+
+ def Resolve(self, visitor, key, value):
+ """Resolves a value to it's final form.
+
+ Raw values can be callable, simple values, or contain format strings.
+ Args:
+ visitor: The visitor asking to resolve a value.
+ key: The key being visited.
+ value: The unresolved value associated with the key.
+ Returns:
+ the fully resolved value.
+ """
+ error = None
+ if callable(value):
+ value = value(self)
+ # Using existence of value.swapcase as a proxy for is a string
+ elif hasattr(value, 'swapcase'):
+ if not visitor.current_node.literal:
+ try:
+ value = self.Substitute(value)
+ except KeyError as e:
+ error = e
+ return self.Fixup(key, value), error
+
+ def Fixup(self, key, value):
+ for hook in self.fixup_hooks:
+ value = hook(self, key, value)
+ return value
+
+ def Missing(self, key):
+ for hook in self.fixup_hooks:
+ hook(self, key, None)
+ raise KeyError(key)
+
+ @staticmethod
+ def ParseValue(value):
+ """Converts a string to a value.
+
+ Takes a string from something like an environment variable, and tries to
+ build an internal typed value. Recognizes Null, booleans, and numbers as
+ special.
+ Args:
+ value: The the string value to interpret.
+ Returns:
+ the parsed form of the value.
+ """
+ if value in _PARSE_CONSTANTS:
+ return _PARSE_CONSTANTS[value]
+ try:
+ return int(value)
+ except ValueError:
+ pass
+ try:
+ return float(value)
+ except ValueError:
+ pass
+ return value
+
+ def _Set(self, key, value):
+ # early out if the value did not change, so we don't call change callbacks
+ if value == self._values.get(key, None):
+ return
+ self._values[key] = value
+ self.NotifyChanged()
+ return self
+
+ def ApplyMap(self, arg):
+ for key, value in arg.items():
+ self._Set(key, value)
+ return self
+
+ def Apply(self, args, kwargs):
+ """Bulk set variables from arguments.
+
+ Intended for internal use by the Set and From methods.
+ Args:
+ args: must be either a dict or something that can build a dict.
+ kwargs: must be a dict.
+ Returns:
+ self for easy chaining.
+ """
+ if len(args) == 1:
+ arg = args[0]
+ if isinstance(arg, dict):
+ self.ApplyMap(arg)
+ else:
+ self.ApplyMap(dict(arg))
+ elif len(args) > 1:
+ self.ApplyMap(dict(args))
+ self.ApplyMap(kwargs)
+ return self
+
+ def Set(self, *args, **kwargs):
+ return self.Apply(args, kwargs)
+
+ def Trace(self):
+ return _Tracer(self)
+
+ def __getitem__(self, key):
+ return self.Get(key)
+
+ def __setitem__(self, key, value):
+ self._Set(key, value)
+
+ def __contains__(self, key):
+ return self.Find(key) is not None
diff --git a/chromium/tools/cr/cr/fixups/__init__.py b/chromium/tools/cr/cr/fixups/__init__.py
new file mode 100644
index 00000000000..86bfa2ad3cc
--- /dev/null
+++ b/chromium/tools/cr/cr/fixups/__init__.py
@@ -0,0 +1,9 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A package for all the version fixups.
+
+All the code in this package is there to fix up older output directories and
+clients to a form that works with the current version of cr.
+"""
diff --git a/chromium/tools/cr/cr/fixups/arch.py b/chromium/tools/cr/cr/fixups/arch.py
new file mode 100644
index 00000000000..a756272f523
--- /dev/null
+++ b/chromium/tools/cr/cr/fixups/arch.py
@@ -0,0 +1,54 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module for architecture output directory fixups."""
+
+import cr
+
+
+class _ArchInitHookHelper(cr.InitHook):
+ """Base class helper for CR_ARCH value fixups."""
+
+ def _VersionTest(self, old_version):
+ _ = old_version
+ return True
+
+ def _ArchConvert(self, old_arch):
+ return old_arch
+
+ def Run(self, old_version, config):
+ if old_version is None or not self._VersionTest(old_version):
+ return
+ old_arch = config.OVERRIDES.Find(cr.Arch.SELECTOR)
+ new_arch = self._ArchConvert(old_arch)
+ if new_arch != old_arch:
+ print '** Fixing architecture from {0} to {1}'.format(old_arch, new_arch)
+ config.OVERRIDES[cr.Arch.SELECTOR] = new_arch
+
+
+class WrongArchDefaultInitHook(_ArchInitHookHelper):
+ """Fixes bad initial defaults.
+
+ In the initial versions of cr before output directories were versioned
+ it was writing invalid architecture defaults. This detects that case and sets
+ the architecture to the current default instead.
+ """
+
+ def _VersionTest(self, old_version):
+ return old_version <= 0.0
+
+ def _ArchConvert(self, _):
+ return cr.Arch.default.name
+
+
+class MipsAndArmRenameInitHook(_ArchInitHookHelper):
+ """Fixes rename of Mips and Arm to Mips32 and Arm32."""
+
+ def _ArchConvert(self, old_arch):
+ if old_arch == 'mips':
+ return cr.Mips32Arch.GetInstance().name
+ if old_arch == 'arm':
+ return cr.Arm32Arch.GetInstance().name
+ return old_arch
+
diff --git a/chromium/tools/cr/cr/loader.py b/chromium/tools/cr/cr/loader.py
new file mode 100644
index 00000000000..b0bbb9f2761
--- /dev/null
+++ b/chromium/tools/cr/cr/loader.py
@@ -0,0 +1,126 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module scan and load system.
+
+The main interface to this module is the Scan function, which triggers a
+recursive scan of all packages and modules below cr, with modules being
+imported as they are found.
+This allows all the plugins in the system to self register.
+The aim is to make writing plugins as simple as possible, minimizing the
+boilerplate so the actual functionality is clearer.
+"""
+from importlib import import_module
+import os
+import sys
+
+import cr
+
+# This is the name of the variable inserted into modules to track which
+# scanners have been applied.
+_MODULE_SCANNED_TAG = '_CR_MODULE_SCANNED'
+
+
+class AutoExport(object):
+ """A marker for classes that should be promoted up into the cr namespace."""
+
+
+def _AutoExportScanner(module):
+ """Scan the modules for things that need wiring up automatically."""
+ for name, value in module.__dict__.items():
+ if isinstance(value, type) and issubclass(value, AutoExport):
+ # Add this straight to the cr module.
+ if not hasattr(cr, name):
+ setattr(cr, name, value)
+
+
+scan_hooks = [_AutoExportScanner]
+
+
+def _Import(name):
+ """Import a module or package if it is not already imported."""
+ module = sys.modules.get(name, None)
+ if module is not None:
+ return module
+ return import_module(name, None)
+
+
+def _TryImport(name):
+ """Try to import a module or package if it is not already imported."""
+ try:
+ return _Import(name)
+ except ImportError:
+ if cr.context.verbose:
+ print 'Warning: Failed to load module', name
+ return None
+
+
+def _ScanModule(module):
+ """Runs all the scan_hooks for a module."""
+ scanner_tags = getattr(module, _MODULE_SCANNED_TAG, None)
+ if scanner_tags is None:
+ # First scan, add the scanned marker set.
+ scanner_tags = set()
+ setattr(module, _MODULE_SCANNED_TAG, scanner_tags)
+ for scan in scan_hooks:
+ if scan not in scanner_tags:
+ scanner_tags.add(scan)
+ scan(module)
+
+
+def _ScanPackage(package):
+ """Scan a package for child packages and modules."""
+ modules = []
+ # Recurse sub folders.
+ for path in package.__path__:
+ try:
+ basenames = sorted(os.listdir(path))
+ except OSError:
+ basenames = []
+ packages = []
+ for basename in basenames:
+ fullpath = os.path.join(path, basename)
+ if os.path.isdir(fullpath):
+ name = '.'.join([package.__name__, basename])
+ packages.append(name)
+ elif basename.endswith('.py') and not basename.startswith('_'):
+ name = '.'.join([package.__name__, basename[:-3]])
+ module = _TryImport(name)
+ if module:
+ _ScanModule(module)
+ modules.append(module)
+ for name in packages:
+ child = _TryImport(name)
+ if child:
+ modules.extend(_ScanPackage(child))
+ return modules
+
+
+def Import(package, name):
+ module = _Import(package + '.' + name)
+ path = getattr(module, '__path__', None)
+ if path:
+ _ScanPackage(module)
+ else:
+ _ScanModule(module)
+ return module
+
+
+def Scan():
+ """Scans from the cr package down, loading modules as needed.
+
+ This finds all packages and modules below the cr package, by scanning the
+ file system. It imports all the packages, and then runs post import hooks on
+ each module to do any automated work. One example of this is the hook that
+ finds all classes that extend AutoExport and copies them up into the cr
+ namespace directly.
+
+ Modules are allowed to refer to each other, their import will be retried
+ until it succeeds or no progress can be made on any module.
+ """
+ modules = _ScanPackage(cr)
+ # Now scan all the found modules one more time.
+ # This happens after all imports, in case any imports register scan hooks.
+ for module in modules:
+ _ScanModule(module)
diff --git a/chromium/tools/cr/cr/plugin.py b/chromium/tools/cr/cr/plugin.py
new file mode 100644
index 00000000000..1dbf9f5f8bd
--- /dev/null
+++ b/chromium/tools/cr/cr/plugin.py
@@ -0,0 +1,336 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""The plugin management system for the cr tool.
+
+This holds the Plugin class and supporting code, that controls how plugins are
+found and used.
+The module registers a scan hook with the cr.loader system to enable it to
+discover plugins as they are loaded.
+"""
+from operator import attrgetter
+
+import cr
+import cr.loader
+
+
+def _PluginConfig(name, only_enabled=False, only_active=False):
+ config = cr.Config(name)
+ config.only_active = only_active
+ config.only_enabled = only_enabled or config.only_active
+ config.property_name = name.lower() + '_config'
+ return config
+
+_selectors = cr.Config('PRIORITY')
+CONFIG_TYPES = [
+ # Lowest priority, always there default values.
+ _PluginConfig('DEFAULT').AddChild(_selectors),
+ # Only turned on if the plugin is enabled.
+ _PluginConfig('ENABLED', only_enabled=True),
+ # Only turned on while the plugin is the active one.
+ _PluginConfig('ACTIVE', only_active=True),
+ # Holds detected values for active plugins.
+ _PluginConfig('DETECTED', only_active=True),
+ # Holds overrides, used in custom setup plugins.
+ _PluginConfig('OVERRIDES'),
+]
+
+cr.config.GLOBALS.extend(CONFIG_TYPES)
+_plugins = {}
+
+
+# Actually a decorator, so pylint: disable=invalid-name
+class classproperty(object):
+ """This adds a property to a class.
+
+ This is like a simple form of @property except it is for the class, rather
+ than instances of the class. Only supports readonly properties.
+ """
+
+ def __init__(self, getter):
+ self.getter = getter
+
+ def __get__(self, instance, owner):
+ return self.getter(owner)
+
+
+class DynamicChoices(object):
+ """Manages the list of active plugins for command line options.
+
+ Looks like a simple iterable, but it can change as the underlying plugins
+ arrive and enable/disable themselves. This allows it to be used as the
+ set of valid choices for the argparse command line options.
+ """
+
+ # If this is True, all DynamicChoices only return active plugins.
+ # If false, all plugins are included.
+ only_active = True
+
+ def __init__(self, cls):
+ self.cls = cls
+
+ def __contains__(self, name):
+ return self.cls.FindPlugin(name, self.only_active) is not None
+
+ def __iter__(self):
+ return [p.name for p in self.cls.Plugins()].__iter__()
+
+
+def _FindRoot(cls):
+ if Plugin.Type in cls.__bases__:
+ return cls
+ for base in cls.__bases__:
+ result = _FindRoot(base)
+ if result is not None:
+ return result
+ return None
+
+
+class Plugin(cr.loader.AutoExport):
+ """Base class for managing registered plugin types."""
+
+ class Type(object):
+ """Base class that tags a class as an abstract plugin type."""
+
+ class activemethod(object):
+ """A decorator that delegates a static method to the active plugin.
+
+ Makes a static method that delegates to the equivalent method on the
+ active instance of the plugin type.
+ """
+
+ def __init__(self, method):
+ self.method = method
+
+ def __get__(self, instance, owner):
+ def unbound(*args, **kwargs):
+ active = owner.GetActivePlugin()
+ if not active:
+ print 'No active', owner.__name__
+ exit(1)
+ method = getattr(active, self.method.__name__, None)
+ if not method:
+ print owner.__name__, 'does not support', self.method.__name__
+ exit(1)
+ return method(*args, **kwargs)
+
+ def bound(*args, **kwargs):
+ return self.method(instance, *args, **kwargs)
+
+ if instance is None:
+ return unbound
+ return bound
+
+ def __init__(self):
+ # Default the name to the lowercased class name.
+ self._name = self.__class__.__name__.lower()
+ # Strip the common suffix if present.
+ self._root = _FindRoot(self.__class__)
+ rootname = self._root.__name__.lower()
+ if self._name.endswith(rootname) and self.__class__ != self._root:
+ self._name = self._name[:-len(rootname)]
+ for config_root in CONFIG_TYPES:
+ config = cr.Config()
+ setattr(self, config_root.property_name, config)
+ self._is_active = False
+
+ def Init(self):
+ """Post plugin registration initialisation method."""
+ for config_root in CONFIG_TYPES:
+ config = getattr(self, config_root.property_name)
+ config.name = self.name
+ if config_root.only_active and not self.is_active:
+ config.enabled = False
+ if config_root.only_enabled and not self.enabled:
+ config.enabled = False
+ child = getattr(self.__class__, config_root.name, None)
+ if child is not None:
+ child.name = self.__class__.__name__
+ config.AddChild(child)
+ config_root.AddChild(config)
+
+ @property
+ def name(self):
+ return self._name
+
+ @property
+ def priority(self):
+ return 0
+
+ @property
+ def enabled(self):
+ # By default all non type classes are enabled.
+ return Plugin.Type not in self.__class__.__bases__
+
+ @property
+ def is_active(self):
+ return self._is_active
+
+ def Activate(self):
+ assert not self._is_active
+ self._is_active = True
+ for config_root in CONFIG_TYPES:
+ if config_root.only_active:
+ getattr(self, config_root.property_name).enabled = True
+
+ def Deactivate(self):
+ assert self._is_active
+ self._is_active = False
+ for config_root in CONFIG_TYPES:
+ if config_root.only_active:
+ getattr(self, config_root.property_name).enabled = False
+
+ @classmethod
+ def ClassInit(cls):
+ pass
+
+ @classmethod
+ def GetInstance(cls):
+ """Gets an instance of this plugin.
+
+ This looks in the plugin registry, and if an instance is not found a new
+ one is built and registered.
+
+ Returns:
+ The registered plugin instance.
+ """
+ plugin = _plugins.get(cls, None)
+ if plugin is None:
+ # Run delayed class initialization
+ cls.ClassInit()
+ # Build a new instance of cls, and register it as the main instance.
+ plugin = cls()
+ _plugins[cls] = plugin
+ # Wire up the hierarchy for Config objects.
+ for name, value in cls.__dict__.items():
+ if isinstance(value, cr.Config):
+ for base in cls.__bases__:
+ child = getattr(base, name, None)
+ if child is not None:
+ value.AddChild(child)
+ plugin.Init()
+ return plugin
+
+ @classmethod
+ def AllPlugins(cls):
+ # Don't yield abstract roots, just children. We detect roots as direct
+ # sub classes of Plugin.Type
+ if Plugin.Type not in cls.__bases__:
+ yield cls.GetInstance()
+ for child in cls.__subclasses__():
+ for p in child.AllPlugins():
+ yield p
+
+ @classmethod
+ def UnorderedPlugins(cls):
+ """Returns all enabled plugins of type cls, in undefined order."""
+ plugin = cls.GetInstance()
+ if plugin.enabled:
+ yield plugin
+ for child in cls.__subclasses__():
+ for p in child.UnorderedPlugins():
+ yield p
+
+ @classmethod
+ def Plugins(cls):
+ """Return all enabled plugins of type cls in priority order."""
+ return sorted(cls.UnorderedPlugins(),
+ key=attrgetter('priority'), reverse=True)
+
+ @classmethod
+ def Choices(cls):
+ return DynamicChoices(cls)
+
+ @classmethod
+ def FindPlugin(cls, name, only_active=True):
+ if only_active:
+ plugins = cls.UnorderedPlugins()
+ else:
+ plugins = cls.AllPlugins()
+ for plugin in plugins:
+ if plugin.name == name or plugin.__class__.__name__ == name:
+ return plugin
+ return None
+
+ @classmethod
+ def GetPlugin(cls, name):
+ result = cls.FindPlugin(name)
+ if result is None:
+ raise KeyError(name)
+ return result
+
+ @classmethod
+ def GetAllActive(cls):
+ return [plugin for plugin in cls.UnorderedPlugins() if plugin.is_active]
+
+ @classmethod
+ def GetActivePlugin(cls):
+ """Gets the active plugin of type cls.
+
+ This method will select a plugin to be the active one, and will activate
+ the plugin if needed.
+ Returns:
+ the plugin that is currently active.
+ """
+ plugin, _ = _GetActivePlugin(cls)
+ return plugin
+
+ @classproperty
+ def default(cls):
+ """Returns the plugin that should be used if the user did not choose one."""
+ result = None
+ for plugin in cls.UnorderedPlugins():
+ if not result or plugin.priority > result.priority:
+ result = plugin
+ return result
+
+ @classmethod
+ def Select(cls):
+ """Called to determine which plugin should be the active one."""
+ plugin = cls.default
+ selector = getattr(cls, 'SELECTOR', None)
+ if selector:
+ if plugin is not None:
+ _selectors[selector] = plugin.name
+ name = cr.context.Find(selector)
+ if name is not None:
+ plugin = cls.FindPlugin(name)
+ return plugin
+
+
+def ChainModuleConfigs(module):
+ """Detects and connects the default Config objects from a module."""
+ for config_root in CONFIG_TYPES:
+ if hasattr(module, config_root.name):
+ config = getattr(module, config_root.name)
+ config.name = module.__name__
+ config_root.AddChild(config)
+
+
+cr.loader.scan_hooks.append(ChainModuleConfigs)
+
+
+def _GetActivePlugin(cls):
+ activated = False
+ actives = cls.GetAllActive()
+ plugin = cls.Select()
+ for active in actives:
+ if active != plugin:
+ active.Deactivate()
+ if plugin and not plugin.is_active:
+ activated = True
+ plugin.Activate()
+ return plugin, activated
+
+
+def Activate():
+ """Activates a plugin for all known plugin types."""
+ types = Plugin.Type.__subclasses__()
+ modified = True
+ while modified:
+ modified = False
+ for child in types:
+ _, activated = _GetActivePlugin(child)
+ if activated:
+ modified = True
diff --git a/chromium/tools/cr/cr/targets/__init__.py b/chromium/tools/cr/cr/targets/__init__.py
new file mode 100644
index 00000000000..e44e02bcdf5
--- /dev/null
+++ b/chromium/tools/cr/cr/targets/__init__.py
@@ -0,0 +1,13 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A package for all the built in commands.
+
+This package has all the standard commands built in to the cr tool.
+Most commands use actions to perform the real work.
+"""
+
+import cr
+
+cr.Import(__name__, 'target')
diff --git a/chromium/tools/cr/cr/targets/chrome.py b/chromium/tools/cr/cr/targets/chrome.py
new file mode 100644
index 00000000000..25e37e8b0fc
--- /dev/null
+++ b/chromium/tools/cr/cr/targets/chrome.py
@@ -0,0 +1,24 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module for the chrome targets."""
+
+import cr
+
+
+class ChromeTarget(cr.NamedTarget):
+ NAME = 'chrome'
+ CONFIG = cr.Config.From(
+ CR_RUN_ARGUMENTS=cr.Config.Optional('-d "{CR_URL!e}"'),
+ CR_TARGET_NAME='Chrome',
+ )
+
+
+class ChromeTestTarget(cr.NamedTarget):
+ NAME = 'chrome_test'
+ CONFIG = cr.Config.From(
+ CR_TARGET_NAME='ChromeTest',
+ CR_TEST_TYPE=cr.Target.INSTRUMENTATION_TEST,
+ CR_RUN_DEPENDENCIES=[ChromeTarget.NAME],
+ )
diff --git a/chromium/tools/cr/cr/targets/chrome_public.py b/chromium/tools/cr/cr/targets/chrome_public.py
new file mode 100644
index 00000000000..5f0311ab683
--- /dev/null
+++ b/chromium/tools/cr/cr/targets/chrome_public.py
@@ -0,0 +1,27 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module for the chrome_public targets."""
+
+import cr
+
+
+class ChromePublicTarget(cr.NamedTarget):
+ NAME = 'chrome_public'
+ CONFIG = cr.Config.From(
+ CR_RUN_ARGUMENTS=cr.Config.Optional('-d "{CR_URL!e}"'),
+ CR_TARGET_NAME='ChromePublic',
+ CR_PACKAGE='org.chromium.chrome',
+ CR_ACTIVITY='com.google.android.apps.chrome.Main',
+ )
+
+
+class ChromePublicTestTarget(cr.NamedTarget):
+ NAME = 'chrome_public_test'
+ CONFIG = cr.Config.From(
+ CR_TARGET_NAME='ChromePublicTest',
+ CR_TEST_TYPE=cr.Target.INSTRUMENTATION_TEST,
+ CR_RUN_DEPENDENCIES=[ChromePublicTarget.NAME],
+ )
+
diff --git a/chromium/tools/cr/cr/targets/content_shell.py b/chromium/tools/cr/cr/targets/content_shell.py
new file mode 100644
index 00000000000..753e848d734
--- /dev/null
+++ b/chromium/tools/cr/cr/targets/content_shell.py
@@ -0,0 +1,26 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module for the content_shell targets."""
+
+import cr
+
+
+class ContentShellTarget(cr.NamedTarget):
+ NAME = 'content_shell'
+ CONFIG = cr.Config.From(
+ CR_RUN_ARGUMENTS=cr.Config.Optional('-d "{CR_URL!e}"'),
+ CR_TARGET_NAME='ContentShell',
+ CR_PACKAGE='org.chromium.content_shell_apk',
+ CR_ACTIVITY='.ContentShellActivity',
+ )
+
+
+class ContentShellTestTarget(cr.NamedTarget):
+ NAME = 'content_shell_test'
+ CONFIG = cr.Config.From(
+ CR_TARGET_NAME='ContentShellTest',
+ CR_TEST_TYPE=cr.Target.INSTRUMENTATION_TEST,
+ CR_RUN_DEPENDENCIES=[ContentShellTarget.NAME],
+ )
diff --git a/chromium/tools/cr/cr/targets/target.py b/chromium/tools/cr/cr/targets/target.py
new file mode 100644
index 00000000000..1bd8eca7910
--- /dev/null
+++ b/chromium/tools/cr/cr/targets/target.py
@@ -0,0 +1,159 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Module to hold the Target plugin."""
+
+import operator
+import re
+
+import cr
+import cr.base.context
+
+DEFAULT = cr.Config.From(
+ CR_DEFAULT_TARGET='chrome',
+)
+
+
+class Target(cr.base.context.Context, cr.AutoExport):
+ """Base class for implementing cr targets.
+
+ A target is something that can be built and run.
+ """
+
+ # The default base priority
+ PRIORITY = 0
+ # The default pattern used to try to detect whether a target is a test and
+ # should use the test runner.
+ TEST_PATTERN = re.compile('tests?$')
+ # The special "test type" that means it's not a test.
+ NOT_A_TEST = 'no'
+ # The default choice for the type of test when it can't be determined.
+ NORMAL_TEST = 'gtest'
+ INSTRUMENTATION_TEST = 'instrumentation'
+ # TODO(iancottrell): support the other test types
+ TEST_TYPES = [NOT_A_TEST, NORMAL_TEST, INSTRUMENTATION_TEST]
+
+ def __init__(self, target_name):
+ super(Target, self).__init__(target_name)
+ test_type = None
+ if self.TEST_PATTERN.search(target_name):
+ test_type = self.NORMAL_TEST
+ config = cr.Config('DEFAULTS').From(
+ CR_TARGET=target_name,
+ CR_TARGET_NAME='{CR_TARGET}',
+ CR_BUILD_TARGET=cr.Config.Optional(
+ '{CR_TARGET}{CR_TARGET_SUFFIX}', '{CR_TARGET}'),
+ CR_RUN_ARGUMENTS='',
+ CR_TEST_TYPE=test_type,
+ CR_RUN_DEPENDENCIES=[],
+ )
+ self._data = cr.context.data
+ self.AddChildren(config, cr.context)
+ if hasattr(self, 'CONFIG'):
+ self.AddChild(self.CONFIG)
+ if not self.valid:
+ self.Set(CR_TARGET_SUFFIX='')
+ self.test_type = self.Find('CR_TEST_TYPE')
+ self.target_name = self.Find('CR_TARGET_NAME')
+
+ def GetRunDependencies(self):
+ return map(Target.CreateTarget, self.Get('CR_RUN_DEPENDENCIES'))
+
+ @property
+ def build_target(self):
+ return self.Get('CR_BUILD_TARGET')
+
+ @property
+ def valid(self):
+ return cr.Builder.IsTarget(self.build_target)
+
+ @property
+ def is_test(self):
+ return self.test_type and self.test_type != self.NOT_A_TEST
+
+ @classmethod
+ def AddArguments(cls, command, parser, allow_multiple=False):
+ nargs = '?'
+ help_string = 'The target to {0}'
+ if allow_multiple:
+ nargs = '*'
+ help_string = 'The target(s) to {0}'
+ parser.add_argument(
+ '_targets', metavar='target',
+ help=help_string.format(command.name),
+ nargs=nargs
+ )
+
+ @classmethod
+ def AllTargets(cls):
+ yield cls
+ for child in cls.__subclasses__():
+ for t in child.AllTargets():
+ yield t
+
+ @classmethod
+ def CreateTarget(cls, target_name):
+ """Attempts to build a target by name.
+
+ This searches the set of installed targets in priority order to see if any
+ of them are willing to handle the supplied name.
+ If a target cannot be found, the program will be aborted.
+ Args:
+ target_name: The name of the target we are searching for.
+ Returns:
+ The target that matched.
+ """
+ target_clses = sorted(
+ cls.AllTargets(),
+ key=operator.attrgetter('PRIORITY'),
+ reverse=True
+ )
+ for handler in target_clses:
+ target = handler.Build(target_name)
+ if target:
+ if not target.valid:
+ print 'Invalid target {0} as {1}'.format(
+ target_name, target.build_target)
+ guesses = cr.Builder.GuessTargets(target_name)
+ if guesses:
+ print 'Did you mean {0}?'.format(
+ ', '.join(guesses[:-1]) + ' or ' + guesses[-1]
+ if len(guesses) > 1 else guesses[0])
+ exit(1)
+ return target
+ print 'Unknown target {0}'.format(target_name)
+ exit(1)
+
+ @classmethod
+ def GetTargets(cls):
+ target_names = getattr(cr.context.args, '_targets', None)
+ if not target_names:
+ target_names = [cr.context.Get('CR_DEFAULT_TARGET')]
+ elif hasattr(target_names, 'swapcase'):
+ # deal with the single target case
+ target_names = [target_names]
+ return [cls.CreateTarget(target_name)
+ for target_name in target_names]
+
+ @classmethod
+ def Build(cls, target_name):
+ return cls(target_name)
+
+
+class NamedTarget(Target):
+ """A base class for explicit named targets.
+
+ Only matches a target if the name is an exact match.
+  Up its priority to come ahead of general-purpose rule matches.
+ """
+ NAME = None
+ PRIORITY = Target.PRIORITY + 1
+
+ @classmethod
+ def Build(cls, target_name):
+ try:
+ if target_name == cls.NAME:
+ return cls(target_name)
+ except AttributeError:
+ pass
+ return None
diff --git a/chromium/tools/cr/cr/visitor.py b/chromium/tools/cr/cr/visitor.py
new file mode 100644
index 00000000000..8e01c70b561
--- /dev/null
+++ b/chromium/tools/cr/cr/visitor.py
@@ -0,0 +1,260 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Visitor classes used to traverse node trees and resolve variables.
+
+"""
+import collections
+
+# HIDDEN is a marker used to suppress a value, making it as if it were not set
+# in that object. This causes the search to continue through the tree.
+# This is most useful as a return value of dynamic values that want to find
+# the value they are shadowing.
+HIDDEN = object()
+
+
+class VisitComplete(Exception):
+  """Indicates a visit traversal has finished early."""
+
+
+class Visitor(object):
+ """The base class for anything that wants to "visit" all variables.
+
+ The two main uses of visitor are search and export. They differ in that export
+ is trying to find all variables, whereas search is just looking for one.
+ """
+
+ def __init__(self):
+ self.stack = []
+
+ def VisitNode(self, node):
+ """Called for every node in the tree."""
+ if not node.enabled:
+ return self
+ try:
+ try:
+ self.stack.append(node)
+ self.StartNode()
+ # Visit all the values first
+ for key in self.KeysOf(node.values):
+ self.Visit(key, node.values[key])
+ # And now recurse into all the children
+ for child in node.children:
+ self.VisitNode(child)
+ finally:
+ self.EndNode()
+ self.stack.pop()
+ except VisitComplete:
+ if self.stack:
+ # propagate back up the stack
+ raise
+ return self
+
+ def Visit(self, key, value):
+ """Visit is called for every variable in each node."""
+
+ def StartNode(self):
+ """StartNode is called once for each node before traversal."""
+
+ def EndNode(self):
+    """EndNode is called once for each node after traversal."""
+
+ @property
+ def root_node(self):
+ """Returns the variable at the root of the current traversal."""
+ return self.stack[0]
+
+ @property
+ def current_node(self):
+ """Returns the node currently being scanned."""
+ return self.stack[-1]
+
+ def Resolve(self, key, value):
+ """Returns a fully substituted value.
+
+ This asks the root node to do the actual work.
+ Args:
+ key: The key being visited.
+ value: The unresolved value associated with the key.
+ Returns:
+ the fully resolved value.
+ """
+ return self.root_node.Resolve(self, key, value)
+
+ def Where(self):
+ """Returns the current traversal stack as a string."""
+ return '/'.join([entry.name for entry in self.stack])
+
+
+class SearchVisitor(Visitor):
+ """A Visitor that finds a single matching key."""
+
+ def __init__(self, key):
+ super(SearchVisitor, self).__init__()
+ self.key = key
+ self.found = False
+ self.error = None
+
+ def KeysOf(self, store):
+ if self.key in store:
+ yield self.key
+
+ def Visit(self, key, value):
+ value, error = self.Resolve(key, value)
+ if value is not HIDDEN:
+ self.found = True
+ self.value = value
+ self.error = error
+ raise VisitComplete()
+
+
+class WhereVisitor(SearchVisitor):
+ """A SearchVisitor that returns the path to the matching key."""
+
+ def Visit(self, key, value):
+ self.where = self.Where()
+ super(WhereVisitor, self).Visit(key, value)
+
+
+class ExportVisitor(Visitor):
+ """A visitor that builds a fully resolved map of all variables."""
+
+ def __init__(self, store):
+ super(ExportVisitor, self).__init__()
+ self.store = store
+
+ def KeysOf(self, store):
+ if self.current_node.export is False:
+ # not exporting from this config
+ return
+ for key in store.keys():
+ if key in self.store:
+ # duplicate
+ continue
+ if (self.current_node.export is None) and key.startswith('_'):
+ # non exported name
+ continue
+ yield key
+
+ def Visit(self, key, value):
+ value, _ = self.Resolve(key, value)
+ if value is not HIDDEN:
+ self.store[key] = value
+
+
+class Node(object):
+ """The base class for objects in a visitable node tree."""
+
+ def __init__(self, name='--', enabled=True, export=True):
+ self._name = name
+ self._children = collections.deque()
+ self._values = {}
+ self._viewers = []
+ self.trail = []
+ self._enabled = enabled
+ self._export = export
+ self._export_cache = None
+
+ @property
+ def name(self):
+ return self._name
+
+ @name.setter
+ def name(self, value):
+ self._name = value
+
+ @property
+ def enabled(self):
+ return self._enabled
+
+ @enabled.setter
+ def enabled(self, value):
+ if self._enabled == value:
+ return
+ self._enabled = value
+ self.NotifyChanged()
+
+ @property
+ def export(self):
+ return self._export
+
+ @property
+ def exported(self):
+ if self._export_cache is None:
+ self._export_cache = ExportVisitor({}).VisitNode(self).store
+ return self._export_cache
+
+ @property
+ def values(self):
+ return self._values
+
+ @property
+ def children(self):
+ return self._children
+
+ def RegisterViewer(self, viewer):
+ self._viewers.append(viewer)
+
+ def UnregisterViewer(self, viewer):
+ self._viewers.remove(viewer)
+
+ def OnChanged(self, child):
+ _ = child
+ self.NotifyChanged()
+
+ def NotifyChanged(self):
+ self._export_cache = None
+ for viewers in self._viewers:
+ viewers.OnChanged(self)
+
+ def _AddChild(self, child):
+ if child and child != self and child not in self._children:
+ self._children.appendleft(child)
+ child.RegisterViewer(self)
+
+ def AddChild(self, child):
+ self._AddChild(child)
+ self.NotifyChanged()
+ return self
+
+ def AddChildren(self, *children):
+ for child in children:
+ self._AddChild(child)
+ self.NotifyChanged()
+ return self
+
+ def Find(self, key):
+ search = SearchVisitor(key).VisitNode(self)
+ if not search.found:
+ return None
+ return search.value
+
+ def WhereIs(self, key):
+ search = WhereVisitor(key).VisitNode(self)
+ if not search.found:
+ return None
+ return search.where
+
+ def Get(self, key, raise_errors=False):
+ search = SearchVisitor(key).VisitNode(self)
+ if not search.found:
+ self.Missing(key)
+ if search.error and raise_errors:
+ raise search.error # bad type inference pylint: disable=raising-bad-type
+ return search.value
+
+ def Missing(self, key):
+ raise KeyError(key)
+
+ def Resolve(self, visitor, key, value):
+ _ = visitor, key
+ return value
+
+ def Wipe(self):
+ for child in self._children:
+ child.UnregisterViewer(self)
+ self._children = collections.deque()
+ self._values = {}
+ self.NotifyChanged()
+
diff --git a/chromium/tools/cr/main.py b/chromium/tools/cr/main.py
new file mode 100644
index 00000000000..c4b9b74e07d
--- /dev/null
+++ b/chromium/tools/cr/main.py
@@ -0,0 +1,95 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Chromium cr tool main module.
+
+Holds the main function and all its support code.
+"""
+
+import os
+import sys
+import cr
+
+_CONTACT = 'iancottrell@chromium.org'
+
+
+def Main():
+ """Chromium cr tool main function.
+
+ This is the main entry point of the cr tool, it finds and loads all the
+ plugins, creates the context and then activates and runs the specified
+ command.
+ """
+
+ # Add the users plugin dir to the cr.auto.user package scan
+ user_path = os.path.expanduser(os.path.join('~', '.config', 'cr'))
+ cr.auto.user.__path__.append(user_path)
+
+ cr.loader.Scan()
+
+ # Build the command context
+ with cr.base.context.Create(
+ description='The chrome dev build tool.',
+ epilog='Contact ' + _CONTACT + ' if you have issues with this tool.',
+ ) as context:
+
+ # Try to detect the current client information
+ cr.base.client.DetectClient()
+
+ # Install the sub-commands
+ for command in cr.Command.Plugins():
+ cr.context.AddSubParser(command)
+
+ # test for the special autocomplete command
+ if cr.context.autocompleting:
+ # After plugins are loaded so pylint: disable=g-import-not-at-top
+ cr.autocomplete.Complete()
+ return
+ # Speculative argument processing to add config specific args
+ cr.context.ParseArgs(True)
+ cr.plugin.Activate()
+ # At this point we should know what command we are going to use
+ command = cr.Command.GetActivePlugin()
+ # Do some early processing, in case it changes the build dir
+ if command:
+ command.EarlyArgProcessing()
+ # Update the activated set again, in case the early processing changed it
+ cr.plugin.Activate()
+ # Load the build specific configuration
+ found_build_dir = cr.base.client.LoadConfig()
+    # Final processing of arguments
+ cr.plugin.Activate()
+ cr.context.ParseArgs()
+ # If we did not get a command before, it might have been fixed.
+ if command is None:
+ command = cr.Command.GetActivePlugin()
+ # If the verbosity level is 3 or greater, then print the environment here
+ if cr.context.verbose >= 3:
+ cr.context.DumpValues(cr.context.verbose > 3)
+ if command is None:
+ print cr.context.Substitute('No command specified.')
+ exit(1)
+ if command.requires_build_dir:
+ if not found_build_dir:
+ if not cr.context.Find('CR_OUT_FULL'):
+ print cr.context.Substitute(
+ 'No build directory specified. Please use cr init to make one.')
+ else:
+ print cr.context.Substitute(
+ 'Build {CR_BUILD_DIR} not a valid build directory')
+ exit(1)
+ if cr.context.Find('CR_VERSION') != cr.base.client.VERSION:
+ print cr.context.Substitute(
+ 'Build {CR_BUILD_DIR} is for the wrong version of cr')
+ print 'Please run cr init to reset it'
+ exit(1)
+ cr.Platform.Prepare()
+ if cr.context.verbose >= 1:
+ print cr.context.Substitute(
+ 'Running cr ' + command.name + ' for {CR_BUILD_DIR}')
+ # Invoke the given command
+ command.Run()
+
+if __name__ == '__main__':
+ sys.exit(Main())
diff --git a/chromium/tools/cros/OWNERS b/chromium/tools/cros/OWNERS
new file mode 100644
index 00000000000..9dc4e192272
--- /dev/null
+++ b/chromium/tools/cros/OWNERS
@@ -0,0 +1,7 @@
+# For more information about telemetry development, please see:
+# http://dev.chromium.org/developers/telemetry/telemetry-feature-guidelines
+
+achuith@chromium.org
+tbarzic@chromium.org
+tengs@chromium.org
+zelidrag@chromium.org
diff --git a/chromium/tools/cros/bootstrap_deps b/chromium/tools/cros/bootstrap_deps
new file mode 100644
index 00000000000..f8e63101d62
--- /dev/null
+++ b/chromium/tools/cros/bootstrap_deps
@@ -0,0 +1,23 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file specifies dependencies required to bootstrap tools/perf. It is in a
+# minimal version of the format used by other DEPS files that gclient can read,
+# but it should only be used to bootstrap tools/perf *outside* of a normal
+# Chrome checkout.
+
+deps = {
+ "src/tools/cros/":
+ "https://src.chromium.org/chrome/trunk/src/tools/cros/",
+}
+
+# Both content/test/gpu and tools/perf will pull in telemetry.
+deps_includes = {
+ "src/content/test/gpu/bootstrap_deps":
+ "https://src.chromium.org/chrome/trunk/src/content/test/gpu/bootstrap_deps",
+ "src/tools/perf/bootstrap_deps":
+ "https://src.chromium.org/chrome/trunk/src/tools/perf/bootstrap_deps",
+ "src/chrome/browser/policy/test/bootstrap_deps":
+ "https://src.chromium.org/chrome/trunk/src/chrome/browser/policy/test/bootstrap_deps",
+}
diff --git a/chromium/tools/cygprofile/BUILD.gn b/chromium/tools/cygprofile/BUILD.gn
new file mode 100644
index 00000000000..c84654659ed
--- /dev/null
+++ b/chromium/tools/cygprofile/BUILD.gn
@@ -0,0 +1,39 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# GYP: //tools/cygprofile/cygprofile.gyp:cygprofile
+static_library("cygprofile") {
+ sources = [
+ "cygprofile.cc",
+ "cygprofile.h",
+ ]
+
+ configs -= [ "//build/config/android:default_cygprofile_instrumentation" ]
+ configs += [ "//build/config/android:no_cygprofile_instrumentation" ]
+
+ deps = [
+ # This adds uninstrumented symbols to the static library from base.
+ # These symbols are likely *not* to be used because there are many other
+ # duplicates in other objects/libraries.
+ "//base",
+ ]
+}
+
+# GYP: //tools/cygprofile/cygprofile.gyp:cygprofile_unittests
+executable("cygprofile_unittests") {
+ testonly = true
+
+ sources = [
+ "cygprofile_unittest.cc",
+ ]
+
+ configs -= [ "//build/config/android:default_cygprofile_instrumentation" ]
+ configs += [ "//build/config/android:no_cygprofile_instrumentation" ]
+
+ deps = [
+ ":cygprofile",
+ "//base",
+ "//testing/gtest",
+ ]
+}
diff --git a/chromium/tools/cygprofile/OWNERS b/chromium/tools/cygprofile/OWNERS
new file mode 100644
index 00000000000..3301555a7b8
--- /dev/null
+++ b/chromium/tools/cygprofile/OWNERS
@@ -0,0 +1,2 @@
+lizeb@chromium.org
+pasko@chromium.org
diff --git a/chromium/tools/cygprofile/PRESUBMIT.py b/chromium/tools/cygprofile/PRESUBMIT.py
new file mode 100644
index 00000000000..0b69bb9c7d8
--- /dev/null
+++ b/chromium/tools/cygprofile/PRESUBMIT.py
@@ -0,0 +1,34 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Top-level presubmit script for cygprofile.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details on the presubmit API built into depot_tools.
+"""
+
+
+def CommonChecks(input_api, output_api):
+ output = []
+ blacklist = []
+ output.extend(input_api.canned_checks.RunPylint(
+ input_api, output_api, black_list=blacklist))
+ output.extend(input_api.canned_checks.RunUnitTests(
+ input_api,
+ output_api,
+ [input_api.os_path.join(input_api.PresubmitLocalPath(), 'run_tests')]))
+
+ if input_api.is_committing:
+ output.extend(input_api.canned_checks.PanProjectChecks(input_api,
+ output_api,
+ owners_check=False))
+ return output
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ return CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ return CommonChecks(input_api, output_api)
diff --git a/chromium/tools/cygprofile/check_orderfile.py b/chromium/tools/cygprofile/check_orderfile.py
new file mode 100755
index 00000000000..0c34d8807b7
--- /dev/null
+++ b/chromium/tools/cygprofile/check_orderfile.py
@@ -0,0 +1,105 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Check that symbols are ordered into a binary as they appear in the orderfile.
+"""
+
+import logging
+import optparse
+import sys
+
+import cyglog_to_orderfile
+import cygprofile_utils
+import patch_orderfile
+import symbol_extractor
+
+
+_MAX_WARNINGS_TO_PRINT = 200
+
+
+def _IsSameMethod(name1, name2):
+ """Returns true if name1 or name2 are split method forms of the other."""
+ return patch_orderfile.RemoveSuffixes(name1) == \
+ patch_orderfile.RemoveSuffixes(name2)
+
+
+def _CountMisorderedSymbols(symbols, symbol_infos):
+ """Count the number of misordered symbols, and log them.
+
+ Args:
+ symbols: ordered sequence of symbols from the orderfile
+ symbol_infos: ordered list of SymbolInfo from the binary
+
+ Returns:
+ (misordered_pairs_count, matched_symbols_count, unmatched_symbols_count)
+ """
+ name_to_symbol_info = symbol_extractor.CreateNameToSymbolInfo(symbol_infos)
+ matched_symbol_infos = []
+ missing_count = 0
+ misordered_count = 0
+
+ # Find the SymbolInfo matching the orderfile symbols in the binary.
+ for symbol in symbols:
+ if symbol in name_to_symbol_info:
+ matched_symbol_infos.append(name_to_symbol_info[symbol])
+ else:
+ missing_count += 1
+ if missing_count < _MAX_WARNINGS_TO_PRINT:
+ logging.warning('Symbol "%s" is in the orderfile, not in the binary' %
+ symbol)
+ logging.info('%d matched symbols, %d un-matched (Only the first %d unmatched'
+ ' symbols are shown)' % (
+ len(matched_symbol_infos), missing_count,
+ _MAX_WARNINGS_TO_PRINT))
+
+ # In the order of the orderfile, find all the symbols that are at an offset
+ # smaller than their immediate predecessor, and record the pair.
+ previous_symbol_info = symbol_extractor.SymbolInfo(
+ name='', offset=-1, size=0, section='')
+ for symbol_info in matched_symbol_infos:
+ if symbol_info.offset < previous_symbol_info.offset and not (
+ _IsSameMethod(symbol_info.name, previous_symbol_info.name)):
+ logging.warning('Misordered pair: %s - %s' % (
+ str(previous_symbol_info), str(symbol_info)))
+ misordered_count += 1
+ previous_symbol_info = symbol_info
+ return (misordered_count, len(matched_symbol_infos), missing_count)
+
+
+def main():
+ parser = optparse.OptionParser(usage=
+ 'usage: %prog [options] <binary> <orderfile>')
+ parser.add_option('--target-arch', action='store', dest='arch',
+ choices=['arm', 'arm64', 'x86', 'x86_64', 'x64', 'mips'],
+ help='The target architecture for the binary.')
+ parser.add_option('--threshold', action='store', dest='threshold', default=0,
+ help='The maximum allowed number of out-of-order symbols.')
+ options, argv = parser.parse_args(sys.argv)
+ if not options.arch:
+ options.arch = cygprofile_utils.DetectArchitecture()
+ if len(argv) != 3:
+ parser.print_help()
+ return 1
+ (binary_filename, orderfile_filename) = argv[1:]
+
+ symbol_extractor.SetArchitecture(options.arch)
+ obj_dir = cygprofile_utils.GetObjDir(binary_filename)
+ symbol_to_sections_map = \
+ cyglog_to_orderfile.GetSymbolToSectionsMapFromObjectFiles(obj_dir)
+ section_to_symbols_map = cygprofile_utils.InvertMapping(
+ symbol_to_sections_map)
+ symbols = patch_orderfile.GetSymbolsFromOrderfile(orderfile_filename,
+ section_to_symbols_map)
+ symbol_infos = symbol_extractor.SymbolInfosFromBinary(binary_filename)
+ # Missing symbols is not an error since some of them can be eliminated through
+ # inlining.
+ (misordered_pairs_count, matched_symbols, _) = _CountMisorderedSymbols(
+ symbols, symbol_infos)
+ return (misordered_pairs_count > options.threshold) or (matched_symbols == 0)
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.INFO)
+ sys.exit(main())
diff --git a/chromium/tools/cygprofile/check_orderfile_unittest.py b/chromium/tools/cygprofile/check_orderfile_unittest.py
new file mode 100755
index 00000000000..644b9c330dc
--- /dev/null
+++ b/chromium/tools/cygprofile/check_orderfile_unittest.py
@@ -0,0 +1,46 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import check_orderfile
+import symbol_extractor
+
+
+class TestCheckOrderFile(unittest.TestCase):
+ _SYMBOL_INFOS = [symbol_extractor.SymbolInfo('first', 0x1, 0, ''),
+ symbol_extractor.SymbolInfo('second', 0x2, 0, ''),
+ symbol_extractor.SymbolInfo('notProfiled', 0x4, 0, ''),
+ symbol_extractor.SymbolInfo('third', 0x3, 0, ''),]
+
+ def testMatchesSymbols(self):
+ symbols = ['first', 'second', 'third']
+ (misordered_pairs_count, matched_count, missing_count) = (
+ check_orderfile._CountMisorderedSymbols(symbols, self._SYMBOL_INFOS))
+ self.assertEquals(
+ (misordered_pairs_count, matched_count, missing_count), (0, 3, 0))
+
+ def testMissingMatches(self):
+ symbols = ['second', 'third', 'other', 'first']
+ (_, matched_count, unmatched_count) = (
+ check_orderfile._CountMisorderedSymbols(symbols, self._SYMBOL_INFOS))
+ self.assertEquals(matched_count, 3)
+ self.assertEquals(unmatched_count, 1)
+
+ def testNoUnorderedSymbols(self):
+ symbols = ['first', 'other', 'second', 'third', 'noMatchEither']
+ (misordered_pairs_count, _, _) = (
+ check_orderfile._CountMisorderedSymbols(symbols, self._SYMBOL_INFOS))
+ self.assertEquals(misordered_pairs_count, 0)
+
+ def testUnorderedSymbols(self):
+ symbols = ['first', 'other', 'third', 'second', 'noMatchEither']
+ (misordered_pairs_count, _, _) = (
+ check_orderfile._CountMisorderedSymbols(symbols, self._SYMBOL_INFOS))
+ self.assertEquals(misordered_pairs_count, 1)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/cygprofile/cyglog_to_orderfile.py b/chromium/tools/cygprofile/cyglog_to_orderfile.py
new file mode 100755
index 00000000000..bc382f60e66
--- /dev/null
+++ b/chromium/tools/cygprofile/cyglog_to_orderfile.py
@@ -0,0 +1,294 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Symbolizes a log file produced by cygprofile instrumentation.
+
+Given a log file and the binary being profiled, creates an orderfile.
+"""
+
+import logging
+import multiprocessing
+import optparse
+import os
+import re
+import string
+import sys
+import tempfile
+
+import cygprofile_utils
+import symbol_extractor
+
+
+def _ParseLogLines(log_file_lines):
+ """Parses a merged cyglog produced by mergetraces.py.
+
+ Args:
+ log_file_lines: array of lines in log file produced by profiled run
+
+ Below is an example of a small log file:
+ 5086e000-52e92000 r-xp 00000000 b3:02 51276 libchromeview.so
+ secs usecs pid:threadid func
+ START
+ 1314897086 795828 3587:1074648168 0x509e105c
+ 1314897086 795874 3587:1074648168 0x509e0eb4
+ 1314897086 796326 3587:1074648168 0x509e0e3c
+ 1314897086 796552 3587:1074648168 0x509e07bc
+ END
+
+ Returns:
+ An ordered list of callee offsets.
+ """
+ call_lines = []
+ vm_start = 0
+ line = log_file_lines[0]
+ assert 'r-xp' in line
+ end_index = line.find('-')
+ vm_start = int(line[:end_index], 16)
+ for line in log_file_lines[3:]:
+ fields = line.split()
+ if len(fields) == 4:
+ call_lines.append(fields)
+ else:
+ assert fields[0] == 'END'
+ # Convert strings to int in fields.
+ call_info = []
+ for call_line in call_lines:
+ addr = int(call_line[3], 16)
+ if vm_start < addr:
+ addr -= vm_start
+ call_info.append(addr)
+ return call_info
+
+
+def _GroupLibrarySymbolInfosByOffset(lib_filename):
+ """Returns a dict {offset: [SymbolInfo]} from a library."""
+ symbol_infos = symbol_extractor.SymbolInfosFromBinary(lib_filename)
+ return symbol_extractor.GroupSymbolInfosByOffset(symbol_infos)
+
+
+class SymbolNotFoundException(Exception):
+ def __init__(self, value):
+ super(SymbolNotFoundException, self).__init__(value)
+ self.value = value
+
+ def __str__(self):
+ return repr(self.value)
+
+
+def _FindSymbolInfosAtOffset(offset_to_symbol_infos, offset):
+ """Finds all SymbolInfo at a given offset.
+
+ Args:
+ offset_to_symbol_infos: {offset: [SymbolInfo]}
+ offset: offset to look the symbols at
+
+ Returns:
+ The list of SymbolInfo at the given offset
+
+ Raises:
+ SymbolNotFoundException if the offset doesn't match any symbol.
+ """
+ if offset in offset_to_symbol_infos:
+ return offset_to_symbol_infos[offset]
+ elif offset % 2 and (offset - 1) in offset_to_symbol_infos:
+ # On ARM, odd addresses are used to signal thumb instruction. They are
+ # generated by setting the LSB to 1 (see
+ # http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0471e/Babfjhia.html).
+ # TODO(lizeb): Make sure this hack doesn't propagate to other archs.
+ return offset_to_symbol_infos[offset - 1]
+ else:
+ raise SymbolNotFoundException(offset)
+
+
+def _GetObjectFileNames(obj_dir):
+ """Returns the list of object files in a directory."""
+ obj_files = []
+ for (dirpath, _, filenames) in os.walk(obj_dir):
+ for file_name in filenames:
+ if file_name.endswith('.o'):
+ obj_files.append(os.path.join(dirpath, file_name))
+ return obj_files
+
+
+def _AllSymbolInfos(object_filenames):
+ """Returns a list of SymbolInfo from an iterable of filenames."""
+ pool = multiprocessing.Pool()
+ # Hopefully the object files are in the page cache at this step, so IO should
+ # not be a problem (hence no concurrency limit on the pool).
+ symbol_infos_nested = pool.map(
+ symbol_extractor.SymbolInfosFromBinary, object_filenames)
+ result = []
+ for symbol_infos in symbol_infos_nested:
+ result += symbol_infos
+ return result
+
+
+def _SameCtorOrDtorNames(symbol1, symbol2):
+ """Returns True if two symbols refer to the same constructor or destructor.
+
+ The Itanium C++ ABI specifies dual constructor and destructor
+  emission (section 5.1.4.3):
+ https://refspecs.linuxbase.org/cxxabi-1.83.html#mangling-special
+ To avoid fully parsing all mangled symbols, a heuristic is used with c++filt.
+
+ Note: some compilers may name generated copies differently. If this becomes
+ an issue this heuristic will need to be updated.
+ """
+ # Check if this is the understood case of constructor/destructor
+ # signatures. GCC emits up to three types of constructor/destructors:
+ # complete, base, and allocating. If they're all the same they'll
+ # get folded together.
+ return (re.search('(C[123]|D[012])E', symbol1) and
+ symbol_extractor.DemangleSymbol(symbol1) ==
+ symbol_extractor.DemangleSymbol(symbol2))
+
+
+def GetSymbolToSectionsMapFromObjectFiles(obj_dir):
+ """Scans object files to create a {symbol: linker section(s)} map.
+
+ Args:
+ obj_dir: The root of the output object file directory, which will be
+ scanned for .o files to form the mapping.
+
+ Returns:
+ A map {symbol_name: [section_name1, section_name2...]}
+ """
+ object_files = _GetObjectFileNames(obj_dir)
+ symbol_to_sections_map = {}
+ symbol_warnings = cygprofile_utils.WarningCollector(300)
+ symbol_infos = _AllSymbolInfos(object_files)
+ for symbol_info in symbol_infos:
+ symbol = symbol_info.name
+ if symbol.startswith('.LTHUNK'):
+ continue
+ section = symbol_info.section
+ if ((symbol in symbol_to_sections_map) and
+ (symbol_info.section not in symbol_to_sections_map[symbol])):
+ symbol_to_sections_map[symbol].append(section)
+
+ if not _SameCtorOrDtorNames(
+ symbol, symbol_to_sections_map[symbol][0].lstrip('.text.')):
+ symbol_warnings.Write('Symbol ' + symbol +
+ ' unexpectedly in more than one section: ' +
+ ', '.join(symbol_to_sections_map[symbol]))
+ elif not section.startswith('.text.'):
+ symbol_warnings.Write('Symbol ' + symbol +
+ ' in incorrect section ' + section)
+ else:
+ # In most cases we expect just one item in this list, and maybe 4 or so in
+ # the worst case.
+ symbol_to_sections_map[symbol] = [section]
+ symbol_warnings.WriteEnd('bad sections')
+ return symbol_to_sections_map
+
+
+def _WarnAboutDuplicates(offsets):
+ """Warns about duplicate offsets.
+
+ Args:
+ offsets: list of offsets to check for duplicates
+
+ Returns:
+ True if there are no duplicates, False otherwise.
+ """
+ seen_offsets = set()
+ ok = True
+ for offset in offsets:
+ if offset not in seen_offsets:
+ seen_offsets.add(offset)
+ else:
+ ok = False
+ logging.warning('Duplicate offset: ' + hex(offset))
+ return ok
+
+
+def _OutputOrderfile(offsets, offset_to_symbol_infos, symbol_to_sections_map,
+ output_file):
+ """Outputs the orderfile to output_file.
+
+ Args:
+ offsets: Iterable of offsets to match to section names
+ offset_to_symbol_infos: {offset: [SymbolInfo]}
+ symbol_to_sections_map: {name: [section1, section2]}
+ output_file: file-like object to write the results to
+
+ Returns:
+ True if all symbols were found in the library.
+ """
+ success = True
+ unknown_symbol_warnings = cygprofile_utils.WarningCollector(300)
+ symbol_not_found_errors = cygprofile_utils.WarningCollector(
+ 300, level=logging.ERROR)
+ output_sections = set()
+ for offset in offsets:
+ try:
+ symbol_infos = _FindSymbolInfosAtOffset(offset_to_symbol_infos, offset)
+ for symbol_info in symbol_infos:
+ if symbol_info.name in symbol_to_sections_map:
+ sections = symbol_to_sections_map[symbol_info.name]
+ for section in sections:
+ if not section in output_sections:
+ output_file.write(section + '\n')
+ output_sections.add(section)
+ else:
+ unknown_symbol_warnings.Write(
+ 'No known section for symbol ' + symbol_info.name)
+ except SymbolNotFoundException:
+ symbol_not_found_errors.Write(
+ 'Did not find function in binary. offset: ' + hex(offset))
+ success = False
+ unknown_symbol_warnings.WriteEnd('no known section for symbol.')
+ symbol_not_found_errors.WriteEnd('symbol not found in the binary.')
+ return success
+
+
+def main():
+ parser = optparse.OptionParser(usage=
+ 'usage: %prog [options] <merged_cyglog> <library> <output_filename>')
+ parser.add_option('--target-arch', action='store', dest='arch',
+ choices=['arm', 'arm64', 'x86', 'x86_64', 'x64', 'mips'],
+ help='The target architecture for libchrome.so')
+ options, argv = parser.parse_args(sys.argv)
+ if not options.arch:
+ options.arch = cygprofile_utils.DetectArchitecture()
+ if len(argv) != 4:
+ parser.print_help()
+ return 1
+ (log_filename, lib_filename, output_filename) = argv[1:]
+ symbol_extractor.SetArchitecture(options.arch)
+
+ obj_dir = cygprofile_utils.GetObjDir(lib_filename)
+
+ log_file_lines = map(string.rstrip, open(log_filename).readlines())
+ offsets = _ParseLogLines(log_file_lines)
+ _WarnAboutDuplicates(offsets)
+
+ offset_to_symbol_infos = _GroupLibrarySymbolInfosByOffset(lib_filename)
+ symbol_to_sections_map = GetSymbolToSectionsMapFromObjectFiles(obj_dir)
+
+ success = False
+ temp_filename = None
+ output_file = None
+ try:
+ (fd, temp_filename) = tempfile.mkstemp(dir=os.path.dirname(output_filename))
+ output_file = os.fdopen(fd, 'w')
+ ok = _OutputOrderfile(
+ offsets, offset_to_symbol_infos, symbol_to_sections_map, output_file)
+ output_file.close()
+ os.rename(temp_filename, output_filename)
+ temp_filename = None
+ success = ok
+ finally:
+ if output_file:
+ output_file.close()
+ if temp_filename:
+ os.remove(temp_filename)
+
+ return 0 if success else 1
+
+
+if __name__ == '__main__':
+ logging.basicConfig(level=logging.INFO)
+ sys.exit(main())
diff --git a/chromium/tools/cygprofile/cyglog_to_orderfile_unittest.py b/chromium/tools/cygprofile/cyglog_to_orderfile_unittest.py
new file mode 100755
index 00000000000..9ea0d182107
--- /dev/null
+++ b/chromium/tools/cygprofile/cyglog_to_orderfile_unittest.py
@@ -0,0 +1,113 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import cyglog_to_orderfile
+import os
+import symbol_extractor
+import sys
+
+sys.path.insert(
+ 0, os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
+ 'third_party', 'android_platform', 'development',
+ 'scripts'))
+import symbol
+
+
+class TestCyglogToOrderfile(unittest.TestCase):
+ def testParseLogLines(self):
+ lines = """5086e000-52e92000 r-xp 00000000 b3:02 51276 libchromeview.so
+secs usecs pid:threadid func
+START
+1314897086 795828 3587:1074648168 0x509e105c
+1314897086 795874 3587:1074648168 0x509e0eb4
+END""".split('\n')
+ offsets = cyglog_to_orderfile._ParseLogLines(lines)
+ self.assertListEqual(
+ offsets, [0x509e105c - 0x5086e000, 0x509e0eb4 - 0x5086e000])
+
+ def testFindSymbolInfosAtOffsetExactMatch(self):
+ offset_map = {0x10: [symbol_extractor.SymbolInfo(
+ name='Symbol', offset=0x10, size=0x13, section='.text')]}
+ functions = cyglog_to_orderfile._FindSymbolInfosAtOffset(offset_map, 0x10)
+ self.assertEquals(len(functions), 1)
+ self.assertEquals(functions[0], offset_map[0x10][0])
+
+ def testFindSymbolInfosAtOffsetInexactMatch(self):
+ offset_map = {0x10: [symbol_extractor.SymbolInfo(
+ name='Symbol', offset=0x10, size=0x13, section='.text')]}
+ functions = cyglog_to_orderfile._FindSymbolInfosAtOffset(offset_map, 0x11)
+ self.assertEquals(len(functions), 1)
+ self.assertEquals(functions[0], offset_map[0x10][0])
+
+ def testFindSymbolInfosAtOffsetNoMatch(self):
+ offset_map = {0x10: [symbol_extractor.SymbolInfo(
+ name='Symbol', offset=0x10, size=0x13, section='.text')]}
+ self.assertRaises(
+ cyglog_to_orderfile.SymbolNotFoundException,
+ cyglog_to_orderfile._FindSymbolInfosAtOffset, offset_map, 0x12)
+
+ def testWarnAboutDuplicates(self):
+ offsets = [0x1, 0x2, 0x3]
+ self.assertTrue(cyglog_to_orderfile._WarnAboutDuplicates(offsets))
+ offsets.append(0x1)
+ self.assertFalse(cyglog_to_orderfile._WarnAboutDuplicates(offsets))
+
+ def testSameCtorOrDtorNames(self):
+ if not os.path.exists(symbol.ToolPath('c++filt')):
+ print 'Skipping test dependent on missing c++filt binary.'
+ return
+ self.assertTrue(cyglog_to_orderfile._SameCtorOrDtorNames(
+ '_ZNSt3__119istreambuf_iteratorIcNS_11char_traitsIcEEEC1Ev',
+ '_ZNSt3__119istreambuf_iteratorIcNS_11char_traitsIcEEEC2Ev'))
+ self.assertTrue(cyglog_to_orderfile._SameCtorOrDtorNames(
+ '_ZNSt3__119istreambuf_iteratorIcNS_11char_traitsIcEEED1Ev',
+ '_ZNSt3__119istreambuf_iteratorIcNS_11char_traitsIcEEED2Ev'))
+ self.assertFalse(cyglog_to_orderfile._SameCtorOrDtorNames(
+ '_ZNSt3__119istreambuf_iteratorIcNS_11char_traitsIcEEEC1Ev',
+ '_ZNSt3__119foo_iteratorIcNS_11char_traitsIcEEEC1Ev'))
+ self.assertFalse(cyglog_to_orderfile._SameCtorOrDtorNames(
+ '_ZNSt3__119istreambuf_iteratorIcNS_11char_traitsIcEEE',
+ '_ZNSt3__119istreambuf_iteratorIcNS_11char_traitsIcEEE'))
+
+ def testOutputOrderfile(self):
+ class FakeOutputFile(object):
+ def __init__(self):
+ self.writes = []
+
+ def write(self, data):
+ self.writes.append(data)
+
+ # One symbol not matched, one with an odd address, one regularly matched
+ # And two symbols aliased to the same address
+ offsets = [0x12, 0x17]
+ offset_to_symbol_infos = {
+ 0x10: [symbol_extractor.SymbolInfo(
+ name='Symbol', offset=0x10, size=0x13, section='dummy')],
+ 0x12: [symbol_extractor.SymbolInfo(
+ name='Symbol2', offset=0x12, size=0x13, section='dummy')],
+ 0x16: [symbol_extractor.SymbolInfo(
+ name='Symbol3', offset=0x16, size=0x13, section='dummy'),
+ symbol_extractor.SymbolInfo(
+ name='Symbol32', offset=0x16, size=0x13, section='dummy'),]}
+ symbol_to_sections_map = {
+ 'Symbol': ['.text.Symbol'],
+ 'Symbol2': ['.text.Symbol2', '.text.hot.Symbol2'],
+ 'Symbol3': ['.text.Symbol3'],
+ 'Symbol32': ['.text.Symbol32']}
+ fake_output = FakeOutputFile()
+ cyglog_to_orderfile._OutputOrderfile(
+ offsets, offset_to_symbol_infos, symbol_to_sections_map, fake_output)
+ expected = """.text.Symbol2
+.text.hot.Symbol2
+.text.Symbol3
+.text.Symbol32
+"""
+ self.assertEquals(expected, ''.join(fake_output.writes))
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/cygprofile/cygprofile.cc b/chromium/tools/cygprofile/cygprofile.cc
new file mode 100644
index 00000000000..d699c3e355b
--- /dev/null
+++ b/chromium/tools/cygprofile/cygprofile.cc
@@ -0,0 +1,376 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/cygprofile/cygprofile.h"
+
+#include <fcntl.h>
+#include <pthread.h>
+#include <stddef.h>
+#include <stdint.h>
+#include <sys/stat.h>
+#include <sys/syscall.h>
+#include <sys/time.h>
+#include <sys/types.h>
+
+#include <cstdio>
+#include <fstream>
+#include <string>
+#include <vector>
+
+#include "base/bind.h"
+#include "base/containers/hash_tables.h"
+#include "base/files/scoped_file.h"
+#include "base/lazy_instance.h"
+#include "base/logging.h"
+#include "base/macros.h"
+#include "base/memory/ptr_util.h"
+#include "base/stl_util.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/string_piece.h"
+#include "base/strings/stringprintf.h"
+#include "base/synchronization/lock.h"
+
+namespace cygprofile {
+namespace {
+
+// Allow 8 MBytes of data for each thread log.
+const size_t kMaxBufferSize = 8 * 1024 * 1024 / sizeof(LogEntry);
+
+// Have the background internal thread do its flush every 15 sec.
+const int kFlushThreadIdleTimeSec = 15;
+
+const char kLogFileNamePrefix[] = "/data/local/tmp/chrome/cyglog/";
+
+// "cyglog.PID.LWP.PPID"
+const char kLogFilenameFormat[] = "%scyglog.%d.%d-%d";
+
+// Magic value of above to prevent instrumentation. Used when ThreadLog is being
+// constructed (to prevent reentering by malloc, for example) and by the flush
+// log thread (to prevent it from being logged).
+ThreadLog* const kMagicBeingConstructed = reinterpret_cast<ThreadLog*>(1);
+
+// Per-thread pointer to the current log object.
+static __thread ThreadLog* g_tls_log = NULL;
+
+// Returns light-weight process ID. On Linux, this is a system-wide unique
+// thread id.
+pid_t GetTID() {
+ return syscall(__NR_gettid);
+}
+
+timespec GetCurrentTime() {
+ timespec timestamp;
+ clock_gettime(CLOCK_MONOTONIC, &timestamp);
+ return timestamp;
+}
+
+// Sleeps for |sec| seconds.
+void SleepSec(int sec) {
+ for (int secs_to_sleep = sec; secs_to_sleep != 0;)
+ secs_to_sleep = sleep(secs_to_sleep);
+}
+
+// Exposes the string header that will appear at the top of every trace file.
+// This string contains memory mapping information for the mapped
+// library/executable which is used offline during symbolization. Note that
+// this class is meant to be instantiated once per process and lazily (during
+// the first flush).
+struct ImmutableFileHeaderLine {
+ ImmutableFileHeaderLine() : value(MakeFileHeaderLine()) {}
+
+ const std::string value;
+
+ private:
+ // Returns whether the integer representation of the hexadecimal address
+ // stored in |line| at position |start_offset| was successfully stored in
+ // |result|.
+ static bool ParseAddress(const std::string& line,
+ size_t start_offset,
+ size_t length,
+ uint64_t* result) {
+ if (start_offset >= line.length())
+ return false;
+
+ uint64_t address;
+ const bool ret = HexStringToUInt64(
+ base::StringPiece(line.c_str() + start_offset, length), &address);
+ if (!ret)
+ return false;
+
+ *result = address;
+ return true;
+ }
+
+ // Parses /proc/self/maps and returns a two line string such as:
+ // 758c6000-79f4b000 r-xp 00000000 b3:17 309475 libchrome.2009.0.so
+ // secs usecs pid:threadid func
+ static std::string MakeFileHeaderLine() {
+ std::ifstream mapsfile("/proc/self/maps");
+ CHECK(mapsfile.good());
+ std::string result;
+
+ for (std::string line; std::getline(mapsfile, line); ) {
+ if (line.find("r-xp") == std::string::npos)
+ continue;
+
+ const size_t address_length = line.find('-');
+ uint64_t start_address = 0;
+ CHECK(ParseAddress(line, 0, address_length, &start_address));
+
+ uint64_t end_address = 0;
+ CHECK(ParseAddress(line, address_length + 1, address_length,
+ &end_address));
+
+ const uintptr_t current_func_addr = reinterpret_cast<uintptr_t>(
+ &MakeFileHeaderLine);
+ if (current_func_addr >= start_address &&
+ current_func_addr < end_address) {
+ result.swap(line);
+ break;
+ }
+ }
+ CHECK(!result.empty());
+ result.append("\nsecs\tusecs\tpid:threadid\tfunc\n");
+ return result;
+ }
+};
+
+base::LazyInstance<ThreadLogsManager>::Leaky g_logs_manager =
+ LAZY_INSTANCE_INITIALIZER;
+
+base::LazyInstance<ImmutableFileHeaderLine>::Leaky g_file_header_line =
+ LAZY_INSTANCE_INITIALIZER;
+
+} // namespace
+
+// Custom thread implementation that joins on destruction. Note that
+// base::Thread has non-trivial dependencies on e.g. AtExitManager which makes
+// it hard to use it early.
+class Thread {
+ public:
+ Thread(const base::Closure& thread_callback)
+ : thread_callback_(thread_callback) {
+ CHECK_EQ(0, pthread_create(&handle_, NULL, &Thread::EntryPoint, this));
+ }
+
+ ~Thread() {
+ CHECK_EQ(0, pthread_join(handle_, NULL));
+ }
+
+ private:
+ static void* EntryPoint(void* data) {
+ // Disable logging on this thread. Although this routine is not instrumented
+ // (cygprofile.gyp provides that), the called routines are and thus will
+ // call instrumentation.
+ CHECK(g_tls_log == NULL); // Must be 0 as this is a new thread.
+ g_tls_log = kMagicBeingConstructed;
+
+ Thread* const instance = reinterpret_cast<Thread*>(data);
+ instance->thread_callback_.Run();
+ return NULL;
+ }
+
+ const base::Closure thread_callback_;
+ pthread_t handle_;
+
+ DISALLOW_COPY_AND_ASSIGN(Thread);
+};
+
+// Single log entry recorded for each function call.
+LogEntry::LogEntry(const void* address)
+ : time(GetCurrentTime()),
+ pid(getpid()),
+ tid(GetTID()),
+ address(address) {
+}
+
+ThreadLog::ThreadLog()
+ : tid_(GetTID()),
+ in_use_(false),
+ flush_callback_(
+ base::Bind(&ThreadLog::FlushInternal, base::Unretained(this))) {
+}
+
+ThreadLog::ThreadLog(const FlushCallback& flush_callback)
+ : tid_(GetTID()),
+ in_use_(false),
+ flush_callback_(flush_callback) {
+}
+
+ThreadLog::~ThreadLog() {
+ g_tls_log = NULL;
+}
+
+void ThreadLog::AddEntry(void* address) {
+ if (in_use_)
+ return;
+ in_use_ = true;
+
+ CHECK_EQ(tid_, GetTID());
+ const std::pair<base::hash_set<void*>::iterator, bool> pair =
+ called_functions_.insert(address);
+ const bool did_insert = pair.second;
+
+ if (did_insert) {
+ base::AutoLock auto_lock(lock_);
+ entries_.push_back(LogEntry(address));
+ // Crash in a quickly understandable way instead of crashing (or maybe not
+ // though) due to OOM.
+ CHECK_LE(entries_.size(), kMaxBufferSize);
+ }
+
+ in_use_ = false;
+}
+
+void ThreadLog::TakeEntries(std::vector<LogEntry>* destination) {
+ base::AutoLock auto_lock(lock_);
+ destination->swap(entries_);
+ STLClearObject(&entries_);
+}
+
+void ThreadLog::Flush(std::vector<LogEntry>* entries) const {
+ flush_callback_.Run(entries);
+}
+
+void ThreadLog::FlushInternal(std::vector<LogEntry>* entries) const {
+ const std::string log_filename(
+ base::StringPrintf(
+ kLogFilenameFormat, kLogFileNamePrefix, getpid(), tid_, getppid()));
+ const base::ScopedFILE file(fopen(log_filename.c_str(), "a"));
+ CHECK(file.get());
+
+ const long offset = ftell(file.get());
+ if (offset == 0)
+ fprintf(file.get(), "%s", g_file_header_line.Get().value.c_str());
+
+ for (std::vector<LogEntry>::const_iterator it = entries->begin();
+ it != entries->end(); ++it) {
+ fprintf(file.get(), "%ld %ld\t%d:%d\t%p\n", it->time.tv_sec,
+ it->time.tv_nsec / 1000, it->pid, it->tid, it->address);
+ }
+
+ STLClearObject(entries);
+}
+
+ThreadLogsManager::ThreadLogsManager()
+ : wait_callback_(base::Bind(&SleepSec, kFlushThreadIdleTimeSec)) {
+}
+
+ThreadLogsManager::ThreadLogsManager(const base::Closure& wait_callback,
+ const base::Closure& notify_callback)
+
+ : wait_callback_(wait_callback),
+ notify_callback_(notify_callback) {
+}
+
+ThreadLogsManager::~ThreadLogsManager() {
+ // Note that the internal thread does some work until it sees |flush_thread_|
+ // = NULL.
+ std::unique_ptr<Thread> flush_thread;
+ {
+ base::AutoLock auto_lock(lock_);
+ flush_thread_.swap(flush_thread);
+ }
+ flush_thread.reset(); // Joins the flush thread.
+
+ STLDeleteContainerPointers(logs_.begin(), logs_.end());
+}
+
+void ThreadLogsManager::AddLog(std::unique_ptr<ThreadLog> new_log) {
+ base::AutoLock auto_lock(lock_);
+
+ if (logs_.empty())
+ StartInternalFlushThread_Locked();
+
+ logs_.push_back(new_log.release());
+}
+
+void ThreadLogsManager::StartInternalFlushThread_Locked() {
+ lock_.AssertAcquired();
+ CHECK(!flush_thread_);
+ // Note that the |flush_thread_| joins at destruction which guarantees that it
+ // will never outlive |this|, i.e. it's safe not to use ref-counting.
+ flush_thread_.reset(
+ new Thread(base::Bind(&ThreadLogsManager::FlushAllLogsOnFlushThread,
+ base::Unretained(this))));
+}
+
+// Type used below for flushing.
+struct LogData {
+ LogData(ThreadLog* thread_log) : thread_log(thread_log) {}
+
+ ThreadLog* const thread_log;
+ std::vector<LogEntry> entries;
+};
+
+void ThreadLogsManager::FlushAllLogsOnFlushThread() {
+ while (true) {
+ {
+ base::AutoLock auto_lock(lock_);
+ // The |flush_thread_| field is reset during destruction.
+ if (!flush_thread_)
+ return;
+ }
+ // Sleep for a few secs and then flush all thread's buffers. There is a
+ // danger that, when quitting Chrome, this thread may see unallocated data
+ // and segfault. We do not care because we need logs when Chrome is working.
+ wait_callback_.Run();
+
+ // Copy the ThreadLog pointers to avoid acquiring both the logs manager's
+ // lock and the one for individual thread logs.
+ std::vector<ThreadLog*> thread_logs_copy;
+ {
+ base::AutoLock auto_lock(lock_);
+ thread_logs_copy = logs_;
+ }
+
+ // Move the logs' data before flushing them so that the mutexes are not
+ // acquired for too long.
+ std::vector<LogData> logs;
+ for (std::vector<ThreadLog*>::const_iterator it =
+ thread_logs_copy.begin();
+ it != thread_logs_copy.end(); ++it) {
+ ThreadLog* const thread_log = *it;
+ LogData log_data(thread_log);
+ logs.push_back(log_data);
+ thread_log->TakeEntries(&logs.back().entries);
+ }
+
+ for (std::vector<LogData>::iterator it = logs.begin();
+ it != logs.end(); ++it) {
+ if (!it->entries.empty())
+ it->thread_log->Flush(&it->entries);
+ }
+
+ if (!notify_callback_.is_null())
+ notify_callback_.Run();
+ }
+}
+
+extern "C" {
+
+// The GCC compiler callbacks, called on every function invocation providing
+// addresses of caller and callee codes.
+void __cyg_profile_func_enter(void* this_fn, void* call_site)
+ __attribute__((no_instrument_function));
+void __cyg_profile_func_exit(void* this_fn, void* call_site)
+ __attribute__((no_instrument_function));
+
+void __cyg_profile_func_enter(void* this_fn, void* callee_unused) {
+ if (g_tls_log == NULL) {
+ g_tls_log = kMagicBeingConstructed;
+ ThreadLog* new_log = new ThreadLog();
+ CHECK(new_log);
+ g_logs_manager.Pointer()->AddLog(base::WrapUnique(new_log));
+ g_tls_log = new_log;
+ }
+
+ if (g_tls_log != kMagicBeingConstructed)
+ g_tls_log->AddEntry(this_fn);
+}
+
+void __cyg_profile_func_exit(void* this_fn, void* call_site) {}
+
+} // extern "C"
+} // namespace cygprofile
diff --git a/chromium/tools/cygprofile/cygprofile.h b/chromium/tools/cygprofile/cygprofile.h
new file mode 100644
index 00000000000..439f1e52cbc
--- /dev/null
+++ b/chromium/tools/cygprofile/cygprofile.h
@@ -0,0 +1,166 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// Tool to log the execution of the process (Chrome). Writes logs containing
+// time and address of the callback being called for the first time.
+//
+// For performance reasons logs are buffered. Every thread has its own buffer
+// and log file so the contention between threads is minimal. As a side-effect,
+// functions called might be mentioned in many thread logs.
+//
+// A special thread is created in the process to periodically flush logs for all
+// threads in case the thread had stopped before flushing its logs.
+//
+// Also note that the instrumentation code is self-activated. It begins to
+// record the log data when it is called first, including the run-time startup.
+// Have it in mind when modifying it, in particular do not use global objects
+// with constructors as they are called during startup (too late for us).
+
+#ifndef TOOLS_CYGPROFILE_CYGPROFILE_H_
+#define TOOLS_CYGPROFILE_CYGPROFILE_H_
+
+#include <sys/time.h>
+#include <sys/types.h>
+
+#include <memory>
+#include <vector>
+
+#include "base/callback.h"
+#include "base/containers/hash_tables.h"
+#include "base/macros.h"
+#include "base/synchronization/lock.h"
+#include "build/build_config.h"
+
+#if !defined(OS_ANDROID)
+// This is only supported on Android thanks to the fact that on Android
+// processes (other than the system's zygote) don't fork.
+//
+// To make cygprofile truly work (i.e. without any deadlock) on Chrome
+// platforms that use fork(), cygprofile.cc should be written in a way that
+// guarantees that:
+// - No lock is acquired by a foreign thread during fork(). In particular this
+// means that cygprofile.cc should not perform any heap allocation (since heap
+// allocators, including TCMalloc generally use locks).
+// - Only cygprofile.cc uses pthread_atfork() in the whole process. Unlike POSIX
+// signals, pthread_atfork() doesn't provide a way to install multiple handlers.
+// Calling pthread_atfork() in cygprofile.cc would override any handler that
+// could have been installed previously.
+//
+// Chrome happens to violate the first requirement at least once by having its
+// process launcher thread fork. However the child process in that case, when
+// it's not instrumented with cygprofile, directly calls exec(). This is safe
+// since the child process doesn't try to release a lock acquired by another
+// thread in the parent process which would lead to a deadlock. This problem was
+// actually observed by trying to port the current version of cygprofile.cc to
+// Linux.
+#error This is only supported on Android.
+#endif
+
+// The following is only exposed for testing.
+namespace cygprofile {
+
+class Thread;
+
+// Single log entry recorded for each function call.
+struct LogEntry {
+ LogEntry(const void* address);
+
+ const timespec time;
+ const pid_t pid;
+ const pid_t tid;
+ const void* const address;
+};
+
+// Per-thread function calls log.
+class ThreadLog {
+ public:
+ // Callback invoked for flushing that can be provided for testing.
+ typedef base::Callback<void (std::vector<LogEntry>*)> FlushCallback;
+
+ ThreadLog();
+
+ // Used for testing.
+ ThreadLog(const FlushCallback& flush_callback);
+
+ ~ThreadLog();
+
+ // Must only be called from the thread this ThreadLog instance is watching.
+ void AddEntry(void* address);
+
+ // Can be called from any thread.
+ void TakeEntries(std::vector<LogEntry>* output);
+
+ // Flushes the provided vector of entries to a file and clears it. Note that
+ // this can be called from any thread.
+ void Flush(std::vector<LogEntry>* entries) const;
+
+ private:
+ // Default implementation (that can be overridden for testing) of the method
+ // above.
+ void FlushInternal(std::vector<LogEntry>* entries) const;
+
+ // Thread identifier as Linux kernel shows it. LWP (light-weight process) is
+ // a unique ID of the thread in the system, unlike pthread_self() which is the
+ // same for fork()-ed threads.
+ const pid_t tid_;
+
+ // Current thread is inside the instrumentation routine.
+ bool in_use_;
+
+ // Callback used to flush entries.
+ const FlushCallback flush_callback_;
+
+ // Keeps track of all functions that have been logged on this thread so we do
+ // not record duplicates.
+ base::hash_set<void*> called_functions_;
+
+ // A lock that guards |entries_| usage between per-thread instrumentation
+ // routine and timer flush callback. So the contention could happen only
+ // during the flush, every 15 secs.
+ base::Lock lock_;
+
+ std::vector<LogEntry> entries_;
+
+ DISALLOW_COPY_AND_ASSIGN(ThreadLog);
+};
+
+// Manages a list of per-thread logs.
+class ThreadLogsManager {
+ public:
+ ThreadLogsManager();
+
+ // Used for testing. The provided callbacks are used for testing to
+ // synchronize the internal thread with the unit test running on the main
+ // thread.
+ ThreadLogsManager(const base::Closure& wait_callback,
+ const base::Closure& notify_callback);
+
+ ~ThreadLogsManager();
+
+ // Can be called from any thread.
+ void AddLog(std::unique_ptr<ThreadLog> new_log);
+
+ private:
+ void StartInternalFlushThread_Locked();
+
+ // Flush thread's entry point.
+ void FlushAllLogsOnFlushThread();
+
+ // Used to make the internal thread sleep before each flush iteration.
+ const base::Closure wait_callback_;
+ // Used to trigger a notification when a flush happened on the internal
+ // thread.
+ const base::Closure notify_callback_;
+
+ // Protects the state below.
+ base::Lock lock_;
+ std::unique_ptr<Thread> flush_thread_;
+ std::vector<ThreadLog*> logs_;
+
+ DISALLOW_COPY_AND_ASSIGN(ThreadLogsManager);
+};
+
+} // namespace cygprofile
+
+#endif // TOOLS_CYGPROFILE_CYGPROFILE_H_
diff --git a/chromium/tools/cygprofile/cygprofile_unittest.cc b/chromium/tools/cygprofile/cygprofile_unittest.cc
new file mode 100644
index 00000000000..280e6e57e3b
--- /dev/null
+++ b/chromium/tools/cygprofile/cygprofile_unittest.cc
@@ -0,0 +1,102 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/cygprofile/cygprofile.h"
+
+#include <stdint.h>
+#include <sys/time.h>
+#include <utility>
+#include <vector>
+
+#include "base/bind.h"
+#include "base/callback.h"
+#include "base/logging.h"
+#include "base/synchronization/waitable_event.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace cygprofile {
+namespace {
+
+void FlushEntries(std::vector<LogEntry>* destination,
+ std::vector<LogEntry>* entries) {
+ CHECK_EQ(0U, destination->size());
+ // Move the provided |entries| vector to the provided |destination| so that
+ // the unit test that triggered the flush can check it.
+ destination->swap(*entries);
+}
+
+// Flush callback that should not be invoked.
+void CheckFlushDoesNotHappen(std::vector<LogEntry>* entries) {
+ NOTREACHED();
+}
+
+uint64_t GetUsecSecTimeFromTimeSpec(struct timespec timespec) {
+ return timespec.tv_sec * 1000 * 1000 + timespec.tv_nsec / 1000;
+}
+
+TEST(CygprofileTest, ThreadLogBasic) {
+ ThreadLog thread_log(base::Bind(&CheckFlushDoesNotHappen));
+
+ thread_log.AddEntry(reinterpret_cast<void*>(0x2));
+ thread_log.AddEntry(reinterpret_cast<void*>(0x1));
+
+ std::vector<LogEntry> entries;
+ thread_log.TakeEntries(&entries);
+
+ ASSERT_EQ(2U, entries.size());
+ // The entries should appear in their insertion order.
+ const LogEntry& first_entry = entries[0];
+ ASSERT_EQ(reinterpret_cast<uintptr_t>(first_entry.address), 2U);
+ ASSERT_EQ(getpid(), first_entry.pid);
+ ASSERT_LT(0, first_entry.tid);
+
+ const LogEntry& second_entry = entries[1];
+ ASSERT_EQ(1U, reinterpret_cast<uintptr_t>(second_entry.address));
+ ASSERT_EQ(first_entry.pid, second_entry.pid);
+ ASSERT_EQ(first_entry.tid, second_entry.tid);
+
+ ASSERT_GE(GetUsecSecTimeFromTimeSpec(second_entry.time),
+ GetUsecSecTimeFromTimeSpec(first_entry.time));
+}
+
+TEST(CygprofileTest, ManagerBasic) {
+ base::WaitableEvent wait_event(true, false);
+ base::WaitableEvent notify_event(true, false);
+
+ ThreadLogsManager manager(
+ base::Bind(&base::WaitableEvent::Wait, base::Unretained(&wait_event)),
+ base::Bind(&base::WaitableEvent::Signal,
+ base::Unretained(&notify_event)));
+
+ std::vector<LogEntry> entries;
+ std::unique_ptr<ThreadLog> thread_log(
+ new ThreadLog(base::Bind(&FlushEntries, base::Unretained(&entries))));
+
+ thread_log->AddEntry(reinterpret_cast<void*>(0x2));
+ thread_log->AddEntry(reinterpret_cast<void*>(0x3));
+
+ // This should make the manager spawn its internal flush thread which will
+ // wait for a notification before it starts doing some work.
+ manager.AddLog(std::move(thread_log));
+
+ EXPECT_EQ(0U, entries.size());
+ // This will wake up the internal thread.
+ wait_event.Signal();
+ // Now it's our turn to wait until it performed the flush.
+ notify_event.Wait();
+
+ // The flush should have moved the data to the local vector of entries.
+ EXPECT_EQ(2U, entries.size());
+ ASSERT_EQ(2U, reinterpret_cast<uintptr_t>(entries[0].address));
+ ASSERT_EQ(3U, reinterpret_cast<uintptr_t>(entries[1].address));
+}
+
+} // namespace
+} // namespace cygprofile
+
+// Custom runner implementation since base's one requires JNI on Android.
+int main(int argc, char** argv) {
+ testing::InitGoogleTest(&argc, argv);
+ return RUN_ALL_TESTS();
+}
diff --git a/chromium/tools/cygprofile/cygprofile_utils.py b/chromium/tools/cygprofile/cygprofile_utils.py
new file mode 100755
index 00000000000..4219a1539fd
--- /dev/null
+++ b/chromium/tools/cygprofile/cygprofile_utils.py
@@ -0,0 +1,64 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common utilites used by cygprofile scripts.
+"""
+
+import logging
+import os
+import re
+
+class WarningCollector(object):
+ """Collects warnings, but limits the number printed to a set value."""
+ def __init__(self, max_warnings, level=logging.WARNING):
+ self._warnings = 0
+ self._max_warnings = max_warnings
+ self._level = level
+
+ def Write(self, message):
+ """Prints a warning if fewer than max_warnings have already been printed."""
+ if self._warnings < self._max_warnings:
+ logging.log(self._level, message)
+ self._warnings += 1
+
+ def WriteEnd(self, message):
+ """Once all warnings have been printed, use this to print the number of
+ elided warnings."""
+ if self._warnings > self._max_warnings:
+ logging.log(self._level, '%d more warnings for: %s' % (
+ self._warnings - self._max_warnings, message))
+
+
+def DetectArchitecture(default='arm'):
+ """Detects the architecture by looking for target_arch in GYP_DEFINES.
+  If not found, returns default.
+ """
+ gyp_defines = os.environ.get('GYP_DEFINES', '')
+ match = re.match('target_arch=(\S+)', gyp_defines)
+ if match and len(match.groups()) == 1:
+ return match.group(1)
+ else:
+ return default
+
+
+def InvertMapping(x_to_ys):
+ """Given a map x -> [y1, y2...] returns inverse mapping y->[x1, x2...]."""
+ y_to_xs = {}
+ for x, ys in x_to_ys.items():
+ for y in ys:
+ y_to_xs.setdefault(y, []).append(x)
+ return y_to_xs
+
+
+def GetObjDir(libchrome):
+ """Get the path to the obj directory corresponding to the given libchrome.
+
+ Assumes libchrome is in for example .../Release/lib/libchrome.so and object
+ files are in .../Release/obj.
+ """
+ # TODO(lizeb,pasko): Pass obj path in explicitly where needed rather than
+ # relying on the above assumption.
+ return os.path.abspath(os.path.join(
+ os.path.dirname(libchrome), '../obj'))
diff --git a/chromium/tools/cygprofile/cygprofile_utils_unittest.py b/chromium/tools/cygprofile/cygprofile_utils_unittest.py
new file mode 100755
index 00000000000..84e0a61a65a
--- /dev/null
+++ b/chromium/tools/cygprofile/cygprofile_utils_unittest.py
@@ -0,0 +1,22 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import cygprofile_utils
+
+
+class TestCygprofileUtils(unittest.TestCase):
+ def testInvertMapping(self):
+ inputMap = {'1': ['2', '3'],
+ '4': ['2', '5']}
+ self.assertEqual(cygprofile_utils.InvertMapping(inputMap),
+ {'2': ['1', '4'],
+ '3': ['1'],
+ '5': ['4']})
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/cygprofile/mergetraces.py b/chromium/tools/cygprofile/mergetraces.py
new file mode 100755
index 00000000000..2ac83931cd7
--- /dev/null
+++ b/chromium/tools/cygprofile/mergetraces.py
@@ -0,0 +1,254 @@
+#!/usr/bin/python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use: ../mergetraces.py `ls cyglog.* -Sr` > merged_cyglog
+
+"""Merge multiple log files from different processes into a single log.
+
+Given two log files of execution traces, merge the traces into a single trace.
+Merging will use timestamps (i.e. the first two columns of logged calls) to
+create a single log that is an ordered trace of calls by both processes.
+"""
+
+import optparse
+import string
+import sys
+
+
def ParseLogLines(lines):
  """Parses log file lines produced by a profiled run.

  Args:
    lines: lines from the log file. The first line is the memory mapping of
      the profiled library, e.g.:
        5086e000-52e92000 r-xp 00000000 b3:02 51276 libchromeview.so
      The second line is a column header; the remaining lines are the logged
      calls:
        secs usecs pid:threadid func
        1314897086 795828 3587:1074648168 0x509e105c

  Returns:
    A tuple consisting of 1) an ordered list of the logged calls, as arrays of
    fields, 2) the virtual start address of the library, used to compute the
    offset of the symbol in the library and 3) the virtual end address.
  """
  header = lines[0]
  dash_index = header.find('-')
  space_index = header.find(' ')
  vm_start = int(header[:dash_index], 16)
  vm_end = int(header[dash_index + 1:space_index], 16)
  # Skip the mapping line and the column-header line.
  call_lines = [entry.strip().split() for entry in lines[2:]]
  return (call_lines, vm_start, vm_end)
+
+
def HasDuplicates(calls):
  """Checks whether any call was logged more than once.

  Args:
    calls: list of calls logged.

  Returns:
    True if the same callee (field 3) appears in more than one entry.
  """
  seen_callees = set()
  for entry in calls:
    callee = entry[3]
    if callee in seen_callees:
      return True
    seen_callees.add(callee)
  return False
+
def CheckTimestamps(calls):
  """Verifies that the call timestamps are monotonically non-decreasing.

  Args:
    calls: list of calls logged, as arrays of fields.

  Raises:
    Exception: if any call's timestamp precedes the previous call's.
  """
  prev_secs = -1
  prev_us = -1
  for fields in calls:
    secs = int(fields[0])
    us = int(fields[1])
    if secs * 1000000 + us < prev_secs * 1000000 + prev_us:
      raise Exception("last_timestamp: " + str(prev_secs)
                      + " " + str(prev_us) + " timestamp: "
                      + str(secs) + " " + str(us) + "\n")
    prev_secs = secs
    prev_us = us
+
+
def Convert(call_lines, start_address, end_address):
  """Converts the call addresses to static offsets and removes invalid calls.

  Removes profiled calls not in the shared library using the start and end
  virtual addresses, converts strings to integer values, and converts virtual
  addresses to addresses in the shared library.

  Args:
    call_lines: list of call fields [secs, usecs, 'pid:tid', hex_callee].
    start_address: virtual start address of the library mapping.
    end_address: virtual end address of the library mapping.

  Returns:
    list of calls as tuples (sec, usec, pid:tid, callee)
  """
  converted_calls = []
  call_addresses = set()
  for fields in call_lines:
    secs = int(fields[0])
    usecs = int(fields[1])
    callee = int(fields[3], 16)
    # Eliminate repetitions of the same function.
    if callee in call_addresses:
      continue
    # Eliminate small addresses. It should be safe to do so because these point
    # before the .text section (it is in .plt or earlier).
    # TODO(pasko): understand why __cyg_profile_func_enter may output a small
    # offset sometimes.
    if callee < start_address + 4096:
      # BUG FIX: the warning previously lacked a trailing newline, so
      # consecutive warnings ran together on one stderr line.
      sys.stderr.write('WARNING: ignoring small address: %s\n' %
                       hex(callee - start_address))
      call_addresses.add(callee)
      continue
    if start_address <= callee < end_address:
      converted_calls.append((secs, usecs, fields[2], (callee - start_address)))
      call_addresses.add(callee)
  return converted_calls
+
+
def Timestamp(trace_entry):
  """Returns the timestamp of a trace entry, in microseconds."""
  secs, usecs = int(trace_entry[0]), int(trace_entry[1])
  return secs * 1000000 + usecs
+
+
def AddTrace(tracemap, trace):
  """Merges the entries of a trace into the tracemap.

  Every call that is new to the tracemap is added. A call already present is
  replaced only when the new trace saw it at an earlier timestamp.

  Args:
    tracemap: dict mapping a callee to its earliest trace entry so far.
    trace: list of trace entries to merge in.
  """
  for entry in trace:
    callee = entry[3]
    existing = tracemap.get(callee)
    if existing is None or Timestamp(existing) > Timestamp(entry):
      tracemap[callee] = entry
+
+
def GroupByProcessAndThreadId(input_trace):
  """Returns an array of traces grouped by pid and tid.

  This is used to make the order of functions not depend on thread scheduling
  which can be greatly impacted when profiling is done with cygprofile. As a
  result each thread has its own contiguous segment of code (ordered by
  timestamp) and processes also have their code isolated (i.e. not interleaved).

  Args:
    input_trace: list of (sec, usec, 'pid:tid', callee) entries.

  Returns:
    The entries sorted by (first time the PID was seen, first time the TID was
    seen, timestamp).

  Raises:
    Exception: if the same TID is observed under two different PIDs.
  """
  def MakeTimestamp(sec, usec):
    return sec * 1000000 + usec

  def PidAndTidFromString(pid_and_tid):
    strings = pid_and_tid.split(':')
    return (int(strings[0]), int(strings[1]))

  tid_to_pid_map = {}
  pid_first_seen = {}
  tid_first_seen = {}

  for (sec, usec, pid_and_tid, _) in input_trace:
    (pid, tid) = PidAndTidFromString(pid_and_tid)

    # Make sure that thread IDs are unique since this is a property we rely on.
    if tid_to_pid_map.setdefault(tid, pid) != pid:
      raise Exception(
          'Seen PIDs %d and %d for TID=%d. Thread-IDs must be unique' % (
              tid_to_pid_map[tid], pid, tid))

    if not pid in pid_first_seen:
      pid_first_seen[pid] = MakeTimestamp(sec, usec)
    if not tid in tid_first_seen:
      tid_first_seen[tid] = MakeTimestamp(sec, usec)

  # PY3 COMPAT FIX: the original used a cmp= comparison function, which was
  # removed in Python 3. A key function returning the tuple
  # (pid first seen, tid first seen, timestamp) produces the exact same
  # ordering, and sorted() is stable in both versions.
  def SortKey(event):
    (sec, usec, pid_and_tid, _) = event
    (pid, tid) = PidAndTidFromString(pid_and_tid)
    return (pid_first_seen[pid], tid_first_seen[tid], MakeTimestamp(sec, usec))

  return sorted(input_trace, key=SortKey)
+
+
def Main():
  """Merge two traces for code in specified library and write to stdout.

  Merges the two traces and converts the virtual addresses to the offsets in
  the library. First line of merged trace has dummy virtual address of
  0-ffffffff so that symbolizing the addresses uses the addresses in the log,
  since the addresses have already been converted to static offsets.
  """
  parser = optparse.OptionParser('usage: %prog trace1 ... traceN')
  (_, args) = parser.parse_args()
  if len(args) <= 1:
    parser.error('expected at least the following args: trace1 trace2')

  # 1-based progress counter used only for the stderr status messages below.
  step = 0

  # Maps function addresses to their corresponding trace entry.
  tracemap = dict()

  for trace_file in args:
    step += 1
    sys.stderr.write(" " + str(step) + "/" + str(len(args)) +
                     ": " + trace_file + ":\n")

    # Strip trailing whitespace from every line (Python 2 `string` module).
    trace_lines = map(string.rstrip, open(trace_file).readlines())
    (trace_calls, trace_start, trace_end) = ParseLogLines(trace_lines)
    # Raises if the per-file timestamps are out of order.
    CheckTimestamps(trace_calls)
    sys.stderr.write("Len: " + str(len(trace_calls)) +
                     ". Start: " + hex(trace_start) +
                     ", end: " + hex(trace_end) + '\n')

    # Drop out-of-library/small addresses and rebase callees to static offsets.
    trace_calls = Convert(trace_calls, trace_start, trace_end)
    sys.stderr.write("Converted len: " + str(len(trace_calls)) + "\n")

    # Keep, for each callee, the entry with the earliest timestamp seen so far.
    AddTrace(tracemap, trace_calls)
    sys.stderr.write("Merged len: " + str(len(tracemap)) + "\n")

  # Extract the resulting trace from the tracemap
  merged_trace = []
  for call in tracemap:
    merged_trace.append(tracemap[call])
  merged_trace.sort(key=Timestamp)

  grouped_trace = GroupByProcessAndThreadId(merged_trace)

  # Dummy 0-ffffffff mapping line: offsets in the log are already static.
  print "0-ffffffff r-xp 00000000 xx:00 00000 ./"
  print "secs\tusecs\tpid:threadid\tfunc"
  for call in grouped_trace:
    print (str(call[0]) + "\t" + str(call[1]) + "\t" + call[2] + "\t" +
           hex(call[3]))


if __name__ == '__main__':
  Main()
diff --git a/chromium/tools/cygprofile/mergetraces_unittest.py b/chromium/tools/cygprofile/mergetraces_unittest.py
new file mode 100644
index 00000000000..de881379d69
--- /dev/null
+++ b/chromium/tools/cygprofile/mergetraces_unittest.py
@@ -0,0 +1,51 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import mergetraces
+
class GroupByProcessAndThreadIdTestBasic(unittest.TestCase):
  """Verifies grouping by PID, then TID, then timestamp."""

  def runTest(self):
    # Entries are (sec, usec, 'pid:tid', function address).
    trace = [
        (100, 10, '2000:2001', 0x5),
        (100, 11, '2000:2001', 0x3),
        (100, 13, '2000:1999', 0x8),
        (100, 14, '2000:2000', 0x7),
        (120, 13, '2001:2003', 0x9),
        (150, 12, '2001:2004', 0x6),
        (180, 11, '2000:2000', 0x1),
    ]

    # Functions must be grouped by thread-id and PIDs must not be interleaved.
    expected = [
        (100, 10, '2000:2001', 0x5),
        (100, 11, '2000:2001', 0x3),
        (100, 13, '2000:1999', 0x8),
        (100, 14, '2000:2000', 0x7),
        (180, 11, '2000:2000', 0x1),
        (120, 13, '2001:2003', 0x9),
        (150, 12, '2001:2004', 0x6),
    ]

    self.assertEqual(mergetraces.GroupByProcessAndThreadId(trace), expected)
+
class GroupByProcessAndThreadIdFailsWithNonUniqueTIDs(unittest.TestCase):
  """Checks that a TID shared by two PIDs is rejected."""

  def runTest(self):
    # (sec, usec, 'pid:tid', function address).
    input_trace = [
        (100, 10, '1999:2001', 0x5),
        (100, 10, '1988:2001', 0x5),
    ]

    # IDIOM FIX: assertRaises replaces the original try/except/self.fail
    # pattern; TID 2001 appears under two PIDs, so grouping must raise.
    with self.assertRaises(Exception):
      mergetraces.GroupByProcessAndThreadId(input_trace)
diff --git a/chromium/tools/cygprofile/patch_orderfile.py b/chromium/tools/cygprofile/patch_orderfile.py
new file mode 100755
index 00000000000..dbb344f174a
--- /dev/null
+++ b/chromium/tools/cygprofile/patch_orderfile.py
@@ -0,0 +1,407 @@
+#!/usr/bin/python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Patch an orderfile.
+
+Starting with a list of symbols in a binary and an orderfile (ordered list of
+sections), matches the symbols in the orderfile and augments each symbol with
+the symbols residing at the same address (due to having identical code). The
+output is a list of section matching rules appropriate for the linker option
+-section-ordering-file. These section matching rules include both actual
+section names and names with wildcard (*) suffixes.
+
+Note: It is possible to have:
+- Several symbols mapping to the same offset in the binary.
+- Several offsets for a given symbol (because we strip the ".clone." and other
+ suffixes)
+
+The general pipeline is:
+1. Get the symbol infos (name, offset, size, section) from the binary
+2. Get the symbol names from the orderfile
+3. Find the orderfile symbol names in the symbols coming from the binary
+4. For each symbol found, get all the symbols at the same address
+5. Output them to an updated orderfile, with several different prefixes
+ and suffixes
+6. Output catch-all section matching rules for unprofiled methods.
+"""
+
+import collections
+import logging
+import optparse
+import sys
+
+import cyglog_to_orderfile
+import cygprofile_utils
+import symbol_extractor
+
+# Prefixes for the symbols. We strip them from the incoming symbols, and add
+# them back in the output file.
+_PREFIXES = ('.text.startup.', '.text.hot.', '.text.unlikely.', '.text.')
+
+# Suffixes for the symbols. These are due to method splitting for inlining and
+# method cloning for various reasons including constant propagation and
+# inter-procedural optimization.
+_SUFFIXES = ('.clone.', '.part.', '.isra.', '.constprop.')
+
+
def RemoveSuffixes(name):
  """Strips method name suffixes from cloning and splitting.

  .clone. comes from cloning in -O3.
  .part. comes from partial method splitting for inlining.
  .isra. comes from inter-procedural optimizations.
  .constprop. is cloning for constant propagation.
  """
  stripped = name
  for marker in _SUFFIXES:
    # partition() keeps only the text before the first occurrence, exactly
    # like split(marker)[0].
    stripped = stripped.partition(marker)[0]
  return stripped
+
+
+def _UniqueGenerator(generator):
+ """Converts a generator to skip yielding elements already seen.
+
+ Example:
+ @_UniqueGenerator
+ def Foo():
+ yield 1
+ yield 2
+ yield 1
+ yield 3
+
+ Foo() yields 1,2,3.
+ """
+ def _FilteringFunction(*args, **kwargs):
+ returned = set()
+ for item in generator(*args, **kwargs):
+ if item in returned:
+ continue
+ returned.add(item)
+ yield item
+
+ return _FilteringFunction
+
+
def _GroupSymbolInfos(symbol_infos):
  """Groups the symbol infos by name and offset.

  Args:
    symbol_infos: an iterable of SymbolInfo

  Returns:
    The same output as _GroupSymbolInfosFromBinary.
  """
  offset_to_symbol_infos = {}
  name_to_symbol_infos = {}
  for raw_symbol in symbol_infos:
    # Canonicalize the name so cloned/split variants group together.
    canonical = symbol_extractor.SymbolInfo(
        name=RemoveSuffixes(raw_symbol.name),
        offset=raw_symbol.offset,
        size=raw_symbol.size,
        section=raw_symbol.section)
    offset_to_symbol_infos.setdefault(canonical.offset, []).append(canonical)
    name_to_symbol_infos.setdefault(canonical.name, []).append(canonical)
  return (offset_to_symbol_infos, name_to_symbol_infos)
+
+
def _GroupSymbolInfosFromBinary(binary_filename):
  """Groups all the symbols from a binary by name and offset.

  Args:
    binary_filename: path to the binary.

  Returns:
    A tuple of dict:
    (offset_to_symbol_infos, name_to_symbol_infos):
    - offset_to_symbol_infos: {offset: [symbol_info1, ...]}
    - name_to_symbol_infos: {name: [symbol_info1, ...]}
  """
  return _GroupSymbolInfos(
      symbol_extractor.SymbolInfosFromBinary(binary_filename))
+
+
def _StripPrefix(line):
  """Strips the linker section name prefix from a symbol line.

  Args:
    line: a line from an orderfile, usually in the form:
          .text.SymbolName

  Returns:
    The symbol, SymbolName in the example above.
  """
  matched = next((p for p in _PREFIXES if line.startswith(p)), None)
  if matched is None:
    return line  # Unprefixed case
  return line[len(matched):]
+
+
def _SectionNameToSymbols(section_name, section_to_symbols_map):
  """Yields all symbols which could be referred to by section_name.

  If the section name is present in the map, the names in the map are returned.
  Otherwise, any clone annotations and prefixes are stripped from the section
  name and the remainder is returned.
  """
  # Catch-all sections (empty, bare '.text', wildcards) map to no symbols.
  is_catch_all = (not section_name or
                  section_name == '.text' or
                  section_name.endswith('*'))
  if is_catch_all:
    return
  known_symbols = section_to_symbols_map.get(section_name)
  if known_symbols is not None:
    for symbol in known_symbols:
      yield symbol
    return
  stripped = _StripPrefix(section_name)
  if stripped:
    yield stripped
+
+
def GetSectionsFromOrderfile(filename):
  """Yields the sections from an orderfile.

  Args:
    filename: The name of the orderfile.

  Yields:
    The section names (one per non-empty line), in file order.
  """
  with open(filename, 'r') as f:
    # PY3 COMPAT FIX: iterate the file object directly; xreadlines() is
    # deprecated and was removed in Python 3. Behavior is identical.
    for line in f:
      line = line.rstrip('\n')
      if line:
        yield line
+
+
@_UniqueGenerator
def GetSymbolsFromOrderfile(filename, section_to_symbols_map):
  """Yields the symbols from an orderfile. Output elements do not repeat.

  Args:
    filename: The name of the orderfile.
    section_to_symbols_map: The mapping from section to symbol names. If a
                            section name is missing from the mapping, the
                            symbol name is assumed to be the section name with
                            prefixes and suffixes stripped.

  Yields:
    A list of symbol names.
  """
  # TODO(lizeb,pasko): Move this method to symbol_extractor.py
  for section in GetSectionsFromOrderfile(filename):
    stripped_section = RemoveSuffixes(section)
    for symbol in _SectionNameToSymbols(stripped_section,
                                        section_to_symbols_map):
      yield symbol
+
+
+def _SymbolsWithSameOffset(profiled_symbol, name_to_symbol_info,
+ offset_to_symbol_info):
+ """Expands a symbol to include all symbols with the same offset.
+
+ Args:
+ profiled_symbol: the string symbol name to be expanded.
+ name_to_symbol_info: {name: [symbol_info1], ...}, as returned by
+ GetSymbolInfosFromBinary
+ offset_to_symbol_info: {offset: [symbol_info1, ...], ...}
+
+ Returns:
+ A list of symbol names, or an empty list if profiled_symbol was not in
+ name_to_symbol_info.
+ """
+ if profiled_symbol not in name_to_symbol_info:
+ return []
+ symbol_infos = name_to_symbol_info[profiled_symbol]
+ expanded = []
+ for symbol_info in symbol_infos:
+ expanded += (s.name for s in offset_to_symbol_info[symbol_info.offset])
+ return expanded
+
+
@_UniqueGenerator
def _SectionMatchingRules(section_name, name_to_symbol_infos,
                          offset_to_symbol_infos, section_to_symbols_map,
                          symbol_to_sections_map, suffixed_sections):
  """Gets the set of section matching rules for section_name.

  These rules will include section_name, but also any sections which may
  contain the same code due to cloning, splitting, or identical code folding.

  Args:
    section_name: The section to expand.
    name_to_symbol_infos: {name: [symbol_info1], ...}, as returned by
        GetSymbolInfosFromBinary.
    offset_to_symbol_infos: {offset: [symbol_info1, ...], ...}
    section_to_symbols_map: The mapping from section to symbol name. Missing
        section names are treated as per _SectionNameToSymbols.
    symbol_to_sections_map: The mapping from symbol name to names of linker
        sections containing the symbol. If a symbol isn't in the mapping, the
        section names are generated from the set of _PREFIXES with the symbol
        name.
    suffixed_sections: A set of sections which can have suffixes.

  Yields:
    Section names including at least section_name.
  """
  expanded = _ExpandSection(section_name, name_to_symbol_infos,
                            offset_to_symbol_infos, section_to_symbols_map,
                            symbol_to_sections_map)
  for name in expanded:
    yield name
    # Since only a subset of methods (mostly those compiled with O2) ever get
    # suffixes, don't emit the wildcards for ones where it won't be helpful.
    # Otherwise linking takes too long.
    if name in suffixed_sections:
      # TODO(lizeb,pasko): instead of just appending .*, append .suffix.* for
      # _SUFFIXES. We can't do this right now because that many wildcards
      # seems to kill the linker (linking libchrome takes 3 hours). This gets
      # almost all the benefit at a much lower link-time cost, but could cause
      # problems with unexpected suffixes.
      yield name + '.*'
+
def _ExpandSection(section_name, name_to_symbol_infos, offset_to_symbol_infos,
                   section_to_symbols_map, symbol_to_sections_map):
  """Yields the set of section names for section_name.

  This set will include section_name, but also any sections which may contain
  the same code due to identical code folding.

  Args:
    section_name: The section to expand.
    name_to_symbol_infos: {name: [symbol_info1], ...}, as returned by
        GetSymbolInfosFromBinary.
    offset_to_symbol_infos: {offset: [symbol_info1, ...], ...}
    section_to_symbols_map: The mapping from section to symbol name. Missing
        section names are treated as per _SectionNameToSymbols.
    symbol_to_sections_map: The mapping from symbol name to names of linker
        sections containing the symbol. If a symbol isn't in the mapping, the
        section names are generated from the set of _PREFIXES with the symbol
        name.

  Yields:
    Section names including at least section_name.
  """
  yield section_name
  for first_sym in _SectionNameToSymbols(section_name,
                                         section_to_symbols_map):
    aliases = _SymbolsWithSameOffset(first_sym, name_to_symbol_infos,
                                     offset_to_symbol_infos)
    for symbol in aliases:
      # Known sections for the alias, if any, then every prefixed variant.
      known_sections = symbol_to_sections_map.get(symbol)
      if known_sections is not None:
        for section in known_sections:
          yield section
      for prefix in _PREFIXES:
        yield prefix + symbol
+
+
@_UniqueGenerator
def _ExpandSections(section_names, name_to_symbol_infos,
                    offset_to_symbol_infos, section_to_symbols_map,
                    symbol_to_sections_map, suffixed_sections):
  """Gets an ordered set of section matching rules for a list of sections.

  Rules will not be repeated.

  Args:
    section_names: The sections to expand.
    name_to_symbol_infos: {name: [symbol_info1], ...}, as returned by
        _GroupSymbolInfosFromBinary.
    offset_to_symbol_infos: {offset: [symbol_info1, ...], ...}
    section_to_symbols_map: The mapping from section to symbol names.
    symbol_to_sections_map: The mapping from symbol name to names of linker
        sections containing the symbol.
    suffixed_sections: A set of sections which can have suffixes.

  Yields:
    Section matching rules including at least section_names.
  """
  for profiled_section in section_names:
    rules = _SectionMatchingRules(
        profiled_section, name_to_symbol_infos, offset_to_symbol_infos,
        section_to_symbols_map, symbol_to_sections_map, suffixed_sections)
    for rule in rules:
      yield rule
+
+
def _CombineSectionListsByPrimaryName(symbol_to_sections_map):
  """Combines values of the symbol_to_sections_map by stripping suffixes.

  Example:
    {foo: [.text.foo, .text.bar.part.1],
     foo.constprop.4: [.text.baz.constprop.3]} ->
    {foo: [.text.foo, .text.bar, .text.baz]}

  Args:
    symbol_to_sections_map: Mapping from symbol name to list of section names

  Returns:
    The same mapping, but with symbol and section names suffix-stripped.
  """
  simplified = {}
  # PY3 COMPAT FIX: items() instead of the Python 2-only iteritems(); the
  # result is identical (iteration order of dict items) on both versions.
  for suffixed_symbol, suffixed_sections in symbol_to_sections_map.items():
    symbol = RemoveSuffixes(suffixed_symbol)
    sections = [RemoveSuffixes(section) for section in suffixed_sections]
    simplified.setdefault(symbol, []).extend(sections)
  return simplified
+
+
def _SectionsWithSuffixes(symbol_to_sections_map):
  """Finds sections which have suffixes applied.

  Args:
    symbol_to_sections_map: a map where the values are lists of section names.

  Returns:
    A set containing all section names which were seen with suffixes applied.
  """
  sections_with_suffixes = set()
  # PY3 COMPAT FIX: values() instead of the Python 2-only itervalues().
  for suffixed_sections in symbol_to_sections_map.values():
    for suffixed_section in suffixed_sections:
      section = RemoveSuffixes(suffixed_section)
      if section != suffixed_section:
        sections_with_suffixes.add(section)
  return sections_with_suffixes
+
+
def _StripSuffixes(section_list):
  """Removes all suffixes on items in a list of sections or symbols."""
  return list(map(RemoveSuffixes, section_list))
+
+
def main(argv):
  """Patches an orderfile with sections for symbols sharing code.

  Args:
    argv: [script_name, unpatched_orderfile, library_path] plus options.

  Returns:
    0 on success, 1 on incorrect usage.
  """
  parser = optparse.OptionParser(usage=
      'usage: %prog [options] <unpatched_orderfile> <library>')
  parser.add_option('--target-arch', action='store', dest='arch',
                    choices=['arm', 'arm64', 'x86', 'x86_64', 'x64', 'mips'],
                    help='The target architecture for the library.')
  options, argv = parser.parse_args(argv)
  if not options.arch:
    # Fall back to GYP_DEFINES when --target-arch is not given.
    options.arch = cygprofile_utils.DetectArchitecture()
  if len(argv) != 3:
    parser.print_help()
    return 1
  orderfile_filename = argv[1]
  binary_filename = argv[2]
  symbol_extractor.SetArchitecture(options.arch)
  # Step 1: group the binary's symbols by offset and by name.
  (offset_to_symbol_infos, name_to_symbol_infos) = _GroupSymbolInfosFromBinary(
      binary_filename)
  # Assumes the obj/ directory sits next to the library's lib/ directory.
  obj_dir = cygprofile_utils.GetObjDir(binary_filename)
  raw_symbol_map = cyglog_to_orderfile.GetSymbolToSectionsMapFromObjectFiles(
      obj_dir)
  # Remember which sections appeared with suffixes before canonicalizing.
  suffixed = _SectionsWithSuffixes(raw_symbol_map)
  symbol_to_sections_map = _CombineSectionListsByPrimaryName(raw_symbol_map)
  section_to_symbols_map = cygprofile_utils.InvertMapping(
      symbol_to_sections_map)
  # Steps 2-4: read the orderfile sections and expand each to all sections
  # that may hold the same code (aliases, clones, prefixed variants).
  profiled_sections = _StripSuffixes(
      GetSectionsFromOrderfile(orderfile_filename))
  expanded_sections = _ExpandSections(
      profiled_sections, name_to_symbol_infos, offset_to_symbol_infos,
      section_to_symbols_map, symbol_to_sections_map, suffixed)
  # Step 5: emit the expanded matching rules to stdout.
  for section in expanded_sections:
    print section
  # The following is needed otherwise Gold only applies a partial sort.
  print '.text' # gets methods not in a section, such as assembly
  for prefix in _PREFIXES:
    print prefix + '*' # gets everything else
  return 0


if __name__ == '__main__':
  logging.basicConfig(level=logging.INFO)
  sys.exit(main(sys.argv))
diff --git a/chromium/tools/cygprofile/patch_orderfile_unittest.py b/chromium/tools/cygprofile/patch_orderfile_unittest.py
new file mode 100755
index 00000000000..047d447f10c
--- /dev/null
+++ b/chromium/tools/cygprofile/patch_orderfile_unittest.py
@@ -0,0 +1,152 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import patch_orderfile
+import symbol_extractor
+
+
class TestPatchOrderFile(unittest.TestCase):
  """Unit tests for patch_orderfile.

  IDIOM FIX: uses assertEqual throughout instead of the deprecated
  assertEquals alias (removed in recent unittest versions).
  """

  def testRemoveSuffixes(self):
    no_clone = 'this.does.not.contain.clone'
    self.assertEqual(no_clone, patch_orderfile.RemoveSuffixes(no_clone))
    with_clone = 'this.does.contain.clone.'
    self.assertEqual(
        'this.does.contain', patch_orderfile.RemoveSuffixes(with_clone))
    with_part = 'this.is.a.part.42'
    self.assertEqual(
        'this.is.a', patch_orderfile.RemoveSuffixes(with_part))

  def testAliasClonedSymbols(self):
    symbol_infos = [
        symbol_extractor.SymbolInfo(name='aSymbol', offset=0x42, size=0x12,
                                    section='.text'),
        symbol_extractor.SymbolInfo(name='aSymbol.clone.', offset=8, size=1,
                                    section='.text')]
    (offset_to_symbol_infos, name_to_symbol_infos) = \
        patch_orderfile._GroupSymbolInfos(symbol_infos)
    self.assertEqual(len(offset_to_symbol_infos), 2)
    for i in range(2):
      s = symbol_infos[i]
      matching = offset_to_symbol_infos[s.offset][0]
      self.assertEqual(matching.offset, s.offset)
      self.assertEqual(matching.size, s.size)
    self.assertEqual(len(name_to_symbol_infos), 1)
    self.assertEqual(len(name_to_symbol_infos['aSymbol']), 2)

  def testGroupSymbolsByOffset(self):
    symbol_infos = (
        symbol_extractor.SymbolInfo(name='aSymbol', offset=0x42, size=0x12,
                                    section='.text'),
        symbol_extractor.SymbolInfo(name='anotherSymbol', offset=0x42, size=1,
                                    section='.text'))
    (offset_to_symbol_infos, _) = \
        patch_orderfile._GroupSymbolInfos(symbol_infos)
    self.assertEqual(len(offset_to_symbol_infos), 1)
    self.assertEqual(tuple(offset_to_symbol_infos[0x42]), symbol_infos)

  def testSymbolsWithSameOffset(self):
    symbol_name = "dummySymbol"
    symbol_name2 = "other"
    name_to_symbol_infos = {symbol_name: [
        symbol_extractor.SymbolInfo(symbol_name, 0x42, 0x12,
                                    section='.text')]}
    offset_to_symbol_infos = {
        0x42: [symbol_extractor.SymbolInfo(symbol_name, 0x42, 0x12,
                                           section='.text'),
               symbol_extractor.SymbolInfo(symbol_name2, 0x42, 0x12,
                                           section='.text')]}
    symbol_names = patch_orderfile._SymbolsWithSameOffset(
        symbol_name, name_to_symbol_infos, offset_to_symbol_infos)
    self.assertEqual(len(symbol_names), 2)
    self.assertEqual(symbol_names[0], symbol_name)
    self.assertEqual(symbol_names[1], symbol_name2)
    self.assertEqual([], patch_orderfile._SymbolsWithSameOffset(
        "symbolThatShouldntMatch",
        name_to_symbol_infos, offset_to_symbol_infos))

  def testSectionNameToSymbols(self):
    mapping = {'.text.foo': ['foo'],
               '.text.startup.bar': ['bar', 'bar1']}
    self.assertEqual(list(patch_orderfile._SectionNameToSymbols(
        '.text.foo', mapping)),
        ['foo'])
    self.assertEqual(list(patch_orderfile._SectionNameToSymbols(
        '.text.startup.bar', mapping)),
        ['bar', 'bar1'])
    self.assertEqual(list(patch_orderfile._SectionNameToSymbols(
        '.text.startup.bar', mapping)),
        ['bar', 'bar1'])
    self.assertEqual(list(patch_orderfile._SectionNameToSymbols(
        '.text.hot.foobar', mapping)),
        ['foobar'])
    self.assertEqual(list(patch_orderfile._SectionNameToSymbols(
        '.text.startup.*', mapping)),
        [])

  def testSectionMatchingRules(self):
    symbol_name1 = 'symbol1'
    symbol_name2 = 'symbol2'
    symbol_name3 = 'symbol3'
    section_name1 = '.text.' + symbol_name1
    section_name3 = '.text.foo'
    suffixed = set([section_name3])
    name_to_symbol_infos = {symbol_name1: [
        symbol_extractor.SymbolInfo(symbol_name1, 0x42, 0x12,
                                    section='.text')]}
    offset_to_symbol_infos = {
        0x42: [symbol_extractor.SymbolInfo(symbol_name1, 0x42, 0x12,
                                           section='.text'),
               symbol_extractor.SymbolInfo(symbol_name2, 0x42, 0x12,
                                           section='.text')]}
    section_to_symbols_map = {section_name1: [symbol_name1],
                              section_name3: [symbol_name1, symbol_name3]}
    symbol_to_sections_map = {symbol_name1:
                                  [section_name1, section_name3],
                              symbol_name3: [section_name3]}
    expected = [
        section_name1,
        section_name3,
        section_name3 + '.*',
        '.text.startup.' + symbol_name1,
        '.text.hot.' + symbol_name1,
        '.text.unlikely.' + symbol_name1,
        '.text.startup.symbol2',
        '.text.hot.symbol2',
        '.text.unlikely.symbol2',
        '.text.symbol2']
    self.assertEqual(expected, list(patch_orderfile._SectionMatchingRules(
        section_name1, name_to_symbol_infos, offset_to_symbol_infos,
        section_to_symbols_map, symbol_to_sections_map, suffixed)))

  def testUniqueGenerator(self):
    @patch_orderfile._UniqueGenerator
    def TestIterator():
      yield 1
      yield 2
      yield 1
      yield 3

    self.assertEqual(list(TestIterator()), [1,2,3])

  def testCombineSectionListsByPrimaryName(self):
    self.assertEqual(patch_orderfile._CombineSectionListsByPrimaryName(
        {'foo': ['.text.foo', '.text.bar.constprop.1'],
         'foo.part.1': ['.text.baz'],
         'foobar': ['.text.foobar']}),
        {'foo': ['.text.foo', '.text.bar', '.text.baz'],
         'foobar': ['.text.foobar']})

  def testSectionsWithSuffixes(self):
    self.assertEqual(patch_orderfile._SectionsWithSuffixes(
        {'foo': ['.text.foo', '.text.bar.constprop.1'],
         'foo.part.1': ['.text.baz'],
         'foobar': ['.text.foobar']}),
        set(['.text.bar']))


if __name__ == "__main__":
  unittest.main()
diff --git a/chromium/tools/cygprofile/profile_android_startup.py b/chromium/tools/cygprofile/profile_android_startup.py
new file mode 100644
index 00000000000..aadb2328020
--- /dev/null
+++ b/chromium/tools/cygprofile/profile_android_startup.py
@@ -0,0 +1,366 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility library for running a startup profile on an Android device.
+
+Sets up a device for cygprofile, disables sandboxing permissions, and sets up
+support for web page replay, device forwarding, and fake certificate authority
+to make runs repeatable.
+"""
+
+import logging
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+import time
+
+sys.path.append(os.path.join(sys.path[0], '..', '..',
+ 'third_party', 'catapult', 'devil'))
+from devil.android import device_errors
+from devil.android import device_utils
+from devil.android import flag_changer
+from devil.android import forwarder
+from devil.android.sdk import intent
+
+sys.path.append(os.path.join(sys.path[0], '..', '..', 'build', 'android'))
+from pylib import constants
+
+sys.path.append(os.path.join(sys.path[0], '..', '..', 'tools', 'perf'))
+from chrome_telemetry_build import chromium_config
+sys.path.append(chromium_config.GetTelemetryDir())
+from telemetry.internal.util import webpagereplay
+
+sys.path.append(os.path.join(sys.path[0], '..', '..',
+ 'third_party', 'webpagereplay'))
+import adb_install_cert
+import certutils
+
+
+class NoCyglogDataError(Exception):
+ """An error used to indicate that no cyglog data was collected."""
+
+ def __init__(self, value):
+ super(NoCyglogDataError, self).__init__()
+ self.value = value
+
+ def __str__(self):
+ return repr(self.value)
+
+
+def _DownloadFromCloudStorage(bucket, sha1_file_name):
+ """Download the given file based on a hash file."""
+ cmd = ['download_from_google_storage', '--no_resume',
+ '--bucket', bucket, '-s', sha1_file_name]
+ print 'Executing command ' + ' '.join(cmd)
+ process = subprocess.Popen(cmd)
+ process.wait()
+ if process.returncode != 0:
+ raise Exception('Exception executing command %s' % ' '.join(cmd))
+
+
+class WprManager(object):
+ """A utility to download a WPR archive, host it, and forward device ports to
+ it.
+ """
+
+ _WPR_BUCKET = 'chrome-partner-telemetry'
+
+ def __init__(self, wpr_archive, device, cmdline_file):
+ self._device = device
+ self._wpr_archive = wpr_archive
+ self._wpr_archive_hash = wpr_archive + '.sha1'
+ self._cmdline_file = cmdline_file
+ self._wpr_server = None
+ self._wpr_ca_cert_path = None
+ self._device_cert_util = None
+ self._host_http_port = None
+ self._host_https_port = None
+ self._is_test_ca_installed = False
+ self._flag_changer = None
+
+ def Start(self):
+ """Set up the device and host for WPR."""
+ self.Stop()
+ # TODO(lizeb,pasko): make self._InstallTestCa() work
+ self._BringUpWpr()
+ self._StartForwarder()
+
+ def Stop(self):
+ """Clean up the device and host's WPR setup."""
+ self._StopForwarder()
+ self._StopWpr()
+ # TODO(lizeb,pasko): make self._RemoveTestCa() work
+
+ def __enter__(self):
+ self.Start()
+
+ def __exit__(self, unused_exc_type, unused_exc_val, unused_exc_tb):
+ self.Stop()
+
+ def _InstallTestCa(self):
+ """Generates and deploys a test certificate authority."""
+ print 'Installing test certificate authority on device: %s' % (
+ self._device.adb.GetDeviceSerial())
+ self._wpr_ca_cert_path = os.path.join(tempfile.mkdtemp(), 'testca.pem')
+ certutils.write_dummy_ca_cert(*certutils.generate_dummy_ca_cert(),
+ cert_path=self._wpr_ca_cert_path)
+ self._device_cert_util = adb_install_cert.AndroidCertInstaller(
+ self._device.adb.GetDeviceSerial(), None, self._wpr_ca_cert_path)
+ self._device_cert_util.install_cert(overwrite_cert=True)
+ self._is_test_ca_installed = True
+
+ def _RemoveTestCa(self):
+ """Remove root CA generated by previous call to InstallTestCa().
+
+ Removes the test root certificate from both the device and host machine.
+ """
+ print 'Cleaning up test CA...'
+ if not self._wpr_ca_cert_path:
+ return
+
+ if self._is_test_ca_installed:
+ try:
+ self._device_cert_util.remove_cert()
+ except Exception:
+ # Best effort cleanup - show the error and continue.
+ logging.error(
+ 'Error while trying to remove certificate authority: %s. '
+ % self._adb.device_serial())
+ self._is_test_ca_installed = False
+
+ shutil.rmtree(os.path.dirname(self._wpr_ca_cert_path), ignore_errors=True)
+ self._wpr_ca_cert_path = None
+ self._device_cert_util = None
+
+ def _BringUpWpr(self):
+ """Start the WPR server on the host and the forwarder on the device."""
+ print 'Starting WPR on host...'
+ _DownloadFromCloudStorage(self._WPR_BUCKET, self._wpr_archive_hash)
+ args = ['--use_closest_match']
+ if self._is_test_ca_installed:
+ args.extend(['--should_generate_certs',
+ '--https_root_ca_cert_path=' + self._wpr_ca_cert_path])
+ wpr_server = webpagereplay.ReplayServer(self._wpr_archive,
+ '127.0.0.1', 0, 0, None, args)
+ ports = wpr_server.StartServer()[:-1]
+ self._wpr_server = wpr_server
+ self._host_http_port = ports[0]
+ self._host_https_port = ports[1]
+
+ def _StopWpr(self):
+ """ Stop the WPR and forwarder. """
+ print 'Stopping WPR on host...'
+ if self._wpr_server:
+ self._wpr_server.StopServer()
+ self._wpr_server = None
+
+ def _StartForwarder(self):
+ """Sets up forwarding of device ports to the host, and configures chrome
+ to use those ports.
+ """
+ if not self._wpr_server:
+ logging.warning('No host WPR server to forward to.')
+ return
+ print 'Starting device forwarder...'
+ forwarder.Forwarder.Map([(0, self._host_http_port),
+ (0, self._host_https_port)],
+ self._device)
+ device_http = forwarder.Forwarder.DevicePortForHostPort(
+ self._host_http_port)
+ device_https = forwarder.Forwarder.DevicePortForHostPort(
+ self._host_https_port)
+ self._flag_changer = flag_changer.FlagChanger(
+ self._device, self._cmdline_file)
+ self._flag_changer.AddFlags([
+ '--host-resolver-rules="MAP * 127.0.0.1,EXCLUDE localhost"',
+ '--testing-fixed-http-port=%s' % device_http,
+ '--testing-fixed-https-port=%s' % device_https])
+
+ def _StopForwarder(self):
+ """Shuts down the port forwarding service."""
+ print 'Stopping device forwarder...'
+ if self._flag_changer:
+ self._flag_changer.Restore()
+ self._flag_changer = None
+ forwarder.Forwarder.UnmapAllDevicePorts(self._device)
+
+
+class AndroidProfileTool(object):
+ """A utility for generating cygprofile data for chrome on andorid.
+
+ Runs cygprofile_unittest found in output_directory, does profiling runs,
+ and pulls the data to the local machine in output_directory/cyglog_data.
+ """
+
+ _DEVICE_CYGLOG_DIR = '/data/local/tmp/chrome/cyglog'
+
+ # TEST_URL must be a url in the WPR_ARCHIVE.
+ _TEST_URL = 'https://www.google.com/#hl=en&q=science'
+ _WPR_ARCHIVE = os.path.join(
+ constants.DIR_SOURCE_ROOT, 'tools', 'perf', 'page_sets', 'data',
+ 'top_10_mobile_002.wpr')
+
+
+ def __init__(self, output_directory):
+ devices = device_utils.DeviceUtils.HealthyDevices()
+ self._device = devices[0]
+ self._cygprofile_tests = os.path.join(
+ output_directory, 'cygprofile_unittests')
+ self._host_cyglog_dir = os.path.join(
+ output_directory, 'cyglog_data')
+ self._SetUpDevice()
+
+ def RunCygprofileTests(self):
+ """Run the cygprofile unit tests suite on the device.
+
+ Args:
+ path_to_tests: The location on the host machine with the compiled
+ cygprofile test binary.
+ Returns:
+ The exit code for the tests.
+ """
+ device_path = '/data/local/tmp/cygprofile_unittests'
+ self._device.PushChangedFiles([(self._cygprofile_tests, device_path)])
+ try:
+ self._device.RunShellCommand(device_path, check_return=True)
+ except device_errors.CommandFailedError:
+ # TODO(jbudorick): Let the exception propagate up once clients can
+ # handle it.
+ logging.exception('Failure while running cygprofile_unittests:')
+ return 1
+ return 0
+
+ def CollectProfile(self, apk, package_info):
+ """Run a profile and collect the log files.
+
+ Args:
+ apk: The location of the chrome apk to profile.
+ package_info: A PackageInfo structure describing the chrome apk,
+ as from pylib/constants.
+ Returns:
+ A list of cygprofile data files.
+ Raises:
+ NoCyglogDataError: No data was found on the device.
+ """
+ self._Install(apk)
+ try:
+ changer = self._SetChromeFlags(package_info)
+ self._SetUpDeviceFolders()
+ # Start up chrome once with a blank page, just to get the one-off
+ # activities out of the way such as apk resource extraction and profile
+ # creation.
+ self._StartChrome(package_info, 'about:blank')
+ time.sleep(15)
+ self._KillChrome(package_info)
+ self._SetUpDeviceFolders()
+ with WprManager(self._WPR_ARCHIVE, self._device,
+ package_info.cmdline_file):
+ self._StartChrome(package_info, self._TEST_URL)
+ time.sleep(90)
+ self._KillChrome(package_info)
+ finally:
+ self._RestoreChromeFlags(changer)
+
+ data = self._PullCyglogData()
+ self._DeleteDeviceData()
+ return data
+
+ def Cleanup(self):
+ """Delete all local and device files left over from profiling. """
+ self._DeleteDeviceData()
+ self._DeleteHostData()
+
+ def _Install(self, apk):
+ """Installs Chrome.apk on the device.
+ Args:
+ apk: The location of the chrome apk to profile.
+ package_info: A PackageInfo structure describing the chrome apk,
+ as from pylib/constants.
+ """
+ print 'Installing apk...'
+ self._device.Install(apk)
+
+ def _SetUpDevice(self):
+ """When profiling, files are output to the disk by every process. This
+ means running without sandboxing enabled.
+ """
+ # We need to have adb root in order to pull cyglog data
+ try:
+ print 'Enabling root...'
+ self._device.EnableRoot()
+ # SELinux need to be in permissive mode, otherwise the process cannot
+ # write the log files.
+ print 'Putting SELinux in permissive mode...'
+ self._device.RunShellCommand(['setenforce', '0'], check_return=True)
+ except device_errors.CommandFailedError as e:
+ # TODO(jbudorick) Handle this exception appropriately once interface
+ # conversions are finished.
+ logging.error(str(e))
+
+ def _SetChromeFlags(self, package_info):
+ print 'Setting Chrome flags...'
+ changer = flag_changer.FlagChanger(
+ self._device, package_info.cmdline_file)
+ changer.AddFlags(['--no-sandbox', '--disable-fre'])
+ return changer
+
+ def _RestoreChromeFlags(self, changer):
+ print 'Restoring Chrome flags...'
+ if changer:
+ changer.Restore()
+
+ def _SetUpDeviceFolders(self):
+ """Creates folders on the device to store cyglog data. """
+ print 'Setting up device folders...'
+ self._DeleteDeviceData()
+ self._device.RunShellCommand(
+ ['mkdir', '-p', str(self._DEVICE_CYGLOG_DIR)],
+ check_return=True)
+
+ def _DeleteDeviceData(self):
+ """Clears out cyglog storage locations on the device. """
+ self._device.RunShellCommand(
+ ['rm', '-rf', str(self._DEVICE_CYGLOG_DIR)],
+ check_return=True)
+
+ def _StartChrome(self, package_info, url):
+ print 'Launching chrome...'
+ self._device.StartActivity(
+ intent.Intent(package=package_info.package,
+ activity=package_info.activity,
+ data=url,
+ extras={'create_new_tab' : True}),
+ blocking=True, force_stop=True)
+
+ def _KillChrome(self, package_info):
+ self._device.KillAll(package_info.package)
+
+ def _DeleteHostData(self):
+ """Clears out cyglog storage locations on the host."""
+ shutil.rmtree(self._host_cyglog_dir, ignore_errors=True)
+
+ def _SetUpHostFolders(self):
+ self._DeleteHostData()
+ os.mkdir(self._host_cyglog_dir)
+
+ def _PullCyglogData(self):
+ """Pull the cyglog data off of the device.
+
+ Returns:
+ A list of cyglog data files which were pulled.
+ Raises:
+ NoCyglogDataError: No data was found on the device.
+ """
+ print 'Pulling cyglog data...'
+ self._SetUpHostFolders()
+ self._device.PullFile(
+ self._DEVICE_CYGLOG_DIR, self._host_cyglog_dir)
+ files = os.listdir(self._host_cyglog_dir)
+
+ if len(files) == 0:
+ raise NoCyglogDataError('No cyglog data was collected')
+
+ return [os.path.join(self._host_cyglog_dir, x) for x in files]
diff --git a/chromium/tools/cygprofile/run_tests b/chromium/tools/cygprofile/run_tests
new file mode 100755
index 00000000000..70eb64981ab
--- /dev/null
+++ b/chromium/tools/cygprofile/run_tests
@@ -0,0 +1,25 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import sys
+import unittest
+
+
+if __name__ == '__main__':
+ logging.basicConfig(
+ level=logging.DEBUG if '-v' in sys.argv else logging.WARNING,
+ format='%(levelname)5s %(filename)15s(%(lineno)3d): %(message)s')
+
+ suite = unittest.TestSuite()
+ loader = unittest.TestLoader()
+ suite.addTests(loader.discover(start_dir=os.path.dirname(__file__),
+ pattern='*_unittest.py'))
+ res = unittest.TextTestRunner(verbosity=2).run(suite)
+ if res.wasSuccessful():
+ sys.exit(0)
+ else:
+ sys.exit(1)
diff --git a/chromium/tools/cygprofile/symbol_extractor.py b/chromium/tools/cygprofile/symbol_extractor.py
new file mode 100755
index 00000000000..452548e3e78
--- /dev/null
+++ b/chromium/tools/cygprofile/symbol_extractor.py
@@ -0,0 +1,165 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities to get and manipulate symbols from a binary."""
+
+import collections
+import logging
+import os
+import re
+import subprocess
+import sys
+
+import cygprofile_utils
+
+sys.path.insert(
+ 0, os.path.join(os.path.dirname(__file__), os.pardir, os.pardir,
+ 'third_party', 'android_platform', 'development',
+ 'scripts'))
+import symbol
+
+_MAX_WARNINGS_TO_PRINT = 200
+
+SymbolInfo = collections.namedtuple('SymbolInfo', ('name', 'offset', 'size',
+ 'section'))
+
+def SetArchitecture(arch):
+  """Set the architecture for binaries to be symbolized.
+
+  Args:
+    arch: architecture name, stored into the symbol module's global ARCH so
+        that subsequent toolchain lookups use the matching binaries.
+  """
+  symbol.ARCH = arch
+
+
+def _FromObjdumpLine(line):
+ """Create a SymbolInfo by parsing a properly formatted objdump output line.
+
+ Args:
+ line: line from objdump
+
+ Returns:
+ An instance of SymbolInfo if the line represents a symbol, None otherwise.
+ """
+ # All of the symbol lines we care about are in the form
+ # 0000000000 g F .text.foo 000000000 [.hidden] foo
+ # where g (global) might also be l (local) or w (weak).
+ parts = line.split()
+ if len(parts) < 6 or parts[2] != 'F':
+ return None
+
+ assert len(parts) == 6 or (len(parts) == 7 and parts[5] == '.hidden')
+ accepted_scopes = set(['g', 'l', 'w'])
+ assert parts[1] in accepted_scopes
+
+ offset = int(parts[0], 16)
+ section = parts[3]
+ size = int(parts[4], 16)
+ name = parts[-1].rstrip('\n')
+ # Forbid ARM mapping symbols and other unexpected symbol names, but allow $
+ # characters in a non-initial position, which can appear as a component of a
+ # mangled name, e.g. Clang can mangle a lambda function to:
+ # 02cd61e0 l F .text 000000c0 _ZZL11get_globalsvENK3$_1clEv
+ # The equivalent objdump line from GCC is:
+ # 0325c58c l F .text 000000d0 _ZZL11get_globalsvENKUlvE_clEv
+ assert re.match('^[a-zA-Z0-9_.][a-zA-Z0-9_.$]*$', name)
+ return SymbolInfo(name=name, offset=offset, section=section, size=size)
+
+
+def _SymbolInfosFromStream(objdump_lines):
+ """Parses the output of objdump, and get all the symbols from a binary.
+
+ Args:
+ objdump_lines: An iterable of lines
+
+ Returns:
+ A list of SymbolInfo.
+ """
+ symbol_infos = []
+ for line in objdump_lines:
+ symbol_info = _FromObjdumpLine(line)
+ if symbol_info is not None:
+ symbol_infos.append(symbol_info)
+ return symbol_infos
+
+
+def SymbolInfosFromBinary(binary_filename):
+ """Runs objdump to get all the symbols from a binary.
+
+ Args:
+ binary_filename: path to the binary.
+
+ Returns:
+ A list of SymbolInfo from the binary.
+ """
+ command = (symbol.ToolPath('objdump'), '-t', '-w', binary_filename)
+ p = subprocess.Popen(command, shell=False, stdout=subprocess.PIPE)
+ try:
+ result = _SymbolInfosFromStream(p.stdout)
+ return result
+ finally:
+ p.stdout.close()
+ p.wait()
+
+
+def GroupSymbolInfosByOffset(symbol_infos):
+ """Create a dict {offset: [symbol_info1, ...], ...}.
+
+ As several symbols can be at the same offset, this is a 1-to-many
+ relationship.
+
+ Args:
+ symbol_infos: iterable of SymbolInfo instances
+
+ Returns:
+ a dict {offset: [symbol_info1, ...], ...}
+ """
+ offset_to_symbol_infos = collections.defaultdict(list)
+ for symbol_info in symbol_infos:
+ offset_to_symbol_infos[symbol_info.offset].append(symbol_info)
+ return dict(offset_to_symbol_infos)
+
+def GroupSymbolInfosByName(symbol_infos):
+ """Create a dict {name: [symbol_info1, ...], ...}.
+
+ A symbol can have several offsets, this is a 1-to-many relationship.
+
+ Args:
+ symbol_infos: iterable of SymbolInfo instances
+
+ Returns:
+ a dict {name: [symbol_info1, ...], ...}
+ """
+ name_to_symbol_infos = collections.defaultdict(list)
+ for symbol_info in symbol_infos:
+ name_to_symbol_infos[symbol_info.name].append(symbol_info)
+ return dict(name_to_symbol_infos)
+
+def CreateNameToSymbolInfo(symbol_infos):
+ """Create a dict {name: symbol_info, ...}.
+
+ Args:
+ symbol_infos: iterable of SymbolInfo instances
+
+ Returns:
+ a dict {name: symbol_info, ...}
+ If a symbol name corresponds to more than one symbol_info, the symbol_info
+ with the lowest offset is chosen.
+ """
+ # TODO(lizeb,pasko): move the functionality in this method into
+ # check_orderfile.
+ symbol_infos_by_name = {}
+ warnings = cygprofile_utils.WarningCollector(_MAX_WARNINGS_TO_PRINT)
+ for infos in GroupSymbolInfosByName(symbol_infos).itervalues():
+ first_symbol_info = min(infos, key=lambda x:x.offset)
+ symbol_infos_by_name[first_symbol_info.name] = first_symbol_info
+ if len(infos) > 1:
+ warnings.Write('Symbol %s appears at %d offsets: %s' %
+ (first_symbol_info.name,
+ len(infos),
+ ','.join([hex(x.offset) for x in infos])))
+ warnings.WriteEnd('symbols at multiple offsets.')
+ return symbol_infos_by_name
+
+
+def DemangleSymbol(mangled_symbol):
+  """Return the demangled form of mangled_symbol.
+
+  Args:
+    mangled_symbol: a mangled symbol name.
+
+  Returns:
+    The demangled name, as produced by the symbol module's c++filt wrapper.
+  """
+  return symbol.CallCppFilt(mangled_symbol)
diff --git a/chromium/tools/cygprofile/symbol_extractor_unittest.py b/chromium/tools/cygprofile/symbol_extractor_unittest.py
new file mode 100755
index 00000000000..bd2db4a356c
--- /dev/null
+++ b/chromium/tools/cygprofile/symbol_extractor_unittest.py
@@ -0,0 +1,135 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import symbol_extractor
+import unittest
+
+class TestSymbolInfo(unittest.TestCase):
+  """Tests for symbol_extractor._FromObjdumpLine parsing."""
+
+  def testIgnoresBlankLine(self):
+    symbol_info = symbol_extractor._FromObjdumpLine('')
+    self.assertIsNone(symbol_info)
+
+  def testIgnoresMalformedLine(self):
+    # This line is too short.
+    line = ('00c1b228 F .text 00000060 _ZN20trace_event')
+    symbol_info = symbol_extractor._FromObjdumpLine(line)
+    self.assertIsNone(symbol_info)
+    # This line has the wrong marker.
+    line = '00c1b228 l f .text 00000060 _ZN20trace_event'
+    symbol_info = symbol_extractor._FromObjdumpLine(line)
+    self.assertIsNone(symbol_info)
+
+  def testAssertionErrorOnInvalidLines(self):
+    # This line has an invalid scope.
+    line = ('00c1b228 z F .text 00000060 _ZN20trace_event')
+    self.assertRaises(AssertionError, symbol_extractor._FromObjdumpLine, line)
+    # This line has too many fields.
+    line = ('00c1b228 l F .text 00000060 _ZN20trace_event too many')
+    self.assertRaises(AssertionError, symbol_extractor._FromObjdumpLine, line)
+    # This line has invalid characters in the symbol.
+    line = ('00c1b228 l F .text 00000060 _ZN20trace_?bad')
+    self.assertRaises(AssertionError, symbol_extractor._FromObjdumpLine, line)
+    # This line has an invalid character at the start of the symbol name.
+    line = ('00c1b228 l F .text 00000060 $_ZN20trace_bad')
+    self.assertRaises(AssertionError, symbol_extractor._FromObjdumpLine, line)
+
+  def testSymbolInfo(self):
+    line = ('00c1c05c l F .text 0000002c '
+            '_GLOBAL__sub_I_chrome_main_delegate.cc')
+    test_name = '_GLOBAL__sub_I_chrome_main_delegate.cc'
+    test_offset = 0x00c1c05c
+    test_size = 0x2c
+    test_section = '.text'
+    symbol_info = symbol_extractor._FromObjdumpLine(line)
+    self.assertIsNotNone(symbol_info)
+    self.assertEquals(test_offset, symbol_info.offset)
+    self.assertEquals(test_size, symbol_info.size)
+    self.assertEquals(test_name, symbol_info.name)
+    self.assertEquals(test_section, symbol_info.section)
+
+  def testHiddenSymbol(self):
+    # The optional '.hidden' annotation must not confuse the parser.
+    line = ('00c1c05c l F .text 0000002c '
+            '.hidden _GLOBAL__sub_I_chrome_main_delegate.cc')
+    test_name = '_GLOBAL__sub_I_chrome_main_delegate.cc'
+    test_offset = 0x00c1c05c
+    test_size = 0x2c
+    test_section = '.text'
+    symbol_info = symbol_extractor._FromObjdumpLine(line)
+    self.assertIsNotNone(symbol_info)
+    self.assertEquals(test_offset, symbol_info.offset)
+    self.assertEquals(test_size, symbol_info.size)
+    self.assertEquals(test_name, symbol_info.name)
+    self.assertEquals(test_section, symbol_info.section)
+
+  def testDollarInSymbolName(self):
+    # A $ character elsewhere in the symbol name is fine.
+    # This is an example of a lambda function name from Clang.
+    line = ('00c1b228 l F .text 00000060 _ZZL11get_globalsvENK3$_1clEv')
+    symbol_info = symbol_extractor._FromObjdumpLine(line)
+    self.assertIsNotNone(symbol_info)
+    self.assertEquals(0xc1b228, symbol_info.offset)
+    self.assertEquals(0x60, symbol_info.size)
+    self.assertEquals('_ZZL11get_globalsvENK3$_1clEv', symbol_info.name)
+    self.assertEquals('.text', symbol_info.section)
+
+
+class TestSymbolInfosFromStream(unittest.TestCase):
+ def testSymbolInfosFromStream(self):
+ lines = ['Garbage',
+ '',
+ '00c1c05c l F .text 0000002c first',
+ ''
+ 'more garbage',
+ '00155 g F .text 00000012 second']
+ symbol_infos = symbol_extractor._SymbolInfosFromStream(lines)
+ self.assertEquals(len(symbol_infos), 2)
+ first = symbol_extractor.SymbolInfo('first', 0x00c1c05c, 0x2c, '.text')
+ self.assertEquals(first, symbol_infos[0])
+ second = symbol_extractor.SymbolInfo('second', 0x00155, 0x12, '.text')
+ self.assertEquals(second, symbol_infos[1])
+
+
+class TestSymbolInfoMappings(unittest.TestCase):
+ def setUp(self):
+ self.symbol_infos = [
+ symbol_extractor.SymbolInfo('firstNameAtOffset', 0x42, 42, '.text'),
+ symbol_extractor.SymbolInfo('secondNameAtOffset', 0x42, 42, '.text'),
+ symbol_extractor.SymbolInfo('thirdSymbol', 0x64, 20, '.text')]
+
+ def testGroupSymbolInfosByOffset(self):
+ offset_to_symbol_info = symbol_extractor.GroupSymbolInfosByOffset(
+ self.symbol_infos)
+ self.assertEquals(len(offset_to_symbol_info), 2)
+ self.assertIn(0x42, offset_to_symbol_info)
+ self.assertEquals(offset_to_symbol_info[0x42][0], self.symbol_infos[0])
+ self.assertEquals(offset_to_symbol_info[0x42][1], self.symbol_infos[1])
+ self.assertIn(0x64, offset_to_symbol_info)
+ self.assertEquals(offset_to_symbol_info[0x64][0], self.symbol_infos[2])
+
+ def testCreateNameToSymbolInfo(self):
+ name_to_symbol_info = symbol_extractor.CreateNameToSymbolInfo(
+ self.symbol_infos)
+ self.assertEquals(len(name_to_symbol_info), 3)
+ for i in range(3):
+ name = self.symbol_infos[i].name
+ self.assertIn(name, name_to_symbol_info)
+ self.assertEquals(self.symbol_infos[i], name_to_symbol_info[name])
+
+ def testSymbolCollisions(self):
+ symbol_infos_with_collision = list(self.symbol_infos)
+ symbol_infos_with_collision.append(symbol_extractor.SymbolInfo(
+ 'secondNameAtOffset', 0x84, 42, '.text'))
+
+ # The symbol added above should not affect the output.
+ name_to_symbol_info = symbol_extractor.CreateNameToSymbolInfo(
+ self.symbol_infos)
+ self.assertEquals(len(name_to_symbol_info), 3)
+ for i in range(3):
+ name = self.symbol_infos[i].name
+ self.assertIn(name, name_to_symbol_info)
+ self.assertEquals(self.symbol_infos[i], name_to_symbol_info[name])
+
+# Run all tests in this module when executed directly.
+if __name__ == '__main__':
+  unittest.main()
diff --git a/chromium/tools/diagnose-me.py b/chromium/tools/diagnose-me.py
new file mode 100755
index 00000000000..970da8a52ae
--- /dev/null
+++ b/chromium/tools/diagnose-me.py
@@ -0,0 +1,109 @@
+#!/usr/bin/python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Diagnose some common system configuration problems on Linux, and
+suggest fixes."""
+
+import os
+import subprocess
+import sys
+
+all_checks = []
+
+def Check(name):
+ """Decorator that defines a diagnostic check."""
+ def wrap(func):
+ all_checks.append((name, func))
+ return func
+ return wrap
+
+
+@Check("/usr/bin/ld is not gold")
+def CheckSystemLd():
+ proc = subprocess.Popen(['/usr/bin/ld', '-v'], stdout=subprocess.PIPE)
+ stdout = proc.communicate()[0]
+ if 'GNU gold' in stdout:
+ return ("When /usr/bin/ld is gold, system updates can silently\n"
+ "corrupt your graphics drivers.\n"
+ "Try 'sudo apt-get remove binutils-gold'.\n")
+ return None
+
+
+@Check("random lds are not in the $PATH")
+def CheckPathLd():
+ proc = subprocess.Popen(['which', '-a', 'ld'], stdout=subprocess.PIPE)
+ stdout = proc.communicate()[0]
+ instances = stdout.split()
+ if len(instances) > 1:
+ return ("You have multiple 'ld' binaries in your $PATH:\n"
+ + '\n'.join(' - ' + i for i in instances) + "\n"
+ "You should delete all of them but your system one.\n"
+ "gold is hooked into your build via gyp.\n")
+ return None
+
+
+@Check("/usr/bin/ld doesn't point to gold")
+def CheckLocalGold():
+ # Check /usr/bin/ld* symlinks.
+ for path in ('ld.bfd', 'ld'):
+ path = '/usr/bin/' + path
+ try:
+ target = os.readlink(path)
+ except OSError, e:
+ if e.errno == 2:
+ continue # No such file
+ if e.errno == 22:
+ continue # Not a symlink
+ raise
+ if '/usr/local/gold' in target:
+ return ("%s is a symlink into /usr/local/gold.\n"
+ "It's difficult to make a recommendation, because you\n"
+ "probably set this up yourself. But you should make\n"
+ "/usr/bin/ld be the standard linker, which you likely\n"
+ "renamed /usr/bin/ld.bfd or something like that.\n" % path)
+
+ return None
+
+
+@Check("random ninja binaries are not in the $PATH")
+def CheckPathNinja():
+ proc = subprocess.Popen(['which', 'ninja'], stdout=subprocess.PIPE)
+ stdout = proc.communicate()[0]
+ if not 'depot_tools' in stdout:
+ return ("The ninja binary in your path isn't from depot_tools:\n"
+ + " " + stdout +
+ "Remove custom ninjas from your path so that the one\n"
+ "in depot_tools is used.\n")
+ return None
+
+
+@Check("build dependencies are satisfied")
+def CheckBuildDeps():
+ script_path = os.path.join(
+ os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'build',
+ 'install-build-deps.sh')
+ proc = subprocess.Popen([script_path, '--quick-check'],
+ stdout=subprocess.PIPE)
+ stdout = proc.communicate()[0]
+ if 'WARNING' in stdout:
+ return ("Your build dependencies are out-of-date.\n"
+ "Run '" + script_path + "' to update.")
+ return None
+
+
+def RunChecks():
+ for name, check in all_checks:
+ sys.stdout.write("* Checking %s: " % name)
+ sys.stdout.flush()
+ error = check()
+ if not error:
+ print "ok"
+ else:
+ print "FAIL"
+ print error
+
+
+# Entry point: run all registered diagnostics.
+if __name__ == '__main__':
+  RunChecks()
diff --git a/chromium/tools/dromaeo_benchmark_runner/dromaeo_benchmark_runner.py b/chromium/tools/dromaeo_benchmark_runner/dromaeo_benchmark_runner.py
new file mode 100755
index 00000000000..5c4be81c37d
--- /dev/null
+++ b/chromium/tools/dromaeo_benchmark_runner/dromaeo_benchmark_runner.py
@@ -0,0 +1,266 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Dromaeo benchmark automation script.
+
+Script runs dromaeo tests in browsers specified by --browser switch and saves
+results to a spreadsheet on docs.google.com.
+
+Prerequisites:
+1. Install Google Data APIs Python Client Library from
+ http://code.google.com/p/gdata-python-client.
+2. Checkout Dromaeo benchmark from
+ http://src.chromium.org/svn/trunk/src/chrome/test/data/dromaeo and provide
+ local path to it in --dromaeo_home switch.
+3. Create a spreadsheet at http://docs.google.com and specify its name in
+ --spreadsheet switch
+
+Benchmark results are presented in the following format:
+browser | date time
+test 1 name|m11|...|m1n|test 1 average mean| |e11|...|e1n|test 1 average error
+test 2 name|m21|...|m2n|test 2 average mean| |e21|...|e2n|test 2 average error
+...
+
+Here mij is mean run/s in individual dromaeo test i during benchmark run j,
+eij is error in individual dromaeo test i during benchmark run j.
+
+Example usage:
+dromaeo_benchmark_runner.py -b "E:\chromium\src\chrome\Release\chrome.exe"
+ -b "C:\Program Files (x86)\Safari\safari.exe"
+ -b "C:\Program Files (x86)\Opera 10.50 pre-alpha\opera.exe" -n 1
+ -d "E:\chromium\src\chrome\test\data\dromaeo" -f dom -e example@gmail.com
+
+"""
+
+import getpass
+import json
+import os
+import re
+import subprocess
+import time
+import urlparse
+from optparse import OptionParser
+from BaseHTTPServer import HTTPServer
+import SimpleHTTPServer
+import gdata.spreadsheet.service
+
+max_spreadsheet_columns = 20
+test_props = ['mean', 'error']
+
+
+def ParseArguments():
+ parser = OptionParser()
+ parser.add_option("-b", "--browser",
+ action="append", dest="browsers",
+ help="list of browsers to test")
+ parser.add_option("-n", "--run_count", dest="run_count", type="int",
+ default=5, help="number of runs")
+ parser.add_option("-d", "--dromaeo_home", dest="dromaeo_home",
+ help="directory with your dromaeo files")
+ parser.add_option("-p", "--port", dest="port", type="int",
+ default=8080, help="http server port")
+ parser.add_option("-f", "--filter", dest="filter",
+ default="dom", help="dromaeo suite filter")
+ parser.add_option("-e", "--email", dest="email",
+ help="your google docs account")
+ parser.add_option("-s", "--spreadsheet", dest="spreadsheet_title",
+ default="dromaeo",
+ help="your google docs spreadsheet name")
+
+ options = parser.parse_args()[0]
+
+ if not options.dromaeo_home:
+ raise Exception('please specify dromaeo_home')
+
+ return options
+
+
+def KillProcessByName(process_name):
+ process = subprocess.Popen('wmic process get processid, executablepath',
+ stdout=subprocess.PIPE)
+ stdout = str(process.communicate()[0])
+ match = re.search(re.escape(process_name) + '\s+(\d+)', stdout)
+ if match:
+ pid = match.group(1)
+ subprocess.call('taskkill /pid %s' % pid)
+
+
+class SpreadsheetWriter(object):
+ "Utility class for storing benchmarking results in Google spreadsheets."
+
+ def __init__(self, email, spreadsheet_title):
+ '''Login to google docs and search for spreadsheet'''
+
+ self.token_file = os.path.expanduser("~/.dromaeo_bot_auth_token")
+ self.gd_client = gdata.spreadsheet.service.SpreadsheetsService()
+
+ authenticated = False
+ if os.path.exists(self.token_file):
+ token = ''
+ try:
+ file = open(self.token_file, 'r')
+ token = file.read()
+ file.close()
+ self.gd_client.SetClientLoginToken(token)
+ self.gd_client.GetSpreadsheetsFeed()
+ authenticated = True
+ except (IOError, gdata.service.RequestError):
+ pass
+ if not authenticated:
+ self.gd_client.email = email
+ self.gd_client.password = getpass.getpass('Password for %s: ' % email)
+ self.gd_client.source = 'python robot for dromaeo'
+ self.gd_client.ProgrammaticLogin()
+ token = self.gd_client.GetClientLoginToken()
+ try:
+ file = open(self.token_file, 'w')
+ file.write(token)
+ file.close()
+ except (IOError):
+ pass
+ os.chmod(self.token_file, 0600)
+
+ # Search for the spreadsheet with title = spreadsheet_title.
+ spreadsheet_feed = self.gd_client.GetSpreadsheetsFeed()
+ for spreadsheet in spreadsheet_feed.entry:
+ if spreadsheet.title.text == spreadsheet_title:
+ self.spreadsheet_key = spreadsheet.id.text.rsplit('/', 1)[1]
+ if not self.spreadsheet_key:
+ raise Exception('Spreadsheet %s not found' % spreadsheet_title)
+
+ # Get the key of the first worksheet in spreadsheet.
+ worksheet_feed = self.gd_client.GetWorksheetsFeed(self.spreadsheet_key)
+ self.worksheet_key = worksheet_feed.entry[0].id.text.rsplit('/', 1)[1]
+
+ def _InsertRow(self, row):
+ row = dict([('c' + str(i), row[i]) for i in xrange(len(row))])
+ self.gd_client.InsertRow(row, self.spreadsheet_key, self.worksheet_key)
+
+ def _InsertBlankRow(self):
+ self._InsertRow('-' * self.columns_count)
+
+ def PrepareSpreadsheet(self, run_count):
+ """Update cells in worksheet topmost row with service information.
+
+ Calculate column count corresponding to run_count and create worksheet
+ column titles [c0, c1, ...] in the topmost row to speed up spreadsheet
+ updates (it allows to insert a whole row with a single request)
+ """
+
+ # Calculate the number of columns we need to present all test results.
+ self.columns_count = (run_count + 2) * len(test_props)
+ if self.columns_count > max_spreadsheet_columns:
+ # Google spreadsheet has just max_spreadsheet_columns columns.
+ max_run_count = max_spreadsheet_columns / len(test_props) - 2
+ raise Exception('maximum run count is %i' % max_run_count)
+ # Create worksheet column titles [c0, c1, ..., cn].
+ for i in xrange(self.columns_count):
+ self.gd_client.UpdateCell(1, i + 1, 'c' + str(i), self.spreadsheet_key,
+ self.worksheet_key)
+
+ def WriteColumnTitles(self, run_count):
+ "Create titles for test results (mean 1, mean 2, ..., average mean, ...)"
+ row = []
+ for prop in test_props:
+ row.append('')
+ for i in xrange(run_count):
+ row.append('%s %i' % (prop, i + 1))
+ row.append('average ' + prop)
+ self._InsertRow(row)
+
+ def WriteBrowserBenchmarkTitle(self, browser_name):
+ "Create browser benchmark title (browser name, date time)"
+ self._InsertBlankRow()
+ self._InsertRow([browser_name, time.strftime('%d.%m.%Y %H:%M:%S')])
+
+ def WriteBrowserBenchmarkResults(self, test_name, test_data):
+ "Insert a row with single test results"
+ row = []
+ for prop in test_props:
+ if not row:
+ row.append(test_name)
+ else:
+ row.append('')
+ row.extend([str(x) for x in test_data[prop]])
+ row.append(str(sum(test_data[prop]) / len(test_data[prop])))
+ self._InsertRow(row)
+
+
+class DromaeoHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
+  """HTTP handler that captures the benchmark results dromaeo POSTs back.
+
+  The parsed results are stashed on the owning server object (got_post,
+  post_data) so the main loop can retrieve them after handle_request().
+  """
+
+  def do_POST(self):
+    # Acknowledge the POST so the browser does not hang waiting on us.
+    self.send_response(200)
+    self.end_headers()
+    self.wfile.write("<HTML>POST OK.<BR><BR>");
+    # Read the request body and parse it as URL-encoded form data.
+    length = int(self.headers.getheader('content-length'))
+    parameters = urlparse.parse_qs(self.rfile.read(length))
+    # Signal the main loop and hand over the 'data' field.
+    self.server.got_post = True
+    self.server.post_data = parameters['data']
+
+
+class BenchmarkResults(object):
+  """Storage class for dromaeo benchmark results."""
+
+  def __init__(self):
+    # Maps test name -> {property name -> list of per-run values}.
+    self.data = {}
+
+  def ProcessBrowserPostData(self, data):
+    """Convert dromaeo test results into the internal format.
+
+    Args:
+      data: list whose first element is a JSON string of test dicts,
+          as posted by the dromaeo page (see DromaeoHandler).
+    """
+    tests = json.loads(data[0])
+    for test in tests:
+      test_name = test['name']
+      if test_name not in self.data:
+        # Test is encountered for the first time.
+        self.data[test_name] = dict([(prop, []) for prop in test_props])
+      # Append current run results.
+      for prop in test_props:
+        value = -1
+        if prop in test: value = test[prop] # workaround for Opera 10.5
+        self.data[test_name][prop].append(value)
+
+
+def main():
+  """Serve dromaeo locally, run it in each browser, upload the results.
+
+  Returns:
+    0 on success, suitable for passing to sys.exit().
+  """
+  options = ParseArguments()
+
+  # Start a local HTTP server rooted at the dromaeo checkout.
+  os.chdir(options.dromaeo_home)
+  server = HTTPServer(('', options.port), DromaeoHandler)
+
+  # Open and prepare spreadsheet on google docs.
+  spreadsheet_writer = SpreadsheetWriter(options.email,
+                                         options.spreadsheet_title)
+  spreadsheet_writer.PrepareSpreadsheet(options.run_count)
+  spreadsheet_writer.WriteColumnTitles(options.run_count)
+
+  for browser in options.browsers:
+    browser_name = os.path.splitext(os.path.basename(browser))[0]
+    spreadsheet_writer.WriteBrowserBenchmarkTitle(browser_name)
+    benchmark_results = BenchmarkResults()
+    for run_number in xrange(options.run_count):
+      print '%s run %i' % (browser_name, run_number + 1)
+      # Run browser pointed at the locally-served dromaeo test page.
+      test_page = 'http://localhost:%i/index.html?%s&automated&post_json' % (
+          options.port, options.filter)
+      browser_process = subprocess.Popen('%s "%s"' % (browser, test_page))
+      server.got_post = False
+      server.post_data = None
+      # Wait until POST request from browser (DromaeoHandler sets got_post).
+      while not server.got_post:
+        server.handle_request()
+      benchmark_results.ProcessBrowserPostData(server.post_data)
+      # Kill browser.
+      KillProcessByName(browser)
+      browser_process.wait()
+
+    # Insert test results into spreadsheet.
+    for (test_name, test_data) in benchmark_results.data.iteritems():
+      spreadsheet_writer.WriteBrowserBenchmarkResults(test_name, test_data)
+
+  server.socket.close()
+  return 0
+
+
+# Script entry point.
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/chromium/tools/emacs/chrome-filetypes.el b/chromium/tools/emacs/chrome-filetypes.el
new file mode 100644
index 00000000000..14fc6bbaef3
--- /dev/null
+++ b/chromium/tools/emacs/chrome-filetypes.el
@@ -0,0 +1,16 @@
+; To get syntax highlighting and tab settings for gyp(i) files, add the
+; following to init.el:
+; (setq-default chrome-root "/path/to/chrome/src/")
+; (add-to-list 'load-path (concat chrome-root "tools/emacs"))
+; (require 'chrome-filetypes)
+
+; Gyp files use Python syntax, so derive from python-mode and force the
+; Chromium convention of 2-space, no-tab indentation.
+(define-derived-mode gyp-mode python-mode "Gyp"
+  "Major mode for editing Generate Your Project files."
+  (setq indent-tabs-mode nil
+        tab-width 2
+        python-indent 2))
+
+; Activate gyp-mode for both .gyp and .gypi files.
+(add-to-list 'auto-mode-alist '("\\.gyp$" . gyp-mode))
+(add-to-list 'auto-mode-alist '("\\.gypi$" . gyp-mode))
+
+(provide 'chrome-filetypes)
diff --git a/chromium/tools/emacs/flymake-chromium.el b/chromium/tools/emacs/flymake-chromium.el
new file mode 100644
index 00000000000..91aac410ff5
--- /dev/null
+++ b/chromium/tools/emacs/flymake-chromium.el
@@ -0,0 +1,129 @@
+;; Copyright (c) 2011 The Chromium Authors. All rights reserved.
+;; Use of this source code is governed by a BSD-style license that can be
+;; found in the LICENSE file.
+
+;; Set up flymake for use with chromium code. Uses ninja (since none of the
+;; other chromium build systems have latency that allows interactive use).
+;;
+;; Requires a modern emacs (GNU Emacs >= 23) and that gyp has already generated
+;; the build.ninja file(s). See defcustoms below for settable knobs.
+
+
+(require 'flymake)
+
+;; User-tunable knobs.
+(defcustom cr-flymake-ninja-build-file "out/Debug/build.ninja"
+  "Relative path from chromium's src/ directory to the
+  build.ninja file to use.")
+
+(defcustom cr-flymake-ninja-executable "ninja"
+  "Ninja executable location; either in $PATH or explicitly given.")
+
+;; Path helpers: locate the current buffer relative to chromium's src/ by
+;; searching upward for the configured build.ninja file.
+
+(defun cr-flymake-absbufferpath ()
+  "Return the absolute path to the current buffer, or nil if the
+  current buffer has no path."
+  (when buffer-file-truename
+    (expand-file-name buffer-file-truename)))
+
+(defun cr-flymake-chromium-src ()
+  "Return chromium's src/ directory, or nil on failure."
+  (let ((srcdir (locate-dominating-file
+                 (cr-flymake-absbufferpath) cr-flymake-ninja-build-file)))
+    (when srcdir (expand-file-name srcdir))))
+
+(defun cr-flymake-string-prefix-p (prefix str)
+  "Return non-nil if PREFIX is a prefix of STR (23.2 has string-prefix-p but
+  that's case insensitive and also 23.1 doesn't have it)."
+  (string= prefix (substring str 0 (length prefix))))
+
+(defun cr-flymake-current-file-name ()
+  "Return the relative path from chromium's src/ directory to the
+  file backing the current buffer or nil if it doesn't look like
+  we're under chromium/src/."
+  (when (and (cr-flymake-chromium-src)
+             (cr-flymake-string-prefix-p
+              (cr-flymake-chromium-src) (cr-flymake-absbufferpath)))
+    (substring (cr-flymake-absbufferpath) (length (cr-flymake-chromium-src)))))
+
+(defun cr-flymake-from-build-to-src-root ()
+  "Return a path fragment for getting from the build.ninja file to src/."
+  ;; Turn each path component between src/ and build.ninja into "..".
+  (replace-regexp-in-string
+   "[^/]+" ".."
+   (substring
+    (file-name-directory
+     (file-truename (or (and (cr-flymake-string-prefix-p
+                              "/" cr-flymake-ninja-build-file)
+                             cr-flymake-ninja-build-file)
+                        (concat (cr-flymake-chromium-src)
+                                cr-flymake-ninja-build-file))))
+    (length (cr-flymake-chromium-src)))))
+
+(defun cr-flymake-getfname (file-name-from-error-message)
+  "Strip cruft from the passed-in filename to help flymake find the real file."
+  (file-name-nondirectory file-name-from-error-message))
+
+;; Build-command construction and error navigation.
+
+(defun cr-flymake-ninja-command-line ()
+  "Return the command-line for running ninja, as a list of strings, or nil if
+  we're not during a save"
+  (unless (buffer-modified-p)
+    (list cr-flymake-ninja-executable
+          (list "-C"
+                (concat (cr-flymake-chromium-src)
+                        (file-name-directory cr-flymake-ninja-build-file))
+                ;; "file^" asks ninja to build whatever consumes this file.
+                (concat (cr-flymake-from-build-to-src-root)
+                        (cr-flymake-current-file-name) "^")))))
+
+(defun cr-flymake-kick-off-check-after-save ()
+  "Kick off a syntax check after file save, if flymake-mode is on."
+  (when flymake-mode (flymake-start-syntax-check)))
+
+(defadvice next-error (around cr-flymake-next-error activate)
+  "If flymake has something to say, let it say it; otherwise
+  revert to normal next-error behavior."
+  (if (not flymake-err-info)
+      (condition-case msg
+          ad-do-it
+        (error (message "%s" (prin1-to-string msg))))
+    (flymake-goto-next-error)
+    ;; copy/pasted from flymake-display-err-menu-for-current-line because I
+    ;; couldn't find a way to have it tell me what the relevant error for this
+    ;; line was in a single call:
+    (let* ((line-no (flymake-current-line-no))
+           (line-err-info-list
+            (nth 0 (flymake-find-err-info flymake-err-info line-no)))
+           (menu-data (flymake-make-err-menu-data line-no line-err-info-list)))
+      (prin1 (car (car (car (cdr menu-data)))) t))))
+
+(defun cr-flymake-find-file ()
+  "Enable flymake, but only if it makes sense, and immediately
+  disable timer-based execution."
+  (when (and (not flymake-mode)
+             (not buffer-read-only)
+             (cr-flymake-current-file-name))
+    ;; Since flymake-allowed-file-name-masks requires static regexps to match
+    ;; against, can't use cr-flymake-chromium-src here. Instead we add a
+    ;; generic regexp, but only to a buffer-local version of the variable.
+    (set (make-local-variable 'flymake-allowed-file-name-masks)
+         (list (list "\\.c\\(\\|c\\|pp\\)"
+                     'cr-flymake-ninja-command-line
+                     'ignore
+                     'cr-flymake-getfname)))
+    (flymake-find-file-hook)
+    ;; If flymake turned on, disable its idle timer (we only check on save);
+    ;; otherwise undo our buffer-local mask change.
+    (if flymake-mode
+        (cancel-timer flymake-timer)
+      (kill-local-variable 'flymake-allowed-file-name-masks))))
+
+(defun cr-compile ()
+  "Run the interactive compile command with the working directory
+  set to src/."
+  (interactive)
+  (let ((default-directory (cr-flymake-chromium-src)))
+    (call-interactively 'compile)))
+
+;; Install hooks: configure flymake on file open, re-check on every save.
+(add-hook 'find-file-hook 'cr-flymake-find-file 'append)
+(add-hook 'after-save-hook 'cr-flymake-kick-off-check-after-save)
+
+;; Show flymake infrastructure ERRORs in hopes of fixing them. Set to 3 for
+;; DEBUG-level output from flymake.el.
+(setq flymake-log-level 0)
+
+(provide 'flymake-chromium)
diff --git a/chromium/tools/emacs/trybot-linux.txt b/chromium/tools/emacs/trybot-linux.txt
new file mode 100644
index 00000000000..ad3ada8de5d
--- /dev/null
+++ b/chromium/tools/emacs/trybot-linux.txt
@@ -0,0 +1,6 @@
+A snippet of Linux trybot output (note UTF-8 quotes).
+
+ AR(target) out/Debug/obj.target/printing/libprinting.a
+app/l10n_util_unittest.cc: In member function ‘virtual void L10nUtilTest_TruncateString_Test::TestBody()’:
+app/l10n_util_unittest.cc:67: error: invalid initialization of reference of type ‘const string16&’ from expression of type ‘std::wstring’
+./app/l10n_util.h:166: error: in passing argument 1 of ‘string16 l10n_util::TruncateString(const string16&, size_t)’
diff --git a/chromium/tools/emacs/trybot-mac.txt b/chromium/tools/emacs/trybot-mac.txt
new file mode 100644
index 00000000000..d12efd5f1ca
--- /dev/null
+++ b/chromium/tools/emacs/trybot-mac.txt
@@ -0,0 +1,1985 @@
+/b/build/third_party/zope/__init__.py:19: UserWarning: Module twisted was already imported from /b/build/third_party/twisted_8_1/twisted/__init__.pyc, but /System/Library/Frameworks/Python.framework/Versions/2.6/Extras/lib/python is being added to sys.path
+ import pkg_resources
+/b/build/third_party/zope/__init__.py:19: UserWarning: Module zope was already imported from /b/build/third_party/zope/__init__.pyc, but /System/Library/Frameworks/Python.framework/Versions/2.6/Extras/lib/python is being added to sys.path
+ import pkg_resources
+
+pump xcodebuild -configuration Debug -project all.xcodeproj -buildhostsfile /b/build/scripts/slave/mac_distcc_hosts/golo_mini-10_6
+__________Using distcc-pump from /usr/bin
+__________Using 1 distcc server in pump mode
+
+=== BUILD AGGREGATE TARGET app_resources OF PROJECT app WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET app_strings OF PROJECT app WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"grit\"" ../xcodebuild/app.build/Debug/app_strings.build/Script-3555EADE2A4F7996024F949F.sh
+ cd /b/build/slave/mac/build/src/app
+ /bin/sh -c /b/build/slave/mac/build/src/app/../xcodebuild/app.build/Debug/app_strings.build/Script-3555EADE2A4F7996024F949F.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD AGGREGATE TARGET app_base Support OF PROJECT app WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET app_base OF PROJECT app WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET test_support_base OF PROJECT base WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET protobuf_full_do_not_use OF PROJECT protobuf WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET protobuf_lite OF PROJECT protobuf WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET protoc OF PROJECT protobuf WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Postbuild \"Strip If Needed\"" ../../xcodebuild/protobuf.build/Debug/protoc.build/Script-7AC9923F7CDD2087661C69AB.sh
+ cd /b/build/slave/mac/build/src/third_party/protobuf
+ /bin/sh -c /b/build/slave/mac/build/src/third_party/protobuf/../../xcodebuild/protobuf.build/Debug/protoc.build/Script-7AC9923F7CDD2087661C69AB.sh
+
+
+=== BUILD AGGREGATE TARGET sync_proto OF PROJECT sync_proto WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"genproto\"" ../../../../xcodebuild/sync_proto.build/Debug/sync_proto.build/Script-237DAA9DBF3380A9B7228EAF.sh
+ cd /b/build/slave/mac/build/src/chrome/browser/sync/protocol
+ /bin/sh -c /b/build/slave/mac/build/src/chrome/browser/sync/protocol/../../../../xcodebuild/sync_proto.build/Debug/sync_proto.build/Script-237DAA9DBF3380A9B7228EAF.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD AGGREGATE TARGET device_management_proto OF PROJECT device_management_proto WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"genproto\"" ../../../../xcodebuild/device_management_proto.build/Debug/device_management_proto.build/Script-7D8C1925B70AD3724821C80A.sh
+ cd /b/build/slave/mac/build/src/chrome/browser/policy/proto
+ /bin/sh -c /b/build/slave/mac/build/src/chrome/browser/policy/proto/../../../../xcodebuild/device_management_proto.build/Debug/device_management_proto.build/Script-7D8C1925B70AD3724821C80A.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD AGGREGATE TARGET py_proto OF PROJECT protobuf WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET net_test_support OF PROJECT net WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET skia OF PROJECT skia WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET gmock OF PROJECT gmock WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET gtest OF PROJECT gtest WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET icui18n OF PROJECT icu WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET icuuc OF PROJECT icu WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET libpng OF PROJECT libpng WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET zlib OF PROJECT zlib WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET libjpeg OF PROJECT libjpeg WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET base OF PROJECT base WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET modp_b64 OF PROJECT modp_b64 WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET dynamic_annotations OF PROJECT dynamic_annotations WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET nss OF PROJECT nss WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET nspr OF PROJECT nss WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET sqlite OF PROJECT sqlite WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET icudata OF PROJECT icu WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET libevent OF PROJECT libevent WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET base_i18n OF PROJECT base WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET gfx_resources OF PROJECT gfx WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET gfx OF PROJECT gfx WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET skia_opts OF PROJECT skia WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET net_resources OF PROJECT net WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"grit\"" ../xcodebuild/net.build/Debug/net_resources.build/Script-D3210C5A91652E9B8F9DF7BD.sh
+ cd /b/build/slave/mac/build/src/net
+ /bin/sh -c /b/build/slave/mac/build/src/net/../xcodebuild/net.build/Debug/net_resources.build/Script-D3210C5A91652E9B8F9DF7BD.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD AGGREGATE TARGET js2c OF PROJECT v8 WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET v8_nosnapshot OF PROJECT v8 WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET v8_base OF PROJECT v8 WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET mksnapshot OF PROJECT v8 WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Postbuild \"Strip If Needed\"" ../../../xcodebuild/v8.build/Debug/mksnapshot.build/Script-759699424E9CDB8D5A56B17D.sh
+ cd /b/build/slave/mac/build/src/v8/tools/gyp
+ /bin/sh -c /b/build/slave/mac/build/src/v8/tools/gyp/../../../xcodebuild/v8.build/Debug/mksnapshot.build/Script-759699424E9CDB8D5A56B17D.sh
+
+
+=== BUILD AGGREGATE TARGET v8_snapshot Support OF PROJECT v8 WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET v8_snapshot OF PROJECT v8 WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET v8 OF PROJECT v8 WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET net OF PROJECT net WITH CONFIGURATION Debug ===
+Check dependencies
+Distributed-CompileC ../xcodebuild/net.build/Debug/net.build/Objects-normal/i386/spdy_session.o spdy/spdy_session.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/net
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS localhost
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNO_NSPR_10_SUPPORT -DNSS_USE_STATIC_LIBS -DUSE_UTIL_DIRECTLY -DNSS_PLATFORM_CLIENT_AUTH -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/net/../xcodebuild/Debug -I/b/build/slave/mac/build/src/net/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. -I../sdch/open-vcdiff/src -I../third_party/zlib -I/b/build/slave/mac/build/src/net/../xcodebuild/DerivedSources/Debug/net -I../v8/include -I../third_party/nss/mozilla/nsprpub/pr/include -I../third_party/nss/mozilla/nsprpub/lib/ds -I../third_party/nss/mozilla/nsprpub/lib/libc/include -I../third_party/nss/mozilla/security/nss/lib/base -I../third_party/nss/mozilla/security/nss/lib/certdb -I../third_party/nss/mozilla/security/nss/lib/certhigh -I../third_party/nss/mozilla/security/nss/lib/cryptohi -I../third_party/nss/mozilla/security/nss/lib/dev -I../third_party/nss/mozilla/security/nss/lib/freebl -I../third_party/nss/mozilla/security/nss/lib/freebl/ecl -I../third_party/nss/mozilla/security/nss/lib/nss -I../third_party/nss/mozilla/security/nss/lib/pk11wrap -I../third_party/nss/mozilla/security/nss/lib/pkcs7 -I../third_party/nss/mozilla/security/nss/lib/pki -I../third_party/nss/mozilla/security/nss/lib/smime -I../third_party/nss/mozilla/security/nss/lib/softoken -I../third_party/nss/mozilla/security/nss/lib/util -Ithird_party/nss/ssl -I/b/build/slave/mac/build/src/net/../xcodebuild/net.build/Debug/net.build/DerivedSources/i386 
-I/b/build/slave/mac/build/src/net/../xcodebuild/net.build/Debug/net.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/net/spdy/spdy_session.cc -o /b/build/slave/mac/build/src/net/../xcodebuild/net.build/Debug/net.build/Objects-normal/i386/spdy_session.o
+
+Libtool ../xcodebuild/Debug/libnet.a normal i386
+ cd /b/build/slave/mac/build/src/net
+ setenv DISTCC_HOSTS "distcc4.golo.chromium.org:3632,lzo,cpp/0 distcc7.golo.chromium.org:3632,lzo,cpp/0 distcc8.golo.chromium.org:3632,lzo,cpp/0 distcc10.golo.chromium.org:3632,lzo,cpp/0 distcc6.golo.chromium.org:3632,lzo,cpp/0 distcc3.golo.chromium.org:3632,lzo,cpp/0 distcc2.golo.chromium.org:3632,lzo,cpp/0 distcc1.golo.chromium.org:3632,lzo,cpp/0 distcc9.golo.chromium.org:3632,lzo,cpp/0 distcc5.golo.chromium.org:3632,lzo,cpp/0"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv MACOSX_DEPLOYMENT_TARGET 10.5
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/libtool -static -arch_only i386 -syslibroot /Developer/SDKs/MacOSX10.5.sdk -L/b/build/slave/mac/build/src/net/../xcodebuild/Debug -filelist /b/build/slave/mac/build/src/net/../xcodebuild/net.build/Debug/net.build/Objects-normal/i386/net.LinkFileList -o /b/build/slave/mac/build/src/net/../xcodebuild/Debug/libnet.a
+
+
+=== BUILD NATIVE TARGET googleurl OF PROJECT googleurl WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET sdch OF PROJECT sdch WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET ssl_false_start_blacklist_process OF PROJECT net WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Postbuild \"Strip If Needed\"" ../xcodebuild/net.build/Debug/ssl_false_start_blacklist_process.build/Script-5AC333953C6BB680BA591B7D.sh
+ cd /b/build/slave/mac/build/src/net
+ /bin/sh -c /b/build/slave/mac/build/src/net/../xcodebuild/net.build/Debug/ssl_false_start_blacklist_process.build/Script-5AC333953C6BB680BA591B7D.sh
+
+
+=== BUILD AGGREGATE TARGET net_base Support OF PROJECT net WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET net_base OF PROJECT net WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET ssl_host_info Support OF PROJECT net WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"genproto\"" "../xcodebuild/net.build/Debug/ssl_host_info Support.build/Script-2ECF060B764178F116E30CEF.sh"
+ cd /b/build/slave/mac/build/src/net
+ /bin/sh -c "\"/b/build/slave/mac/build/src/net/../xcodebuild/net.build/Debug/ssl_host_info Support.build/Script-2ECF060B764178F116E30CEF.sh\""
+
+make: Nothing to be done for `all'.
+
+=== BUILD NATIVE TARGET ssl_host_info OF PROJECT net WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET ssl OF PROJECT ssl WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET app_unittests OF PROJECT app WITH CONFIGURATION Debug ===
+Check dependencies
+Ld ../xcodebuild/Debug/app_unittests normal i386
+ cd /b/build/slave/mac/build/src/app
+ setenv DISTCC_HOSTS "distcc4.golo.chromium.org:3632,lzo,cpp/0 distcc7.golo.chromium.org:3632,lzo,cpp/0 distcc8.golo.chromium.org:3632,lzo,cpp/0 distcc10.golo.chromium.org:3632,lzo,cpp/0 distcc6.golo.chromium.org:3632,lzo,cpp/0 distcc3.golo.chromium.org:3632,lzo,cpp/0 distcc2.golo.chromium.org:3632,lzo,cpp/0 distcc1.golo.chromium.org:3632,lzo,cpp/0 distcc9.golo.chromium.org:3632,lzo,cpp/0 distcc5.golo.chromium.org:3632,lzo,cpp/0"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv MACOSX_DEPLOYMENT_TARGET 10.5
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/g++-4.2 -arch i386 -isysroot /Developer/SDKs/MacOSX10.5.sdk -L/b/build/slave/mac/build/src/app/../xcodebuild/Debug -F/b/build/slave/mac/build/src/app/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -filelist /b/build/slave/mac/build/src/app/../xcodebuild/app.build/Debug/app_unittests.build/Objects-normal/i386/app_unittests.LinkFileList -mmacosx-version-min=10.5 -Wl,-search_paths_first -lapp_base /b/build/slave/mac/build/src/xcodebuild/Debug/libtest_support_base.a /b/build/slave/mac/build/src/xcodebuild/Debug/libnet_test_support.a /b/build/slave/mac/build/src/xcodebuild/Debug/libskia.a /b/build/slave/mac/build/src/xcodebuild/Debug/libgmock.a /b/build/slave/mac/build/src/xcodebuild/Debug/libgtest.a /b/build/slave/mac/build/src/xcodebuild/Debug/libicui18n.a /b/build/slave/mac/build/src/xcodebuild/Debug/libicuuc.a /b/build/slave/mac/build/src/xcodebuild/Debug/libpng.a /b/build/slave/mac/build/src/xcodebuild/Debug/libchrome_zlib.a /b/build/slave/mac/build/src/xcodebuild/Debug/libjpeg.a /b/build/slave/mac/build/src/xcodebuild/Debug/libbase.a /b/build/slave/mac/build/src/xcodebuild/Debug/libmodp_b64.a /b/build/slave/mac/build/src/xcodebuild/Debug/libdynamic_annotations.a /b/build/slave/mac/build/src/xcodebuild/Debug/libnss.a /b/build/slave/mac/build/src/xcodebuild/Debug/libnspr.a /b/build/slave/mac/build/src/xcodebuild/Debug/libsqlite3.a /b/build/slave/mac/build/src/xcodebuild/Debug/libicudata.a /b/build/slave/mac/build/src/xcodebuild/Debug/libevent.a /b/build/slave/mac/build/src/xcodebuild/Debug/libbase_i18n.a /b/build/slave/mac/build/src/xcodebuild/Debug/libgfx.a /b/build/slave/mac/build/src/xcodebuild/Debug/libskia_opts.a /b/build/slave/mac/build/src/xcodebuild/Debug/libnet.a /b/build/slave/mac/build/src/xcodebuild/Debug/libgoogleurl.a /b/build/slave/mac/build/src/xcodebuild/Debug/libsdch.a /b/build/slave/mac/build/src/xcodebuild/Debug/libnet_base.a 
/b/build/slave/mac/build/src/xcodebuild/Debug/libssl_host_info.a /b/build/slave/mac/build/src/xcodebuild/Debug/libprotobuf_lite.a /b/build/slave/mac/build/src/xcodebuild/Debug/libv8_snapshot.a /b/build/slave/mac/build/src/xcodebuild/Debug/libv8_base.a /b/build/slave/mac/build/src/xcodebuild/Debug/libssl.a -framework OpenGL -framework AppKit -framework Carbon -framework CoreFoundation -framework Foundation -framework IOKit -framework Security -framework SystemConfiguration -lresolv -o /b/build/slave/mac/build/src/app/../xcodebuild/Debug/app_unittests
+
+PhaseScriptExecution "Postbuild \"Strip If Needed\"" ../xcodebuild/app.build/Debug/app_unittests.build/Script-07E688F6B96AA35A01000844.sh
+ cd /b/build/slave/mac/build/src/app
+ /bin/sh -c /b/build/slave/mac/build/src/app/../xcodebuild/app.build/Debug/app_unittests.build/Script-07E688F6B96AA35A01000844.sh
+
+
+=== BUILD NATIVE TARGET base_unittests OF PROJECT base WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Postbuild \"Strip If Needed\"" ../xcodebuild/base.build/Debug/base_unittests.build/Script-25445A6E87471CA82F7A2A04.sh
+ cd /b/build/slave/mac/build/src/base
+ /bin/sh -c /b/build/slave/mac/build/src/base/../xcodebuild/base.build/Debug/base_unittests.build/Script-25445A6E87471CA82F7A2A04.sh
+
+
+=== BUILD NATIVE TARGET test_support_perf OF PROJECT base WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET common_constants Support OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET common_constants OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET cacheinvalidation_proto OF PROJECT cacheinvalidation WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"genproto\"" ../../xcodebuild/cacheinvalidation.build/Debug/cacheinvalidation_proto.build/Script-F6284ABD289942E92747985A.sh
+ cd /b/build/slave/mac/build/src/third_party/cacheinvalidation
+ /bin/sh -c /b/build/slave/mac/build/src/third_party/cacheinvalidation/../../xcodebuild/cacheinvalidation.build/Debug/cacheinvalidation_proto.build/Script-F6284ABD289942E92747985A.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD NATIVE TARGET cacheinvalidation OF PROJECT cacheinvalidation WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET sync_notifier OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET notifier OF PROJECT jingle WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET libjingle OF PROJECT libjingle WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET sync_proto_cpp OF PROJECT sync_proto WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET sync OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET chrome_resources OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"grit\"" ../xcodebuild/chrome.build/Debug/chrome_resources.build/Script-9CA0E2FB3E57937086B90B01.sh
+ cd /b/build/slave/mac/build/src/chrome
+ /bin/sh -c /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/chrome_resources.build/Script-9CA0E2FB3E57937086B90B01.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD AGGREGATE TARGET chrome_strings OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"grit\"" ../xcodebuild/chrome.build/Debug/chrome_strings.build/Script-E0610D9F312AEECB2F90C4E3.sh
+ cd /b/build/slave/mac/build/src/chrome
+ /bin/sh -c /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/chrome_strings.build/Script-E0610D9F312AEECB2F90C4E3.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD AGGREGATE TARGET theme_resources OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET npapi OF PROJECT npapi WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET ppapi_c OF PROJECT ppapi WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET webkit_resources OF PROJECT webkit_support WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET webkit_strings OF PROJECT webkit_support WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET webkit_user_agent Support OF PROJECT webkit_support WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET webkit_user_agent OF PROJECT webkit_support WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET glue OF PROJECT webkit_support WITH CONFIGURATION Debug ===
+Check dependencies
+Distributed-CompileC ../../xcodebuild/webkit_support.build/Debug/glue.build/Objects-normal/i386/context_menu.o ../glue/context_menu.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/webkit/support
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS localhost
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/webkit/support/../../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/webkit/support/../../xcodebuild/Debug/include -I../../third_party/icu/public/common -I../../third_party/icu/public/i18n -I/b/build/slave/mac/build/src/webkit/support/../../xcodebuild/webkit_support.build/DerivedSources/Debug -I/b/build/slave/mac/build/src/webkit/support/../../xcodebuild/DerivedSources/Debug/webkit -I../../gpu -I../.. -I../../third_party -I../../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/webkit/support/../../xcodebuild/DerivedSources/Debug/app -I../../skia/config -I../../third_party/skia/include/config -I../../third_party/skia/include/core -I../../third_party/skia/include/effects -I../../skia/ext -I../../third_party/npapi -I../../third_party/npapi/bindings -I/b/build/slave/mac/build/src/webkit/support/../../xcodebuild/webkit_support.build/Debug/glue.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/webkit/support/../../xcodebuild/webkit_support.build/Debug/glue.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/webkit/support/../glue/context_menu.cc -o /b/build/slave/mac/build/src/webkit/support/../../xcodebuild/webkit_support.build/Debug/glue.build/Objects-normal/i386/context_menu.o
+
+Libtool ../../xcodebuild/Debug/libglue.a normal i386
+ cd /b/build/slave/mac/build/src/webkit/support
+ setenv DISTCC_HOSTS "distcc4.golo.chromium.org:3632,lzo,cpp/0 distcc7.golo.chromium.org:3632,lzo,cpp/0 distcc8.golo.chromium.org:3632,lzo,cpp/0 distcc10.golo.chromium.org:3632,lzo,cpp/0 distcc6.golo.chromium.org:3632,lzo,cpp/0 distcc3.golo.chromium.org:3632,lzo,cpp/0 distcc2.golo.chromium.org:3632,lzo,cpp/0 distcc1.golo.chromium.org:3632,lzo,cpp/0 distcc9.golo.chromium.org:3632,lzo,cpp/0 distcc5.golo.chromium.org:3632,lzo,cpp/0"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv MACOSX_DEPLOYMENT_TARGET 10.5
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/libtool -static -arch_only i386 -syslibroot /Developer/SDKs/MacOSX10.5.sdk -L/b/build/slave/mac/build/src/webkit/support/../../xcodebuild/Debug -filelist /b/build/slave/mac/build/src/webkit/support/../../xcodebuild/webkit_support.build/Debug/glue.build/Objects-normal/i386/glue.LinkFileList -o /b/build/slave/mac/build/src/webkit/support/../../xcodebuild/Debug/libglue.a
+
+
+=== BUILD NATIVE TARGET common OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/appcache_backend_proxy.o common/appcache/appcache_backend_proxy.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DLIBXML_STATIC -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I../gpu -I.. -I../third_party -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/sqlite -I../third_party/zlib -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/common/appcache/appcache_backend_proxy.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/appcache_backend_proxy.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/web_database_observer_impl.o common/web_database_observer_impl.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DLIBXML_STATIC -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I../gpu -I.. -I../third_party -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/sqlite -I../third_party/zlib -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/common/web_database_observer_impl.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/web_database_observer_impl.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/database_util.o common/database_util.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DLIBXML_STATIC -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I../gpu -I.. -I../third_party -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/sqlite -I../third_party/zlib -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/common/database_util.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/database_util.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/appcache_dispatcher.o common/appcache/appcache_dispatcher.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DLIBXML_STATIC -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I../gpu -I.. -I../third_party -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/sqlite -I../third_party/zlib -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/common/appcache/appcache_dispatcher.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/appcache_dispatcher.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/db_message_filter.o common/db_message_filter.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DLIBXML_STATIC -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I../gpu -I.. -I../third_party -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/sqlite -I../third_party/zlib -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/common/db_message_filter.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/db_message_filter.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/file_system_dispatcher.o common/file_system/file_system_dispatcher.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DLIBXML_STATIC -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I../gpu -I.. -I../third_party -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/sqlite -I../third_party/zlib -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/common/file_system/file_system_dispatcher.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/file_system_dispatcher.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/extension_localization_peer.o common/extensions/extension_localization_peer.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DLIBXML_STATIC -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I../gpu -I.. -I../third_party -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/sqlite -I../third_party/zlib -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/common/extensions/extension_localization_peer.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/extension_localization_peer.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/render_messages.o common/render_messages.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DLIBXML_STATIC -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I../gpu -I.. -I../third_party -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/sqlite -I../third_party/zlib -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/common/render_messages.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/render_messages.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/socket_stream_dispatcher.o common/socket_stream_dispatcher.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DLIBXML_STATIC -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I../gpu -I.. -I../third_party -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/sqlite -I../third_party/zlib -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/common/socket_stream_dispatcher.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/socket_stream_dispatcher.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/render_messages_params.o common/render_messages_params.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DLIBXML_STATIC -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I../gpu -I.. -I../third_party -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/sqlite -I../third_party/zlib -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/common/render_messages_params.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/render_messages_params.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/resource_dispatcher.o common/resource_dispatcher.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DLIBXML_STATIC -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I../gpu -I.. -I../third_party -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/sqlite -I../third_party/zlib -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/common/resource_dispatcher.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/resource_dispatcher.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/webblobregistry_impl.o common/webblobregistry_impl.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DLIBXML_STATIC -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I../gpu -I.. -I../third_party -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/sqlite -I../third_party/zlib -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/common/webblobregistry_impl.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/webblobregistry_impl.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/logging_chrome.o common/logging_chrome.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DLIBXML_STATIC -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I../gpu -I.. -I../third_party -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/sqlite -I../third_party/zlib -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/common/logging_chrome.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/logging_chrome.o
+
+Libtool ../xcodebuild/Debug/libcommon.a normal i386
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_HOSTS "distcc4.golo.chromium.org:3632,lzo,cpp/0 distcc7.golo.chromium.org:3632,lzo,cpp/0 distcc8.golo.chromium.org:3632,lzo,cpp/0 distcc10.golo.chromium.org:3632,lzo,cpp/0 distcc6.golo.chromium.org:3632,lzo,cpp/0 distcc3.golo.chromium.org:3632,lzo,cpp/0 distcc2.golo.chromium.org:3632,lzo,cpp/0 distcc1.golo.chromium.org:3632,lzo,cpp/0 distcc9.golo.chromium.org:3632,lzo,cpp/0 distcc5.golo.chromium.org:3632,lzo,cpp/0"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv MACOSX_DEPLOYMENT_TARGET 10.5
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/libtool -static -arch_only i386 -syslibroot /Developer/SDKs/MacOSX10.5.sdk -L/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -filelist /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/common.build/Objects-normal/i386/common.LinkFileList -o /b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/libcommon.a
+
+
+=== BUILD NATIVE TARGET common_net OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET default_plugin_resources OF PROJECT default_plugin WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET default_plugin OF PROJECT default_plugin WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET libxml OF PROJECT libxml WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET ipc OF PROJECT ipc WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET printing OF PROJECT printing WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET bzip2 OF PROJECT bzip2 WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET appcache OF PROJECT webkit_support WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET blob OF PROJECT webkit_support WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET gles2_implementation OF PROJECT gpu WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET gles2_cmd_helper OF PROJECT gpu WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET command_buffer_client OF PROJECT gpu WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET command_buffer_common OF PROJECT gpu WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET ppapi_shared_impl OF PROJECT ppapi WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET gpu_plugin OF PROJECT gpu WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET command_buffer_service OF PROJECT gpu WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET translator_glsl OF PROJECT build_angle WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET translator_common OF PROJECT build_angle WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET libvpx_include OF PROJECT libvpx WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET chromotocol_proto OF PROJECT chromotocol WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"genproto\"" ../../xcodebuild/chromotocol.build/Debug/chromotocol_proto.build/Script-FC778EC6151CD2300601907F.sh
+ cd /b/build/slave/mac/build/src/remoting/proto
+ /bin/sh -c /b/build/slave/mac/build/src/remoting/proto/../../xcodebuild/chromotocol.build/Debug/chromotocol_proto.build/Script-FC778EC6151CD2300601907F.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD NATIVE TARGET chromotocol_proto_lib OF PROJECT chromotocol WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET trace_proto OF PROJECT trace WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"genproto\"" ../../xcodebuild/trace.build/Debug/trace_proto.build/Script-1767794B043163548BD577A7.sh
+ cd /b/build/slave/mac/build/src/remoting/proto
+ /bin/sh -c /b/build/slave/mac/build/src/remoting/proto/../../xcodebuild/trace.build/Debug/trace_proto.build/Script-1767794B043163548BD577A7.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD NATIVE TARGET trace_proto_lib OF PROJECT trace WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET chromoting_base OF PROJECT remoting WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET chromoting_plugin OF PROJECT remoting WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET config_sources OF PROJECT yasm WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET genmacro OF PROJECT yasm WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Postbuild \"Strip If Needed\"" ../../xcodebuild/yasm.build/Debug/genmacro.build/Script-4C7889A20E677CE959D733D8.sh
+ cd /b/build/slave/mac/build/src/third_party/yasm
+ /bin/sh -c /b/build/slave/mac/build/src/third_party/yasm/../../xcodebuild/yasm.build/Debug/genmacro.build/Script-4C7889A20E677CE959D733D8.sh
+
+
+=== BUILD NATIVE TARGET genmodule OF PROJECT yasm WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Postbuild \"Strip If Needed\"" ../../xcodebuild/yasm.build/Debug/genmodule.build/Script-4889D2DFD6E02E6FD4E5DD23.sh
+ cd /b/build/slave/mac/build/src/third_party/yasm
+ /bin/sh -c /b/build/slave/mac/build/src/third_party/yasm/../../xcodebuild/yasm.build/Debug/genmodule.build/Script-4889D2DFD6E02E6FD4E5DD23.sh
+
+
+=== BUILD NATIVE TARGET genperf_libs OF PROJECT yasm WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET genperf OF PROJECT yasm WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Postbuild \"Strip If Needed\"" ../../xcodebuild/yasm.build/Debug/genperf.build/Script-1D9F9DCA8B66106271900FD2.sh
+ cd /b/build/slave/mac/build/src/third_party/yasm
+ /bin/sh -c /b/build/slave/mac/build/src/third_party/yasm/../../xcodebuild/yasm.build/Debug/genperf.build/Script-1D9F9DCA8B66106271900FD2.sh
+
+
+=== BUILD NATIVE TARGET genversion OF PROJECT yasm WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Postbuild \"Strip If Needed\"" ../../xcodebuild/yasm.build/Debug/genversion.build/Script-86C7D43B641DD7FA312A063A.sh
+ cd /b/build/slave/mac/build/src/third_party/yasm
+ /bin/sh -c /b/build/slave/mac/build/src/third_party/yasm/../../xcodebuild/yasm.build/Debug/genversion.build/Script-86C7D43B641DD7FA312A063A.sh
+
+
+=== BUILD AGGREGATE TARGET generate_files OF PROJECT yasm WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"generate_gperf\"" ../../xcodebuild/yasm.build/Debug/generate_files.build/Script-17643966799E5C48ACBBC76C.sh
+ cd /b/build/slave/mac/build/src/third_party/yasm
+ /bin/sh -c /b/build/slave/mac/build/src/third_party/yasm/../../xcodebuild/yasm.build/Debug/generate_files.build/Script-17643966799E5C48ACBBC76C.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD NATIVE TARGET genstring OF PROJECT yasm WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Postbuild \"Strip If Needed\"" ../../xcodebuild/yasm.build/Debug/genstring.build/Script-8C8DB269C91395EE620901FA.sh
+ cd /b/build/slave/mac/build/src/third_party/yasm
+ /bin/sh -c /b/build/slave/mac/build/src/third_party/yasm/../../xcodebuild/yasm.build/Debug/genstring.build/Script-8C8DB269C91395EE620901FA.sh
+
+
+=== BUILD NATIVE TARGET re2c OF PROJECT yasm WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Postbuild \"Strip If Needed\"" ../../xcodebuild/yasm.build/Debug/re2c.build/Script-80CBF3376FF074DC264D0801.sh
+ cd /b/build/slave/mac/build/src/third_party/yasm
+ /bin/sh -c /b/build/slave/mac/build/src/third_party/yasm/../../xcodebuild/yasm.build/Debug/re2c.build/Script-80CBF3376FF074DC264D0801.sh
+
+
+=== BUILD AGGREGATE TARGET yasm Support OF PROJECT yasm WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"generate_gperf\"" "../../xcodebuild/yasm.build/Debug/yasm Support.build/Script-01D2B874063C3CD35D7BB021.sh"
+ cd /b/build/slave/mac/build/src/third_party/yasm
+ /bin/sh -c "\"/b/build/slave/mac/build/src/third_party/yasm/../../xcodebuild/yasm.build/Debug/yasm Support.build/Script-01D2B874063C3CD35D7BB021.sh\""
+
+make: Nothing to be done for `all'.
+PhaseScriptExecution "Rule \"generate_re2c\"" "../../xcodebuild/yasm.build/Debug/yasm Support.build/Script-78F98A3A8E7B6F2F3A9E27E4.sh"
+ cd /b/build/slave/mac/build/src/third_party/yasm
+ /bin/sh -c "\"/b/build/slave/mac/build/src/third_party/yasm/../../xcodebuild/yasm.build/Debug/yasm Support.build/Script-78F98A3A8E7B6F2F3A9E27E4.sh\""
+
+make: Nothing to be done for `all'.
+
+=== BUILD NATIVE TARGET yasm OF PROJECT yasm WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Postbuild \"Strip If Needed\"" ../../xcodebuild/yasm.build/Debug/yasm.build/Script-553C58D6C5752DBCAC66E158.sh
+ cd /b/build/slave/mac/build/src/third_party/yasm
+ /bin/sh -c /b/build/slave/mac/build/src/third_party/yasm/../../xcodebuild/yasm.build/Debug/yasm.build/Script-553C58D6C5752DBCAC66E158.sh
+
+
+=== BUILD AGGREGATE TARGET assemble_ffmpeg_asm OF PROJECT ffmpeg WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"assemble\"" ../../xcodebuild/ffmpeg.build/Debug/assemble_ffmpeg_asm.build/Script-CC0AEB08A28175BFCADB3230.sh
+ cd /b/build/slave/mac/build/src/third_party/ffmpeg
+ /bin/sh -c /b/build/slave/mac/build/src/third_party/ffmpeg/../../xcodebuild/ffmpeg.build/Debug/assemble_ffmpeg_asm.build/Script-CC0AEB08A28175BFCADB3230.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD AGGREGATE TARGET make_ffmpeg_asm_lib OF PROJECT ffmpeg WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET ffmpegsumo Support OF PROJECT ffmpeg WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET ffmpegsumo OF PROJECT ffmpeg WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Postbuild \"Strip If Needed\"" ../../xcodebuild/ffmpeg.build/Debug/ffmpegsumo.build/Script-5F4EE13F45F454D1CDFA2E81.sh
+ cd /b/build/slave/mac/build/src/third_party/ffmpeg
+ /bin/sh -c /b/build/slave/mac/build/src/third_party/ffmpeg/../../xcodebuild/ffmpeg.build/Debug/ffmpegsumo.build/Script-5F4EE13F45F454D1CDFA2E81.sh
+
+
+=== BUILD NATIVE TARGET ffmpegsumo_nolink OF PROJECT ffmpeg WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Postbuild \"Strip If Needed\"" ../../xcodebuild/ffmpeg.build/Debug/ffmpegsumo_nolink.build/Script-5FDA933F15D46D3AB0B441AC.sh
+ cd /b/build/slave/mac/build/src/third_party/ffmpeg
+ /bin/sh -c /b/build/slave/mac/build/src/third_party/ffmpeg/../../xcodebuild/ffmpeg.build/Debug/ffmpegsumo_nolink.build/Script-5FDA933F15D46D3AB0B441AC.sh
+
+
+=== BUILD AGGREGATE TARGET ffmpeg Support OF PROJECT ffmpeg WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET ffmpeg OF PROJECT ffmpeg WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET media OF PROJECT media WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET chromoting_jingle_glue OF PROJECT remoting WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET expat OF PROJECT expat WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET libjingle_p2p OF PROJECT libjingle WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET libsrtp OF PROJECT libsrtp WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET chromoting_client OF PROJECT remoting WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET chromoting_protocol OF PROJECT remoting WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET ppapi_cpp_objects OF PROJECT ppapi WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET sync_listen_notifications OF PROJECT sync_tools WITH CONFIGURATION Debug ===
+Check dependencies
+Ld ../../../../xcodebuild/Debug/sync_listen_notifications normal i386
+ cd /b/build/slave/mac/build/src/chrome/browser/sync/tools
+ setenv DISTCC_HOSTS "distcc4.golo.chromium.org:3632,lzo,cpp/0 distcc7.golo.chromium.org:3632,lzo,cpp/0 distcc8.golo.chromium.org:3632,lzo,cpp/0 distcc10.golo.chromium.org:3632,lzo,cpp/0 distcc6.golo.chromium.org:3632,lzo,cpp/0 distcc3.golo.chromium.org:3632,lzo,cpp/0 distcc2.golo.chromium.org:3632,lzo,cpp/0 distcc1.golo.chromium.org:3632,lzo,cpp/0 distcc9.golo.chromium.org:3632,lzo,cpp/0 distcc5.golo.chromium.org:3632,lzo,cpp/0"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv MACOSX_DEPLOYMENT_TARGET 10.5
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/g++-4.2 -arch i386 -isysroot /Developer/SDKs/MacOSX10.5.sdk -L/b/build/slave/mac/build/src/chrome/browser/sync/tools/../../../../xcodebuild/Debug -F/b/build/slave/mac/build/src/chrome/browser/sync/tools/../../../../xcodebuild/Debug -filelist /b/build/slave/mac/build/src/chrome/browser/sync/tools/../../../../xcodebuild/sync_tools.build/Debug/sync_listen_notifications.build/Objects-normal/i386/sync_listen_notifications.LinkFileList -mmacosx-version-min=10.5 -Wl,-search_paths_first /b/build/slave/mac/build/src/xcodebuild/Debug/libbase.a /b/build/slave/mac/build/src/xcodebuild/Debug/libcommon_constants.a /b/build/slave/mac/build/src/xcodebuild/Debug/libsync_notifier.a /b/build/slave/mac/build/src/xcodebuild/Debug/libnotifier.a /b/build/slave/mac/build/src/xcodebuild/Debug/libjingle.a /b/build/slave/mac/build/src/xcodebuild/Debug/libmodp_b64.a /b/build/slave/mac/build/src/xcodebuild/Debug/libdynamic_annotations.a /b/build/slave/mac/build/src/xcodebuild/Debug/libnss.a /b/build/slave/mac/build/src/xcodebuild/Debug/libnspr.a /b/build/slave/mac/build/src/xcodebuild/Debug/libsqlite3.a /b/build/slave/mac/build/src/xcodebuild/Debug/libicui18n.a /b/build/slave/mac/build/src/xcodebuild/Debug/libicuuc.a /b/build/slave/mac/build/src/xcodebuild/Debug/libicudata.a /b/build/slave/mac/build/src/xcodebuild/Debug/libevent.a /b/build/slave/mac/build/src/xcodebuild/Debug/libsync.a /b/build/slave/mac/build/src/xcodebuild/Debug/libcommon.a /b/build/slave/mac/build/src/xcodebuild/Debug/libcommon_net.a /b/build/slave/mac/build/src/xcodebuild/Debug/libapp_base.a /b/build/slave/mac/build/src/xcodebuild/Debug/libbase_i18n.a /b/build/slave/mac/build/src/xcodebuild/Debug/libgfx.a /b/build/slave/mac/build/src/xcodebuild/Debug/libskia.a /b/build/slave/mac/build/src/xcodebuild/Debug/libskia_opts.a /b/build/slave/mac/build/src/xcodebuild/Debug/libpng.a /b/build/slave/mac/build/src/xcodebuild/Debug/libchrome_zlib.a /b/build/slave/mac/build/src/xcodebuild/Debug/libjpeg.a 
/b/build/slave/mac/build/src/xcodebuild/Debug/libnet.a /b/build/slave/mac/build/src/xcodebuild/Debug/libgoogleurl.a /b/build/slave/mac/build/src/xcodebuild/Debug/libsdch.a /b/build/slave/mac/build/src/xcodebuild/Debug/libnet_base.a /b/build/slave/mac/build/src/xcodebuild/Debug/libssl_host_info.a /b/build/slave/mac/build/src/xcodebuild/Debug/libprotobuf_lite.a /b/build/slave/mac/build/src/xcodebuild/Debug/libv8_snapshot.a /b/build/slave/mac/build/src/xcodebuild/Debug/libv8_base.a /b/build/slave/mac/build/src/xcodebuild/Debug/libssl.a /b/build/slave/mac/build/src/xcodebuild/Debug/libdefault_plugin.a /b/build/slave/mac/build/src/xcodebuild/Debug/libxml2.a /b/build/slave/mac/build/src/xcodebuild/Debug/libipc.a /b/build/slave/mac/build/src/xcodebuild/Debug/libprinting.a /b/build/slave/mac/build/src/xcodebuild/Debug/libchrome_bz2.a /b/build/slave/mac/build/src/xcodebuild/Debug/libappcache.a /b/build/slave/mac/build/src/xcodebuild/Debug/libblob.a /b/build/slave/mac/build/src/xcodebuild/Debug/libglue.a /b/build/slave/mac/build/src/xcodebuild/Debug/libgles2_implementation.a /b/build/slave/mac/build/src/xcodebuild/Debug/libgles2_cmd_helper.a /b/build/slave/mac/build/src/xcodebuild/Debug/libcommand_buffer_client.a /b/build/slave/mac/build/src/xcodebuild/Debug/libcommand_buffer_common.a /b/build/slave/mac/build/src/xcodebuild/Debug/libppapi_shared_impl.a /b/build/slave/mac/build/src/xcodebuild/Debug/libwebkit_user_agent.a /b/build/slave/mac/build/src/xcodebuild/Debug/libgpu_plugin.a /b/build/slave/mac/build/src/xcodebuild/Debug/libcommand_buffer_service.a /b/build/slave/mac/build/src/xcodebuild/Debug/libtranslator_glsl.a /b/build/slave/mac/build/src/xcodebuild/Debug/libtranslator_common.a /b/build/slave/mac/build/src/xcodebuild/Debug/libchromoting_plugin.a /b/build/slave/mac/build/src/xcodebuild/Debug/libchromoting_base.a /b/build/slave/mac/build/src/xcodebuild/Debug/libmedia.a /b/build/slave/mac/build/src/xcodebuild/Debug/libffmpeg.a 
/b/build/slave/mac/build/src/xcodebuild/Debug/libchromoting_jingle_glue.a /b/build/slave/mac/build/src/xcodebuild/Debug/libexpat.a /b/build/slave/mac/build/src/xcodebuild/Debug/libjingle_p2p.a /b/build/slave/mac/build/src/xcodebuild/Debug/libsrtp.a /b/build/slave/mac/build/src/xcodebuild/Debug/libchromotocol_proto_lib.a /b/build/slave/mac/build/src/xcodebuild/Debug/libtrace_proto_lib.a /b/build/slave/mac/build/src/xcodebuild/Debug/libchromoting_client.a /b/build/slave/mac/build/src/xcodebuild/Debug/libchromoting_protocol.a /b/build/slave/mac/build/src/xcodebuild/Debug/libppapi_cpp_objects.a /b/build/slave/mac/build/src/xcodebuild/Debug/libcacheinvalidation.a -framework AppKit -framework Carbon -framework CoreFoundation -framework Foundation -framework IOKit -framework Security -framework OpenGL -framework SystemConfiguration -lresolv -lcups -framework QuartzCore -framework AudioToolbox -framework CoreAudio -o /b/build/slave/mac/build/src/chrome/browser/sync/tools/../../../../xcodebuild/Debug/sync_listen_notifications
+
+PhaseScriptExecution "Postbuild \"Strip If Needed\"" ../../../../xcodebuild/sync_tools.build/Debug/sync_listen_notifications.build/Script-577A6588CCA38D4E2CF91DDA.sh
+ cd /b/build/slave/mac/build/src/chrome/browser/sync/tools
+ /bin/sh -c /b/build/slave/mac/build/src/chrome/browser/sync/tools/../../../../xcodebuild/sync_tools.build/Debug/sync_listen_notifications.build/Script-577A6588CCA38D4E2CF91DDA.sh
+
+
+=== BUILD AGGREGATE TARGET platform_locale_settings OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET chrome_extra_resources OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"grit\"" ../xcodebuild/chrome.build/Debug/chrome_extra_resources.build/Script-3DF0B99B7073177A35D87914.sh
+ cd /b/build/slave/mac/build/src/chrome
+ /bin/sh -c /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/chrome_extra_resources.build/Script-3DF0B99B7073177A35D87914.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD AGGREGATE TARGET default_extensions OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET debugger OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/debugger.build/Objects-normal/i386/debugger_remote_service.o browser/debugger/debugger_remote_service.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/debugger.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/debugger.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/debugger/debugger_remote_service.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/debugger.build/Objects-normal/i386/debugger_remote_service.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/debugger.build/Objects-normal/i386/extension_ports_remote_service.o browser/debugger/extension_ports_remote_service.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/debugger.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/debugger.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/debugger/extension_ports_remote_service.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/debugger.build/Objects-normal/i386/extension_ports_remote_service.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/debugger.build/Objects-normal/i386/devtools_window.o browser/debugger/devtools_window.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/debugger.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/debugger.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/debugger/devtools_window.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/debugger.build/Objects-normal/i386/devtools_window.o
+
+Libtool ../xcodebuild/Debug/libdebugger.a normal i386
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_HOSTS "distcc4.golo.chromium.org:3632,lzo,cpp/0 distcc7.golo.chromium.org:3632,lzo,cpp/0 distcc8.golo.chromium.org:3632,lzo,cpp/0 distcc10.golo.chromium.org:3632,lzo,cpp/0 distcc6.golo.chromium.org:3632,lzo,cpp/0 distcc3.golo.chromium.org:3632,lzo,cpp/0 distcc2.golo.chromium.org:3632,lzo,cpp/0 distcc1.golo.chromium.org:3632,lzo,cpp/0 distcc9.golo.chromium.org:3632,lzo,cpp/0 distcc5.golo.chromium.org:3632,lzo,cpp/0"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv MACOSX_DEPLOYMENT_TARGET 10.5
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/libtool -static -arch_only i386 -syslibroot /Developer/SDKs/MacOSX10.5.sdk -L/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -filelist /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/debugger.build/Objects-normal/i386/debugger.LinkFileList -o /b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/libdebugger.a
+
+
+=== BUILD NATIVE TARGET plugin OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/plugin.build/Objects-normal/i386/plugin_thread.o plugin/plugin_thread.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS localhost
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../gpu -I.. -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/plugin.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/plugin.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/plugin/plugin_thread.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/plugin.build/Objects-normal/i386/plugin_thread.o
+
+Libtool ../xcodebuild/Debug/libplugin.a normal i386
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_HOSTS "distcc4.golo.chromium.org:3632,lzo,cpp/0 distcc7.golo.chromium.org:3632,lzo,cpp/0 distcc8.golo.chromium.org:3632,lzo,cpp/0 distcc10.golo.chromium.org:3632,lzo,cpp/0 distcc6.golo.chromium.org:3632,lzo,cpp/0 distcc3.golo.chromium.org:3632,lzo,cpp/0 distcc2.golo.chromium.org:3632,lzo,cpp/0 distcc1.golo.chromium.org:3632,lzo,cpp/0 distcc9.golo.chromium.org:3632,lzo,cpp/0 distcc5.golo.chromium.org:3632,lzo,cpp/0"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv MACOSX_DEPLOYMENT_TARGET 10.5
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/libtool -static -arch_only i386 -syslibroot /Developer/SDKs/MacOSX10.5.sdk -L/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -filelist /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/plugin.build/Objects-normal/i386/plugin.LinkFileList -o /b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/libplugin.a
+
+
+=== BUILD NATIVE TARGET utility OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET profile_import OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET chrome_gpu OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET ppapi_plugin OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET worker OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/worker.build/Objects-normal/i386/worker_webkitclient_impl.o worker/worker_webkitclient_impl.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS localhost
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I.. -I../gpu -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/worker.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/worker.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/worker/worker_webkitclient_impl.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/worker.build/Objects-normal/i386/worker_webkitclient_impl.o
+
+Libtool ../xcodebuild/Debug/libworker.a normal i386
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_HOSTS "distcc4.golo.chromium.org:3632,lzo,cpp/0 distcc7.golo.chromium.org:3632,lzo,cpp/0 distcc8.golo.chromium.org:3632,lzo,cpp/0 distcc10.golo.chromium.org:3632,lzo,cpp/0 distcc6.golo.chromium.org:3632,lzo,cpp/0 distcc3.golo.chromium.org:3632,lzo,cpp/0 distcc2.golo.chromium.org:3632,lzo,cpp/0 distcc1.golo.chromium.org:3632,lzo,cpp/0 distcc9.golo.chromium.org:3632,lzo,cpp/0 distcc5.golo.chromium.org:3632,lzo,cpp/0"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv MACOSX_DEPLOYMENT_TARGET 10.5
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/libtool -static -arch_only i386 -syslibroot /Developer/SDKs/MacOSX10.5.sdk -L/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -filelist /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/worker.build/Objects-normal/i386/worker.LinkFileList -o /b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/libworker.a
+
+
+=== BUILD NATIVE TARGET syncapi OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET service OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET device_management_proto_cpp OF PROJECT device_management_proto WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD AGGREGATE TARGET safe_browsing_csd_proto OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"genproto\"" ../xcodebuild/chrome.build/Debug/safe_browsing_csd_proto.build/Script-A64480A7A7F73B68D5D0EBFB.sh
+ cd /b/build/slave/mac/build/src/chrome
+ /bin/sh -c /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/safe_browsing_csd_proto.build/Script-A64480A7A7F73B68D5D0EBFB.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD AGGREGATE TARGET safe_browsing_report_proto OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"genproto\"" ../xcodebuild/chrome.build/Debug/safe_browsing_report_proto.build/Script-6B102C7C9773962E1B26CF1B.sh
+ cd /b/build/slave/mac/build/src/chrome
+ /bin/sh -c /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/safe_browsing_report_proto.build/Script-6B102C7C9773962E1B26CF1B.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD AGGREGATE TARGET userfeedback_proto OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"genproto\"" ../xcodebuild/chrome.build/Debug/userfeedback_proto.build/Script-E9D8DD8102F83FCBC3F0768B.sh
+ cd /b/build/slave/mac/build/src/chrome
+ /bin/sh -c /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/userfeedback_proto.build/Script-E9D8DD8102F83FCBC3F0768B.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD AGGREGATE TARGET browser Support OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+
+=== BUILD NATIVE TARGET browser OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/appcache_dispatcher_host.o browser/appcache/appcache_dispatcher_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/appcache/appcache_dispatcher_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/appcache_dispatcher_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/appcache_frontend_proxy.o browser/appcache/appcache_frontend_proxy.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/appcache/appcache_frontend_proxy.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/appcache_frontend_proxy.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/dom_ui.o browser/dom_ui/dom_ui.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/dom_ui/dom_ui.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/dom_ui.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_port_container.o browser/automation/extension_port_container.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/automation/extension_port_container.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_port_container.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/browser_about_handler.o browser/browser_about_handler.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/browser_about_handler.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/browser_about_handler.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/browser_main.o browser/browser_main.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/browser_main.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/browser_main.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/devtools_ui.o browser/dom_ui/devtools_ui.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/dom_ui/devtools_ui.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/devtools_ui.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/dispatcher_host.o browser/device_orientation/dispatcher_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/device_orientation/dispatcher_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/dispatcher_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_browser_actions_api.o browser/extensions/extension_browser_actions_api.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/extensions/extension_browser_actions_api.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_browser_actions_api.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/dom_storage_dispatcher_host.o browser/in_process_webkit/dom_storage_dispatcher_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/in_process_webkit/dom_storage_dispatcher_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/dom_storage_dispatcher_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/indexed_db_callbacks.o browser/in_process_webkit/indexed_db_callbacks.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/in_process_webkit/indexed_db_callbacks.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/indexed_db_callbacks.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_menu_manager.o browser/extensions/extension_menu_manager.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/extensions/extension_menu_manager.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_menu_manager.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/browser_process_impl.o browser/browser_process_impl.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/browser_process_impl.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/browser_process_impl.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/instant_loader.o browser/instant/instant_loader.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/instant/instant_loader.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/instant_loader.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_event_router.o browser/extensions/extension_event_router.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/extensions/extension_event_router.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_event_router.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/memory_purger.o browser/memory_purger.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/memory_purger.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/memory_purger.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_message_service.o browser/extensions/extension_message_service.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/extensions/extension_message_service.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_message_service.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_sidebar_api.o browser/extensions/extension_sidebar_api.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/extensions/extension_sidebar_api.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_sidebar_api.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_process_manager.o browser/extensions/extension_process_manager.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/extensions/extension_process_manager.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_process_manager.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_host.o browser/extensions/extension_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/extensions/extension_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/histogram_synchronizer.o browser/metrics/histogram_synchronizer.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/metrics/histogram_synchronizer.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/histogram_synchronizer.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/dom_storage_area.o browser/in_process_webkit/dom_storage_area.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/in_process_webkit/dom_storage_area.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/dom_storage_area.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/indexed_db_dispatcher_host.o browser/in_process_webkit/indexed_db_dispatcher_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/in_process_webkit/indexed_db_dispatcher_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/indexed_db_dispatcher_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_page_actions_module.o browser/extensions/extension_page_actions_module.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/extensions/extension_page_actions_module.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_page_actions_module.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/mime_registry_dispatcher.o browser/mime_registry_dispatcher.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/mime_registry_dispatcher.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/mime_registry_dispatcher.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/file_system_dispatcher_host.o browser/file_system/file_system_dispatcher_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/file_system/file_system_dispatcher_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/file_system_dispatcher_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/geolocation_dispatcher_host_old.o browser/geolocation/geolocation_dispatcher_host_old.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/geolocation/geolocation_dispatcher_host_old.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/geolocation_dispatcher_host_old.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/balloon_host.o browser/notifications/balloon_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/notifications/balloon_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/balloon_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/gpu_process_host.o browser/gpu_process_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/gpu_process_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/gpu_process_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/notification_object_proxy.o browser/notifications/notification_object_proxy.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/notifications/notification_object_proxy.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/notification_object_proxy.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_function_dispatcher.o browser/extensions/extension_function_dispatcher.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/extensions/extension_function_dispatcher.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/extension_function_dispatcher.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/geolocation_permission_context.o browser/geolocation/geolocation_permission_context.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/geolocation/geolocation_permission_context.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/geolocation_permission_context.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/plugin_process_host.o browser/plugin_process_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/plugin_process_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/plugin_process_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/desktop_notification_service.o browser/notifications/desktop_notification_service.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/notifications/desktop_notification_service.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/desktop_notification_service.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/ppapi_plugin_process_host.o browser/ppapi_plugin_process_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/ppapi_plugin_process_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/ppapi_plugin_process_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/web_cache_manager.o browser/renderer_host/web_cache_manager.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/web_cache_manager.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/web_cache_manager.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/plugin_service.o browser/plugin_service.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/plugin_service.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/plugin_service.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/nacl_process_host.o browser/nacl_host/nacl_process_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/nacl_host/nacl_process_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/nacl_process_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/metrics_service.o browser/metrics/metrics_service.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/metrics/metrics_service.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/metrics_service.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/profile.o browser/profiles/profile.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/profiles/profile.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/profile.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_view_host_delegate.o browser/renderer_host/render_view_host_delegate.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/render_view_host_delegate.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_view_host_delegate.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/async_resource_handler.o browser/renderer_host/async_resource_handler.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/async_resource_handler.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/async_resource_handler.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/audio_renderer_host.o browser/renderer_host/audio_renderer_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/audio_renderer_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/audio_renderer_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/task_manager_resource_providers.o browser/task_manager/task_manager_resource_providers.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/task_manager/task_manager_resource_providers.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/task_manager_resource_providers.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_widget_host.o browser/renderer_host/render_widget_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/render_widget_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_widget_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_view_host.o browser/renderer_host/render_view_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/render_view_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_view_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/profile_impl.o browser/profiles/profile_impl.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/profiles/profile_impl.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/profile_impl.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/sync_resource_handler.o browser/renderer_host/sync_resource_handler.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/sync_resource_handler.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/sync_resource_handler.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/socket_stream_host.o browser/renderer_host/socket_stream_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/socket_stream_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/socket_stream_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/speech_input_dispatcher_host.o browser/speech/speech_input_dispatcher_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/speech/speech_input_dispatcher_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/speech_input_dispatcher_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/file_utilities_dispatcher_host.o browser/renderer_host/file_utilities_dispatcher_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/file_utilities_dispatcher_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/file_utilities_dispatcher_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_widget_host_view_mac.o browser/renderer_host/render_widget_host_view_mac.mm normal i386 objective-c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x objective-c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/render_widget_host_view_mac.mm -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_widget_host_view_mac.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/database_dispatcher_host.o browser/renderer_host/database_dispatcher_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/database_dispatcher_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/database_dispatcher_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/browser_render_process_host.o browser/renderer_host/browser_render_process_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/browser_render_process_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/browser_render_process_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/search_provider_install_state_dispatcher_host.o browser/search_engines/search_provider_install_state_dispatcher_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/search_engines/search_provider_install_state_dispatcher_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/search_provider_install_state_dispatcher_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_view_context_menu.o browser/tab_contents/render_view_context_menu.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_context_menu.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_view_context_menu.o
+
+distcc[1299] ERROR: compile /b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_context_menu.cc on distcc4.golo.chromium.org:3632,lzo,cpp/18 failed
+distcc[1299] (dcc_build_somewhere) Warning: remote compilation of '/b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_context_menu.cc' failed, retrying locally
+distcc[1299] Warning: failed to distribute /b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_context_menu.cc to distcc4.golo.chromium.org:3632,lzo,cpp/18, running locally instead
+/b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_context_menu.cc: In member function 'string16 RenderViewContextMenu::PrintableSelectionText()':
+/b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_context_menu.cc:1436: error: invalid initialization of reference of type 'const std::wstring&' from expression of type 'string16'
+../app/l10n_util.h:166: error: in passing argument 1 of 'std::wstring l10n_util::TruncateString(const std::wstring&, size_t)'
+distcc[1299] ERROR: compile /b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_context_menu.cc on localhost failed
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_view_host_manager.o browser/tab_contents/render_view_host_manager.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_host_manager.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_view_host_manager.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/blob_dispatcher_host.o browser/renderer_host/blob_dispatcher_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/blob_dispatcher_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/blob_dispatcher_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/tab_contents_view_mac.o browser/tab_contents/tab_contents_view_mac.mm normal i386 objective-c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x objective-c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/tab_contents/tab_contents_view_mac.mm -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/tab_contents_view_mac.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/worker_service.o browser/worker_host/worker_service.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/worker_host/worker_service.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/worker_service.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/pepper_file_message_filter.o browser/renderer_host/pepper_file_message_filter.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/pepper_file_message_filter.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/pepper_file_message_filter.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/worker_process_host.o browser/worker_host/worker_process_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/worker_host/worker_process_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/worker_process_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/socket_stream_dispatcher_host.o browser/renderer_host/socket_stream_dispatcher_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/socket_stream_dispatcher_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/socket_stream_dispatcher_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/tab_contents.o browser/tab_contents/tab_contents.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/tab_contents/tab_contents.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/tab_contents.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_view_context_menu_mac.o browser/tab_contents/render_view_context_menu_mac.mm normal i386 objective-c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x objective-c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_context_menu_mac.mm -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/render_view_context_menu_mac.o
+
+distcc[1301] ERROR: compile /b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_context_menu_mac.mm on distcc9.golo.chromium.org:3632,lzo,cpp/18 failed
+distcc[1301] (dcc_build_somewhere) Warning: remote compilation of '/b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_context_menu_mac.mm' failed, retrying locally
+distcc[1301] Warning: failed to distribute /b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_context_menu_mac.mm to distcc9.golo.chromium.org:3632,lzo,cpp/18, running locally instead
+/b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_context_menu_mac.mm: In member function 'virtual void RenderViewContextMenuMac::LookUpInDictionary()':
+/b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_context_menu_mac.mm:78: error: invalid initialization of reference of type 'const std::wstring&' from expression of type 'string16'
+../base/sys_string_conversions.h:68: error: in passing argument 1 of 'NSString* base::SysWideToNSString(const std::wstring&)'
+distcc[1301] ERROR: compile /b/build/slave/mac/build/src/chrome/browser/tab_contents/render_view_context_menu_mac.mm on localhost failed
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/resource_dispatcher_host.o browser/renderer_host/resource_dispatcher_host.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/resource_dispatcher_host.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/resource_dispatcher_host.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/resource_message_filter.o browser/renderer_host/resource_message_filter.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DCHROME_V8 -DGOOGLE_PROTOBUF_NO_RTTI -DXML_STATIC -DFEATURE_ENABLE_SSL -DFEATURE_ENABLE_VOICEMAIL -DEXPAT_RELATIVE_PATH -DOSX -DPOSIX -DLIBXML_STATIC -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/DerivedSources/Debug -I../third_party/apple -I../third_party/GTM -I../third_party/GTM/AppKit -I../third_party/GTM/Foundation -I../third_party/GTM/DebugUtils -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_resources -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_locale_settings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app/app_strings -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I../third_party/bzip2 -I../third_party/expat/files/lib -I../third_party/libjingle/overrides -I../third_party/libjingle/source -I../third_party/expat/files -I../third_party/libxml/mac/include -I../third_party/libxml/src/include -I../third_party/npapi -I../third_party/npapi/bindings -I../third_party/speex/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/browser/renderer_host/resource_message_filter.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/browser.build/Objects-normal/i386/resource_message_filter.o
+
+
+=== BUILD AGGREGATE TARGET safe_browsing_proto OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+PhaseScriptExecution "Rule \"genproto\"" ../xcodebuild/chrome.build/Debug/safe_browsing_proto.build/Script-04EB001E8DD6896C5E67BFBB.sh
+ cd /b/build/slave/mac/build/src/chrome
+ /bin/sh -c /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/safe_browsing_proto.build/Script-04EB001E8DD6896C5E67BFBB.sh
+
+make: Nothing to be done for `all'.
+
+=== BUILD NATIVE TARGET renderer OF PROJECT chrome WITH CONFIGURATION Debug ===
+Check dependencies
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/autofill_helper.o renderer/autofill_helper.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/autofill_helper.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/autofill_helper.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/dom_automation_controller.o renderer/automation/dom_automation_controller.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/automation/dom_automation_controller.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/dom_automation_controller.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/chrome_app_bindings.o renderer/extensions/chrome_app_bindings.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/extensions/chrome_app_bindings.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/chrome_app_bindings.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/password_autocomplete_manager.o renderer/password_autocomplete_manager.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/password_autocomplete_manager.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/password_autocomplete_manager.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/bindings_utils.o renderer/extensions/bindings_utils.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/extensions/bindings_utils.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/bindings_utils.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/renderer_net_predictor.o renderer/net/renderer_net_predictor.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/net/renderer_net_predictor.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/renderer_net_predictor.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/audio_renderer_impl.o renderer/media/audio_renderer_impl.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/media/audio_renderer_impl.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/audio_renderer_impl.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/cookie_message_filter.o renderer/cookie_message_filter.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/cookie_message_filter.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/cookie_message_filter.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/event_bindings.o renderer/extensions/event_bindings.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/extensions/event_bindings.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/event_bindings.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/external_popup_menu.o renderer/external_popup_menu.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/external_popup_menu.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/external_popup_menu.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/renderer_extension_bindings.o renderer/extensions/renderer_extension_bindings.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/extensions/renderer_extension_bindings.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/renderer_extension_bindings.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/extension_process_bindings.o renderer/extensions/extension_process_bindings.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/extensions/extension_process_bindings.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/extension_process_bindings.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/device_orientation_dispatcher.o renderer/device_orientation_dispatcher.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/device_orientation_dispatcher.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/device_orientation_dispatcher.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/audio_message_filter.o renderer/audio_message_filter.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/audio_message_filter.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/audio_message_filter.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/blocked_plugin.o renderer/blocked_plugin.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/blocked_plugin.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/blocked_plugin.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/page_load_histograms.o renderer/page_load_histograms.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/page_load_histograms.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/page_load_histograms.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/devtools_agent.o renderer/devtools_agent.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/devtools_agent.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/devtools_agent.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/devtools_agent_filter.o renderer/devtools_agent_filter.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/devtools_agent_filter.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/devtools_agent_filter.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/dom_ui_bindings.o renderer/dom_ui_bindings.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/dom_ui_bindings.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/dom_ui_bindings.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/devtools_client.o renderer/devtools_client.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/devtools_client.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/devtools_client.o
+
+Distributed-CompileC ../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/indexed_db_dispatcher.o renderer/indexed_db_dispatcher.cc normal i386 c++ com.apple.compilers.gcc.4_2
+ cd /b/build/slave/mac/build/src/chrome
+ setenv DISTCC_COMPILER "gcc version 4.2.1 (Apple Inc. build 5659)"
+ setenv DISTCC_HOSTS "--randomize distcc5.golo.chromium.org:3632,lzo,cpp/18 distcc9.golo.chromium.org:3632,lzo,cpp/18 distcc1.golo.chromium.org:3632,lzo,cpp/18 distcc2.golo.chromium.org:3632,lzo,cpp/18 distcc3.golo.chromium.org:3632,lzo,cpp/18 distcc6.golo.chromium.org:3632,lzo,cpp/18 distcc10.golo.chromium.org:3632,lzo,cpp/18 distcc7.golo.chromium.org:3632,lzo,cpp/18 distcc8.golo.chromium.org:3632,lzo,cpp/18 distcc4.golo.chromium.org:3632,lzo,cpp/18"
+ setenv DISTCC_SYSTEM "10.6.4 (10F2025, i386)"
+ setenv INCLUDE_SERVER_DIR /tmp/distcc-pump.C2DZDO
+ setenv INCLUDE_SERVER_PID 983
+ setenv INCLUDE_SERVER_PORT /tmp/distcc-pump.C2DZDO/socket
+ setenv LANG en_US.US-ASCII
+ setenv PATH "/usr/bin:/Developer/usr/bin:/usr/bin:/b/build/../depot_tools:/usr/bin:/bin:/usr/sbin:/sbin:/usr/local/bin"
+ /Developer/usr/bin/distcc /Developer/usr/bin/gcc-4.2 -x c++ -arch i386 -fmessage-length=0 -pipe -Wno-trigraphs -fno-exceptions -fno-rtti -O0 -Werror -Wnewline-eof -DCHROMIUM_BUILD -DENABLE_REMOTING=1 -DENABLE_GPU=1 -DNACL_WINDOWS=0 -DNACL_LINUX=0 -DNACL_OSX=1 -DNACL_TARGET_SUBARCH=32 -DNACL_BUILD_SUBARCH=32 -DGOOGLE_PROTOBUF_NO_RTTI -DHUNSPELL_STATIC -DHUNSPELL_CHROME_CLIENT -DUSE_HUNSPELL -DCLD_WINDOWS -DCOMPILER_GCC -D__STDC_CONSTANT_MACROS -DNACL_BLOCK_SHIFT=5 -DNACL_BLOCK_SIZE=32 -D__STDC_FORMAT_MACROS -DDYNAMIC_ANNOTATIONS_ENABLED=1 -D_DEBUG -isysroot /Developer/SDKs/MacOSX10.5.sdk -fvisibility=hidden -fvisibility-inlines-hidden -fno-threadsafe-statics -mmacosx-version-min=10.5 -gdwarf-2 -Wall -Wendif-labels -Wextra -Wno-unused-parameter -Wno-missing-field-initializers -fpch-preprocess -F/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug -F/Developer/SDKs/MacOSX10.5.sdk/System/Library/Frameworks/ApplicationServices.framework/Frameworks -I/b/build/slave/mac/build/src/chrome/../xcodebuild/Debug/include -I../third_party/icu/public/common -I../third_party/icu/public/i18n -I.. 
-I../third_party/cld -I../gpu -I../third_party -I../third_party/mesa/MesaLib/include -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/app -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/chrome -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/protoc_out -I../third_party/protobuf -I../third_party/protobuf/src -I../skia/config -I../third_party/skia/include/config -I../third_party/skia/include/core -I../third_party/skia/include/effects -I../skia/ext -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/ffmpeg -I../third_party/ffmpeg/config -I../third_party/ffmpeg/patched-ffmpeg-mt -I../third_party/npapi -I../third_party/npapi/bindings -I/b/build/slave/mac/build/src/chrome/../xcodebuild/DerivedSources/Debug/webkit -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources/i386 -I/b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/DerivedSources -g1 -c /b/build/slave/mac/build/src/chrome/renderer/indexed_db_dispatcher.cc -o /b/build/slave/mac/build/src/chrome/../xcodebuild/chrome.build/Debug/renderer.build/Objects-normal/i386/indexed_db_dispatcher.o
+
diff --git a/chromium/tools/emacs/trybot-windows.txt b/chromium/tools/emacs/trybot-windows.txt
new file mode 100644
index 00000000000..eac8299e915
--- /dev/null
+++ b/chromium/tools/emacs/trybot-windows.txt
@@ -0,0 +1,72 @@
+This file contains sample trybot output from a Windows trybot run.
+It contains a warning and an error but has otherwise been shortened
+for length.
+
+"C:\Program Files (x86)\Xoreax\IncrediBuild\BuildConsole.exe" e:\b\build\slave\win\build\src\build\all.sln "/Cfg=Debug|Win32"
+
+-----------------------------------------------------------------
+IncrediBuild Console 3.60 Internal (build 1156)
+Copyright (C) 2001-2010 Xoreax Software Ltd. All rights reserved.
+-----------------------------------------------------------------
+--------------------Configuration: toolband_proxy_lib - Debug|Win32------------
+Compiling...
+toolband_p.c
+toolband_proxy.cc
+toolband_dlldata.c
+Creating library...
+
+toolband_proxy_lib - 0 error(s), 0 warning(s)
+--------------------Configuration: webcore_bindings - Debug|Win32--------------
+Compiling...
+CSSGrammar.cpp
+e:\b\build\slave\win\build\src\third_party\webkit\javascriptcore\wtf\text\StringImpl.h(90) : warning C4355: 'this' : used in base member initializer list with a gratuitous backslash \ for testing
+e:\b\build\slave\win\build\src\third_party\webkit\webcore\dom\ViewportArguments.h(78) : warning C4305: 'initializing' : truncation from '' to 'bool'
+e:\b\build\slave\win\build\src\build\Debug\obj\global_intermediate\webkit\CSSGrammar.cpp(1930) : warning C4065: switch statement contains 'default' but no 'case' labels
+V8DerivedSources1.cpp
+--------------------Configuration: run_testserver - Debug|Win32----------------
+Compiling...
+run_testserver.cc
+Linking...
+Embedding manifest...
+Embedding manifest... (rc.exe)
+Microsoft (R) Windows (R) Resource Compiler Version 6.1.6723.1
+
+Copyright (C) Microsoft Corporation. All rights reserved.
+
+Embedding manifest... (link.exe)
+
+run_testserver - 0 error(s), 0 warning(s)
+--------------------Configuration: browser - Debug|Win32-----------------------
+Compiling...
+bookmark_manager_resources_map.cc
+theme_resources_map.cc
+shared_resources_map.cc
+process_singleton_win.cc
+e:\b\build\slave\win\build\src\chrome\browser\process_singleton_win.cc(95) : error C2664: 'PathService::Get' : cannot convert parameter 2 from 'std::wstring *' to 'FilePath *'
+ Types pointed to are unrelated; conversion requires reinterpret_cast, C-style cast or function-style cast
+gpu_process_host.cc
+ntp_background_util.cc
+
+browser - 6 error(s), 0 warning(s)
+
+1 build system warning(s):
+ - PDB instance limit is enabled
+
+---------------------- Done ----------------------
+
+ Build: 244 succeeded, 1 failed, 233 up-to-date, 42 skipped
+
+
+We also see weird paths with mixed slashes like this:
+
+--------------------Configuration: inspector_protocol_sources - Debug|Win32----
+--------------------Configuration: installer_util_nacl_win64 - Debug|x64-------
+Compiling...
+google_chrome_distribution_dummy.cc
+helper.cc
+installation_state.cc
+self_reg_work_item.cc
+..\chrome/installer/util/work_item.h(67) : error C2514: 'FilePath' : class has no constructors
+ ..\chrome/installer/util/work_item.h(26) : see declaration of 'FilePath'
+delete_tree_work_item.cc
+delete_reg_key_work_item.cc
diff --git a/chromium/tools/emacs/trybot.el b/chromium/tools/emacs/trybot.el
new file mode 100644
index 00000000000..970ffc085d9
--- /dev/null
+++ b/chromium/tools/emacs/trybot.el
@@ -0,0 +1,176 @@
+; To use this,
+; 1) Add to init.el:
+; (setq-default chrome-root "/path/to/chrome/src/")
+; (add-to-list 'load-path (concat chrome-root "tools/emacs"))
+; (require 'trybot)
+; 2) Run on trybot output:
+; M-x trybot
+;
+; To hack on this,
+; M-x eval-buffer
+; M-x trybot-test-win or M-x trybot-test-mac
+
+(defvar chrome-root nil
+ "Path to the src/ directory of your Chrome checkout.")
+
+(defun get-chrome-root ()
+ (or chrome-root default-directory))
+
+; Hunt down from the top, case correcting each path component as needed.
+; Currently does not keep a cache. Returns nil if no matching file can be
+; figured out.
+(defun case-corrected-filename (filename)
+ (save-match-data
+ (let ((path-components (split-string filename "/"))
+ (corrected-path (file-name-as-directory (get-chrome-root))))
+ (mapc
+ (function
+ (lambda (elt)
+ (if corrected-path
+ (let ((next-component
+ (car (member-ignore-case
+ elt (directory-files corrected-path)))))
+ (setq corrected-path
+ (and next-component
+ (file-name-as-directory
+ (concat corrected-path next-component))))))))
+ path-components)
+ (if corrected-path
+ (file-relative-name (directory-file-name corrected-path)
+ (get-chrome-root))
+ nil))))
+
+(defun trybot-fixup-win ()
+ "Fix up Windows-specific output."
+
+ ; Fix Windows paths ("d:\...\src\").
+ (save-excursion
+ ; This regexp is subtle and rather hard to read. :~(
+ ; Use regexp-builder when making changes to it.
+ (while (re-search-forward
+ (concat
+ ; First part: path leader, either of the form
+ ; e:\...src\ or ..\
+ "\\(^.:\\\\.*\\\\src\\\\\\|\\.\\.\\\\\\)"
+ ; Second part: path, followed by error message marker.
+ "\\(.*?\\)[(:]") nil t)
+ (replace-match "" nil t nil 1)
+ ; Line now looks like:
+ ; foo\bar\baz.cc error message here
+ ; We want to fixup backslashes in path into forward slashes,
+ ; without modifying the error message - by matching up to the
+ ; first colon above (which will be just beyond the end of the
+ ; filename) we can use the end of the match as a limit.
+ (subst-char-in-region (point) (match-end 0) ?\\ ?/)
+ ; See if we can correct the file name casing.
+ (let ((filename (buffer-substring (match-beginning 2) (match-end 2))))
+ (if (and (not (file-exists-p filename))
+ (setq filename (case-corrected-filename filename)))
+ (replace-match filename t t nil 2))))))
+
+(defun trybot-fixup-maclin ()
+ "Fix up Mac/Linux output."
+ (save-excursion
+ (while (re-search-forward "^/b/build/[^ ]*/src/" nil t)
+ (replace-match ""))))
+
+(defun trybot-fixup (type-hint)
+ "Parse and fixup the contents of the current buffer as trybot output."
+
+ ; XXX is there something I should do so this stuff doesn't end up on the
+ ; undo stack?
+
+ ;; Fixup paths.
+ (cd (get-chrome-root))
+
+ (goto-char (point-min))
+
+ ;; Fix up path references.
+ (cond ((eq type-hint 'win) (trybot-fixup-win))
+ ((eq type-hint 'mac) (trybot-fixup-maclin))
+ ((eq type-hint 'linux) (trybot-fixup-maclin))
+ (t (trybot-fixup-win) (trybot-fixup-maclin)))
+
+ (compilation-mode))
+
+(defun trybot-get-new-buffer ()
+ "Get a new clean buffer for trybot output."
+ ; Use trybot-buffer-name if available; otherwise, "*trybot*".
+ (let ((buffer-name (if (boundp 'trybot-buffer-name)
+ trybot-buffer-name
+ "*trybot*")))
+ (let ((old (get-buffer buffer-name)))
+ (when old (kill-buffer old)))
+ (get-buffer-create buffer-name)))
+
+(defun trybot-fetch (type-hint url)
+ "Fetch a URL and postprocess it as trybot output."
+
+ (let ((on-fetch-completion
+ (lambda (process state)
+ (switch-to-buffer (process-buffer process))
+ (when (equal state "finished\n")
+ (trybot-fixup (process-get process 'type-hint)))))
+ (command (concat "curl -s " (shell-quote-argument url)
+ ; Pipe it through the output shortener.
+ (cond
+ ((eq type-hint 'win)
+ (concat " | " (get-chrome-root)
+ "build/sanitize-win-build-log.sh"))
+ ((eq type-hint 'mac)
+ (concat " | " (get-chrome-root)
+ "build/sanitize-mac-build-log.sh"))))))
+
+ ; Start up the subprocess.
+ (let* ((coding-system-for-read 'utf-8-dos)
+ (buffer (trybot-get-new-buffer))
+ (process (start-process-shell-command "curl" buffer command)))
+ ; Attach the type hint to the process so we can get it back when
+ ; the process completes.
+ (process-put process 'type-hint type-hint)
+ (set-process-query-on-exit-flag process nil)
+ (set-process-sentinel process on-fetch-completion))))
+
+(defun trybot-test (type-hint filename)
+ "Load the given test data filename and do the trybot parse on it."
+
+ (let ((trybot-buffer-name "*trybot-test*")
+ (url (concat "file://" (get-chrome-root) "tools/emacs/" filename)))
+ (trybot-fetch type-hint url)))
+
+(defun trybot-test-win ()
+ "Load the Windows test data and do the trybot parse on it."
+ (interactive)
+ (trybot-test 'win "trybot-windows.txt"))
+(defun trybot-test-mac ()
+ "Load the Mac test data and do the trybot parse on it."
+ (interactive)
+ (trybot-test 'mac "trybot-mac.txt"))
+(defun trybot-test-linux ()
+ "Load the Linux test data and do the trybot parse on it."
+ (interactive)
+ (trybot-test 'linux "trybot-linux.txt"))
+
+(defun trybot (url)
+ "Fetch a trybot URL and fix up the output into a compilation-mode buffer."
+ (interactive "sURL to trybot stdout (leave empty to use clipboard): ")
+
+ ;; Yank URL from clipboard if necessary.
+ (when (= (length url) 0)
+ (with-temp-buffer
+ (clipboard-yank)
+ (setq url (buffer-string))))
+
+ ;; Append /text to the URL to get plain text output in the common
+ ;; case of getting a URL to the HTML build log.
+ (when (equal "stdio" (car (last (split-string url "/"))))
+ (setq url (concat url "/text")))
+
+ (let ((type-hint (cond ((string-match "/[Ww]in" url) 'win)
+ ((string-match "/mac/" url) 'mac)
+ ; Match /linux, /linux_view, etc.
+ ((string-match "/linux" url) 'linux)
+ (t 'unknown))))
+ (trybot-fetch type-hint url)))
+
+(provide 'trybot)
diff --git a/chromium/tools/export_tarball/export_tarball.py b/chromium/tools/export_tarball/export_tarball.py
new file mode 100755
index 00000000000..876b3d89d35
--- /dev/null
+++ b/chromium/tools/export_tarball/export_tarball.py
@@ -0,0 +1,197 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This tool creates a tarball with all the sources, but without .svn directories.
+
+It can also remove files which are not strictly required for build, so that
+the resulting tarball can be reasonably small (last time it was ~110 MB).
+
+Example usage:
+
+export_tarball.py /foo/bar
+
+The above will create file /foo/bar.tar.bz2.
+"""
+
+import optparse
+import os
+import subprocess
+import sys
+import tarfile
+
+
+NONESSENTIAL_DIRS = (
+ 'breakpad/src/processor/testdata',
+ 'chrome/browser/resources/tracing/tests',
+ 'chrome/common/extensions/docs',
+ 'courgette/testdata',
+ 'data',
+ 'native_client/src/trusted/service_runtime/testdata',
+ 'src/chrome/test/data',
+ 'o3d/documentation',
+ 'o3d/samples',
+ 'o3d/tests',
+ 'ppapi/examples',
+ 'ppapi/native_client/tests',
+ 'third_party/angle/samples/gles2_book',
+ 'third_party/findbugs',
+ 'third_party/hunspell_dictionaries',
+ 'third_party/hunspell/tests',
+ 'third_party/lighttpd',
+ 'third_party/sqlite/src/test',
+ 'third_party/sqlite/test',
+ 'third_party/vc_80',
+ 'third_party/xdg-utils/tests',
+ 'third_party/yasm/source/patched-yasm/modules/arch/x86/tests',
+ 'third_party/yasm/source/patched-yasm/modules/dbgfmts/dwarf2/tests',
+ 'third_party/yasm/source/patched-yasm/modules/objfmts/bin/tests',
+ 'third_party/yasm/source/patched-yasm/modules/objfmts/coff/tests',
+ 'third_party/yasm/source/patched-yasm/modules/objfmts/elf/tests',
+ 'third_party/yasm/source/patched-yasm/modules/objfmts/macho/tests',
+ 'third_party/yasm/source/patched-yasm/modules/objfmts/rdf/tests',
+ 'third_party/yasm/source/patched-yasm/modules/objfmts/win32/tests',
+ 'third_party/yasm/source/patched-yasm/modules/objfmts/win64/tests',
+ 'third_party/yasm/source/patched-yasm/modules/objfmts/xdf/tests',
+ 'third_party/WebKit/LayoutTests',
+ 'third_party/WebKit/Source/JavaScriptCore/tests',
+ 'third_party/WebKit/Source/WebCore/ChangeLog',
+ 'third_party/WebKit/Source/WebKit2',
+ 'third_party/WebKit/Tools/Scripts',
+ 'tools/gyp/test',
+ 'v8/test',
+ 'webkit/data/layout_tests',
+ 'webkit/tools/test/reference_build',
+)
+
+TESTDIRS = (
+ 'chrome/test/data',
+ 'content/test/data',
+ 'media/test/data',
+ 'net/data',
+)
+
+
+def GetSourceDirectory():
+ return os.path.realpath(
+ os.path.join(os.path.dirname(__file__), '..', '..', '..', 'src'))
+
+
+# Workaround lack of the exclude parameter in add method in python-2.4.
+# TODO(phajdan.jr): remove the workaround when it's not needed on the bot.
+class MyTarFile(tarfile.TarFile):
+ def set_remove_nonessential_files(self, remove):
+ self.__remove_nonessential_files = remove
+
+ def set_verbose(self, verbose):
+ self.__verbose = verbose
+
+ def __report_skipped(self, name):
+ if self.__verbose:
+ print 'D\t%s' % name
+
+ def __report_added(self, name):
+ if self.__verbose:
+ print 'A\t%s' % name
+
+ def add(self, name, arcname=None, recursive=True, exclude=None, filter=None):
+ head, tail = os.path.split(name)
+ if tail in ('.svn', '.git'):
+ self.__report_skipped(name)
+ return
+
+ if self.__remove_nonessential_files:
+ # WebKit change logs take quite a lot of space. This saves ~10 MB
+ # in a bzip2-compressed tarball.
+ if 'ChangeLog' in name:
+ self.__report_skipped(name)
+ return
+
+ # Remove contents of non-essential directories, but preserve gyp files,
+ # so that build/gyp_chromium can work.
+ for nonessential_dir in (NONESSENTIAL_DIRS + TESTDIRS):
+ dir_path = os.path.join(GetSourceDirectory(), nonessential_dir)
+ if (name.startswith(dir_path) and
+ os.path.isfile(name) and
+ 'gyp' not in name):
+ self.__report_skipped(name)
+ return
+
+ self.__report_added(name)
+ tarfile.TarFile.add(self, name, arcname=arcname, recursive=recursive)
+
+
+def main(argv):
+ parser = optparse.OptionParser()
+ parser.add_option("--basename")
+ parser.add_option("--remove-nonessential-files",
+ dest="remove_nonessential_files",
+ action="store_true", default=False)
+ parser.add_option("--test-data", action="store_true")
+ # TODO(phajdan.jr): Remove --xz option when it's not needed for compatibility.
+ parser.add_option("--xz", action="store_true")
+ parser.add_option("--verbose", action="store_true", default=False)
+ parser.add_option("--progress", action="store_true", default=False)
+
+ options, args = parser.parse_args(argv)
+
+ if len(args) != 1:
+ print 'You must provide only one argument: output file name'
+ print '(without .tar.xz extension).'
+ return 1
+
+ if not os.path.exists(GetSourceDirectory()):
+ print 'Cannot find the src directory ' + GetSourceDirectory()
+ return 1
+
+ # These two commands are from src/DEPS; please keep them in sync.
+ if subprocess.call(['python', 'build/util/lastchange.py', '-o',
+ 'build/util/LASTCHANGE'], cwd=GetSourceDirectory()) != 0:
+ print 'Could not run build/util/lastchange.py to update LASTCHANGE.'
+ return 1
+ if subprocess.call(['python', 'build/util/lastchange.py', '-s',
+ 'third_party/WebKit', '-o',
+ 'build/util/LASTCHANGE.blink'],
+ cwd=GetSourceDirectory()) != 0:
+ print 'Could not run build/util/lastchange.py to update LASTCHANGE.blink.'
+ return 1
+
+ output_fullname = args[0] + '.tar'
+ output_basename = options.basename or os.path.basename(args[0])
+
+ archive = MyTarFile.open(output_fullname, 'w')
+ archive.set_remove_nonessential_files(options.remove_nonessential_files)
+ archive.set_verbose(options.verbose)
+ try:
+ if options.test_data:
+ for directory in TESTDIRS:
+ archive.add(os.path.join(GetSourceDirectory(), directory),
+ arcname=os.path.join(output_basename, directory))
+ else:
+ archive.add(GetSourceDirectory(), arcname=output_basename)
+ finally:
+ archive.close()
+
+ if options.progress:
+ sys.stdout.flush()
+ pv = subprocess.Popen(
+ ['pv', '--force', output_fullname],
+ stdout=subprocess.PIPE,
+ stderr=sys.stdout)
+ with open(output_fullname + '.xz', 'w') as f:
+ rc = subprocess.call(['xz', '-9', '-'], stdin=pv.stdout, stdout=f)
+ pv.wait()
+ else:
+ rc = subprocess.call(['xz', '-9', output_fullname])
+
+ if rc != 0:
+ print 'xz -9 failed!'
+ return 1
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main(sys.argv[1:]))
diff --git a/chromium/tools/export_tarball/export_v8_tarball.py b/chromium/tools/export_tarball/export_v8_tarball.py
new file mode 100755
index 00000000000..b232c0aee73
--- /dev/null
+++ b/chromium/tools/export_tarball/export_v8_tarball.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a tarball with V8 sources, but without .svn directories.
+
+This allows easy packaging of V8, synchronized with browser releases.
+
+Example usage:
+
+export_v8_tarball.py /foo/bar
+
+The above will create file /foo/bar/v8-VERSION.tar.bz2 if it doesn't exist.
+"""
+
+import optparse
+import os
+import re
+import subprocess
+import sys
+import tarfile
+
+_V8_MAJOR_VERSION_PATTERN = re.compile(r'#define\s+MAJOR_VERSION\s+(.*)')
+_V8_MINOR_VERSION_PATTERN = re.compile(r'#define\s+MINOR_VERSION\s+(.*)')
+_V8_BUILD_NUMBER_PATTERN = re.compile(r'#define\s+BUILD_NUMBER\s+(.*)')
+_V8_PATCH_LEVEL_PATTERN = re.compile(r'#define\s+PATCH_LEVEL\s+(.*)')
+
+_V8_PATTERNS = [
+ _V8_MAJOR_VERSION_PATTERN,
+ _V8_MINOR_VERSION_PATTERN,
+ _V8_BUILD_NUMBER_PATTERN,
+ _V8_PATCH_LEVEL_PATTERN]
+
+_NONESSENTIAL_DIRS = (
+ 'third_party/icu',
+)
+
+
+def GetV8Version(v8_directory):
+ """
+ Returns version number as string based on the string
+ contents of version.cc file.
+ """
+ with open(os.path.join(v8_directory, 'src', 'version.cc')) as version_file:
+ version_contents = version_file.read()
+
+ version_components = []
+ for pattern in _V8_PATTERNS:
+ version_components.append(pattern.search(version_contents).group(1).strip())
+
+ if version_components[len(version_components) - 1] == '0':
+ version_components.pop()
+
+ return '.'.join(version_components)
+
+
+def GetSourceDirectory():
+ return os.path.realpath(
+ os.path.join(os.path.dirname(__file__), '..', '..', '..', 'src'))
+
+
+def GetV8Directory():
+ return os.path.join(GetSourceDirectory(), 'v8')
+
+
+# Workaround lack of the exclude parameter in add method in python-2.4.
+# TODO(phajdan.jr): remove the workaround when it's not needed on the bot.
+class MyTarFile(tarfile.TarFile):
+ def set_remove_nonessential_files(self, remove):
+ self.__remove_nonessential_files = remove
+
+ def add(self, name, arcname=None, recursive=True, exclude=None, filter=None):
+ head, tail = os.path.split(name)
+ if tail in ('.svn', '.git'):
+ return
+
+ if self.__remove_nonessential_files:
+ # Remove contents of non-essential directories, but preserve gyp files,
+ # so that build/gyp_chromium can work.
+ for nonessential_dir in _NONESSENTIAL_DIRS:
+ dir_path = os.path.join(GetV8Directory(), nonessential_dir)
+ if (name.startswith(dir_path) and
+ os.path.isfile(name) and
+ 'gyp' not in name):
+ return
+
+ tarfile.TarFile.add(self, name, arcname=arcname, recursive=recursive)
+
+
+def main(argv):
+ parser = optparse.OptionParser()
+ options, args = parser.parse_args(argv)
+
+ if len(args) != 1:
+ print 'You must provide only one argument: output file directory'
+ return 1
+
+ v8_directory = GetV8Directory()
+ if not os.path.exists(v8_directory):
+ print 'Cannot find the v8 directory.'
+ return 1
+
+ v8_version = GetV8Version(v8_directory)
+ print 'Packaging V8 version %s...' % v8_version
+
+ subprocess.check_call(["make", "dependencies"], cwd=v8_directory)
+
+ output_basename = 'v8-%s' % v8_version
+
+ # Package full tarball.
+ output_fullname = os.path.join(args[0], output_basename + '.tar.bz2')
+ if not os.path.exists(output_fullname):
+ archive = MyTarFile.open(output_fullname, 'w:bz2')
+ archive.set_remove_nonessential_files(False)
+ try:
+ archive.add(v8_directory, arcname=output_basename)
+ finally:
+ archive.close()
+
+ # Package lite tarball.
+ output_fullname = os.path.join(args[0], output_basename + '-lite.tar.bz2')
+ if not os.path.exists(output_fullname):
+ archive = MyTarFile.open(output_fullname, 'w:bz2')
+ archive.set_remove_nonessential_files(True)
+ try:
+ archive.add(v8_directory, arcname=output_basename)
+ finally:
+ archive.close()
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/chromium/tools/find_runtime_symbols/OWNERS b/chromium/tools/find_runtime_symbols/OWNERS
new file mode 100644
index 00000000000..aeea00ec3e0
--- /dev/null
+++ b/chromium/tools/find_runtime_symbols/OWNERS
@@ -0,0 +1 @@
+dmikurube@chromium.org
diff --git a/chromium/tools/find_runtime_symbols/PRESUBMIT.py b/chromium/tools/find_runtime_symbols/PRESUBMIT.py
new file mode 100644
index 00000000000..3a29a5aae5a
--- /dev/null
+++ b/chromium/tools/find_runtime_symbols/PRESUBMIT.py
@@ -0,0 +1,45 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Top-level presubmit script for find_runtime_symbols.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details on the presubmit API built into depot_tools.
+"""
+
+
+def CommonChecks(input_api, output_api):
+ import sys
+ def join(*args):
+ return input_api.os_path.join(input_api.PresubmitLocalPath(), *args)
+
+ output = []
+ sys_path_backup = sys.path
+ try:
+ sys.path = [
+ join('..', 'find_runtime_symbols'),
+ ] + sys.path
+ output.extend(input_api.canned_checks.RunPylint(input_api, output_api))
+ finally:
+ sys.path = sys_path_backup
+
+ output.extend(
+ input_api.canned_checks.RunUnitTestsInDirectory(
+ input_api, output_api,
+ input_api.os_path.join(input_api.PresubmitLocalPath(), 'tests'),
+ whitelist=[r'.+_test\.py$']))
+
+ if input_api.is_committing:
+ output.extend(input_api.canned_checks.PanProjectChecks(input_api,
+ output_api,
+ owners_check=False))
+ return output
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ return CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ return CommonChecks(input_api, output_api)
diff --git a/chromium/tools/find_runtime_symbols/README b/chromium/tools/find_runtime_symbols/README
new file mode 100644
index 00000000000..ee5c2ac88ca
--- /dev/null
+++ b/chromium/tools/find_runtime_symbols/README
@@ -0,0 +1,24 @@
+This script maps runtime addresses to symbol names. It is robust over
+Address Space Layout Randomization (ASLR) since it uses runtime addresses with
+runtime mapping information (/proc/.../maps).
+Like 'pprof --symbols' in gperftools <http://code.google.com/p/gperftools/>.
+
+
+Step 1: Prepare symbol information.
+
+It is required to collect symbol information before mapping runtime addresses
+to symbol names.
+
+./prepare_symbol_info.py /path/to/maps [/another/path/to/symbol_info_dir]
+
+The required 'maps' file is /proc/.../maps of the process at runtime.
+
+
+Step 2: Find symbols.
+
+./find_runtime_symbols.py /path/to/symbol_info_dir < addresses.txt
+
+'symbol_info_dir' is the result of Step 1.
+The stdin should be a list of hex addresses to map, one per line.
+
+The results will be printed to stdout like 'pprof --symbols'.
diff --git a/chromium/tools/find_runtime_symbols/find_runtime_symbols.py b/chromium/tools/find_runtime_symbols/find_runtime_symbols.py
new file mode 100755
index 00000000000..038874132db
--- /dev/null
+++ b/chromium/tools/find_runtime_symbols/find_runtime_symbols.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Find symbols in a binary corresponding to given runtime virtual addresses.
+
+Note that source file names are treated as symbols in this script while they
+are actually not.
+"""
+
+import json
+import logging
+import os
+import sys
+
+from static_symbols import StaticSymbolsInFile
+
+
+_BASE_PATH = os.path.dirname(os.path.abspath(__file__))
+_TOOLS_LINUX_PATH = os.path.join(_BASE_PATH, os.pardir, 'linux')
+sys.path.insert(0, _TOOLS_LINUX_PATH)
+
+
+from procfs import ProcMaps # pylint: disable=F0401
+
+try:
+ from collections import OrderedDict # pylint: disable=E0611
+except ImportError:
+ _SIMPLEJSON_PATH = os.path.join(_BASE_PATH, os.pardir, os.pardir,
+ 'third_party')
+ sys.path.insert(0, _SIMPLEJSON_PATH)
+ from simplejson import OrderedDict
+
+
+FUNCTION_SYMBOLS = 0
+SOURCEFILE_SYMBOLS = 1
+TYPEINFO_SYMBOLS = 2
+
+_MAPS_FILENAME = 'maps'
+_FILES_FILENAME = 'files.json'
+
+
+class RuntimeSymbolsInProcess(object):
+ def __init__(self):
+ self._maps = None
+ self._static_symbols_in_filse = {}
+
+ def find_procedure(self, runtime_address):
+ for vma in self._maps.iter(ProcMaps.executable):
+ if vma.begin <= runtime_address < vma.end:
+ static_symbols = self._static_symbols_in_filse.get(vma.name)
+ if static_symbols:
+ return static_symbols.find_procedure_by_runtime_address(
+ runtime_address, vma)
+ else:
+ return None
+ return None
+
+ def find_sourcefile(self, runtime_address):
+ for vma in self._maps.iter(ProcMaps.executable):
+ if vma.begin <= runtime_address < vma.end:
+ static_symbols = self._static_symbols_in_filse.get(vma.name)
+ if static_symbols:
+ return static_symbols.find_sourcefile_by_runtime_address(
+ runtime_address, vma)
+ else:
+ return None
+ return None
+
+ def find_typeinfo(self, runtime_address):
+ for vma in self._maps.iter(ProcMaps.constants):
+ if vma.begin <= runtime_address < vma.end:
+ static_symbols = self._static_symbols_in_filse.get(vma.name)
+ if static_symbols:
+ return static_symbols.find_typeinfo_by_runtime_address(
+ runtime_address, vma)
+ else:
+ return None
+ return None
+
+ @staticmethod
+ def load(prepared_data_dir):
+ symbols_in_process = RuntimeSymbolsInProcess()
+
+ with open(os.path.join(prepared_data_dir, _MAPS_FILENAME), mode='r') as f:
+ symbols_in_process._maps = ProcMaps.load_file(f)
+ with open(os.path.join(prepared_data_dir, _FILES_FILENAME), mode='r') as f:
+ files = json.load(f)
+
+ # pylint: disable=W0212
+ for vma in symbols_in_process._maps.iter(ProcMaps.executable_and_constants):
+ file_entry = files.get(vma.name)
+ if not file_entry:
+ continue
+
+ static_symbols = StaticSymbolsInFile(vma.name)
+
+ nm_entry = file_entry.get('nm')
+ if nm_entry and nm_entry['format'] == 'bsd':
+ with open(os.path.join(prepared_data_dir, nm_entry['file']), 'r') as f:
+ static_symbols.load_nm_bsd(f, nm_entry['mangled'])
+
+ readelf_entry = file_entry.get('readelf-e')
+ if readelf_entry:
+ with open(os.path.join(prepared_data_dir, readelf_entry['file']),
+ 'r') as f:
+ static_symbols.load_readelf_ew(f)
+
+ decodedline_file_entry = file_entry.get('readelf-debug-decodedline-file')
+ if decodedline_file_entry:
+ with open(os.path.join(prepared_data_dir,
+ decodedline_file_entry['file']), 'r') as f:
+ static_symbols.load_readelf_debug_decodedline_file(f)
+
+ symbols_in_process._static_symbols_in_filse[vma.name] = static_symbols
+
+ return symbols_in_process
+
+
+def _find_runtime_function_symbols(symbols_in_process, addresses):
+ result = OrderedDict()
+ for address in addresses:
+ if isinstance(address, basestring):
+ address = int(address, 16)
+ found = symbols_in_process.find_procedure(address)
+ if found:
+ result[address] = found.name
+ else:
+ result[address] = '0x%016x' % address
+ return result
+
+
+def _find_runtime_sourcefile_symbols(symbols_in_process, addresses):
+ result = OrderedDict()
+ for address in addresses:
+ if isinstance(address, basestring):
+ address = int(address, 16)
+ found = symbols_in_process.find_sourcefile(address)
+ if found:
+ result[address] = found
+ else:
+ result[address] = ''
+ return result
+
+
+def _find_runtime_typeinfo_symbols(symbols_in_process, addresses):
+ result = OrderedDict()
+ for address in addresses:
+ if isinstance(address, basestring):
+ address = int(address, 16)
+ if address == 0:
+ result[address] = 'no typeinfo'
+ else:
+ found = symbols_in_process.find_typeinfo(address)
+ if found:
+ if found.startswith('typeinfo for '):
+ result[address] = found[13:]
+ else:
+ result[address] = found
+ else:
+ result[address] = '0x%016x' % address
+ return result
+
+
+_INTERNAL_FINDERS = {
+ FUNCTION_SYMBOLS: _find_runtime_function_symbols,
+ SOURCEFILE_SYMBOLS: _find_runtime_sourcefile_symbols,
+ TYPEINFO_SYMBOLS: _find_runtime_typeinfo_symbols,
+ }
+
+
+def find_runtime_symbols(symbol_type, symbols_in_process, addresses):
+ return _INTERNAL_FINDERS[symbol_type](symbols_in_process, addresses)
+
+
+def main():
+ # FIX: Accept only .pre data
+ if len(sys.argv) < 2:
+ sys.stderr.write("""Usage:
+%s /path/to/prepared_data_dir/ < addresses.txt
+""" % sys.argv[0])
+ return 1
+
+ log = logging.getLogger('find_runtime_symbols')
+ log.setLevel(logging.WARN)
+ handler = logging.StreamHandler()
+ handler.setLevel(logging.WARN)
+ formatter = logging.Formatter('%(message)s')
+ handler.setFormatter(formatter)
+ log.addHandler(handler)
+
+ prepared_data_dir = sys.argv[1]
+ if not os.path.exists(prepared_data_dir):
+ log.warn("Nothing found: %s" % prepared_data_dir)
+ return 1
+ if not os.path.isdir(prepared_data_dir):
+ log.warn("Not a directory: %s" % prepared_data_dir)
+ return 1
+
+ symbols_in_process = RuntimeSymbolsInProcess.load(prepared_data_dir)
+ symbols_dict = find_runtime_symbols(FUNCTION_SYMBOLS,
+ symbols_in_process,
+ sys.stdin)
+ for address, symbol in symbols_dict.iteritems():
+ if symbol:
+ print '%016x %s' % (address, symbol)
+ else:
+ print '%016x' % address
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/find_runtime_symbols/prepare_symbol_info.py b/chromium/tools/find_runtime_symbols/prepare_symbol_info.py
new file mode 100755
index 00000000000..befe314ab07
--- /dev/null
+++ b/chromium/tools/find_runtime_symbols/prepare_symbol_info.py
@@ -0,0 +1,252 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import hashlib
+import json
+import logging
+import optparse
+import os
+import re
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
# Directory containing this script; sibling reduce_debugline.py is invoked
# through a shell pipeline when source file names are requested.
BASE_PATH = os.path.dirname(os.path.abspath(__file__))
REDUCE_DEBUGLINE_PATH = os.path.join(BASE_PATH, 'reduce_debugline.py')
# tools/linux provides the ProcMaps parser imported below.
_TOOLS_LINUX_PATH = os.path.join(BASE_PATH, os.pardir, 'linux')
sys.path.insert(0, _TOOLS_LINUX_PATH)


from procfs import ProcMaps  # pylint: disable=F0401


LOGGER = logging.getLogger('prepare_symbol_info')
+
+
def _dump_command_result(command, output_dir_path, basename, suffix):
  """Runs |command| through the shell and captures its stdout into a file.

  stdout is written to a new '<basename>.XXXXXX<suffix>' file and stderr to
  a sibling '<suffix>.err' file, both created in |output_dir_path|.  The
  stderr capture is echoed at DEBUG level and then deleted.

  Returns:
    The path of the stdout dump, or None if the command failed or produced
    no output.
  """
  out_fd, out_path = tempfile.mkstemp(
      suffix=suffix, prefix=basename + '.', dir=output_dir_path)
  err_fd, err_path = tempfile.mkstemp(
      suffix=suffix + '.err', prefix=basename + '.', dir=output_dir_path)
  failed = False
  try:
    subprocess.check_call(
        command, stdout=out_fd, stderr=err_fd, shell=True)
  except (OSError, subprocess.CalledProcessError):
    failed = True
  finally:
    os.close(err_fd)
    os.close(out_fd)

  # Surface the captured stderr when debugging, then throw the file away.
  if os.path.exists(err_path):
    if LOGGER.getEffectiveLevel() <= logging.DEBUG:
      with open(err_path, 'r') as f:
        for line in f:
          LOGGER.debug(line.rstrip())
    os.remove(err_path)

  if not os.path.exists(out_path):
    return None
  # An empty or failed dump is useless: remove it and report failure.
  if os.path.getsize(out_path) == 0 or failed:
    os.remove(out_path)
    return None
  return out_path
+
+
def prepare_symbol_info(maps_path,
                        output_dir_path=None,
                        alternative_dirs=None,
                        use_tempdir=False,
                        use_source_file_name=False):
  """Prepares (collects) symbol information files for find_runtime_symbols.

  1) If |output_dir_path| is specified, it tries collecting symbol information
  files in the given directory |output_dir_path|.
  1-a) If |output_dir_path| doesn't exist, create the directory and use it.
  1-b) If |output_dir_path| is an empty directory, use it.
  1-c) If |output_dir_path| is a directory which has 'files.json', assumes that
  files are already collected and just ignores it.
  1-d) Otherwise, depends on |use_tempdir|.

  2) If |output_dir_path| is not specified, it tries to create a new directory
  depending on 'maps_path'.

  If it cannot create a new directory, creates a temporary directory depending
  on |use_tempdir|. If |use_tempdir| is False, returns None.

  Args:
    maps_path: A path to a file which contains '/proc/<pid>/maps'.
    output_dir_path: A path to a directory where files are prepared.
    alternative_dirs: A mapping from a directory '/path/on/target' where the
        target process runs to a directory '/path/on/host' where the script
        reads the binary. Considered to be used for Android binaries.
    use_tempdir: If True, it creates a temporary directory when it cannot
        create a new directory.
    use_source_file_name: If True, it adds reduced result of 'readelf -wL'
        to find source file names.

  Returns:
    A pair of a path to the prepared directory and a boolean representing
    if it created a temporary directory or not.
  """
  alternative_dirs = alternative_dirs or {}
  # Derive a default '<name>.pre' output directory from the maps file name,
  # or from the pid when reading /proc/<pid>/maps directly.
  if not output_dir_path:
    matched = re.match('^(.*)\.maps$', os.path.basename(maps_path))
    if matched:
      output_dir_path = matched.group(1) + '.pre'
  if not output_dir_path:
    matched = re.match('^/proc/(.*)/maps$', os.path.realpath(maps_path))
    if matched:
      output_dir_path = matched.group(1) + '.pre'
  if not output_dir_path:
    output_dir_path = os.path.basename(maps_path) + '.pre'
  # TODO(dmikurube): Find another candidate for output_dir_path.

  used_tempdir = False
  LOGGER.info('Data for profiling will be collected in "%s".' % output_dir_path)
  if os.path.exists(output_dir_path):
    if os.path.isdir(output_dir_path) and not os.listdir(output_dir_path):
      # Case 1-b: an empty existing directory is reused as-is.
      LOGGER.warn('Using an empty existing directory "%s".' % output_dir_path)
    else:
      LOGGER.warn('A file or a directory exists at "%s".' % output_dir_path)
      if os.path.exists(os.path.join(output_dir_path, 'files.json')):
        # Case 1-c: 'files.json' marks an already-prepared directory.
        LOGGER.warn('Using the existing directory "%s".' % output_dir_path)
        return output_dir_path, used_tempdir
      else:
        # Case 1-d: occupied by something else; fall back to a tempdir
        # only when the caller allowed it.
        if use_tempdir:
          output_dir_path = tempfile.mkdtemp()
          used_tempdir = True
          LOGGER.warn('Using a temporary directory "%s".' % output_dir_path)
        else:
          LOGGER.warn('The directory "%s" is not available.' % output_dir_path)
          return None, used_tempdir
  else:
    LOGGER.info('Creating a new directory "%s".' % output_dir_path)
    try:
      os.mkdir(output_dir_path)
    except OSError:
      LOGGER.warn('A directory "%s" cannot be created.' % output_dir_path)
      if use_tempdir:
        output_dir_path = tempfile.mkdtemp()
        used_tempdir = True
        LOGGER.warn('Using a temporary directory "%s".' % output_dir_path)
      else:
        LOGGER.warn('The directory "%s" is not available.' % output_dir_path)
        return None, used_tempdir

  # Keep a verbatim copy of the maps file alongside the collected data.
  shutil.copyfile(maps_path, os.path.join(output_dir_path, 'maps'))

  with open(maps_path, mode='r') as f:
    maps = ProcMaps.load_file(f)

  LOGGER.debug('Listing up symbols.')
  files = {}
  for entry in maps.iter(ProcMaps.executable):
    LOGGER.debug('  %016x-%016x +%06x %s' % (
        entry.begin, entry.end, entry.offset, entry.name))
    binary_path = entry.name
    # Remap target-device paths to host paths (e.g. for Android binaries).
    for target_path, host_path in alternative_dirs.iteritems():
      if entry.name.startswith(target_path):
        binary_path = entry.name.replace(target_path, host_path, 1)
    if not (ProcMaps.EXECUTABLE_PATTERN.match(binary_path) or
            (os.path.isfile(binary_path) and os.access(binary_path, os.X_OK))):
      continue
    # Dump demangled, address-sorted symbols; skip binaries nm cannot read.
    nm_filename = _dump_command_result(
        'nm -n --format bsd %s | c++filt' % binary_path,
        output_dir_path, os.path.basename(binary_path), '.nm')
    if not nm_filename:
      continue
    readelf_e_filename = _dump_command_result(
        'readelf -eW %s' % binary_path,
        output_dir_path, os.path.basename(binary_path), '.readelf-e')
    if not readelf_e_filename:
      continue
    # Optionally dump decoded line info reduced by reduce_debugline.py.
    readelf_debug_decodedline_file = None
    if use_source_file_name:
      readelf_debug_decodedline_file = _dump_command_result(
          'readelf -wL %s | %s' % (binary_path, REDUCE_DEBUGLINE_PATH),
          output_dir_path, os.path.basename(binary_path), '.readelf-wL')

    # Record the per-binary manifest entry that find_runtime_symbols reads.
    files[entry.name] = {}
    files[entry.name]['nm'] = {
        'file': os.path.basename(nm_filename),
        'format': 'bsd',
        'mangled': False}
    files[entry.name]['readelf-e'] = {
        'file': os.path.basename(readelf_e_filename)}
    if readelf_debug_decodedline_file:
      files[entry.name]['readelf-debug-decodedline-file'] = {
          'file': os.path.basename(readelf_debug_decodedline_file)}

    files[entry.name]['size'] = os.stat(binary_path).st_size

    # Fingerprint the binary (1 MiB chunks) so stale data can be detected.
    with open(binary_path, 'rb') as entry_f:
      md5 = hashlib.md5()
      sha1 = hashlib.sha1()
      chunk = entry_f.read(1024 * 1024)
      while chunk:
        md5.update(chunk)
        sha1.update(chunk)
        chunk = entry_f.read(1024 * 1024)
      files[entry.name]['sha1'] = sha1.hexdigest()
      files[entry.name]['md5'] = md5.hexdigest()

  with open(os.path.join(output_dir_path, 'files.json'), 'w') as f:
    json.dump(files, f, indent=2, sort_keys=True)

  LOGGER.info('Collected symbol information at "%s".' % output_dir_path)
  return output_dir_path, used_tempdir
+
+
def main():
  """Command-line entry point: collects symbol data for a maps file.

  Usage: prepare_symbol_info.py /path/to/maps [/path/to/output_data_dir/]

  Returns:
    0 on success, non-zero on failure.  Fails immediately on non-Linux
    platforms (the collection shells out to Linux tools such as nm and
    readelf).
  """
  if not sys.platform.startswith('linux'):
    # Fixed grammar of the user-facing message ("work" -> "works") and added
    # the missing trailing newline so it doesn't run into the next prompt.
    sys.stderr.write('This script works only on Linux.\n')
    return 1

  option_parser = optparse.OptionParser(
      '%s /path/to/maps [/path/to/output_data_dir/]' % sys.argv[0])
  option_parser.add_option('--alternative-dirs', dest='alternative_dirs',
                           metavar='/path/on/target@/path/on/host[:...]',
                           help='Read files in /path/on/host/ instead of '
                                'files in /path/on/target/.')
  option_parser.add_option('--verbose', dest='verbose', action='store_true',
                           help='Enable verbose mode.')
  options, args = option_parser.parse_args(sys.argv)

  # '--alternative-dirs' is a colon-separated list of 'target@host' pairs.
  alternative_dirs_dict = {}
  if options.alternative_dirs:
    for alternative_dir_pair in options.alternative_dirs.split(':'):
      target_path, host_path = alternative_dir_pair.split('@', 1)
      alternative_dirs_dict[target_path] = host_path

  # Console logging: INFO by default, DEBUG with --verbose.
  LOGGER.setLevel(logging.DEBUG)
  handler = logging.StreamHandler()
  if options.verbose:
    handler.setLevel(logging.DEBUG)
  else:
    handler.setLevel(logging.INFO)
  formatter = logging.Formatter('%(message)s')
  handler.setFormatter(formatter)
  LOGGER.addHandler(handler)

  # args[0] is the program name because parse_args was given sys.argv.
  if len(args) < 2:
    option_parser.error('Argument error.')
    return 1
  elif len(args) == 2:
    result, _ = prepare_symbol_info(args[1],
                                    alternative_dirs=alternative_dirs_dict)
  else:
    result, _ = prepare_symbol_info(args[1], args[2],
                                    alternative_dirs=alternative_dirs_dict)

  return not result


if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/find_runtime_symbols/reduce_debugline.py b/chromium/tools/find_runtime_symbols/reduce_debugline.py
new file mode 100755
index 00000000000..75c8c8578d7
--- /dev/null
+++ b/chromium/tools/find_runtime_symbols/reduce_debugline.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Reduces result of 'readelf -wL' to just a list of starting addresses.
+
+It lists up all addresses where the corresponding source files change. The
+list is sorted in ascending order. See tests/reduce_debugline_test.py for
+examples.
+
+This script assumes that the result of 'readelf -wL' ends with an empty line.
+
+Note: the option '-wL' has the same meaning with '--debug-dump=decodedline'.
+"""
+
+import re
+import sys
+
+
# Matches a source-file header line such as 'CU: ../../foo.cc:' or 'bar.cc:'.
_FILENAME_PATTERN = re.compile('(CU: |)(.+)\:')


def reduce_decoded_debugline(input_file):
  """Collects the first address of every source-file run in 'readelf -wL'.

  Scans the decoded line table and records, for each block of address rows
  that follows a file-name header, the first address seen.  Consecutive
  entries that resolve to the same file are then collapsed.

  Returns:
    A list of (address, filename) tuples sorted by address in ascending
    order, keeping only entries where the filename changes.
  """
  current_filename = ''
  first_addresses = {}
  in_address_run = False

  for raw_line in input_file:
    stripped = raw_line.strip()
    fields = stripped.split(None, 2)
    if len(fields) == 3 and fields[2].startswith('0x'):
      # Only the first address row after a filename header starts a run.
      if not in_address_run and current_filename:
        in_address_run = True
        first_addresses[int(fields[2], 16)] = current_filename
    else:
      in_address_run = False
      if stripped.endswith(':'):
        matched = _FILENAME_PATTERN.match(stripped)
        if matched:
          current_filename = matched.group(2)

  # Collapse consecutive addresses that share the same file.
  reduced = []
  previous_name = ''
  for address in sorted(first_addresses):
    name = first_addresses[address]
    if name != previous_name:
      reduced.append((address, name))
      previous_name = name
  return reduced
+
+
def main():
  """Filters 'readelf -wL' output on stdin down to address/file pairs.

  Prints one '<address> <filename>' line per source-file change, zero-padded
  to 16 hex digits when any address exceeds 32 bits and 8 digits otherwise.
  """
  # Pure stdin->stdout filter; no arguments are supported.
  if len(sys.argv) != 1:
    print >> sys.stderr, 'Unsupported arguments'
    return 1

  starting_list = reduce_decoded_debugline(sys.stdin)
  # The list is sorted, so the last entry holds the largest address.
  # NOTE(review): empty input raises IndexError here — confirm inputs always
  # contain at least one address row.
  bits64 = starting_list[-1][0] > 0xffffffff
  for address, filename in starting_list:
    if bits64:
      print '%016x %s' % (address, filename)
    else:
      print '%08x %s' % (address, filename)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/find_runtime_symbols/static_symbols.py b/chromium/tools/find_runtime_symbols/static_symbols.py
new file mode 100644
index 00000000000..cd57bacd99a
--- /dev/null
+++ b/chromium/tools/find_runtime_symbols/static_symbols.py
@@ -0,0 +1,277 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import bisect
+import re
+
+
# Innermost '(...)' argument list (optionally 'const') of a demangled name.
_ARGUMENT_TYPE_PATTERN = re.compile('\([^()]*\)(\s*const)?')
# Innermost '<...>' template argument list of a demangled name.
_TEMPLATE_ARGUMENT_PATTERN = re.compile('<[^<>]*>')
# Everything before the last whitespace-preceded 'Name::' qualifier.
_LEADING_TYPE_PATTERN = re.compile('^.*\s+(\w+::)')
# One row of the 'readelf -eW' section-header table.
# (sic: identifier is missing the final 'R' of PATTERN.)
_READELF_SECTION_HEADER_PATTER = re.compile(
    '^\s*\[\s*(Nr|\d+)\]\s+(|\S+)\s+([A-Z_]+)\s+([0-9a-f]+)\s+'
    '([0-9a-f]+)\s+([0-9a-f]+)\s+([0-9]+)\s+([WAXMSILGxOop]*)\s+'
    '([0-9]+)\s+([0-9]+)\s+([0-9]+)')
+
+
class ParsingException(Exception):
  """Raised when tool output (e.g. nm) does not match the expected format."""

  def __str__(self):
    message = self.args[0]
    return repr(message)
+
+
class AddressMapping(object):
  """A point-lookup table from an exact start address to a symbol entry."""

  def __init__(self):
    self._symbol_map = {}

  def append(self, start, entry):
    """Registers |entry| at the exact address |start| (last one wins)."""
    self._symbol_map[start] = entry

  def find(self, address):
    """Returns the entry registered exactly at |address|, or None."""
    return self._symbol_map.get(address, None)
+
+
class RangeAddressMapping(AddressMapping):
  """A range-lookup table: find() returns the entry whose start address is
  the greatest one not exceeding the queried address.

  Start addresses are kept in a list that is sorted lazily on the first
  lookup after an out-of-order append.
  """

  def __init__(self):
    super(RangeAddressMapping, self).__init__()
    self._sorted_start_list = []
    self._is_sorted = True

  def append(self, start, entry):
    if self._sorted_start_list:
      if self._sorted_start_list[-1] > start:
        # Out-of-order insert; defer re-sorting until the next find().
        self._is_sorted = False
      elif self._sorted_start_list[-1] == start:
        # Duplicate of the most recent start: keep the first entry.
        return
    self._sorted_start_list.append(start)
    self._symbol_map[start] = entry

  def find(self, address):
    """Returns the entry covering |address|, or None if the map is empty."""
    if not self._sorted_start_list:
      return None
    if not self._is_sorted:
      self._sorted_start_list.sort()
      self._is_sorted = True
    # bisect_right(...) - 1 selects the greatest start <= address.  The
    # previous bisect_left version mapped an address equal to a symbol's
    # start to the PREVIOUS symbol (off by one entry).
    found_index = bisect.bisect_right(self._sorted_start_list, address)
    # NOTE(review): an address below every start wraps to index -1 (the last
    # entry); preserved from the original — confirm callers never pass such
    # addresses.
    found_start_address = self._sorted_start_list[found_index - 1]
    return self._symbol_map[found_start_address]
+
+
class Procedure(object):
  """A procedure symbol together with its address range [start, end)."""

  def __init__(self, start, end, name):
    self.start = start
    self.end = end
    self.name = name

  def __eq__(self, other):
    return ((self.start, self.end, self.name) ==
            (other.start, other.end, other.name))

  def __ne__(self, other):
    return not self.__eq__(other)

  def __str__(self):
    return '%x-%x: %s' % (self.start, self.end, self.name)
+
+
class ElfSection(object):
  """One row of an ELF section-header table as printed by 'readelf -eW'."""

  def __init__(
      self, number, name, stype, address, offset, size, es, flg, lk, inf, al):
    self.number = number
    self.name = name
    self.stype = stype
    self.address = address
    self.offset = offset
    self.size = size
    self.es = es
    self.flg = flg
    self.lk = lk
    self.inf = inf
    self.al = al

  def __eq__(self, other):
    mine = (self.number, self.name, self.stype, self.address, self.offset,
            self.size, self.es, self.flg, self.lk, self.inf, self.al)
    theirs = (other.number, other.name, other.stype, other.address,
              other.offset, other.size, other.es, other.flg, other.lk,
              other.inf, other.al)
    return mine == theirs

  def __ne__(self, other):
    return not self.__eq__(other)

  def __str__(self):
    return '%x+%x(%x) %s' % (self.address, self.size, self.offset, self.name)
+
+
class StaticSymbolsInFile(object):
  """Represents static symbol information in a binary file.

  Symbols are loaded from the textual output of 'nm --format bsd',
  'readelf -eW' (section headers) and reduced 'readelf -wL' (decoded line
  info), and can then be queried by runtime address given the maps entry
  (vma) the binary is mapped at.
  """

  def __init__(self, my_name):
    # Binary path as it appears in the maps file; used to reject lookups
    # against a vma belonging to a different binary.
    self.my_name = my_name
    self._elf_sections = []
    self._procedures = RangeAddressMapping()
    self._sourcefiles = RangeAddressMapping()
    self._typeinfos = AddressMapping()

  def _append_elf_section(self, elf_section):
    self._elf_sections.append(elf_section)

  def _append_procedure(self, start, procedure):
    self._procedures.append(start, procedure)

  def _append_sourcefile(self, start, sourcefile):
    self._sourcefiles.append(start, sourcefile)

  def _append_typeinfo(self, start, typeinfo):
    self._typeinfos.append(start, typeinfo)

  def _find_symbol_by_runtime_address(self, address, vma, target):
    """Translates runtime |address| via |vma| into the ELF address space and
    looks it up in |target| (one of the address mappings).

    Returns None when the address is outside |vma|, the vma belongs to a
    different binary, or the file offset is not inside any loaded section.
    """
    if not (vma.begin <= address < vma.end):
      return None

    if vma.name != self.my_name:
      return None

    # Runtime address -> file offset -> address as seen by nm/readelf.
    file_offset = address - (vma.begin - vma.offset)
    elf_address = None
    for section in self._elf_sections:
      if section.offset <= file_offset < (section.offset + section.size):
        elf_address = section.address + file_offset - section.offset
    # NOTE(review): an elf_address of 0 is indistinguishable from "not
    # found" here — confirm no symbol legitimately lives at ELF address 0.
    if not elf_address:
      return None

    return target.find(elf_address)

  def find_procedure_by_runtime_address(self, address, vma):
    """Returns the Procedure covering |address| in |vma|, or None."""
    return self._find_symbol_by_runtime_address(address, vma, self._procedures)

  def find_sourcefile_by_runtime_address(self, address, vma):
    """Returns the source file name covering |address| in |vma|, or None."""
    return self._find_symbol_by_runtime_address(address, vma, self._sourcefiles)

  def find_typeinfo_by_runtime_address(self, address, vma):
    """Returns the typeinfo name registered exactly at |address|, or None."""
    return self._find_symbol_by_runtime_address(address, vma, self._typeinfos)

  def load_readelf_ew(self, f):
    """Parses the section-header table from 'readelf -eW' output in |f|."""
    # Skip everything up to the 'Section Headers:' marker.
    found_header = False
    for line in f:
      if line.rstrip() == 'Section Headers:':
        found_header = True
        break
    if not found_header:
      return None

    for line in f:
      line = line.rstrip()
      matched = _READELF_SECTION_HEADER_PATTER.match(line)
      if matched:
        self._append_elf_section(ElfSection(
            int(matched.group(1), 10),  # number
            matched.group(2),  # name
            matched.group(3),  # stype
            int(matched.group(4), 16),  # address
            int(matched.group(5), 16),  # offset
            int(matched.group(6), 16),  # size
            matched.group(7),  # es
            matched.group(8),  # flg
            matched.group(9),  # lk
            matched.group(10),  # inf
            matched.group(11)  # al
            ))
      else:
        # The section table ends where the next report section begins.
        if line in ('Key to Flags:', 'Program Headers:'):
          break

  def load_readelf_debug_decodedline_file(self, input_file):
    """Loads '<hex address> <filename>' lines from reduce_debugline.py."""
    for line in input_file:
      splitted = line.rstrip().split(None, 2)
      self._append_sourcefile(int(splitted[0], 16), splitted[1])

  @staticmethod
  def _parse_nm_bsd_line(line):
    """Splits one bsd-format nm line into (value, type, name) columns.

    Accepts an 8- or 16-hex-digit value column (presumably the 32-bit and
    64-bit layouts).

    Raises:
      ParsingException: if the line matches neither layout.
    """
    if line[8] == ' ':
      return line[0:8], line[9], line[11:]
    elif line[16] == ' ':
      return line[0:16], line[17], line[19:]
    raise ParsingException('Invalid nm output.')

  @staticmethod
  def _get_short_function_name(function):
    """Shortens a demangled C++ name by repeatedly stripping argument lists
    and template arguments (innermost first), then collapsing everything
    before the last 'Name::' qualifier."""
    while True:
      function, number = _ARGUMENT_TYPE_PATTERN.subn('', function)
      if not number:
        break
    while True:
      function, number = _TEMPLATE_ARGUMENT_PATTERN.subn('', function)
      if not number:
        break
    return _LEADING_TYPE_PATTERN.sub('\g<1>', function)

  def load_nm_bsd(self, f, mangled=False):
    """Loads procedure and typeinfo symbols from bsd-format nm output.

    Args:
      f: An iterable of nm output lines, sorted by address ('nm -n').
      mangled: If True, keep mangled names and skip typeinfo extraction.
    """
    last_start = 0
    routine = ''

    for line in f:
      line = line.rstrip()
      sym_value, sym_type, sym_name = self._parse_nm_bsd_line(line)

      # A blank value column means the symbol has no address; skip it.
      if sym_value[0] == ' ':
        continue

      start_val = int(sym_value, 16)

      # Data/read-only symbols named 'typeinfo ...' feed the typeinfo table.
      if (sym_type in ('r', 'R', 'D', 'U', 'd', 'V') and
          (not mangled and sym_name.startswith('typeinfo'))):
        self._append_typeinfo(start_val, sym_name)

      # It's possible for two symbols to share the same address, if
      # one is a zero-length variable (like __start_google_malloc) or
      # one symbol is a weak alias to another (like __libc_malloc).
      # In such cases, we want to ignore all values except for the
      # actual symbol, which in nm-speak has type "T". The logic
      # below does this, though it's a bit tricky: what happens when
      # we have a series of lines with the same address, is the first
      # one gets queued up to be processed. However, it won't
      # *actually* be processed until later, when we read a line with
      # a different address. That means that as long as we're reading
      # lines with the same address, we have a chance to replace that
      # item in the queue, which we do whenever we see a 'T' entry --
      # that is, a line with type 'T'. If we never see a 'T' entry,
      # we'll just go ahead and process the first entry (which never
      # got touched in the queue), and ignore the others.
      if start_val == last_start and (sym_type == 't' or sym_type == 'T'):
        # We are the 'T' symbol at this address, replace previous symbol.
        routine = sym_name
        continue
      elif start_val == last_start:
        # We're not the 'T' symbol at this address, so ignore us.
        continue

      # Tag this routine with the starting address in case the image
      # has multiple occurrences of this routine.  We use a syntax
      # that resembles template parameters that are automatically
      # stripped out by ShortFunctionName()
      sym_name += "<%016x>" % start_val

      if not mangled:
        routine = self._get_short_function_name(routine)
      self._append_procedure(
          last_start, Procedure(last_start, start_val, routine))

      last_start = start_val
      routine = sym_name

    # Flush the final queued symbol; its end address is unknown, so the
    # range is empty ([last_start, last_start)).
    if not mangled:
      routine = self._get_short_function_name(routine)
    self._append_procedure(
        last_start, Procedure(last_start, last_start, routine))
diff --git a/chromium/tools/findit/OWNERS b/chromium/tools/findit/OWNERS
new file mode 100644
index 00000000000..947f12e8524
--- /dev/null
+++ b/chromium/tools/findit/OWNERS
@@ -0,0 +1 @@
+stgao@chromium.org
diff --git a/chromium/tools/findit/blame.py b/chromium/tools/findit/blame.py
new file mode 100644
index 00000000000..5e5494e6cd5
--- /dev/null
+++ b/chromium/tools/findit/blame.py
@@ -0,0 +1,165 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from threading import Lock
+
+from common import utils
+import crash_utils
+
+
class Blame(object):
  """Blame information for a single line of a crash stack.

  Used when no CL changed the crashing file: the latest change to the
  crashed line is reported instead.

  Attributes:
    line_content: Content of the crashed line.
    component_name: Name of the component this line belongs to.
    stack_frame_index: Index of this frame within the stack.
    file: Name of the file containing the line.
    line_number: Line number that crashed.
    author: Author of the line at the latest revision before the crash.
    revision: That latest revision.
    message: Commit message of the revision.
    time: When the revision was committed.
    url: URL of the change for the revision.
    range_start: Start of the regression range for this component.
    range_end: End of the regression range for this component.
  """

  def __init__(self, line_content, component_name, stack_frame_index,
               file_name, line_number, author, revision, message, time,
               url, range_start, range_end):
    # Where the crash happened.
    self.line_content = line_content
    self.line_number = line_number
    self.file = file_name
    self.component_name = component_name
    self.stack_frame_index = stack_frame_index
    # Who last touched the line, and in which revision.
    self.author = author
    self.revision = revision
    self.message = message
    self.time = time
    self.url = url
    # Regression range, when known.
    self.range_start = range_start
    self.range_end = range_end
+
+
class BlameList(object):
  """Represents a list of blame objects.

  Thread-safe: entries are appended from concurrently-running tasks under
  |blame_list_lock|.
  """

  def __init__(self):
    self.blame_list = []
    # Guards |blame_list| against concurrent appends from worker tasks.
    self.blame_list_lock = Lock()

  def __getitem__(self, index):
    # Indexing reads are not locked; presumably only done after FindBlame
    # (and its tasks) have completed.
    return self.blame_list[index]

  def FindBlame(self, callstack, component_to_crash_revision_dict,
                component_to_regression_dict, parsers,
                top_n_frames=10):
    """Given a stack within a stacktrace, retrieves blame information.

    Only either first 'top_n_frames' or the length of stack, whichever is
    shorter, results are returned. The default value of 'top_n_frames' is 10.

    One task per frame is queued and run via crash_utils.RunTasks; results
    accumulate in |self.blame_list|.  Nothing is returned.

    Args:
      callstack: The list of stack frames.
      component_to_crash_revision_dict: A dictionary that maps component to
          its crash revision.
      component_to_regression_dict: A dictionary that maps component to its
          revision range.
      parsers: A list of two parsers, svn_parser and git_parser
      top_n_frames: A number of stack frames to show the blame result for.
    """
    # Only return blame information for first 'top_n_frames' frames.
    stack_frames = callstack.GetTopNFrames(top_n_frames)
    tasks = []
    # Iterate through frames in stack.
    for stack_frame in stack_frames:
      # If the component this line is from does not have a crash revision,
      # it is not possible to get blame information, so ignore this line.
      component_path = stack_frame.component_path
      if component_path not in component_to_crash_revision_dict:
        continue

      crash_revision = component_to_crash_revision_dict[
          component_path]['revision']
      range_start = None
      range_end = None
      repository_type = crash_utils.GetRepositoryType(crash_revision)
      repository_parser = parsers[repository_type]

      # If the revision is in SVN, and if regression information is available,
      # get it. For Git, we cannot know the ordering between hash numbers.
      if repository_type == 'svn':
        if component_to_regression_dict and \
            component_path in component_to_regression_dict:
          component_object = component_to_regression_dict[component_path]
          range_start = int(component_object['old_revision'])
          range_end = int(component_object['new_revision'])

      # Create a task to generate blame entry.
      tasks.append({
          'function': self.__GenerateBlameEntry,
          'args': [repository_parser, stack_frame, crash_revision,
                   range_start, range_end]})

    # Run all the tasks.
    crash_utils.RunTasks(tasks)

  def __GenerateBlameEntry(self, repository_parser, stack_frame,
                           crash_revision, range_start, range_end):
    """Generates blame list from the arguments."""
    stack_frame_index = stack_frame.index
    component_path = stack_frame.component_path
    component_name = stack_frame.component_name
    file_name = stack_frame.file_name
    file_path = stack_frame.file_path
    crashed_line_number = stack_frame.crashed_line_range[0]

    # Make the file path relative to its component.
    if file_path.startswith(component_path):
      file_path = file_path[len(component_path):]

    # Parse blame information.
    parsed_blame_info = repository_parser.ParseBlameInfo(
        component_path, file_path, crashed_line_number, crash_revision)

    # If it fails to retrieve information, do not do anything.
    if not parsed_blame_info:
      return

    # Create blame object from the parsed info and add it to the list.
    (line_content, revision, author, url, message, time) = parsed_blame_info
    blame = Blame(line_content, component_name, stack_frame_index, file_name,
                  crashed_line_number, author, revision, message, time, url,
                  range_start, range_end)

    with self.blame_list_lock:
      self.blame_list.append(blame)

  def FilterAndSortBlameList(self):
    """Filters and sorts the blame list."""
    # Sort the blame list by its position in stack.
    self.blame_list.sort(key=lambda blame: blame.stack_frame_index)

    filtered_blame_list = []

    for blame in self.blame_list:
      # If regression information is available, check if it needs to be
      # filtered.
      if blame.range_start and blame.range_end:

        # Discards results that are after the end of regression.
        if not utils.IsGitHash(blame.revision) and (
            int(blame.range_end) <= int(blame.revision)):
          continue

      filtered_blame_list.append(blame)

    self.blame_list = filtered_blame_list
diff --git a/chromium/tools/findit/chromium_deps.py b/chromium/tools/findit/chromium_deps.py
new file mode 100644
index 00000000000..46436cbc7e0
--- /dev/null
+++ b/chromium/tools/findit/chromium_deps.py
@@ -0,0 +1,240 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import json
+import os
+import re
+import time
+import urllib2
+
+from common import utils
+
+
_THIS_DIR = os.path.abspath(os.path.dirname(__file__))
# Load deps_config.json once at import time.  The handle is closed promptly
# via 'with' (the original left the file object unclosed).
with open(os.path.join(_THIS_DIR, 'deps_config.json'), 'r') as _config_file:
  CONFIG = json.loads(_config_file.read())
# Matches retired git.chromium.org repository URLs for rewriting.
OLD_GIT_URL_PATTERN = re.compile(r'https?://git.chromium.org/(.*)')
+
+
class _VarImpl(object):
  """Backs the Var() callable made available to DEPS files when evaluated."""

  def __init__(self, local_scope):
    self._local_scope = local_scope

  def Lookup(self, var_name):
    """Returns vars[var_name] from the wrapped scope.

    Raises:
      Exception: if |var_name| is not defined in the scope's 'vars' dict.
    """
    variables = self._local_scope.get('vars', {})
    if var_name not in variables:
      raise Exception('Var is not defined: %s' % var_name)
    return variables[var_name]
+
+
def _ParseDEPS(content):
  """Evaluates the text of a chromium DEPS file and extracts dependencies.

  |content| is executed as Python with a Var() helper in scope, so it must
  come from a trusted source (see GetChromiumComponents).

  Returns:
    A (deps, deps_os) pair of dictionaries, either of which may be empty.
  """
  evaluated = {}
  var_helper = _VarImpl(evaluated)
  sandbox = {
      'Var': var_helper.Lookup,
      'deps': {},
      'deps_os': {},
      'include_rules': [],
      'skip_child_includes': [],
      'hooks': [],
  }
  exec(content, sandbox, evaluated)

  return (evaluated.setdefault('deps', {}),
          evaluated.setdefault('deps_os', {}))
+
+
def _GetComponentName(path, host_dirs):
  """Maps a DEPS checkout path to a short component name.

  When |path| lives under one of |host_dirs|, the first path segment below
  that directory becomes the component name (lower-cased, with historical
  renames such as webkit -> blink applied).  Otherwise the whole path,
  joined with underscores, is used.
  """
  renames = {
      'webkit': 'blink',
  }

  for host_dir in host_dirs:
    if not path.startswith(host_dir):
      continue
    first_segment = path[len(host_dir):].split('/')[0].lower()
    return renames.get(first_segment, first_segment).lower()

  # Unknown path: fall back to the whole path as the component name.
  return '_'.join(path.split('/'))
+
+
def _GetContentOfDEPS(revision):
  """Downloads the DEPS (or .DEPS.git) file content at |revision|.

  Returns:
    The decoded file content, or '' when the download fails.
  """
  chromium_git_file_url_template = CONFIG['chromium_git_file_url']

  # Try .DEPS.git first, because before migration from SVN to GIT, the .DEPS.git
  # has the dependency in GIT repo while DEPS has dependency in SVN repo.
  url = chromium_git_file_url_template % (revision, '.DEPS.git')
  http_status_code, content = utils.GetHttpClient().Get(
      url, retries=5, retry_if_not=404)

  # If .DEPS.git is not found, use DEPS, assuming it is a commit after migration
  # from SVN to GIT.
  if http_status_code == 404:
    url = chromium_git_file_url_template % (revision, 'DEPS')
    http_status_code, content = utils.GetHttpClient().Get(url, retries=5)

  if http_status_code == 200:
    # File content is transferred base64-encoded; decode before returning.
    return base64.b64decode(content)
  else:
    return ''
+
+
def GetChromiumComponents(chromium_revision,
                          os_platform='unix',
                          deps_file_downloader=_GetContentOfDEPS):
  """Return a list of components used by Chrome of the given revision.

  Args:
    chromium_revision: Revision of the Chrome build: svn revision, or git hash.
    os_platform: The target platform of the Chrome build, eg. win, mac, etc.
    deps_file_downloader: A function that takes the chromium_revision as input,
                          and returns the content of the DEPS file. The returned
                          content is assumed to be trusted input and will be
                          evaluated as python code.

  Returns:
    A map from component path to parsed component name, repository URL,
    repository type and revision.
    Return None if an error occurs.
  """
  # DEPS files use 'unix' where callers may say 'linux'.
  if os_platform.lower() == 'linux':
    os_platform = 'unix'

  chromium_git_base_url = CONFIG['chromium_git_base_url']

  if not utils.IsGitHash(chromium_revision):
    # Convert svn revision or commit position to Git hash.
    cr_rev_url_template = CONFIG['cr_rev_url']
    url = cr_rev_url_template % chromium_revision
    status_code, content = utils.GetHttpClient().Get(
        url, timeout=120, retries=5, retry_if_not=404)
    if status_code != 200 or not content:
      if status_code == 404:
        print 'Chromium commit position %s is not found.' % chromium_revision
      return None

    cr_rev_data = json.loads(content)
    if 'git_sha' not in cr_rev_data:
      return None

    # Reject commit positions of repositories other than chromium/src.
    if 'repo' not in cr_rev_data or cr_rev_data['repo'] != 'chromium/src':
      print ('%s seems like a commit position of "%s", but not "chromium/src".'
             % (chromium_revision, cr_rev_data['repo']))
      return None

    chromium_revision = cr_rev_data.get('git_sha')
    if not chromium_revision:
      return None

  # Download the content of DEPS file in chromium.
  deps_content = deps_file_downloader(chromium_revision)
  if not deps_content:
    return None

  all_deps = {}

  # Parse the content of DEPS file.
  deps, deps_os = _ParseDEPS(deps_content)
  all_deps.update(deps)
  if os_platform is not None:
    all_deps.update(deps_os.get(os_platform, {}))

  # Figure out components based on the dependencies.
  components = {}
  host_dirs = CONFIG['host_directories']
  for component_path, component_repo_url in all_deps.iteritems():
    if component_repo_url is None:
      # A None repository URL means the component is skipped on this platform.
      continue

    name = _GetComponentName(component_path, host_dirs)
    # DEPS entries look like '<repository URL>@<revision>'.
    repository, revision = component_repo_url.split('@')
    # Rewrite retired git.chromium.org URLs to chromium.googlesource.com.
    match = OLD_GIT_URL_PATTERN.match(repository)
    if match:
      repository = 'https://chromium.googlesource.com/%s' % match.group(1)
    is_git_hash = utils.IsGitHash(revision)
    if is_git_hash:
      repository_type = 'git'
    else:
      repository_type = 'svn'
    if not component_path.endswith('/'):
      component_path += '/'
    components[component_path] = {
        'path': component_path,
        'name': name,
        'repository': repository,
        'repository_type': repository_type,
        'revision': revision
    }

  # Add chromium as a component.
  components['src/'] = {
      'path': 'src/',
      'name': 'chromium',
      'repository': chromium_git_base_url,
      'repository_type': 'git',
      'revision': chromium_revision
  }

  return components
+
+
def GetChromiumComponentRange(old_revision,
                              new_revision,
                              os_platform='unix',
                              deps_file_downloader=_GetContentOfDEPS):
  """Return a list of components with their revision ranges.

  Args:
    old_revision: The old revision of a Chrome build.
    new_revision: The new revision of a Chrome build.
    os_platform: The target platform of the Chrome build, eg. win, mac, etc.
    deps_file_downloader: A function that takes the chromium_revision as input,
                          and returns the content of the DEPS file. The
                          returned content is assumed to be trusted input and
                          will be evaluated as python code.

  Returns:
    A map from component path to its parsed regression and other information.
    Return None if an error occurs.
  """
  old_components = GetChromiumComponents(old_revision, os_platform,
                                         deps_file_downloader)
  if not old_components:
    return None

  new_components = GetChromiumComponents(new_revision, os_platform,
                                         deps_file_downloader)
  if not new_components:
    return None

  components = {}
  for path, new_component in new_components.items():
    # Named distinctly so the |old_revision| parameter is not shadowed.
    previous_revision = None
    if path in old_components:
      previous_revision = old_components[path]['revision']

    components[path] = {
        'path': path,
        'rolled': new_component['revision'] != previous_revision,
        'name': new_component['name'],
        'old_revision': previous_revision,
        'new_revision': new_component['revision'],
        'repository': new_component['repository'],
        'repository_type': new_component['repository_type'],
    }

  return components
diff --git a/chromium/tools/findit/chromium_deps_unittest.py b/chromium/tools/findit/chromium_deps_unittest.py
new file mode 100644
index 00000000000..7f64a12b8fd
--- /dev/null
+++ b/chromium/tools/findit/chromium_deps_unittest.py
@@ -0,0 +1,189 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import chromium_deps
+from common import utils
+
+
+class ChromiumDEPSTest(unittest.TestCase):
+ DEPS_TEMPLATE = """
+vars = {
+ "googlecode_url": "http://%%s.googlecode.com/svn",
+ "webkit_trunk": "http://src.chromium.org/blink/trunk",
+ "webkit_revision": "%s",
+ "chromium_git": "https://chromium.googlesource.com",
+}
+
+deps = {
+ "src/breakpad/src":
+ (Var("googlecode_url") %% "google-breakpad") + "/trunk/src@%s",
+
+ "src/third_party/WebKit":
+ Var("webkit_trunk") + "@" + Var("webkit_revision"),
+}
+
+deps_os = {
+ "unix": {
+ "src/third_party/liblouis/src":
+ Var("chromium_git") +
+ "/external/liblouis.git@%s",
+ }
+}
+"""
+
+ def __init__(self, *args, **kwargs):
+ super(ChromiumDEPSTest, self).__init__(*args, **kwargs)
+
+ def testGetChromiumComponents(self):
+ chromium_revision = '283296'
+ chromium_revision_git_hash = 'b041fda2e8493dcb26aac08deb493943df240cbb'
+ webkit_revision = '178200'
+ breakpad_revision = '1345'
+ liblouis_commit_hashcode = '3c2daee56250162e5a75830871601d74328d39f5'
+
+ def _GetContentOfDEPS(chromium_revision_tmp):
+ self.assertEqual(chromium_revision_tmp, chromium_revision_git_hash)
+ return self.DEPS_TEMPLATE % (webkit_revision, breakpad_revision,
+ liblouis_commit_hashcode)
+
+ expected_results = {
+ 'src/breakpad/src/': {
+ 'path': 'src/breakpad/src/',
+ 'repository_type': 'svn',
+ 'name': 'breakpad',
+ 'repository': 'http://google-breakpad.googlecode.com/svn/trunk/src',
+ 'revision': breakpad_revision
+ },
+ 'src/third_party/liblouis/src/': {
+ 'path': 'src/third_party/liblouis/src/',
+ 'repository_type': 'git',
+ 'name': 'liblouis',
+ 'repository':
+ 'https://chromium.googlesource.com/external/liblouis.git',
+ 'revision': liblouis_commit_hashcode
+ },
+ 'src/': {
+ 'path': 'src/',
+ 'repository_type': 'git',
+ 'name': 'chromium',
+ 'repository': 'https://chromium.googlesource.com/chromium/src/',
+ 'revision': chromium_revision_git_hash
+ },
+ 'src/third_party/WebKit/': {
+ 'path': 'src/third_party/WebKit/',
+ 'repository_type': 'svn',
+ 'name': 'blink',
+ 'repository': 'http://src.chromium.org/blink/trunk',
+ 'revision': webkit_revision
+ }
+ }
+
+ components = chromium_deps.GetChromiumComponents(
+ chromium_revision, deps_file_downloader=_GetContentOfDEPS)
+ self.assertEqual(expected_results, components)
+
+ def testGetChromiumComponentRange(self):
+ chromium_revision1 = '283200'
+ chromium_revision_git_hash1 = 'c53c387f46a2ff0cf7c072222b826cff0817a80f'
+ webkit_revision1 = '178084'
+ breakpad_revision1 = '1345'
+ liblouis_commit_hashcode1 = '3c2daee56250162e5a75830871601d74328d39f5'
+
+ chromium_revision2 = '283296'
+ chromium_revision_git_hash2 = 'b041fda2e8493dcb26aac08deb493943df240cbb'
+ webkit_revision2 = '178200'
+ breakpad_revision2 = '1345'
+ liblouis_commit_hashcode2 = '3c2daee56250162e5a75830871601d74328d39f5'
+
+ def _GetContentOfDEPS(chromium_revision):
+ chromium_revision = str(chromium_revision)
+ if chromium_revision == chromium_revision_git_hash1:
+ return self.DEPS_TEMPLATE % (webkit_revision1, breakpad_revision1,
+ liblouis_commit_hashcode1)
+ else:
+ self.assertEqual(chromium_revision, chromium_revision_git_hash2)
+ return self.DEPS_TEMPLATE % (webkit_revision2, breakpad_revision2,
+ liblouis_commit_hashcode2)
+
+ expected_results = {
+ 'src/breakpad/src/': {
+ 'old_revision': breakpad_revision1,
+ 'name': 'breakpad',
+ 'repository': 'http://google-breakpad.googlecode.com/svn/trunk/src',
+ 'rolled': False,
+ 'new_revision': breakpad_revision2,
+ 'path': 'src/breakpad/src/',
+ 'repository_type': 'svn'
+ },
+ 'src/third_party/liblouis/src/': {
+ 'old_revision': liblouis_commit_hashcode1,
+ 'name': 'liblouis',
+ 'repository':
+ 'https://chromium.googlesource.com/external/liblouis.git',
+ 'rolled': False,
+ 'new_revision': liblouis_commit_hashcode2,
+ 'path': 'src/third_party/liblouis/src/',
+ 'repository_type': 'git'
+ },
+ 'src/': {
+ 'old_revision': chromium_revision_git_hash1,
+ 'name': 'chromium',
+ 'repository': 'https://chromium.googlesource.com/chromium/src/',
+ 'rolled': True,
+ 'new_revision': chromium_revision_git_hash2,
+ 'path': 'src/',
+ 'repository_type': 'git'
+ },
+ 'src/third_party/WebKit/': {
+ 'old_revision': webkit_revision1,
+ 'name': 'blink',
+ 'repository': 'http://src.chromium.org/blink/trunk',
+ 'rolled': True,
+ 'new_revision': webkit_revision2,
+ 'path': 'src/third_party/WebKit/',
+ 'repository_type': 'svn'
+ }
+ }
+
+ components = chromium_deps.GetChromiumComponentRange(
+ chromium_revision1, chromium_revision2,
+ deps_file_downloader=_GetContentOfDEPS)
+ self.assertEqual(expected_results, components)
+
+ def _VerifyGitHashForAllComponents(self, deps):
+ self.assertTrue(deps)
+ self.assertTrue(isinstance(deps, dict))
+ for component in deps.values():
+ for key in ['revision', 'old_revision', 'new_revision']:
+ if key in component:
+ self.assertTrue(utils.IsGitHash(component[key]))
+
+ def testComponentRangeCrossGitMigrationPoint(self):
+ # The old revision is from svn.
+ # The new revision is from git.
+ deps = chromium_deps.GetChromiumComponentRange(
+ '291440',
+ '744746cc51ef81c8f8d727fafa46b14d1c03fe44')
+ self._VerifyGitHashForAllComponents(deps)
+
+ def testGetSvnRevision(self):
+ # For this case, svn revision needs converting to git hash and there will be
+ # .DEPS.git and DEPS.
+ deps = chromium_deps.GetChromiumComponents(284750)
+ self._VerifyGitHashForAllComponents(deps)
+
+ def testGetGitRevisionWithoutDEPS_dot_GIT(self):
+ # For this case, there is only DEPS, not .DEPS.git.
+ deps = chromium_deps.GetChromiumComponents(
+ 'f8b3fe9660d8dda318800f55d5e29799bbfd43f7')
+ self._VerifyGitHashForAllComponents(deps)
+
+
+ def testGetGitRevisionWithDEPS_dot_GIT(self):
+ # For this case, there will be .DEPS.git.
+ deps = chromium_deps.GetChromiumComponents(
+ '8ae88241aa9f224e8ce97250f32469d616e437aa')
+ self._VerifyGitHashForAllComponents(deps)
diff --git a/chromium/tools/findit/common/__init__.py b/chromium/tools/findit/common/__init__.py
new file mode 100644
index 00000000000..31f0497a8a5
--- /dev/null
+++ b/chromium/tools/findit/common/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/chromium/tools/findit/common/cacert.pem b/chromium/tools/findit/common/cacert.pem
new file mode 100644
index 00000000000..c9ea29d7ee0
--- /dev/null
+++ b/chromium/tools/findit/common/cacert.pem
@@ -0,0 +1,2186 @@
+# Source: http://pki.google.com/roots.pem
+# Updated at: Wed Dec 17 18:17:42 PST 2014
+
+# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
+# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc.
+# Label: "GTE CyberTrust Global Root"
+# Serial: 421
+# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db
+# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74
+# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36
+-----BEGIN CERTIFICATE-----
+MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD
+VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv
+bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv
+b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV
+UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU
+cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds
+b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH
+iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS
+r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4
+04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r
+GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9
+3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P
+lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/
+-----END CERTIFICATE-----
+
+# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Label: "Thawte Server CA"
+# Serial: 1
+# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d
+# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c
+# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9
+-----BEGIN CERTIFICATE-----
+MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
+biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm
+MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx
+MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT
+DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3
+dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl
+cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3
+DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD
+gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91
+yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX
+L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj
+EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG
+7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e
+QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ
+qdq5snUb9kLy78fyGPmJvKP/iiMucEc=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division
+# Label: "Thawte Premium Server CA"
+# Serial: 1
+# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a
+# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a
+# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72
+-----BEGIN CERTIFICATE-----
+MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx
+FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD
+VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv
+biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy
+dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t
+MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB
+MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG
+A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp
+b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl
+cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv
+bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE
+VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ
+ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR
+uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG
+9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI
+hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM
+pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg==
+-----END CERTIFICATE-----
+
+# Issuer: O=Equifax OU=Equifax Secure Certificate Authority
+# Subject: O=Equifax OU=Equifax Secure Certificate Authority
+# Label: "Equifax Secure CA"
+# Serial: 903804111
+# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4
+# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a
+# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
+UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy
+dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1
+MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx
+dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B
+AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f
+BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A
+cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC
+AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ
+MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm
+aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw
+ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj
+IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF
+MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
+A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y
+7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh
+1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4
+-----END CERTIFICATE-----
+
+# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
+# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
+# Label: "Verisign Class 3 Public Primary Certification Authority"
+# Serial: 149843929435818692848040365716851702463
+# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67
+# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2
+# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70
+-----BEGIN CERTIFICATE-----
+MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
+cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
+MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
+BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
+YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
+ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
+BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
+I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
+CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do
+lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc
+AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k
+-----END CERTIFICATE-----
+
+# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
+# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network
+# Label: "Verisign Class 3 Public Primary Certification Authority - G2"
+# Serial: 167285380242319648451154478808036881606
+# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9
+# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f
+# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b
+-----BEGIN CERTIFICATE-----
+MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ
+BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh
+c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy
+MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp
+emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X
+DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw
+FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg
+UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo
+YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5
+MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB
+AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4
+pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0
+13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID
+AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk
+U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i
+F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY
+oJ2daZH9
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA
+# Label: "GlobalSign Root CA"
+# Serial: 4835703278459707669005204
+# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a
+# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c
+# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99
+-----BEGIN CERTIFICATE-----
+MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG
+A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv
+b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw
+MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i
+YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT
+aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ
+jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp
+xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp
+1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG
+snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ
+U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8
+9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B
+AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz
+yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE
+38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP
+AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad
+DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME
+HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2
+# Label: "GlobalSign Root CA - R2"
+# Serial: 4835703278459682885658125
+# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30
+# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe
+# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e
+-----BEGIN CERTIFICATE-----
+MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1
+MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL
+v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8
+eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq
+tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd
+C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa
+zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB
+mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH
+V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n
+bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG
+3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs
+J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO
+291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS
+ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd
+AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7
+TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority
+# Label: "ValiCert Class 1 VA"
+# Serial: 1
+# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb
+# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e
+# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy
+NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y
+LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+
+TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y
+TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0
+LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW
+I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw
+nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority
+# Label: "ValiCert Class 2 VA"
+# Serial: 1
+# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87
+# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6
+# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy
+NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY
+dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9
+WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS
+v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v
+UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu
+IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC
+W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd
+-----END CERTIFICATE-----
+
+# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
+# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority
+# Label: "RSA Root Certificate 1"
+# Serial: 1
+# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72
+# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb
+# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a
+-----BEGIN CERTIFICATE-----
+MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0
+IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz
+BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y
+aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG
+9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy
+NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y
+azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs
+YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw
+Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl
+cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD
+cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs
+2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY
+JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE
+Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ
+n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A
+PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 3 Public Primary Certification Authority - G3"
+# Serial: 206684696279472310254277870180966723415
+# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09
+# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6
+# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b
+N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t
+KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu
+kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm
+CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ
+Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu
+imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te
+2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe
+DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC
+/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p
+F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt
+TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only
+# Label: "Verisign Class 4 Public Primary Certification Authority - G3"
+# Serial: 314531972711909413743075096039378935511
+# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df
+# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d
+# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06
+-----BEGIN CERTIFICATE-----
+MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw
+CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl
+cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu
+LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT
+aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp
+dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD
+VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT
+aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ
+bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu
+IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1
+GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ
++mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd
+U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm
+NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY
+ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/
+ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1
+CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq
+g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm
+fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c
+2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/
+bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Secure Server CA"
+# Serial: 927650371
+# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee
+# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39
+# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50
+-----BEGIN CERTIFICATE-----
+MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC
+VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u
+ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc
+KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u
+ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1
+MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE
+ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j
+b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF
+bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg
+U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA
+A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/
+I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3
+wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC
+AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb
+oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5
+BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p
+dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk
+MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp
+b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu
+dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0
+MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi
+E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa
+MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI
+hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN
+95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd
+2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited
+# Label: "Entrust.net Premium 2048 Secure Server CA"
+# Serial: 946059622
+# MD5 Fingerprint: ba:21:ea:20:d6:dd:db:8f:c1:57:8b:40:ad:a1:fc:fc
+# SHA1 Fingerprint: 80:1d:62:d0:7b:44:9d:5c:5c:03:5c:98:ea:61:fa:44:3c:2a:58:fe
+# SHA256 Fingerprint: d1:c3:39:ea:27:84:eb:87:0f:93:4f:c5:63:4e:4a:a9:ad:55:05:01:64:01:f2:64:65:d3:7a:57:46:63:35:9f
+-----BEGIN CERTIFICATE-----
+MIIEXDCCA0SgAwIBAgIEOGO5ZjANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML
+RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp
+bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5
+IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0xOTEy
+MjQxODIwNTFaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3
+LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp
+YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG
+A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq
+K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe
+sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX
+MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT
+XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/
+HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH
+4QIDAQABo3QwcjARBglghkgBhvhCAQEEBAMCAAcwHwYDVR0jBBgwFoAUVeSB0RGA
+vtiJuQijMfmhJAkWuXAwHQYDVR0OBBYEFFXkgdERgL7YibkIozH5oSQJFrlwMB0G
+CSqGSIb2fQdBAAQQMA4bCFY1LjA6NC4wAwIEkDANBgkqhkiG9w0BAQUFAAOCAQEA
+WUesIYSKF8mciVMeuoCFGsY8Tj6xnLZ8xpJdGGQC49MGCBFhfGPjK50xA3B20qMo
+oPS7mmNz7W3lKtvtFKkrxjYR0CvrB4ul2p5cGZ1WEvVUKcgF7bISKo30Axv/55IQ
+h7A6tcOdBTcSo8f0FbnVpDkWm1M6I5HxqIKiaohowXkCIryqptau37AUX7iH0N18
+f3v/rxzP5tsHrV7bhZ3QKw0z2wTR5klAEyt2+z7pnIkPFc4YsIV4IU9rTw76NmfN
+B/L/CNDi3tm/Kq+4h4YhPATKt5Rof8886ZjXOP/swNlQ8C5LWK5Gb9Auw2DaclVy
+vUxFnmG6v4SBkgPR0ml8xQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust
+# Label: "Baltimore CyberTrust Root"
+# Serial: 33554617
+# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4
+# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74
+# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb
+-----BEGIN CERTIFICATE-----
+MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ
+RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD
+VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX
+DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y
+ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy
+VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr
+mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr
+IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu
+XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy
+dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye
+jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1
+BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3
+DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92
+9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx
+jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0
+Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz
+ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS
+R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp
+-----END CERTIFICATE-----
+
+# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
+# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc.
+# Label: "Equifax Secure Global eBusiness CA"
+# Serial: 1
+# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc
+# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45
+# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07
+-----BEGIN CERTIFICATE-----
+MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT
+ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw
+MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj
+dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l
+c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC
+UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc
+58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/
+o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH
+MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr
+aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA
+A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA
+Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv
+8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV
+-----END CERTIFICATE-----
+
+# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
+# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc.
+# Label: "Equifax Secure eBusiness CA 1"
+# Serial: 4
+# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d
+# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41
+# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73
+-----BEGIN CERTIFICATE-----
+MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT
+ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw
+MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j
+LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ
+KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo
+RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu
+WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw
+Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD
+AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK
+eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM
+zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+
+WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN
+/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ==
+-----END CERTIFICATE-----
+
+# Issuer: O=Equifax Secure OU=Equifax Secure eBusiness CA-2
+# Subject: O=Equifax Secure OU=Equifax Secure eBusiness CA-2
+# Label: "Equifax Secure eBusiness CA 2"
+# Serial: 930140085
+# MD5 Fingerprint: aa:bf:bf:64:97:da:98:1d:6f:c6:08:3a:95:70:33:ca
+# SHA1 Fingerprint: 39:4f:f6:85:0b:06:be:52:e5:18:56:cc:10:e1:80:e8:82:b3:85:cc
+# SHA256 Fingerprint: 2f:27:4e:48:ab:a4:ac:7b:76:59:33:10:17:75:50:6d:c3:0e:e3:8e:f6:ac:d5:c0:49:32:cf:e0:41:23:42:20
+-----BEGIN CERTIFICATE-----
+MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV
+UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj
+dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0
+NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD
+VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B
+AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G
+vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/
+BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C
+AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl
+IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw
+NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq
+y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF
+MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA
+A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy
+0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1
+E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Low-Value Services Root"
+# Serial: 1
+# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc
+# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d
+# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7
+-----BEGIN CERTIFICATE-----
+MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw
+MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML
+QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD
+VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA
+A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul
+CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n
+tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl
+dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch
+PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC
++Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O
+BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E
+BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl
+MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk
+ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB
+IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X
+7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz
+43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY
+eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl
+pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA
+WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network
+# Label: "AddTrust External Root"
+# Serial: 1
+# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f
+# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68
+# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2
+-----BEGIN CERTIFICATE-----
+MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs
+IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290
+MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux
+FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h
+bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v
+dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt
+H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9
+uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX
+mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX
+a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN
+E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0
+WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD
+VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0
+Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU
+cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx
+IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN
+AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH
+YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5
+6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC
+Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX
+c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a
+mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Public Services Root"
+# Serial: 1
+# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f
+# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5
+# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27
+-----BEGIN CERTIFICATE-----
+MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx
+MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB
+ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV
+BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC
+AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV
+6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX
+GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP
+dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH
+1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF
+62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW
+BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw
+AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL
+MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU
+cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv
+b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6
+IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/
+iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao
+GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh
+4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm
+XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network
+# Label: "AddTrust Qualified Certificates Root"
+# Serial: 1
+# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb
+# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf
+# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16
+-----BEGIN CERTIFICATE-----
+MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU
+MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3
+b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1
+MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK
+EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh
+BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq
+xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G
+87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i
+2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U
+WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1
+0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G
+A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T
+AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr
+pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL
+ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm
+aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv
+hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm
+hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X
+dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3
+P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y
+iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no
+xqE=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc.
+# Label: "Entrust Root Certification Authority"
+# Serial: 1164660820
+# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4
+# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9
+# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c
+-----BEGIN CERTIFICATE-----
+MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC
+VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0
+Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW
+KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl
+cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw
+NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw
+NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy
+ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV
+BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ
+KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo
+Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4
+4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9
+KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI
+rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi
+94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB
+sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi
+gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo
+kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE
+vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA
+A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t
+O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua
+AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP
+9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/
+eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m
+0vdXcDazv/wor3ElhVsT/h5/WrQ8
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA O=GeoTrust Inc.
+# Label: "GeoTrust Global CA"
+# Serial: 144470
+# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5
+# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12
+# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a
+-----BEGIN CERTIFICATE-----
+MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT
+MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i
+YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg
+R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9
+9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq
+fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv
+iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU
+1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+
+bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW
+MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA
+ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l
+uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn
+Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS
+tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF
+PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un
+hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV
+5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Global CA 2"
+# Serial: 1
+# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9
+# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d
+# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85
+-----BEGIN CERTIFICATE-----
+MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs
+IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG
+EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg
+R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A
+PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8
+Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL
+TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL
+5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7
+S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe
+2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE
+FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap
+EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td
+EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv
+/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN
+A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0
+abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF
+I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz
+4iIprn2DQKi6bA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA"
+# Serial: 1
+# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48
+# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79
+# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12
+-----BEGIN CERTIFICATE-----
+MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE
+BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0
+IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV
+VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8
+cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT
+QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh
+F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v
+c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w
+mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd
+VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX
+teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ
+f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe
+Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+
+nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB
+/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY
+MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG
+9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc
+aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX
+IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn
+ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z
+uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN
+Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja
+QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW
+koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9
+ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt
+DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm
+bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc.
+# Label: "GeoTrust Universal CA 2"
+# Serial: 1
+# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7
+# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79
+# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b
+-----BEGIN CERTIFICATE-----
+MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW
+MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy
+c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD
+VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1
+c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC
+AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81
+WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG
+FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq
+XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL
+se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb
+KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd
+IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73
+y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt
+hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc
+QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4
+Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV
+HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV
+HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ
+KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z
+dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ
+L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr
+Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo
+ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY
+T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz
+GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m
+1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV
+OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH
+6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX
+QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS
+-----END CERTIFICATE-----
+
+# Issuer: CN=America Online Root Certification Authority 1 O=America Online Inc.
+# Subject: CN=America Online Root Certification Authority 1 O=America Online Inc.
+# Label: "America Online Root Certification Authority 1"
+# Serial: 1
+# MD5 Fingerprint: 14:f1:08:ad:9d:fa:64:e2:89:e7:1c:cf:a8:ad:7d:5e
+# SHA1 Fingerprint: 39:21:c1:15:c1:5d:0e:ca:5c:cb:5b:c4:f0:7d:21:d8:05:0b:56:6a
+# SHA256 Fingerprint: 77:40:73:12:c6:3a:15:3d:5b:c0:0b:4e:51:75:9c:df:da:c2:37:dc:2a:33:b6:79:46:e9:8e:9b:fa:68:0a:e3
+-----BEGIN CERTIFICATE-----
+MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP
+bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAxMB4XDTAyMDUyODA2
+MDAwMFoXDTM3MTExOTIwNDMwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft
+ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg
+Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAKgv6KRpBgNHw+kqmP8ZonCaxlCyfqXfaE0bfA+2l2h9LaaLl+lk
+hsmj76CGv2BlnEtUiMJIxUo5vxTjWVXlGbR0yLQFOVwWpeKVBeASrlmLojNoWBym
+1BW32J/X3HGrfpq/m44zDyL9Hy7nBzbvYjnF3cu6JRQj3gzGPTzOggjmZj7aUTsW
+OqMFf6Dch9Wc/HKpoH145LcxVR5lu9RhsCFg7RAycsWSJR74kEoYeEfffjA3PlAb
+2xzTa5qGUwew76wGePiEmf4hjUyAtgyC9mZweRrTT6PP8c9GsEsPPt2IYriMqQko
+O3rHl+Ee5fSfwMCuJKDIodkP1nsmgmkyPacCAwEAAaNjMGEwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUAK3Zo/Z59m50qX8zPYEX10zPM94wHwYDVR0jBBgwFoAU
+AK3Zo/Z59m50qX8zPYEX10zPM94wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB
+BQUAA4IBAQB8itEfGDeC4Liwo+1WlchiYZwFos3CYiZhzRAW18y0ZTTQEYqtqKkF
+Zu90821fnZmv9ov761KyBZiibyrFVL0lvV+uyIbqRizBs73B6UlwGBaXCBOMIOAb
+LjpHyx7kADCVW/RFo8AasAFOq73AI25jP4BKxQft3OJvx8Fi8eNy1gTIdGcL+oir
+oQHIb/AUr9KZzVGTfu0uOMe9zkZQPXLjeSWdm4grECDdpbgyn43gKd8hdIaC2y+C
+MMbHNYaz+ZZfRtsMRf3zUMNvxsNIrUam4SdHCh0Om7bCd39j8uB9Gr784N/Xx6ds
+sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7
+-----END CERTIFICATE-----
+
+# Issuer: CN=America Online Root Certification Authority 2 O=America Online Inc.
+# Subject: CN=America Online Root Certification Authority 2 O=America Online Inc.
+# Label: "America Online Root Certification Authority 2"
+# Serial: 1
+# MD5 Fingerprint: d6:ed:3c:ca:e2:66:0f:af:10:43:0d:77:9b:04:09:bf
+# SHA1 Fingerprint: 85:b5:ff:67:9b:0c:79:96:1f:c8:6e:44:22:00:46:13:db:17:92:84
+# SHA256 Fingerprint: 7d:3b:46:5a:60:14:e5:26:c0:af:fc:ee:21:27:d2:31:17:27:ad:81:1c:26:84:2d:00:6a:f3:73:06:cc:80:bd
+-----BEGIN CERTIFICATE-----
+MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc
+MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP
+bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAyMB4XDTAyMDUyODA2
+MDAwMFoXDTM3MDkyOTE0MDgwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft
+ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg
+Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIP
+ADCCAgoCggIBAMxBRR3pPU0Q9oyxQcngXssNt79Hc9PwVU3dxgz6sWYFas14tNwC
+206B89enfHG8dWOgXeMHDEjsJcQDIPT/DjsS/5uN4cbVG7RtIuOx238hZK+GvFci
+KtZHgVdEglZTvYYUAQv8f3SkWq7xuhG1m1hagLQ3eAkzfDJHA1zEpYNI9FdWboE2
+JxhP7JsowtS013wMPgwr38oE18aO6lhOqKSlGBxsRZijQdEt0sdtjRnxrXm3gT+9
+BoInLRBYBbV4Bbkv2wxrkJB+FFk4u5QkE+XRnRTf04JNRvCAOVIyD+OEsnpD8l7e
+Xz8d3eOyG6ChKiMDbi4BFYdcpnV1x5dhvt6G3NRI270qv0pV2uh9UPu0gBe4lL8B
+PeraunzgWGcXuVjgiIZGZ2ydEEdYMtA1fHkqkKJaEBEjNa0vzORKW6fIJ/KD3l67
+Xnfn6KVuY8INXWHQjNJsWiEOyiijzirplcdIz5ZvHZIlyMbGwcEMBawmxNJ10uEq
+Z8A9W6Wa6897GqidFEXlD6CaZd4vKL3Ob5Rmg0gp2OpljK+T2WSfVVcmv2/LNzGZ
+o2C7HK2JNDJiuEMhBnIMoVxtRsX6Kc8w3onccVvdtjc+31D1uAclJuW8tf48ArO3
++L5DwYcRlJ4jbBeKuIonDFRH8KmzwICMoCfrHRnjB453cMor9H124HhnAgMBAAGj
+YzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE1FwWg4u3OpaaEg5+31IqEj
+FNeeMB8GA1UdIwQYMBaAFE1FwWg4u3OpaaEg5+31IqEjFNeeMA4GA1UdDwEB/wQE
+AwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAZ2sGuV9FOypLM7PmG2tZTiLMubekJcmn
+xPBUlgtk87FYT15R/LKXeydlwuXK5w0MJXti4/qftIe3RUavg6WXSIylvfEWK5t2
+LHo1YGwRgJfMqZJS5ivmae2p+DYtLHe/YUjRYwu5W1LtGLBDQiKmsXeu3mnFzccc
+obGlHBD7GL4acN3Bkku+KVqdPzW+5X1R+FXgJXUjhx5c3LqdsKyzadsXg8n33gy8
+CNyRnqjQ1xU3c6U1uPx+xURABsPr+CKAXEfOAuMRn0T//ZoyzH1kUQ7rVyZ2OuMe
+IjzCpjbdGe+n/BLzJsBZMYVMnNjP36TMzCmT/5RtdlwTCJfy7aULTd3oyWgOZtMA
+DjMSW7yV5TKQqLPGbIOtd+6Lfn6xqavT4fG2wLHqiMDn05DpKJKUe2h7lyoKZy2F
+AjgQ5ANh1NolNscIWC2hp1GvMApJ9aZphwctREZ2jirlmjvXGKL8nDgQzMY70rUX
+Om/9riW99XJZZLF0KjhfGEzfz3EEWjbUvy+ZnOjZurGV5gJLIaFb1cFPj65pbVPb
+AZO1XB4Y3WRayhgoPmMEEf0cjQAPuDffZ4qdZqkCapH/E8ovXYO8h5Ns3CRRFgQl
+Zvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuPcX/9XhmgD0uRuMRUvAaw
+RY8mkaKO/qk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AAA Certificate Services O=Comodo CA Limited
+# Subject: CN=AAA Certificate Services O=Comodo CA Limited
+# Label: "Comodo AAA Services root"
+# Serial: 1
+# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0
+# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49
+# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4
+-----BEGIN CERTIFICATE-----
+MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj
+YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM
+GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua
+BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe
+3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4
+YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR
+rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm
+ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU
+oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF
+MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v
+QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t
+b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF
+AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q
+GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz
+Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2
+G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi
+l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3
+smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Secure Certificate Services O=Comodo CA Limited
+# Subject: CN=Secure Certificate Services O=Comodo CA Limited
+# Label: "Comodo Secure Services root"
+# Serial: 1
+# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd
+# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1
+# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8
+-----BEGIN CERTIFICATE-----
+MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp
+ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow
+fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV
+BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB
+BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM
+cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S
+HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996
+CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk
+3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz
+6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV
+HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud
+EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv
+Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw
+Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww
+DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0
+5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj
+Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI
+gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ
+aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl
+izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited
+# Subject: CN=Trusted Certificate Services O=Comodo CA Limited
+# Label: "Comodo Trusted Services root"
+# Serial: 1
+# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27
+# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd
+# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69
+-----BEGIN CERTIFICATE-----
+MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb
+MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow
+GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0
+aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla
+MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO
+BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD
+VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B
+AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW
+fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt
+TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL
+fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW
+1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7
+kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G
+A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD
+VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v
+ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo
+dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu
+Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/
+HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32
+pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS
+jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+
+xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn
+dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi
+-----END CERTIFICATE-----
+
+# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
+# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com
+# Label: "UTN DATACorp SGC Root CA"
+# Serial: 91374294542884689855167577680241077609
+# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06
+# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4
+# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48
+-----BEGIN CERTIFICATE-----
+MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB
+kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
+dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw
+IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG
+EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD
+VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu
+dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN
+BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6
+E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ
+D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK
+4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq
+lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW
+bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB
+o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT
+MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js
+LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr
+BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB
+AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft
+Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj
+j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH
+KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv
+2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3
+mfnGV/TJVTl4uix5yaaIK/QI
+-----END CERTIFICATE-----
+
+# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
+# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com
+# Label: "UTN USERFirst Hardware Root CA"
+# Serial: 91374294542884704022267039221184531197
+# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39
+# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7
+# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37
+-----BEGIN CERTIFICATE-----
+MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB
+lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho
+dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt
+SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG
+A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe
+MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v
+d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh
+cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn
+0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ
+M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a
+MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd
+oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI
+DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy
+oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD
+VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0
+dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy
+bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF
+BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM
+//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli
+CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE
+CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t
+3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS
+KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com
+# Label: "XRamp Global CA Root"
+# Serial: 107108908803651509692980124233745014957
+# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1
+# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6
+# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2
+-----BEGIN CERTIFICATE-----
+MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB
+gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk
+MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY
+UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx
+NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3
+dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy
+dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB
+dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6
+38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP
+KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q
+DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4
+qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa
+JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi
+PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P
+BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs
+jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0
+eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD
+ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR
+vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt
+qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa
+IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy
+i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ
+O+7ETPTsJ3xCwnR8gooJybQDJbw=
+-----END CERTIFICATE-----
+
+# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority
+# Label: "Go Daddy Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67
+# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4
+# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4
+-----BEGIN CERTIFICATE-----
+MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh
+MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE
+YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3
+MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo
+ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg
+MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN
+ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA
+PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w
+wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi
+EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY
+avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+
+YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE
+sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h
+/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5
+IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj
+YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD
+ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy
+OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P
+TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ
+HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER
+dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf
+ReYNnyicsbkqWletNw+vHX/bvZ8=
+-----END CERTIFICATE-----
+
+# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Subject: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority
+# Label: "Starfield Class 2 CA"
+# Serial: 0
+# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24
+# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a
+# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58
+-----BEGIN CERTIFICATE-----
+MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
+MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
+U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
+NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
+ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
+ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
+DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
+8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
+X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
+K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
+1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
+A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
+zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
+YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
+bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
+DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
+L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
+eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
+xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
+VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
+WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Label: "StartCom Certification Authority"
+# Serial: 1
+# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16
+# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f
+# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea
+-----BEGIN CERTIFICATE-----
+MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
+Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9
+MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
+U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
+cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
+pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
+OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
+Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
+Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
+HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
+Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
+Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
+26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
+AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE
+FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j
+ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js
+LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM
+BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0
+Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy
+dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh
+cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh
+YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg
+dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp
+bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ
+YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT
+TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ
+9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8
+jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW
+FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz
+ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1
+ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L
+EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu
+L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq
+yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC
+O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V
+um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh
+NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Assured ID Root CA"
+# Serial: 17154717934120587862167794914071425081
+# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72
+# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43
+# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c
+-----BEGIN CERTIFICATE-----
+MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv
+b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG
+EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl
+cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi
+MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c
+JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP
+mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+
+wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4
+VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/
+AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB
+AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW
+BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun
+pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC
+dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf
+fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm
+NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx
+H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe
++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g==
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert Global Root CA"
+# Serial: 10944719598952040374951832963794454346
+# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e
+# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36
+# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61
+-----BEGIN CERTIFICATE-----
+MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT
+MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j
+b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG
+9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB
+CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97
+nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt
+43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P
+T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4
+gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO
+BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR
+TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw
+DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr
+hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg
+06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF
+PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls
+YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk
+CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4=
+-----END CERTIFICATE-----
+
+# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com
+# Label: "DigiCert High Assurance EV Root CA"
+# Serial: 3553400076410547919724730734378100087
+# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a
+# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25
+# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs
+MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3
+d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j
+ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3
+LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug
+RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm
++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW
+PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM
+xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB
+Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3
+hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg
+EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF
+MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA
+FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec
+nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z
+eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF
+hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2
+Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe
+vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep
++OkuE6N36B9K
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc.
+# Label: "GeoTrust Primary Certification Authority"
+# Serial: 32798226551256963324313806436981982369
+# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf
+# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96
+# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c
+-----BEGIN CERTIFICATE-----
+MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY
+MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo
+R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx
+MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK
+Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp
+ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9
+AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA
+ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0
+7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W
+kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI
+mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ
+KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1
+6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl
+4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K
+oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj
+UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU
+AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk=
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA"
+# Serial: 69529181992039203566298953787712940909
+# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12
+# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81
+# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f
+-----BEGIN CERTIFICATE-----
+MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB
+qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV
+BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw
+NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j
+LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG
+A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl
+IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs
+W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta
+3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk
+6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6
+Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J
+NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA
+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP
+r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU
+DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz
+YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX
+xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2
+/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/
+LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7
+jVaMaA==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G5"
+# Serial: 33037644167568058970164719475676101450
+# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c
+# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5
+# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df
+-----BEGIN CERTIFICATE-----
+MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB
+yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW
+ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1
+nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex
+t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz
+SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG
+BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+
+rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/
+NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E
+BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH
+BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy
+aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE
+p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y
+5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK
+WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ
+4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N
+hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO Certification Authority O=COMODO CA Limited
+# Label: "COMODO Certification Authority"
+# Serial: 104350513648249232941998508985834464573
+# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75
+# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b
+# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66
+-----BEGIN CERTIFICATE-----
+MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB
+gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G
+A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV
+BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw
+MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl
+YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P
+RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0
+aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3
+UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI
+2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8
+Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp
++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+
+DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O
+nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW
+/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g
+PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY
+SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv
+IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/
+RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4
+zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd
+BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB
+ZQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C.
+# Label: "Network Solutions Certificate Authority"
+# Serial: 116697915152937497490437556386812487904
+# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e
+# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce
+# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c
+-----BEGIN CERTIFICATE-----
+MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi
+MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu
+MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp
+dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV
+UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO
+ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz
+c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP
+OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl
+mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF
+BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4
+qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw
+gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB
+BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu
+bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp
+dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8
+6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/
+h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH
+/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv
+wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN
+pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey
+-----END CERTIFICATE-----
+
+# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited
+# Label: "COMODO ECC Certification Authority"
+# Serial: 41578283867086692638256921589707938090
+# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23
+# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11
+# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7
+-----BEGIN CERTIFICATE-----
+MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL
+MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE
+BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT
+IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw
+MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy
+ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N
+T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv
+biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR
+FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J
+cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW
+BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
+BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm
+fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv
+GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY=
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA
+# Subject: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA
+# Label: "TC TrustCenter Class 2 CA II"
+# Serial: 941389028203453866782103406992443
+# MD5 Fingerprint: ce:78:33:5c:59:78:01:6e:18:ea:b9:36:a0:b9:2e:23
+# SHA1 Fingerprint: ae:50:83:ed:7c:f4:5c:bc:8f:61:c6:21:fe:68:5d:79:42:21:15:6e
+# SHA256 Fingerprint: e6:b8:f8:76:64:85:f8:07:ae:7f:8d:ac:16:70:46:1f:07:c0:a1:3e:ef:3a:1f:f7:17:53:8d:7a:ba:d3:91:b4
+-----BEGIN CERTIFICATE-----
+MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV
+BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0
+Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1
+OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i
+SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc
+VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf
+tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg
+uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J
+XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK
+8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99
+5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3
+kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy
+dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6
+Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz
+JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290
+Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u
+TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS
+GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt
+ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8
+au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV
+hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI
+dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA
+# Subject: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA
+# Label: "TC TrustCenter Class 3 CA II"
+# Serial: 1506523511417715638772220530020799
+# MD5 Fingerprint: 56:5f:aa:80:61:12:17:f6:67:21:e6:2b:6d:61:56:8e
+# SHA1 Fingerprint: 80:25:ef:f4:6e:70:c8:d4:72:24:65:84:fe:40:3b:8a:8d:6a:db:f5
+# SHA256 Fingerprint: 8d:a0:84:fc:f9:9c:e0:77:22:f8:9b:32:05:93:98:06:fa:5c:b8:11:e1:c8:13:f6:a1:08:c7:d3:36:b3:40:8e
+-----BEGIN CERTIFICATE-----
+MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV
+BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0
+Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYwMTEyMTQ0MTU3WhcNMjUxMjMxMjI1
+OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i
+SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UEAxMc
+VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD
+ggEPADCCAQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJW
+Ht4bNwcwIi9v8Qbxq63WyKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+Q
+Vl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo6SI7dYnWRBpl8huXJh0obazovVkdKyT2
+1oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZuV3bOx4a+9P/FRQI2Alq
+ukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk2ZyqBwi1
+Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NX
+XAek0CSnwPIA1DCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy
+dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6
+Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz
+JTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290
+Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u
+TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlN
+irTzwppVMXzEO2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8
+TtXqluJucsG7Kv5sbviRmEb8yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6
+g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9IJqDnxrcOfHFcqMRA/07QlIp2+gB
+95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal092Y+tTmBvTwtiBj
+S+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc5A==
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Subject: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Label: "TC TrustCenter Universal CA I"
+# Serial: 601024842042189035295619584734726
+# MD5 Fingerprint: 45:e1:a5:72:c5:a9:36:64:40:9e:f5:e4:58:84:67:8c
+# SHA1 Fingerprint: 6b:2f:34:ad:89:58:be:62:fd:b0:6b:5c:ce:bb:9d:d9:4f:4e:39:f3
+# SHA256 Fingerprint: eb:f3:c0:2a:87:89:b1:fb:7d:51:19:95:d6:63:b7:29:06:d9:13:ce:0d:5e:10:56:8a:8a:77:e2:58:61:67:e7
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV
+BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1
+c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx
+MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg
+R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD
+VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN
+AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR
+JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T
+fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu
+jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z
+wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ
+fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD
+VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G
+CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1
+7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn
+8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs
+ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT
+ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/
+2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY
+-----END CERTIFICATE-----
+
+# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc
+# Label: "Cybertrust Global Root"
+# Serial: 4835703278459682877484360
+# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1
+# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6
+# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3
+-----BEGIN CERTIFICATE-----
+MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG
+A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh
+bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE
+ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS
+b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5
+7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS
+J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y
+HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP
+t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz
+FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY
+XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/
+MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw
+hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js
+MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA
+A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj
+Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx
+XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o
+omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc
+A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW
+WL1WMRJOEcgh4LMRkWXbtKaIOM5V
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G3"
+# Serial: 28809105769928564313984085209975885599
+# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05
+# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd
+# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4
+-----BEGIN CERTIFICATE-----
+MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB
+mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT
+MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s
+eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv
+cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ
+BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg
+MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0
+BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg
+LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz
++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm
+hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn
+5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W
+JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL
+DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC
+huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw
+HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB
+AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB
+zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN
+kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD
+AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH
+SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G
+spki4cErx5z481+oghLrGREt
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G2"
+# Serial: 71758320672825410020661621085256472406
+# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f
+# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12
+# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57
+-----BEGIN CERTIFICATE-----
+MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp
+IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi
+BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw
+MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh
+d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig
+YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v
+dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/
+BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6
+papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E
+BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K
+DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3
+KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox
+XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only
+# Label: "thawte Primary Root CA - G3"
+# Serial: 127614157056681299805556476275995414779
+# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31
+# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2
+# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c
+-----BEGIN CERTIFICATE-----
+MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB
+rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf
+Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw
+MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV
+BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa
+Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl
+LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u
+MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl
+ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm
+gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8
+YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf
+b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9
+9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S
+zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk
+OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV
+HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA
+2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW
+oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu
+t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c
+KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM
+m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu
+MdRAGmI0Nj81Aa6sY6A=
+-----END CERTIFICATE-----
+
+# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only
+# Label: "GeoTrust Primary Certification Authority - G2"
+# Serial: 80682863203381065782177908751794619243
+# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a
+# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0
+# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66
+-----BEGIN CERTIFICATE-----
+MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL
+MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj
+KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2
+MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0
+eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV
+BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw
+NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV
+BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH
+MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL
+So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal
+tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG
+CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT
+qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz
+rD6ogRLQy7rQkgu2npaqBA+K
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Universal Root Certification Authority"
+# Serial: 85209574734084581917763752644031726877
+# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19
+# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54
+# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c
+-----BEGIN CERTIFICATE-----
+MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB
+vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL
+ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp
+U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W
+ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe
+Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX
+MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0
+IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y
+IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh
+bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF
+AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF
+9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH
+H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H
+LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN
+/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT
+rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud
+EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw
+WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs
+exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud
+DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4
+sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+
+seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz
+4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+
+BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR
+lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3
+7M2CYfE45k+XmCpajQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only
+# Label: "VeriSign Class 3 Public Primary Certification Authority - G4"
+# Serial: 63143484348153506665311985501458640051
+# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41
+# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a
+# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79
+-----BEGIN CERTIFICATE-----
+MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL
+MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW
+ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln
+biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp
+U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y
+aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp
+U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg
+SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln
+biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5
+IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm
+GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve
+fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw
+AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ
+aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj
+aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW
+kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC
+4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga
+FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA==
+-----END CERTIFICATE-----
+
+# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
+# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority
+# Label: "Verisign Class 3 Public Primary Certification Authority"
+# Serial: 80507572722862485515306429940691309246
+# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4
+# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b
+# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05
+-----BEGIN CERTIFICATE-----
+MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG
+A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz
+cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2
+MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV
+BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt
+YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN
+ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE
+BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is
+I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G
+CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i
+2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ
+2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ
+-----END CERTIFICATE-----
+
+# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3
+# Label: "GlobalSign Root CA - R3"
+# Serial: 4835703278459759426209954
+# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28
+# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad
+# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b
+-----BEGIN CERTIFICATE-----
+MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp
+Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4
+MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG
+A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8
+RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT
+gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm
+KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd
+QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ
+XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw
+DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o
+LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU
+RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp
+jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK
+6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX
+mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs
+Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH
+WD9f
+-----END CERTIFICATE-----
+
+# Issuer: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Subject: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA
+# Label: "TC TrustCenter Universal CA III"
+# Serial: 2010889993983507346460533407902964
+# MD5 Fingerprint: 9f:dd:db:ab:ff:8e:ff:45:21:5f:f0:6c:9d:8f:fe:2b
+# SHA1 Fingerprint: 96:56:cd:7b:57:96:98:95:d0:e1:41:46:68:06:fb:b8:c6:11:06:87
+# SHA256 Fingerprint: 30:9b:4a:87:f6:ca:56:c9:31:69:aa:a9:9c:6d:98:88:54:d7:89:2b:d5:43:7e:2d:07:b2:9c:be:da:55:d3:5d
+-----BEGIN CERTIFICATE-----
+MIID4TCCAsmgAwIBAgIOYyUAAQACFI0zFQLkbPQwDQYJKoZIhvcNAQEFBQAwezEL
+MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV
+BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEoMCYGA1UEAxMfVEMgVHJ1
+c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJSTAeFw0wOTA5MDkwODE1MjdaFw0yOTEy
+MzEyMzU5NTlaMHsxCzAJBgNVBAYTAkRFMRwwGgYDVQQKExNUQyBUcnVzdENlbnRl
+ciBHbWJIMSQwIgYDVQQLExtUQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0ExKDAm
+BgNVBAMTH1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQSBJSUkwggEiMA0GCSqG
+SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDC2pxisLlxErALyBpXsq6DFJmzNEubkKLF
+5+cvAqBNLaT6hdqbJYUtQCggbergvbFIgyIpRJ9Og+41URNzdNW88jBmlFPAQDYv
+DIRlzg9uwliT6CwLOunBjvvya8o84pxOjuT5fdMnnxvVZ3iHLX8LR7PH6MlIfK8v
+zArZQe+f/prhsq75U7Xl6UafYOPfjdN/+5Z+s7Vy+EutCHnNaYlAJ/Uqwa1D7KRT
+yGG299J5KmcYdkhtWyUB0SbFt1dpIxVbYYqt8Bst2a9c8SaQaanVDED1M4BDj5yj
+dipFtK+/fz6HP3bFzSreIMUWWMv5G/UPyw0RUmS40nZid4PxWJ//AgMBAAGjYzBh
+MB8GA1UdIwQYMBaAFFbn4VslQ4Dg9ozhcbyO5YAvxEjiMA8GA1UdEwEB/wQFMAMB
+Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRW5+FbJUOA4PaM4XG8juWAL8RI
+4jANBgkqhkiG9w0BAQUFAAOCAQEAg8ev6n9NCjw5sWi+e22JLumzCecYV42Fmhfz
+dkJQEw/HkG8zrcVJYCtsSVgZ1OK+t7+rSbyUyKu+KGwWaODIl0YgoGhnYIg5IFHY
+aAERzqf2EQf27OysGh+yZm5WZ2B6dF7AbZc2rrUNXWZzwCUyRdhKBgePxLcHsU0G
+DeGl6/R1yrqc0L2z0zIkTO5+4nYES0lT2PLpVDP85XEfPRRclkvxOvIAu2y0+pZV
+CIgJwcyRGSmwIC3/yzikQOEXvnlhgP8HA4ZMTnsGnxGGjYnuJ8Tb4rwZjgvDwxPH
+LQNjO9Po5KIqwoIIlBZU8O8fJ5AluA0OKBtHd0e9HKgl8ZS0Zg==
+-----END CERTIFICATE-----
+
+# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc.
+# Label: "Go Daddy Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01
+# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b
+# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da
+-----BEGIN CERTIFICATE-----
+MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT
+EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp
+ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz
+NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH
+EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE
+AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD
+E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH
+/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy
+DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh
+GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR
+tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA
+AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE
+FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX
+WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu
+9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr
+gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo
+2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO
+LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI
+4uJEvlz36hz1
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96
+# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e
+# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5
+-----BEGIN CERTIFICATE-----
+MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs
+ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw
+MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6
+b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj
+aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp
+Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
+ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg
+nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1
+HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N
+Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN
+dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0
+HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO
+BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G
+CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU
+sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3
+4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg
+8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K
+pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1
+mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0
+-----END CERTIFICATE-----
+
+# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc.
+# Label: "Starfield Services Root Certificate Authority - G2"
+# Serial: 0
+# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2
+# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f
+# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5
+-----BEGIN CERTIFICATE-----
+MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
+EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
+HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
+ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
+MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
+VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
+ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
+dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
+OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
+8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
+Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
+hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
+6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
+DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
+AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
+bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
+ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
+qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
+iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
+0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
+sSi6
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Commercial O=AffirmTrust
+# Subject: CN=AffirmTrust Commercial O=AffirmTrust
+# Label: "AffirmTrust Commercial"
+# Serial: 8608355977964138876
+# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7
+# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7
+# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP
+Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr
+ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL
+MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1
+yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr
+VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/
+nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG
+XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj
+vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt
+Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g
+N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC
+nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Networking O=AffirmTrust
+# Subject: CN=AffirmTrust Networking O=AffirmTrust
+# Label: "AffirmTrust Networking"
+# Serial: 8957382827206547757
+# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f
+# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f
+# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b
+-----BEGIN CERTIFICATE-----
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz
+dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL
+MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp
+cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC
+AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y
+YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua
+kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL
+QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp
+6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG
+yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i
+QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ
+KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO
+tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu
+QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ
+Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u
+olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48
+x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s=
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium O=AffirmTrust
+# Subject: CN=AffirmTrust Premium O=AffirmTrust
+# Label: "AffirmTrust Premium"
+# Serial: 7893706540734352110
+# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57
+# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27
+# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a
+-----BEGIN CERTIFICATE-----
+MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE
+BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz
+dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG
+A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U
+cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ
+JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ
++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS
+s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5
+HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7
+70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG
+V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S
+qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S
+5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia
+C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX
+OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE
+FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/
+BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2
+KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg
+Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B
+8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ
+MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc
+0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ
+u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF
+u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH
+YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8
+GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO
+RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e
+KeC2uAloGRwYQw==
+-----END CERTIFICATE-----
+
+# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust
+# Label: "AffirmTrust Premium ECC"
+# Serial: 8401224907861490260
+# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d
+# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb
+# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23
+-----BEGIN CERTIFICATE-----
+MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC
+VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ
+cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ
+BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt
+VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D
+0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9
+ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G
+A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G
+A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs
+aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I
+flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ==
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing
+# Label: "StartCom Certification Authority"
+# Serial: 45
+# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16
+# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0
+# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11
+-----BEGIN CERTIFICATE-----
+MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg
+Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh
+dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9
+MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi
+U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh
+cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA
+A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk
+pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf
+OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C
+Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT
+Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi
+HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM
+Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w
++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+
+Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3
+Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B
+26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID
+AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD
+VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul
+F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC
+ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w
+ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk
+aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0
+YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg
+c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0
+aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93
+d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG
+CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1
+dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF
+wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS
+Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst
+0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc
+pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl
+CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF
+P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK
+1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm
+KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE
+JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ
+8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm
+fyWl8kgAwKQB2j8=
+-----END CERTIFICATE-----
+
+# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd.
+# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd.
+# Label: "StartCom Certification Authority G2"
+# Serial: 59
+# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64
+# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17
+# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95
+-----BEGIN CERTIFICATE-----
+MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW
+MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm
+aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1
+OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG
+A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G
+CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ
+JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD
+vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo
+D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/
+Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW
+RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK
+HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN
+nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM
+0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i
+UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9
+Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg
+TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE
+AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL
+BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K
+2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX
+UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl
+6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK
+9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ
+HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI
+wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY
+XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l
+IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo
+hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr
+so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI
+-----END CERTIFICATE-----
diff --git a/chromium/tools/findit/common/http_client.py b/chromium/tools/findit/common/http_client.py
new file mode 100644
index 00000000000..ae3349dac46
--- /dev/null
+++ b/chromium/tools/findit/common/http_client.py
@@ -0,0 +1,31 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+class HttpClient(object):
+  """Represents an HTTP client for sending requests to an http[s] server.
+
+ If cookies need to be sent, they should be in a file pointed to by
+ COOKIE_FILE in the environment.
+ """
+
+ @staticmethod
+ def Get(url, params={}, timeout=120, retries=5, retry_interval=0.5,
+ retry_if_not=None):
+ """Send a GET request to the given url with the given parameters.
+
+ Args:
+ url: the url to send request to.
+ params: parameters to send as part of the http request.
+ timeout: timeout for the http request, default is 120 seconds.
+ retries: indicate how many retries before failing, default is 5.
+      retry_interval: seconds to wait before each retry, default is 0.5.
+      retry_if_not: an http status code. If set, retry only when the failed
+ status code is a different value.
+
+ Returns:
+ (status_code, data)
+      status_code: the http status code in the response.
+ data: the body of the response.
+ """
+ raise NotImplemented()
diff --git a/chromium/tools/findit/common/http_client_local.py b/chromium/tools/findit/common/http_client_local.py
new file mode 100644
index 00000000000..b8a168dce24
--- /dev/null
+++ b/chromium/tools/findit/common/http_client_local.py
@@ -0,0 +1,253 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A http client with support for https connections with certificate verification.
+
+The verification is based on http://tools.ietf.org/html/rfc6125#section-6.4.3
+and the code is from Lib/ssl.py in python3:
+ http://hg.python.org/cpython/file/4dac45f88d45/Lib/ssl.py
+
+One use case is to download Chromium DEPS file in a secure way:
+ https://src.chromium.org/chrome/trunk/src/DEPS
+
+Notice: python 2.7 or newer is required.
+"""
+
+import cookielib
+import httplib
+import os
+import re
+import socket
+import ssl
+import time
+import urllib
+import urllib2
+
+import http_client
+
+
+_SCRIPT_DIR = os.path.dirname(__file__)
+_TRUSTED_ROOT_CERTS = os.path.join(_SCRIPT_DIR, 'cacert.pem')
+
+
+class CertificateError(ValueError):
+ pass
+
+
+def _DNSNameMatch(dn, hostname, max_wildcards=1):
+ """Matching according to RFC 6125, section 6.4.3
+
+ http://tools.ietf.org/html/rfc6125#section-6.4.3
+ """
+ pats = []
+ if not dn:
+ return False
+
+ parts = dn.split(r'.')
+ leftmost = parts[0]
+ remainder = parts[1:]
+
+ wildcards = leftmost.count('*')
+ if wildcards > max_wildcards:
+ # Issue #17980: avoid denials of service by refusing more
+    # than one wildcard per fragment. A survey of established
+ # policy among SSL implementations showed it to be a
+ # reasonable choice.
+ raise CertificateError(
+ 'too many wildcards in certificate DNS name: ' + repr(dn))
+
+ # speed up common case w/o wildcards
+ if not wildcards:
+ return dn.lower() == hostname.lower()
+
+ # RFC 6125, section 6.4.3, subitem 1.
+ # The client SHOULD NOT attempt to match a presented identifier in which
+ # the wildcard character comprises a label other than the left-most label.
+ if leftmost == '*':
+ # When '*' is a fragment by itself, it matches a non-empty dotless
+ # fragment.
+ pats.append('[^.]+')
+ elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
+ # RFC 6125, section 6.4.3, subitem 3.
+ # The client SHOULD NOT attempt to match a presented identifier
+ # where the wildcard character is embedded within an A-label or
+ # U-label of an internationalized domain name.
+ pats.append(re.escape(leftmost))
+ else:
+ # Otherwise, '*' matches any dotless string, e.g. www*
+ pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
+
+ # add the remaining fragments, ignore any wildcards
+ for frag in remainder:
+ pats.append(re.escape(frag))
+
+ pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
+ return pat.match(hostname)
+
+
+def _MatchHostname(cert, hostname):
+ """Verify that *cert* (in decoded format as returned by
+ SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
+ rules are followed, but IP addresses are not accepted for *hostname*.
+
+ CertificateError is raised on failure. On success, the function
+ returns nothing.
+ """
+ if not cert:
+ raise ValueError('empty or no certificate, match_hostname needs a '
+ 'SSL socket or SSL context with either '
+ 'CERT_OPTIONAL or CERT_REQUIRED')
+ dnsnames = []
+ san = cert.get('subjectAltName', ())
+ for key, value in san:
+ if key == 'DNS':
+ if _DNSNameMatch(value, hostname):
+ return
+ dnsnames.append(value)
+ if not dnsnames:
+ # The subject is only checked when there is no dNSName entry
+ # in subjectAltName
+ for sub in cert.get('subject', ()):
+ for key, value in sub:
+ # XXX according to RFC 2818, the most specific Common Name
+ # must be used.
+ if key == 'commonName':
+ if _DNSNameMatch(value, hostname):
+ return
+ dnsnames.append(value)
+ if len(dnsnames) > 1:
+ raise CertificateError('hostname %r doesn\'t match either of %s'
+ % (hostname, ', '.join(map(repr, dnsnames))))
+ elif len(dnsnames) == 1:
+ raise CertificateError('hostname %r doesn\'t match %r'
+ % (hostname, dnsnames[0]))
+ else:
+ raise CertificateError('no appropriate commonName or '
+ 'subjectAltName fields were found')
+
+
+class HTTPSConnection(httplib.HTTPSConnection):
+
+ def __init__(self, host, root_certs=_TRUSTED_ROOT_CERTS, **kwargs):
+ self.root_certs = root_certs
+ httplib.HTTPSConnection.__init__(self, host, **kwargs)
+
+ def connect(self):
+ # Overrides for certificate verification.
+ args = [(self.host, self.port), self.timeout,]
+ if self.source_address:
+ args.append(self.source_address)
+ sock = socket.create_connection(*args)
+
+ if self._tunnel_host:
+ self.sock = sock
+ self._tunnel()
+
+ # Wrap the socket for verification with the root certs.
+ kwargs = {}
+ if self.root_certs is not None:
+ kwargs.update(cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.root_certs)
+ self.sock = ssl.wrap_socket(sock, **kwargs)
+
+ # Check hostname.
+ try:
+ _MatchHostname(self.sock.getpeercert(), self.host)
+ except CertificateError:
+ self.sock.shutdown(socket.SHUT_RDWR)
+ self.sock.close()
+ raise
+
+
+class HTTPSHandler(urllib2.HTTPSHandler):
+
+ def __init__(self, root_certs=_TRUSTED_ROOT_CERTS):
+ urllib2.HTTPSHandler.__init__(self)
+ self.root_certs = root_certs
+
+ def https_open(self, req):
+ # Pass a reference to the function below so that verification against
+ # trusted root certs could be injected.
+ return self.do_open(self.GetConnection, req)
+
+ def GetConnection(self, host, **kwargs):
+ params = dict(root_certs=self.root_certs)
+ params.update(kwargs)
+ return HTTPSConnection(host, **params)
+
+
+def _SendRequest(url, timeout=None):
+ """Send request to the given https url, and return the server response.
+
+ Args:
+ url: The https url to send request to.
+
+ Returns:
+ An integer: http code of the response.
+ A string: content of the response.
+
+ Raises:
+ CertificateError: Certificate verification fails.
+ """
+ if not url:
+ return None, None
+
+ handlers = []
+ if url.startswith('https://'):
+ # HTTPSHandler has to go first, because we don't want to send secure cookies
+ # to a man in the middle.
+ handlers.append(HTTPSHandler())
+
+
+ cookie_file = os.environ.get('COOKIE_FILE')
+ if cookie_file and os.path.exists(cookie_file):
+ handlers.append(
+ urllib2.HTTPCookieProcessor(cookielib.MozillaCookieJar(cookie_file)))
+
+ url_opener = urllib2.build_opener(*handlers)
+
+ status_code = None
+ content = None
+
+ try:
+ response = url_opener.open(url, timeout=timeout)
+
+ status_code = response.code
+ content = response.read()
+ except urllib2.HTTPError as e:
+ status_code = e.code
+ content = None
+ except (ssl.SSLError, httplib.BadStatusLine, IOError):
+ status_code = -1
+ content = None
+
+ return status_code, content
+
+
+class HttpClientLocal(http_client.HttpClient):
+ """This http client is used locally in a workstation, GCE VMs, etc."""
+
+ @staticmethod
+ def Get(url, params={}, timeout=120, retries=5, retry_interval=0.5,
+ retry_if_not=None):
+ if params:
+ url = '%s?%s' % (url, urllib.urlencode(params))
+
+ count = 0
+ while True:
+ count += 1
+
+ status_code, content = _SendRequest(url, timeout=timeout)
+ if status_code == 200:
+ return status_code, content
+ if retry_if_not and status_code == retry_if_not:
+ return status_code, content
+
+ if count < retries:
+ time.sleep(retry_interval)
+ else:
+ return status_code, content
+
+ # Should never be reached.
+ return status_code, content
diff --git a/chromium/tools/findit/common/http_client_local_unittest.py b/chromium/tools/findit/common/http_client_local_unittest.py
new file mode 100644
index 00000000000..40d41fa5058
--- /dev/null
+++ b/chromium/tools/findit/common/http_client_local_unittest.py
@@ -0,0 +1,15 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from http_client_local import HttpClientLocal
+
+class HttpClientLocalTest(unittest.TestCase):
+
+ def testGetWithoutParameters(self):
+ code, deps = HttpClientLocal.Get(
+ 'https://src.chromium.org/chrome/trunk/src/DEPS')
+ self.assertEqual(200, code)
+ self.assertTrue(isinstance(deps, str))
diff --git a/chromium/tools/findit/common/utils.py b/chromium/tools/findit/common/utils.py
new file mode 100644
index 00000000000..5011e76e2b1
--- /dev/null
+++ b/chromium/tools/findit/common/utils.py
@@ -0,0 +1,68 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+import sys
+
+from http_client_local import HttpClientLocal
+
+
+GIT_HASH_PATTERN = re.compile(r'^[0-9a-fA-F]{40}$')
+
+
+def GetOSName(platform_name=sys.platform):
+ if platform_name == 'cygwin' or platform_name.startswith('win'):
+ return 'win'
+ elif platform_name.startswith('linux'):
+ return 'unix'
+ elif platform_name.startswith('darwin'):
+ return 'mac'
+ else:
+ return platform_name
+
+
+def IsGitHash(revision):
+ return GIT_HASH_PATTERN.match(str(revision))
+
+
+def GetHttpClient():
+ # TODO(stgao): return implementation for appengine when running on appengine.
+ return HttpClientLocal
+
+
+def JoinLineNumbers(line_numbers, accepted_gap=1):
+ """Join line numbers into line blocks.
+
+ Args:
+    line_numbers: a list of line numbers.
+    accepted_gap: if two line numbers are within the given gap,
+                  they are combined together into a block.
+                  E.g. for (1, 2, 3, 6, 7, 8, 12), if |accepted_gap| = 1, result
+                  would be 1-3, 6-8, 12; if |accepted_gap| = 3, result would be
+                  1-8, 12; if |accepted_gap| = 4, result would be 1-12.
+ """
+ if not line_numbers:
+ return ''
+
+ line_numbers = map(int, line_numbers)
+ line_numbers.sort()
+
+ block = []
+ start_line_number = line_numbers[0]
+ last_line_number = start_line_number
+ for current_line_number in line_numbers[1:]:
+ if last_line_number + accepted_gap < current_line_number:
+ if start_line_number == last_line_number:
+ block.append('%d' % start_line_number)
+ else:
+ block.append('%d-%d' % (start_line_number, last_line_number))
+ start_line_number = current_line_number
+ last_line_number = current_line_number
+ else:
+ if start_line_number == last_line_number:
+ block.append('%d' % start_line_number)
+ else:
+ block.append('%d-%d' % (start_line_number, last_line_number))
+
+ return ', '.join(block)
diff --git a/chromium/tools/findit/component_dictionary.py b/chromium/tools/findit/component_dictionary.py
new file mode 100644
index 00000000000..d1a8967338e
--- /dev/null
+++ b/chromium/tools/findit/component_dictionary.py
@@ -0,0 +1,122 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class FileDictionary(object):
+ """Maps file in a stacktrace to its crash information.
+
+ It maps file to another dictionary, which maps the file's path to crashed
+ lines, stack frame indices and crashed functions.
+ """
+
+ def __init__(self):
+ """Initializes the file dictionary."""
+ self.file_dict = {}
+
+ def AddFile(self, file_path, crashed_line_range, stack_frame_index,
+ function):
+ """Adds file and its crash information to the map.
+
+ Args:
+ file_path: The path of the crashed file.
+ crashed_line_range: The crashed line of the file.
+ stack_frame_index: The file's position in the callstack.
+ function: The name of the crashed function.
+ """
+ # Populate the dictionary if this file path has not been added before.
+ if file_path not in self.file_dict:
+ self.file_dict[file_path] = {}
+ self.file_dict[file_path]['line_numbers'] = []
+ self.file_dict[file_path]['stack_frame_indices'] = []
+ self.file_dict[file_path]['function'] = []
+
+ # Add the crashed line, frame index and function name.
+ self.file_dict[file_path]['line_numbers'].append(
+ crashed_line_range)
+ self.file_dict[file_path]['stack_frame_indices'].append(
+ stack_frame_index)
+ self.file_dict[file_path]['function'].append(function)
+
+ def GetCrashedLineNumbers(self, file_path):
+ """Returns crashed line numbers given a file path."""
+ return self.file_dict[file_path]['line_numbers']
+
+ def GetCrashStackFrameIndices(self, file_path):
+ """Returns stack frame indices given a file path."""
+ return self.file_dict[file_path]['stack_frame_indices']
+
+ def GetCrashFunctions(self, file_path):
+ """Returns list of crashed functions given a file path."""
+ return self.file_dict[file_path]['function']
+
+ def __iter__(self):
+ return iter(self.file_dict)
+
+
+class ComponentDictionary(object):
+ """Represents a file dictionary.
+
+ It maps each component path to a file dictionary.
+ """
+
+ def __init__(self, callstack, components):
+ """Initializes the dictionary with given components."""
+ self.component_dict = {}
+
+ # Create file dictionary for all the components.
+ for component in components:
+ self.component_dict[component] = FileDictionary()
+
+ # Create file dict from callstack.
+ self.__CreateFileDictFromCallstack(callstack)
+
+ def GetFileDict(self, component):
+ """Returns a file dictionary for a given component."""
+ return self.component_dict.get(component)
+
+ def __GenerateFileDict(self, stack_frame_list):
+ """Generates file dictionary, given an instance of StackFrame list."""
+ # Iterate through the list of stackframe objects.
+ for stack_frame in stack_frame_list:
+ # If the component of this line is not in the list of components to
+ # look for, ignore this line.
+ component_path = stack_frame.component_path
+ if component_path not in self.component_dict:
+ continue
+
+ # Get values of the variables
+ file_path = stack_frame.file_path
+ crashed_line_range = stack_frame.crashed_line_range
+ stack_frame_index = stack_frame.index
+ function = stack_frame.function
+
+ # Add the file to this component's dictionary of files.
+ file_dict = self.component_dict[component_path]
+ file_dict.AddFile(file_path, crashed_line_range, stack_frame_index,
+ function)
+
+ def __CreateFileDictFromCallstack(self, callstack, top_n_frames=10):
+ """Creates a file dict that maps a file to the occurrence in the stack.
+
+ Args:
+ callstack: A list containing parsed result from a single stack
+ within a stacktrace. For example, a stacktrace from
+ previously-allocated thread in release build stacktrace.
+ top_n_frames: The number of frames to look for.
+
+ Returns:
+ Component_dict, a dictionary with key as a file name and value as another
+ dictionary, which maps the file's path (because there can be multiple
+ files with same name but in different directory) to the list of this
+ file's place in stack, lines that this file caused a crash, and the name
+ of the function.
+ """
+
+ # Only look at first top_n_frames of the stacktrace, below those are likely
+ # to be noisy. Parse the stacktrace into the component dictionary.
+ stack_list = callstack.GetTopNFrames(top_n_frames)
+ self.__GenerateFileDict(stack_list)
+
+ def __iter__(self):
+ return iter(self.component_dict)
diff --git a/chromium/tools/findit/config.ini b/chromium/tools/findit/config.ini
new file mode 100644
index 00000000000..f6071ecf274
--- /dev/null
+++ b/chromium/tools/findit/config.ini
@@ -0,0 +1,26 @@
+[svn:src/]
+changelog_url: http://build.chromium.org/cgi-bin/svn-log?url=http://src.chromium.org/svn/trunk/src/&range=%s
+revision_url: http://src.chromium.org/viewvc/chrome?revision=%d&view=revision
+diff_url: http://src.chromium.org/viewvc/chrome/trunk/%s?r1=%d&r2=%d&pathrev=%d
+blame_url: http://src.chromium.org/viewvc/chrome/trunk/%s?annotate=%s&pathrev=%s
+
+[svn:src/third_party/WebKit/]
+changelog_url: http://build.chromium.org/cgi-bin/svn-log?url=http://src.chromium.org/blink/trunk/&range=%s
+revision_url: http://src.chromium.org/viewvc/blink?revision=%d&view=revision
+diff_url: http://src.chromium.org/viewvc/blink/trunk/%s?r1=%d&r2=%d&pathrev=%d
+blame_url: http://src.chromium.org/viewvc/blink/trunk/%s?annotate=%s&pathrev=%s
+
+[svn:src/third_party/WebKit/Source/]
+changelog_url: http://build.chromium.org/cgi-bin/svn-log?url=http://src.chromium.org/blink/trunk/&range=%s
+revision_url: http://src.chromium.org/viewvc/blink?revision=%d&view=revision
+diff_url: http://src.chromium.org/viewvc/blink/trunk/%s?r1=%d&r2=%d&pathrev=%d
+blame_url: http://src.chromium.org/viewvc/blink/trunk/%s?annotate=%s&pathrev=%s
+
+[git]
+changelog_url: /+log/%s..%s
+revision_url: /+/%s
+diff_url: /+/%s^!/%s
+blame_url: /+blame/%s/%s?format=json
+
+[codereview]
+review_url: https://codereview.chromium.org/api/%s?message=true
diff --git a/chromium/tools/findit/crash_utils.py b/chromium/tools/findit/crash_utils.py
new file mode 100644
index 00000000000..7e8011334e2
--- /dev/null
+++ b/chromium/tools/findit/crash_utils.py
@@ -0,0 +1,570 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import atexit
+import cgi
+import ConfigParser
+import json
+import os
+import Queue
+import threading
+import time
+
+from common import utils
+from result import Result
+
+
+INFINITY = float('inf')
+
+MAX_THREAD_NUMBER = 10
+TASK_QUEUE = None
+
+
def SignalWorkerThreads():
  """Asks every worker thread to shut down by queueing sentinel tasks.

  A None task is the agreed-upon stop signal: each worker exits as soon
  as it dequeues one. Does nothing when the task queue was never created.
  """
  global TASK_QUEUE
  if not TASK_QUEUE:
    return

  # One sentinel per worker so that all of them get a chance to stop.
  for _ in range(MAX_THREAD_NUMBER):
    TASK_QUEUE.put(None)

  # Give worker threads a chance to exit.
  # Workaround the harmless bug in python 2.7 below.
  time.sleep(1)


atexit.register(SignalWorkerThreads)
+
+
def Worker():
  """Daemon worker loop: executes tasks from the global TASK_QUEUE.

  Each task is a tuple (function, args, kwargs, result_semaphore); a
  falsy task is the shutdown sentinel pushed by SignalWorkerThreads.
  Exceptions raised by a task are swallowed (tasks are best-effort), but
  the semaphore is always released so RunTasks keeps an accurate count of
  completed tasks.
  """
  global TASK_QUEUE
  while True:
    try:
      task = TASK_QUEUE.get()
      if not task:
        # Sentinel received: exit this worker cleanly.
        return
    except TypeError:
      # According to http://bugs.python.org/issue14623, this is a harmless bug
      # in python 2.7 which won't be fixed.
      # The exception is raised on daemon threads when python interpreter is
      # shutting down.
      return

    function, args, kwargs, result_semaphore = task
    try:
      function(*args, **kwargs)
    except Exception:
      # A failing task must not kill the worker. (Was a bare `except:`,
      # which would also have swallowed KeyboardInterrupt and SystemExit.)
      pass
    finally:
      # Signal one task is done in case of exception.
      result_semaphore.release()
+
+
def RunTasks(tasks):
  """Run given tasks. Not thread-safe: no concurrent calls of this function.

  Return after all tasks were completed. A task is a dict as below:
    {
      'function': the function to call,
      'args': the positional argument to pass to the function,
      'kwargs': the key-value arguments to pass to the function,
    }

  The worker pool (MAX_THREAD_NUMBER daemon threads draining TASK_QUEUE)
  is created lazily on the first call and reused by later calls.
  """
  if not tasks:
    return

  global TASK_QUEUE
  if not TASK_QUEUE:
    TASK_QUEUE = Queue.Queue()
    for index in range(MAX_THREAD_NUMBER):
      thread = threading.Thread(target=Worker, name='worker_%s' % index)
      # Set as daemon, so no join is needed.
      thread.daemon = True
      thread.start()

  result_semaphore = threading.Semaphore(0)
  # Push task to task queue for execution.
  for task in tasks:
    TASK_QUEUE.put(
        (task['function'], task.get('args', []),
         task.get('kwargs', {}), result_semaphore))

  # Wait until all tasks to be executed.
  # Each completed task releases the semaphore exactly once (see Worker),
  # so acquiring it len(tasks) times blocks until everything finished.
  for _ in tasks:
    result_semaphore.acquire()
+
+
def GetRepositoryType(revision_number):
  """Classifies a revision identifier as git or svn.

  Args:
    revision_number: A revision number or git hash.

  Returns:
    'git' when the identifier looks like a git hash (as decided by
    utils.IsGitHash), 'svn' otherwise.
  """
  return 'git' if utils.IsGitHash(revision_number) else 'svn'
+
+
def ParseURLsFromConfig(file_name):
  """Parses URLS from the config file.

  The file should be in python config format, where svn section is in the
  format "svn:component_path".
  Each of the section for svn should contain changelog_url, revision_url,
  diff_url and blame_url.

  Args:
    file_name: The name of the file that contains URL information. It is
               looked up next to this module.

  Returns:
    A dictionary that maps repository type to list of URLs. For svn, it maps
    key 'svn' to another dictionary, which maps component path to the URLs
    as explained above. For git, it maps to the URLs as explained above.
    Returns None if the config file could not be read.
  """
  config = ConfigParser.ConfigParser()

  # Get the absolute path of the config file, and read the file.
  config_file_path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                                  file_name)
  # ConfigParser.read returns the list of files successfully parsed; an
  # empty list means the file was missing or unreadable. (The previous
  # `if not config:` check was dead code because a parser instance is
  # always truthy, so a missing file silently produced {} instead of None.)
  if not config.read(config_file_path):
    return None

  # Iterate through the config file, check for sections.
  config_dict = {}
  for section in config.sections():
    # Sections without ':' (e.g. [git], [codereview]) map straight to their
    # option/url pairs, with no per-component layer.
    if ':' not in section:
      for option in config.options(section):
        if section not in config_dict:
          config_dict[section] = {}

        url = config.get(section, option)
        config_dict[section][option] = url

      continue

    # Get repository type and component name from the section name, e.g.
    # 'svn:src/third_party/WebKit/' -> ('svn', 'src/third_party/WebKit/').
    repository_type, component_path = section.split(':', 1)

    # Add 'svn' as the key, if it is not already there.
    if repository_type not in config_dict:
      config_dict[repository_type] = {}
    url_map_for_repository = config_dict[repository_type]

    # Add the path to the 'svn', if it is not already there.
    if component_path not in url_map_for_repository:
      url_map_for_repository[component_path] = {}
    type_to_url = url_map_for_repository[component_path]

    # Add all URLs to this map.
    for option in config.options(section):
      type_to_url[option] = config.get(section, option)

  return config_dict
+
+
def NormalizePath(path, parsed_deps):
  """Maps a raw file path onto a component and a component-relative path.

  Args:
    path: A string representing a path.
    parsed_deps: A map from component path to its component name, repository,
                 etc.

  Returns:
    A (component_path, component_name, relative_path) tuple. When no
    component from parsed_deps matches, the path is attributed to chromium
    ('src/') unchanged.
  """
  # Canonicalize first: normpath collapses '.'/'..' pieces, and the replace
  # maps Windows-style backslashes to forward slashes.
  normalized_path = os.path.normpath(path).replace('\\', '/')

  # Longest component paths first, so the most specific component wins.
  for component_path in sorted(parsed_deps, key=len, reverse=True):
    # Drop the leading 'src/' (except for chromium itself, i.e. 'src/').
    stripped = component_path
    if stripped.startswith('src/') and stripped != 'src/':
      stripped = stripped[len('src/'):]

    # syzyasan builds report lowercased file paths, so also try a
    # case-insensitive match.
    stripped_lower = stripped.lower()

    if stripped not in normalized_path and \
        stripped_lower not in normalized_path:
      continue

    # Prefer the lowercase form when that is what actually occurs in the
    # path (i.e. the tooling lowercased it).
    if stripped_lower in normalized_path:
      matched = stripped_lower
    else:
      matched = stripped

    # Keep only what follows the component's own path.
    normalized_path = normalized_path.split(matched, 1)[1]

    # Blink keeps sources under 'Source/'; re-attach that prefix when it
    # got cut off.
    # NOTE(review): after the 'src/' strip above, stripped_lower can never
    # equal 'src/third_party/webkit/source', so this branch looks
    # unreachable for DEPS-style component paths -- preserved as-is.
    if (stripped_lower == 'src/third_party/webkit/source' and
        not normalized_path.lower().startswith('source/')):
      normalized_path = (matched.split('/')[-2] + '/' + normalized_path)

    return (component_path, parsed_deps[component_path]['name'],
            normalized_path)

  # If the path does not match any component, default to chromium.
  return ('src/', 'chromium', normalized_path)
+
+
def SplitRange(regression):
  """Splits a clusterfuzz-style regression range into its two endpoints.

  Args:
    regression: A string in format 'r1234:r5678'.

  Returns:
    A two-element list of revision strings with any leading 'r' removed,
    e.g. ['1234', '5678']. None when the input is empty or is not a
    two-part range.
  """
  if not regression:
    return None

  parts = regression.split(':')

  # If regression information is not available, return none.
  if len(parts) != 2:
    return None

  # Drop a single leading 'r' marker only (lstrip would also eat leading
  # r's of a git hash that merely starts with that letter).
  return [rev[1:] if rev.startswith('r') else rev for rev in parts]
+
+
def LoadJSON(json_string):
  """Deserializes a JSON string, mapping parse failures to None.

  Args:
    json_string: A string to get object from.

  Returns:
    The decoded object, or None when the string is not valid JSON.
  """
  try:
    return json.loads(json_string)
  except ValueError:
    return None
+
+
def GetDataFromURL(url):
  """Retrieves raw data from URL, retrying up to 10 times.

  Args:
    url: URL to get data from.

  Returns:
    The raw response body on HTTP 200, or None if the data retrieval fails.
  """
  status_code, data = utils.GetHttpClient().Get(url, retries=10)
  if status_code == 200:
    return data
  else:
    # Return None if it fails to read data.
    return None
+
+
def FindMinLineDistance(crashed_line_list, changed_line_numbers,
                        line_range=3):
  """Calculates how far the changed line is from one of the crashes.

  Finds the minimum distance between the lines that the file crashed on
  and the lines that the file changed. For example, if the file crashed on
  line 200 and the CL changes line 203,204 and 205, the function returns 3.

  Args:
    crashed_line_list: A list of line ranges that the file crashed on.
    changed_line_numbers: A list of lines that the file changed.
    line_range: Number of lines to look back for.

  Returns:
    A (min_distance, crashed_line, changed_line) tuple for the closest
    pair. If either input is empty, the distance is inf and both line
    values are -1.
  """
  min_distance = float('inf')
  # Fix: the original reused `crashed_line` as a loop variable below, so
  # the -1 sentinel was clobbered whenever crashed_line_list was non-empty
  # but changed_line_numbers was empty.
  min_crashed_line = -1
  min_changed_line = -1

  # Expand every crashed line into itself plus the `line_range` lines
  # preceding it.
  crashed_line_numbers = set()
  for crashed_line_range in crashed_line_list:
    for line in crashed_line_range:
      crashed_line_numbers.update(range(line - line_range, line + 1))

  # Track the closest (crashed, changed) pair.
  for crashed_line in crashed_line_numbers:
    for changed_line in changed_line_numbers:
      current_distance = abs(crashed_line - changed_line)
      if current_distance < min_distance:
        min_distance = current_distance
        min_crashed_line = crashed_line
        min_changed_line = changed_line

  return (min_distance, min_crashed_line, min_changed_line)
+
+
def GuessIfSameSubPath(path1, path2):
  """Heuristically decides whether two paths refer to the same location.

  Splits both paths on '/' and counts folder names common to both; the
  paths are considered the same when at least min(3, shorter path length)
  names are shared.

  Args:
    path1: First path.
    path2: Second path to compare.

  Returns:
    True if they are thought to be the same path, False otherwise.
  """
  parts1 = path1.split('/')
  parts2 = path2.split('/')

  shared = set(parts1) & set(parts2)
  threshold = min(3, len(parts1), len(parts2))
  return len(shared) >= threshold
+
+
def FindMinStackFrameNumber(stack_frame_indices, priorities):
  """Finds the minimum stack number among the highest-priority files.

  Args:
    stack_frame_indices: A list of lists containing stack position.
    priorities: A list of priority for each file (lower number means
                higher priority).

  Returns:
    inf if the highest-priority files have no stack frame information,
    the minimum stack number otherwise.
  """
  # Lowest number == highest priority; only those files are considered.
  top_priority = min(priorities)

  # Collect every frame index of every file at that priority level.
  candidate_frames = []
  for index, priority in enumerate(priorities):
    if priority == top_priority:
      candidate_frames.extend(stack_frame_indices[index])

  # No frame information at all -> inf, otherwise the smallest frame.
  return min(candidate_frames) if candidate_frames else float('inf')
+
+
def AddHyperlink(text, link):
  """Returns an HTML anchor tag wrapping the given text and link.

  Both pieces are HTML-escaped (the href with quote escaping as well)
  before being interpolated into the tag.

  Args:
    text: A string to add link.
    link: A link to add to the string.

  Returns:
    A string with hyperlink added.
  """
  # NOTE: cgi.escape was removed in Python 3.8; html.escape is the
  # replacement if this module is ever ported to Python 3.
  sanitized_link = cgi.escape(link, quote=True)
  sanitized_text = cgi.escape(str(text))
  return '<a href="%s">%s</a>' % (sanitized_link, sanitized_text)
+
+
def PrettifyList(items):
  """Returns the items joined by ', ' with no surrounding brackets.

  Args:
    items: A list to prettify.
  Returns:
    A string representation of the list.
  """
  return ', '.join(str(item) for item in items)
+
+
def PrettifyFrameInfo(frame_indices, functions):
  """Return a string to represent the frames with functions."""
  # Keep only the bare function name (strip the argument list after '(').
  descriptions = [
      'frame #%s, "%s"' % (index, name.split('(')[0])
      for index, name in zip(frame_indices, functions)
  ]
  return '; '.join(descriptions)
+
+
def PrettifyFiles(file_list):
  """Returns a string representation of a list of file names.

  Args:
    file_list: A list of tuple, (file_name, file_url).
  Returns:
    A string representation of file names with their urls.
  """
  pieces = ['\n']
  for name, url in file_list:
    pieces.append(' %s\n' % AddHyperlink(name, url))
  return ''.join(pieces)
+
+
def Intersection(crashed_line_list, stack_frame_index, changed_line_numbers,
                 function, line_range=3):
  """Finds the overlap between changed lines and crashed lines.

  Finds the intersection of the lines that caused the crash and
  lines that the file changes. The intersection looks within `line_range`
  lines before each crashed line.

  Args:
    crashed_line_list: A list of line-number lists that the file crashed on.
    stack_frame_index: A list of positions in stack for each of the lines.
    changed_line_numbers: A list of lines that the file changed.
    function: A list of functions that the file crashed on.
    line_range: Number of lines to look backwards from crashed lines.

  Returns:
    line_number_intersection: Intersection between crashed_line_list and
                              changed_line_numbers.
    stack_frame_index_intersection: Stack number for each of the
                                    intersections.
    function_intersection: Function name for each of the intersections.
  """
  line_number_intersection = []
  stack_frame_index_intersection = []
  function_intersection = []

  # Iterate through the crashed lines, their position in the stack and the
  # function they occurred in. (Distinct loop-variable names avoid the
  # original's shadowing of the stack_frame_index/function parameters.)
  for lines, frame_index, function_name in zip(
      crashed_line_list, stack_frame_index, function):
    # Consider the crashed lines plus the `line_range` lines preceding
    # each of them.
    lines_and_predecessors = set()
    for line in lines:
      lines_and_predecessors.update(range(line - line_range, line + 1))

    for changed_line in changed_line_numbers:
      # If the CL does not touch this crashed region, check the next line.
      if changed_line not in lines_and_predecessors:
        continue

      intersected_line = set()
      for line in lines:
        if line in changed_line_numbers:
          # The changed line is exactly a crashed line.
          intersected_line.add(line)
        else:
          # The changed line is within `line_range` lines of a crashed line.
          intersected_line.add(changed_line)

      # Avoid adding the same set of lines twice. (Fix: the original
      # compared the set itself against a list of lists, which can never
      # match, so duplicates were always appended.)
      intersected_as_list = list(intersected_line)
      if intersected_as_list not in line_number_intersection:
        line_number_intersection.append(intersected_as_list)
        stack_frame_index_intersection.append(frame_index)
        function_intersection.append(function_name)
      # Only the first overlapping changed line is recorded per frame.
      break

  return (line_number_intersection, stack_frame_index_intersection,
          function_intersection)
+
+
def MatchListToResultList(matches):
  """Convert list of matches to the list of result objects.

  Args:
    matches: A list of (stack_priority, cl, match) tuples, where the match
             object carries the suspect CL's metadata.
  Returns:
    A list of result objects, one per match.
  """
  results = []
  for _, cl, match in matches:
    # For matches, line content does not exist, hence the None argument.
    results.append(Result(
        cl, match.revision_url, match.component_name, match.author,
        match.reason.strip(), match.review_url, match.reviewers, None,
        match.message, match.time))
  return results
+
+
def BlameListToResultList(blame_list):
  """Convert blame list to the list of result objects.

  Args:
    blame_list: A list of blame objects.

  Returns:
    A list of result objects.
  """
  results = []
  for blame in blame_list:
    reason = (
        'The CL last changed line %s of file %s, which is stack frame %d.' %
        (blame.line_number, blame.file, blame.stack_frame_index))
    # Blame objects carry no code-review URL and no reviewer list.
    results.append(Result(
        blame.revision, blame.url, blame.component_name, blame.author,
        reason, None, None, blame.line_content, blame.message, blame.time))
  return results
diff --git a/chromium/tools/findit/deps_config.json b/chromium/tools/findit/deps_config.json
new file mode 100644
index 00000000000..57b41f9b2bf
--- /dev/null
+++ b/chromium/tools/findit/deps_config.json
@@ -0,0 +1,17 @@
+{
+ "cr_rev_url": "https://cr-rev.appspot.com/_ah/api/crrev/v1/redirect/%s",
+ "chromium_git_base_url": "https://chromium.googlesource.com/chromium/src/",
+ "chromium_git_file_url":
+ "https://chromium.googlesource.com/chromium/src/+/%s/%s?format=text",
+ "host_directories": [
+ "src/chrome/browser/resources/",
+ "src/chrome/test/data/layout_tests/",
+ "src/media/",
+ "src/sdch/",
+ "src/testing/",
+ "src/third_party/WebKit/",
+ "src/third_party/",
+ "src/tools/",
+ "src/"
+ ]
+}
diff --git a/chromium/tools/findit/findit_for_clusterfuzz.py b/chromium/tools/findit/findit_for_clusterfuzz.py
new file mode 100644
index 00000000000..c101fbcae36
--- /dev/null
+++ b/chromium/tools/findit/findit_for_clusterfuzz.py
@@ -0,0 +1,224 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import chromium_deps
+from common import utils
+import crash_utils
+import findit_for_crash as findit
+import stacktrace
+
+
def SplitStacktrace(stacktrace_string):
  """Preprocesses stacktrace string into two parts, release and debug.

  Args:
    stacktrace_string: A string representation of stacktrace,
                       in clusterfuzz format.

  Returns:
    A tuple (release_lines, debug_lines) of lists of stack-frame lines;
    either element is None when the corresponding section is absent.
  """
  # Frames are collected only while inside a release/debug section;
  # unsymbolised (or any other) sections are skipped.
  collecting = False
  release_lines = None
  debug_lines = None
  current_lines = []

  for raw_line in stacktrace_string.splitlines():
    line = raw_line.strip()

    # A '+-...-+' banner marks the start of a new stacktrace section.
    if line.startswith('+-') and line.endswith('-+'):
      if 'Release Build Stacktrace' in line:
        collecting = True
        current_lines = []
        release_lines = current_lines
      elif 'Debug Build Stacktrace' in line:
        collecting = True
        current_lines = []
        debug_lines = current_lines
      else:
        # Any other banner (e.g. unsymbolised stacktrace): skip its lines
        # until the next release/debug banner.
        collecting = False
    elif collecting:
      # A regular stack-frame line inside a tracked section.
      current_lines.append(line)

  return (release_lines, debug_lines)
+
+
def FindCulpritCLs(stacktrace_string,
                   build_type,
                   chrome_regression=None,
                   component_regression=None,
                   chrome_crash_revision=None,
                   component_crash_revision=None,
                   crashing_component_path=None,
                   crashing_component_name=None,
                   crashing_component_repo_url=None):
  """Returns the result, a list of result.Result objects and message.

  If either or both of component_regression and component_crash_revision is
  not None, it is assumed that crashing_component_path and
  crashing_component_repo_url are not None.

  Args:
    stacktrace_string: A string representing stacktrace.
    build_type: The type of the job.
    chrome_regression: A string, chrome regression from clusterfuzz, in format
                       '123456:123457'
    component_regression: A string, component regression in the same format.
    chrome_crash_revision: A crash revision of chrome, in string.
    component_crash_revision: A crash revision of the component,
                              if component build.
    crashing_component_path: A relative path of the crashing component, as in
                             DEPS file. For example, it would be 'src/v8' for
                             v8 and 'src/third_party/WebKit' for blink.
    crashing_component_name: A name of the crashing component, such as v8.
    crashing_component_repo_url: The URL of the crashing component's repo, as
                                 shown in DEPS file. For example,
                                 'https://chromium.googlesource.com/skia.git'
                                 for skia.

  Returns:
    A (message, results) tuple: a short description of where the results
    came from (or why none could be computed), and a list of result objects.
  """
  build_type = build_type.lower()
  component_to_crash_revision_dict = {}
  component_to_regression_dict = {}

  # If chrome regression is available, parse DEPS file.
  chrome_regression = crash_utils.SplitRange(chrome_regression)
  if chrome_regression:
    chrome_regression_start = chrome_regression[0]
    chrome_regression_end = chrome_regression[1]

    # Do not parse regression information for crashes introduced before the
    # first archived build.
    if chrome_regression_start != '0':
      component_to_regression_dict = chromium_deps.GetChromiumComponentRange(
          chrome_regression_start, chrome_regression_end)
      if not component_to_regression_dict:
        return (('Failed to get component regression ranges for chromium '
                 'regression range %s:%s'
                 % (chrome_regression_start, chrome_regression_end)), [])

  # Parse crash revision.
  if chrome_crash_revision:
    component_to_crash_revision_dict = chromium_deps.GetChromiumComponents(
        chrome_crash_revision)
    if not component_to_crash_revision_dict:
      return (('Failed to get component dependencies for chromium revision "%s"'
               % chrome_crash_revision), [])

  # Check if component regression information is available.
  component_regression = crash_utils.SplitRange(component_regression)
  if component_regression:
    component_regression_start = component_regression[0]
    component_regression_end = component_regression[1]

    # If this component already has an entry in parsed DEPS file, overwrite
    # regression range and url.
    if crashing_component_path in component_to_regression_dict:
      component_regression_info = \
          component_to_regression_dict[crashing_component_path]
      component_regression_info['old_revision'] = component_regression_start
      component_regression_info['new_revision'] = component_regression_end
      component_regression_info['repository'] = crashing_component_repo_url

    # If this component does not have an entry, add the entry to the parsed
    # DEPS file.
    else:
      repository_type = crash_utils.GetRepositoryType(
          component_regression_start)
      component_regression_info = {
          'path': crashing_component_path,
          'rolled': True,
          'name': crashing_component_name,
          'old_revision': component_regression_start,
          'new_revision': component_regression_end,
          'repository': crashing_component_repo_url,
          'repository_type': repository_type
      }
      component_to_regression_dict[crashing_component_path] = \
          component_regression_info

  # If component crash revision is available, add it to the parsed crash
  # revisions.
  if component_crash_revision:

    # If this component has already a crash revision info, overwrite it.
    if crashing_component_path in component_to_crash_revision_dict:
      component_crash_revision_info = \
          component_to_crash_revision_dict[crashing_component_path]
      component_crash_revision_info['revision'] = component_crash_revision
      component_crash_revision_info['repository'] = crashing_component_repo_url

    # If not, add it to the parsed DEPS.
    else:
      if utils.IsGitHash(component_crash_revision):
        repository_type = 'git'
      else:
        repository_type = 'svn'
      component_crash_revision_info = {
          'path': crashing_component_path,
          'name': crashing_component_name,
          'repository': crashing_component_repo_url,
          'repository_type': repository_type,
          'revision': component_crash_revision
      }
      component_to_crash_revision_dict[crashing_component_path] = \
          component_crash_revision_info

  # Parsed DEPS is used to normalize the stacktrace. Since parsed regression
  # and parsed crash state essentially contain same information, use either.
  if component_to_regression_dict:
    parsed_deps = component_to_regression_dict
  elif component_to_crash_revision_dict:
    parsed_deps = component_to_crash_revision_dict
  else:
    # (Typo fixed: the message previously read 'at lease one'.)
    return (('Identifying culprit CL requires at least one of regression '
             'information or crash revision'), [])

  # Split stacktrace into release build/debug build and parse them.
  (release_build_stacktrace, debug_build_stacktrace) = SplitStacktrace(
      stacktrace_string)
  if not (release_build_stacktrace or debug_build_stacktrace):
    # No section banners found at all: treat the whole input as a single
    # (release) stacktrace.
    parsed_release_build_stacktrace = stacktrace.Stacktrace(
        stacktrace_string.splitlines(), build_type, parsed_deps)
  else:
    parsed_release_build_stacktrace = stacktrace.Stacktrace(
        release_build_stacktrace, build_type, parsed_deps)

  # NOTE(review): debug_build_stacktrace may be None here; Stacktrace
  # presumably tolerates that -- confirm against stacktrace.py.
  parsed_debug_build_stacktrace = stacktrace.Stacktrace(
      debug_build_stacktrace, build_type, parsed_deps)

  # Get a highest priority callstack (main_stack) from stacktrace, with
  # release build stacktrace in higher priority than debug build stacktrace.
  # This stack is the callstack to find blame information for.
  if parsed_release_build_stacktrace.stack_list:
    main_stack = parsed_release_build_stacktrace.GetCrashStack()
  elif parsed_debug_build_stacktrace.stack_list:
    main_stack = parsed_debug_build_stacktrace.GetCrashStack()
  else:
    if 'mac_' in build_type:
      return ('No line information available in stacktrace.', [])

    return ('Findit failed to find any stack trace. Is it in a new format?', [])

  # Run the algorithm on the parsed stacktrace, and return the result.
  stacktrace_list = [parsed_release_build_stacktrace,
                     parsed_debug_build_stacktrace]
  return findit.FindItForCrash(
      stacktrace_list, main_stack, component_to_regression_dict,
      component_to_crash_revision_dict)
diff --git a/chromium/tools/findit/findit_for_crash.py b/chromium/tools/findit/findit_for_crash.py
new file mode 100644
index 00000000000..689294987ca
--- /dev/null
+++ b/chromium/tools/findit/findit_for_crash.py
@@ -0,0 +1,664 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+from threading import Lock
+
+import blame
+from common import utils
+import component_dictionary
+import crash_utils
+import git_repository_parser
+import match_set
+import svn_repository_parser
+
+
+LINE_CHANGE_PRIORITY = 1
+FILE_CHANGE_PRIORITY = 2
+_THIS_DIR = os.path.abspath(os.path.dirname(__file__))
+CONFIG = crash_utils.ParseURLsFromConfig(os.path.join(_THIS_DIR,
+ 'config.ini'))
+
+
+def GenerateMatchEntry(
+ matches, revision_info, revision_number, file_path, function,
+ component_path, component_name, crashed_line_numbers, stack_frame_indices,
+ file_change_type, repository_parser):
+ """Generates a match object and adds it to the match set.
+
+ Args:
+ matches: A matchset object, a map from CL to a match object.
+ revision_info: The revision information, a map from fields (message,
+ changed files, etc) to its values.
+ revision_number: The SVN revision number or git hash.
+ file_path: The path of the file.
+ function: The function that caused a crash.
+ component_path: The path of the component this file is from.
+ component_name: The name of the component the file is from.
+ crashed_line_numbers: The list of the lines in the file that caused
+ the crash.
+ stack_frame_indices: The list of positions of this file within a stack.
+ file_change_type: Whether file is modified, added or deleted.
+ repository_parser: The parser object to parse line diff.
+ """
+ # Check if this CL should be ignored.
+ with matches.matches_lock:
+ if revision_number in matches.cls_to_ignore:
+ return
+
+ # If this CL is not already identified as suspected, create a new entry.
+ if revision_number not in matches.matches:
+ match = match_set.Match(revision_info, component_name)
+ message = revision_info['message']
+ # TODO(jeun): Don't hold lock while issuing http request.
+ match.ParseMessage(message, matches.codereview_api_url)
+
+ # If this match is a revert, add to the set of CLs to be ignored.
+ if match.is_revert:
+ matches.cls_to_ignore.add(revision_number)
+
+ # If this match has info on what CL it is reverted from, add that CL.
+ if match.revert_of:
+ matches.cls_to_ignore.add(match.revert_of)
+
+ return
+
+ matches.matches[revision_number] = match
+
+ else:
+ match = matches.matches[revision_number]
+
+ (diff_url, changed_line_numbers, changed_line_contents) = (
+ repository_parser.ParseLineDiff(
+ file_path, component_path, file_change_type, revision_number))
+
+ # Ignore this match if the component is not supported for svn.
+ if not diff_url:
+ return
+
+ # Find the intersection between the lines that this file crashed on and
+ # the changed lines.
+ (line_number_intersection, stack_frame_index_intersection, functions) = (
+ crash_utils.Intersection(
+ crashed_line_numbers, stack_frame_indices, changed_line_numbers,
+ function))
+
+ # Find the minimum distance between the changed lines and crashed lines.
+ (min_distance, min_crashed_line, min_changed_line) = \
+ crash_utils.FindMinLineDistance(crashed_line_numbers,
+ changed_line_numbers)
+
+ # Check whether this CL changes the crashed lines or not.
+ if line_number_intersection:
+ priority = LINE_CHANGE_PRIORITY
+ else:
+ priority = FILE_CHANGE_PRIORITY
+
+ # Add the parsed information to the object.
+ with matches.matches_lock:
+ match.crashed_line_numbers.append(line_number_intersection)
+
+ file_name = file_path.split('/')[-1]
+ match.changed_files.append(file_name)
+
+ # Update the min distance only if it is less than the current one.
+ if min_distance < match.min_distance:
+ match.min_distance = min_distance
+ match.min_distance_info = (file_name, min_crashed_line, min_changed_line)
+
+ # If this CL does not change the crashed line, all occurrence of this
+ # file in the stack has the same priority.
+ if not stack_frame_index_intersection:
+ stack_frame_index_intersection = stack_frame_indices
+ functions = function
+ match.stack_frame_indices.append(stack_frame_index_intersection)
+ match.changed_file_urls.append(diff_url)
+ match.priorities.append(priority)
+ match.function_list.append(functions)
+
+
+def FindMatch(revisions_info_map, file_to_revision_info, file_to_crash_info,
+ component_path, component_name, repository_parser,
+ codereview_api_url):
+ """Finds a CL that modifies file in the stacktrace.
+
+ Args:
+ revisions_info_map: A dictionary mapping revision number to the CL
+ information.
+ file_to_revision_info: A dictionary mapping file to the revision that
+ modifies it.
+ file_to_crash_info: A dictionary mapping file to its occurrence in
+ stacktrace.
+ component_path: The path of the component to search for.
+ component_name: The name of the component to search for.
+ repository_parser: The parser object to parse the line diff.
+ codereview_api_url: A code review url to retrieve data from.
+
+ Returns:
+ Matches, a set of match objects.
+ """
+ matches = match_set.MatchSet(codereview_api_url)
+
+ tasks = []
+ # Iterate through the crashed files in the stacktrace.
+ for crashed_file_path in file_to_crash_info:
+ # Ignore header file.
+ if crashed_file_path.endswith('.h'):
+ continue
+
+ # If the file in the stacktrace is not changed in any commits, continue.
+ for changed_file_path in file_to_revision_info:
+ changed_file_name = changed_file_path.split('/')[-1].lower()
+ crashed_file_name = crashed_file_path.split('/')[-1].lower()
+ if changed_file_name != crashed_file_name:
+ continue
+
+ if not crash_utils.GuessIfSameSubPath(
+ changed_file_path.lower(), crashed_file_path.lower()):
+ continue
+
+ crashed_line_numbers = file_to_crash_info.GetCrashedLineNumbers(
+ crashed_file_path)
+ stack_frame_nums = file_to_crash_info.GetCrashStackFrameIndices(
+ crashed_file_path)
+ functions = file_to_crash_info.GetCrashFunctions(crashed_file_path)
+
+ # Iterate through the CLs that this file path is changed.
+ for (cl, file_change_type) in file_to_revision_info[changed_file_path]:
+ # If the file change is delete, ignore this CL.
+ if file_change_type == 'D':
+ continue
+
+ revision = revisions_info_map[cl]
+
+ tasks.append({
+ 'function': GenerateMatchEntry,
+ 'args':[matches, revision, cl, changed_file_path, functions,
+ component_path, component_name, crashed_line_numbers,
+ stack_frame_nums, file_change_type,
+ repository_parser]})
+
+ # Run all the tasks.
+ crash_utils.RunTasks(tasks)
+
+ matches.RemoveRevertedCLs()
+
+ return matches
+
+
+def FindMatchForComponent(component_path, file_to_crash_info, changelog,
+ callstack_priority, results, results_lock):
+ """Parses changelog and finds suspected CLs for a given component.
+
+ Args:
+ component_path: The path of component to look for the culprit CL.
+ file_to_crash_info: A dictionary mapping file to its occurrence in
+ stackframe.
+ changelog: The parsed changelog for this component.
+ callstack_priority: The priority of this call stack, 0 if from crash stack,
+ 1 if from freed, 2 if from previously allocated.
+ results: A dictionary to store the result.
+ results_lock: A lock that guards results.
+ """
+ (repository_parser, component_name, revisions, file_to_revision_map) = \
+ changelog
+
+ # Find match for this component.
+ codereview_api_url = CONFIG['codereview']['review_url']
+ component_result = FindMatch(
+ revisions, file_to_revision_map, file_to_crash_info, component_path,
+ component_name, repository_parser, codereview_api_url)
+ matches = component_result.matches
+
+ # For all the match results in a dictionary, add to the list so that it
+ # can be sorted.
+ with results_lock:
+ for cl in matches:
+ match = matches[cl]
+ results.append((callstack_priority, cl, match))
+
+
+def FindMatchForCallstack(
+ callstack, components, component_to_changelog_map, results,
+ results_lock):
+ """Finds culprit cl for a stack within a stacktrace.
+
+ For each components to look for, create new thread that computes the matches
+ and join the results at the end.
+
+ Args:
+ callstack: A callstack in a stacktrace to find the result for.
+ components: A set of components to look for.
+ component_to_changelog_map: A map from component to its parsed changelog.
+ results: A list to aggregate results from all stacktraces.
+ results_lock: A lock that guards results.
+ """
+ # Create component dictionary from the component and call stack.
+ component_dict = component_dictionary.ComponentDictionary(callstack,
+ components)
+ callstack_priority = callstack.priority
+
+ # Iterate through all components.
+ for component_path in component_dict:
+ # If the component to consider in this callstack is not in the parsed list
+ # of components, ignore this one.
+ if component_path not in component_to_changelog_map:
+ continue
+
+ changelog = component_to_changelog_map[component_path]
+ file_to_crash_info = component_dict.GetFileDict(component_path)
+ FindMatchForComponent(component_path, file_to_crash_info, changelog,
+ callstack_priority, results, results_lock)
+
+
+def FindMatchForStacktrace(stacktrace, components,
+ component_to_regression_dict):
+ """Finds the culprit CL for stacktrace.
+
+ The passed stacktrace is either from release build stacktrace
+ or debug build stacktrace.
+
+ Args:
+ stacktrace: A list of parsed stacks within a stacktrace.
+ components: A set of components to look for.
+ component_to_regression_dict: A dictionary mapping component path to
+ its regression.
+
+ Returns:
+ A list of match results from all stacks.
+ """
+ # A list to aggregate results from all the callstacks in the stacktrace.
+ results = []
+ results_lock = Lock()
+
+ # Setup parsers.
+ svn_parser = svn_repository_parser.SVNParser(CONFIG['svn'])
+ git_parser = git_repository_parser.GitParser(component_to_regression_dict,
+ CONFIG['git'])
+
+ # Create a cache of parsed revisions.
+ component_to_changelog_map = {}
+ for component_path in components:
+ component_object = component_to_regression_dict[component_path]
+ range_start = component_object['old_revision']
+ range_end = component_object['new_revision']
+
+ # If range start is 0, the range is too large and the crash has been
+ # introduced before the archived build, so ignore this case.
+ if range_start == '0':
+ continue
+
+ component_name = component_to_regression_dict[component_path]['name']
+
+ is_git = utils.IsGitHash(range_start)
+ if is_git:
+ repository_parser = git_parser
+ else:
+ repository_parser = svn_parser
+
+ (revisions, file_to_revision_map) = repository_parser.ParseChangelog(
+ component_path, range_start, range_end)
+
+ # If the returned map from ParseChangelog is empty, we don't need to look
+ # further because either the parsing failed or the changelog is empty.
+ if not (revisions and file_to_revision_map):
+ continue
+
+ component_to_changelog_map[component_path] = (repository_parser,
+ component_name,
+ revisions,
+ file_to_revision_map)
+
+ # Analyze each of the call stacks in the stacktrace.
+ for callstack in stacktrace.stack_list:
+ FindMatchForCallstack(callstack, components, component_to_changelog_map,
+ results, results_lock)
+
+ return results
+
+
+def SortMatchesFunction(match_with_stack_priority):
+ """A function to sort the match triple.
+
+ Currently, it sorts the list by:
+ 1) The highest priority file change in the CL (changing crashed line is
+ higher priority than just changing the file).
+ 2) The callstack this match is computed (crash stack, freed, allocation).
+ 3) The minimum stack frame number of the changed file in the match.
+ 4) The number of files this CL changes (higher the better).
+ 5) The minimum distance between the lines that the CL changes and crashed
+ lines.
+
+ Args:
+ match_with_stack_priority: A match object, with the CL it is from and what
+ callstack it is from.
+
+ Returns:
+ A sort key.
+ """
+ (stack_priority, _, match) = match_with_stack_priority
+
+ return (min(match.priorities),
+ stack_priority,
+ match.min_distance,
+ crash_utils.FindMinStackFrameNumber(match.stack_frame_indices,
+ match.priorities),
+ -len(match.changed_files))
+
+
+def SortAndFilterMatches(matches, num_important_frames=5):
+ """Filters the list of potential culprit CLs to remove noise.
+
+ Args:
+ matches: A list containing match results.
+ num_important_frames: A number of frames on the top of the frame to check
+ for when filtering the results. A match with a file
+ that is in top num_important_frames of the stacktrace
+ is regarded more probable than others.
+
+ Returns:
+ Filtered match results.
+ """
+ new_matches = []
+ line_changed = False
+ is_important_frame = False
+ highest_priority_stack = crash_utils.INFINITY
+ matches.sort(key=SortMatchesFunction)
+ # Iterate through the matches to find out what results are significant.
+ for stack_priority, cl, match in matches:
+ # Check if the current match changes crashed line.
+ is_line_change = (min(match.priorities) == LINE_CHANGE_PRIORITY)
+
+ # Check which stack this match is from, and finds the highest priority
+ # callstack up to this point.
+ current_stack = stack_priority
+ if current_stack < highest_priority_stack:
+ highest_priority_stack = current_stack
+
+ # Check if current match changes a file that occurs in crash state.
+ flattened_stack_frame_indices = [frame for frame_indices in
+ match.stack_frame_indices
+ for frame in frame_indices]
+ current_is_important = (
+ min(flattened_stack_frame_indices) < num_important_frames)
+
+ # This match and anything lower than this should be ignored if:
+ # - Current match does not change crashed lines but there are matches
+ # that do so.
+ # - Current match is not in crash state but there are matches in it.
+ # - There are other matches that came from higher priority stack.
+ if (line_changed and not is_line_change) or (
+ is_important_frame and not current_is_important) or (
+ current_stack > highest_priority_stack):
+ break
+
+ # Update the variables.
+ if is_line_change:
+ line_changed = True
+ if current_is_important:
+ is_important_frame = True
+
+ # Add current match to the filtered result.
+ new_matches.append((stack_priority, cl, match))
+
+ return new_matches
+
+
+def GenerateReasonForMatches(matches):
+ """Generates a reason that a match (CL) is a culprit cl.
+
+ Args:
+ matches: A list of match objects.
+ """
+ # Iterate through the matches in the list.
+ for i, _, match in matches:
+ reason = []
+
+ # Zip the files in the match by the reason they are suspected
+ # (how the file is modified).
+ match_by_priority = zip(
+ match.priorities, match.crashed_line_numbers, match.changed_files,
+ match.stack_frame_indices, match.function_list)
+
+ # Sort the zipped changed files in the match by their priority so that the
+ # changed lines comes first in the reason.
+ match_by_priority.sort(
+ key=lambda (priority, crashed_line_numbers, file_name,
+ stack_frame_indices, function_list): priority)
+
+ # Iterate through the sorted match.
+ for i in range(len(match_by_priority)):
+ (priority, crashed_line_numbers, file_name, stack_frame_indices,
+ function_list) = match_by_priority[i]
+
+ # If the file in the match is a line change, append an explanation.
+ if priority == LINE_CHANGE_PRIORITY:
+ crashed_line_numbers = [crashed_line_number
+ for lines in crashed_line_numbers
+ for crashed_line_number in lines]
+ reason.append(
+ 'Lines %s of file %s which potentially caused crash '
+ 'are changed in this cl (%s).\n' %
+ (utils.JoinLineNumbers(crashed_line_numbers, accepted_gap=4),
+ file_name,
+ crash_utils.PrettifyFrameInfo(stack_frame_indices, function_list)))
+
+ else:
+ # Get all the files that are not line change.
+ rest_of_the_files = match_by_priority[i:]
+
+ if len(rest_of_the_files) == 1:
+ file_string = 'File %s is changed in this cl '
+ else:
+ file_string = 'Files %s are changed in this cl '
+
+ # Create a list of file names, and prettify the list.
+ file_names = [
+ file_name for (_, _, file_name, _, _) in rest_of_the_files]
+ pretty_file_names = crash_utils.PrettifyList(file_names)
+
+ # Add the reason, break because we took care of the rest of the files.
+ file_string += ('(and is part of stack %s)' %
+ crash_utils.PrettifyFrameInfo(stack_frame_indices, function_list))
+ reason.append(file_string % pretty_file_names)
+ break
+
+ # Set the reason as string.
+ match.reason = '\n'.join(reason)
+
+
+def CombineMatches(matches):
+ """Combine possible duplicates in matches.
+
+ Args:
+ matches: A list of matches object, along with its callstack priority and
+ CL it is from.
+ Returns:
+ A combined list of matches.
+ """
+ combined_matches = []
+
+ for stack_index, cl, match in matches:
+ found_match = None
+
+ # Iterate through the list of combined matches.
+ for _, cl_combined, match_combined in combined_matches:
+ # Check for if current CL is already higher up in the result.
+ if cl == cl_combined:
+ found_match = match_combined
+ break
+
+ # If current match is not already in, add it to the list of matches.
+ if not found_match:
+ combined_matches.append((stack_index, cl, match))
+ continue
+
+ # Combine the reason if the current match is already in there.
+ found_match.reason += '\n' + match.reason
+ if match.min_distance < found_match.min_distance:
+ found_match.min_distance = match.min_distance
+ found_match.min_distance_info = match.min_distance_info
+
+ for stack_index, cl, match in combined_matches:
+ if match.min_distance_info:
+ file_name, min_crashed_line, min_changed_line = match.min_distance_info
+ match.reason = match.reason.strip()
+ match.reason += (
+ '\nMinimum distance from crash line to modified line: %d. '
+ '(file: %s, crashed on: %d, modified: %d).' %
+ (match.min_distance, file_name, min_crashed_line, min_changed_line))
+
+ return combined_matches
+
+
+def FilterAndGenerateReasonForMatches(result):
+ """A wrapper function.
+
+ It generates reasons for the matches and returns string representation
+ of filtered results.
+
+ Args:
+ result: A list of match objects.
+
+ Returns:
+ A string representation of filtered results.
+ """
+ new_result = SortAndFilterMatches(result)
+ GenerateReasonForMatches(new_result)
+ combined_matches = CombineMatches(new_result)
+ return crash_utils.MatchListToResultList(combined_matches)
+
+
+def ParseCrashComponents(main_stack):
+ """Parses the crashing component.
+
+ Crashing components are components that the top_n_frames of the stacktrace
+ are from.
+
+ Args:
+ main_stack: Main stack from the stacktrace.
+
+ Returns:
+ A set of components.
+ """
+ components = set()
+
+ for frame in main_stack.frame_list:
+ components.add(frame.component_path)
+
+ return components
+
+
+def GenerateAndFilterBlameList(callstack, component_to_crash_revision_dict,
+ component_to_regression_dict):
+ """A wrapper function.
+
+ Finds blame information for stack and returns string representation.
+
+ Args:
+ callstack: A callstack to find the blame information.
+ component_to_crash_revision_dict: A dictionary mapping component to its
+ crash revision.
+ component_to_regression_dict: A dictionary mapping component to its
+ regression.
+
+ Returns:
+ A list of blame results.
+ """
+ if component_to_regression_dict:
+ parsed_deps = component_to_regression_dict
+ else:
+ parsed_deps = component_to_crash_revision_dict
+
+ # Setup parser objects to use for parsing blame information.
+ svn_parser = svn_repository_parser.SVNParser(CONFIG['svn'])
+ git_parser = git_repository_parser.GitParser(parsed_deps, CONFIG['git'])
+ parsers = {}
+ parsers['svn'] = svn_parser
+ parsers['git'] = git_parser
+
+ # Create and generate the blame objects from the callstack.
+ blame_list = blame.BlameList()
+ blame_list.FindBlame(callstack, component_to_crash_revision_dict,
+ component_to_regression_dict,
+ parsers)
+
+ blame_list.FilterAndSortBlameList()
+ return crash_utils.BlameListToResultList(blame_list)
+
+
+def FindItForCrash(stacktrace_list,
+ callstack,
+ component_to_regression_dict,
+ component_to_crash_revision_dict):
+ """Finds the culprit CL from the list of stacktrace.
+
+ Args:
+ stacktrace_list: A list of stacktraces to look for, in the order of
+ decreasing significance.
+ callstack: A callstack object to show blame information for, if there are
+ no results for all stacktraces in the stacktrace_list.
+ component_to_regression_dict: A parsed regression information as a
+ result of parsing DEPS file.
+ component_to_crash_revision_dict: A parsed crash revision information.
+
+ Returns:
+ A list of result objects, with the message how the result is created.
+ """
+ # If regression information is not available, return blame information.
+ if not component_to_regression_dict:
+ result = GenerateAndFilterBlameList(callstack,
+ component_to_crash_revision_dict,
+ component_to_regression_dict)
+ if result:
+ return_message = (
+ 'Regression information is not available. The result is '
+ 'the blame information.')
+ else:
+ return_message = ('Findit could not find any suspected CLs.')
+
+ return (return_message, result)
+
+ for stacktrace in stacktrace_list:
+ # Check the next stacktrace if current one is empty.
+ if not stacktrace.stack_list:
+ continue
+
+ # Get the crash stack for this stacktrace, and extract crashing components
+ # from it.
+ main_stack = stacktrace.GetCrashStack()
+ components = ParseCrashComponents(main_stack)
+
+ result_for_stacktrace = FindMatchForStacktrace(
+ stacktrace, components, component_to_regression_dict)
+ filtered_result = FilterAndGenerateReasonForMatches(result_for_stacktrace)
+
+ # If the result is empty, check the next stacktrace. Else, return the
+ # filtered result.
+ if not filtered_result:
+ continue
+
+ return_message = (
+ 'The result is a list of CLs that change the crashed files.')
+ return (return_message, filtered_result)
+
+ # If no match is found, return the blame information for the input
+ # callstack.
+ result = GenerateAndFilterBlameList(
+ callstack, component_to_crash_revision_dict,
+ component_to_regression_dict)
+
+ if result:
+ return_message = (
+ 'No CL in the regression range changes the crashed files. '
+ 'The result is the blame information.')
+
+ # When findit could not find any CL that changes file in stacktrace or if
+ # it cannot get any blame information, return a message saying that no
+ # results are available.
+ else:
+ return_message = ('Findit could not find any suspected CLs.')
+
+ return (return_message, result)
+
diff --git a/chromium/tools/findit/git_repository_parser.py b/chromium/tools/findit/git_repository_parser.py
new file mode 100644
index 00000000000..765da99b667
--- /dev/null
+++ b/chromium/tools/findit/git_repository_parser.py
@@ -0,0 +1,293 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import xml.dom.minidom as minidom
+from xml.parsers.expat import ExpatError
+
+import crash_utils
+from repository_parser_interface import ParserInterface
+
+FILE_CHANGE_TYPE_MAP = {
+ 'add': 'A',
+ 'copy': 'C',
+ 'delete': 'D',
+ 'modify': 'M',
+ 'rename': 'R'
+}
+
+
+def _ConvertToFileChangeType(file_action):
+ # TODO(stgao): verify impact on code that checks the file change type.
+ return file_action[0].upper()
+
+
+class GitParser(ParserInterface):
+ """Parser for Git repository in googlesource.
+
+ Attributes:
+ parsed_deps: A map from component path to its repository name, regression,
+ etc.
+ url_parts_map: A map from url type to its url parts. This parts are added
+ the base url to form different urls.
+ """
+
+ def __init__(self, parsed_deps, url_parts_map):
+ self.component_to_url_map = parsed_deps
+ self.url_parts_map = url_parts_map
+
+ def ParseChangelog(self, component_path, range_start, range_end):
+ file_to_revision_map = {}
+ revision_map = {}
+ base_url = self.component_to_url_map[component_path]['repository']
+ changelog_url = base_url + self.url_parts_map['changelog_url']
+ revision_url = base_url + self.url_parts_map['revision_url']
+
+ # Retrieve data from the url, return empty maps if it fails. Html url is a
+ # url where the changelog can be parsed from html.
+ url = changelog_url % (range_start, range_end)
+ html_url = url + '?pretty=fuller'
+ response = crash_utils.GetDataFromURL(html_url)
+ if not response:
+ return (revision_map, file_to_revision_map)
+
+ # Parse xml out of the returned string. If it fails, try parsing
+ # from JSON objects.
+ try:
+ dom = minidom.parseString(response)
+ except ExpatError:
+ self.ParseChangelogFromJSON(range_start, range_end, changelog_url,
+ revision_url, revision_map,
+ file_to_revision_map)
+ return (revision_map, file_to_revision_map)
+
+ # The revisions information are in from the third divs to the second
+ # to last one.
+ divs = dom.getElementsByTagName('div')[2:-1]
+ pres = dom.getElementsByTagName('pre')
+ uls = dom.getElementsByTagName('ul')
+
+ # Divs, pres and uls each contain revision information for one CL, so
+ # they should have same length.
+ if not divs or len(divs) != len(pres) or len(pres) != len(uls):
+ self.ParseChangelogFromJSON(range_start, range_end, changelog_url,
+ revision_url, revision_map,
+ file_to_revision_map)
+ return (revision_map, file_to_revision_map)
+
+ # Iterate through divs and parse revisions
+ for (div, pre, ul) in zip(divs, pres, uls):
+ # Create new revision object for each revision.
+ revision = {}
+
+ # There must be three <tr>s. If not, this page is wrong.
+ trs = div.getElementsByTagName('tr')
+ if len(trs) != 3:
+ continue
+
+ # Retrieve git hash.
+ githash = trs[0].getElementsByTagName('a')[0].firstChild.nodeValue
+
+ # Retrieve and set author.
+ author = trs[1].getElementsByTagName(
+ 'td')[0].firstChild.nodeValue.split('<')[0]
+ revision['author'] = author
+ revision['time'] = trs[1].getElementsByTagName(
+ 'td')[1].firstChild.nodeValue
+
+ # Retrieve and set message.
+ revision['message'] = pre.firstChild.nodeValue
+
+ # Set url of this CL.
+ revision_url_part = self.url_parts_map['revision_url'] % githash
+ revision['url'] = base_url + revision_url_part
+
+ # Go through changed files, they are in li.
+ lis = ul.getElementsByTagName('li')
+ for li in lis:
+ # Retrieve path and action of the changed file
+ file_path = li.getElementsByTagName('a')[0].firstChild.nodeValue
+ file_change_type = li.getElementsByTagName('span')[
+ 0].getAttribute('class')
+
+ # Normalize file action so that it is same as SVN parser.
+ file_change_type = _ConvertToFileChangeType(file_change_type)
+
+ # Add the changed file to the map.
+ if file_path not in file_to_revision_map:
+ file_to_revision_map[file_path] = []
+ file_to_revision_map[file_path].append((githash, file_change_type))
+
+ # Add this revision object to the map.
+ revision_map[githash] = revision
+
+ # Parse one revision for the start range, because googlesource does not
+ # include the start of the range.
+ self.ParseRevision(revision_url, range_start, revision_map,
+ file_to_revision_map)
+
+ return (revision_map, file_to_revision_map)
+
+ def ParseChangelogFromJSON(self, range_start, range_end, changelog_url,
+ revision_url, revision_map, file_to_revision_map):
+ """Parses changelog by going over the JSON file.
+
+ Args:
+ range_start: Starting range of the regression.
+ range_end: Ending range of the regression.
+ changelog_url: The url to retrieve changelog from.
+ revision_url: The url to retrieve individual revision from.
+ revision_map: A map from a git hash number to its revision information.
+ file_to_revision_map: A map from file to a git hash in which it occurs.
+ """
+ # Compute URLs from given range, and retrieves changelog. Stop if it fails.
+ changelog_url %= (range_start, range_end)
+ json_url = changelog_url + '?format=json'
+ response = crash_utils.GetDataFromURL(json_url)
+ if not response:
+ return
+
+ # Parse changelog from the returned object. The returned string should
+ # start with ")}]'\n", so start from the 6th character.
+ revisions = crash_utils.LoadJSON(response[5:])
+ if not revisions:
+ return
+
+ # Parse individual revision in the log.
+ for revision in revisions['log']:
+ githash = revision['commit']
+ self.ParseRevision(revision_url, githash, revision_map,
+ file_to_revision_map)
+
+ # Parse the revision with range_start, because googlesource ignores
+ # that one.
+ self.ParseRevision(revision_url, range_start, revision_map,
+ file_to_revision_map)
+
+ def ParseRevision(self, revision_url, githash, revision_map,
+ file_to_revision_map):
+
+ # Retrieve data from the URL, return if it fails.
+ url = revision_url % githash
+ response = crash_utils.GetDataFromURL(url + '?format=json')
+ if not response:
+ return
+
+ # Load JSON object from the string. If it fails, terminate the function.
+ json_revision = crash_utils.LoadJSON(response[5:])
+ if not json_revision:
+ return
+
+ # Create a map representing object and get githash from the JSON object.
+ revision = {}
+ githash = json_revision['commit']
+
+ # Set author, message and URL of this CL.
+ revision['author'] = json_revision['author']['name']
+ revision['time'] = json_revision['author']['time']
+ revision['message'] = json_revision['message']
+ revision['url'] = url
+
+ # Iterate through the changed files.
+ for diff in json_revision['tree_diff']:
+ file_path = diff['new_path']
+ file_change_type = diff['type']
+
+ # Normalize file action so that it fits with svn_repository_parser.
+ file_change_type = _ConvertToFileChangeType(file_change_type)
+
+ # Add the file to the map.
+ if file_path not in file_to_revision_map:
+ file_to_revision_map[file_path] = []
+ file_to_revision_map[file_path].append((githash, file_change_type))
+
+ # Add this CL to the map.
+ revision_map[githash] = revision
+
+ return
+
+ def ParseLineDiff(self, path, component, file_change_type, githash):
+ changed_line_numbers = []
+ changed_line_contents = []
+ base_url = self.component_to_url_map[component]['repository']
+ backup_url = (base_url + self.url_parts_map['revision_url']) % githash
+
+ # If the file is added (not modified), treat it as if it is not changed.
+ if file_change_type in ('A', 'C', 'R'):
+ # TODO(stgao): Maybe return whole file change for Add, Rename, and Copy?
+ return (backup_url, changed_line_numbers, changed_line_contents)
+
+ # Retrieves the diff data from URL, and if it fails, return empty lines.
+ url = (base_url + self.url_parts_map['diff_url']) % (githash, path)
+ data = crash_utils.GetDataFromURL(url + '?format=text')
+ if not data:
+ return (backup_url, changed_line_numbers, changed_line_contents)
+
+ # Decode the returned object to line diff info
+ diff = base64.b64decode(data).splitlines()
+
+ # Iterate through the lines in diff. Set current line to -1 so that we know
+ # that current line is part of the diff chunk.
+ current_line = -1
+ for line in diff:
+ line = line.strip()
+
+ # If line starts with @@, a new chunk starts.
+ if line.startswith('@@'):
+ current_line = int(line.split('+')[1].split(',')[0])
+
+ # If we are in a chunk.
+ elif current_line != -1:
+ # If line is either added or modified.
+ if line.startswith('+'):
+ changed_line_numbers.append(current_line)
+ changed_line_contents.append(line[2:])
+
+ # Do not increment current line if the change is 'delete'.
+ if not line.startswith('-'):
+ current_line += 1
+
+ # Return url without '?format=json'
+ return (url, changed_line_numbers, changed_line_contents)
+
+ def ParseBlameInfo(self, component, file_path, line, revision):
+ base_url = self.component_to_url_map[component]['repository']
+
+ # Retrieve blame JSON file from googlesource. If it fails, return None.
+ url_part = self.url_parts_map['blame_url'] % (revision, file_path)
+ blame_url = base_url + url_part
+ json_string = crash_utils.GetDataFromURL(blame_url)
+ if not json_string:
+ return
+
+ # Parse JSON object from the string. The returned string should
+ # start with ")}]'\n", so start from the 6th character.
+ annotation = crash_utils.LoadJSON(json_string[5:])
+ if not annotation:
+ return
+
+ # Go through the regions, which is a list of consecutive lines with same
+ # author/revision.
+ for blame_line in annotation['regions']:
+ start = blame_line['start']
+ count = blame_line['count']
+
+ # For each region, check if the line we want the blame info of is in this
+ # region.
+ if start <= line and line <= start + count - 1:
+ # If we are in the right region, get the information from the line.
+ revision = blame_line['commit']
+ author = blame_line['author']['name']
+ revision_url_parts = self.url_parts_map['revision_url'] % revision
+ revision_url = base_url + revision_url_parts
+ # TODO(jeun): Add a way to get content from JSON object.
+ content = None
+
+ (revision_info, _) = self.ParseChangelog(component, revision, revision)
+ message = revision_info[revision]['message']
+ time = revision_info[revision]['time']
+ return (content, revision, author, revision_url, message, time)
+
+ # Return none if the region does not exist.
+ return None
diff --git a/chromium/tools/findit/match_set.py b/chromium/tools/findit/match_set.py
new file mode 100644
index 00000000000..52114b8c2bb
--- /dev/null
+++ b/chromium/tools/findit/match_set.py
@@ -0,0 +1,128 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+
+from threading import Lock
+
+import crash_utils
+
+
+REVIEW_URL_PATTERN = re.compile(r'Review URL:( *)(.*?)/(\d+)')
+
+
+class Match(object):
+ """Represents a match entry.
+
+ A match is a CL that is suspected to have caused the crash. A match object
+ contains information about files it changes, their authors, etc.
+
+ Attributes:
+ is_revert: True if this CL is reverted by other CL.
+ revert_of: If this CL is a revert of some other CL, a revision number/
+ git hash of that CL.
+ crashed_line_numbers: The list of lines that caused crash for this CL.
+ function_list: The list of functions that caused the crash.
+ min_distance: The minimum distance between the lines that CL changed and
+ lines that caused the crash.
+ changed_files: The list of files that the CL changed.
+ changed_file_urls: The list of URLs for the file.
+ author: The author of the CL.
+ component_name: The name of the component that this CL belongs to.
+ stack_frame_indices: For files that caused crash, list of where in the
+ stackframe they occur.
+ priorities: A list of priorities for each of the changed file. A priority
+ is 1 if the file changes a crashed line, and 2 if it changes
+ the file but not the crashed line.
+    revision_url: The revision URL of the CL.
+ review_url: The codereview URL that reviews this CL.
+ reviewers: The list of people that reviewed this CL.
+ reason: The reason why this CL is suspected.
+ time: When this CL was committed.
+ """
+ REVERT_PATTERN = re.compile(r'(revert\w*) r?(\d+)', re.I)
+
+ def __init__(self, revision, component_name):
+ self.is_revert = False
+ self.revert_of = None
+ self.message = None
+ self.crashed_line_numbers = []
+ self.function_list = []
+ self.min_distance = crash_utils.INFINITY
+ self.min_distance_info = None
+ self.changed_files = []
+ self.changed_file_urls = []
+ self.author = revision['author']
+ self.component_name = component_name
+ self.stack_frame_indices = []
+ self.priorities = []
+ self.revision_url = revision['url']
+ self.review_url = ''
+ self.reviewers = []
+ self.reason = None
+ self.time = revision['time']
+
+ def ParseMessage(self, message, codereview_api_url):
+ """Parses the message.
+
+ It checks the message to extract the code review website and list of
+ reviewers, and it also checks if the CL is a revert of another CL.
+
+ Args:
+ message: The message to parse.
+ codereview_api_url: URL to retrieve codereview data from.
+ """
+ self.message = message
+ for line in message.splitlines():
+ line = line.strip()
+ review_url_line_match = REVIEW_URL_PATTERN.match(line)
+
+ # Check if the line has the code review information.
+ if review_url_line_match:
+
+ # Get review number for the code review site from the line.
+ issue_number = review_url_line_match.group(3)
+
+ # Get JSON from the code review site, ignore the line if it fails.
+ url = codereview_api_url % issue_number
+ json_string = crash_utils.GetDataFromURL(url)
+ if not json_string:
+ continue
+
+ # Load the JSON from the string, and get the list of reviewers.
+ code_review = crash_utils.LoadJSON(json_string)
+ if code_review:
+ self.reviewers = code_review['reviewers']
+
+ # Check if this CL is a revert of other CL.
+ if line.lower().startswith('revert'):
+ self.is_revert = True
+
+ # Check if the line says what CL this CL is a revert of.
+ revert = self.REVERT_PATTERN.match(line)
+ if revert:
+ self.revert_of = revert.group(2)
+ return
+
+
+class MatchSet(object):
+  """Represents a set of matches.
+
+  Attributes:
+    matches: A map from CL to a match object.
+    cls_to_ignore: A set of CLs to ignore.
+    matches_lock: A lock guarding matches dictionary.
+  """
+
+  def __init__(self, codereview_api_url):
+    self.codereview_api_url = codereview_api_url
+    self.matches = {}
+    self.cls_to_ignore = set()
+    self.matches_lock = Lock()
+
+  def RemoveRevertedCLs(self):
+    """Removes reverted CLs; iterates a key snapshot since we delete inside."""
+    for cl in list(self.matches):
+      if cl in self.cls_to_ignore:
+        del self.matches[cl]
diff --git a/chromium/tools/findit/repository_parser_interface.py b/chromium/tools/findit/repository_parser_interface.py
new file mode 100644
index 00000000000..aca4c62b9c8
--- /dev/null
+++ b/chromium/tools/findit/repository_parser_interface.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class ParserInterface(object):
+ """The interface for parsers."""
+
+ def ParseChangelog(self, component_path, range_start, range_end):
+ """Parses changelog from the URL stored in the parser object.
+
+ Args:
+ component_path: A string, path of the component. Path is used because
+ path is unique while component name is not.
+ range_start: The start range of the regression.
+ range_end: The end range of the regression.
+
+ Returns:
+ A tuple containing revision_map and file_to_revision_map,
+ revision_map maps a CL number to a dictionary containing revision
+ information such as author, commit message and the revision url.
+ file_to_revision_map maps a name of a file to a tuple containing the CL
+ number and path of the file that CL changes.
+ """
+ raise NotImplementedError()
+
+ def ParseLineDiff(self, path, component, file_action, githash):
+ """Parses the line diff of the given hash.
+
+ Args:
+ path: The path of the file.
+ component: The component the file is from.
+ file_action: Whether file is modified, deleted or added.
+ githash: The git hashcode to check the line diff.
+
+ Returns:
+ url: The URL of the diff page, returns the changelog page for the
+ file if the diff cannot be retrieved.
+ changed_line_numbers: The list of the line numbers that has been
+ changed.
+ changed_line_contents: The content of the changed lines.
+ """
+ raise NotImplementedError()
+
+ def ParseBlameInfo(self, component, file_path, line, revision):
+ """Parses blame information of the given file/line in revision.
+
+ Args:
+ component: The component this line is from.
+ file_path: The path of the file.
+ line: The line that caused the crash.
+ revision: The revision to parse blame information for.
+
+ Returns:
+ The content of the line, the last changed revision of the line, author
+ and the url of the revision.
+ """
+ raise NotImplementedError()
diff --git a/chromium/tools/findit/result.py b/chromium/tools/findit/result.py
new file mode 100644
index 00000000000..360a69bb6be
--- /dev/null
+++ b/chromium/tools/findit/result.py
@@ -0,0 +1,19 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class Result(object):
+
+ def __init__(self, suspected_cl, revision_url, component_name, author,
+ reason, review_url, reviewers, line_content, message, time):
+ self.suspected_cl = suspected_cl
+ self.revision_url = revision_url
+ self.component_name = component_name
+ self.author = author
+ self.reason = reason
+ self.review_url = review_url
+ self.reviewers = reviewers
+ self.line_content = line_content
+ self.commit_message = message
+ self.time = time
diff --git a/chromium/tools/findit/run_all_tests.py b/chromium/tools/findit/run_all_tests.py
new file mode 100755
index 00000000000..230537a6176
--- /dev/null
+++ b/chromium/tools/findit/run_all_tests.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+import unittest
+
+from chromium_deps_unittest import ChromiumDEPSTest
+from common.http_client_local_unittest import HttpClientLocalTest
+
+
+if __name__ == '__main__':
+ all_tests_suite = unittest.defaultTestLoader.loadTestsFromModule(
+ sys.modules[__name__])
+ tests = unittest.TestSuite(all_tests_suite)
+ result = unittest.TextTestRunner(stream=sys.stdout, verbosity=2).run(tests)
+ sys.exit(len(result.failures) + len(result.errors))
diff --git a/chromium/tools/findit/stacktrace.py b/chromium/tools/findit/stacktrace.py
new file mode 100644
index 00000000000..137e6fc92f9
--- /dev/null
+++ b/chromium/tools/findit/stacktrace.py
@@ -0,0 +1,321 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+
+import crash_utils
+
+
+SYZYASAN_STACK_FRAME_PATTERN = re.compile(
+ r'(CF: )?(.*?)( \(FPO: .*\) )?( \(CONV: .*\) )?\[(.*) @ (\d+)\]')
+FILE_PATH_AND_LINE_PATTERN = re.compile(r'(.*?):(\d+)(:\d+)?')
+
+
+class StackFrame(object):
+ """Represents a frame in stacktrace.
+
+ Attributes:
+ index: An index of the stack frame.
+ component_path: The path of the component this frame represents.
+ component_name: The name of the component this frame represents.
+ file_name: The name of the file that crashed.
+ function: The function that caused the crash.
+ file_path: The path of the crashed file.
+ crashed_line_range: The line of the file that caused the crash.
+ """
+
+ def __init__(self, stack_frame_index, component_path, component_name,
+ file_name, function, file_path, crashed_line_range):
+ self.index = stack_frame_index
+ self.component_path = component_path
+ self.component_name = component_name
+ self.file_name = file_name
+ self.function = function
+ self.file_path = file_path
+ self.crashed_line_range = crashed_line_range
+
+
+class CallStack(object):
+ """Represents a call stack within a stacktrace.
+
+ It is a list of StackFrame object, and the object keeps track of whether
+ the stack is crash stack, freed or previously-allocated.
+ """
+
+ def __init__(self, stack_priority):
+ self.frame_list = []
+ self.priority = stack_priority
+
+ def Add(self, stacktrace_line):
+ self.frame_list.append(stacktrace_line)
+
+ def GetTopNFrames(self, n):
+ return self.frame_list[:n]
+
+
+class Stacktrace(object):
+ """Represents Stacktrace object.
+
+ Contains a list of callstacks, because one stacktrace might have more than
+ one callstacks.
+ """
+
+ def __init__(self, stacktrace, build_type, parsed_deps):
+ self.stack_list = None
+ self.ParseStacktrace(stacktrace, build_type, parsed_deps)
+
+ def ParseStacktrace(self, stacktrace, build_type, parsed_deps):
+ """Parses stacktrace and normalizes it.
+
+ If there are multiple callstacks within the stacktrace,
+ it will parse each of them separately, and store them in the stack_list
+ variable.
+
+ Args:
+      stacktrace: The stacktrace, iterated line-by-line (presumably a list of
+ build_type: A string containing the build type of the crash.
+ parsed_deps: A parsed DEPS file to normalize path with.
+ """
+ # If the passed in string is empty, the object does not represent anything.
+ if not stacktrace:
+ return
+ # Reset the stack list.
+ self.stack_list = []
+ reached_new_callstack = False
+ # Note that we do not need exact stack frame index, we only need relative
+ # position of a frame within a callstack. The reason for not extracting
+ # index from a line is that some stack frames do not have index.
+ stack_frame_index = 0
+ current_stack = CallStack(-1)
+
+ for line in stacktrace:
+ line = line.strip()
+ (is_new_callstack, stack_priority) = self.__IsStartOfNewCallStack(
+ line, build_type)
+ if is_new_callstack:
+ # If this callstack is crash stack, update the boolean.
+ if not reached_new_callstack:
+ reached_new_callstack = True
+ current_stack = CallStack(stack_priority)
+
+ # If this is from freed or allocation, add the callstack we have
+ # to the list of callstacks, and increment the stack priority.
+ else:
+ stack_frame_index = 0
+ if current_stack and current_stack.frame_list:
+ self.stack_list.append(current_stack)
+ current_stack = CallStack(stack_priority)
+
+ # Generate stack frame object from the line.
+ parsed_stack_frame = self.__GenerateStackFrame(
+ stack_frame_index, line, build_type, parsed_deps)
+
+ # If the line does not represent the stack frame, ignore this line.
+ if not parsed_stack_frame:
+ continue
+
+ # Add the parsed stack frame object to the current stack.
+ current_stack.Add(parsed_stack_frame)
+ stack_frame_index += 1
+
+ # Add the current callstack only if there are frames in it.
+ if current_stack and current_stack.frame_list:
+ self.stack_list.append(current_stack)
+
+ def __IsStartOfNewCallStack(self, line, build_type):
+ """Check if this line is the start of the new callstack.
+
+    Since each build has a different stacktrace format, the logic for
+ checking the line for all builds is handled in here.
+
+ Args:
+ line: Line to check for.
+ build_type: The name of the build.
+
+ Returns:
+ True if the line is the start of new callstack, False otherwise. If True,
+ it also returns the priority of the line.
+ """
+ if 'syzyasan' in build_type:
+ # In syzyasan build, new stack starts with 'crash stack:',
+ # 'freed stack:', etc.
+ callstack_start_pattern = re.compile(r'^(.*) stack:$')
+ match = callstack_start_pattern.match(line)
+
+ # If the line matches the callstack start pattern.
+ if match:
+ # Check the type of the new match.
+ stack_type = match.group(1)
+
+ # Crash stack gets priority 0.
+ if stack_type == 'Crash':
+ return (True, 0)
+
+ # Other callstacks all get priority 1.
+ else:
+ return (True, 1)
+
+ elif 'tsan' in build_type:
+ # Create patterns for each callstack type.
+ crash_callstack_start_pattern1 = re.compile(
+ r'^(Read|Write) of size \d+')
+
+ crash_callstack_start_pattern2 = re.compile(
+ r'^[A-Z]+: ThreadSanitizer')
+
+ allocation_callstack_start_pattern = re.compile(
+ r'^Previous (write|read) of size \d+')
+
+ location_callstack_start_pattern = re.compile(
+ r'^Location is heap block of size \d+')
+
+ # Crash stack gets priority 0.
+ if (crash_callstack_start_pattern1.match(line) or
+ crash_callstack_start_pattern2.match(line)):
+ return (True, 0)
+
+ # All other stacks get priority 1.
+ if allocation_callstack_start_pattern.match(line):
+ return (True, 1)
+
+ if location_callstack_start_pattern.match(line):
+ return (True, 1)
+
+ else:
+ # In asan and other build types, crash stack can start
+ # in two different ways.
+ crash_callstack_start_pattern1 = re.compile(r'^==\d+== ?[A-Z]+:')
+ crash_callstack_start_pattern2 = re.compile(
+ r'^(READ|WRITE) of size \d+ at')
+ crash_callstack_start_pattern3 = re.compile(r'^backtrace:')
+
+ freed_callstack_start_pattern = re.compile(
+ r'^freed by thread T\d+ (.* )?here:')
+
+ allocation_callstack_start_pattern = re.compile(
+ r'^previously allocated by thread T\d+ (.* )?here:')
+
+ other_callstack_start_pattern = re.compile(
+ r'^Thread T\d+ (.* )?created by')
+
+ # Crash stack gets priority 0.
+ if (crash_callstack_start_pattern1.match(line) or
+ crash_callstack_start_pattern2.match(line) or
+ crash_callstack_start_pattern3.match(line)):
+ return (True, 0)
+
+ # All other callstack gets priority 1.
+ if freed_callstack_start_pattern.match(line):
+ return (True, 1)
+
+ if allocation_callstack_start_pattern.match(line):
+ return (True, 1)
+
+ if other_callstack_start_pattern.match(line):
+ return (True, 1)
+
+ # If the line does not match any pattern, return false and a dummy for
+ # stack priority.
+ return (False, -1)
+
+ def __GenerateStackFrame(self, stack_frame_index, line, build_type,
+ parsed_deps):
+ """Extracts information from a line in stacktrace.
+
+ Args:
+ stack_frame_index: A stack frame index of this line.
+ line: A stacktrace string to extract data from.
+ build_type: A string containing the build type
+ of this crash (e.g. linux_asan_chrome_mp).
+ parsed_deps: A parsed DEPS file to normalize path with.
+
+ Returns:
+ A triple containing the name of the function, the path of the file and
+ the crashed line number.
+ """
+ line_parts = line.split()
+ try:
+
+ if 'syzyasan' in build_type:
+ stack_frame_match = SYZYASAN_STACK_FRAME_PATTERN.match(line)
+
+ if not stack_frame_match:
+ return None
+ file_path = stack_frame_match.group(5)
+ crashed_line_range = [int(stack_frame_match.group(6))]
+ function = stack_frame_match.group(2)
+
+ else:
+ if not line_parts[0].startswith('#'):
+ return None
+
+ if 'tsan' in build_type:
+ file_path_and_line = line_parts[-2]
+ function = ' '.join(line_parts[1:-2])
+ else:
+ file_path_and_line = line_parts[-1]
+ function = ' '.join(line_parts[3:-1])
+
+ # Get file path and line info from the line.
+ file_path_and_line_match = FILE_PATH_AND_LINE_PATTERN.match(
+ file_path_and_line)
+
+ # Return None if the file path information is not available
+ if not file_path_and_line_match:
+ return None
+
+ file_path = file_path_and_line_match.group(1)
+
+ # Get the crashed line range. For example, file_path:line_number:range.
+ crashed_line_range_num = file_path_and_line_match.group(3)
+
+ if crashed_line_range_num:
+ # Strip ':' prefix.
+ crashed_line_range_num = int(crashed_line_range_num[1:])
+ else:
+ crashed_line_range_num = 0
+
+ crashed_line_number = int(file_path_and_line_match.group(2))
+ # For example, 655:1 has crashed lines 655 and 656.
+ crashed_line_range = \
+ range(crashed_line_number,
+ crashed_line_number + crashed_line_range_num + 1)
+
+ # Return None if the line is malformed.
+ except IndexError:
+ return None
+ except ValueError:
+ return None
+
+ # Normalize the file path so that it can be compared to repository path.
+ (component_path, component_name, file_path) = (
+ crash_utils.NormalizePath(file_path, parsed_deps))
+
+ # Return a new stack frame object with the parsed information.
+ file_name = file_path.split('/')[-1]
+
+ # If we have the common stack frame index pattern, then use it
+ # since it is more reliable.
+ index_match = re.match('\s*#(\d+)\s.*', line)
+ if index_match:
+ stack_frame_index = int(index_match.group(1))
+
+ return StackFrame(stack_frame_index, component_path, component_name,
+ file_name, function, file_path, crashed_line_range)
+
+ def __getitem__(self, index):
+ return self.stack_list[index]
+
+ def GetCrashStack(self):
+ """Returns the callstack with the highest priority.
+
+ Crash stack has priority 0, and allocation/freed/other thread stacks
+ get priority 1.
+
+ Returns:
+ The highest priority callstack in the stacktrace.
+ """
+ sorted_stacklist = sorted(self.stack_list,
+ key=lambda callstack: callstack.priority)
+ return sorted_stacklist[0]
diff --git a/chromium/tools/findit/svn_repository_parser.py b/chromium/tools/findit/svn_repository_parser.py
new file mode 100644
index 00000000000..64a4503d495
--- /dev/null
+++ b/chromium/tools/findit/svn_repository_parser.py
@@ -0,0 +1,250 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import xml.dom.minidom as minidom
+from xml.parsers.expat import ExpatError
+
+import crash_utils
+from repository_parser_interface import ParserInterface
+
+
+# This number is 6 because each linediff page in src.chromium.org should
+# contain the following tables: table with revision number, table with actual
+# diff, table with dropdown menu, table with legend, a border table and a table
+# containing page information.
+NUM_TABLES_IN_LINEDIFF_PAGE = 6
+# Each of the linediff info should contain 3 tds, one for changed line number,
+# and two for line contents before/after.
+NUM_TDS_IN_LINEDIFF_PAGE = 3
+
+
+class SVNParser(ParserInterface):
+ """Parser for SVN repository using chromium.org, for components in config.
+
+ Attributes:
+ url_map: A map from component to the urls, where urls are for changelog,
+ revision, line diff and annotation.
+ """
+
+ def __init__(self, url_map):
+ self.component_to_urls_map = url_map
+
+ def ParseChangelog(self, component, range_start, range_end):
+ file_to_revision_map = {}
+ revision_map = {}
+
+ # Check if the current component is supported by reading the components
+ # parsed from config file. If it is not, fail.
+
+ url_map = self.component_to_urls_map.get(component)
+ if not url_map:
+ return (revision_map, file_to_revision_map)
+
+ # Retrieve data from the url, return empty map if fails.
+ revision_range_str = '%s:%s' % (range_start, range_end)
+ url = url_map['changelog_url'] % revision_range_str
+ response = crash_utils.GetDataFromURL(url)
+ if not response:
+ return (revision_map, file_to_revision_map)
+
+ # Parse xml out of the returned string. If it fails, return empty map.
+ try:
+ xml_revisions = minidom.parseString(response)
+ except ExpatError:
+ return (revision_map, file_to_revision_map)
+
+ # Iterate through the returned XML object.
+ revisions = xml_revisions.getElementsByTagName('logentry')
+ for revision in revisions:
+ # Create new revision object for each of the revision.
+ revision_object = {}
+
+ # Set author of the CL.
+ revision_object['author'] = revision.getElementsByTagName(
+ 'author')[0].firstChild.nodeValue
+
+ # Get the revision number from xml.
+ revision_number = int(revision.getAttribute('revision'))
+
+ # Iterate through the changed paths in the CL.
+ paths = revision.getElementsByTagName('paths')
+ if paths:
+ for changed_path in paths[0].getElementsByTagName('path'):
+ # Get path and file change type from the xml.
+ file_path = changed_path.firstChild.nodeValue
+ file_change_type = changed_path.getAttribute('action')
+
+ if file_path.startswith('/trunk/'):
+ file_path = file_path[len('/trunk/'):]
+
+ # Add file to the map.
+ if file_path not in file_to_revision_map:
+ file_to_revision_map[file_path] = []
+ file_to_revision_map[file_path].append(
+ (revision_number, file_change_type))
+
+ # Set commit message of the CL.
+ revision_object['message'] = revision.getElementsByTagName('msg')[
+ 0].firstChild.nodeValue
+
+ # Set url of this CL.
+ revision_url = url_map['revision_url'] % revision_number
+ revision_object['url'] = revision_url
+
+ # Add this CL to the revision map.
+ revision_map[revision_number] = revision_object
+
+ return (revision_map, file_to_revision_map)
+
+ def ParseLineDiff(self, path, component, file_change_type, revision_number):
+ changed_line_numbers = []
+ changed_line_contents = []
+
+ url_map = self.component_to_urls_map.get(component)
+ if not url_map:
+ return (None, None, None)
+
+ # If the file is added (not modified), treat it as if it is not changed.
+ backup_url = url_map['revision_url'] % revision_number
+ if file_change_type == 'A':
+ return (backup_url, changed_line_numbers, changed_line_contents)
+
+ # Retrieve data from the url. If no data is retrieved, return empty lists.
+ url = url_map['diff_url'] % (path, revision_number - 1,
+ revision_number, revision_number)
+ data = crash_utils.GetDataFromURL(url)
+ if not data:
+ return (backup_url, changed_line_numbers, changed_line_contents)
+
+ line_diff_html = minidom.parseString(data)
+ tables = line_diff_html.getElementsByTagName('table')
+ # If there are not NUM_TABLES tables in the html page, there should be an
+ # error in the html page.
+ if len(tables) != NUM_TABLES_IN_LINEDIFF_PAGE:
+ return (backup_url, changed_line_numbers, changed_line_contents)
+
+ # Diff content is in the second table. Each line of the diff content
+ # is in <tr>.
+ trs = tables[1].getElementsByTagName('tr')
+ prefix_len = len('vc_diff_')
+
+ # Filter trs so that it only contains diff chunk with contents.
+ filtered_trs = []
+ for tr in trs:
+ tr_class = tr.getAttribute('class')
+
+ # Check for the classes of the <tr>s.
+ if tr_class:
+ tr_class = tr_class[prefix_len:]
+
+ # Do not have to add header.
+ if tr_class == 'header' or tr_class == 'chunk_header':
+ continue
+
+ # If the class of tr is empty, this page does not have any change.
+ if tr_class == 'empty':
+ return (backup_url, changed_line_numbers, changed_line_contents)
+
+ filtered_trs.append(tr)
+
+ # Iterate through filtered trs, and grab line diff information.
+ for tr in filtered_trs:
+ tds = tr.getElementsByTagName('td')
+
+      # If there aren't 3 tds, this line should not contain a line diff.
+ if len(tds) != NUM_TDS_IN_LINEDIFF_PAGE:
+ continue
+
+ # If line number information is not in hyperlink, ignore this line.
+ try:
+ line_num = tds[0].getElementsByTagName('a')[0].firstChild.nodeValue
+ left_diff_type = tds[1].getAttribute('class')[prefix_len:]
+ right_diff_type = tds[2].getAttribute('class')[prefix_len:]
+ except IndexError:
+ continue
+
+ # Treat the line as modified only if both left and right diff has type
+ # changed or both have different change type, and if the change is not
+ # deletion.
+ if (left_diff_type != right_diff_type) or (
+ left_diff_type == 'change' and right_diff_type == 'change'):
+
+ # Check if the line content is not empty.
+ try:
+ new_line = tds[2].firstChild.nodeValue
+ except AttributeError:
+ new_line = ''
+
+ if not (left_diff_type == 'remove' and right_diff_type == 'empty'):
+ changed_line_numbers.append(int(line_num))
+ changed_line_contents.append(new_line.strip())
+
+ return (url, changed_line_numbers, changed_line_contents)
+
+ def ParseBlameInfo(self, component, file_path, line, revision):
+ url_map = self.component_to_urls_map.get(component)
+ if not url_map:
+ return None
+
+ # Retrieve blame data from url, return None if fails.
+ url = url_map['blame_url'] % (file_path, revision, revision)
+ data = crash_utils.GetDataFromURL(url)
+ if not data:
+ return None
+
+ blame_html = minidom.parseString(data)
+
+ title = blame_html.getElementsByTagName('title')
+ # If the returned html page is an exception page, return None.
+ if title[0].firstChild.nodeValue == 'ViewVC Exception':
+ return None
+
+ # Each of the blame result is in <tr>.
+ blame_results = blame_html.getElementsByTagName('tr')
+ try:
+ blame_result = blame_results[line]
+ except IndexError:
+ return None
+
+ # There must be 4 <td> for each <tr>. If not, this page is wrong.
+ tds = blame_result.getElementsByTagName('td')
+ if len(tds) != 4:
+ return None
+
+ # The third <td> has the line content, separated by <span>s. Combine
+ # those to get a string of changed line. If it has nothing, the line
+ # is empty.
+ line_content = ''
+ if tds[3].hasChildNodes():
+ contents = tds[3].childNodes
+
+ for content in contents:
+ # Nodetype 3 means it is text node.
+ if content.nodeType == minidom.Node.TEXT_NODE:
+ line_content += content.nodeValue
+ else:
+ line_content += content.firstChild.nodeValue
+
+ line_content = line_content.strip()
+
+ # If the current line has the same author/revision as the previous lines,
+ # the result is not shown. Propagate up until we find the line with info.
+ while not tds[1].firstChild:
+ line -= 1
+ blame_result = blame_results[line]
+ tds = blame_result.getElementsByTagName('td')
+ author = tds[1].firstChild.nodeValue
+
+ # Revision can either be in hyperlink or plain text.
+ try:
+ revision = tds[2].getElementsByTagName('a')[0].firstChild.nodeValue
+ except IndexError:
+ revision = tds[2].firstChild.nodeValue
+
+ (revision_info, _) = self.ParseChangelog(component, revision, revision)
+ message = revision_info[int(revision)]['message']
+
+ # Return the parsed information.
+ revision_url = url_map['revision_url'] % int(revision)
+ return (line_content, revision, author, revision_url, message)
diff --git a/chromium/tools/flakiness/find_flakiness.py b/chromium/tools/flakiness/find_flakiness.py
new file mode 100755
index 00000000000..21629e4a7a6
--- /dev/null
+++ b/chromium/tools/flakiness/find_flakiness.py
@@ -0,0 +1,179 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Contains two functions that run different test cases and the same test
+case in parallel repeatedly to identify flaky tests.
+"""
+
+
+import os
+import re
+import subprocess
+import time
+
+
+# Defaults for FindShardingFlakiness().
+FF_DATA_SUFFIX = '_flakies'
+FF_SLEEP_INTERVAL = 10.0
+FF_NUM_ITERATIONS = 100
+FF_SUPERVISOR_ARGS = ['-r3', '--random-seed']
+
+# Defaults for FindUnaryFlakiness().
+FF_OUTPUT_SUFFIX = '_purges'
+FF_NUM_PROCS = 20
+FF_NUM_REPEATS = 10
+FF_TIMEOUT = 600
+
+
+def FindShardingFlakiness(test_path, data_path, supervisor_args):
+ """Finds flaky test cases by sharding and running a test for the specified
+ number of times. The data file is read at the beginning of each run to find
+ the last known counts and is overwritten at the end of each run with the new
+ counts. There is an optional sleep interval between each run so the script can
+ be killed without losing the data, useful for overnight (or weekend!) runs.
+ """
+
+ failed_tests = {}
+ # Read a previously written data file.
+ if os.path.exists(data_path):
+ data_file = open(data_path, 'r')
+ num_runs = int(data_file.readline().split(' ')[0])
+ num_passes = int(data_file.readline().split(' ')[0])
+ for line in data_file:
+ if line:
+ split_line = line.split(' -> ')
+ failed_tests[split_line[0]] = int(split_line[1])
+ data_file.close()
+ # No data file found.
+ else:
+ num_runs = 0
+ num_passes = 0
+
+ log_lines = False
+ args = ['python', '../sharding_supervisor/sharding_supervisor.py']
+ args.extend(supervisor_args + [test_path])
+ proc = subprocess.Popen(args, stderr=subprocess.PIPE)
+
+ # Shard the test and collect failures.
+ while True:
+ line = proc.stderr.readline()
+ if not line:
+ if proc.poll() is not None:
+ break
+ continue
+ print line.rstrip()
+ if log_lines:
+ line = line.rstrip()
+ if line in failed_tests:
+ failed_tests[line] += 1
+ else:
+ failed_tests[line] = 1
+ elif line.find('FAILED TESTS:') >= 0:
+ log_lines = True
+ num_runs += 1
+ if proc.returncode == 0:
+ num_passes += 1
+
+ # Write the data file and print results.
+ data_file = open(data_path, 'w')
+ print '%i runs' % num_runs
+ data_file.write('%i runs\n' % num_runs)
+ print '%i passes' % num_passes
+ data_file.write('%i passes\n' % num_passes)
+ for (test, count) in failed_tests.iteritems():
+ print '%s -> %i' % (test, count)
+ data_file.write('%s -> %i\n' % (test, count))
+ data_file.close()
+
+
+def FindUnaryFlakiness(test_path, output_path, num_procs, num_repeats, timeout):
+  """Runs all the test cases in a given test in parallel with itself, to get at
+  those that hold on to shared resources. The idea is that if a test uses a
+  unary resource, then running many instances of this test will purge out some
+  of them as failures or timeouts.
+  """
+
+  test_name_regex = r'((\w+/)?\w+\.\w+(/\d+)?)'
+  test_start = re.compile('\[\s+RUN\s+\] ' + test_name_regex)
+  test_list = []
+
+  # Run the test to discover all the test cases.
+  proc = subprocess.Popen([test_path], stdout=subprocess.PIPE)
+  while True:
+    line = proc.stdout.readline()
+    if not line:
+      if proc.poll() is not None:
+        break
+      continue
+    print line.rstrip()
+    results = test_start.search(line)
+    if results:
+      test_list.append(results.group(1))
+
+  failures = []
+  index = 0
+  total = len(test_list)
+
+  # Run each test case in parallel with itself.
+  for test_name in test_list:
+    num_fails = 0
+    num_terminated = 0
+    procs = []
+    args = [test_path, '--gtest_filter=' + test_name,
+            '--gtest_repeat=%i' % num_repeats]
+    while len(procs) < num_procs:
+      procs.append(subprocess.Popen(args))
+    seconds = 0
+    while procs:
+      for proc in procs[:]:  # Iterate a copy; remove() below would skip items.
+        if proc.poll() is not None:
+          if proc.returncode != 0:
+            num_fails += 1  # Was '++num_fails'; '++' is a no-op in Python.
+          procs.remove(proc)
+      # Timeout exceeded, kill the remaining processes and make a note.
+      if seconds > timeout:
+        num_fails += len(procs)
+        num_terminated = len(procs)
+        while procs:
+          procs.pop().terminate()
+      time.sleep(1.0)
+      seconds += 1
+    if num_fails:
+      line = '%s: %i failed' % (test_name, num_fails)
+      if num_terminated:
+        line += ' (%i terminated)' % num_terminated
+      failures.append(line)
+    print '%s (%i / %i): %i failed' % (test_name, index, total, num_fails)
+    index += 1
+    time.sleep(1.0)
+
+  # Print the results and write the data file.
+  print failures
+  data_file = open(output_path, 'w')
+  for line in failures:
+    data_file.write(line + '\n')
+  data_file.close()
+
+
+def main():
+  import optparse  # Local import; 'parser'/'args' were previously undefined.
+  parser = optparse.OptionParser(usage='%prog path/to/test')
+  _, args = parser.parse_args()
+  if not args:
+    parser.error('You must specify a path to test!')
+  if not os.path.exists(args[0]):
+    parser.error('%s does not exist!' % args[0])
+  data_path = os.path.basename(args[0]) + FF_DATA_SUFFIX
+  output_path = os.path.basename(args[0]) + FF_OUTPUT_SUFFIX
+  for i in range(FF_NUM_ITERATIONS):
+    FindShardingFlakiness(args[0], data_path, FF_SUPERVISOR_ARGS)
+    print 'That was just iteration %i of %i.' % (i + 1, FF_NUM_ITERATIONS)
+    time.sleep(FF_SLEEP_INTERVAL)
+  FindUnaryFlakiness(
+      args[0], output_path, FF_NUM_PROCS, FF_NUM_REPEATS, FF_TIMEOUT)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/chromium/tools/flakiness/is_flaky.py b/chromium/tools/flakiness/is_flaky.py
new file mode 100755
index 00000000000..8d1c367728d
--- /dev/null
+++ b/chromium/tools/flakiness/is_flaky.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs a test repeatedly to measure its flakiness. The return code is non-zero
+if the failure rate is higher than the specified threshold, but is not 100%."""
+
+import argparse
+import multiprocessing.dummy
+import subprocess
+import sys
+import time
+
def load_options():
  """Builds the command-line interface and parses the process arguments.

  Returns:
    An argparse.Namespace carrying retries, threshold, jobs and command.
  """
  arg_parser = argparse.ArgumentParser(description=__doc__)
  arg_parser.add_argument(
      '--retries', type=int, default=1000,
      help='Number of test retries to measure flakiness.')
  arg_parser.add_argument(
      '--threshold', type=float, default=0.05,
      help='Minimum flakiness level at which test is considered flaky.')
  arg_parser.add_argument(
      '--jobs', '-j', type=int, default=1,
      help='Number of parallel jobs to run tests.')
  arg_parser.add_argument('command', nargs='+', help='Command to run test.')
  return arg_parser.parse_args()
+
def run_test(job):
  """Runs one retry of the test command described by the |job| dict.

  Args:
    job: dict with 'index' (0-based retry number), 'retries' (total retry
        count, used only for the progress message) and 'cmd' (argv list).

  NOTE(review): subprocess.check_call raises CalledProcessError on a
  non-zero exit instead of returning the code, so a genuinely failing test
  would abort the pool rather than be counted by main(); subprocess.call
  looks like the intended API -- confirm. (The unit test replaces
  check_call with a stub that *returns* codes, which masks this.)
  """
  print 'Starting retry attempt %d out of %d' % (job['index'] + 1,
                                                 job['retries'])
  return subprocess.check_call(job['cmd'], stdout=subprocess.PIPE,
                               stderr=subprocess.STDOUT)
+
def main():
  """Runs the test --retries times on a thread pool and scores flakiness.

  Returns:
    1 if the observed failure rate exceeds --threshold but is below 100%,
    0 otherwise (an always-failing test is "broken", not flaky).
  """
  options = load_options()
  num_passed = num_failed = 0
  running = []  # NOTE(review): unused local; candidate for removal.

  # multiprocessing.dummy provides a *thread* pool with the Pool API, so
  # the subprocess launches overlap without forking extra interpreters.
  pool = multiprocessing.dummy.Pool(processes=options.jobs)
  args = [{'index': index, 'retries': options.retries, 'cmd': options.command}
          for index in range(options.retries)]
  results = pool.map(run_test, args)
  num_passed = len([retcode for retcode in results if retcode == 0])
  num_failed = len(results) - num_passed

  # Zero passes means consistently broken, which is deliberately reported
  # as zero flakiness (see the module docstring).
  if num_passed == 0:
    flakiness = 0
  else:
    flakiness = num_failed / float(len(results))

  print 'Flakiness is %.2f' % flakiness
  if flakiness > options.threshold:
    return 1
  else:
    return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/flakiness/is_flaky_test.py b/chromium/tools/flakiness/is_flaky_test.py
new file mode 100644
index 00000000000..21238841ed9
--- /dev/null
+++ b/chromium/tools/flakiness/is_flaky_test.py
@@ -0,0 +1,72 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for is_flaky."""
+
+import is_flaky
+import subprocess
+import sys
+import threading
+import unittest
+
+
class IsFlakyTest(unittest.TestCase):
  """Tests is_flaky.main() with subprocess and option loading stubbed out."""

  def setUp(self):
    # Swap in a recording stub for subprocess.check_call; the original is
    # restored in tearDown.
    self.original_subprocess_check_call = subprocess.check_call
    subprocess.check_call = self.mock_check_call
    self.check_call_calls = []    # every argv the code under test launched
    self.check_call_results = []  # queued return codes; 0 once exhausted
    # NOTE(review): is_flaky.load_options is replaced but never restored.
    is_flaky.load_options = self.mock_load_options

  def tearDown(self):
    subprocess.check_call = self.original_subprocess_check_call

  def mock_check_call(self, command, stdout, stderr):
    # Bound method: |self| is the test instance, matching how the stub was
    # installed in setUp. Records |command| and pops the next scripted code.
    self.check_call_calls.append(command)
    if self.check_call_results:
      return self.check_call_results.pop(0)
    else:
      return 0

  def mock_load_options(self):
    # Fixed options: 10 retries across 2 jobs, flaky above 30% failures.
    class MockOptions():
      jobs = 2
      retries = 10
      threshold = 0.3
      command = ['command', 'param1', 'param2']
    return MockOptions()

  def testExecutesTestCorrectNumberOfTimes(self):
    is_flaky.main()
    self.assertEqual(len(self.check_call_calls), 10)

  def testExecutesTestWithCorrectArguments(self):
    is_flaky.main()
    for call in self.check_call_calls:
      self.assertEqual(call, ['command', 'param1', 'param2'])

  def testReturnsNonFlakyForAllSuccesses(self):
    self.check_call_results = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
    ret_code = is_flaky.main()
    self.assertEqual(ret_code, 0)

  def testReturnsNonFlakyForAllFailures(self):
    # 100% failure means consistently broken, which is reported as not flaky.
    self.check_call_results = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
    ret_code = is_flaky.main()
    self.assertEqual(ret_code, 0)

  def testReturnsNonFlakyForSmallNumberOfFailures(self):
    # 20% failures is under the 30% threshold.
    self.check_call_results = [1, 0, 1, 0, 0, 0, 0, 0, 0, 0]
    ret_code = is_flaky.main()
    self.assertEqual(ret_code, 0)

  def testReturnsFlakyForLargeNumberOfFailures(self):
    # 40% failures exceeds the 30% threshold.
    self.check_call_results = [1, 1, 1, 0, 1, 0, 0, 0, 0, 0]
    ret_code = is_flaky.main()
    self.assertEqual(ret_code, 1)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/gdb/gdb_chrome.py b/chromium/tools/gdb/gdb_chrome.py
new file mode 100644
index 00000000000..dc6514944fe
--- /dev/null
+++ b/chromium/tools/gdb/gdb_chrome.py
@@ -0,0 +1,338 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""GDB support for Chrome types.
+
+Add this to your gdb by amending your ~/.gdbinit as follows:
+ python
+ import sys
+ sys.path.insert(0, "/path/to/tools/gdb/")
+ import gdb_chrome
+ end
+
+Use
+ (gdb) p /r any_variable
+to print |any_variable| without using any printers.
+"""
+
+import datetime
+import gdb
+import gdb.printing
+import os
+import sys
+
+sys.path.insert(0, os.path.join(
+ os.path.dirname(os.path.abspath(__file__)),
+ '..', '..', 'third_party', 'WebKit', 'Tools', 'gdb'))
+try:
+ import webkit
+finally:
+ sys.path.pop(0)
+
+# When debugging this module, set the below variable to True, and then use
+# (gdb) python del sys.modules['gdb_chrome']
+# (gdb) python import gdb_chrome
+# to reload.
+_DEBUGGING = False
+
+
+pp_set = gdb.printing.RegexpCollectionPrettyPrinter("chromium")
+
+
def typed_ptr(ptr):
    """Formats |ptr| as a parenthesized C-style cast to its dynamic type.

    gdb's default rendering is a bare address, which takes more steps to
    interpret; including the exact type makes the value easy to cut+paste
    back into gdb expressions.
    """
    return '(({0}){1})'.format(ptr.dynamic_type, ptr)
+
+
def yield_fields(val):
    """Use this in a printer's children() method to print an object's fields.

    e.g.
      def children():
        for result in yield_fields(self.val):
          yield result
    """
    try:
        # Pointer/reference values: describe the pointed-to type's fields.
        fields = val.type.target().fields()
    except:
        # target() fails for non-pointer types -- use the type directly.
        # NOTE(review): bare except also hides unrelated errors.
        fields = val.type.fields()
    for field in fields:
        if field.is_base_class:
            # Present a base class as one pseudo-field, cast to that base.
            yield (field.name, val.cast(gdb.lookup_type(field.name)))
        else:
            yield (field.name, val[field.name])
+
+
+class Printer(object):
+ def __init__(self, val):
+ self.val = val
+
+
+class StringPrinter(Printer):
+ def display_hint(self):
+ return 'string'
+
+
+class String16Printer(StringPrinter):
+ def to_string(self):
+ return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p'])
+pp_set.add_printer(
+ 'string16',
+ '^string16|std::basic_string<(unsigned short|base::char16).*>$',
+ String16Printer);
+
+
+class GURLPrinter(StringPrinter):
+ def to_string(self):
+ return self.val['spec_']
+pp_set.add_printer('GURL', '^GURL$', GURLPrinter)
+
+
+class FilePathPrinter(StringPrinter):
+ def to_string(self):
+ return self.val['path_']['_M_dataplus']['_M_p']
+pp_set.add_printer('FilePath', '^FilePath$', FilePathPrinter)
+
+
+class SizePrinter(Printer):
+ def to_string(self):
+ return '%sx%s' % (self.val['width_'], self.val['height_'])
+pp_set.add_printer('gfx::Size', '^gfx::(Size|SizeF|SizeBase<.*>)$', SizePrinter)
+
+
+class PointPrinter(Printer):
+ def to_string(self):
+ return '%s,%s' % (self.val['x_'], self.val['y_'])
+pp_set.add_printer('gfx::Point', '^gfx::(Point|PointF|PointBase<.*>)$',
+ PointPrinter)
+
+
+class RectPrinter(Printer):
+ def to_string(self):
+ return '%s %s' % (self.val['origin_'], self.val['size_'])
+pp_set.add_printer('gfx::Rect', '^gfx::(Rect|RectF|RectBase<.*>)$',
+ RectPrinter)
+
+
+class SmartPtrPrinter(Printer):
+ def to_string(self):
+ return '%s%s' % (self.typename, typed_ptr(self.ptr()))
+
+
+class ScopedPtrPrinter(SmartPtrPrinter):
+ typename = 'scoped_ptr'
+ def ptr(self):
+ return self.val['impl_']['data_']['ptr']
+pp_set.add_printer('scoped_ptr', '^scoped_ptr<.*>$', ScopedPtrPrinter)
+
+
+class ScopedRefPtrPrinter(SmartPtrPrinter):
+ typename = 'scoped_refptr'
+ def ptr(self):
+ return self.val['ptr_']
+pp_set.add_printer('scoped_refptr', '^scoped_refptr<.*>$', ScopedRefPtrPrinter)
+
+
+class LinkedPtrPrinter(SmartPtrPrinter):
+ typename = 'linked_ptr'
+ def ptr(self):
+ return self.val['value_']
+pp_set.add_printer('linked_ptr', '^linked_ptr<.*>$', LinkedPtrPrinter)
+
+
class WeakPtrPrinter(SmartPtrPrinter):
    typename = 'base::WeakPtr'
    def ptr(self):
        # The stored pointer is only meaningful while the shared validity
        # flag is set; otherwise return a typed null pointer so staleness
        # is visible in the output.
        flag = ScopedRefPtrPrinter(self.val['ref_']['flag_']).ptr()
        if flag and flag['is_valid_']:
            return self.val['ptr_']
        return gdb.Value(0).cast(self.val['ptr_'].type)
pp_set.add_printer('base::WeakPtr', '^base::WeakPtr<.*>$', WeakPtrPrinter)
+
+
+class CallbackPrinter(Printer):
+ """Callbacks provide no usable information so reduce the space they take."""
+ def to_string(self):
+ return '...'
+pp_set.add_printer('base::Callback', '^base::Callback<.*>$', CallbackPrinter)
+
+
+class LocationPrinter(Printer):
+ def to_string(self):
+ return '%s()@%s:%s' % (self.val['function_name_'].string(),
+ self.val['file_name_'].string(),
+ self.val['line_number_'])
+pp_set.add_printer('tracked_objects::Location', '^tracked_objects::Location$',
+ LocationPrinter)
+
+
+class PendingTaskPrinter(Printer):
+ def to_string(self):
+ return 'From %s' % (self.val['posted_from'],)
+
+ def children(self):
+ for result in yield_fields(self.val):
+ if result[0] not in ('task', 'posted_from'):
+ yield result
+pp_set.add_printer('base::PendingTask', '^base::PendingTask$',
+ PendingTaskPrinter)
+
+
+class LockPrinter(Printer):
+ def to_string(self):
+ try:
+ if self.val['owned_by_thread_']:
+ return 'Locked by thread %s' % self.val['owning_thread_id_']
+ else:
+ return 'Unlocked'
+ except gdb.error:
+ return 'Unknown state'
+pp_set.add_printer('base::Lock', '^base::Lock$', LockPrinter)
+
+
+class TimeDeltaPrinter(object):
+ def __init__(self, val):
+ self._timedelta = datetime.timedelta(microseconds=int(val['delta_']))
+
+ def timedelta(self):
+ return self._timedelta
+
+ def to_string(self):
+ return str(self._timedelta)
+pp_set.add_printer('base::TimeDelta', '^base::TimeDelta$', TimeDeltaPrinter)
+
+
+class TimeTicksPrinter(TimeDeltaPrinter):
+ def __init__(self, val):
+ self._timedelta = datetime.timedelta(microseconds=int(val['ticks_']))
+pp_set.add_printer('base::TimeTicks', '^base::TimeTicks$', TimeTicksPrinter)
+
+
class TimePrinter(object):
    def __init__(self, val):
        # |us_| counts microseconds in base::Time's internal epoch;
        # subtracting kTimeTToMicrosecondsOffset converts that to
        # time_t-relative microseconds before building a datetime.
        # NOTE(review): fromtimestamp(0) is the *local-time* epoch, so the
        # printed value is in local time -- confirm that is intended.
        timet_offset = gdb.parse_and_eval(
            'base::Time::kTimeTToMicrosecondsOffset')
        self._datetime = (datetime.datetime.fromtimestamp(0) +
                          datetime.timedelta(microseconds=
                              int(val['us_'] - timet_offset)))

    def datetime(self):
        return self._datetime

    def to_string(self):
        return str(self._datetime)
pp_set.add_printer('base::Time', '^base::Time$', TimePrinter)
+
+
class IpcMessagePrinter(Printer):
    def header(self):
        # Reinterpret the raw header_ as an IPC::Message::Header*.
        return self.val['header_'].cast(
            gdb.lookup_type('IPC::Message::Header').pointer())

    def to_string(self):
        message_type = self.header()['type']
        # The high 16 bits of |type| hold the IPCMessageStart class id; the
        # low 16 bits identify the message within that class.
        return '%s of kind %s line %s' % (
            self.val.dynamic_type,
            (message_type >> 16).cast(gdb.lookup_type('IPCMessageStart')),
            message_type & 0xffff)

    def children(self):
        yield ('header_', self.header().dereference())
        yield ('capacity_after_header_', self.val['capacity_after_header_'])
        # Then every non-base-class field of the dynamic type.
        for field in self.val.type.fields():
            if field.is_base_class:
                continue
            yield (field.name, self.val[field.name])
+
+
class NotificationRegistrarPrinter(Printer):
    def to_string(self):
        try:
            # Peek at the libstdc++ std::vector internals to count the
            # registrations without executing any inferior code.
            registrations = self.val['registered_']
            vector_finish = registrations['_M_impl']['_M_finish']
            vector_start = registrations['_M_impl']['_M_start']
            if vector_start == vector_finish:
                return 'Not watching notifications'
            if vector_start.dereference().type.sizeof == 0:
                # Incomplete type: b/8242773
                return 'Watching some notifications'
            return ('Watching %s notifications; '
                    'print %s->registered_ for details') % (
                        int(vector_finish - vector_start),
                        typed_ptr(self.val.address))
        except gdb.error:
            return 'NotificationRegistrar'
pp_set.add_printer('content::NotificationRegistrar',
                   '^content::NotificationRegistrar$',
                   NotificationRegistrarPrinter)
+
+
+class SiteInstanceImplPrinter(object):
+ def __init__(self, val):
+ self.val = val.cast(val.dynamic_type)
+
+ def to_string(self):
+ return 'SiteInstanceImpl@%s for %s' % (
+ self.val.address, self.val['site_'])
+
+ def children(self):
+ yield ('id_', self.val['id_'])
+ yield ('has_site_', self.val['has_site_'])
+ if self.val['browsing_instance_']['ptr_']:
+ yield ('browsing_instance_', self.val['browsing_instance_']['ptr_'])
+ if self.val['process_']:
+ yield ('process_', typed_ptr(self.val['process_']))
+pp_set.add_printer('content::SiteInstanceImpl', '^content::SiteInstanceImpl$',
+ SiteInstanceImplPrinter)
+
+
+class RenderProcessHostImplPrinter(object):
+ def __init__(self, val):
+ self.val = val.cast(val.dynamic_type)
+
+ def to_string(self):
+ pid = ''
+ try:
+ child_process_launcher_ptr = (
+ self.val['child_process_launcher_']['impl_']['data_']['ptr'])
+ if child_process_launcher_ptr:
+ context = (child_process_launcher_ptr['context_']['ptr_'])
+ if context:
+ pid = ' PID %s' % str(context['process_']['process_'])
+ except gdb.error:
+ # The definition of the Context type may not be available.
+ # b/8242773
+ pass
+ return 'RenderProcessHostImpl@%s%s' % (self.val.address, pid)
+
+ def children(self):
+ yield ('id_', self.val['id_'])
+ yield ('listeners_',
+ self.val['listeners_']['data_'])
+ yield ('worker_ref_count_', self.val['worker_ref_count_'])
+ yield ('fast_shutdown_started_', self.val['fast_shutdown_started_'])
+ yield ('deleting_soon_', self.val['deleting_soon_'])
+ yield ('pending_views_', self.val['pending_views_'])
+ yield ('visible_widgets_', self.val['visible_widgets_'])
+ yield ('backgrounded_', self.val['backgrounded_'])
+ yield ('widget_helper_', self.val['widget_helper_'])
+ yield ('is_initialized_', self.val['is_initialized_'])
+ yield ('browser_context_', typed_ptr(self.val['browser_context_']))
+ yield ('sudden_termination_allowed_',
+ self.val['sudden_termination_allowed_'])
+ yield ('ignore_input_events_', self.val['ignore_input_events_'])
+ yield ('is_guest_', self.val['is_guest_'])
+pp_set.add_printer('content::RenderProcessHostImpl',
+ '^content::RenderProcessHostImpl$',
+ RenderProcessHostImplPrinter)
+
+
+gdb.printing.register_pretty_printer(gdb, pp_set, replace=_DEBUGGING)
diff --git a/chromium/tools/gen_keyboard_overlay_data/gen_keyboard_overlay_data.py b/chromium/tools/gen_keyboard_overlay_data/gen_keyboard_overlay_data.py
new file mode 100755
index 00000000000..073ef0bfdce
--- /dev/null
+++ b/chromium/tools/gen_keyboard_overlay_data/gen_keyboard_overlay_data.py
@@ -0,0 +1,515 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate keyboard layout and hotkey data for the keyboard overlay.
+
+This script fetches data from the keyboard layout and hotkey data spreadsheet,
+and output the data depending on the option.
+
+ --cc: Rewrites a part of C++ code in
+ chrome/browser/chromeos/webui/keyboard_overlay_ui.cc
+
+ --grd: Rewrites a part of grd messages in
+ chrome/app/generated_resources.grd
+
+ --js: Rewrites the entire JavaScript code in
+ chrome/browser/resources/keyboard_overlay/keyboard_overlay_data.js
+
+These options can be specified at the same time.
+
+e.g.
+python gen_keyboard_overlay_data.py --cc --grd --js
+
+The output directory of the generated files can be changed with --outdir.
+
+e.g. (This will generate tmp/keyboard_overlay.js)
+python gen_keyboard_overlay_data.py --outdir=tmp --js
+"""
+
+import cStringIO
+import datetime
+import gdata.spreadsheet.service
+import getpass
+import json
+import optparse
+import os
+import re
+import sys
+
+MODIFIER_SHIFT = 1 << 0
+MODIFIER_CTRL = 1 << 1
+MODIFIER_ALT = 1 << 2
+
+KEYBOARD_GLYPH_SPREADSHEET_KEY = '0Ao3KldW9piwEdExLbGR6TmZ2RU9aUjFCMmVxWkVqVmc'
+HOTKEY_SPREADSHEET_KEY = '0AqzoqbAMLyEPdE1RQXdodk1qVkFyTWtQbUxROVM1cXc'
+CC_OUTDIR = 'chrome/browser/ui/webui/chromeos'
+CC_FILENAME = 'keyboard_overlay_ui.cc'
+GRD_OUTDIR = 'chrome/app'
+GRD_FILENAME = 'chromeos_strings.grdp'
+JS_OUTDIR = 'chrome/browser/resources/chromeos'
+JS_FILENAME = 'keyboard_overlay_data.js'
+CC_START = r'IDS_KEYBOARD_OVERLAY_INSTRUCTIONS_HIDE },'
+CC_END = r'};'
+GRD_START = r' <!-- BEGIN GENERATED KEYBOARD OVERLAY STRINGS -->'
+GRD_END = r' <!-- END GENERATED KEYBOARD OVERLAY STRINGS -->'
+
+LABEL_MAP = {
+ 'glyph_arrow_down': 'down',
+ 'glyph_arrow_left': 'left',
+ 'glyph_arrow_right': 'right',
+ 'glyph_arrow_up': 'up',
+ 'glyph_back': 'back',
+ 'glyph_backspace': 'backspace',
+ 'glyph_brightness_down': 'bright down',
+ 'glyph_brightness_up': 'bright up',
+ 'glyph_enter': 'enter',
+ 'glyph_forward': 'forward',
+ 'glyph_fullscreen': 'full screen',
+ # Kana/Eisu key on Japanese keyboard
+ 'glyph_ime': u'\u304b\u306a\u0020\u002f\u0020\u82f1\u6570',
+ 'glyph_lock': 'lock',
+ 'glyph_overview': 'switch window',
+ 'glyph_power': 'power',
+ 'glyph_right': 'right',
+ 'glyph_reload': 'reload',
+ 'glyph_search': 'search',
+ 'glyph_shift': 'shift',
+ 'glyph_tab': 'tab',
+ 'glyph_tools': 'tools',
+ 'glyph_volume_down': 'vol. down',
+ 'glyph_volume_mute': 'mute',
+ 'glyph_volume_up': 'vol. up',
+};
+
+INPUT_METHOD_ID_TO_OVERLAY_ID = {
+ 'xkb:be::fra': 'fr',
+ 'xkb:be::ger': 'de',
+ 'xkb:be::nld': 'nl',
+ 'xkb:bg::bul': 'bg',
+ 'xkb:bg:phonetic:bul': 'bg',
+ 'xkb:br::por': 'pt_BR',
+ 'xkb:ca::fra': 'fr_CA',
+ 'xkb:ca:eng:eng': 'ca',
+ 'xkb:ch::ger': 'de',
+ 'xkb:ch:fr:fra': 'fr',
+ 'xkb:cz::cze': 'cs',
+ 'xkb:de::ger': 'de',
+ 'xkb:de:neo:ger': 'de_neo',
+ 'xkb:dk::dan': 'da',
+ 'xkb:ee::est': 'et',
+ 'xkb:es::spa': 'es',
+ 'xkb:es:cat:cat': 'ca',
+ 'xkb:fi::fin': 'fi',
+ 'xkb:fr::fra': 'fr',
+ 'xkb:gb:dvorak:eng': 'en_GB_dvorak',
+ 'xkb:gb:extd:eng': 'en_GB',
+ 'xkb:gr::gre': 'el',
+ 'xkb:hr::scr': 'hr',
+ 'xkb:hu::hun': 'hu',
+ 'xkb:il::heb': 'iw',
+ 'xkb:it::ita': 'it',
+ 'xkb:jp::jpn': 'ja',
+ 'xkb:latam::spa': 'es_419',
+ 'xkb:lt::lit': 'lt',
+ 'xkb:lv:apostrophe:lav': 'lv',
+ 'xkb:no::nob': 'no',
+ 'xkb:pl::pol': 'pl',
+ 'xkb:pt::por': 'pt_PT',
+ 'xkb:ro::rum': 'ro',
+ 'xkb:rs::srp': 'sr',
+ 'xkb:ru::rus': 'ru',
+ 'xkb:ru:phonetic:rus': 'ru',
+ 'xkb:se::swe': 'sv',
+ 'xkb:si::slv': 'sl',
+ 'xkb:sk::slo': 'sk',
+ 'xkb:tr::tur': 'tr',
+ 'xkb:ua::ukr': 'uk',
+ 'xkb:us::eng': 'en_US',
+ 'xkb:us::fil': 'en_US',
+ 'xkb:us::ind': 'en_US',
+ 'xkb:us::msa': 'en_US',
+ 'xkb:us:altgr-intl:eng': 'en_US_altgr_intl',
+ 'xkb:us:colemak:eng': 'en_US_colemak',
+ 'xkb:us:dvorak:eng': 'en_US_dvorak',
+ 'xkb:us:intl:eng': 'en_US_intl',
+ 'xkb:us:intl:nld': 'en_US_intl',
+ 'xkb:us:intl:por': 'en_US_intl',
+ 'xkb:us:workman:eng': 'en_US_workman',
+ 'xkb:us:workman-intl:eng': 'en_US_workman_intl',
+}
+
+# The file was first generated in 2012 and we have a policy of not updating
+# copyright dates.
+COPYRIGHT_HEADER=\
+"""// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This is a generated file but may contain local modifications. See
+// src/tools/gen_keyboard_overlay_data/gen_keyboard_overlay_data.py --help
+"""
+
+# A snippet for grd file
+GRD_SNIPPET_TEMPLATE=""" <message name="%s" desc="%s">
+ %s
+ </message>
+"""
+
+# A snippet for C++ file
+CC_SNIPPET_TEMPLATE=""" { "%s", %s },
+"""
+
+
def SplitBehavior(behavior):
  """Splits the behavior to compose a message or i18n-content value.

  Examples:
    'Activate last tab' => ['Activate', 'last', 'tab']
    'Close tab' => ['Close', 'tab']
  """
  # Split on space, parentheses, double quote, hyphen, period and comma.
  # The hyphen is escaped: in the previous pattern '[ ()"-.,]' it formed
  # the character range '"'..'.', which also matched #, $, %, &, ', * and
  # + and so split words on those characters unintentionally.
  return [x for x in re.split(r'[ ()"\-.,]', behavior) if len(x) > 0]
+
+
def ToMessageName(behavior):
  """Composes a message name for grd file.

  Examples:
    'Activate last tab' => IDS_KEYBOARD_OVERLAY_ACTIVATE_LAST_TAB
    'Close tab' => IDS_KEYBOARD_OVERLAY_CLOSE_TAB
  """
  return 'IDS_KEYBOARD_OVERLAY_' + '_'.join(
      word.upper() for word in SplitBehavior(behavior))
+
+
def ToMessageDesc(description):
  """Composes a message description for grd file."""
  base = 'The text in the keyboard overlay to explain the shortcut'
  if not description:
    return base + '.'
  return '%s (%s).' % (base, description)
+
+
def Toi18nContent(behavior):
  """Composes a i18n-content value for HTML/JavaScript files.

  Examples:
    'Activate last tab' => keyboardOverlayActivateLastTab
    'Close tab' => keyboardOverlayCloseTab
  """
  words = [word.lower() for word in SplitBehavior(behavior)]
  return 'keyboardOverlay' + ''.join(w[0].upper() + w[1:] for w in words)
+
+
def ToKeys(hotkey):
  """Converts the action value to shortcut keys used from JavaScript.

  Examples:
    'Ctrl - 9' => '9<>CTRL'
    'Ctrl - Shift - Tab' => 'tab<>CTRL<>SHIFT'
  """
  modifiers = []
  keycodes = []
  for token in hotkey.split(' - '):
    if token in ('Shift', 'Ctrl', 'Alt', 'Search'):
      modifiers.append(token.upper())
    else:
      keycodes.append(token.lower())
  modifiers.sort()
  # Without modifiers, only a few keys (which are highlighted on their
  # own) yield a shortcut; everything else is dropped.
  if not modifiers and keycodes and keycodes[0] not in ('backspace', 'power'):
    return None
  return '<>'.join(keycodes + modifiers)
+
+
def ParseOptions():
  """Parses the input arguments and returns options."""
  # default_username = os.getusername() + '@google.com';
  default_username = '%s@google.com' % os.environ.get('USER')
  parser = optparse.OptionParser()
  parser.add_option('--key', dest='key',
                    help='The key of the spreadsheet (required).')
  parser.add_option('--username', dest='username',
                    default=default_username,
                    help='Your user name (default: %s).' % default_username)
  parser.add_option('--password', dest='password',
                    help='Your password.')
  parser.add_option('--account_type', default='GOOGLE', dest='account_type',
                    help='Account type used for gdata login (default: GOOGLE)')
  parser.add_option('--js', dest='js', default=False, action='store_true',
                    help='Output js file.')
  parser.add_option('--grd', dest='grd', default=False, action='store_true',
                    help='Output resource file.')
  parser.add_option('--cc', dest='cc', default=False, action='store_true',
                    help='Output cc file.')
  parser.add_option('--outdir', dest='outdir', default=None,
                    help='Specify the directory files are generated.')
  (options, unused_args) = parser.parse_args()

  # NOTE(review): endswith('google.com') also accepts e.g.
  # 'user@notgoogle.com'; '@google.com' may be the intended check.
  if not options.username.endswith('google.com'):
    print 'google.com account is necessary to use this script.'
    sys.exit(-1)

  # At least one output format must be requested.
  if (not (options.js or options.grd or options.cc)):
    print 'Either --js, --grd, or --cc needs to be specified.'
    sys.exit(-1)

  # Get the password from the terminal, if needed.
  if not options.password:
    options.password = getpass.getpass(
        'Application specific password for %s: ' % options.username)
  return options
+
+
def InitClient(options):
  """Initializes the spreadsheet client and performs the gdata login."""
  client = gdata.spreadsheet.service.SpreadsheetsService()
  client.email = options.username
  client.password = options.password
  client.source = 'Spread Sheet'
  client.account_type = options.account_type
  print 'Logging in as %s (%s)' % (client.email, client.account_type)
  client.ProgrammaticLogin()
  return client
+
+
def PrintDiffs(message, lhs, rhs):
  """Prints |message| followed by the items of |lhs| missing from |rhs|."""
  dif = set(lhs).difference(rhs)
  if dif:
    print message, ', '.join(dif)
+
+
def FetchSpreadsheetFeeds(client, key, sheets, cols):
  """Fetch feeds from the spreadsheet.

  Args:
    client: A spreadsheet client to be used for fetching data.
    key: A key string of the spreadsheet to be fetched.
    sheets: A list of the sheet names to read data from.
    cols: A list of columns to read data from.

  Returns:
    A dict mapping sheet title to a list of row dicts (column -> cell
    text); cells that are empty or whose column is not in |cols| are
    omitted from each row dict.
  """
  worksheets_feed = client.GetWorksheetsFeed(key)
  print 'Fetching data from the worksheet: %s' % worksheets_feed.title.text
  worksheets_data = {}
  titles = []
  for entry in worksheets_feed.entry:
    # The worksheet id is the last path component of the entry's id URL.
    worksheet_id = entry.id.text.split('/')[-1]
    list_feed = client.GetListFeed(key, worksheet_id)
    list_data = []
    # Hack to deal with sheet names like 'sv (Copy of fl)'
    title = list_feed.title.text.split('(')[0].strip()
    titles.append(title)
    if title not in sheets:
      continue
    print 'Reading data from the sheet: %s' % list_feed.title.text
    for i, entry in enumerate(list_feed.entry):
      line_data = {}
      for k in entry.custom:
        # Skip columns the caller did not ask for, and empty cells.
        if (k not in cols) or (not entry.custom[k].text):
          continue
        line_data[k] = entry.custom[k].text
      list_data.append(line_data)
    worksheets_data[title] = list_data
  # Report mismatches between the requested sheets and what exists.
  PrintDiffs('Exist only on the spreadsheet: ', titles, sheets)
  PrintDiffs('Specified but do not exist on the spreadsheet: ', sheets, titles)
  return worksheets_data
+
+
def FetchKeyboardGlyphData(client):
  """Fetches the keyboard glyph data from the spreadsheet.

  Returns:
    A dict keyed by overlay id (e.g. 'en_US') containing 'keys'
    (scancode -> glyph/label row) and, when present, 'layoutName'.
  """
  glyph_cols = ['scancode', 'p0', 'p1', 'p2', 'p3', 'p4', 'p5', 'p6', 'p7',
                'p8', 'p9', 'label', 'format', 'notes']
  keyboard_glyph_data = FetchSpreadsheetFeeds(
      client, KEYBOARD_GLYPH_SPREADSHEET_KEY,
      INPUT_METHOD_ID_TO_OVERLAY_ID.values(), glyph_cols)
  ret = {}
  for lang in keyboard_glyph_data:
    ret[lang] = {}
    keys = {}
    for line in keyboard_glyph_data[lang]:
      scancode = line.get('scancode')
      # A row with no scancode but with notes names the layout itself.
      if (not scancode) and line.get('notes'):
        ret[lang]['layoutName'] = line['notes']
        continue
      del line['scancode']
      if 'notes' in line:
        del line['notes']
      if 'label' in line:
        # Map glyph ids to their display words where LABEL_MAP knows them.
        line['label'] = LABEL_MAP.get(line['label'], line['label'])
      keys[scancode] = line
    # Add a label to space key
    if '39' not in keys:
      keys['39'] = {'label': 'space'}
    ret[lang]['keys'] = keys
  return ret
+
+
def FetchLayoutsData(client):
  """Fetches the keyboard layout geometry data from the spreadsheet.

  Returns:
    A dict keyed by layout id ('U', 'J', 'E', 'B') of rows
    [scancode, x, y, w, h], with the numeric columns converted to float.
  """
  layout_names = ['U_layout', 'J_layout', 'E_layout', 'B_layout']
  cols = ['scancode', 'x', 'y', 'w', 'h']
  layouts = FetchSpreadsheetFeeds(client, KEYBOARD_GLYPH_SPREADSHEET_KEY,
                                  layout_names, cols)
  ret = {}
  for layout_name, layout in layouts.items():
    # Key by the first letter of the sheet name, e.g. 'U' for 'U_layout'.
    ret[layout_name[0]] = []
    for row in layout:
      line = []
      for col in cols:
        value = row.get(col)
        if not value:
          line.append('')
        else:
          # All columns except the scancode are geometry numbers.
          if col != 'scancode':
            value = float(value)
          line.append(value)
      ret[layout_name[0]].append(line)
  return ret
+
+
def FetchHotkeyData(client):
  """Fetches the hotkey data from the spreadsheet.

  Returns:
    A list of (behavior, action, description) tuples for rows of kind
    'Key' that apply to Chrome OS and map to a highlightable shortcut.
  """
  hotkey_sheet = ['Cross Platform Behaviors']
  hotkey_cols = ['behavior', 'context', 'kind', 'actionctrlctrlcmdonmac',
                 'chromeos', 'descriptionfortranslation']
  hotkey_data = FetchSpreadsheetFeeds(client, HOTKEY_SPREADSHEET_KEY,
                                      hotkey_sheet, hotkey_cols)
  # NOTE(review): action_to_id and id_to_behavior are never used in this
  # function; candidates for removal.
  action_to_id = {}
  id_to_behavior = {}
  # Accumulates (behavior, action, description) triples.
  result = []
  for line in hotkey_data['Cross Platform Behaviors']:
    # Only Chrome OS rows of kind 'Key' are relevant here.
    if (not line.get('chromeos')) or (line.get('kind') != 'Key'):
      continue
    action = ToKeys(line['actionctrlctrlcmdonmac'])
    if not action:
      continue
    behavior = line['behavior'].strip()
    description = line.get('descriptionfortranslation')
    result.append((behavior, action, description))
  return result
+
+
def UniqueBehaviors(hotkey_data):
  """Retrieves a sorted list of unique behaviors from |hotkey_data|.

  Sorted by the generated grd message name. NOTE(review): uses the
  Python-2-only cmp()/cmp= sorting API; a key= function would be needed
  under Python 3.
  """
  return sorted(set((behavior, description) for (behavior, _, description)
                    in hotkey_data),
                cmp=lambda x, y: cmp(ToMessageName(x[0]), ToMessageName(y[0])))
+
+
def GetPath(path_from_src):
  """Returns the path of |path_from_src| resolved against the src root.

  The root is taken to be two directories above this script. Warns (but
  still returns the path) when the file does not exist, since the target
  may have been moved or renamed.
  """
  path = os.path.join(os.path.dirname(__file__), '../..', path_from_src)
  if not os.path.isfile(path):
    print 'WARNING: %s does not exist. Maybe moved or renamed?' % path
  return path
+
+
def OutputFile(outpath, snippet):
  """Writes the copyright header followed by |snippet| to |outpath|.

  NOTE(review): the file object is never explicitly closed; this relies
  on CPython refcounting / process exit to flush the write.
  """
  out = file(outpath, 'w')
  out.write(COPYRIGHT_HEADER + '\n')
  out.write(snippet)
  print 'Output ' + os.path.normpath(outpath)
+
+
def RewriteFile(start, end, original_dir, original_filename, snippet,
                outdir=None):
  """Replaces a part of the specified file with snippet and outputs it.

  Args:
    start: Marker line beginning the replaced region (kept in the output).
    end: Marker line ending the replaced region (kept in the output).
    original_dir: src-relative directory of the file to rewrite.
    original_filename: Name of the file to rewrite.
    snippet: Replacement text inserted between |start| and |end|.
    outdir: If set, write the result there instead of rewriting in place.
  """
  original_path = GetPath(os.path.join(original_dir, original_filename))
  original = file(original_path, 'r')
  original_content = original.read()
  original.close()
  if outdir:
    outpath = os.path.join(outdir, original_filename)
  else:
    outpath = original_path
  out = file(outpath, 'w')
  # Non-greedy DOTALL match: everything between |start| and |end| is
  # replaced; the markers themselves are preserved.
  rx = re.compile(r'%s\n.*?%s\n' % (re.escape(start), re.escape(end)),
                  re.DOTALL)
  new_content = re.sub(rx, '%s\n%s%s\n' % (start, snippet, end),
                       original_content)
  out.write(new_content)
  out.close()
  print 'Output ' + os.path.normpath(outpath)
+
+
def OutputJson(keyboard_glyph_data, hotkey_data, layouts, var_name, outdir):
  """Outputs the keyboard overlay data as a JSON file."""
  # Map each shortcut action string to its i18n-content id.
  action_to_id = {}
  for (behavior, action, _) in hotkey_data:
    i18nContent = Toi18nContent(behavior)
    action_to_id[action] = i18nContent
  data = {'keyboardGlyph': keyboard_glyph_data,
          'shortcut': action_to_id,
          'layouts': layouts,
          'inputMethodIdToOverlayId': INPUT_METHOD_ID_TO_OVERLAY_ID}

  if not outdir:
    outdir = JS_OUTDIR
  outpath = GetPath(os.path.join(outdir, JS_FILENAME))
  json_data = json.dumps(data, sort_keys=True, indent=2)
  # Remove redundant spaces after ','
  json_data = json_data.replace(', \n', ',\n')
  # Replace double quotes with single quotes to avoid lint warnings.
  # NOTE(review): this blanket replace would corrupt any value that itself
  # contains a double quote -- it relies on the spreadsheet data having none.
  json_data = json_data.replace('\"', '\'')
  snippet = 'var %s = %s;\n' % (var_name, json_data)
  OutputFile(outpath, snippet)
+
+
def OutputGrd(hotkey_data, outdir):
  """Outputs a part of messages in the grd file.

  Rewrites only the generated region between GRD_START and GRD_END.
  """
  snippet = cStringIO.StringIO()
  for (behavior, description) in UniqueBehaviors(hotkey_data):
    # Do not generate message for 'Show wrench menu'. It is handled manually
    # based on branding.
    if behavior == 'Show wrench menu':
      continue
    snippet.write(GRD_SNIPPET_TEMPLATE %
                  (ToMessageName(behavior), ToMessageDesc(description),
                   behavior))

  RewriteFile(GRD_START, GRD_END, GRD_OUTDIR, GRD_FILENAME, snippet.getvalue(),
              outdir)
+
+
def OutputCC(hotkey_data, outdir):
  """Outputs a part of code in the C++ file.

  Rewrites only the generated region between CC_START and CC_END.
  """
  snippet = cStringIO.StringIO()
  for (behavior, _) in UniqueBehaviors(hotkey_data):
    message_name = ToMessageName(behavior)
    output = CC_SNIPPET_TEMPLATE % (Toi18nContent(behavior), message_name)
    # Break the line if the line is longer than 80 characters
    if len(output) > 80:
      output = output.replace(' ' + message_name, '\n %s' % message_name)
    snippet.write(output)

  RewriteFile(CC_START, CC_END, CC_OUTDIR, CC_FILENAME, snippet.getvalue(),
              outdir)
+
+
def main():
  """Fetches the spreadsheet data and writes the requested output files."""
  options = ParseOptions()
  client = InitClient(options)
  hotkey_data = FetchHotkeyData(client)

  # NOTE(review): two consecutive "if options.js:" blocks; presumably they
  # were meant to be a single block, since keyboard_glyph_data is only
  # consumed by OutputJson below.
  if options.js:
    keyboard_glyph_data = FetchKeyboardGlyphData(client)

  if options.js:
    layouts = FetchLayoutsData(client)
    OutputJson(keyboard_glyph_data, hotkey_data, layouts, 'keyboardOverlayData',
               options.outdir)
  if options.grd:
    OutputGrd(hotkey_data, options.outdir)
  if options.cc:
    OutputCC(hotkey_data, options.outdir)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/chromium/tools/generate_library_loader/OWNERS b/chromium/tools/generate_library_loader/OWNERS
new file mode 100644
index 00000000000..508f05e86c0
--- /dev/null
+++ b/chromium/tools/generate_library_loader/OWNERS
@@ -0,0 +1,2 @@
+phajdan.jr@chromium.org
+spang@chromium.org
diff --git a/chromium/tools/generate_library_loader/generate_library_loader.gni b/chromium/tools/generate_library_loader/generate_library_loader.gni
index c714c124c6d..1e1d5d77775 100644
--- a/chromium/tools/generate_library_loader/generate_library_loader.gni
+++ b/chromium/tools/generate_library_loader/generate_library_loader.gni
@@ -20,9 +20,6 @@ template("generate_library_loader") {
visibility = action_visibility
script = "//tools/generate_library_loader/generate_library_loader.py"
- if (defined(invoker.visibility)) {
- visibility = invoker.visibility
- }
outputs = [
output_h,
diff --git a/chromium/tools/git/OWNERS b/chromium/tools/git/OWNERS
new file mode 100644
index 00000000000..e0511af84bc
--- /dev/null
+++ b/chromium/tools/git/OWNERS
@@ -0,0 +1 @@
+per-file move_source_file.py=satorux@chromium.org
diff --git a/chromium/tools/git/README b/chromium/tools/git/README
new file mode 100644
index 00000000000..7f8e363dee0
--- /dev/null
+++ b/chromium/tools/git/README
@@ -0,0 +1,16 @@
+This directory contains some helpful Git tools.
+
+post-checkout and post-merge
+============================
+These hooks warn you about DEPS modifications so you will remember
+to run "gclient sync".
+
+To install these Git hooks, create symlinks like so:
+ ln -s $(pwd)/post-checkout $(git rev-parse --git-dir)/hooks
+ ln -s $(pwd)/post-merge $(git rev-parse --git-dir)/hooks
+
+
+git-graph
+=========
+Create a graph of the recent history of occurrences of a grep
+expression in the project.
diff --git a/chromium/tools/git/for-all-touched-files.py b/chromium/tools/git/for-all-touched-files.py
new file mode 100755
index 00000000000..633fd078cda
--- /dev/null
+++ b/chromium/tools/git/for-all-touched-files.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+ Invokes the specified (quoted) command for all files modified
+ between the current git branch and the specified branch or commit.
+
+ The special token [[FILENAME]] (or whatever you choose using the -t
+ flag) is replaced with each of the filenames of new or modified files.
+
+ Deleted files are not included. Neither are untracked files.
+
+Synopsis:
+ %prog [-b BRANCH] [-d] [-x EXTENSIONS|-c|-g] [-t TOKEN] QUOTED_COMMAND
+
+Examples:
+ %prog -x gyp,gypi "tools/format_xml.py [[FILENAME]]"
+ %prog -c "tools/sort-headers.py [[FILENAME]]"
+ %prog -g "tools/sort_sources.py [[FILENAME]]"
+ %prog -t "~~BINGO~~" "echo I modified ~~BINGO~~"
+"""
+
+import optparse
+import os
+import subprocess
+import sys
+
+
+# List of C++-like source file extensions.
+_CPP_EXTENSIONS = ('h', 'hh', 'hpp', 'c', 'cc', 'cpp', 'cxx', 'mm',)
+# List of build file extensions.
+_BUILD_EXTENSIONS = ('gyp', 'gypi', 'gn',)
+
+
def GitShell(args, ignore_return=False):
  """Runs |args| through the shell, as a git helper.

  Args:
    args: Command line to run, as a single shell string.
    ignore_return: If True, a non-zero exit status is not treated as an
        error.

  Returns:
    The command's combined stdout/stderr, split into a list of lines.

  Raises:
    Exception: The command exited non-zero and ignore_return is False.
  """
  # universal_newlines=True makes communicate() return text rather than
  # bytes, which also keeps this working under Python 3.
  job = subprocess.Popen(args,
                         shell=True,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT,
                         universal_newlines=True)
  # stderr is folded into stdout above, so the second value is always None.
  out, _ = job.communicate()
  if job.returncode != 0 and not ignore_return:
    print(out)
    raise Exception("Error %d running command %s" % (
        job.returncode, args))
  return out.split('\n')
+
+
def FilenamesFromGit(branch_name, extensions):
  """Lists new and modified files reported by [git diff branch_name].

  branch_name may be blank to diff the workspace. Deleted files are
  excluded. If extensions is non-empty, only files whose extension
  appears in it are returned.
  """
  result = []
  for raw_line in GitShell('git diff --stat=600,500 %s' % branch_name):
    entry = raw_line.lstrip()
    # The summary line carries no '|'; deleted files show no '+' marks.
    if '|' not in entry or '+' not in entry:
      continue
    name = entry.split()[0]
    if not name:
      continue
    name = name.rstrip()
    # rsplit with no limit still yields the final dotted component last.
    if not extensions or name.rsplit('.')[-1] in extensions:
      result.append(name)
  return result
+
+
def ForAllTouchedFiles(branch_name, extensions, token, command):
  """Runs |command| once per new/modified file from [git diff branch_name].

  Each invocation has |token| in the command replaced with the filename.
  When extensions is non-empty, only files with one of those extensions
  are processed.
  """
  for path in FilenamesFromGit(branch_name, extensions):
    os.system(command.replace(token, path))
+
+
def main():
  """Parses the command line and runs the command on all touched files."""
  parser = optparse.OptionParser(usage=__doc__)
  parser.add_option('-x', '--extensions', default='', dest='extensions',
                    help='Limits to files with given extensions '
                    '(comma-separated).')
  parser.add_option('-c', '--cpp', default=False, action='store_true',
                    dest='cpp_only',
                    help='Runs your command only on C++-like source files.')
  # -g stands for GYP and GN.
  parser.add_option('-g', '--build', default=False, action='store_true',
                    dest='build_only',
                    help='Runs your command only on build files.')
  parser.add_option('-t', '--token', default='[[FILENAME]]', dest='token',
                    help='Sets the token to be replaced for each file '
                    'in your command (default [[FILENAME]]).')
  parser.add_option('-b', '--branch', default='origin/master', dest='branch',
                    help='Sets what to diff to (default origin/master). Set '
                    'to empty to diff workspace against HEAD.')
  opts, args = parser.parse_args()

  if not args:
    parser.print_help()
    sys.exit(1)

  if opts.cpp_only and opts.build_only:
    parser.error("--cpp and --build are mutually exclusive")

  # Split the comma-separated -x value into individual extensions. The
  # previous code passed the raw string through, so the membership test in
  # FilenamesFromGit degenerated into a substring check (e.g. the extension
  # 'yp' would spuriously match '-x gyp,gypi').
  extensions = tuple(opts.extensions.split(',')) if opts.extensions else ()
  if opts.cpp_only:
    extensions = _CPP_EXTENSIONS
  if opts.build_only:
    extensions = _BUILD_EXTENSIONS

  ForAllTouchedFiles(opts.branch, extensions, opts.token, args[0])
+
+
+if __name__ == '__main__':
+ main()
diff --git a/chromium/tools/git/git-diff-ide.py b/chromium/tools/git/git-diff-ide.py
new file mode 100755
index 00000000000..405d270eba3
--- /dev/null
+++ b/chromium/tools/git/git-diff-ide.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+ Invokes git diff [args...] and inserts file:line in front of each line of diff
+ output where possible.
+
+ This is useful from an IDE that allows you to double-click lines that begin
+ with file:line to open and jump to that point in the file.
+
+Synopsis:
+ %prog [git diff args...]
+
+Examples:
+ %prog
+ %prog HEAD
+"""
+
+import subprocess
+import sys
+
+
def GitShell(args, ignore_return=False):
  """Runs |args| through the shell, as a git helper.

  Args:
    args: Command line to run, as a single shell string.
    ignore_return: If True, a non-zero exit status is not treated as an
        error.

  Returns:
    The command's combined stdout/stderr, split into a list of lines.

  Raises:
    Exception: The command exited non-zero and ignore_return is False.
  """
  # universal_newlines=True makes communicate() return text rather than
  # bytes, which also keeps this working under Python 3.
  job = subprocess.Popen(args,
                         shell=True,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT,
                         universal_newlines=True)
  # stderr is folded into stdout above, so the second value is always None.
  out, _ = job.communicate()
  if job.returncode != 0 and not ignore_return:
    print(out)
    raise Exception("Error %d running command %s" % (
        job.returncode, args))
  return out.split('\n')
+
+
def PrintGitDiff(extra_args):
  """Prints git diff |extra_args| with file:line inserted into relevant lines.

  Args:
    extra_args: List of extra arguments passed straight to git diff.
  """
  current_file = ''
  line_num = 0
  lines = GitShell('git diff %s' % ' '.join(extra_args))
  for line in lines:
    # Pass-through lines:
    #   diff --git a/file.c b/file.c
    #   index 0e38c2d..8cd69ae 100644
    #   --- a/file.c
    if (line.startswith('diff ') or
        line.startswith('index ') or
        line.startswith('--- ')):
      print(line)
      continue

    # Get the filename from the +++ line:
    #   +++ b/file.c
    if line.startswith('+++ '):
      # Filename might be /dev/null or a/file or b/file.
      # Skip the first two characters unless it starts with /.
      current_file = line[4:] if line[4] == '/' else line[6:]
      print(line)
      continue

    # Update line number from the @@ lines:
    #   @@ -41,9 +41,9 @@ def MyFunc():
    #       ^^
    if line.startswith('@@ '):
      _, old_nr, new_nr, _ = line.split(' ', 3)
      line_num = int(new_nr.split(',')[0])
      print(line)
      continue
    print(current_file + ':' + repr(line_num) + ':' + line)

    # Increment line number for lines that start with ' ' or '+':
    #   @@ -41,4 +41,4 @@ def MyFunc():
    #   file.c:41: // existing code
    #   file.c:42: // existing code
    #   file.c:43:-// deleted code
    #   file.c:43:-// deleted code
    #   file.c:43:+// inserted code
    #   file.c:44:+// inserted code
    if line.startswith(' ') or line.startswith('+'):
      line_num += 1
+
+
def main():
  """Entry point: forwards all command-line arguments to PrintGitDiff."""
  PrintGitDiff(sys.argv[1:])
+
+
+if __name__ == '__main__':
+ main()
diff --git a/chromium/tools/git/git-utils.sh b/chromium/tools/git/git-utils.sh
new file mode 100755
index 00000000000..608d27aa26b
--- /dev/null
+++ b/chromium/tools/git/git-utils.sh
@@ -0,0 +1,17 @@
#!/bin/bash
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Shared helpers for the git hook scripts in this directory.
# Probe for color support via tput: when available, RED/NORMAL hold the
# terminal escape sequences, otherwise they remain empty so output stays
# plain on dumb terminals.
TPUT=$(which tput 2>/dev/null)
if test -x "$TPUT" && $TPUT setaf 1 >/dev/null ; then
  RED="$($TPUT setaf 1)"
  NORMAL="$($TPUT op)"
else
  RED=
  NORMAL=
fi

# warn MESSAGE...: print MESSAGE with a (possibly colored) WARNING: prefix.
warn() {
  echo "${RED}WARNING:${NORMAL} $@"
}
diff --git a/chromium/tools/git/graph.sh b/chromium/tools/git/graph.sh
new file mode 100755
index 00000000000..800a52b86c8
--- /dev/null
+++ b/chromium/tools/git/graph.sh
@@ -0,0 +1,42 @@
#!/bin/bash
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

about="Given a grep expression, creates a graph of occurrences of that
expression in the recent history of the tree.

Prerequisites: git and GNU R (apt-get install r-base).
"

set -e

target="$1"

# Quote $target: an unquoted [ -z $target ] mis-evaluates (or errors) when
# the expression contains spaces or glob characters.
if [ -z "$target" ]; then
  echo "usage: $0 <grep-compatible expression>"
  echo
  echo "$about"
  exit 1
fi

datafile=$(mktemp -t tmp.XXXXXXXXXX)
# Single quotes defer expansion to trap time and keep paths with spaces safe.
trap 'rm -f "$datafile"' EXIT

# One sample per day over the last 90 days: check out the commit at that
# date and count matching lines in C/C++/ObjC sources.
echo 'ago count' > "$datafile"
for ago in {90..0}; do
  commit=$(git rev-list -1 --until="$ago days ago" origin/trunk)
  git checkout -q -f "$commit"
  count=$(git grep -E "$target" -- '*.cc' '*.h' '*.m' '*.mm' | wc -l)
  echo "-$ago $count" >> "$datafile"
  echo -n '.'
done

# Render the collected samples with R.
R CMD BATCH <(cat <<EOF
data = read.delim("$datafile", sep=' ')
png(width=600, height=300)
plot(count ~ ago, type="l", main="$target", xlab='days ago', data=data)
EOF
) /dev/null

echo done. # Primarily to add a newline after all the dots.
diff --git a/chromium/tools/git/mass-rename.py b/chromium/tools/git/mass-rename.py
new file mode 100755
index 00000000000..21fbef76b72
--- /dev/null
+++ b/chromium/tools/git/mass-rename.py
@@ -0,0 +1,50 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+mass-rename: update source files (gyp lists, #includes) to reflect
+a rename. Expects "git diff --cached -M" to list a bunch of renames.
+
+To use:
+ 1) git mv foo1 bar1; git mv foo2 bar2; etc.
+ 2) *without committing*, ./tools/git/mass-rename.py
+ 3) look at git diff (without --cached) to see what the damage is
+"""
+
+
+import os
+import subprocess
+import sys
+
+
+BASE_DIR = os.path.abspath(os.path.dirname(__file__))
+
+
def main():
  """Rewrites references for every rename staged in the git index.

  Reads the cached renames from 'git diff --cached --raw -M' and invokes
  move_source_file.py --already_moved for each one.

  Returns:
    0 on success, 1 if the git invocation failed.
  """
  # universal_newlines=True yields text output, keeping the '\t' split
  # working under Python 3 as well.
  popen = subprocess.Popen('git diff --cached --raw -M',
                           shell=True, stdout=subprocess.PIPE,
                           universal_newlines=True)
  out, _ = popen.communicate()
  if popen.returncode != 0:
    return 1
  for line in out.splitlines():
    # Raw format: "<attrs>\t<old path>\t<new path>" for renames.
    parts = line.split('\t')
    if len(parts) != 3:
      print('Skipping: %s -- not a rename?' % parts)
      continue
    attrs, fro, to = parts
    # The final status field is 'R<score>' for renames.
    if attrs.split()[4].startswith('R'):
      subprocess.check_call([
          sys.executable,
          os.path.join(BASE_DIR, 'move_source_file.py'),
          '--already_moved',
          '--no_error_for_non_source_file',
          fro, to])
    else:
      print('Skipping: %s -- not a rename?' % fro)
  return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/git/mass-rename.sh b/chromium/tools/git/mass-rename.sh
new file mode 100755
index 00000000000..f92814e70bb
--- /dev/null
+++ b/chromium/tools/git/mass-rename.sh
@@ -0,0 +1,17 @@
#!/bin/bash
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# mass-rename: update source files (gyp lists, #includes) to reflect
# a rename. Expects "git diff --cached -M" to list a bunch of renames.
#
# To use:
#   1) git mv foo1 bar1; git mv foo2 bar2; etc.
#   2) *without committing*, ./tools/git/mass-rename.sh
#   3) look at git diff (without --cached) to see what the damage is
#   4) commit, then use tools/sort-headers.py to fix #include ordering:
#      for f in $(git diff --name-only origin); do ./tools/sort-headers.py $f; done

# Thin wrapper: delegate to the Python implementation next to this script.
DIR="$( cd "$( dirname "$0" )" && pwd )"
python $DIR/mass-rename.py "$*"
diff --git a/chromium/tools/git/mffr.py b/chromium/tools/git/mffr.py
new file mode 100755
index 00000000000..d5b67c8c3f1
--- /dev/null
+++ b/chromium/tools/git/mffr.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Usage: mffr.py [-d] [-g *.h] [-g *.cc] REGEXP REPLACEMENT
+
+This tool performs a fast find-and-replace operation on files in
+the current git repository.
+
+The -d flag selects a default set of globs (C++ and Objective-C/C++
+source files). The -g flag adds a single glob to the list and may
+be used multiple times. If neither -d nor -g is specified, the tool
+searches all files (*.*).
+
+REGEXP uses full Python regexp syntax. REPLACEMENT can use
+back-references.
+"""
+
+import optparse
+import re
+import subprocess
+import sys
+
+
+# We need to use shell=True with subprocess on Windows so that it
+# finds 'git' from the path, but can lead to undesired behavior on
+# Linux.
+_USE_SHELL = (sys.platform == 'win32')
+
+
def MultiFileFindReplace(original, replacement, file_globs):
  r"""Implements fast multi-file find and replace.

  Given an |original| string and a |replacement| string, find matching
  files by running git grep on |original| in files matching any
  pattern in |file_globs|.

  Once files are found, |re.sub| is run to replace |original| with
  |replacement|. |replacement| may use capture group back-references.

  Args:
    original: '(#(include|import)\s*["<])chrome/browser/ui/browser.h([>"])'
    replacement: '\1chrome/browser/ui/browser/browser.h\3'
    file_globs: ['*.cc', '*.h', '*.m', '*.mm']

  Returns the list of files modified.

  Raises an exception on error.
  """
  # Posix extended regular expressions do not reliably support the "\s"
  # shorthand.
  posix_ere_original = re.sub(r"\\s", "[[:space:]]", original)
  if sys.platform == 'win32':
    posix_ere_original = posix_ere_original.replace('"', '""')
  # universal_newlines=True returns text so splitlines()/open('w') below
  # operate on str under Python 3 as well as Python 2.
  out, _ = subprocess.Popen(
      ['git', 'grep', '-E', '--name-only', posix_ere_original,
       '--'] + file_globs,
      stdout=subprocess.PIPE,
      universal_newlines=True,
      shell=_USE_SHELL).communicate()
  referees = out.splitlines()

  for referee in referees:
    with open(referee) as f:
      original_contents = f.read()
    contents = re.sub(original, replacement, original_contents)
    if contents == original_contents:
      raise Exception('No change in file %s although matched in grep' %
                      referee)
    # Write back in text mode: the content was read and substituted as
    # text, and writing str to a 'wb' handle breaks under Python 3.
    with open(referee, 'w') as f:
      f.write(contents)

  return referees
+
+
def main():
  """Parses options/arguments and performs the requested replacements.

  Returns:
    0 on success, 1 on a usage error or when the dirty-tree safety check
    fires.
  """
  parser = optparse.OptionParser(usage='''
(1) %prog <options> REGEXP REPLACEMENT
REGEXP uses full Python regexp syntax. REPLACEMENT can use back-references.

(2) %prog <options> -i <file>
<file> should contain a list (in Python syntax) of
[REGEXP, REPLACEMENT, [GLOBS]] lists, e.g.:
[
  [r"(foo|bar)", r"\1baz", ["*.cc", "*.h"]],
  ["54", "42"],
]
As shown above, [GLOBS] can be omitted for a given search-replace list, in which
case the corresponding search-replace will use the globs specified on the
command line.''')
  parser.add_option('-d', action='store_true',
                    dest='use_default_glob',
                    help='Perform the change on C++ and Objective-C(++) source '
                    'and header files.')
  parser.add_option('-f', action='store_true',
                    dest='force_unsafe_run',
                    help='Perform the run even if there are uncommitted local '
                    'changes.')
  parser.add_option('-g', action='append',
                    type='string',
                    default=[],
                    metavar="<glob>",
                    dest='user_supplied_globs',
                    help='Perform the change on the specified glob. Can be '
                    'specified multiple times, in which case the globs are '
                    'unioned.')
  parser.add_option('-i', "--input_file",
                    type='string',
                    action='store',
                    default='',
                    metavar="<file>",
                    dest='input_filename',
                    help='Read arguments from <file> rather than the command '
                    'line. NOTE: To be sure of regular expressions being '
                    'interpreted correctly, use raw strings.')
  opts, args = parser.parse_args()
  if opts.use_default_glob and opts.user_supplied_globs:
    print('"-d" and "-g" cannot be used together')
    parser.print_help()
    return 1

  from_file = opts.input_filename != ""
  if (from_file and len(args) != 0) or (not from_file and len(args) != 2):
    parser.print_help()
    return 1

  if not opts.force_unsafe_run:
    # Refuse to run on a dirty tree so a bad replacement stays trivially
    # revertible with git checkout.
    out, _ = subprocess.Popen(['git', 'status', '--porcelain'],
                              stdout=subprocess.PIPE,
                              universal_newlines=True,
                              shell=_USE_SHELL).communicate()
    if out:
      print('ERROR: This tool does not print any confirmation prompts,')
      print('so you should only run it with a clean staging area and cache')
      print('so that reverting a bad find/replace is as easy as running')
      print('    git checkout -- .')
      print('')
      print('To override this safeguard, pass the -f flag.')
      return 1

  global_file_globs = ['*.*']
  if opts.use_default_glob:
    global_file_globs = ['*.cc', '*.h', '*.m', '*.mm']
  elif opts.user_supplied_globs:
    global_file_globs = opts.user_supplied_globs

  # Construct list of search-replace tasks.
  search_replace_tasks = []
  if opts.input_filename == '':
    original = args[0]
    replacement = args[1]
    search_replace_tasks.append([original, replacement, global_file_globs])
  else:
    # SECURITY NOTE: the input file is eval()'d as Python, so it must come
    # from a trusted source; never point -i at untrusted data.
    with open(opts.input_filename) as f:
      search_replace_tasks = eval(f.read())
    for task in search_replace_tasks:
      if len(task) == 2:
        task.append(global_file_globs)

  for (original, replacement, file_globs) in search_replace_tasks:
    print('File globs: %s' % file_globs)
    print('Original: %s' % original)
    print('Replacement: %s' % replacement)
    MultiFileFindReplace(original, replacement, file_globs)
  return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/git/move_source_file.bat b/chromium/tools/git/move_source_file.bat
new file mode 100755
index 00000000000..eb988350150
--- /dev/null
+++ b/chromium/tools/git/move_source_file.bat
@@ -0,0 +1,6 @@
+@echo off
+setlocal
+:: This is required with cygwin only.
+PATH=%~dp0;%PATH%
+set PYTHONDONTWRITEBYTECODE=1
+call python "%~dp0move_source_file.py" %*
diff --git a/chromium/tools/git/move_source_file.py b/chromium/tools/git/move_source_file.py
new file mode 100755
index 00000000000..0569ea3601b
--- /dev/null
+++ b/chromium/tools/git/move_source_file.py
@@ -0,0 +1,265 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Moves C++ files to a new location, updating any include paths that point
+to them, and re-ordering headers as needed. If multiple source files are
+specified, the destination must be a directory. Updates include guards in
+moved header files. Assumes Chromium coding style.
+
+Attempts to update and reorder paths used in .gyp(i) files.
+
+Updates full-path references to files in // comments in source files.
+
+Must run in a git checkout, as it relies on git grep for a fast way to
+find files that reference the moved file.
+"""
+
+
+import optparse
+import os
+import re
+import subprocess
+import sys
+
+import mffr
+
+if __name__ == '__main__':
+ # Need to add the directory containing sort-headers.py to the Python
+ # classpath.
+ sys.path.append(os.path.abspath(os.path.join(sys.path[0], '..')))
+sort_headers = __import__('sort-headers')
+import sort_sources
+
+
# Source-file extensions this tool knows how to move and update.
HANDLED_EXTENSIONS = ['.cc', '.mm', '.h', '.hh', '.cpp']


def IsHandledFile(path):
  """Returns True if |path| has an extension this tool can handle."""
  _, extension = os.path.splitext(path)
  return extension in HANDLED_EXTENSIONS
+
+
def MakeDestinationPath(from_path, to_path):
  """Returns the full destination path for moving |from_path|.

  |to_path| may be either a complete file path or a directory; in the
  latter case the source file's basename is appended. Also performs
  basic sanity checks on both arguments.
  """
  if not IsHandledFile(from_path):
    raise Exception('Only intended to move individual source files '
                    '(%s does not have a recognized extension).' %
                    from_path)

  # Remove '.', '..', etc.
  to_path = os.path.normpath(to_path)

  if not os.path.isdir(to_path):
    # A concrete file path must carry a recognized extension.
    if os.path.splitext(to_path)[1] not in HANDLED_EXTENSIONS:
      raise Exception('Destination must be either a full path with '
                      'a recognized extension or a directory.')
    return to_path
  return os.path.join(to_path, os.path.basename(from_path))
+
+
def UpdateIncludePathForBlink(path):
  """Strips Blink's implicit include-root prefixes from |path|.

  Blink has its 'public' and 'Source' folders on the include search
  path, so those prefixes are omitted in include statements: for
  'public/foo/bar.h' the matching statement is '#include "foo/bar.h"'.
  """
  for root in ('public/', 'Source/'):
    if path.startswith(root):
      path = path[len(root):]
      break

  return path
+
+
def MoveFile(from_path, to_path):
  """Moves a file from |from_path| to |to_path| using 'git mv'.

  Raises:
    Exception: The git mv command failed.
  """
  # Pass an argument list instead of a formatted shell string so that
  # paths containing spaces or shell metacharacters survive intact.
  if subprocess.call(['git', 'mv', from_path, to_path]) != 0:
    raise Exception('Fatal: Failed to run git mv command.')
+
+
def UpdatePostMove(from_path, to_path, in_blink):
  """Given a file that has moved from |from_path| to |to_path|,
  updates the moved file's include guard to match the new path and
  updates all references to the file in other source files. Also tries
  to update references in .gyp(i) files using a heuristic.

  Args:
    from_path: Old path of the moved file, relative to the checkout root.
    to_path: New path of the moved file, relative to the checkout root.
    in_blink: Whether we operate inside the Blink checkout, where the
        'public'/'Source' prefixes are omitted from include paths.
  """
  # Include paths always use forward slashes.
  from_path = from_path.replace('\\', '/')
  to_path = to_path.replace('\\', '/')

  # Only header files carry include guards.
  if os.path.splitext(from_path)[1] in ['.h', '.hh']:
    UpdateIncludeGuard(from_path, to_path)

  from_include_path = from_path
  to_include_path = to_path
  if in_blink:
    from_include_path = UpdateIncludePathForBlink(from_include_path)
    to_include_path = UpdateIncludePathForBlink(to_include_path)

  # Update include/import references.
  files_with_changed_includes = mffr.MultiFileFindReplace(
      r'(#(include|import)\s*["<])%s([>"])' % re.escape(from_include_path),
      r'\1%s\3' % to_include_path,
      ['*.cc', '*.h', '*.m', '*.mm', '*.cpp'])

  # Reorder headers in files that changed.
  for changed_file in files_with_changed_includes:
    def AlwaysConfirm(a, b): return True
    sort_headers.FixFileWithConfirmFunction(changed_file, AlwaysConfirm, True,
                                            in_blink)

  # Update comments; only supports // comments, which are primarily
  # used in our code.
  #
  # This work takes a bit of time. If this script starts feeling too
  # slow, one good way to speed it up is to make the comment handling
  # optional under a flag.
  mffr.MultiFileFindReplace(
      r'(//.*)%s' % re.escape(from_path),
      r'\1%s' % to_path,
      ['*.cc', '*.h', '*.m', '*.mm', '*.cpp'])

  # Update references in GYP and BUILD.gn files.
  #
  # GYP files are mostly located under the first level directory (ex.
  # chrome/chrome_browser.gypi), but sometimes they are located in
  # directories at a deeper level (ex. extensions/shell/app_shell.gypi). On
  # the other hand, BUILD.gn files can be placed in any directories.
  #
  # Paths in a GYP or BUILD.gn file are relative to the directory where the
  # file is placed.
  #
  # For instance, "chrome/browser/chromeos/device_uma.h" is listed as
  # "browser/chromeos/device_uma.h" in "chrome/chrome_browser_chromeos.gypi",
  # but it's listed as "device_uma.h" in "chrome/browser/chromeos/BUILD.gn".
  #
  # To handle this, the code here will visit directories from the top level
  # src directory to the directory of |from_path| and try to update GYP and
  # BUILD.gn files in each directory.
  #
  # The code only handles files moved/renamed within the same build file. If
  # files are moved beyond the same build file, the affected build files
  # should be fixed manually.
  def SplitByFirstComponent(path):
    """'foo/bar/baz' -> ('foo', 'bar/baz')
    'bar' -> ('bar', '')
    '' -> ('', '')
    """
    parts = re.split(r"[/\\]", path, 1)
    if len(parts) == 2:
      return (parts[0], parts[1])
    else:
      return (parts[0], '')

  visiting_directory = ''
  from_rest = from_path
  to_rest = to_path
  while True:
    files_with_changed_sources = mffr.MultiFileFindReplace(
        r'([\'"])%s([\'"])' % from_rest,
        r'\1%s\2' % to_rest,
        [os.path.join(visiting_directory, 'BUILD.gn'),
         os.path.join(visiting_directory, '*.gyp*')])
    for changed_file in files_with_changed_sources:
      sort_sources.ProcessFile(changed_file, should_confirm=False)
    from_first, from_rest = SplitByFirstComponent(from_rest)
    to_first, to_rest = SplitByFirstComponent(to_rest)
    visiting_directory = os.path.join(visiting_directory, from_first)
    # Stop when the remaining relative paths are exhausted or identical:
    # deeper build files can no longer mention a differing portion.
    if not from_rest or not to_rest or from_rest == to_rest:
      break
+
+
def MakeIncludeGuardName(path_from_root):
  """Returns the Chromium-style include guard for |path_from_root|."""
  translated = path_from_root.replace('/', '_').replace('\\', '_')
  translated = translated.replace('.', '_') + '_'
  return translated.upper()
+
+
def UpdateIncludeGuard(old_path, new_path):
  """Rewrites the include guard of the header now residing at |new_path|.

  The guard derived from |old_path| is replaced with one derived from
  |new_path|. Prints a warning if the update could not be completed
  successfully (e.g., because the old include guard was not formatted
  correctly per Chromium style).
  """
  old_guard = MakeIncludeGuardName(old_path)
  new_guard = MakeIncludeGuardName(new_path)

  with open(new_path) as header:
    updated = header.read().replace(old_guard, new_guard)

  # A well-formed header mentions its guard exactly three times: the
  # #ifndef, the #define, and the comment on the closing #endif.
  if updated.count(new_guard) != 3:
    print ('WARNING: Could not successfully update include guard; perhaps '
           'old guard is not per style guide? You will have to update the '
           'include guard manually. (%s)' % new_path)

  with open(new_path, 'w') as header:
    header.write(updated)
+
def main():
  """Parses arguments, moves each file, and updates all references.

  Returns:
    0 on success, 1 on a usage or environment error.
  """
  # We use "git rev-parse" to check if the script is run from a git checkout. It
  # returns 0 even when run in the .git directory. We don't want people running
  # this in the .git directory.
  if (os.system('git rev-parse') != 0 or
      os.path.basename(os.getcwd()) == '.git'):
    print('Fatal: You must run in a git checkout.')
    return 1

  cwd = os.getcwd()
  parent = os.path.dirname(cwd)
  in_blink = (os.path.basename(parent) == 'third_party' and
              os.path.basename(cwd) == 'WebKit')

  parser = optparse.OptionParser(usage='%prog FROM_PATH... TO_PATH')
  parser.add_option('--already_moved', action='store_true',
                    dest='already_moved',
                    help='Causes the script to skip moving the file.')
  # The default was previously the *string* 'True'; it only worked because
  # any non-empty string is truthy. Use the actual boolean.
  parser.add_option('--no_error_for_non_source_file', action='store_false',
                    default=True,
                    dest='error_for_non_source_file',
                    help='Causes the script to simply print a warning on '
                    'encountering a non-source file rather than raising an '
                    'error.')
  opts, args = parser.parse_args()

  if len(args) < 2:
    parser.print_help()
    return 1

  from_paths = args[:-1]
  orig_to_path = args[-1]

  if len(from_paths) > 1 and not os.path.isdir(orig_to_path):
    print('Target %s is not a directory.' % orig_to_path)
    print()
    parser.print_help()
    return 1

  for from_path in from_paths:
    if not opts.error_for_non_source_file and not IsHandledFile(from_path):
      print('%s does not appear to be a source file, skipping' % (from_path))
      continue
    to_path = MakeDestinationPath(from_path, orig_to_path)
    if not opts.already_moved:
      MoveFile(from_path, to_path)
    UpdatePostMove(from_path, to_path, in_blink)
  return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/git/post-checkout b/chromium/tools/git/post-checkout
new file mode 100755
index 00000000000..452eb48eb45
--- /dev/null
+++ b/chromium/tools/git/post-checkout
@@ -0,0 +1,22 @@
#!/bin/bash
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Git post-checkout hook: warns when DEPS changed between the old and new
# HEAD (or has local modifications), which usually means "gclient sync"
# should be re-run.

# Resolve a symlinked hook so git-utils.sh is found next to the real script.
script=$(readlink $0)
source $(dirname ${script:-$0})/git-utils.sh

old_ref=$1  # Previous HEAD.
new_ref=$2  # Current HEAD.
branch_switch=$3  # Whether we switched branches.

if [ $old_ref == $new_ref ]; then
  # Same commit: only local, uncommitted DEPS edits can matter.
  if ! git diff-index --quiet HEAD $(git rev-parse --show-cdup)DEPS; then
    warn "DEPS has local modifications; do you need to re-run gclient sync?"
  fi
else
  # Different commits: check whether the checkout itself touched DEPS.
  if git diff-tree $old_ref $new_ref | grep -qs $'\tDEPS$'; then
    warn "DEPS has changed; you probably need to re-run gclient sync."
  fi
fi
+
diff --git a/chromium/tools/git/post-merge b/chromium/tools/git/post-merge
new file mode 100755
index 00000000000..8b774ce6593
--- /dev/null
+++ b/chromium/tools/git/post-merge
@@ -0,0 +1,12 @@
#!/bin/bash
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Git post-merge hook: warns when the merge changed DEPS, which usually
# means "gclient sync" should be re-run.

# Resolve a symlinked hook so git-utils.sh is found next to the real script.
script=$(readlink $0)
source $(dirname ${script:-$0})/git-utils.sh

if git diff-tree ORIG_HEAD HEAD | grep -qs $'\tDEPS$'; then
  warn "DEPS has changed; you probably need to re-run gclient sync."
fi
+
diff --git a/chromium/tools/git/update-copyrights.sh b/chromium/tools/git/update-copyrights.sh
new file mode 100755
index 00000000000..ac69bd53e64
--- /dev/null
+++ b/chromium/tools/git/update-copyrights.sh
@@ -0,0 +1,7 @@
#!/bin/bash
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# Kept only to tell users of the old workflow that this step is obsolete.
echo 'Updating copyrights is no longer necessary.'
echo 'See https://groups.google.com/a/chromium.org/d/msg/chromium-dev/8p4JKV76kig/OiFYFjuZ6nAJ'
diff --git a/chromium/tools/gn/BUILD.gn b/chromium/tools/gn/BUILD.gn
new file mode 100644
index 00000000000..2728c0d0906
--- /dev/null
+++ b/chromium/tools/gn/BUILD.gn
@@ -0,0 +1,331 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//testing/test.gni")
+
+defines = [ "GN_BUILD" ]
+
+static_library("gn_lib") {
+ configs += [ "//build/config:precompiled_headers" ]
+
+ sources = [
+ "action_target_generator.cc",
+ "action_target_generator.h",
+ "action_values.cc",
+ "action_values.h",
+ "args.cc",
+ "args.h",
+ "binary_target_generator.cc",
+ "binary_target_generator.h",
+ "build_settings.cc",
+ "build_settings.h",
+ "builder.cc",
+ "builder.h",
+ "builder_record.cc",
+ "builder_record.h",
+ "bundle_data.cc",
+ "bundle_data.h",
+ "bundle_data_target_generator.cc",
+ "bundle_data_target_generator.h",
+ "bundle_file_rule.cc",
+ "bundle_file_rule.h",
+ "c_include_iterator.cc",
+ "c_include_iterator.h",
+ "command_args.cc",
+ "command_check.cc",
+ "command_clean.cc",
+ "command_desc.cc",
+ "command_format.cc",
+ "command_gen.cc",
+ "command_help.cc",
+ "command_ls.cc",
+ "command_path.cc",
+ "command_refs.cc",
+ "commands.cc",
+ "commands.h",
+ "config.cc",
+ "config.h",
+ "config_values.cc",
+ "config_values.h",
+ "config_values_extractors.cc",
+ "config_values_extractors.h",
+ "config_values_generator.cc",
+ "config_values_generator.h",
+ "copy_target_generator.cc",
+ "copy_target_generator.h",
+ "create_bundle_target_generator.cc",
+ "create_bundle_target_generator.h",
+ "deps_iterator.cc",
+ "deps_iterator.h",
+ "eclipse_writer.cc",
+ "eclipse_writer.h",
+ "err.cc",
+ "err.h",
+ "escape.cc",
+ "escape.h",
+ "exec_process.cc",
+ "exec_process.h",
+ "filesystem_utils.cc",
+ "filesystem_utils.h",
+ "function_exec_script.cc",
+ "function_foreach.cc",
+ "function_forward_variables_from.cc",
+ "function_get_label_info.cc",
+ "function_get_path_info.cc",
+ "function_get_target_outputs.cc",
+ "function_process_file_template.cc",
+ "function_read_file.cc",
+ "function_rebase_path.cc",
+ "function_set_default_toolchain.cc",
+ "function_set_defaults.cc",
+ "function_template.cc",
+ "function_toolchain.cc",
+ "function_write_file.cc",
+ "functions.cc",
+ "functions.h",
+ "functions_target.cc",
+ "group_target_generator.cc",
+ "group_target_generator.h",
+ "header_checker.cc",
+ "header_checker.h",
+ "import_manager.cc",
+ "import_manager.h",
+ "inherited_libraries.cc",
+ "inherited_libraries.h",
+ "input_conversion.cc",
+ "input_conversion.h",
+ "input_file.cc",
+ "input_file.h",
+ "input_file_manager.cc",
+ "input_file_manager.h",
+ "item.cc",
+ "item.h",
+ "label.cc",
+ "label.h",
+ "label_pattern.cc",
+ "label_pattern.h",
+ "label_ptr.h",
+ "lib_file.cc",
+ "lib_file.h",
+ "loader.cc",
+ "loader.h",
+ "location.cc",
+ "location.h",
+ "ninja_action_target_writer.cc",
+ "ninja_action_target_writer.h",
+ "ninja_binary_target_writer.cc",
+ "ninja_binary_target_writer.h",
+ "ninja_build_writer.cc",
+ "ninja_build_writer.h",
+ "ninja_bundle_data_target_writer.cc",
+ "ninja_bundle_data_target_writer.h",
+ "ninja_copy_target_writer.cc",
+ "ninja_copy_target_writer.h",
+ "ninja_create_bundle_target_writer.cc",
+ "ninja_create_bundle_target_writer.h",
+ "ninja_group_target_writer.cc",
+ "ninja_group_target_writer.h",
+ "ninja_target_writer.cc",
+ "ninja_target_writer.h",
+ "ninja_toolchain_writer.cc",
+ "ninja_toolchain_writer.h",
+ "ninja_utils.cc",
+ "ninja_utils.h",
+ "ninja_writer.cc",
+ "ninja_writer.h",
+ "operators.cc",
+ "operators.h",
+ "output_file.cc",
+ "output_file.h",
+ "parse_tree.cc",
+ "parse_tree.h",
+ "parser.cc",
+ "parser.h",
+ "path_output.cc",
+ "path_output.h",
+ "pattern.cc",
+ "pattern.h",
+ "runtime_deps.cc",
+ "runtime_deps.h",
+ "scheduler.cc",
+ "scheduler.h",
+ "scope.cc",
+ "scope.h",
+ "scope_per_file_provider.cc",
+ "scope_per_file_provider.h",
+ "settings.cc",
+ "settings.h",
+ "setup.cc",
+ "setup.h",
+ "source_dir.cc",
+ "source_dir.h",
+ "source_file.cc",
+ "source_file.h",
+ "source_file_type.cc",
+ "source_file_type.h",
+ "standard_out.cc",
+ "standard_out.h",
+ "string_utils.cc",
+ "string_utils.h",
+ "substitution_list.cc",
+ "substitution_list.h",
+ "substitution_pattern.cc",
+ "substitution_pattern.h",
+ "substitution_type.cc",
+ "substitution_type.h",
+ "substitution_writer.cc",
+ "substitution_writer.h",
+ "switches.cc",
+ "switches.h",
+ "target.cc",
+ "target.h",
+ "target_generator.cc",
+ "target_generator.h",
+ "template.cc",
+ "template.h",
+ "token.cc",
+ "token.h",
+ "tokenizer.cc",
+ "tokenizer.h",
+ "tool.cc",
+ "tool.h",
+ "toolchain.cc",
+ "toolchain.h",
+ "trace.cc",
+ "trace.h",
+ "unique_vector.h",
+ "value.cc",
+ "value.h",
+ "value_extractors.cc",
+ "value_extractors.h",
+ "variables.cc",
+ "variables.h",
+ "visibility.cc",
+ "visibility.h",
+ "visual_studio_utils.cc",
+ "visual_studio_utils.h",
+ "visual_studio_writer.cc",
+ "visual_studio_writer.h",
+ "xml_element_writer.cc",
+ "xml_element_writer.h",
+ ]
+
+ deps = [
+ "//base",
+ "//base/third_party/dynamic_annotations",
+ ]
+}
+
+action("last_commit_position") {
+ script = "last_commit_position.py"
+
+ # This dependency forces a re-run when the code is synced.
+ inputs = [
+ "//build/util/LASTCHANGE",
+ ]
+
+ outfile = "$target_gen_dir/last_commit_position.h"
+ outputs = [
+ outfile,
+ ]
+
+ args = [
+ rebase_path("//", root_build_dir),
+ rebase_path(outfile, root_build_dir),
+ "TOOLS_GN_LAST_COMMIT_POSITION_H_",
+ ]
+}
+
+# Note for Windows debugging: GN is super-multithreaded and uses a lot of STL.
+# Iterator debugging on Windows does locking for every access, which ends up
+# slowing down debug runtime from 0:36 to 9:40. If you want to run debug builds
+# of GN over the large Chrome build, you will want to set the arg:
+# enable_iterator_debugging = false
+executable("gn") {
+ sources = [
+ "gn_main.cc",
+ ]
+
+ deps = [
+ ":gn_lib",
+ ":last_commit_position",
+ "//base",
+ "//build/config/sanitizers:deps",
+ ]
+}
+
+test("gn_unittests") {
+ sources = [
+ "action_target_generator_unittest.cc",
+ "args_unittest.cc",
+ "builder_unittest.cc",
+ "c_include_iterator_unittest.cc",
+ "command_format_unittest.cc",
+ "config_unittest.cc",
+ "config_values_extractors_unittest.cc",
+ "escape_unittest.cc",
+ "exec_process_unittest.cc",
+ "filesystem_utils_unittest.cc",
+ "function_foreach_unittest.cc",
+ "function_forward_variables_from_unittest.cc",
+ "function_get_label_info_unittest.cc",
+ "function_get_path_info_unittest.cc",
+ "function_get_target_outputs_unittest.cc",
+ "function_process_file_template_unittest.cc",
+ "function_rebase_path_unittest.cc",
+ "function_write_file_unittest.cc",
+ "functions_target_unittest.cc",
+ "functions_unittest.cc",
+ "group_target_generator_unittest.cc",
+ "header_checker_unittest.cc",
+ "inherited_libraries_unittest.cc",
+ "input_conversion_unittest.cc",
+ "label_pattern_unittest.cc",
+ "label_unittest.cc",
+ "loader_unittest.cc",
+ "ninja_action_target_writer_unittest.cc",
+ "ninja_binary_target_writer_unittest.cc",
+ "ninja_build_writer_unittest.cc",
+ "ninja_copy_target_writer_unittest.cc",
+ "ninja_create_bundle_target_writer_unittest.cc",
+ "ninja_group_target_writer_unittest.cc",
+ "ninja_target_writer_unittest.cc",
+ "ninja_toolchain_writer_unittest.cc",
+ "operators_unittest.cc",
+ "parse_tree_unittest.cc",
+ "parser_unittest.cc",
+ "path_output_unittest.cc",
+ "pattern_unittest.cc",
+ "runtime_deps_unittest.cc",
+ "scope_per_file_provider_unittest.cc",
+ "scope_unittest.cc",
+ "source_dir_unittest.cc",
+ "source_file_unittest.cc",
+ "string_utils_unittest.cc",
+ "substitution_pattern_unittest.cc",
+ "substitution_writer_unittest.cc",
+ "target_unittest.cc",
+ "template_unittest.cc",
+ "test_with_scope.cc",
+ "test_with_scope.h",
+ "tokenizer_unittest.cc",
+ "unique_vector_unittest.cc",
+ "value_unittest.cc",
+ "visibility_unittest.cc",
+ "visual_studio_utils_unittest.cc",
+ "visual_studio_writer_unittest.cc",
+ "xml_element_writer_unittest.cc",
+ ]
+
+ data = [
+ "format_test_data/",
+ ]
+
+ deps = [
+ ":gn_lib",
+ "//base/test:run_all_unittests",
+ "//base/test:test_support",
+ "//testing/gtest",
+ ]
+}
diff --git a/chromium/tools/gn/OWNERS b/chromium/tools/gn/OWNERS
new file mode 100644
index 00000000000..06fefbf4ecc
--- /dev/null
+++ b/chromium/tools/gn/OWNERS
@@ -0,0 +1 @@
+brettw@chromium.org
diff --git a/chromium/tools/gn/README.md b/chromium/tools/gn/README.md
new file mode 100644
index 00000000000..9d92d9c21eb
--- /dev/null
+++ b/chromium/tools/gn/README.md
@@ -0,0 +1,86 @@
+# What is GN?
+
+GN is a meta-build system that generates
+[NinjaBuild](https://chromium.googlesource.com/chromium/src/+/master/docs/ninja_build.md)
+files. It's meant to be faster and simpler than GYP. It outputs only Ninja build
+files.
+
+## Why bother with GN?
+
+1. We believe GN files are more readable and more maintainable
+ than GYP files.
+2. GN is fast:
+ * GN is 20x faster than GYP.
+ * GN supports automatically re-running itself as needed by Ninja
+ as part of the build. This eliminates the need to remember to
+ re-run GN when you change a build file.
+3. GN gives us better tools for enforcing dependencies (see
+ `gn check` and the `visibility`, `public_deps`, and `data_deps`
+ options for some examples).
+4. GN gives us tools for querying the build graph; you can ask
+ "what does X depend on" and "who depends on Y", for example.
+
+## What's the status?
+
+_as of March 2016:_
+
+ * Linux and Android: Complete but not quite all bots have been flipped.
+ Developers should be using GN on these platforms for most work.
+ * Windows and ChromeOS: Should be usable for almost all development. Some
+ bots need to be flipped. cros_sdk builds don't work on ChromeOS yet.
+ * Mac and iOS: Work is in progress, generally not usable for normal
+ development.
+
+## When are you going to be done?
+
+_as of March 2016:_
+
+We expect most work to be finishing up between now and the end of Q2.
+
+## What does "done" mean?
+
+Ideally we're done when all of the GYP files have been deleted from
+Chromium and no one misses them.
+
+We will be "mostly" done when the following are true:
+
+ * All of the bots maintained by the Chrome infra team for Chromium and
+ downstream of Chromium have been switched to GN. (Upstream projects
+ like Skia and V8 can choose to stay on GYP if they like).
+ * Any developer configurations we care about that don't have bots also
+ work (Generally speaking, we're aiming to not have any of these.
+ * Configurations we care about should have bots to ensure they don't
+ break). We have verified that all of the tests pass. We have
+ verified that the command lines match in the above configurations as
+ much as possible, and we accept any differences. We have reviewed
+ any binary differences that result in the official builds and
+ accepted them. The GN files are the "source of truth" for the
+ build, and normal chromium developers normally do not need to touch
+ GYP files to keep things working. We have replacements for the
+ hybrid "msvs-ninja" and "xcode-ninja" configurations that GYP can
+ currently build.
+
+The difference between "mostly done" and "done" exists to cover any
+issues we haven't yet identified :)
+
+## How can I help?
+
+We could especially use infrastructure/bot, Mac, and iOS help.
+
+Or, look at [the list of open bugs](https://code.google.com/p/chromium/issues/list?can=2&q=label:Proj-GN-Migration%20-type:Project&sort=pri&colspec=ID%20Pri%20Summary%20Type%20OS%20Owner%20Status%20Modified%20Blocking) related to the migration and see if there's anything that catches your fancy.
+
+## I want more info on GN!
+
+Read these links:
+
+ * [Quick start](https://chromium.googlesource.com/chromium/src/+/master/tools/gn/docs/quick_start.md)
+ * [FAQ](https://chromium.googlesource.com/chromium/src/+/master/tools/gn/docs/faq.md)
+ * [GYP conversion cookbook](https://chromium.googlesource.com/chromium/src/+/master/tools/gn/docs/cookbook.md)
+ * [Language and operation details](https://chromium.googlesource.com/chromium/src/+/master/tools/gn/docs/language.md)
+ * [Reference](https://chromium.googlesource.com/chromium/src/+/master/tools/gn/docs/reference.md) The built-in `gn help` documentation.
+ * [Style guide](https://chromium.googlesource.com/chromium/src/+/master/tools/gn/docs/style_guide.md)
+ * [Cross compiling and toolchains](https://chromium.googlesource.com/chromium/src/+/master/tools/gn/docs/cross_compiles.md)
+ * [Hacking on GN itself](https://chromium.googlesource.com/chromium/src/+/master/tools/gn/docs/hacking.md)
+ * [GNStandalone](https://chromium.googlesource.com/chromium/src/+/master/tools/gn/docs/standalone.md) Standalone GN projects
+ * [UpdateGNBinaries](https://chromium.googlesource.com/chromium/src/+/master/tools/gn/docs/update_binaries.md) Pushing new binaries
+ * [Check](https://chromium.googlesource.com/chromium/src/+/master/tools/gn/docs/check.md) `gn check` command reference
diff --git a/chromium/tools/gn/action_target_generator.cc b/chromium/tools/gn/action_target_generator.cc
new file mode 100644
index 00000000000..6f962b6dd2a
--- /dev/null
+++ b/chromium/tools/gn/action_target_generator.cc
@@ -0,0 +1,185 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/action_target_generator.h"
+
+#include "tools/gn/build_settings.h"
+#include "tools/gn/err.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/value.h"
+#include "tools/gn/value_extractors.h"
+#include "tools/gn/variables.h"
+
+ActionTargetGenerator::ActionTargetGenerator(
+ Target* target,
+ Scope* scope,
+ const FunctionCallNode* function_call,
+ Target::OutputType type,
+ Err* err)
+ : TargetGenerator(target, scope, function_call, err),
+ output_type_(type) {
+}
+
+ActionTargetGenerator::~ActionTargetGenerator() {
+}
+
+void ActionTargetGenerator::DoRun() {
+ target_->set_output_type(output_type_);
+
+ if (!FillSources())
+ return;
+ if (output_type_ == Target::ACTION_FOREACH && target_->sources().empty()) {
+ // Foreach rules must always have some sources to have an effect.
+ *err_ = Err(function_call_, "action_foreach target has no sources.",
+ "If you don't specify any sources, there is nothing to run your\n"
+ "script over.");
+ return;
+ }
+
+ if (!FillInputs())
+ return;
+
+ if (!FillScript())
+ return;
+
+ if (!FillScriptArgs())
+ return;
+
+ if (!FillResponseFileContents())
+ return;
+
+ if (!FillOutputs(output_type_ == Target::ACTION_FOREACH))
+ return;
+
+ if (!FillDepfile())
+ return;
+
+ if (!FillConsole())
+ return;
+
+ if (!FillCheckIncludes())
+ return;
+
+ if (!CheckOutputs())
+ return;
+
+ // Action outputs don't depend on the current toolchain so we can skip adding
+ // that dependency.
+
+ // response_file_contents and {{response_file_name}} in the args must go
+ // together.
+ const auto& required_args_substitutions =
+ target_->action_values().args().required_types();
+ bool has_rsp_file_name = std::find(required_args_substitutions.begin(),
+ required_args_substitutions.end(),
+ SUBSTITUTION_RSP_FILE_NAME) !=
+ required_args_substitutions.end();
+ if (target_->action_values().uses_rsp_file() && !has_rsp_file_name) {
+ *err_ = Err(function_call_, "Missing {{response_file_name}} in args.",
+ "This target defines response_file_contents but doesn't use\n"
+ "{{response_file_name}} in the args, which means the response file\n"
+ "will be unused.");
+ return;
+ }
+ if (!target_->action_values().uses_rsp_file() && has_rsp_file_name) {
+ *err_ = Err(function_call_, "Missing response_file_contents definition.",
+ "This target uses {{response_file_name}} in the args, but does not\n"
+ "define response_file_contents which means the response file\n"
+ "will be empty.");
+ return;
+ }
+}
+
+bool ActionTargetGenerator::FillScript() {
+ // If this gets called, the target type requires a script, so error out
+ // if it doesn't have one.
+ const Value* value = scope_->GetValue(variables::kScript, true);
+ if (!value) {
+ *err_ = Err(function_call_, "This target type requires a \"script\".");
+ return false;
+ }
+ if (!value->VerifyTypeIs(Value::STRING, err_))
+ return false;
+
+ SourceFile script_file =
+ scope_->GetSourceDir().ResolveRelativeFile(
+ *value, err_,
+ scope_->settings()->build_settings()->root_path_utf8());
+ if (err_->has_error())
+ return false;
+ target_->action_values().set_script(script_file);
+ return true;
+}
+
+bool ActionTargetGenerator::FillScriptArgs() {
+ const Value* value = scope_->GetValue(variables::kArgs, true);
+ if (!value)
+ return true;
+ return target_->action_values().args().Parse(*value, err_);
+}
+
+bool ActionTargetGenerator::FillResponseFileContents() {
+ const Value* value = scope_->GetValue(variables::kResponseFileContents, true);
+ if (!value)
+ return true;
+ return target_->action_values().rsp_file_contents().Parse(*value, err_);
+}
+
+bool ActionTargetGenerator::FillDepfile() {
+ const Value* value = scope_->GetValue(variables::kDepfile, true);
+ if (!value)
+ return true;
+
+ SubstitutionPattern depfile;
+ if (!depfile.Parse(*value, err_))
+ return false;
+ if (!EnsureSubstitutionIsInOutputDir(depfile, *value))
+ return false;
+
+ target_->action_values().set_depfile(depfile);
+ return true;
+}
+
+bool ActionTargetGenerator::FillConsole() {
+ const Value* value = scope_->GetValue(variables::kConsole, true);
+ if (!value)
+ return true;
+ if (!value->VerifyTypeIs(Value::BOOLEAN, err_))
+ return false;
+ target_->action_values().set_console(value->boolean_value());
+ return true;
+}
+
+bool ActionTargetGenerator::CheckOutputs() {
+ const SubstitutionList& outputs = target_->action_values().outputs();
+ if (outputs.list().empty()) {
+ *err_ = Err(function_call_, "Action has no outputs.",
+ "If you have no outputs, the build system can not tell when your\n"
+ "script needs to be run.");
+ return false;
+ }
+
+ if (output_type_ == Target::ACTION) {
+ if (!outputs.required_types().empty()) {
+ *err_ = Err(function_call_, "Action has patterns in the output.",
+ "An action target should have the outputs completely specified. If\n"
+ "you want to provide a mapping from source to output, use an\n"
+ "\"action_foreach\" target.");
+ return false;
+ }
+ } else if (output_type_ == Target::ACTION_FOREACH) {
+ // A foreach target should always have a pattern in the outputs.
+ if (outputs.required_types().empty()) {
+ *err_ = Err(function_call_,
+ "action_foreach should have a pattern in the output.",
+ "An action_foreach target should have a source expansion pattern in\n"
+ "it to map source file to unique output file name. Otherwise, the\n"
+ "build system can't determine when your script needs to be run.");
+ return false;
+ }
+ }
+ return true;
+}
diff --git a/chromium/tools/gn/action_target_generator.h b/chromium/tools/gn/action_target_generator.h
new file mode 100644
index 00000000000..0a69edab4e2
--- /dev/null
+++ b/chromium/tools/gn/action_target_generator.h
@@ -0,0 +1,40 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_ACTION_TARGET_GENERATOR_H_
+#define TOOLS_GN_ACTION_TARGET_GENERATOR_H_
+
+#include "base/macros.h"
+#include "tools/gn/target.h"
+#include "tools/gn/target_generator.h"
+
+// Populates a Target with the values from an action[_foreach] rule.
+class ActionTargetGenerator : public TargetGenerator {
+ public:
+ ActionTargetGenerator(Target* target,
+ Scope* scope,
+ const FunctionCallNode* function_call,
+ Target::OutputType type,
+ Err* err);
+ ~ActionTargetGenerator() override;
+
+ protected:
+ void DoRun() override;
+
+ private:
+ bool FillScript();
+ bool FillScriptArgs();
+ bool FillResponseFileContents();
+ bool FillDepfile();
+ bool FillConsole();
+
+ // Checks for errors in the outputs variable.
+ bool CheckOutputs();
+
+ Target::OutputType output_type_;
+
+ DISALLOW_COPY_AND_ASSIGN(ActionTargetGenerator);
+};
+
+#endif // TOOLS_GN_ACTION_TARGET_GENERATOR_H_
diff --git a/chromium/tools/gn/action_target_generator_unittest.cc b/chromium/tools/gn/action_target_generator_unittest.cc
new file mode 100644
index 00000000000..8f3e4d4c332
--- /dev/null
+++ b/chromium/tools/gn/action_target_generator_unittest.cc
@@ -0,0 +1,42 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/test_with_scope.h"
+
+// Tests that actions can't have output substitutions.
+TEST(ActionTargetGenerator, ActionOutputSubstitutions) {
+ Scheduler scheduler;
+ TestWithScope setup;
+ Scope::ItemVector items_;
+ setup.scope()->set_item_collector(&items_);
+
+ // First test one with no substitutions, this should be valid.
+ TestParseInput input_good(
+ "action(\"foo\") {\n"
+ " script = \"//foo.py\"\n"
+ " sources = [ \"//bar.txt\" ]\n"
+ " outputs = [ \"//out/Debug/one.txt\" ]\n"
+ "}");
+ ASSERT_FALSE(input_good.has_error());
+
+ // This should run fine.
+ Err err;
+ input_good.parsed()->Execute(setup.scope(), &err);
+ ASSERT_FALSE(err.has_error()) << err.message();
+
+ // Same thing with a pattern in the output should fail.
+ TestParseInput input_bad(
+ "action(\"foo\") {\n"
+ " script = \"//foo.py\"\n"
+ " sources = [ \"//bar.txt\" ]\n"
+ " outputs = [ \"//out/Debug/{{source_name_part}}.txt\" ]\n"
+ "}");
+ ASSERT_FALSE(input_bad.has_error());
+
+ // This should run fine.
+ input_bad.parsed()->Execute(setup.scope(), &err);
+ ASSERT_TRUE(err.has_error());
+}
diff --git a/chromium/tools/gn/action_values.cc b/chromium/tools/gn/action_values.cc
new file mode 100644
index 00000000000..a7ce83da056
--- /dev/null
+++ b/chromium/tools/gn/action_values.cc
@@ -0,0 +1,31 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/action_values.h"
+
+#include "tools/gn/settings.h"
+#include "tools/gn/substitution_writer.h"
+#include "tools/gn/target.h"
+
+ActionValues::ActionValues() : console_(false) {}
+
+ActionValues::~ActionValues() {}
+
+void ActionValues::GetOutputsAsSourceFiles(
+ const Target* target,
+ std::vector<SourceFile>* result) const {
+ if (target->output_type() == Target::BUNDLE_DATA) {
+ // The bundle_data target has no output, the real output will be generated
+ // by the create_bundle target.
+ } else if (target->output_type() == Target::COPY_FILES ||
+ target->output_type() == Target::ACTION_FOREACH) {
+ // Copy and foreach applies the outputs to the sources.
+ SubstitutionWriter::ApplyListToSources(
+ target->settings(), outputs_, target->sources(), result);
+ } else {
+ // Actions (and anything else that happens to specify an output) just use
+ // the output list with no substitution.
+ SubstitutionWriter::GetListAsSourceFiles(outputs_, result);
+ }
+}
diff --git a/chromium/tools/gn/action_values.h b/chromium/tools/gn/action_values.h
new file mode 100644
index 00000000000..879ccf6ef69
--- /dev/null
+++ b/chromium/tools/gn/action_values.h
@@ -0,0 +1,68 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_ACTION_VALUES_H_
+#define TOOLS_GN_ACTION_VALUES_H_
+
+#include <string>
+#include <vector>
+
+#include "base/macros.h"
+#include "tools/gn/source_file.h"
+#include "tools/gn/substitution_list.h"
+
+class Target;
+
+// Holds the values (outputs, args, script name, etc.) for either an action or
+// an action_foreach target.
+class ActionValues {
+ public:
+ ActionValues();
+ ~ActionValues();
+
+ // Filename of the script to execute.
+ const SourceFile& script() const { return script_; }
+ void set_script(const SourceFile& s) { script_ = s; }
+
+ // Arguments to the script.
+ SubstitutionList& args() { return args_; }
+ const SubstitutionList& args() const { return args_; }
+
+ // Files created by the script. These are strings rather than SourceFiles
+ // since they will often contain {{source expansions}}.
+ SubstitutionList& outputs() { return outputs_; }
+ const SubstitutionList& outputs() const { return outputs_; }
+
+ // Expands the outputs() above to the final SourceFile list.
+ void GetOutputsAsSourceFiles(const Target* target,
+ std::vector<SourceFile>* result) const;
+
+ // Depfile generated by the script.
+ const SubstitutionPattern& depfile() const { return depfile_; }
+ bool has_depfile() const { return !depfile_.ranges().empty(); }
+ void set_depfile(const SubstitutionPattern& depfile) { depfile_ = depfile; }
+
+ // Response file contents. Empty means no response file.
+ SubstitutionList& rsp_file_contents() { return rsp_file_contents_; }
+ const SubstitutionList& rsp_file_contents() const {
+ return rsp_file_contents_;
+ }
+ bool uses_rsp_file() const { return !rsp_file_contents_.list().empty(); }
+
+ // Console pool option
+ bool is_console() const { return console_; }
+ void set_console(bool value) { console_ = value; }
+
+ private:
+ SourceFile script_;
+ SubstitutionList args_;
+ SubstitutionList outputs_;
+ SubstitutionPattern depfile_;
+ SubstitutionList rsp_file_contents_;
+ bool console_;
+
+ DISALLOW_COPY_AND_ASSIGN(ActionValues);
+};
+
+#endif // TOOLS_GN_ACTION_VALUES_H_
diff --git a/chromium/tools/gn/args.cc b/chromium/tools/gn/args.cc
new file mode 100644
index 00000000000..60ee7b2ce33
--- /dev/null
+++ b/chromium/tools/gn/args.cc
@@ -0,0 +1,313 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/args.h"
+
+#include "base/sys_info.h"
+#include "build/build_config.h"
+#include "tools/gn/variables.h"
+
+const char kBuildArgs_Help[] =
+ "Build Arguments Overview\n"
+ "\n"
+ " Build arguments are variables passed in from outside of the build\n"
+ " that build files can query to determine how the build works.\n"
+ "\n"
+ "How build arguments are set\n"
+ "\n"
+ " First, system default arguments are set based on the current system.\n"
+ " The built-in arguments are:\n"
+ " - host_cpu\n"
+ " - host_os\n"
+ " - current_cpu\n"
+ " - current_os\n"
+ " - target_cpu\n"
+ " - target_os\n"
+ "\n"
+ " If specified, arguments from the --args command line flag are used. If\n"
+ " that flag is not specified, args from previous builds in the build\n"
+ " directory will be used (this is in the file args.gn in the build\n"
+ " directory).\n"
+ "\n"
+ " Last, for targets being compiled with a non-default toolchain, the\n"
+ " toolchain overrides are applied. These are specified in the\n"
+ " toolchain_args section of a toolchain definition. The use-case for\n"
+ " this is that a toolchain may be building code for a different\n"
+ " platform, and that it may want to always specify Posix, for example.\n"
+ " See \"gn help toolchain_args\" for more.\n"
+ "\n"
+ " If you specify an override for a build argument that never appears in\n"
+ " a \"declare_args\" call, a nonfatal error will be displayed.\n"
+ "\n"
+ "Examples\n"
+ "\n"
+ " gn args out/FooBar\n"
+ " Create the directory out/FooBar and open an editor. You would type\n"
+ " something like this into that file:\n"
+ " enable_doom_melon=false\n"
+ " os=\"android\"\n"
+ "\n"
+ " gn gen out/FooBar --args=\"enable_doom_melon=true os=\\\"android\\\"\"\n"
+ " This will overwrite the build directory with the given arguments.\n"
+ " (Note that the quotes inside the args command will usually need to\n"
+ " be escaped for your shell to pass through strings values.)\n"
+ "\n"
+ "How build arguments are used\n"
+ "\n"
+ " If you want to use an argument, you use declare_args() and specify\n"
+ " default values. These default values will apply if none of the steps\n"
+ " listed in the \"How build arguments are set\" section above apply to\n"
+ " the given argument, but the defaults will not override any of these.\n"
+ "\n"
+ " Often, the root build config file will declare global arguments that\n"
+ " will be passed to all buildfiles. Individual build files can also\n"
+ " specify arguments that apply only to those files. It is also useful\n"
+ " to specify build args in an \"import\"-ed file if you want such\n"
+ " arguments to apply to multiple buildfiles.\n";
+
+namespace {
+
+// Removes all entries in |overrides| that are in |declared_overrides|.
+void RemoveDeclaredOverrides(const Scope::KeyValueMap& declared_arguments,
+ Scope::KeyValueMap* overrides) {
+ for (Scope::KeyValueMap::iterator override = overrides->begin();
+ override != overrides->end();) {
+ if (declared_arguments.find(override->first) == declared_arguments.end())
+ ++override;
+ else
+ overrides->erase(override++);
+ }
+}
+
+} // namespace
+
+Args::Args() {
+}
+
+Args::Args(const Args& other)
+ : overrides_(other.overrides_),
+ all_overrides_(other.all_overrides_),
+ declared_arguments_per_toolchain_(
+ other.declared_arguments_per_toolchain_) {
+}
+
+Args::~Args() {
+}
+
+void Args::AddArgOverride(const char* name, const Value& value) {
+ base::AutoLock lock(lock_);
+
+ overrides_[base::StringPiece(name)] = value;
+ all_overrides_[base::StringPiece(name)] = value;
+}
+
+void Args::AddArgOverrides(const Scope::KeyValueMap& overrides) {
+ base::AutoLock lock(lock_);
+
+ for (const auto& cur_override : overrides) {
+ overrides_[cur_override.first] = cur_override.second;
+ all_overrides_[cur_override.first] = cur_override.second;
+ }
+}
+
+const Value* Args::GetArgOverride(const char* name) const {
+ base::AutoLock lock(lock_);
+
+ Scope::KeyValueMap::const_iterator found =
+ all_overrides_.find(base::StringPiece(name));
+ if (found == all_overrides_.end())
+ return nullptr;
+ return &found->second;
+}
+
+Scope::KeyValueMap Args::GetAllOverrides() const {
+ base::AutoLock lock(lock_);
+ return all_overrides_;
+}
+
+void Args::SetupRootScope(Scope* dest,
+ const Scope::KeyValueMap& toolchain_overrides) const {
+ base::AutoLock lock(lock_);
+
+ SetSystemVarsLocked(dest);
+ ApplyOverridesLocked(overrides_, dest);
+ ApplyOverridesLocked(toolchain_overrides, dest);
+ SaveOverrideRecordLocked(toolchain_overrides);
+}
+
bool Args::DeclareArgs(const Scope::KeyValueMap& args,
                       Scope* scope_to_set,
                       Err* err) const {
  base::AutoLock lock(lock_);

  // Per-toolchain record of declarations; used for duplicate detection here
  // and for override validation in VerifyAllOverridesUsed().
  Scope::KeyValueMap& declared_arguments(
      DeclaredArgumentsForToolchainLocked(scope_to_set));
  for (const auto& arg : args) {
    // Verify that the value hasn't already been declared. We want each value
    // to be declared only once.
    //
    // The tricky part is that a buildfile can be interpreted multiple times
    // when used from different toolchains, so we can't just check that we've
    // seen it before. Instead, we check that the location matches.
    Scope::KeyValueMap::iterator previously_declared =
        declared_arguments.find(arg.first);
    if (previously_declared != declared_arguments.end()) {
      if (previously_declared->second.origin() != arg.second.origin()) {
        // Declaration location mismatch.
        *err = Err(arg.second.origin(),
            "Duplicate build argument declaration.",
            "Here you're declaring an argument that was already declared "
            "elsewhere.\nYou can only declare each argument once in the entire "
            "build so there is one\ncanonical place for documentation and the "
            "default value. Either move this\nargument to the build config "
            "file (for visibility everywhere) or to a .gni file\nthat you "
            "\"import\" from the files where you need it (preferred).");
        err->AppendSubErr(Err(previously_declared->second.origin(),
                              "Previous declaration.",
                              "See also \"gn help buildargs\" for more on how "
                              "build arguments work."));
        return false;
      }
    } else {
      declared_arguments.insert(arg);
    }

    // Only set on the current scope to the new value if it hasn't been already
    // set. Mark the variable used so the build script can override it in
    // certain cases without getting unused value errors.
    if (!scope_to_set->GetValue(arg.first)) {
      scope_to_set->SetValue(arg.first, arg.second, arg.second.origin());
      scope_to_set->MarkUsed(arg.first);
    }
  }

  return true;
}
+
bool Args::VerifyAllOverridesUsed(Err* err) const {
  base::AutoLock lock(lock_);
  // Start with every override ever applied and strike out the ones matching a
  // declaration in any toolchain; whatever remains was never declared.
  Scope::KeyValueMap all_overrides(all_overrides_);
  for (const auto& map_pair : declared_arguments_per_toolchain_)
    RemoveDeclaredOverrides(map_pair.second, &all_overrides);

  if (all_overrides.empty())
    return true;

  // Get a list of all possible overrides for help with error finding.
  //
  // It might be nice to do edit distance checks to see if we can find one close
  // to what you typed.
  std::string all_declared_str;
  for (const auto& map_pair : declared_arguments_per_toolchain_) {
    for (const auto& cur_str : map_pair.second) {
      if (!all_declared_str.empty())
        all_declared_str += ", ";
      all_declared_str += cur_str.first.as_string();
    }
  }

  // Report only the first unused override (map iteration order), listing the
  // declared alternatives to help the user find the intended name.
  *err = Err(
      all_overrides.begin()->second.origin(), "Build argument has no effect.",
      "The variable \"" + all_overrides.begin()->first.as_string() +
          "\" was set as a build argument\nbut never appeared in a " +
          "declare_args() block in any buildfile.\n\nPossible arguments: " +
          all_declared_str);
  return false;
}
+
+void Args::MergeDeclaredArguments(Scope::KeyValueMap* dest) const {
+ base::AutoLock lock(lock_);
+ for (const auto& map_pair : declared_arguments_per_toolchain_) {
+ for (const auto& arg : map_pair.second)
+ (*dest)[arg.first] = arg.second;
+ }
+}
+
+void Args::SetSystemVarsLocked(Scope* dest) const {
+ lock_.AssertAcquired();
+
+ // Host OS.
+ const char* os = nullptr;
+#if defined(OS_WIN)
+ os = "win";
+#elif defined(OS_MACOSX)
+ os = "mac";
+#elif defined(OS_LINUX)
+ os = "linux";
+#elif defined(OS_ANDROID)
+ os = "android";
+#else
+ #error Unknown OS type.
+#endif
+
+ // Host architecture.
+ static const char kX86[] = "x86";
+ static const char kX64[] = "x64";
+ static const char kArm[] = "arm";
+ const char* arch = nullptr;
+
+ // Set the host CPU architecture based on the underlying OS, not
+ // whatever the current bit-tedness of the GN binary is.
+ std::string os_arch = base::SysInfo::OperatingSystemArchitecture();
+ if (os_arch == "x86")
+ arch = kX86;
+ else if (os_arch == "x86_64")
+ arch = kX64;
+ else if (os_arch.substr(3) == "arm")
+ arch = kArm;
+ else
+ CHECK(false) << "OS architecture not handled.";
+
+ // Save the OS and architecture as build arguments that are implicitly
+ // declared. This is so they can be overridden in a toolchain build args
+ // override, and so that they will appear in the "gn args" output.
+ Value empty_string(nullptr, std::string());
+
+ Value os_val(nullptr, std::string(os));
+ dest->SetValue(variables::kHostOs, os_val, nullptr);
+ dest->SetValue(variables::kTargetOs, empty_string, nullptr);
+ dest->SetValue(variables::kCurrentOs, empty_string, nullptr);
+
+ Value arch_val(nullptr, std::string(arch));
+ dest->SetValue(variables::kHostCpu, arch_val, nullptr);
+ dest->SetValue(variables::kTargetCpu, empty_string, nullptr);
+ dest->SetValue(variables::kCurrentCpu, empty_string, nullptr);
+
+ Scope::KeyValueMap& declared_arguments(
+ DeclaredArgumentsForToolchainLocked(dest));
+ declared_arguments[variables::kHostOs] = os_val;
+ declared_arguments[variables::kCurrentOs] = empty_string;
+ declared_arguments[variables::kTargetOs] = empty_string;
+ declared_arguments[variables::kHostCpu] = arch_val;
+ declared_arguments[variables::kCurrentCpu] = empty_string;
+ declared_arguments[variables::kTargetCpu] = empty_string;
+
+ // Mark these variables used so the build config file can override them
+ // without geting a warning about overwriting an unused variable.
+ dest->MarkUsed(variables::kHostCpu);
+ dest->MarkUsed(variables::kCurrentCpu);
+ dest->MarkUsed(variables::kTargetCpu);
+ dest->MarkUsed(variables::kHostOs);
+ dest->MarkUsed(variables::kCurrentOs);
+ dest->MarkUsed(variables::kTargetOs);
+}
+
+void Args::ApplyOverridesLocked(const Scope::KeyValueMap& values,
+ Scope* scope) const {
+ lock_.AssertAcquired();
+ for (const auto& val : values)
+ scope->SetValue(val.first, val.second, val.second.origin());
+}
+
+void Args::SaveOverrideRecordLocked(const Scope::KeyValueMap& values) const {
+ lock_.AssertAcquired();
+ for (const auto& val : values)
+ all_overrides_[val.first] = val.second;
+}
+
// Returns (creating on first use via operator[]) the declaration map for the
// toolchain that |scope| belongs to; the Settings pointer identifies the
// toolchain.
Scope::KeyValueMap& Args::DeclaredArgumentsForToolchainLocked(
    Scope* scope) const {
  lock_.AssertAcquired();
  return declared_arguments_per_toolchain_[scope->settings()];
}
diff --git a/chromium/tools/gn/args.h b/chromium/tools/gn/args.h
new file mode 100644
index 00000000000..1f9cb52f7de
--- /dev/null
+++ b/chromium/tools/gn/args.h
@@ -0,0 +1,108 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_ARGS_H_
+#define TOOLS_GN_ARGS_H_
+
+#include "base/containers/hash_tables.h"
+#include "base/macros.h"
+#include "base/synchronization/lock.h"
+#include "tools/gn/scope.h"
+
+class Err;
+
+extern const char kBuildArgs_Help[];
+
+// Manages build arguments. It stores the global arguments specified on the
+// command line, and sets up the root scope with the proper values.
+//
+// This class tracks accesses so we can report errors about unused variables.
+// The use case is if the user specifies an override on the command line, but
+// no buildfile actually uses that variable. We want to be able to report that
+// the argument was unused.
class Args {
 public:
  Args();
  Args(const Args& other);
  ~Args();

  // Specifies overrides of the build arguments. These are normally specified
  // on the command line.
  void AddArgOverride(const char* name, const Value& value);
  void AddArgOverrides(const Scope::KeyValueMap& overrides);

  // Returns the value corresponding to the given argument name, or NULL if no
  // argument is set.
  const Value* GetArgOverride(const char* name) const;

  // Gets all overrides set on the build.
  Scope::KeyValueMap GetAllOverrides() const;

  // Sets up the root scope for a toolchain. This applies the default system
  // flags, then any overrides stored in this object, then applies any
  // toolchain overrides specified in the argument.
  void SetupRootScope(Scope* dest,
                      const Scope::KeyValueMap& toolchain_overrides) const;

  // Sets up the given scope with arguments passed in.
  //
  // If the values specified in the args are not already set, the values in
  // the args list will be used (which are assumed to be the defaults), but
  // they will not override the system defaults or the current overrides.
  //
  // All args specified in the input will be marked as "used".
  //
  // On failure, the err will be set and it will return false.
  bool DeclareArgs(const Scope::KeyValueMap& args,
                   Scope* scope_to_set,
                   Err* err) const;

  // Checks to see if any of the overrides ever used were never declared as
  // arguments. If there are, this returns false and sets the error.
  bool VerifyAllOverridesUsed(Err* err) const;

  // Adds all declared arguments to the given output list. If the values exist
  // in the list already, their values will be overwritten, but other values
  // already in the list will remain.
  void MergeDeclaredArguments(Scope::KeyValueMap* dest) const;

 private:
  using DeclaredArgumentsPerToolchain =
      base::hash_map<const Settings*, Scope::KeyValueMap>;

  // Sets the default config based on the current system.
  void SetSystemVarsLocked(Scope* scope) const;

  // Sets the given vars on the given scope.
  void ApplyOverridesLocked(const Scope::KeyValueMap& values,
                            Scope* scope) const;

  // Records the given overrides for later unused-override reporting.
  void SaveOverrideRecordLocked(const Scope::KeyValueMap& values) const;

  // Returns the KeyValueMap used for arguments declared for the specified
  // toolchain.
  Scope::KeyValueMap& DeclaredArgumentsForToolchainLocked(Scope* scope) const;

  // Since this is called during setup which we assume is single-threaded,
  // this is not protected by the lock. It should be set only during init.
  Scope::KeyValueMap overrides_;

  // Protects the mutable members below.
  mutable base::Lock lock_;

  // Maintains a list of all overrides we've ever seen. This is the main
  // |overrides_| as well as toolchain overrides. Tracking this allows us to
  // check for overrides that were specified but never used.
  mutable Scope::KeyValueMap all_overrides_;

  // Maps from Settings (which corresponds to a toolchain) to the map of
  // declared variables. This is used to track all variables declared in any
  // buildfile. This is so we can see if the user set variables on the command
  // line that are not used anywhere. Each map is toolchain specific as each
  // toolchain may define variables in different locations.
  mutable DeclaredArgumentsPerToolchain declared_arguments_per_toolchain_;

  DISALLOW_ASSIGN(Args);
};
+
+#endif // TOOLS_GN_ARGS_H_
diff --git a/chromium/tools/gn/args_unittest.cc b/chromium/tools/gn/args_unittest.cc
new file mode 100644
index 00000000000..098f3dec850
--- /dev/null
+++ b/chromium/tools/gn/args_unittest.cc
@@ -0,0 +1,41 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/args.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/test_with_scope.h"
+
+// Assertions for VerifyAllOverridesUsed() and DeclareArgs() with multiple
+// toolchains.
// Assertions for VerifyAllOverridesUsed() and DeclareArgs() with multiple
// toolchains. Each TestWithScope acts as an independent toolchain, so "a"
// and "b" are declared in different per-toolchain declaration maps.
TEST(ArgsTest, VerifyAllOverridesUsed) {
  TestWithScope setup1, setup2;
  Args args;
  Scope::KeyValueMap key_value_map1;
  Err err;
  LiteralNode assignment1;

  // Declare "a" in the first toolchain's scope.
  setup1.scope()->SetValue("a", Value(nullptr, true), &assignment1);
  setup1.scope()->GetCurrentScopeValues(&key_value_map1);
  EXPECT_TRUE(args.DeclareArgs(key_value_map1, setup1.scope(), &err));

  // Declare "b" in the second toolchain's scope.
  LiteralNode assignment2;
  setup2.scope()->SetValue("b", Value(nullptr, true), &assignment2);
  Scope::KeyValueMap key_value_map2;
  setup2.scope()->GetCurrentScopeValues(&key_value_map2);
  EXPECT_TRUE(args.DeclareArgs(key_value_map2, setup2.scope(), &err));

  // Override "a", shouldn't see any errors as "a" was defined.
  args.AddArgOverride("a", Value(nullptr, true));
  EXPECT_TRUE(args.VerifyAllOverridesUsed(&err));

  // Override "a", & "b", shouldn't see any errors as both were defined.
  args.AddArgOverride("b", Value(nullptr, true));
  EXPECT_TRUE(args.VerifyAllOverridesUsed(&err));

  // Override "a", "b" and "c", should fail as "c" was not defined.
  args.AddArgOverride("c", Value(nullptr, true));
  EXPECT_FALSE(args.VerifyAllOverridesUsed(&err));
}
diff --git a/chromium/tools/gn/bin/compare_test_lists.py b/chromium/tools/gn/bin/compare_test_lists.py
new file mode 100644
index 00000000000..37fb1bf1e26
--- /dev/null
+++ b/chromium/tools/gn/bin/compare_test_lists.py
@@ -0,0 +1,101 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script compares the gtest test list for two different builds.
+#
+# Usage:
+# compare_test_lists.py <build_dir_1> <build_dir_2> <binary_name>
+#
+# For example, from the "src" directory:
+# python tools/gn/bin/compare_test_lists.py out/Debug out/gnbuild ipc_tests
+#
+# This will compile the given binary in both output directories, then extracts
+# the test lists and prints missing or extra tests between the first and the
+# second build.
+
+import os
+import subprocess
+import sys
+
def BuildBinary(build_dir, binary_name):
  """Invokes Ninja to build |binary_name| inside |build_dir|.

  Returns True on success."""
  exit_code = subprocess.call(["ninja", "-C", build_dir, binary_name])
  return exit_code == 0
+
+
def GetTestList(path_to_binary):
  """Returns a set of full test names.

  Each test will be of the form "Case.Test". There will be a separate line
  for each combination of Case/Test (there are often multiple tests in each
  case).

  Throws an exception on failure."""
  raw_output = subprocess.check_output([path_to_binary, "--gtest_list_tests"])
  input_lines = raw_output.split('\n')

  # The format of the gtest_list_tests output is:
  # "Case1."
  # "  Test1  # <Optional extra stuff>"
  # "  Test2"
  # "Case2."
  # "  Test1"
  case_name = ''  # Includes trailing dot.
  test_set = set()
  for line in input_lines:
    if len(line) > 1:
      if line[0] == ' ':
        # Indented means a test in previous case. Drop any trailing "# ..."
        # annotation. Note: line.find('#') would return -1 when there is no
        # '#', and line[:-1] silently chops the last character of the test
        # name, so split on '#' instead.
        test_set.add(case_name + line.split('#')[0].strip())
      else:
        # New test case.
        case_name = line.strip()

  return test_set
+
+
def PrintSetDiff(a_name, a, b_name, b, binary_name):
  """Prints the test list difference between the given sets a and b.

  a_name and b_name will be used to refer to the directories of the two sets,
  and the binary name will be shown as the source of the output."""

  # Tests present in the first build but missing from the second.
  a_not_b = list(a - b)
  if len(a_not_b):
    print "\n", binary_name, "tests in", a_name, "but not", b_name
    a_not_b.sort()
    for cur in a_not_b:
      print "  ", cur

  # Tests present in the second build but missing from the first.
  b_not_a = list(b - a)
  if len(b_not_a):
    print "\n", binary_name, "tests in", b_name, "but not", a_name
    b_not_a.sort()
    for cur in b_not_a:
      print "  ", cur

  if len(a_not_b) == 0 and len(b_not_a) == 0:
    print "\nTests match!"
+
+
def Run(a_dir, b_dir, binary_name):
  """Builds |binary_name| in both directories and prints the test-list diff.

  Returns 1 if either build fails; falls through (returning None, which
  sys.exit treats as success) after printing the comparison."""
  if not BuildBinary(a_dir, binary_name):
    print "Building", binary_name, "in", a_dir, "failed"
    return 1
  if not BuildBinary(b_dir, binary_name):
    print "Building", binary_name, "in", b_dir, "failed"
    return 1

  a_tests = GetTestList(os.path.join(a_dir, binary_name))
  b_tests = GetTestList(os.path.join(b_dir, binary_name))

  PrintSetDiff(a_dir, a_tests, b_dir, b_tests, binary_name)
+
+
# Command-line driver; runs at import time. Run() returns 1 on build failure
# and None on success, so sys.exit reports the right status either way.
if len(sys.argv) != 4:
  print "Usage: compare_test_lists.py <build_dir_1> <build_dir_2> " \
      "<test_binary_name>"
  sys.exit(1)
sys.exit(Run(sys.argv[1], sys.argv[2], sys.argv[3]))
diff --git a/chromium/tools/gn/bin/gn-format.py b/chromium/tools/gn/bin/gn-format.py
new file mode 100644
index 00000000000..c835753eefb
--- /dev/null
+++ b/chromium/tools/gn/bin/gn-format.py
@@ -0,0 +1,58 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Based on clang-format.py.
+#
+# This file is a minimal gn format vim-integration. To install:
+# - Change 'binary' if gn is not on the path (see below).
+# - Add to your .vimrc:
+#
+# map <F1> :pyf <path-to-this-file>/gn-format.py<CR>
+#
+# gn format currently formats only a complete file so visual ranges, etc. won't
+# be used. It operates on the current, potentially unsaved buffer and does not
+# create or save any files. To revert a formatting, just undo.
+
+import difflib
+import subprocess
+import sys
+import vim
+
+# Change this to the full path if gn is not on the path.
+binary = 'gn'
+
+def main():
+ # Get the current text.
+ buf = vim.current.buffer
+ text = '\n'.join(buf)
+
+ # Avoid flashing an ugly cmd prompt on Windows when invoking gn.
+ startupinfo = None
+ if sys.platform.startswith('win32'):
+ startupinfo = subprocess.STARTUPINFO()
+ startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
+ startupinfo.wShowWindow = subprocess.SW_HIDE
+
+ # Call formatter.
+ p = subprocess.Popen([binary, 'format', '--stdin'],
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ stdin=subprocess.PIPE, startupinfo=startupinfo,
+ universal_newlines=True)
+ stdout, stderr = p.communicate(input=text)
+ if p.returncode != 0:
+ print 'Formatting failed, please report to gn-dev@chromium.org.'
+ print stdout, stderr
+ else:
+ # Otherwise, replace current buffer.
+ lines = stdout.split('\n')
+ # Last line should have trailing \n, but we don't want to insert a blank
+ # line at the end of the buffer, so remove that.
+ if lines[-1] == '':
+ lines = lines[:-1]
+ sequence = difflib.SequenceMatcher(None, vim.current.buffer, lines)
+ for op in reversed(sequence.get_opcodes()):
+ if op[0] is not 'equal':
+ vim.current.buffer[op[1]:op[2]] = lines[op[3]:op[4]]
+
+main()
diff --git a/chromium/tools/gn/bin/gyp_flag_compare.py b/chromium/tools/gn/bin/gyp_flag_compare.py
new file mode 100755
index 00000000000..ddd260ac666
--- /dev/null
+++ b/chromium/tools/gn/bin/gyp_flag_compare.py
@@ -0,0 +1,280 @@
+#!/usr/bin/env python
+
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Given the output of -t commands from a ninja build for a gyp and GN generated
+build, report on differences between the command lines."""
+
+
+import os
+import shlex
+import subprocess
+import sys
+
+
# Must be in src/. (This script lives in src/tools/gn/bin/, so hop up three
# directory levels before running any gn/ninja/gyp commands.)
os.chdir(os.path.join(os.path.dirname(__file__), '..', '..', '..'))


# Running count of flag differences found across all compared files; updated
# by CompareLists() and reported at the end of main().
g_total_differences = 0
+
+
def FindAndRemoveArgWithValue(command_line, argname):
  """Given a command line as a list, remove and return the value of an option
  that takes a value as a separate entry.

  Modifies |command_line| in place. Returns '' when |argname| is absent.
  """
  try:
    location = command_line.index(argname)
  except ValueError:
    # Option not present; nothing to remove.
    return ''
  value = command_line[location + 1]
  del command_line[location:location + 2]
  return value
+
+
def MergeSpacedArgs(command_line, argname):
  """Combine all arguments |argname| with their values, separated by a space."""
  merged = []
  index = 0
  while index < len(command_line):
    current = command_line[index]
    if current == argname:
      # Fuse the flag with the entry that follows it.
      merged.append('%s %s' % (current, command_line[index + 1]))
      index += 2
    else:
      merged.append(current)
      index += 1
  return merged
+
+
def NormalizeSymbolArguments(command_line):
  """Normalize -g arguments.

  If there's no -g args, it's equivalent to -g0. -g2 is equivalent to -g.
  Modifies |command_line| in place.
  """
  # Drop a lone -g0: no symbols is already the default.
  has_symbols = any(
      flag.startswith('-g') and flag != '-g0' for flag in command_line)
  if not has_symbols and '-g0' in command_line:
    command_line.remove('-g0')

  # -g2 is just another spelling of -g.
  try:
    command_line[command_line.index('-g2')] = '-g'
  except ValueError:
    pass
+
+
def GetFlags(lines, build_dir):
  """Turn a list of command lines into a semi-structured dict.

  Each entry of |lines| is one compiler command line from 'ninja -t commands'.
  Returns a dict keyed by normalized absolute source path, whose values are
  dicts bucketing the flags into defines/include_dirs/dash_f/warnings/other.
  """
  is_win = sys.platform == 'win32'
  flags_by_output = {}
  for line in lines:
    # Drop the compiler executable itself (element 0).
    command_line = shlex.split(line.strip(), posix=not is_win)[1:]

    output_name = FindAndRemoveArgWithValue(command_line, '-o')
    dep_name = FindAndRemoveArgWithValue(command_line, '-MF')

    NormalizeSymbolArguments(command_line)

    command_line = MergeSpacedArgs(command_line, '-Xclang')

    cc_file = [x for x in command_line if x.endswith('.cc') or
                                          x.endswith('.c') or
                                          x.endswith('.cpp')]
    if len(cc_file) != 1:
      # Not a single-source compile command (e.g. a link step); skip it.
      print 'Skipping %s' % command_line
      continue
    assert len(cc_file) == 1

    if is_win:
      rsp_file = [x for x in command_line if x.endswith('.rsp')]
      assert len(rsp_file) <= 1
      if rsp_file:
        # Strip the leading '@' from the response-file reference.
        rsp_file = os.path.join(build_dir, rsp_file[0][1:])
        with open(rsp_file, "r") as open_rsp_file:
          # NOTE(review): passes the file object (not its contents) to
          # shlex.split; shlex accepts a file-like instream, but confirm
          # this tokenizes the whole file as intended.
          command_line = shlex.split(open_rsp_file, posix=False)

    defines = [x for x in command_line if x.startswith('-D')]
    include_dirs = [x for x in command_line if x.startswith('-I')]
    dash_f = [x for x in command_line if x.startswith('-f')]
    warnings = \
        [x for x in command_line if x.startswith('/wd' if is_win else '-W')]
    others = [x for x in command_line if x not in defines and \
        x not in include_dirs and \
        x not in dash_f and \
        x not in warnings and \
        x not in cc_file]

    # Rewrite relative include paths as absolute so the two builds compare.
    for index, value in enumerate(include_dirs):
      if value == '-Igen':
        continue
      path = value[2:]
      if not os.path.isabs(path):
        path = os.path.join(build_dir, path)
      include_dirs[index] = '-I' + os.path.normpath(path)

    # GYP supports paths above the source root like <(DEPTH)/../foo while such
    # paths are unsupported by gn. But gn allows to use system-absolute paths
    # instead (paths that start with single '/'). Normalize all paths.
    cc_file = [os.path.normpath(os.path.join(build_dir, cc_file[0]))]

    # Filter for libFindBadConstructs.so having a relative path in one and
    # absolute path in the other.
    others_filtered = []
    for x in others:
      if x.startswith('-Xclang ') and x.endswith('libFindBadConstructs.so'):
        others_filtered.append(
            '-Xclang ' +
            os.path.join(os.getcwd(),
                         os.path.normpath(
                             os.path.join('out/gn_flags', x.split(' ', 1)[1]))))
      elif x.startswith('-B'):
        others_filtered.append(
            '-B' +
            os.path.join(os.getcwd(),
                         os.path.normpath(os.path.join('out/gn_flags', x[2:]))))
      else:
        others_filtered.append(x)
    others = others_filtered

    flags_by_output[cc_file[0]] = {
        'output': output_name,
        'depname': dep_name,
        'defines': sorted(defines),
        'include_dirs': sorted(include_dirs),  # TODO(scottmg): This is wrong.
        'dash_f': sorted(dash_f),
        'warnings': sorted(warnings),
        'other': sorted(others),
      }
  return flags_by_output
+
+
def CompareLists(gyp, gn, name, dont_care_gyp=None, dont_care_gn=None):
  """Return a report of any differences between gyp and gn lists, ignoring
  anything in |dont_care_{gyp|gn}| respectively.

  Also bumps the module-level g_total_differences counter by the number of
  differing entries."""
  global g_total_differences
  dont_care_gyp = dont_care_gyp or []
  dont_care_gn = dont_care_gn or []
  output = ''
  if gyp[name] == gn[name]:
    return output
  gyp_set = set(gyp[name])
  gn_set = set(gn[name])
  missing_in_gyp = (gyp_set - gn_set) - set(dont_care_gyp)
  missing_in_gn = (gn_set - gyp_set) - set(dont_care_gn)
  if not (missing_in_gyp or missing_in_gn):
    return output
  output += '  %s differ:\n' % name
  if missing_in_gyp:
    output += '    In gyp, but not in GN:\n      %s' % '\n      '.join(
        sorted(missing_in_gyp)) + '\n'
    g_total_differences += len(missing_in_gyp)
  if missing_in_gn:
    output += '    In GN, but not in gyp:\n      %s' % '\n      '.join(
        sorted(missing_in_gn)) + '\n\n'
    g_total_differences += len(missing_in_gn)
  return output
+
+
def Run(command_line):
  """Run |command_line| as a subprocess and return stdout. Raises on error."""
  # shell=True because the callers compose whole command strings (including
  # quoted --args="...") rather than argument lists.
  return subprocess.check_output(command_line, shell=True)
+
+
def main():
  """Regenerates GN and GYP builds, then diffs their per-file compile flags.

  With one target argument it is used for both builds; with two, the first is
  the gyp target and the second the gn target. Always returns 0 for now."""
  if len(sys.argv) != 2 and len(sys.argv) != 3:
    print 'usage: %s gyp_target gn_target' % __file__
    print '   or: %s target' % __file__
    return 1

  # Single-target form: reuse the same name for both builds.
  if len(sys.argv) == 2:
    sys.argv.append(sys.argv[1])

  gn_out_dir = 'out/gn_flags'
  print >> sys.stderr, 'Regenerating in %s...' % gn_out_dir
  # Currently only Release, non-component.
  Run('gn gen %s --args="is_debug=false is_component_build=false"' % gn_out_dir)
  gn = Run('ninja -C %s -t commands %s' % (gn_out_dir, sys.argv[2]))
  if sys.platform == 'win32':
    # On Windows flags are stored in .rsp files which are created during build.
    print >> sys.stderr, 'Building in %s...' % gn_out_dir
    Run('ninja -C %s -d keeprsp %s' % (gn_out_dir, sys.argv[2]))

  os.environ.pop('GYP_DEFINES', None)
  # Remove environment variables required by gn but conflicting with GYP.
  # Relevant if Windows toolchain isn't provided by depot_tools.
  os.environ.pop('GYP_MSVS_OVERRIDE_PATH', None)
  os.environ.pop('WINDOWSSDKDIR', None)

  gyp_out_dir = 'out_gyp_flags/Release'
  print >> sys.stderr, 'Regenerating in %s...' % gyp_out_dir
  Run('python build/gyp_chromium -Goutput_dir=out_gyp_flags -Gconfig=Release')
  gyp = Run('ninja -C %s -t commands %s' % (gyp_out_dir, sys.argv[1]))
  if sys.platform == 'win32':
    # On Windows flags are stored in .rsp files which are created during build.
    print >> sys.stderr, 'Building in %s...' % gyp_out_dir
    Run('ninja -C %s -d keeprsp %s' % (gyp_out_dir, sys.argv[2]))

  all_gyp_flags = GetFlags(gyp.splitlines(),
                           os.path.join(os.getcwd(), gyp_out_dir))
  all_gn_flags = GetFlags(gn.splitlines(),
                          os.path.join(os.getcwd(), gn_out_dir))
  gyp_files = set(all_gyp_flags.keys())
  gn_files = set(all_gn_flags.keys())
  different_source_list = gyp_files != gn_files
  if different_source_list:
    print 'Different set of sources files:'
    print '  In gyp, not in GN:\n    %s' % '\n    '.join(
        sorted(gyp_files - gn_files))
    print '  In GN, not in gyp:\n    %s' % '\n    '.join(
        sorted(gn_files - gyp_files))
    print '\nNote that flags will only be compared for files in both sets.\n'
  file_list = gyp_files & gn_files
  # Group files by their (identical) difference report so each distinct diff
  # is printed once with all affected files.
  files_with_given_differences = {}
  for filename in sorted(file_list):
    gyp_flags = all_gyp_flags[filename]
    gn_flags = all_gn_flags[filename]
    differences = CompareLists(gyp_flags, gn_flags, 'dash_f')
    differences += CompareLists(gyp_flags, gn_flags, 'defines')
    differences += CompareLists(gyp_flags, gn_flags, 'include_dirs')
    differences += CompareLists(gyp_flags, gn_flags, 'warnings',
        # More conservative warnings in GN we consider to be OK.
        dont_care_gyp=[
          '/wd4091',  # 'keyword' : ignored on left of 'type' when no variable
                      # is declared.
          '/wd4456',  # Declaration hides previous local declaration.
          '/wd4457',  # Declaration hides function parameter.
          '/wd4458',  # Declaration hides class member.
          '/wd4459',  # Declaration hides global declaration.
          '/wd4702',  # Unreachable code.
          '/wd4800',  # Forcing value to bool 'true' or 'false'.
          '/wd4838',  # Conversion from 'type' to 'type' requires a narrowing
                      # conversion.
        ] if sys.platform == 'win32' else None,
        dont_care_gn=[
          '-Wendif-labels',
          '-Wextra',
          '-Wsign-compare',
        ] if not sys.platform == 'win32' else None)
    differences += CompareLists(gyp_flags, gn_flags, 'other')
    if differences:
      files_with_given_differences.setdefault(differences, []).append(filename)

  for diff, files in files_with_given_differences.iteritems():
    print '\n'.join(sorted(files))
    print diff

  print 'Total differences:', g_total_differences
  # TODO(scottmg): Return failure on difference once we're closer to identical.
  return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/gn/bin/help_as_html.py b/chromium/tools/gn/bin/help_as_html.py
new file mode 100755
index 00000000000..f8f1c1bc271
--- /dev/null
+++ b/chromium/tools/gn/bin/help_as_html.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Runs 'gn help' and various subhelps, and spits out html.
+# TODO:
+# - Handle numbered and dashed lists -> <ol> <ul>. (See "os" and "toolchain").
+# - Handle "Arguments:" blocks a bit better (the argument names could be
+# distinguished).
+# - Convert "|blahblah|" to <code>.
+# - Spit out other similar formats like wiki, markdown, whatever.
+
+import cgi
+import subprocess
+import sys
+
+
def GetOutput(*args):
  """Runs the gn binary (sys.argv[1]) with |args| appended.

  Returns its stdout, or '' if the command exits nonzero."""
  command = [sys.argv[1]] + list(args)
  try:
    return subprocess.check_output(command)
  except subprocess.CalledProcessError:
    return ''
+
+
def ParseTopLevel(out):
  """Parses the top-level 'gn help' output into HTML.

  Returns (commands, output): the command names found (used later to fetch
  per-command help) and a list of HTML lines. Indented lines are treated as
  command/option entries; anything else becomes a section heading."""
  commands = []
  output = []
  for line in out.splitlines():
    if line.startswith('  '):
      command, sep, rest = line.partition(':')
      command = command.strip()
      # Entries starting with '-' are options, not commands, so they get no
      # anchor link and are not collected for per-command help.
      is_option = command.startswith('-')
      output_line = ['<li>']
      if not is_option:
        commands.append(command)
        output_line.append('<a href="#' + cgi.escape(command) + '">')
      output_line.append(cgi.escape(command))
      if not is_option:
        output_line.append('</a>')
      output_line.extend([sep + cgi.escape(rest) + '</li>'])
      output.append(''.join(output_line))
    else:
      output.append('<h2>' + cgi.escape(line) + '</h2>')
  return commands, output
+
+
def ParseCommand(command, out):
  """Converts one command's 'gn help <command>' output into HTML lines.

  The first line becomes an anchored <h3>; 'Example' sections are rendered
  pre-formatted; other unindented lines ending in ':' become subsections."""
  first_line = True
  got_example = False
  output = []
  for line in out.splitlines():
    if first_line:
      name, sep, rest = line.partition(':')
      name = name.strip()
      output.append('<h3><a name="' + cgi.escape(command) + '">' +
                    cgi.escape(name + sep + rest) + '</a></h3>')
      first_line = False
    else:
      if line.startswith('Example'):
        # Special subsection that's pre-formatted.
        if got_example:
          # Close the previous example's <pre> before opening a new one.
          output.append('</pre>')
        got_example = True
        output.append('<h4>Example</h4>')
        output.append('<pre>')
      elif not line.strip():
        output.append('<p>')
      elif not line.startswith('  ') and line.endswith(':'):
        # Subsection.
        output.append('<h4>' + cgi.escape(line[:-1]) + '</h4>')
      else:
        output.append(cgi.escape(line))
  if got_example:
    # Close a trailing example block.
    output.append('</pre>')
  return output
+
+
def main():
  """Renders 'gn help' (binary given as argv[1]) as a single HTML page."""
  if len(sys.argv) < 2:
    print 'usage: help_as_html.py <gn_binary>'
    return 1
  header = '''<!DOCTYPE html>
<html>
  <head>
    <meta name="viewport" content="width=device-width, initial-scale=1">
    <style>
      body { font-family: Arial, sans-serif; font-size: small; }
      pre { font-family: Consolas, monospace; font-size: small; }
      #container { margin: 0 auto; max-width: 48rem; width: 90%; }
    </style>
  </head>
  <body>
    <div id="container"><h1>GN</h1>
'''
  footer = '</div></body></html>'
  # Top-level help first, then one section per discovered command.
  commands, output = ParseTopLevel(GetOutput('help'))
  for command in commands:
    output += ParseCommand(command, GetOutput('help', command))
  print header + '\n'.join(output) + footer
  return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/gn/bin/roll_gn.py b/chromium/tools/gn/bin/roll_gn.py
new file mode 100755
index 00000000000..10b218c43af
--- /dev/null
+++ b/chromium/tools/gn/bin/roll_gn.py
@@ -0,0 +1,462 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""An auto-roller for GN binaries into Chromium.
+
+This script is used to update the GN binaries that a Chromium
+checkout uses. In order to update the binaries, one must follow
+four steps:
+
+1. Trigger try jobs to build a new GN binary at tip-of-tree and upload
+ the newly-built binaries into the right Google CloudStorage bucket.
+2. Wait for the try jobs to complete.
+3. Update the buildtools repo with the .sha1 hashes of the newly built
+ binaries.
+4. Update Chromium's DEPS file to the new version of the buildtools repo.
+
+The script has four commands that correspond to the four steps above:
+'build', 'wait', 'roll_buildtools', and 'roll_deps'.
+
+The script has a fifth command, 'roll', that runs the four in order.
+
+If given no arguments, the script will run the 'roll' command.
+
+It can only be run on Linux in a clean Chromium checkout; it should
+error out in most cases if something bad happens, but the error checking
+isn't yet foolproof.
+
+"""
+
+from __future__ import print_function
+
+import argparse
+import json
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import time
+import urllib2
+
+depot_tools_path = None
+for p in os.environ['PATH'].split(os.pathsep):
+ if (p.rstrip(os.sep).endswith('depot_tools') and
+ os.path.isfile(os.path.join(p, 'gclient.py'))):
+ depot_tools_path = p
+
+assert depot_tools_path
+if not depot_tools_path in sys.path:
+ sys.path.insert(0, depot_tools_path)
+
+third_party_path = os.path.join(depot_tools_path, 'third_party')
+if not third_party_path in sys.path:
+ sys.path.insert(0, third_party_path)
+
+import upload
+
+
+CHROMIUM_REPO = 'https://chromium.googlesource.com/chromium/src.git'
+
+CODE_REVIEW_SERVER = 'https://codereview.chromium.org'
+
+COMMITISH_DIGITS = 10
+
+class GNRoller(object):
+ def __init__(self):
+ self.chromium_src_dir = None
+ self.buildtools_dir = None
+ self.old_gn_commitish = None
+ self.new_gn_commitish = None
+ self.old_gn_version = None
+ self.new_gn_version = None
+ self.reviewer = 'dpranke@chromium.org'
+ if os.getenv('USER') == 'dpranke':
+ self.reviewer = 'brettw@chromium.org'
+
+ def Roll(self):
+ parser = argparse.ArgumentParser()
+ parser.usage = __doc__
+ parser.add_argument('command', nargs='?', default='roll',
+ help='build|roll|roll_buildtools|roll_deps|wait'
+ ' (%(default)s is the default)')
+
+ args = parser.parse_args()
+ command = args.command
+ ret = self.SetUp()
+ if not ret and command in ('roll', 'build'):
+ ret = self.TriggerBuild()
+ if not ret and command in ('roll', 'wait'):
+ ret = self.WaitForBuildToFinish()
+ if not ret and command in ('roll', 'roll_buildtools'):
+ ret = self.RollBuildtools()
+ if not ret and command in ('roll', 'roll_deps'):
+ ret = self.RollDEPS()
+
+ return ret
+
+ def SetUp(self):
+ if sys.platform != 'linux2':
+ print('roll_gn is only tested and working on Linux for now.')
+ return 1
+
+ ret, out, _ = self.Call('git config --get remote.origin.url')
+ origin = out.strip()
+ if ret or origin != CHROMIUM_REPO:
+ print('Not in a Chromium repo? git config --get remote.origin.url '
+ 'returned %d: %s' % (ret, origin))
+ return 1
+
+ ret, _, _ = self.Call('git diff -q')
+ if ret:
+ print("Checkout is dirty, exiting")
+ return 1
+
+ _, out, _ = self.Call('git rev-parse --show-toplevel', cwd=os.getcwd())
+ self.chromium_src_dir = out.strip()
+ self.buildtools_dir = os.path.join(self.chromium_src_dir, 'buildtools')
+
+ self.new_gn_commitish, self.new_gn_version = self.GetNewVersions()
+
+ _, out, _ = self.Call('gn --version')
+ self.old_gn_version = out.strip()
+
+ _, out, _ = self.Call('git crrev-parse %s' % self.old_gn_version)
+ self.old_gn_commitish = out.strip()
+ return 0
+
+ def GetNewVersions(self):
+ _, out, _ = self.Call('git log -1 --grep Cr-Commit-Position')
+ commit_msg = out.splitlines()
+ first_line = commit_msg[0]
+ new_gn_commitish = first_line.split()[1]
+
+ last_line = commit_msg[-1]
+ new_gn_version = re.sub('.*master@{#(\d+)}', '\\1', last_line)
+
+ return new_gn_commitish, new_gn_version
+
+ def TriggerBuild(self):
+ ret, _, _ = self.Call('git new-branch build_gn_%s' % self.new_gn_version)
+ if ret:
+ print('Failed to create a new branch for build_gn_%s' %
+ self.new_gn_version)
+ return 1
+
+ self.MakeDummyDepsChange()
+
+ ret, out, err = self.Call('git commit -a -m "Build gn at %s"' %
+ self.new_gn_version)
+ if ret:
+ print('git commit failed: %s' % out + err)
+ return 1
+
+ print('Uploading CL to build GN at {#%s} - %s' %
+ (self.new_gn_version, self.new_gn_commitish))
+ ret, out, err = self.Call('git cl upload -f')
+ if ret:
+ print('git-cl upload failed: %s' % out + err)
+ return 1
+
+ print('Starting try jobs')
+ self.Call('git-cl try -m tryserver.chromium.linux '
+ '-b linux_chromium_gn_upload -r %s' % self.new_gn_commitish)
+ self.Call('git-cl try -m tryserver.chromium.mac '
+ '-b mac_chromium_gn_upload -r %s' % self.new_gn_commitish)
+ self.Call('git-cl try -m tryserver.chromium.win '
+ '-b win8_chromium_gn_upload -r %s' % self.new_gn_commitish)
+
+ return 0
+
+ def MakeDummyDepsChange(self):
+ with open('DEPS') as fp:
+ deps_content = fp.read()
+ new_deps = deps_content.replace("'buildtools_revision':",
+ "'buildtools_revision': ")
+
+ with open('DEPS', 'w') as fp:
+ fp.write(new_deps)
+
+ def WaitForBuildToFinish(self):
+ ret = self.CheckoutBuildBranch()
+ if ret:
+ return ret
+
+ print('Checking build')
+ results = self.CheckBuild()
+ while (len(results) < 3 or
+ any(r['state'] in ('pending', 'started')
+ for r in results.values())):
+ print()
+ print('Sleeping for 30 seconds')
+ time.sleep(30)
+ print('Checking build')
+ results = self.CheckBuild()
+
+ ret = 0 if all(r['state'] == 'success' for r in results.values()) else 1
+ if ret:
+ print('Build failed.')
+ else:
+ print('Builds ready.')
+
+ # Close the build CL and move off of the build branch back to whatever
+ # we were on before.
+ self.Call('git-cl set-close')
+ self.MoveToLastHead()
+
+ return ret
+
+ def CheckoutBuildBranch(self):
+ ret, out, err = self.Call('git checkout build_gn_%s' % self.new_gn_version)
+ if ret:
+ print('Failed to check out build_gn_%s' % self.new_gn_version)
+ if out:
+ print(out)
+ if err:
+ print(err, file=sys.stderr)
+ return ret
+
+ def CheckBuild(self):
+ _, out, _ = self.Call('git-cl issue')
+
+ issue = int(out.split()[2])
+
+ _, out, _ = self.Call('git config user.email')
+ email = ''
+ rpc_server = upload.GetRpcServer(CODE_REVIEW_SERVER, email)
+ try:
+ props = json.loads(rpc_server.Send('/api/%d' % issue))
+ except Exception as _e:
+ raise
+
+ patchset = int(props['patchsets'][-1])
+
+ try:
+ try_job_results = json.loads(rpc_server.Send(
+ '/api/%d/%d/try_job_results' % (issue, patchset)))
+ except Exception as _e:
+ raise
+
+ if not try_job_results:
+ print('No try jobs found on most recent patchset')
+ return {}
+
+ results = {}
+ for job in try_job_results:
+ builder = job['builder']
+ if builder == 'linux_chromium_gn_upload':
+ platform = 'linux64'
+ elif builder == 'mac_chromium_gn_upload':
+ platform = 'mac'
+ elif builder == 'win8_chromium_gn_upload':
+ platform = 'win'
+ else:
+ print('Unexpected builder: %s')
+ continue
+
+ TRY_JOB_RESULT_STATES = ('started', 'success', 'warnings', 'failure',
+ 'skipped', 'exception', 'retry', 'pending')
+ state = TRY_JOB_RESULT_STATES[int(job['result']) + 1]
+ url_str = ' %s' % job['url']
+ build = url_str.split('/')[-1]
+
+ sha1 = '-'
+ results.setdefault(platform, {'build': -1, 'sha1': '', 'url': url_str})
+
+ if state == 'success':
+ jsurl = url_str.replace('/builders/', '/json/builders/')
+ fp = urllib2.urlopen(jsurl)
+ js = json.loads(fp.read())
+ fp.close()
+ for step in js['steps']:
+ if step['name'] == 'gn sha1':
+ sha1 = step['text'][1]
+
+ if results[platform]['build'] < build:
+ results[platform]['build'] = build
+ results[platform]['sha1'] = sha1
+ results[platform]['state'] = state
+ results[platform]['url'] = url_str
+
+ for platform, r in results.items():
+ print(platform)
+ print(' sha1: %s' % r['sha1'])
+ print(' state: %s' % r['state'])
+ print(' build: %s' % r['build'])
+ print(' url: %s' % r['url'])
+ print()
+
+ return results
+
+ def RollBuildtools(self):
+ ret = self.CheckoutBuildBranch()
+ if ret:
+ return ret
+
+ results = self.CheckBuild()
+ if (len(results) < 3 or
+ not all(r['state'] == 'success' for r in results.values())):
+ print("Roll isn't done or didn't succeed, exiting:")
+ return 1
+
+ desc = self.GetBuildtoolsDesc()
+
+ self.Call('git new-branch roll_buildtools_gn_%s' % self.new_gn_version,
+ cwd=self.buildtools_dir)
+
+ for platform in results:
+ fname = 'gn.exe.sha1' if platform == 'win' else 'gn.sha1'
+ path = os.path.join(self.buildtools_dir, platform, fname)
+ with open(path, 'w') as fp:
+ fp.write('%s\n' % results[platform]['sha1'])
+
+ desc_file = tempfile.NamedTemporaryFile(delete=False)
+ try:
+ desc_file.write(desc)
+ desc_file.close()
+ self.Call('git commit -a -F %s' % desc_file.name,
+ cwd=self.buildtools_dir)
+ self.Call('git-cl upload -f --send-mail',
+ cwd=self.buildtools_dir)
+ finally:
+ os.remove(desc_file.name)
+
+ ret, out, err = self.Call('git cl land', cwd=self.buildtools_dir)
+ if ret:
+ print("buildtools git cl land failed: %d" % ret)
+ if out:
+ print(out)
+ if err:
+ print(err)
+ return ret
+
+ # Fetch the revision we just committed so that RollDEPS will find it.
+ self.Call('git fetch', cwd=self.buildtools_dir)
+
+ # Reset buildtools to the new commit so that we're not still on the
+ # merged branch.
+ self.Call('git checkout origin/master', cwd=self.buildtools_dir)
+
+ _, out, _ = self.Call('git rev-parse origin/master',
+ cwd=self.buildtools_dir)
+ new_buildtools_commitish = out.strip()
+ print('Ready to roll buildtools to %s in DEPS' % new_buildtools_commitish)
+
+ return 0
+
+ def RollDEPS(self):
+ ret, _, _ = self.Call('git new-branch roll_gn_%s' % self.new_gn_version)
+ if ret:
+ print('Failed to create a new branch for roll_gn_%s' %
+ self.new_gn_version)
+ return 1
+
+ _, out, _ = self.Call('git rev-parse origin/master',
+ cwd=self.buildtools_dir)
+ new_buildtools_commitish = out.strip()
+
+ new_deps_lines = []
+ old_buildtools_commitish = ''
+ with open(os.path.join(self.chromium_src_dir, 'DEPS')) as fp:
+ for l in fp.readlines():
+ m = re.match(".*'buildtools_revision':.*'(.+)',", l)
+ if m:
+ old_buildtools_commitish = m.group(1)
+ new_deps_lines.append(" 'buildtools_revision': '%s',\n" %
+ new_buildtools_commitish)
+ else:
+ new_deps_lines.append(l)
+
+ if not old_buildtools_commitish:
+ print('Could not update DEPS properly, exiting')
+ return 1
+
+ with open('DEPS', 'w') as fp:
+ fp.write(''.join(new_deps_lines))
+
+ desc = self.GetDEPSRollDesc(old_buildtools_commitish,
+ new_buildtools_commitish)
+ desc_file = tempfile.NamedTemporaryFile(delete=False)
+ try:
+ desc_file.write(desc)
+ desc_file.close()
+ self.Call('git commit -a -F %s' % desc_file.name)
+ self.Call('git-cl upload -f --send-mail --use-commit-queue')
+ finally:
+ os.remove(desc_file.name)
+
+ # Move off of the roll branch onto whatever we were on before.
+ # Do not explicitly close the roll CL issue, however; the CQ
+ # will close it when the roll lands, assuming it does so.
+ self.MoveToLastHead()
+
+ return 0
+
+ def MoveToLastHead(self):
+ # When this is called, there will be a commit + a checkout as
+    # the two most recent entries in the reflog, assuming nothing has
+ # modified the repo while this script has been running.
+ _, out, _ = self.Call('git reflog -2')
+ m = re.search('moving from ([^\s]+)', out)
+ last_head = m.group(1)
+ self.Call('git checkout %s' % last_head)
+
+ def GetBuildtoolsDesc(self):
+ gn_changes = self.GetGNChanges()
+ return (
+ 'Roll gn %s..%s (r%s:r%s)\n'
+ '\n'
+ '%s'
+ '\n'
+ 'TBR=%s\n' % (
+ self.old_gn_commitish[:COMMITISH_DIGITS],
+ self.new_gn_commitish[:COMMITISH_DIGITS],
+ self.old_gn_version,
+ self.new_gn_version,
+ gn_changes,
+ self.reviewer,
+ ))
+
+ def GetDEPSRollDesc(self, old_buildtools_commitish, new_buildtools_commitish):
+ gn_changes = self.GetGNChanges()
+
+ return (
+ 'Roll buildtools %s..%s\n'
+ '\n'
+ ' In order to roll GN %s..%s (r%s:r%s) and pick up\n'
+ ' the following changes:\n'
+ '\n'
+ '%s'
+ '\n'
+ 'TBR=%s\n'
+ 'CQ_EXTRA_TRYBOTS=tryserver.chromium.mac:mac_chromium_gn_dbg;'
+ 'tryserver.chromium.win:win8_chromium_gn_dbg,'
+ 'win_chromium_gn_x64_rel\n' % (
+ old_buildtools_commitish[:COMMITISH_DIGITS],
+ new_buildtools_commitish[:COMMITISH_DIGITS],
+ self.old_gn_commitish[:COMMITISH_DIGITS],
+ self.new_gn_commitish[:COMMITISH_DIGITS],
+ self.old_gn_version,
+ self.new_gn_version,
+ gn_changes,
+ self.reviewer,
+ ))
+
+ def GetGNChanges(self):
+ _, out, _ = self.Call(
+ "git log --pretty=' %h %s' " +
+ "%s..%s tools/gn" % (self.old_gn_commitish, self.new_gn_commitish))
+ return out
+
+ def Call(self, cmd, cwd=None):
+ proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True,
+ cwd=(cwd or self.chromium_src_dir))
+ out, err = proc.communicate()
+ return proc.returncode, out, err
+
+
+if __name__ == '__main__':
+ roller = GNRoller()
+ sys.exit(roller.Roll())
diff --git a/chromium/tools/gn/binary_target_generator.cc b/chromium/tools/gn/binary_target_generator.cc
new file mode 100644
index 00000000000..7c81232dacc
--- /dev/null
+++ b/chromium/tools/gn/binary_target_generator.cc
@@ -0,0 +1,146 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/binary_target_generator.h"
+
+#include "tools/gn/config_values_generator.h"
+#include "tools/gn/deps_iterator.h"
+#include "tools/gn/err.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/value_extractors.h"
+#include "tools/gn/variables.h"
+
+BinaryTargetGenerator::BinaryTargetGenerator(
+ Target* target,
+ Scope* scope,
+ const FunctionCallNode* function_call,
+ Target::OutputType type,
+ Err* err)
+ : TargetGenerator(target, scope, function_call, err),
+ output_type_(type) {
+}
+
+BinaryTargetGenerator::~BinaryTargetGenerator() {
+}
+
+void BinaryTargetGenerator::DoRun() {
+ target_->set_output_type(output_type_);
+
+ if (!FillOutputName())
+ return;
+
+ if (!FillOutputPrefixOverride())
+ return;
+
+ if (!FillOutputExtension())
+ return;
+
+ if (!FillSources())
+ return;
+
+ if (!FillPublic())
+ return;
+
+ if (!FillCheckIncludes())
+ return;
+
+ if (!FillInputs())
+ return;
+
+ if (!FillConfigs())
+ return;
+
+ if (!FillAllowCircularIncludesFrom())
+ return;
+
+ if (!FillCompleteStaticLib())
+ return;
+
+ // Config values (compiler flags, etc.) set directly on this target.
+ ConfigValuesGenerator gen(&target_->config_values(), scope_,
+ scope_->GetSourceDir(), err_);
+ gen.Run();
+ if (err_->has_error())
+ return;
+}
+
+bool BinaryTargetGenerator::FillCompleteStaticLib() {
+ if (target_->output_type() == Target::STATIC_LIBRARY) {
+ const Value* value = scope_->GetValue(variables::kCompleteStaticLib, true);
+ if (!value)
+ return true;
+ if (!value->VerifyTypeIs(Value::BOOLEAN, err_))
+ return false;
+ target_->set_complete_static_lib(value->boolean_value());
+ }
+ return true;
+}
+
+bool BinaryTargetGenerator::FillOutputName() {
+ const Value* value = scope_->GetValue(variables::kOutputName, true);
+ if (!value)
+ return true;
+ if (!value->VerifyTypeIs(Value::STRING, err_))
+ return false;
+ target_->set_output_name(value->string_value());
+ return true;
+}
+
+bool BinaryTargetGenerator::FillOutputPrefixOverride() {
+ const Value* value = scope_->GetValue(variables::kOutputPrefixOverride, true);
+ if (!value)
+ return true;
+ if (!value->VerifyTypeIs(Value::BOOLEAN, err_))
+ return false;
+ target_->set_output_prefix_override(value->boolean_value());
+ return true;
+}
+
+bool BinaryTargetGenerator::FillOutputExtension() {
+ const Value* value = scope_->GetValue(variables::kOutputExtension, true);
+ if (!value)
+ return true;
+ if (!value->VerifyTypeIs(Value::STRING, err_))
+ return false;
+ target_->set_output_extension(value->string_value());
+ return true;
+}
+
+bool BinaryTargetGenerator::FillAllowCircularIncludesFrom() {
+ const Value* value = scope_->GetValue(
+ variables::kAllowCircularIncludesFrom, true);
+ if (!value)
+ return true;
+
+ UniqueVector<Label> circular;
+ ExtractListOfUniqueLabels(*value, scope_->GetSourceDir(),
+ ToolchainLabelForScope(scope_), &circular, err_);
+ if (err_->has_error())
+ return false;
+
+ // Validate that all circular includes entries are in the deps.
+ for (const auto& cur : circular) {
+ bool found_dep = false;
+ for (const auto& dep_pair : target_->GetDeps(Target::DEPS_LINKED)) {
+ if (dep_pair.label == cur) {
+ found_dep = true;
+ break;
+ }
+ }
+ if (!found_dep) {
+ *err_ = Err(*value, "Label not in deps.",
+ "The label \"" + cur.GetUserVisibleName(false) +
+ "\"\nwas not in the deps of this target. "
+ "allow_circular_includes_from only allows\ntargets present in the "
+ "deps.");
+ return false;
+ }
+ }
+
+ // Add to the set.
+ for (const auto& cur : circular)
+ target_->allow_circular_includes_from().insert(cur);
+ return true;
+}
diff --git a/chromium/tools/gn/binary_target_generator.h b/chromium/tools/gn/binary_target_generator.h
new file mode 100644
index 00000000000..40ed432b228
--- /dev/null
+++ b/chromium/tools/gn/binary_target_generator.h
@@ -0,0 +1,38 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_BINARY_TARGET_GENERATOR_H_
+#define TOOLS_GN_BINARY_TARGET_GENERATOR_H_
+
+#include "base/macros.h"
+#include "tools/gn/target.h"
+#include "tools/gn/target_generator.h"
+
+// Populates a Target with the values from a binary rule (executable, shared
+// library, or static library).
+class BinaryTargetGenerator : public TargetGenerator {
+ public:
+ BinaryTargetGenerator(Target* target,
+ Scope* scope,
+ const FunctionCallNode* function_call,
+ Target::OutputType type,
+ Err* err);
+ ~BinaryTargetGenerator() override;
+
+ protected:
+ void DoRun() override;
+
+ private:
+ bool FillCompleteStaticLib();
+ bool FillOutputName();
+ bool FillOutputPrefixOverride();
+ bool FillOutputExtension();
+ bool FillAllowCircularIncludesFrom();
+
+ Target::OutputType output_type_;
+
+ DISALLOW_COPY_AND_ASSIGN(BinaryTargetGenerator);
+};
+
+#endif // TOOLS_GN_BINARY_TARGET_GENERATOR_H_
diff --git a/chromium/tools/gn/bootstrap/OWNERS b/chromium/tools/gn/bootstrap/OWNERS
new file mode 100644
index 00000000000..72e8ffc0db8
--- /dev/null
+++ b/chromium/tools/gn/bootstrap/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/chromium/tools/gn/bootstrap/bootstrap.py b/chromium/tools/gn/bootstrap/bootstrap.py
new file mode 100755
index 00000000000..f1988331b3a
--- /dev/null
+++ b/chromium/tools/gn/bootstrap/bootstrap.py
@@ -0,0 +1,521 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file isn't officially supported by the Chromium project. It's maintained
+# on a best-effort basis by volunteers, so some things may be broken from time
+# to time. If you encounter errors, it's most often due to files in base that
+# have been added or moved since somebody last tried this script. Generally
+# such errors are easy to diagnose.
+
+"""Bootstraps gn.
+
+It is done by first building it manually in a temporary directory, then building
+it with its own BUILD.gn to the final destination.
+"""
+
+import contextlib
+import errno
+import logging
+import optparse
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+BOOTSTRAP_DIR = os.path.dirname(os.path.abspath(__file__))
+GN_ROOT = os.path.dirname(BOOTSTRAP_DIR)
+SRC_ROOT = os.path.dirname(os.path.dirname(GN_ROOT))
+
+is_linux = sys.platform.startswith('linux')
+is_mac = sys.platform.startswith('darwin')
+is_posix = is_linux or is_mac
+
+def check_call(cmd, **kwargs):
+ logging.debug('Running: %s', ' '.join(cmd))
+ subprocess.check_call(cmd, cwd=GN_ROOT, **kwargs)
+
+def mkdir_p(path):
+ try:
+ os.makedirs(path)
+ except OSError as e:
+ if e.errno == errno.EEXIST and os.path.isdir(path):
+ pass
+ else: raise
+
+@contextlib.contextmanager
+def scoped_tempdir():
+ path = tempfile.mkdtemp()
+ try:
+ yield path
+ finally:
+ shutil.rmtree(path)
+
+
+def run_build(tempdir, options):
+ if options.debug:
+ build_rel = os.path.join('out', 'Debug')
+ else:
+ build_rel = os.path.join('out', 'Release')
+ build_root = os.path.join(SRC_ROOT, build_rel)
+
+ print 'Building gn manually in a temporary directory for bootstrapping...'
+ build_gn_with_ninja_manually(tempdir, options)
+ temp_gn = os.path.join(tempdir, 'gn')
+ out_gn = os.path.join(build_root, 'gn')
+
+ if options.no_rebuild:
+ mkdir_p(build_root)
+ shutil.copy2(temp_gn, out_gn)
+ else:
+ print 'Building gn using itself to %s...' % build_rel
+ build_gn_with_gn(temp_gn, build_root, options)
+
+ if options.output:
+ # Preserve the executable permission bit.
+ shutil.copy2(out_gn, options.output)
+
+
+def main(argv):
+ parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
+ parser.add_option('-d', '--debug', action='store_true',
+ help='Do a debug build. Defaults to release build.')
+ parser.add_option('-o', '--output',
+ help='place output in PATH', metavar='PATH')
+ parser.add_option('-s', '--no-rebuild', action='store_true',
+ help='Do not rebuild GN with GN.')
+ parser.add_option('--no-clean', action='store_true',
+ help='Re-used build directory instead of using new '
+ 'temporary location each time')
+ parser.add_option('--gn-gen-args', help='Args to pass to gn gen --args')
+ parser.add_option('-v', '--verbose', action='store_true',
+ help='Log more details')
+ options, args = parser.parse_args(argv)
+
+ if args:
+ parser.error('Unrecognized command line arguments: %s.' % ', '.join(args))
+
+ logging.basicConfig(level=logging.DEBUG if options.verbose else logging.ERROR)
+
+ try:
+ if options.no_clean:
+ build_dir = os.path.join(SRC_ROOT, 'out_bootstrap')
+ if not os.path.exists(build_dir):
+ os.makedirs(build_dir)
+ return run_build(build_dir, options)
+ else:
+ with scoped_tempdir() as tempdir:
+ return run_build(tempdir, options)
+ except subprocess.CalledProcessError as e:
+ print >> sys.stderr, str(e)
+ return 1
+ return 0
+
+
+def build_gn_with_ninja_manually(tempdir, options):
+ root_gen_dir = os.path.join(tempdir, 'gen')
+ mkdir_p(root_gen_dir)
+
+ if is_linux:
+ mkdir_p(os.path.join(root_gen_dir, 'base', 'allocator'))
+ with tempfile.NamedTemporaryFile() as f:
+ f.write('--flags USE_EXPERIMENTAL_ALLOCATOR_SHIM=true')
+ f.flush()
+
+ check_call([
+ os.path.join(SRC_ROOT, 'build', 'write_buildflag_header.py'),
+ '--output', 'base/allocator/features.h',
+ '--gen-dir', root_gen_dir,
+ '--definitions', f.name,
+ ])
+
+ if is_mac:
+ # //base/build_time.cc needs base/generated_build_date.h,
+ # and this file is only included for Mac builds.
+ mkdir_p(os.path.join(root_gen_dir, 'base'))
+ check_call([
+ os.path.join(SRC_ROOT, 'build', 'write_build_date_header.py'),
+ os.path.join(root_gen_dir, 'base', 'generated_build_date.h'),
+ 'default'
+ ])
+
+ write_ninja(os.path.join(tempdir, 'build.ninja'), root_gen_dir, options)
+ cmd = ['ninja', '-C', tempdir]
+ if options.verbose:
+ cmd.append('-v')
+ cmd.append('gn')
+ check_call(cmd)
+
+def write_ninja(path, root_gen_dir, options):
+ cc = os.environ.get('CC', '')
+ cxx = os.environ.get('CXX', '')
+ cflags = os.environ.get('CFLAGS', '').split()
+ cflags_cc = os.environ.get('CXXFLAGS', '').split()
+ ld = os.environ.get('LD', cxx)
+ ldflags = os.environ.get('LDFLAGS', '').split()
+ include_dirs = [root_gen_dir, SRC_ROOT]
+ libs = []
+
+ # //base/allocator/allocator_extension.cc needs this macro defined,
+ # otherwise there would be link errors.
+ cflags.extend(['-DNO_TCMALLOC'])
+
+ if is_posix:
+ if options.debug:
+ cflags.extend(['-O0', '-g'])
+ else:
+ cflags.extend(['-O2', '-g0'])
+
+ cflags.extend([
+ '-D_FILE_OFFSET_BITS=64',
+ '-pthread',
+ '-pipe',
+ '-fno-exceptions'
+ ])
+ cflags_cc.extend(['-std=c++11', '-Wno-c++11-narrowing'])
+
+ static_libraries = {
+ 'base': {'sources': [], 'tool': 'cxx', 'include_dirs': []},
+ 'dynamic_annotations': {'sources': [], 'tool': 'cc', 'include_dirs': []},
+ 'gn': {'sources': [], 'tool': 'cxx', 'include_dirs': []},
+ }
+
+ for name in os.listdir(GN_ROOT):
+ if not name.endswith('.cc'):
+ continue
+ if name.endswith('_unittest.cc'):
+ continue
+ if name == 'run_all_unittests.cc':
+ continue
+ full_path = os.path.join(GN_ROOT, name)
+ static_libraries['gn']['sources'].append(
+ os.path.relpath(full_path, SRC_ROOT))
+
+ static_libraries['dynamic_annotations']['sources'].extend([
+ 'base/third_party/dynamic_annotations/dynamic_annotations.c',
+ 'base/third_party/superfasthash/superfasthash.c',
+ ])
+ static_libraries['base']['sources'].extend([
+ 'base/allocator/allocator_extension.cc',
+ 'base/allocator/allocator_shim.cc',
+ 'base/at_exit.cc',
+ 'base/base_paths.cc',
+ 'base/base_switches.cc',
+ 'base/callback_internal.cc',
+ 'base/command_line.cc',
+ 'base/debug/alias.cc',
+ 'base/debug/stack_trace.cc',
+ 'base/debug/task_annotator.cc',
+ 'base/environment.cc',
+ 'base/files/file.cc',
+ 'base/files/file_enumerator.cc',
+ 'base/files/file_path.cc',
+ 'base/files/file_path_constants.cc',
+ 'base/files/file_tracing.cc',
+ 'base/files/file_util.cc',
+ 'base/files/memory_mapped_file.cc',
+ 'base/files/scoped_file.cc',
+ 'base/hash.cc',
+ 'base/json/json_parser.cc',
+ 'base/json/json_reader.cc',
+ 'base/json/json_string_value_serializer.cc',
+ 'base/json/json_writer.cc',
+ 'base/json/string_escape.cc',
+ 'base/lazy_instance.cc',
+ 'base/location.cc',
+ 'base/logging.cc',
+ 'base/md5.cc',
+ 'base/memory/ref_counted.cc',
+ 'base/memory/ref_counted_memory.cc',
+ 'base/memory/singleton.cc',
+ 'base/memory/weak_ptr.cc',
+ 'base/message_loop/incoming_task_queue.cc',
+ 'base/message_loop/message_loop.cc',
+ 'base/message_loop/message_loop_task_runner.cc',
+ 'base/message_loop/message_pump.cc',
+ 'base/message_loop/message_pump_default.cc',
+ 'base/metrics/bucket_ranges.cc',
+ 'base/metrics/histogram.cc',
+ 'base/metrics/histogram_base.cc',
+ 'base/metrics/histogram_samples.cc',
+ 'base/metrics/metrics_hashes.cc',
+ 'base/metrics/persistent_histogram_allocator.cc',
+ 'base/metrics/persistent_memory_allocator.cc',
+ 'base/metrics/persistent_sample_map.cc',
+ 'base/metrics/sample_map.cc',
+ 'base/metrics/sample_vector.cc',
+ 'base/metrics/sparse_histogram.cc',
+ 'base/metrics/statistics_recorder.cc',
+ 'base/path_service.cc',
+ 'base/pending_task.cc',
+ 'base/pickle.cc',
+ 'base/process/kill.cc',
+ 'base/process/process_iterator.cc',
+ 'base/process/process_metrics.cc',
+ 'base/profiler/scoped_profile.cc',
+ 'base/profiler/scoped_tracker.cc',
+ 'base/profiler/tracked_time.cc',
+ 'base/run_loop.cc',
+ 'base/sequence_checker_impl.cc',
+ 'base/sequenced_task_runner.cc',
+ 'base/sha1_portable.cc',
+ 'base/strings/pattern.cc',
+ 'base/strings/string16.cc',
+ 'base/strings/string_number_conversions.cc',
+ 'base/strings/string_piece.cc',
+ 'base/strings/string_split.cc',
+ 'base/strings/string_util.cc',
+ 'base/strings/string_util_constants.cc',
+ 'base/strings/stringprintf.cc',
+ 'base/strings/utf_string_conversion_utils.cc',
+ 'base/strings/utf_string_conversions.cc',
+ 'base/synchronization/cancellation_flag.cc',
+ 'base/synchronization/lock.cc',
+ 'base/sys_info.cc',
+ 'base/task_runner.cc',
+ 'base/third_party/dmg_fp/dtoa_wrapper.cc',
+ 'base/third_party/dmg_fp/g_fmt.cc',
+ 'base/third_party/icu/icu_utf.cc',
+ 'base/third_party/nspr/prtime.cc',
+ 'base/thread_task_runner_handle.cc',
+ 'base/threading/non_thread_safe_impl.cc',
+ 'base/threading/post_task_and_reply_impl.cc',
+ 'base/threading/sequenced_worker_pool.cc',
+ 'base/threading/simple_thread.cc',
+ 'base/threading/thread.cc',
+ 'base/threading/thread_checker_impl.cc',
+ 'base/threading/thread_collision_warner.cc',
+ 'base/threading/thread_id_name_manager.cc',
+ 'base/threading/thread_local_storage.cc',
+ 'base/threading/thread_restrictions.cc',
+ 'base/threading/worker_pool.cc',
+ 'base/time/time.cc',
+ 'base/timer/elapsed_timer.cc',
+ 'base/timer/timer.cc',
+ 'base/trace_event/heap_profiler_allocation_context.cc',
+ 'base/trace_event/heap_profiler_allocation_context_tracker.cc',
+ 'base/trace_event/heap_profiler_allocation_register.cc',
+ 'base/trace_event/heap_profiler_heap_dump_writer.cc',
+ 'base/trace_event/heap_profiler_stack_frame_deduplicator.cc',
+ 'base/trace_event/heap_profiler_type_name_deduplicator.cc',
+ 'base/trace_event/memory_allocator_dump.cc',
+ 'base/trace_event/memory_allocator_dump_guid.cc',
+ 'base/trace_event/memory_dump_manager.cc',
+ 'base/trace_event/memory_dump_request_args.cc',
+ 'base/trace_event/memory_dump_session_state.cc',
+ 'base/trace_event/process_memory_dump.cc',
+ 'base/trace_event/process_memory_maps.cc',
+ 'base/trace_event/process_memory_totals.cc',
+ 'base/trace_event/trace_buffer.cc',
+ 'base/trace_event/trace_config.cc',
+ 'base/trace_event/trace_event_argument.cc',
+ 'base/trace_event/trace_event_impl.cc',
+ 'base/trace_event/trace_event_memory_overhead.cc',
+ 'base/trace_event/trace_event_synthetic_delay.cc',
+ 'base/trace_event/trace_log.cc',
+ 'base/trace_event/trace_log_constants.cc',
+ 'base/trace_event/trace_sampling_thread.cc',
+ 'base/trace_event/tracing_agent.cc',
+ 'base/tracked_objects.cc',
+ 'base/tracking_info.cc',
+ 'base/values.cc',
+ 'base/vlog.cc',
+ ])
+
+ if is_posix:
+ static_libraries['base']['sources'].extend([
+ 'base/base_paths_posix.cc',
+ 'base/debug/debugger_posix.cc',
+ 'base/debug/stack_trace_posix.cc',
+ 'base/files/file_enumerator_posix.cc',
+ 'base/files/file_posix.cc',
+ 'base/files/file_util_posix.cc',
+ 'base/files/memory_mapped_file_posix.cc',
+ 'base/message_loop/message_pump_libevent.cc',
+ 'base/posix/file_descriptor_shuffle.cc',
+ 'base/posix/safe_strerror.cc',
+ 'base/process/kill_posix.cc',
+ 'base/process/process_handle_posix.cc',
+ 'base/process/process_metrics_posix.cc',
+ 'base/process/process_posix.cc',
+ 'base/synchronization/condition_variable_posix.cc',
+ 'base/synchronization/lock_impl_posix.cc',
+ 'base/synchronization/waitable_event_posix.cc',
+ 'base/sys_info_posix.cc',
+ 'base/threading/platform_thread_internal_posix.cc',
+ 'base/threading/platform_thread_posix.cc',
+ 'base/threading/thread_local_posix.cc',
+ 'base/threading/thread_local_storage_posix.cc',
+ 'base/threading/worker_pool_posix.cc',
+ 'base/time/time_posix.cc',
+ 'base/trace_event/heap_profiler_allocation_register_posix.cc',
+ ])
+ static_libraries['libevent'] = {
+ 'sources': [
+ 'base/third_party/libevent/buffer.c',
+ 'base/third_party/libevent/evbuffer.c',
+ 'base/third_party/libevent/evdns.c',
+ 'base/third_party/libevent/event.c',
+ 'base/third_party/libevent/event_tagging.c',
+ 'base/third_party/libevent/evrpc.c',
+ 'base/third_party/libevent/evutil.c',
+ 'base/third_party/libevent/http.c',
+ 'base/third_party/libevent/log.c',
+ 'base/third_party/libevent/poll.c',
+ 'base/third_party/libevent/select.c',
+ 'base/third_party/libevent/signal.c',
+ 'base/third_party/libevent/strlcpy.c',
+ ],
+ 'tool': 'cc',
+ 'include_dirs': [],
+ 'cflags': cflags + ['-DHAVE_CONFIG_H'],
+ }
+
+
+ if is_linux:
+ libs.extend(['-lrt'])
+ ldflags.extend(['-pthread'])
+
+ static_libraries['xdg_user_dirs'] = {
+ 'sources': [
+ 'base/third_party/xdg_user_dirs/xdg_user_dir_lookup.cc',
+ ],
+ 'tool': 'cxx',
+ }
+ static_libraries['base']['sources'].extend([
+ 'base/allocator/allocator_shim_default_dispatch_to_glibc.cc',
+ 'base/memory/shared_memory_posix.cc',
+ 'base/nix/xdg_util.cc',
+ 'base/process/internal_linux.cc',
+ 'base/process/process_handle_linux.cc',
+ 'base/process/process_iterator_linux.cc',
+ 'base/process/process_linux.cc',
+ 'base/process/process_metrics_linux.cc',
+ 'base/strings/sys_string_conversions_posix.cc',
+ 'base/sys_info_linux.cc',
+ 'base/threading/platform_thread_linux.cc',
+ 'base/trace_event/malloc_dump_provider.cc',
+ ])
+ static_libraries['libevent']['include_dirs'].extend([
+ os.path.join(SRC_ROOT, 'base', 'third_party', 'libevent', 'linux')
+ ])
+ static_libraries['libevent']['sources'].extend([
+ 'base/third_party/libevent/epoll.c',
+ ])
+
+
+ if is_mac:
+ static_libraries['base']['sources'].extend([
+ 'base/base_paths_mac.mm',
+ 'base/build_time.cc',
+ 'base/rand_util.cc',
+ 'base/rand_util_posix.cc',
+ 'base/files/file_util_mac.mm',
+ 'base/mac/bundle_locations.mm',
+ 'base/mac/call_with_eh_frame.cc',
+ 'base/mac/call_with_eh_frame_asm.S',
+ 'base/mac/foundation_util.mm',
+ 'base/mac/mach_logging.cc',
+ 'base/mac/scoped_mach_port.cc',
+ 'base/mac/scoped_mach_vm.cc',
+ 'base/mac/scoped_nsautorelease_pool.mm',
+ 'base/memory/shared_memory_handle_mac.cc',
+ 'base/memory/shared_memory_mac.cc',
+ 'base/message_loop/message_pump_mac.mm',
+ 'base/metrics/field_trial.cc',
+ 'base/process/process_handle_mac.cc',
+ 'base/process/process_iterator_mac.cc',
+ 'base/process/process_metrics_mac.cc',
+ 'base/strings/sys_string_conversions_mac.mm',
+ 'base/time/time_mac.cc',
+ 'base/threading/platform_thread_mac.mm',
+ 'base/trace_event/malloc_dump_provider.cc',
+ ])
+ static_libraries['libevent']['include_dirs'].extend([
+ os.path.join(SRC_ROOT, 'base', 'third_party', 'libevent', 'mac')
+ ])
+ static_libraries['libevent']['sources'].extend([
+ 'base/third_party/libevent/kqueue.c',
+ ])
+
+
+ if is_mac:
+ template_filename = 'build_mac.ninja.template'
+ else:
+ template_filename = 'build.ninja.template'
+
+ with open(os.path.join(GN_ROOT, 'bootstrap', template_filename)) as f:
+ ninja_template = f.read()
+
+ def src_to_obj(path):
+ return '%s' % os.path.splitext(path)[0] + '.o'
+
+ ninja_lines = []
+ for library, settings in static_libraries.iteritems():
+ for src_file in settings['sources']:
+ ninja_lines.extend([
+ 'build %s: %s %s' % (src_to_obj(src_file),
+ settings['tool'],
+ os.path.join(SRC_ROOT, src_file)),
+ ' includes = %s' % ' '.join(
+ ['-I' + dirname for dirname in
+ include_dirs + settings.get('include_dirs', [])]),
+ ' cflags = %s' % ' '.join(cflags + settings.get('cflags', [])),
+ ' cflags_cc = %s' %
+ ' '.join(cflags_cc + settings.get('cflags_cc', [])),
+ ])
+ if cc:
+ ninja_lines.append(' cc = %s' % cc)
+ if cxx:
+ ninja_lines.append(' cxx = %s' % cxx)
+
+ ninja_lines.append('build %s.a: alink_thin %s' % (
+ library,
+ ' '.join([src_to_obj(src_file) for src_file in settings['sources']])))
+
+ if is_mac:
+ libs.extend([
+ '-framework', 'AppKit',
+ '-framework', 'CoreFoundation',
+ '-framework', 'Foundation',
+ '-framework', 'Security',
+ ]);
+
+ ninja_lines.extend([
+ 'build gn: link %s' % (
+ ' '.join(['%s.a' % library for library in static_libraries])),
+ ' ldflags = %s' % ' '.join(ldflags),
+ ' libs = %s' % ' '.join(libs),
+ ])
+ if ld:
+ ninja_lines.append(' ld = %s' % ld)
+ else:
+ ninja_lines.append(' ld = $ldxx')
+
+ ninja_lines.append('') # Make sure the file ends with a newline.
+
+ with open(path, 'w') as f:
+ f.write(ninja_template + '\n'.join(ninja_lines))
+
+
+def build_gn_with_gn(temp_gn, build_dir, options):
+ gn_gen_args = options.gn_gen_args or ''
+ if not options.debug:
+ gn_gen_args += ' is_debug=false'
+ cmd = [temp_gn, 'gen', build_dir, '--args=%s' % gn_gen_args]
+ check_call(cmd)
+
+ cmd = ['ninja', '-C', build_dir]
+ if options.verbose:
+ cmd.append('-v')
+ cmd.append('gn')
+ check_call(cmd)
+
+ if not options.debug:
+ check_call(['strip', os.path.join(build_dir, 'gn')])
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/chromium/tools/gn/bootstrap/build.ninja.template b/chromium/tools/gn/bootstrap/build.ninja.template
new file mode 100644
index 00000000000..2ef5301e328
--- /dev/null
+++ b/chromium/tools/gn/bootstrap/build.ninja.template
@@ -0,0 +1,25 @@
+cc = cc
+cxx = c++
+ld = $cc
+ldxx = $cxx
+ar = ar
+
+rule cc
+ command = $cc -MMD -MF $out.d $defines $includes $cflags $cflags_c -c $in -o $out
+ description = CC $out
+ depfile = $out.d
+ deps = gcc
+
+rule cxx
+ command = $cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc -c $in -o $out
+ description = CXX $out
+ depfile = $out.d
+ deps = gcc
+
+rule alink_thin
+ command = rm -f $out && $ar rcsT $out $in
+ description = AR $out
+
+rule link
+ command = $ld $ldflags -o $out -Wl,--start-group $in $solibs -Wl,--end-group $libs
+ description = LINK $out
diff --git a/chromium/tools/gn/bootstrap/build_mac.ninja.template b/chromium/tools/gn/bootstrap/build_mac.ninja.template
new file mode 100644
index 00000000000..409ea75b1e3
--- /dev/null
+++ b/chromium/tools/gn/bootstrap/build_mac.ninja.template
@@ -0,0 +1,25 @@
+cc = cc
+cxx = c++
+ld = $cc
+ldxx = $cxx
+ar = ar
+
+rule cc
+ command = $cc -MMD -MF $out.d $defines $includes $cflags $cflags_c -c $in -o $out
+ description = CC $out
+ depfile = $out.d
+ deps = gcc
+
+rule cxx
+ command = $cxx -MMD -MF $out.d $defines $includes $cflags $cflags_cc -c $in -o $out
+ description = CXX $out
+ depfile = $out.d
+ deps = gcc
+
+rule alink_thin
+ command = rm -f $out && libtool -static -o $out $in
+ description = AR $out
+
+rule link
+ command = $ld $ldflags -o $out $in $solibs $libs
+ description = LINK $out
diff --git a/chromium/tools/gn/build_settings.cc b/chromium/tools/gn/build_settings.cc
new file mode 100644
index 00000000000..14f1aa8697b
--- /dev/null
+++ b/chromium/tools/gn/build_settings.cc
@@ -0,0 +1,66 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/build_settings.h"
+
+#include <utility>
+
+#include "base/files/file_util.h"
+#include "tools/gn/filesystem_utils.h"
+
+BuildSettings::BuildSettings()
+ : check_for_bad_items_(true) {
+}
+
+BuildSettings::BuildSettings(const BuildSettings& other)
+ : root_path_(other.root_path_),
+ root_path_utf8_(other.root_path_utf8_),
+ secondary_source_path_(other.secondary_source_path_),
+ python_path_(other.python_path_),
+ build_config_file_(other.build_config_file_),
+ build_dir_(other.build_dir_),
+ build_args_(other.build_args_),
+ check_for_bad_items_(true) {
+}
+
+BuildSettings::~BuildSettings() {
+}
+
+void BuildSettings::SetRootPath(const base::FilePath& r) {
+ DCHECK(r.value()[r.value().size() - 1] != base::FilePath::kSeparators[0]);
+ root_path_ = r.NormalizePathSeparatorsTo('/');
+ root_path_utf8_ = FilePathToUTF8(root_path_);
+}
+
+void BuildSettings::SetSecondarySourcePath(const SourceDir& d) {
+ secondary_source_path_ = GetFullPath(d).NormalizePathSeparatorsTo('/');
+}
+
+void BuildSettings::SetBuildDir(const SourceDir& d) {
+ build_dir_ = d;
+}
+
+base::FilePath BuildSettings::GetFullPath(const SourceFile& file) const {
+ return file.Resolve(root_path_).NormalizePathSeparatorsTo('/');
+}
+
+base::FilePath BuildSettings::GetFullPath(const SourceDir& dir) const {
+ return dir.Resolve(root_path_).NormalizePathSeparatorsTo('/');
+}
+
+base::FilePath BuildSettings::GetFullPathSecondary(
+ const SourceFile& file) const {
+ return file.Resolve(secondary_source_path_).NormalizePathSeparatorsTo('/');
+}
+
+base::FilePath BuildSettings::GetFullPathSecondary(
+ const SourceDir& dir) const {
+ return dir.Resolve(secondary_source_path_).NormalizePathSeparatorsTo('/');
+}
+
+void BuildSettings::ItemDefined(std::unique_ptr<Item> item) const {
+ DCHECK(item);
+ if (!item_defined_callback_.is_null())
+ item_defined_callback_.Run(std::move(item));
+}
diff --git a/chromium/tools/gn/build_settings.h b/chromium/tools/gn/build_settings.h
new file mode 100644
index 00000000000..5424cf95234
--- /dev/null
+++ b/chromium/tools/gn/build_settings.h
@@ -0,0 +1,130 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_BUILD_SETTINGS_H_
+#define TOOLS_GN_BUILD_SETTINGS_H_
+
+#include <map>
+#include <memory>
+#include <set>
+#include <utility>
+
+#include "base/callback.h"
+#include "base/files/file_path.h"
+#include "base/macros.h"
+#include "tools/gn/args.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/source_dir.h"
+#include "tools/gn/source_file.h"
+
+class Item;
+
+// Settings for one build, which is one toplevel output directory. There
+// may be multiple Settings objects that refer to this, one for each toolchain.
+class BuildSettings {
+ public:
+ typedef base::Callback<void(std::unique_ptr<Item>)> ItemDefinedCallback;
+ typedef base::Callback<void(const std::string&)> PrintCallback;
+
+ BuildSettings();
+ BuildSettings(const BuildSettings& other);
+ ~BuildSettings();
+
+ // Absolute path of the source root on the local system. Everything is
+ // relative to this. Does not end in a [back]slash.
+ const base::FilePath& root_path() const { return root_path_; }
+ const std::string& root_path_utf8() const { return root_path_utf8_; }
+ void SetRootPath(const base::FilePath& r);
+
+ // When nonempty, specifies a parallel directory hierarchy in which to
+ // search for buildfiles if they're not found in the root hierarchy. This
+ // allows us to keep buildfiles in a separate tree during development.
+ const base::FilePath& secondary_source_path() const {
+ return secondary_source_path_;
+ }
+ void SetSecondarySourcePath(const SourceDir& d);
+
+ // Path of the python executable to run scripts with.
+ base::FilePath python_path() const { return python_path_; }
+ void set_python_path(const base::FilePath& p) { python_path_ = p; }
+
+ const SourceFile& build_config_file() const { return build_config_file_; }
+ void set_build_config_file(const SourceFile& f) { build_config_file_ = f; }
+
+ // The build directory is the root of all output files. The default toolchain
+ // files go into here, and non-default toolchains will have separate
+ // toolchain-specific root directories inside this.
+ const SourceDir& build_dir() const { return build_dir_; }
+ void SetBuildDir(const SourceDir& dir);
+
+ // The build args are normally specified on the command-line.
+ Args& build_args() { return build_args_; }
+ const Args& build_args() const { return build_args_; }
+
+ // Returns the full absolute OS path corresponding to the given file in the
+ // root source tree.
+ base::FilePath GetFullPath(const SourceFile& file) const;
+ base::FilePath GetFullPath(const SourceDir& dir) const;
+
+ // Returns the absolute OS path inside the secondary source path. Will return
+ // an empty FilePath if the secondary source path is empty. When loading a
+ // buildfile, the GetFullPath should always be consulted first.
+ base::FilePath GetFullPathSecondary(const SourceFile& file) const;
+ base::FilePath GetFullPathSecondary(const SourceDir& dir) const;
+
+ // Called when an item is defined from a background thread.
+ void ItemDefined(std::unique_ptr<Item> item) const;
+ void set_item_defined_callback(ItemDefinedCallback cb) {
+ item_defined_callback_ = cb;
+ }
+
+ // Defines a callback that will be used to override the behavior of the
+ // print function. This is used in tests to collect print output. If the
+ // callback is is_null() (the default) the output will be printed to the
+ // console.
+ const PrintCallback& print_callback() const { return print_callback_; }
+ void set_print_callback(const PrintCallback& cb) { print_callback_ = cb; }
+
+ // A list of files that can call exec_script(). If the returned pointer is
+ // null, exec_script may be called from anywhere.
+ const std::set<SourceFile>* exec_script_whitelist() const {
+ return exec_script_whitelist_.get();
+ }
+ void set_exec_script_whitelist(std::unique_ptr<std::set<SourceFile>> list) {
+ exec_script_whitelist_ = std::move(list);
+ }
+
+ // When set (the default), code should perform normal validation of inputs
+ // and structures, like undefined or possibly incorrectly used things. For
+ // some interrogation commands, we don't care about this and actually want
+ // to allow the user to check the structure of the build to solve their
+ // problem, and these checks are undesirable.
+ bool check_for_bad_items() const {
+ return check_for_bad_items_;
+ }
+ void set_check_for_bad_items(bool c) {
+ check_for_bad_items_ = c;
+ }
+
+ private:
+ base::FilePath root_path_;
+ std::string root_path_utf8_;
+ base::FilePath secondary_source_path_;
+ base::FilePath python_path_;
+
+ SourceFile build_config_file_;
+ SourceDir build_dir_;
+ Args build_args_;
+
+ ItemDefinedCallback item_defined_callback_;
+ PrintCallback print_callback_;
+
+ std::unique_ptr<std::set<SourceFile>> exec_script_whitelist_;
+
+ bool check_for_bad_items_;
+
+ BuildSettings& operator=(const BuildSettings& other); // Disallow.
+};
+
+#endif // TOOLS_GN_BUILD_SETTINGS_H_
diff --git a/chromium/tools/gn/builder.cc b/chromium/tools/gn/builder.cc
new file mode 100644
index 00000000000..22b4c478c9b
--- /dev/null
+++ b/chromium/tools/gn/builder.cc
@@ -0,0 +1,515 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/builder.h"
+
+#include <stddef.h>
+#include <utility>
+
+#include "tools/gn/config.h"
+#include "tools/gn/deps_iterator.h"
+#include "tools/gn/err.h"
+#include "tools/gn/loader.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/target.h"
+#include "tools/gn/trace.h"
+
+namespace {
+
+typedef BuilderRecord::BuilderRecordSet BuilderRecordSet;
+
+// Recursively looks in the tree for a given node, returning true if it
+// was found in the dependency graph. This is used to see if a given node
+// participates in a cycle.
+//
+// If this returns true, the cycle will be in *path. This should point to an
+// empty vector for the first call. During computation, the path will contain
+// the full dependency path to the current node.
+//
+// Return false means no cycle was found.
+bool RecursiveFindCycle(const BuilderRecord* search_in,
+ std::vector<const BuilderRecord*>* path) {
+ path->push_back(search_in);
+ for (const auto& cur : search_in->unresolved_deps()) {
+ std::vector<const BuilderRecord*>::iterator found =
+ std::find(path->begin(), path->end(), cur);
+ if (found != path->end()) {
+ // This item is already in the set, we found the cycle. Everything before
+ // the first definition of cur is irrelevant to the cycle.
+ path->erase(path->begin(), found);
+ path->push_back(cur);
+ return true;
+ }
+
+ if (RecursiveFindCycle(cur, path))
+ return true; // Found cycle.
+ }
+ path->pop_back();
+ return false;
+}
+
+} // namespace
+
+Builder::Builder(Loader* loader) : loader_(loader) {
+}
+
+Builder::~Builder() {
+}
+
+void Builder::ItemDefined(std::unique_ptr<Item> item) {
+ ScopedTrace trace(TraceItem::TRACE_DEFINE_TARGET, item->label());
+ trace.SetToolchain(item->settings()->toolchain_label());
+
+ BuilderRecord::ItemType type = BuilderRecord::TypeOfItem(item.get());
+
+ Err err;
+ BuilderRecord* record =
+ GetOrCreateRecordOfType(item->label(), item->defined_from(), type, &err);
+ if (!record) {
+ g_scheduler->FailWithError(err);
+ return;
+ }
+
+ // Check that it's not been already defined.
+ if (record->item()) {
+ err = Err(item->defined_from(), "Duplicate definition.",
+ "The item\n " + item->label().GetUserVisibleName(false) +
+ "\nwas already defined.");
+ err.AppendSubErr(Err(record->item()->defined_from(),
+ "Previous definition:"));
+ g_scheduler->FailWithError(err);
+ return;
+ }
+
+ record->set_item(std::move(item));
+
+ // Do target-specific dependency setup. This will also schedule dependency
+ // loads for targets that are required.
+ switch (type) {
+ case BuilderRecord::ITEM_TARGET:
+ TargetDefined(record, &err);
+ break;
+ case BuilderRecord::ITEM_CONFIG:
+ ConfigDefined(record, &err);
+ break;
+ case BuilderRecord::ITEM_TOOLCHAIN:
+ ToolchainDefined(record, &err);
+ break;
+ default:
+ break;
+ }
+ if (err.has_error()) {
+ g_scheduler->FailWithError(err);
+ return;
+ }
+
+ if (record->can_resolve()) {
+ if (!ResolveItem(record, &err)) {
+ g_scheduler->FailWithError(err);
+ return;
+ }
+ }
+}
+
+const Item* Builder::GetItem(const Label& label) const {
+ const BuilderRecord* record = GetRecord(label);
+ if (!record)
+ return nullptr;
+ return record->item();
+}
+
+const Toolchain* Builder::GetToolchain(const Label& label) const {
+ const BuilderRecord* record = GetRecord(label);
+ if (!record)
+ return nullptr;
+ if (!record->item())
+ return nullptr;
+ return record->item()->AsToolchain();
+}
+
+std::vector<const BuilderRecord*> Builder::GetAllRecords() const {
+ std::vector<const BuilderRecord*> result;
+ result.reserve(records_.size());
+ for (const auto& record : records_)
+ result.push_back(record.second);
+ return result;
+}
+
+std::vector<const Target*> Builder::GetAllResolvedTargets() const {
+ std::vector<const Target*> result;
+ result.reserve(records_.size());
+ for (const auto& record : records_) {
+ if (record.second->type() == BuilderRecord::ITEM_TARGET &&
+ record.second->should_generate() && record.second->item())
+ result.push_back(record.second->item()->AsTarget());
+ }
+ return result;
+}
+
+const BuilderRecord* Builder::GetRecord(const Label& label) const {
+ // Forward to the non-const version.
+ return const_cast<Builder*>(this)->GetRecord(label);
+}
+
+BuilderRecord* Builder::GetRecord(const Label& label) {
+ RecordMap::iterator found = records_.find(label);
+ if (found == records_.end())
+ return nullptr;
+ return found->second;
+}
+
+bool Builder::CheckForBadItems(Err* err) const {
+ // Look for errors where we find a defined node with an item that refers to
+ // an undefined one with no item. There may be other nodes in turn depending
+ // on our defined one, but listing those isn't helpful: we want to find the
+ // broken link.
+ //
+ // This finds normal "missing dependency" errors but does not find circular
+ // dependencies because in this case all items in the cycle will be GENERATED
+ // but none will be resolved. If this happens, we'll check explicitly for
+ // that below.
+ std::vector<const BuilderRecord*> bad_records;
+ std::string depstring;
+ for (const auto& record_pair : records_) {
+ const BuilderRecord* src = record_pair.second;
+ if (!src->should_generate())
+ continue; // Skip ungenerated nodes.
+
+ if (!src->resolved()) {
+ bad_records.push_back(src);
+
+ // Check dependencies.
+ for (const auto& dest : src->unresolved_deps()) {
+ if (!dest->item()) {
+ depstring += src->label().GetUserVisibleName(true) +
+ "\n needs " + dest->label().GetUserVisibleName(true) + "\n";
+ }
+ }
+ }
+ }
+
+ if (!depstring.empty()) {
+ *err = Err(Location(), "Unresolved dependencies.", depstring);
+ return false;
+ }
+
+ if (!bad_records.empty()) {
+ // Our logic above found a bad node but didn't identify the problem. This
+ // normally means a circular dependency.
+ depstring = CheckForCircularDependencies(bad_records);
+ if (depstring.empty()) {
+ // Something's very wrong, just dump out the bad nodes.
+ depstring = "I have no idea what went wrong, but these are unresolved, "
+ "possibly due to an\ninternal error:";
+ for (const auto& bad_record : bad_records) {
+ depstring += "\n\"" +
+ bad_record->label().GetUserVisibleName(false) + "\"";
+ }
+ *err = Err(Location(), "", depstring);
+ } else {
+ *err = Err(Location(), "Dependency cycle:", depstring);
+ }
+ return false;
+ }
+
+ return true;
+}
+
+bool Builder::TargetDefined(BuilderRecord* record, Err* err) {
+ Target* target = record->item()->AsTarget();
+
+ if (!AddDeps(record, target->public_deps(), err) ||
+ !AddDeps(record, target->private_deps(), err) ||
+ !AddDeps(record, target->data_deps(), err) ||
+ !AddDeps(record, target->configs().vector(), err) ||
+ !AddDeps(record, target->all_dependent_configs(), err) ||
+ !AddDeps(record, target->public_configs(), err) ||
+ !AddToolchainDep(record, target, err))
+ return false;
+
+ // All targets in the default toolchain get generated by default. We also
+ // check if this target was previously marked as "required" and force setting
+ // the bit again so the target's dependencies (which we now know) get the
+ // required bit pushed to them.
+ if (record->should_generate() || target->settings()->is_default())
+ RecursiveSetShouldGenerate(record, true);
+
+ return true;
+}
+
+bool Builder::ConfigDefined(BuilderRecord* record, Err* err) {
+ Config* config = record->item()->AsConfig();
+ if (!AddDeps(record, config->configs(), err))
+ return false;
+
+ // Make sure all deps of this config are scheduled to be loaded. For other
+ // item types like targets, the "should generate" flag is propagated around
+ // to mark whether this should happen. We could call
+ // RecursiveSetShouldGenerate to do this step here, but since neither configs
+ // nor anything they depend on is actually written, the "generate" flag isn't
+ // relevant and means extra bookkeeping. Just force load any deps of this
+ // config.
+ for (const auto& cur : record->all_deps())
+ ScheduleItemLoadIfNecessary(cur);
+
+ return true;
+}
+
+bool Builder::ToolchainDefined(BuilderRecord* record, Err* err) {
+ Toolchain* toolchain = record->item()->AsToolchain();
+
+ if (!AddDeps(record, toolchain->deps(), err))
+ return false;
+
+ // The default toolchain gets generated by default. Also propagate the
+ // generate flag if it depends on items in a non-default toolchain.
+ if (record->should_generate() ||
+ toolchain->settings()->default_toolchain_label() == toolchain->label())
+ RecursiveSetShouldGenerate(record, true);
+
+ loader_->ToolchainLoaded(toolchain);
+ return true;
+}
+
+BuilderRecord* Builder::GetOrCreateRecordOfType(const Label& label,
+ const ParseNode* request_from,
+ BuilderRecord::ItemType type,
+ Err* err) {
+ BuilderRecord* record = GetRecord(label);
+ if (!record) {
+ // Not seen this record yet, create a new one.
+ record = new BuilderRecord(type, label);
+ record->set_originally_referenced_from(request_from);
+ records_[label] = record;
+ return record;
+ }
+
+ // Check types.
+ if (record->type() != type) {
+ std::string msg =
+ "The type of " + label.GetUserVisibleName(false) +
+ "\nhere is a " + BuilderRecord::GetNameForType(type) +
+ " but was previously seen as a " +
+ BuilderRecord::GetNameForType(record->type()) + ".\n\n"
+ "The most common cause is that the label of a config was put in the\n"
+ "in the deps section of a target (or vice-versa).";
+ *err = Err(request_from, "Item type does not match.", msg);
+ if (record->originally_referenced_from()) {
+ err->AppendSubErr(Err(record->originally_referenced_from(),
+ std::string()));
+ }
+ return nullptr;
+ }
+
+ return record;
+}
+
+BuilderRecord* Builder::GetResolvedRecordOfType(const Label& label,
+ const ParseNode* origin,
+ BuilderRecord::ItemType type,
+ Err* err) {
+ BuilderRecord* record = GetRecord(label);
+ if (!record) {
+ *err = Err(origin, "Item not found",
+ "\"" + label.GetUserVisibleName(false) + "\" doesn't\n"
+ "refer to an existent thing.");
+ return nullptr;
+ }
+
+ const Item* item = record->item();
+ if (!item) {
+ *err = Err(origin, "Item not resolved.",
+ "\"" + label.GetUserVisibleName(false) + "\" hasn't been resolved.\n");
+ return nullptr;
+ }
+
+ if (!BuilderRecord::IsItemOfType(item, type)) {
+ *err = Err(origin,
+ std::string("This is not a ") + BuilderRecord::GetNameForType(type),
+ "\"" + label.GetUserVisibleName(false) + "\" refers to a " +
+ item->GetItemTypeName() + " instead of a " +
+ BuilderRecord::GetNameForType(type) + ".");
+ return nullptr;
+ }
+ return record;
+}
+
+bool Builder::AddDeps(BuilderRecord* record,
+ const LabelConfigVector& configs,
+ Err* err) {
+ for (const auto& config : configs) {
+ BuilderRecord* dep_record = GetOrCreateRecordOfType(
+ config.label, config.origin, BuilderRecord::ITEM_CONFIG, err);
+ if (!dep_record)
+ return false;
+ record->AddDep(dep_record);
+ }
+ return true;
+}
+
+bool Builder::AddDeps(BuilderRecord* record,
+ const UniqueVector<LabelConfigPair>& configs,
+ Err* err) {
+ for (const auto& config : configs) {
+ BuilderRecord* dep_record = GetOrCreateRecordOfType(
+ config.label, config.origin, BuilderRecord::ITEM_CONFIG, err);
+ if (!dep_record)
+ return false;
+ record->AddDep(dep_record);
+ }
+ return true;
+}
+
+bool Builder::AddDeps(BuilderRecord* record,
+ const LabelTargetVector& targets,
+ Err* err) {
+ for (const auto& target : targets) {
+ BuilderRecord* dep_record = GetOrCreateRecordOfType(
+ target.label, target.origin, BuilderRecord::ITEM_TARGET, err);
+ if (!dep_record)
+ return false;
+ record->AddDep(dep_record);
+ }
+ return true;
+}
+
+bool Builder::AddToolchainDep(BuilderRecord* record,
+ const Target* target,
+ Err* err) {
+ BuilderRecord* toolchain_record = GetOrCreateRecordOfType(
+ target->settings()->toolchain_label(), target->defined_from(),
+ BuilderRecord::ITEM_TOOLCHAIN, err);
+ if (!toolchain_record)
+ return false;
+ record->AddDep(toolchain_record);
+
+ return true;
+}
+
+void Builder::RecursiveSetShouldGenerate(BuilderRecord* record,
+ bool force) {
+ if (!force && record->should_generate())
+ return; // Already set.
+ record->set_should_generate(true);
+
+ for (const auto& cur : record->all_deps()) {
+ if (!cur->should_generate()) {
+ ScheduleItemLoadIfNecessary(cur);
+ RecursiveSetShouldGenerate(cur, false);
+ }
+ }
+}
+
+void Builder::ScheduleItemLoadIfNecessary(BuilderRecord* record) {
+ const ParseNode* origin = record->originally_referenced_from();
+ loader_->Load(record->label(),
+ origin ? origin->GetRange() : LocationRange());
+}
+
+bool Builder::ResolveItem(BuilderRecord* record, Err* err) {
+ DCHECK(record->can_resolve() && !record->resolved());
+
+ if (record->type() == BuilderRecord::ITEM_TARGET) {
+ Target* target = record->item()->AsTarget();
+ if (!ResolveDeps(&target->public_deps(), err) ||
+ !ResolveDeps(&target->private_deps(), err) ||
+ !ResolveDeps(&target->data_deps(), err) ||
+ !ResolveConfigs(&target->configs(), err) ||
+ !ResolveConfigs(&target->all_dependent_configs(), err) ||
+ !ResolveConfigs(&target->public_configs(), err) ||
+ !ResolveToolchain(target, err))
+ return false;
+ } else if (record->type() == BuilderRecord::ITEM_CONFIG) {
+ Config* config = record->item()->AsConfig();
+ if (!ResolveConfigs(&config->configs(), err))
+ return false;
+ } else if (record->type() == BuilderRecord::ITEM_TOOLCHAIN) {
+ Toolchain* toolchain = record->item()->AsToolchain();
+ if (!ResolveDeps(&toolchain->deps(), err))
+ return false;
+ }
+
+ record->set_resolved(true);
+
+ if (!record->item()->OnResolved(err))
+ return false;
+ if (!resolved_callback_.is_null())
+ resolved_callback_.Run(record);
+
+ // Recursively update everybody waiting on this item to be resolved.
+ for (BuilderRecord* waiting : record->waiting_on_resolution()) {
+ DCHECK(waiting->unresolved_deps().find(record) !=
+ waiting->unresolved_deps().end());
+ waiting->unresolved_deps().erase(record);
+
+ if (waiting->can_resolve()) {
+ if (!ResolveItem(waiting, err))
+ return false;
+ }
+ }
+ record->waiting_on_resolution().clear();
+ return true;
+}
+
+bool Builder::ResolveDeps(LabelTargetVector* deps, Err* err) {
+ for (LabelTargetPair& cur : *deps) {
+ DCHECK(!cur.ptr);
+
+ BuilderRecord* record = GetResolvedRecordOfType(
+ cur.label, cur.origin, BuilderRecord::ITEM_TARGET, err);
+ if (!record)
+ return false;
+ cur.ptr = record->item()->AsTarget();
+ }
+ return true;
+}
+
+bool Builder::ResolveConfigs(UniqueVector<LabelConfigPair>* configs, Err* err) {
+ for (const auto& cur : *configs) {
+ DCHECK(!cur.ptr);
+
+ BuilderRecord* record = GetResolvedRecordOfType(
+ cur.label, cur.origin, BuilderRecord::ITEM_CONFIG, err);
+ if (!record)
+ return false;
+ const_cast<LabelConfigPair&>(cur).ptr = record->item()->AsConfig();
+ }
+ return true;
+}
+
+bool Builder::ResolveToolchain(Target* target, Err* err) {
+ BuilderRecord* record = GetResolvedRecordOfType(
+ target->settings()->toolchain_label(), target->defined_from(),
+ BuilderRecord::ITEM_TOOLCHAIN, err);
+ if (!record) {
+ *err = Err(target->defined_from(),
+ "Toolchain for target not defined.",
+ "I was hoping to find a toolchain " +
+ target->settings()->toolchain_label().GetUserVisibleName(false));
+ return false;
+ }
+
+ if (!target->SetToolchain(record->item()->AsToolchain(), err))
+ return false;
+
+ return true;
+}
+
+std::string Builder::CheckForCircularDependencies(
+ const std::vector<const BuilderRecord*>& bad_records) const {
+ std::vector<const BuilderRecord*> cycle;
+ if (!RecursiveFindCycle(bad_records[0], &cycle))
+ return std::string(); // Didn't find a cycle, something else is wrong.
+
+ std::string ret;
+ for (size_t i = 0; i < cycle.size(); i++) {
+ ret += " " + cycle[i]->label().GetUserVisibleName(false);
+ if (i != cycle.size() - 1)
+ ret += " ->";
+ ret += "\n";
+ }
+
+ return ret;
+}
diff --git a/chromium/tools/gn/builder.h b/chromium/tools/gn/builder.h
new file mode 100644
index 00000000000..04459760246
--- /dev/null
+++ b/chromium/tools/gn/builder.h
@@ -0,0 +1,141 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_BUILDER_H_
+#define TOOLS_GN_BUILDER_H_
+
+#include "base/callback.h"
+#include "base/containers/hash_tables.h"
+#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "tools/gn/builder_record.h"
+#include "tools/gn/label.h"
+#include "tools/gn/label_ptr.h"
+#include "tools/gn/unique_vector.h"
+
+class Config;
+class Err;
+class Loader;
+class ParseNode;
+
+class Builder : public base::RefCountedThreadSafe<Builder> {
+ public:
+ typedef base::Callback<void(const BuilderRecord*)> ResolvedCallback;
+
+ explicit Builder(Loader* loader);
+
+ // The resolved callback is called whenever a target has been resolved. This
+ // will be executed only on the main thread.
+ void set_resolved_callback(const ResolvedCallback& cb) {
+ resolved_callback_ = cb;
+ }
+
+ Loader* loader() const { return loader_; }
+
+ void ItemDefined(std::unique_ptr<Item> item);
+
+ // Returns NULL if there is not a thing with the corresponding label.
+ const Item* GetItem(const Label& label) const;
+ const Toolchain* GetToolchain(const Label& label) const;
+
+ std::vector<const BuilderRecord*> GetAllRecords() const;
+
+ // Returns targets which should be generated and which are defined.
+ std::vector<const Target*> GetAllResolvedTargets() const;
+
+ // Returns the record for the given label, or NULL if it doesn't exist.
+ // Mostly used for unit tests.
+ const BuilderRecord* GetRecord(const Label& label) const;
+ BuilderRecord* GetRecord(const Label& label);
+
+ // If there are any undefined references, returns false and sets the error.
+ bool CheckForBadItems(Err* err) const;
+
+ private:
+ friend class base::RefCountedThreadSafe<Builder>;
+
+ virtual ~Builder();
+
+ bool TargetDefined(BuilderRecord* record, Err* err);
+ bool ConfigDefined(BuilderRecord* record, Err* err);
+ bool ToolchainDefined(BuilderRecord* record, Err* err);
+
+ // Returns the record associated with the given label. This function checks
+ // that if we already have references for it, the type matches. If no record
+ // exists yet, a new one will be created.
+ //
+ // If any of the conditions fail, the return value will be null and the error
+ // will be set. request_from is used as the source of the error.
+ BuilderRecord* GetOrCreateRecordOfType(const Label& label,
+ const ParseNode* request_from,
+ BuilderRecord::ItemType type,
+ Err* err);
+
+ // Returns the record associated with the given label. This function checks
+ // that it's already been resolved to the correct type.
+ //
+ // If any of the conditions fail, the return value will be null and the error
+ // will be set. request_from is used as the source of the error.
+ BuilderRecord* GetResolvedRecordOfType(const Label& label,
+ const ParseNode* request_from,
+ BuilderRecord::ItemType type,
+ Err* err);
+
+ bool AddDeps(BuilderRecord* record,
+ const LabelConfigVector& configs,
+ Err* err);
+ bool AddDeps(BuilderRecord* record,
+ const UniqueVector<LabelConfigPair>& configs,
+ Err* err);
+ bool AddDeps(BuilderRecord* record,
+ const LabelTargetVector& targets,
+ Err* err);
+ bool AddToolchainDep(BuilderRecord* record,
+ const Target* target,
+ Err* err);
+
+ // Given a target, sets the "should generate" bit and pushes it through the
+// dependency tree. Any time the bit is set, we ensure that the given item is
+ // scheduled to be loaded.
+ //
+ // If the force flag is set, we'll ignore the current state of the record's
+ // should_generate flag, and set it on the dependents every time. This is
+ // used when defining a target: the "should generate" may have been set
+ // before the item was defined (if it is required by something that is
+ // required). In this case, we need to re-push the "should generate" flag
+ // to the item's dependencies.
+ void RecursiveSetShouldGenerate(BuilderRecord* record, bool force);
+
+ void ScheduleItemLoadIfNecessary(BuilderRecord* record);
+
+  // This takes a BuilderRecord with resolved dependencies, and fills in the
+ // target's Label*Vectors with the resolved pointers.
+ bool ResolveItem(BuilderRecord* record, Err* err);
+
+ // Fills in the pointers in the given vector based on the labels. We assume
+ // that everything should be resolved by this point, so will return an error
+ // if anything isn't found or if the type doesn't match.
+ bool ResolveDeps(LabelTargetVector* deps, Err* err);
+ bool ResolveConfigs(UniqueVector<LabelConfigPair>* configs, Err* err);
+ bool ResolveToolchain(Target* target, Err* err);
+
+ // Given a list of unresolved records, tries to find any circular
+ // dependencies and returns the string describing the problem. If no circular
+ // deps were found, returns the empty string.
+ std::string CheckForCircularDependencies(
+ const std::vector<const BuilderRecord*>& bad_records) const;
+
+ // Non owning pointer.
+ Loader* loader_;
+
+ // Owning pointers.
+ typedef base::hash_map<Label, BuilderRecord*> RecordMap;
+ RecordMap records_;
+
+ ResolvedCallback resolved_callback_;
+
+ DISALLOW_COPY_AND_ASSIGN(Builder);
+};
+
+#endif // TOOLS_GN_BUILDER_H_
diff --git a/chromium/tools/gn/builder_record.cc b/chromium/tools/gn/builder_record.cc
new file mode 100644
index 00000000000..842dbc4c556
--- /dev/null
+++ b/chromium/tools/gn/builder_record.cc
@@ -0,0 +1,69 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/builder_record.h"
+
+#include "tools/gn/item.h"
+
+BuilderRecord::BuilderRecord(ItemType type, const Label& label)
+ : type_(type),
+ label_(label),
+ originally_referenced_from_(nullptr),
+ should_generate_(false),
+ resolved_(false) {
+}
+
+BuilderRecord::~BuilderRecord() {
+}
+
+// static
+const char* BuilderRecord::GetNameForType(ItemType type) {
+ switch (type) {
+ case ITEM_TARGET:
+ return "target";
+ case ITEM_CONFIG:
+ return "config";
+ case ITEM_TOOLCHAIN:
+ return "toolchain";
+ case ITEM_UNKNOWN:
+ default:
+ return "unknown";
+ }
+}
+
+// static
+bool BuilderRecord::IsItemOfType(const Item* item, ItemType type) {
+ switch (type) {
+ case ITEM_TARGET:
+ return !!item->AsTarget();
+ case ITEM_CONFIG:
+ return !!item->AsConfig();
+ case ITEM_TOOLCHAIN:
+ return !!item->AsToolchain();
+ case ITEM_UNKNOWN:
+ default:
+ return false;
+ }
+}
+
+// static
+BuilderRecord::ItemType BuilderRecord::TypeOfItem(const Item* item) {
+ if (item->AsTarget())
+ return ITEM_TARGET;
+ if (item->AsConfig())
+ return ITEM_CONFIG;
+ if (item->AsToolchain())
+ return ITEM_TOOLCHAIN;
+
+ NOTREACHED();
+ return ITEM_UNKNOWN;
+}
+
+void BuilderRecord::AddDep(BuilderRecord* record) {
+ all_deps_.insert(record);
+ if (!record->resolved()) {
+ unresolved_deps_.insert(record);
+ record->waiting_on_resolution_.insert(this);
+ }
+}
diff --git a/chromium/tools/gn/builder_record.h b/chromium/tools/gn/builder_record.h
new file mode 100644
index 00000000000..a767c9b103b
--- /dev/null
+++ b/chromium/tools/gn/builder_record.h
@@ -0,0 +1,112 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_BUILDER_RECORD_H_
+#define TOOLS_GN_BUILDER_RECORD_H_
+
+#include <memory>
+#include <set>
+#include <utility>
+
+#include "base/macros.h"
+#include "tools/gn/item.h"
+#include "tools/gn/location.h"
+
+class ParseNode;
+
+// This class is used by the builder to manage the loading of the dependency
+// tree. It holds a reference to an item and links to other records that the
+// item depends on, both resolved ones, and unresolved ones.
+//
+// If a target depends on another one that hasn't been defined yet, we'll make
+// a placeholder BuilderRecord with no item, and try to load the buildfile
+// associated with the new item. The item will get filled in when we encounter
+// the declaration for the item (or when we're done and realize there are
+// undefined items).
+//
+// You can also have null item pointers when the target is not required for
+// the current build (should_generate is false).
+class BuilderRecord {
+ public:
+ typedef std::set<BuilderRecord*> BuilderRecordSet;
+
+ enum ItemType {
+ ITEM_UNKNOWN,
+ ITEM_TARGET,
+ ITEM_CONFIG,
+ ITEM_TOOLCHAIN
+ };
+
+ BuilderRecord(ItemType type, const Label& label);
+ ~BuilderRecord();
+
+ ItemType type() const { return type_; }
+ const Label& label() const { return label_; }
+
+ // Returns a user-ready name for the given type. e.g. "target".
+ static const char* GetNameForType(ItemType type);
+
+ // Returns true if the given item is of the given type.
+ static bool IsItemOfType(const Item* item, ItemType type);
+
+ // Returns the type enum for the given item.
+ static ItemType TypeOfItem(const Item* item);
+
+ Item* item() { return item_.get(); }
+ const Item* item() const { return item_.get(); }
+ void set_item(std::unique_ptr<Item> item) { item_ = std::move(item); }
+
+  // Indicates where this item was originally referenced from, which caused
+  // it to be loaded. For targets whose declaration was encountered before
+  // any reference to them, this will be null.
+ const ParseNode* originally_referenced_from() const {
+ return originally_referenced_from_;
+ }
+ void set_originally_referenced_from(const ParseNode* pn) {
+ originally_referenced_from_ = pn;
+ }
+
+ bool should_generate() const { return should_generate_; }
+ void set_should_generate(bool sg) { should_generate_ = sg; }
+
+ bool resolved() const { return resolved_; }
+ void set_resolved(bool r) { resolved_ = r; }
+
+ bool can_resolve() const {
+ return item_ && unresolved_deps_.empty();
+ }
+
+ // All records this one is depending on.
+ BuilderRecordSet& all_deps() { return all_deps_; }
+ const BuilderRecordSet& all_deps() const { return all_deps_; }
+
+ // Unresolved records this one is depending on. A subset of all... above.
+ BuilderRecordSet& unresolved_deps() { return unresolved_deps_; }
+ const BuilderRecordSet& unresolved_deps() const { return unresolved_deps_; }
+
+ // Records that are waiting on this one to be resolved. This is the other
+ // end of the "unresolved deps" arrow.
+ BuilderRecordSet& waiting_on_resolution() { return waiting_on_resolution_; }
+ const BuilderRecordSet& waiting_on_resolution() const {
+ return waiting_on_resolution_;
+ }
+
+ void AddDep(BuilderRecord* record);
+
+ private:
+ ItemType type_;
+ Label label_;
+ std::unique_ptr<Item> item_;
+ const ParseNode* originally_referenced_from_;
+ bool should_generate_;
+ bool resolved_;
+
+ BuilderRecordSet all_deps_;
+ BuilderRecordSet unresolved_deps_;
+ BuilderRecordSet waiting_on_resolution_;
+
+ DISALLOW_COPY_AND_ASSIGN(BuilderRecord);
+};
+
+#endif // TOOLS_GN_BUILDER_RECORD_H_
diff --git a/chromium/tools/gn/builder_unittest.cc b/chromium/tools/gn/builder_unittest.cc
new file mode 100644
index 00000000000..96fad7d268b
--- /dev/null
+++ b/chromium/tools/gn/builder_unittest.cc
@@ -0,0 +1,249 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/builder.h"
+#include "tools/gn/config.h"
+#include "tools/gn/loader.h"
+#include "tools/gn/target.h"
+#include "tools/gn/test_with_scope.h"
+#include "tools/gn/toolchain.h"
+
+namespace {
+
+class MockLoader : public Loader {
+ public:
+ MockLoader() {
+ }
+
+ // Loader implementation:
+ void Load(const SourceFile& file,
+ const LocationRange& origin,
+ const Label& toolchain_name) override {
+ files_.push_back(file);
+ }
+ void ToolchainLoaded(const Toolchain* toolchain) override {}
+ Label GetDefaultToolchain() const override { return Label(); }
+ const Settings* GetToolchainSettings(const Label& label) const override {
+ return nullptr;
+ }
+
+ bool HasLoadedNone() const {
+ return files_.empty();
+ }
+
+ // Returns true if one/two loads have been requested and they match the given
+ // file(s). This will clear the records so it will be empty for the next call.
+ bool HasLoadedOne(const SourceFile& file) {
+ if (files_.size() != 1u) {
+ files_.clear();
+ return false;
+ }
+ bool match = (files_[0] == file);
+ files_.clear();
+ return match;
+ }
+ bool HasLoadedTwo(const SourceFile& a, const SourceFile& b) {
+ if (files_.size() != 2u) {
+ files_.clear();
+ return false;
+ }
+
+ bool match = (
+ (files_[0] == a && files_[1] == b) ||
+ (files_[0] == b && files_[1] == a));
+ files_.clear();
+ return match;
+ }
+
+ private:
+ ~MockLoader() override {}
+
+ std::vector<SourceFile> files_;
+};
+
+class BuilderTest : public testing::Test {
+ public:
+ BuilderTest()
+ : loader_(new MockLoader),
+ builder_(new Builder(loader_.get())),
+ settings_(&build_settings_, std::string()),
+ scope_(&settings_) {
+ build_settings_.SetBuildDir(SourceDir("//out/"));
+ settings_.set_toolchain_label(Label(SourceDir("//tc/"), "default"));
+ settings_.set_default_toolchain_label(settings_.toolchain_label());
+ }
+
+ Toolchain* DefineToolchain() {
+ Toolchain* tc = new Toolchain(&settings_, settings_.toolchain_label());
+ TestWithScope::SetupToolchain(tc);
+ builder_->ItemDefined(std::unique_ptr<Item>(tc));
+ return tc;
+ }
+
+ protected:
+ scoped_refptr<MockLoader> loader_;
+ scoped_refptr<Builder> builder_;
+ BuildSettings build_settings_;
+ Settings settings_;
+ Scope scope_;
+};
+
+} // namespace
+
+TEST_F(BuilderTest, BasicDeps) {
+ SourceDir toolchain_dir = settings_.toolchain_label().dir();
+ std::string toolchain_name = settings_.toolchain_label().name();
+
+ // Construct a dependency chain: A -> B -> C. Define A first with a
+ // forward-reference to B, then C, then B to test the different orders that
+ // the dependencies are hooked up.
+ Label a_label(SourceDir("//a/"), "a", toolchain_dir, toolchain_name);
+ Label b_label(SourceDir("//b/"), "b", toolchain_dir, toolchain_name);
+ Label c_label(SourceDir("//c/"), "c", toolchain_dir, toolchain_name);
+
+ // The builder will take ownership of the pointers.
+ Target* a = new Target(&settings_, a_label);
+ a->public_deps().push_back(LabelTargetPair(b_label));
+ a->set_output_type(Target::EXECUTABLE);
+ builder_->ItemDefined(std::unique_ptr<Item>(a));
+
+ // Should have requested that B and the toolchain is loaded.
+ EXPECT_TRUE(loader_->HasLoadedTwo(SourceFile("//tc/BUILD.gn"),
+ SourceFile("//b/BUILD.gn")));
+
+ // Define the toolchain.
+ DefineToolchain();
+ BuilderRecord* toolchain_record =
+ builder_->GetRecord(settings_.toolchain_label());
+ ASSERT_TRUE(toolchain_record);
+ EXPECT_EQ(BuilderRecord::ITEM_TOOLCHAIN, toolchain_record->type());
+
+ // A should be unresolved with an item
+ BuilderRecord* a_record = builder_->GetRecord(a_label);
+ EXPECT_TRUE(a_record->item());
+ EXPECT_FALSE(a_record->resolved());
+ EXPECT_FALSE(a_record->can_resolve());
+
+ // B should be unresolved, have no item, and no deps.
+ BuilderRecord* b_record = builder_->GetRecord(b_label);
+ EXPECT_FALSE(b_record->item());
+ EXPECT_FALSE(b_record->resolved());
+ EXPECT_FALSE(b_record->can_resolve());
+ EXPECT_TRUE(b_record->all_deps().empty());
+
+ // A should have two deps: B and the toolchain. Only B should be unresolved.
+ EXPECT_EQ(2u, a_record->all_deps().size());
+ EXPECT_EQ(1u, a_record->unresolved_deps().size());
+ EXPECT_NE(a_record->all_deps().end(),
+ a_record->all_deps().find(toolchain_record));
+ EXPECT_NE(a_record->all_deps().end(),
+ a_record->all_deps().find(b_record));
+ EXPECT_NE(a_record->unresolved_deps().end(),
+ a_record->unresolved_deps().find(b_record));
+
+ // B should be marked as having A waiting on it.
+ EXPECT_EQ(1u, b_record->waiting_on_resolution().size());
+ EXPECT_NE(b_record->waiting_on_resolution().end(),
+ b_record->waiting_on_resolution().find(a_record));
+
+ // Add the C target.
+ Target* c = new Target(&settings_, c_label);
+ c->set_output_type(Target::STATIC_LIBRARY);
+ c->visibility().SetPublic();
+ builder_->ItemDefined(std::unique_ptr<Item>(c));
+
+ // C only depends on the already-loaded toolchain so we shouldn't have
+ // requested anything else.
+ EXPECT_TRUE(loader_->HasLoadedNone());
+
+ // Add the B target.
+ Target* b = new Target(&settings_, b_label);
+ a->public_deps().push_back(LabelTargetPair(c_label));
+ b->set_output_type(Target::SHARED_LIBRARY);
+ b->visibility().SetPublic();
+ builder_->ItemDefined(std::unique_ptr<Item>(b));
+
+ // B depends only on the already-loaded C and toolchain so we shouldn't have
+ // requested anything else.
+ EXPECT_TRUE(loader_->HasLoadedNone());
+
+ // All targets should now be resolved.
+ BuilderRecord* c_record = builder_->GetRecord(c_label);
+ EXPECT_TRUE(a_record->resolved());
+ EXPECT_TRUE(b_record->resolved());
+ EXPECT_TRUE(c_record->resolved());
+
+ EXPECT_TRUE(a_record->unresolved_deps().empty());
+ EXPECT_TRUE(b_record->unresolved_deps().empty());
+ EXPECT_TRUE(c_record->unresolved_deps().empty());
+
+ EXPECT_TRUE(a_record->waiting_on_resolution().empty());
+ EXPECT_TRUE(b_record->waiting_on_resolution().empty());
+ EXPECT_TRUE(c_record->waiting_on_resolution().empty());
+}
+
+// Tests that the "should generate" flag is set and propagated properly.
+TEST_F(BuilderTest, ShouldGenerate) {
+ DefineToolchain();
+
+ // Define a secondary toolchain.
+ Settings settings2(&build_settings_, "secondary/");
+ Label toolchain_label2(SourceDir("//tc/"), "secondary");
+ settings2.set_toolchain_label(toolchain_label2);
+ Toolchain* tc2 = new Toolchain(&settings2, toolchain_label2);
+ TestWithScope::SetupToolchain(tc2);
+ builder_->ItemDefined(std::unique_ptr<Item>(tc2));
+
+ // Construct a dependency chain: A -> B. A is in the default toolchain, B
+ // is not.
+ Label a_label(SourceDir("//foo/"), "a",
+ settings_.toolchain_label().dir(), "a");
+ Label b_label(SourceDir("//foo/"), "b",
+ toolchain_label2.dir(), toolchain_label2.name());
+
+ // First define B.
+ Target* b = new Target(&settings2, b_label);
+ b->visibility().SetPublic();
+ b->set_output_type(Target::EXECUTABLE);
+ builder_->ItemDefined(std::unique_ptr<Item>(b));
+
+ // B should not be marked generated by default.
+ BuilderRecord* b_record = builder_->GetRecord(b_label);
+ EXPECT_FALSE(b_record->should_generate());
+
+ // Define A with a dependency on B.
+ Target* a = new Target(&settings_, a_label);
+ a->public_deps().push_back(LabelTargetPair(b_label));
+ a->set_output_type(Target::EXECUTABLE);
+ builder_->ItemDefined(std::unique_ptr<Item>(a));
+
+ // A should have the generate bit set since it's in the default toolchain.
+ BuilderRecord* a_record = builder_->GetRecord(a_label);
+ EXPECT_TRUE(a_record->should_generate());
+
+ // It should have gotten pushed to B.
+ EXPECT_TRUE(b_record->should_generate());
+}
+
+// Tests that configs applied to a config get loaded (bug 536844).
+TEST_F(BuilderTest, ConfigLoad) {
+ SourceDir toolchain_dir = settings_.toolchain_label().dir();
+ std::string toolchain_name = settings_.toolchain_label().name();
+
+  // Construct a config A that forward-references config B in its "configs".
+  // Defining A before B has been seen should cause the builder to request
+  // that B's build file be loaded (the regression covered by bug 536844).
+ Label a_label(SourceDir("//a/"), "a", toolchain_dir, toolchain_name);
+ Label b_label(SourceDir("//b/"), "b", toolchain_dir, toolchain_name);
+ Label c_label(SourceDir("//c/"), "c", toolchain_dir, toolchain_name);
+
+ // The builder will take ownership of the pointers.
+ Config* a = new Config(&settings_, a_label);
+ a->configs().push_back(LabelConfigPair(b_label));
+ builder_->ItemDefined(std::unique_ptr<Item>(a));
+
+ // Should have requested that B is loaded.
+ EXPECT_TRUE(loader_->HasLoadedOne(SourceFile("//b/BUILD.gn")));
+}
diff --git a/chromium/tools/gn/bundle_data.cc b/chromium/tools/gn/bundle_data.cc
new file mode 100644
index 00000000000..7faccbdc51a
--- /dev/null
+++ b/chromium/tools/gn/bundle_data.cc
@@ -0,0 +1,112 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/bundle_data.h"
+
+#include "base/logging.h"
+#include "tools/gn/output_file.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/target.h"
+
+namespace {
+
+// Return directory of |path| without the trailing directory separator.
+base::StringPiece FindDirNoTrailingSeparator(const base::StringPiece& path) {
+ base::StringPiece::size_type pos = path.find_last_of("/\\");
+ if (pos == base::StringPiece::npos)
+ return base::StringPiece();
+ return base::StringPiece(path.data(), pos);
+}
+
+} // namespace
+
+bool IsSourceFileFromAssetCatalog(const SourceFile& source,
+ SourceFile* asset_catalog) {
+ // Check that the file matches the following pattern:
+ // .*\.xcassets/[^/]*\.imageset/[^/]*
+ base::StringPiece dir;
+ dir = FindDirNoTrailingSeparator(source.value());
+ if (!dir.ends_with(".imageset"))
+ return false;
+ dir = FindDirNoTrailingSeparator(dir);
+ if (!dir.ends_with(".xcassets"))
+ return false;
+ if (asset_catalog) {
+ std::string asset_catalog_path = dir.as_string();
+ *asset_catalog = SourceFile(SourceFile::SWAP_IN, &asset_catalog_path);
+ }
+ return true;
+}
+
+BundleData::BundleData() {}
+
+BundleData::~BundleData() {}
+
+void BundleData::AddBundleData(const Target* target) {
+ DCHECK_EQ(target->output_type(), Target::BUNDLE_DATA);
+ bundle_deps_.push_back(target);
+}
+
+void BundleData::OnTargetResolved(Target* owning_target) {
+ // Only initialize file_rules_ and asset_catalog_sources for "create_bundle"
+ // target (properties are only used by those targets).
+ if (owning_target->output_type() != Target::CREATE_BUNDLE)
+ return;
+
+ for (const Target* target : bundle_deps_) {
+ SourceFiles file_rule_sources;
+ for (const SourceFile& source_file : target->sources()) {
+ if (IsSourceFileFromAssetCatalog(source_file, nullptr)) {
+ asset_catalog_sources_.push_back(source_file);
+ } else {
+ file_rule_sources.push_back(source_file);
+ }
+ }
+
+ if (!file_rule_sources.empty()) {
+ DCHECK_EQ(target->action_values().outputs().list().size(), 1u);
+ file_rules_.push_back(BundleFileRule(
+ file_rule_sources, target->action_values().outputs().list()[0]));
+ }
+ }
+
+ GetSourceFiles(&owning_target->sources());
+}
+
+void BundleData::GetSourceFiles(SourceFiles* sources) const {
+ for (const BundleFileRule& file_rule : file_rules_) {
+ sources->insert(sources->end(), file_rule.sources().begin(),
+ file_rule.sources().end());
+ }
+ sources->insert(sources->end(), asset_catalog_sources_.begin(),
+ asset_catalog_sources_.end());
+}
+
+void BundleData::GetOutputFiles(const Settings* settings,
+ OutputFiles* outputs) const {
+ SourceFiles outputs_as_sources;
+ GetOutputsAsSourceFiles(settings, &outputs_as_sources);
+ for (const SourceFile& source_file : outputs_as_sources)
+ outputs->push_back(OutputFile(settings->build_settings(), source_file));
+}
+
+void BundleData::GetOutputsAsSourceFiles(
+ const Settings* settings,
+ SourceFiles* outputs_as_source) const {
+ for (const BundleFileRule& file_rule : file_rules_) {
+ for (const SourceFile& source : file_rule.sources()) {
+ outputs_as_source->push_back(
+ file_rule.ApplyPatternToSource(settings, *this, source));
+ }
+ }
+
+ if (!asset_catalog_sources_.empty())
+ outputs_as_source->push_back(GetCompiledAssetCatalogPath());
+}
+
+SourceFile BundleData::GetCompiledAssetCatalogPath() const {
+ DCHECK(!asset_catalog_sources_.empty());
+ std::string assets_car_path = resources_dir_ + "/Assets.car";
+ return SourceFile(SourceFile::SWAP_IN, &assets_car_path);
+}
diff --git a/chromium/tools/gn/bundle_data.h b/chromium/tools/gn/bundle_data.h
new file mode 100644
index 00000000000..e78589e9e91
--- /dev/null
+++ b/chromium/tools/gn/bundle_data.h
@@ -0,0 +1,104 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_BUNDLE_DATA_H_
+#define TOOLS_GN_BUNDLE_DATA_H_
+
+#include <string>
+#include <vector>
+
+#include "tools/gn/bundle_file_rule.h"
+#include "tools/gn/unique_vector.h"
+
+class OutputFile;
+class SourceFile;
+class Settings;
+class Target;
+
+// Returns true if |source| corresponds to the path of a file in an asset
+// catalog. If defined |asset_catalog| is set to its path.
+//
+// An asset catalog is an OS X bundle with the ".xcassets" extension. It
+// contains one directory per asset, each of which has the ".imageset"
+// extension.
+//
+// All asset catalogs are compiled by Xcode into a single Assets.car file as
+// part of the creation of an application or framework bundle. BundleData
+// emulates this with the "compile_xcassets" tool.
+bool IsSourceFileFromAssetCatalog(const SourceFile& source,
+ SourceFile* asset_catalog);
+
+// BundleData holds the information required by "create_bundle" target.
+class BundleData {
+ public:
+ using UniqueTargets = UniqueVector<const Target*>;
+ using SourceFiles = std::vector<SourceFile>;
+ using OutputFiles = std::vector<OutputFile>;
+ using BundleFileRules = std::vector<BundleFileRule>;
+
+ BundleData();
+ ~BundleData();
+
+ // Adds a bundle_data target to the recursive collection of all bundle_data
+ // that the target depends on.
+ void AddBundleData(const Target* target);
+
+ // Called upon resolution of the target owning this instance of BundleData.
+ // |owning_target| is the owning target.
+ void OnTargetResolved(Target* owning_target);
+
+ // Returns the list of inputs.
+ void GetSourceFiles(SourceFiles* sources) const;
+
+ // Returns the list of outputs.
+ void GetOutputFiles(const Settings* settings,
+ OutputFiles* outputs) const;
+
+ // Returns the list of outputs as SourceFile.
+ void GetOutputsAsSourceFiles(
+ const Settings* settings,
+ SourceFiles* outputs_as_source) const;
+
+ // Returns the path to the compiled asset catalog. Only valid if
+ // asset_catalog_sources() is not empty.
+ SourceFile GetCompiledAssetCatalogPath() const;
+
+ // Returns the list of inputs for the compilation of the asset catalog.
+ SourceFiles& asset_catalog_sources() { return asset_catalog_sources_; }
+ const SourceFiles& asset_catalog_sources() const {
+ return asset_catalog_sources_;
+ }
+
+ BundleFileRules& file_rules() { return file_rules_; }
+ const BundleFileRules& file_rules() const { return file_rules_; }
+
+ std::string& root_dir() { return root_dir_; }
+ const std::string& root_dir() const { return root_dir_; }
+
+ std::string& resources_dir() { return resources_dir_; }
+ const std::string& resources_dir() const { return resources_dir_; }
+
+ std::string& executable_dir() { return executable_dir_; }
+ const std::string& executable_dir() const { return executable_dir_; }
+
+ std::string& plugins_dir() { return plugins_dir_; }
+ const std::string& plugins_dir() const { return plugins_dir_; }
+
+ // Recursive collection of all bundle_data that the target depends on.
+ const UniqueTargets& bundle_deps() const { return bundle_deps_; }
+
+ private:
+ SourceFiles asset_catalog_sources_;
+ BundleFileRules file_rules_;
+ UniqueTargets bundle_deps_;
+
+  // All these values are directories relative to root_build_dir; apart from
+  // root_dir_, they are either equal to root_dir_ or subdirectories of it.
+ std::string root_dir_;
+ std::string resources_dir_;
+ std::string executable_dir_;
+ std::string plugins_dir_;
+};
+
+#endif // TOOLS_GN_BUNDLE_DATA_H_
diff --git a/chromium/tools/gn/bundle_data_target_generator.cc b/chromium/tools/gn/bundle_data_target_generator.cc
new file mode 100644
index 00000000000..81a9b959ebc
--- /dev/null
+++ b/chromium/tools/gn/bundle_data_target_generator.cc
@@ -0,0 +1,94 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/bundle_data_target_generator.h"
+
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/substitution_type.h"
+#include "tools/gn/target.h"
+#include "tools/gn/value.h"
+#include "tools/gn/variables.h"
+
+BundleDataTargetGenerator::BundleDataTargetGenerator(
+ Target* target,
+ Scope* scope,
+ const FunctionCallNode* function_call,
+ Err* err) : TargetGenerator(target, scope, function_call, err) {}
+
+BundleDataTargetGenerator::~BundleDataTargetGenerator() {}
+
+void BundleDataTargetGenerator::DoRun() {
+ target_->set_output_type(Target::BUNDLE_DATA);
+
+ if (!FillSources())
+ return;
+ if (!FillOutputs())
+ return;
+
+ if (target_->sources().empty()) {
+ *err_ = Err(function_call_, "Empty sources for bundle_data target."
+ "You have to specify at least one file in the \"sources\".");
+ return;
+ }
+ if (target_->action_values().outputs().list().size() != 1) {
+ *err_ = Err(function_call_,
+ "Target bundle_data must have exactly one ouput.",
+ "You must specify exactly one value in the \"output\" array for the"
+ "destination\ninto the generated bundle (see \"gn help bundle_data\"). "
+ "If there are multiple\nsources to copy, use source expansion (see "
+ "\"gn help source_expansion\").");
+ return;
+ }
+}
+
+bool BundleDataTargetGenerator::FillOutputs() {
+ const Value* value = scope_->GetValue(variables::kOutputs, true);
+ if (!value)
+ return true;
+
+ SubstitutionList& outputs = target_->action_values().outputs();
+ if (!outputs.Parse(*value, err_))
+ return false;
+
+ // Check the substitutions used are valid for this purpose.
+ for (SubstitutionType type : outputs.required_types()) {
+ if (!IsValidBundleDataSubstitution(type)) {
+ *err_ = Err(value->origin(), "Invalid substitution type.",
+ "The substitution " + std::string(kSubstitutionNames[type]) +
+ " isn't valid for something\n"
+ "operating on a bundle_data file such as this.");
+ return false;
+ }
+ }
+
+ // Validate that outputs are in the bundle.
+ CHECK(outputs.list().size() == value->list_value().size());
+ for (size_t i = 0; i < outputs.list().size(); i++) {
+ if (!EnsureSubstitutionIsInBundleDir(outputs.list()[i],
+ value->list_value()[i]))
+ return false;
+ }
+
+ return true;
+}
+
+bool BundleDataTargetGenerator::EnsureSubstitutionIsInBundleDir(
+ const SubstitutionPattern& pattern,
+ const Value& original_value) {
+ if (pattern.ranges().empty()) {
+ // Pattern is empty, error out (this prevents weirdness below).
+ *err_ = Err(original_value, "This has an empty value in it.");
+ return false;
+ }
+
+ if (SubstitutionIsInBundleDir(pattern.ranges()[0].type))
+ return true;
+
+ *err_ = Err(original_value,
+ "File is not inside bundle directory.",
+ "The given file should be in the output directory. Normally you\n"
+ "would specify {{bundle_resources_dir}} or such substitution.");
+ return false;
+}
diff --git a/chromium/tools/gn/bundle_data_target_generator.h b/chromium/tools/gn/bundle_data_target_generator.h
new file mode 100644
index 00000000000..49bcc45249f
--- /dev/null
+++ b/chromium/tools/gn/bundle_data_target_generator.h
@@ -0,0 +1,33 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_BUNDLE_DATA_TARGET_GENERATOR_H_
+#define TOOLS_GN_BUNDLE_DATA_TARGET_GENERATOR_H_
+
+#include "base/macros.h"
+#include "tools/gn/target_generator.h"
+
+// Populates a Target with the values from a bundle_data rule.
+class BundleDataTargetGenerator : public TargetGenerator {
+ public:
+ BundleDataTargetGenerator(Target* target,
+ Scope* scope,
+ const FunctionCallNode* function_call,
+ Err* err);
+ ~BundleDataTargetGenerator() override;
+
+ protected:
+ void DoRun() override;
+
+ private:
+ bool FillOutputs();
+
+ bool EnsureSubstitutionIsInBundleDir(
+ const SubstitutionPattern& pattern,
+ const Value& original_value);
+
+ DISALLOW_COPY_AND_ASSIGN(BundleDataTargetGenerator);
+};
+
+#endif // TOOLS_GN_BUNDLE_DATA_TARGET_GENERATOR_H_
diff --git a/chromium/tools/gn/bundle_file_rule.cc b/chromium/tools/gn/bundle_file_rule.cc
new file mode 100644
index 00000000000..7684196c356
--- /dev/null
+++ b/chromium/tools/gn/bundle_file_rule.cc
@@ -0,0 +1,59 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/bundle_file_rule.h"
+
+#include "tools/gn/output_file.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/substitution_pattern.h"
+#include "tools/gn/substitution_writer.h"
+#include "tools/gn/target.h"
+
+BundleFileRule::BundleFileRule(const std::vector<SourceFile> sources,
+ const SubstitutionPattern& pattern)
+ : sources_(sources), pattern_(pattern) {}
+
+BundleFileRule::BundleFileRule(const BundleFileRule& other) = default;
+
+BundleFileRule::~BundleFileRule() {}
+
+SourceFile BundleFileRule::ApplyPatternToSource(
+ const Settings* settings,
+ const BundleData& bundle_data,
+ const SourceFile& source_file) const {
+ std::string output_path;
+ for (const auto& subrange : pattern_.ranges()) {
+ switch (subrange.type) {
+ case SUBSTITUTION_LITERAL:
+ output_path.append(subrange.literal);
+ break;
+ case SUBSTITUTION_BUNDLE_ROOT_DIR:
+ output_path.append(bundle_data.root_dir());
+ break;
+ case SUBSTITUTION_BUNDLE_RESOURCES_DIR:
+ output_path.append(bundle_data.resources_dir());
+ break;
+ case SUBSTITUTION_BUNDLE_EXECUTABLE_DIR:
+ output_path.append(bundle_data.executable_dir());
+ break;
+ case SUBSTITUTION_BUNDLE_PLUGINS_DIR:
+ output_path.append(bundle_data.plugins_dir());
+ break;
+ default:
+ output_path.append(SubstitutionWriter::GetSourceSubstitution(
+ settings, source_file, subrange.type,
+ SubstitutionWriter::OUTPUT_ABSOLUTE, SourceDir()));
+ break;
+ }
+ }
+ return SourceFile(SourceFile::SWAP_IN, &output_path);
+}
+
+OutputFile BundleFileRule::ApplyPatternToSourceAsOutputFile(
+ const Settings* settings,
+ const BundleData& bundle_data,
+ const SourceFile& source_file) const {
+ return OutputFile(settings->build_settings(),
+ ApplyPatternToSource(settings, bundle_data, source_file));
+}
diff --git a/chromium/tools/gn/bundle_file_rule.h b/chromium/tools/gn/bundle_file_rule.h
new file mode 100644
index 00000000000..9fbf01214d4
--- /dev/null
+++ b/chromium/tools/gn/bundle_file_rule.h
@@ -0,0 +1,44 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_BUNDLE_FILE_RULE_H_
+#define TOOLS_GN_BUNDLE_FILE_RULE_H_
+
+#include <vector>
+
+#include "tools/gn/source_file.h"
+#include "tools/gn/substitution_pattern.h"
+
+class BundleData;
+class Settings;
+class SourceFile;
+class OutputFile;
+
+// BundleFileRule contains the information found in a "bundle_data" target.
+class BundleFileRule {
+ public:
+ BundleFileRule(const std::vector<SourceFile> sources,
+ const SubstitutionPattern& pattern);
+ BundleFileRule(const BundleFileRule& other);
+ ~BundleFileRule();
+
+ // Applies the substitution pattern to a source file, returning the result
+ // as either a SourceFile or an OutputFile.
+ SourceFile ApplyPatternToSource(const Settings* settings,
+ const BundleData& bundle_data,
+ const SourceFile& source_file) const;
+ OutputFile ApplyPatternToSourceAsOutputFile(
+ const Settings* settings,
+ const BundleData& bundle_data,
+ const SourceFile& source_file) const;
+
+ // Returns the list of SourceFiles.
+ const std::vector<SourceFile>& sources() const { return sources_; }
+
+ private:
+ std::vector<SourceFile> sources_;
+ SubstitutionPattern pattern_;
+};
+
+#endif // TOOLS_GN_BUNDLE_FILE_RULE_H_
diff --git a/chromium/tools/gn/c_include_iterator.cc b/chromium/tools/gn/c_include_iterator.cc
new file mode 100644
index 00000000000..cc3cb35d27f
--- /dev/null
+++ b/chromium/tools/gn/c_include_iterator.cc
@@ -0,0 +1,176 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/c_include_iterator.h"
+
+#include "base/logging.h"
+#include "base/macros.h"
+#include "base/strings/string_util.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/location.h"
+
+namespace {
+
+enum IncludeType {
+ INCLUDE_NONE,
+ INCLUDE_SYSTEM, // #include <...>
+ INCLUDE_USER // #include "..."
+};
+
+// Returns a new string piece referencing the same buffer as the argument, but
+// with leading space trimmed. This only checks for space and tab characters
+// since we're dealing with lines in C source files.
+base::StringPiece TrimLeadingWhitespace(const base::StringPiece& str) {
+ size_t new_begin = 0;
+ while (new_begin < str.size() &&
+ (str[new_begin] == ' ' || str[new_begin] == '\t'))
+ new_begin++;
+ return str.substr(new_begin);
+}
+
+// We don't want to count comment lines and preprocessor lines toward our
+// "max lines to look at before giving up" since the beginnings of some files
+// may have a lot of comments.
+//
+// We only handle C-style "//" comments since this is the normal commenting
+// style used in Chrome, and do so pretty stupidly. We don't want to write a
+// full C++ parser here, we're just trying to get a good heuristic for checking
+// the file.
+//
+// We assume the line has leading whitespace trimmed. We also assume that empty
+// lines have already been filtered out.
+bool ShouldCountTowardNonIncludeLines(const base::StringPiece& line) {
+ if (base::StartsWith(line, "//", base::CompareCase::SENSITIVE))
+ return false; // Don't count comments.
+ if (base::StartsWith(line, "/*", base::CompareCase::SENSITIVE) ||
+ base::StartsWith(line, " *", base::CompareCase::SENSITIVE))
+ return false; // C-style comment blocks with stars along the left side.
+ if (base::StartsWith(line, "#", base::CompareCase::SENSITIVE))
+ return false; // Don't count preprocessor.
+ if (base::ContainsOnlyChars(line, base::kWhitespaceASCII))
+ return false; // Don't count whitespace lines.
+ return true; // Count everything else.
+}
+
+// Given a line, checks to see if it looks like an include or import and
+// extract the path. The type of include is returned. Returns INCLUDE_NONE on
+// error or if this is not an include line.
+//
+// The 1-based character number on the line that the include was found at
+// will be filled into *begin_char.
+IncludeType ExtractInclude(const base::StringPiece& line,
+ base::StringPiece* path,
+ int* begin_char) {
+ static const char kInclude[] = "#include";
+ static const size_t kIncludeLen = arraysize(kInclude) - 1; // No null.
+ static const char kImport[] = "#import";
+ static const size_t kImportLen = arraysize(kImport) - 1; // No null.
+
+ base::StringPiece trimmed = TrimLeadingWhitespace(line);
+ if (trimmed.empty())
+ return INCLUDE_NONE;
+
+ base::StringPiece contents;
+ if (base::StartsWith(trimmed, base::StringPiece(kInclude, kIncludeLen),
+ base::CompareCase::SENSITIVE))
+ contents = TrimLeadingWhitespace(trimmed.substr(kIncludeLen));
+ else if (base::StartsWith(trimmed, base::StringPiece(kImport, kImportLen),
+ base::CompareCase::SENSITIVE))
+ contents = TrimLeadingWhitespace(trimmed.substr(kImportLen));
+
+ if (contents.empty())
+ return INCLUDE_NONE;
+
+ IncludeType type = INCLUDE_NONE;
+ char terminating_char = 0;
+ if (contents[0] == '"') {
+ type = INCLUDE_USER;
+ terminating_char = '"';
+ } else if (contents[0] == '<') {
+ type = INCLUDE_SYSTEM;
+ terminating_char = '>';
+ } else {
+ return INCLUDE_NONE;
+ }
+
+ // Count everything to next "/> as the contents.
+ size_t terminator_index = contents.find(terminating_char, 1);
+ if (terminator_index == base::StringPiece::npos)
+ return INCLUDE_NONE;
+
+ *path = contents.substr(1, terminator_index - 1);
+ // Note: one based so we do "+ 1".
+ *begin_char = static_cast<int>(path->data() - line.data()) + 1;
+ return type;
+}
+
+// Returns true if this line has a "nogncheck" comment associated with it.
+bool HasNoCheckAnnotation(const base::StringPiece& line) {
+ return line.find("nogncheck") != base::StringPiece::npos;
+}
+
+} // namespace
+
+const int CIncludeIterator::kMaxNonIncludeLines = 10;
+
+CIncludeIterator::CIncludeIterator(const InputFile* input)
+ : input_file_(input),
+ file_(input->contents()),
+ offset_(0),
+ line_number_(0),
+ lines_since_last_include_(0) {
+}
+
+CIncludeIterator::~CIncludeIterator() {
+}
+
+bool CIncludeIterator::GetNextIncludeString(base::StringPiece* out,
+ LocationRange* location) {
+ base::StringPiece line;
+ int cur_line_number = 0;
+ while (lines_since_last_include_ <= kMaxNonIncludeLines &&
+ GetNextLine(&line, &cur_line_number)) {
+ base::StringPiece include_contents;
+ int begin_char;
+ IncludeType type = ExtractInclude(line, &include_contents, &begin_char);
+ if (type == INCLUDE_USER && !HasNoCheckAnnotation(line)) {
+ // Only count user includes for now.
+ *out = include_contents;
+ *location = LocationRange(
+ Location(input_file_,
+ cur_line_number,
+ begin_char,
+ -1 /* TODO(scottmg): Is this important? */),
+ Location(input_file_,
+ cur_line_number,
+ begin_char + static_cast<int>(include_contents.size()),
+ -1 /* TODO(scottmg): Is this important? */));
+
+ lines_since_last_include_ = 0;
+ return true;
+ }
+
+ if (ShouldCountTowardNonIncludeLines(line))
+ lines_since_last_include_++;
+ }
+ return false;
+}
+
+bool CIncludeIterator::GetNextLine(base::StringPiece* line, int* line_number) {
+ if (offset_ == file_.size())
+ return false;
+
+ size_t begin = offset_;
+ while (offset_ < file_.size() && file_[offset_] != '\n')
+ offset_++;
+ line_number_++;
+
+ *line = file_.substr(begin, offset_ - begin);
+ *line_number = line_number_;
+
+ // If we didn't hit EOF, skip past the newline for the next one.
+ if (offset_ < file_.size())
+ offset_++;
+ return true;
+}
diff --git a/chromium/tools/gn/c_include_iterator.h b/chromium/tools/gn/c_include_iterator.h
new file mode 100644
index 00000000000..e81a60c9383
--- /dev/null
+++ b/chromium/tools/gn/c_include_iterator.h
@@ -0,0 +1,57 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_C_INCLUDE_ITERATOR_H_
+#define TOOLS_GN_C_INCLUDE_ITERATOR_H_
+
+#include <stddef.h>
+
+#include "base/macros.h"
+#include "base/strings/string_piece.h"
+
+class InputFile;
+class LocationRange;
+
+// Iterates through #includes in C source and header files.
+//
+// This only returns includes we want to check, which is user includes with
+// double-quotes: #include "..."
+class CIncludeIterator {
+ public:
+ // The InputFile pointed to must outlive this class.
+ explicit CIncludeIterator(const InputFile* input);
+ ~CIncludeIterator();
+
+ // Fills in the string with the contents of the next include, and the
+ // location with where it came from, and returns true, or returns false if
+ // there are no more includes.
+ bool GetNextIncludeString(base::StringPiece* out, LocationRange* location);
+
+ // Maximum numbef of non-includes we'll tolerate before giving up. This does
+ // not count comments or preprocessor.
+ static const int kMaxNonIncludeLines;
+
+ private:
+ // Returns false on EOF, otherwise fills in the given line and the one-based
+ // line number into *line_number;
+ bool GetNextLine(base::StringPiece* line, int* line_number);
+
+ const InputFile* input_file_;
+
+ // This just points into input_file_.contents() for convenience.
+ base::StringPiece file_;
+
+ // 0-based offset into the file.
+ size_t offset_;
+
+ int line_number_; // One-based. Indicates the last line we read.
+
+ // Number of lines we've processed since seeing the last include (or the
+ // beginning of the file) with some exceptions.
+ int lines_since_last_include_;
+
+ DISALLOW_COPY_AND_ASSIGN(CIncludeIterator);
+};
+
+#endif // TOOLS_GN_C_INCLUDE_ITERATOR_H_
diff --git a/chromium/tools/gn/c_include_iterator_unittest.cc b/chromium/tools/gn/c_include_iterator_unittest.cc
new file mode 100644
index 00000000000..a4278b21200
--- /dev/null
+++ b/chromium/tools/gn/c_include_iterator_unittest.cc
@@ -0,0 +1,159 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/c_include_iterator.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/location.h"
+
+namespace {
+
+bool RangeIs(const LocationRange& range,
+ int line, int begin_char, int end_char) {
+ return range.begin().line_number() == line &&
+ range.end().line_number() == line &&
+ range.begin().column_number() == begin_char &&
+ range.end().column_number() == end_char;
+}
+
+} // namespace
+
+TEST(CIncludeIterator, Basic) {
+ std::string buffer;
+ buffer.append("// Some comment\n");
+ buffer.append("\n");
+ buffer.append("#include \"foo/bar.h\"\n");
+ buffer.append("\n");
+ buffer.append("#include <stdio.h>\n");
+ buffer.append("\n");
+ buffer.append(" #include \"foo/baz.h\"\n"); // Leading whitespace
+ buffer.append("#include \"la/deda.h\"\n");
+ // Line annotated with "// nogncheck"
+ buffer.append("#include \"should_be_skipped.h\" // nogncheck\n");
+ buffer.append("#import \"weird_mac_import.h\"\n");
+ buffer.append("\n");
+ buffer.append("void SomeCode() {\n");
+
+ InputFile file(SourceFile("//foo.cc"));
+ file.SetContents(buffer);
+
+ CIncludeIterator iter(&file);
+
+ base::StringPiece contents;
+ LocationRange range;
+ EXPECT_TRUE(iter.GetNextIncludeString(&contents, &range));
+ EXPECT_EQ("foo/bar.h", contents);
+ EXPECT_TRUE(RangeIs(range, 3, 11, 20)) << range.begin().Describe(true);
+
+ EXPECT_TRUE(iter.GetNextIncludeString(&contents, &range));
+ EXPECT_EQ("foo/baz.h", contents);
+ EXPECT_TRUE(RangeIs(range, 7, 12, 21)) << range.begin().Describe(true);
+
+ EXPECT_TRUE(iter.GetNextIncludeString(&contents, &range));
+ EXPECT_EQ("la/deda.h", contents);
+ EXPECT_TRUE(RangeIs(range, 8, 11, 20)) << range.begin().Describe(true);
+
+ // The line annotated with "nogncheck" should be skipped.
+
+ EXPECT_TRUE(iter.GetNextIncludeString(&contents, &range));
+ EXPECT_EQ("weird_mac_import.h", contents);
+ EXPECT_TRUE(RangeIs(range, 10, 10, 28)) << range.begin().Describe(true);
+
+ EXPECT_FALSE(iter.GetNextIncludeString(&contents, &range));
+}
+
+// Tests that we don't search for includes indefinitely.
+TEST(CIncludeIterator, GiveUp) {
+ std::string buffer;
+ for (size_t i = 0; i < 1000; i++)
+ buffer.append("x\n");
+ buffer.append("#include \"foo/bar.h\"\n");
+
+ InputFile file(SourceFile("//foo.cc"));
+ file.SetContents(buffer);
+
+ base::StringPiece contents;
+ LocationRange range;
+
+ CIncludeIterator iter(&file);
+ EXPECT_FALSE(iter.GetNextIncludeString(&contents, &range));
+ EXPECT_TRUE(contents.empty());
+}
+
+// Don't count blank lines, comments, and preprocessor when giving up.
+TEST(CIncludeIterator, DontGiveUp) {
+ std::string buffer;
+ for (size_t i = 0; i < 1000; i++)
+ buffer.push_back('\n');
+ for (size_t i = 0; i < 1000; i++)
+ buffer.append("// comment\n");
+ for (size_t i = 0; i < 1000; i++)
+ buffer.append("#preproc\n");
+ buffer.append("#include \"foo/bar.h\"\n");
+
+ InputFile file(SourceFile("//foo.cc"));
+ file.SetContents(buffer);
+
+ base::StringPiece contents;
+ LocationRange range;
+
+ CIncludeIterator iter(&file);
+ EXPECT_TRUE(iter.GetNextIncludeString(&contents, &range));
+ EXPECT_EQ("foo/bar.h", contents);
+}
+
+// Tests that we'll tolerate some small numbers of non-includes interspersed
+// with real includes.
+TEST(CIncludeIterator, TolerateNonIncludes) {
+ const size_t kSkip = CIncludeIterator::kMaxNonIncludeLines - 2;
+ const size_t kGroupCount = 100;
+
+ std::string include("foo/bar.h");
+
+ // Allow a series of includes with blanks in between.
+ std::string buffer;
+ for (size_t group = 0; group < kGroupCount; group++) {
+ for (size_t i = 0; i < kSkip; i++)
+ buffer.append("foo\n");
+ buffer.append("#include \"" + include + "\"\n");
+ }
+
+ InputFile file(SourceFile("//foo.cc"));
+ file.SetContents(buffer);
+
+ base::StringPiece contents;
+ LocationRange range;
+
+ CIncludeIterator iter(&file);
+ for (size_t group = 0; group < kGroupCount; group++) {
+ EXPECT_TRUE(iter.GetNextIncludeString(&contents, &range));
+ EXPECT_EQ(include, contents.as_string());
+ }
+ EXPECT_FALSE(iter.GetNextIncludeString(&contents, &range));
+}
+
+// Tests that comments of the form
+// /*
+// *
+// */
+// are not counted toward the non-include line count.
+TEST(CIncludeIterator, CStyleComments) {
+ std::string buffer("/*");
+ for (size_t i = 0; i < 1000; i++)
+ buffer.append(" *\n");
+ buffer.append(" */\n\n");
+ buffer.append("#include \"foo/bar.h\"\n");
+
+ InputFile file(SourceFile("//foo.cc"));
+ file.SetContents(buffer);
+
+ base::StringPiece contents;
+ LocationRange range;
+
+ CIncludeIterator iter(&file);
+ EXPECT_TRUE(iter.GetNextIncludeString(&contents, &range));
+ EXPECT_EQ("foo/bar.h", contents);
+}
diff --git a/chromium/tools/gn/command_args.cc b/chromium/tools/gn/command_args.cc
new file mode 100644
index 00000000000..12c20d75027
--- /dev/null
+++ b/chromium/tools/gn/command_args.cc
@@ -0,0 +1,352 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <map>
+
+#include "base/command_line.h"
+#include "base/environment.h"
+#include "base/files/file_util.h"
+#include "base/process/launch.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/string_util.h"
+#include "build/build_config.h"
+#include "tools/gn/commands.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/setup.h"
+#include "tools/gn/standard_out.h"
+#include "tools/gn/tokenizer.h"
+#include "tools/gn/trace.h"
+
+#if defined(OS_WIN)
+#include <windows.h>
+#include <shellapi.h>
+#endif
+
+namespace commands {
+
+namespace {
+
+const char kSwitchList[] = "list";
+const char kSwitchShort[] = "short";
+
+bool DoesLineBeginWithComment(const base::StringPiece& line) {
+ // Skip whitespace.
+ size_t i = 0;
+ while (i < line.size() && base::IsAsciiWhitespace(line[i]))
+ i++;
+
+ return i < line.size() && line[i] == '#';
+}
+
+// Returns the offset of the beginning of the line identified by |offset|.
+size_t BackUpToLineBegin(const std::string& data, size_t offset) {
+ // Degenerate case of an empty line. Below we'll try to return the
+ // character after the newline, but that will be incorrect in this case.
+ if (offset == 0 || Tokenizer::IsNewline(data, offset))
+ return offset;
+
+ size_t cur = offset;
+ do {
+ cur --;
+ if (Tokenizer::IsNewline(data, cur))
+ return cur + 1; // Want the first character *after* the newline.
+ } while (cur > 0);
+ return 0;
+}
+
+// Assumes DoesLineBeginWithComment(), this strips the # character from the
+// beginning and normalizes preceeding whitespace.
+std::string StripHashFromLine(const base::StringPiece& line) {
+ // Replace the # sign and everything before it with 3 spaces, so that a
+ // normal comment that has a space after the # will be indented 4 spaces
+ // (which makes our formatting come out nicely). If the comment is indented
+ // from there, we want to preserve that indenting.
+ return " " + line.substr(line.find('#') + 1).as_string();
+}
+
+// Tries to find the comment before the setting of the given value.
+void GetContextForValue(const Value& value,
+ std::string* location_str,
+ std::string* comment) {
+ Location location = value.origin()->GetRange().begin();
+ const InputFile* file = location.file();
+ if (!file)
+ return;
+
+ *location_str = file->name().value() + ":" +
+ base::IntToString(location.line_number());
+
+ const std::string& data = file->contents();
+ size_t line_off =
+ Tokenizer::ByteOffsetOfNthLine(data, location.line_number());
+
+ while (line_off > 1) {
+ line_off -= 2; // Back up to end of previous line.
+ size_t previous_line_offset = BackUpToLineBegin(data, line_off);
+
+ base::StringPiece line(&data[previous_line_offset],
+ line_off - previous_line_offset + 1);
+ if (!DoesLineBeginWithComment(line))
+ break;
+
+ comment->insert(0, StripHashFromLine(line) + "\n");
+ line_off = previous_line_offset;
+ }
+}
+
+void PrintArgHelp(const base::StringPiece& name, const Value& value) {
+ OutputString(name.as_string(), DECORATION_YELLOW);
+ OutputString(" Default = " + value.ToString(true) + "\n");
+
+ if (value.origin()) {
+ std::string location, comment;
+ GetContextForValue(value, &location, &comment);
+ OutputString(" " + location + "\n" + comment);
+ } else {
+ OutputString(" (Internally set)\n");
+ }
+}
+
+int ListArgs(const std::string& build_dir) {
+ Setup* setup = new Setup;
+ setup->build_settings().set_check_for_bad_items(false);
+ if (!setup->DoSetup(build_dir, false) || !setup->Run())
+ return 1;
+
+ Scope::KeyValueMap build_args;
+ setup->build_settings().build_args().MergeDeclaredArguments(&build_args);
+
+ // Find all of the arguments we care about. Use a regular map so they're
+ // sorted nicely when we write them out.
+ std::map<base::StringPiece, Value> sorted_args;
+ std::string list_value =
+ base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(kSwitchList);
+ if (list_value.empty()) {
+ // List all values.
+ for (const auto& arg : build_args)
+ sorted_args.insert(arg);
+ } else {
+ // List just the one specified as the parameter to --list.
+ Scope::KeyValueMap::const_iterator found_arg = build_args.find(list_value);
+ if (found_arg == build_args.end()) {
+ Err(Location(), "Unknown build argument.",
+ "You asked for \"" + list_value + "\" which I didn't find in any "
+ "build file\nassociated with this build.").PrintToStdout();
+ return 1;
+ }
+ sorted_args.insert(*found_arg);
+ }
+
+ if (base::CommandLine::ForCurrentProcess()->HasSwitch(kSwitchShort)) {
+ // Short key=value output.
+ for (const auto& arg : sorted_args) {
+ OutputString(arg.first.as_string());
+ OutputString(" = ");
+ OutputString(arg.second.ToString(true));
+ OutputString("\n");
+ }
+ return 0;
+ }
+
+ // Long output.
+ for (const auto& arg : sorted_args) {
+ PrintArgHelp(arg.first, arg.second);
+ OutputString("\n");
+ }
+
+ return 0;
+}
+
+#if defined(OS_WIN)
+
+bool RunEditor(const base::FilePath& file_to_edit) {
+ SHELLEXECUTEINFO info;
+ memset(&info, 0, sizeof(info));
+ info.cbSize = sizeof(info);
+ info.fMask = SEE_MASK_NOCLOSEPROCESS | SEE_MASK_CLASSNAME;
+ info.lpFile = file_to_edit.value().c_str();
+ info.nShow = SW_SHOW;
+ info.lpClass = L".txt";
+ if (!::ShellExecuteEx(&info)) {
+ Err(Location(), "Couldn't run editor.",
+ "Just edit \"" + FilePathToUTF8(file_to_edit) +
+ "\" manually instead.").PrintToStdout();
+ return false;
+ }
+
+ if (!info.hProcess) {
+ // Windows re-used an existing process.
+ OutputString("\"" + FilePathToUTF8(file_to_edit) +
+ "\" opened in editor, save it and press <Enter> when done.\n");
+ getchar();
+ } else {
+ OutputString("Waiting for editor on \"" + FilePathToUTF8(file_to_edit) +
+ "\"...\n");
+ ::WaitForSingleObject(info.hProcess, INFINITE);
+ ::CloseHandle(info.hProcess);
+ }
+ return true;
+}
+
+#else // POSIX
+
+bool RunEditor(const base::FilePath& file_to_edit) {
+ const char* editor_ptr = getenv("VISUAL");
+ if (!editor_ptr)
+ editor_ptr = getenv("GN_EDITOR");
+ if (!editor_ptr)
+ editor_ptr = getenv("EDITOR");
+ if (!editor_ptr)
+ editor_ptr = "vi";
+
+ std::string cmd(editor_ptr);
+ cmd.append(" \"");
+
+ // Its impossible to do this properly since we don't know the user's shell,
+ // but quoting and escaping internal quotes should handle 99.999% of all
+ // cases.
+ std::string escaped_name = file_to_edit.value();
+ base::ReplaceSubstringsAfterOffset(&escaped_name, 0, "\"", "\\\"");
+ cmd.append(escaped_name);
+ cmd.push_back('"');
+
+ OutputString("Waiting for editor on \"" + file_to_edit.value() +
+ "\"...\n");
+ return system(cmd.c_str()) == 0;
+}
+
+#endif
+
+int EditArgsFile(const std::string& build_dir) {
+ {
+ // Scope the setup. We only use it for some basic state. We'll do the
+ // "real" build below in the gen command.
+ Setup setup;
+ setup.build_settings().set_check_for_bad_items(false);
+ // Don't fill build arguments. We're about to edit the file which supplies
+ // these in the first place.
+ setup.set_fill_arguments(false);
+ if (!setup.DoSetup(build_dir, true))
+ return 1;
+
+ // Ensure the file exists. Need to normalize path separators since on
+ // Windows they can come out as forward slashes here, and that confuses some
+ // of the commands.
+ base::FilePath arg_file =
+ setup.build_settings().GetFullPath(setup.GetBuildArgFile())
+ .NormalizePathSeparators();
+ if (!base::PathExists(arg_file)) {
+ std::string argfile_default_contents =
+ "# Build arguments go here. Examples:\n"
+ "# is_component_build = true\n"
+ "# is_debug = false\n"
+ "# See \"gn args <out_dir> --list\" for available build "
+ "arguments.\n";
+#if defined(OS_WIN)
+ // Use Windows lineendings for this file since it will often open in
+ // Notepad which can't handle Unix ones.
+ base::ReplaceSubstringsAfterOffset(
+ &argfile_default_contents, 0, "\n", "\r\n");
+#endif
+ base::CreateDirectory(arg_file.DirName());
+ base::WriteFile(arg_file, argfile_default_contents.c_str(),
+ static_cast<int>(argfile_default_contents.size()));
+ }
+
+ ScopedTrace editor_trace(TraceItem::TRACE_SETUP, "Waiting for editor");
+ if (!RunEditor(arg_file))
+ return 1;
+ }
+
+ // Now do a normal "gen" command.
+ OutputString("Generating files...\n");
+ std::vector<std::string> gen_commands;
+ gen_commands.push_back(build_dir);
+ return RunGen(gen_commands);
+}
+
+} // namespace
+
+extern const char kArgs[] = "args";
+extern const char kArgs_HelpShort[] =
+ "args: Display or configure arguments declared by the build.";
+extern const char kArgs_Help[] =
+ "gn args <out_dir> [--list] [--short] [--args]\n"
+ "\n"
+ " See also \"gn help buildargs\" for a more high-level overview of how\n"
+ " build arguments work.\n"
+ "\n"
+ "Usage\n"
+ " gn args <out_dir>\n"
+ " Open the arguments for the given build directory in an editor\n"
+ " (as specified by the EDITOR environment variable). If the given\n"
+ " build directory doesn't exist, it will be created and an empty\n"
+ " args file will be opened in the editor. You would type something\n"
+ " like this into that file:\n"
+ " enable_doom_melon=false\n"
+ " os=\"android\"\n"
+ "\n"
+ " Note: you can edit the build args manually by editing the file\n"
+ " \"args.gn\" in the build directory and then running\n"
+ " \"gn gen <out_dir>\".\n"
+ "\n"
+ " gn args <out_dir> --list[=<exact_arg>] [--short]\n"
+ " Lists all build arguments available in the current configuration,\n"
+ " or, if an exact_arg is specified for the list flag, just that one\n"
+ " build argument.\n"
+ "\n"
+ " The output will list the declaration location, default value, and\n"
+ " comment preceeding the declaration. If --short is specified,\n"
+ " only the names and values will be printed.\n"
+ "\n"
+ " If the out_dir is specified, the build configuration will be\n"
+ " taken from that build directory. The reason this is needed is that\n"
+ " the definition of some arguments is dependent on the build\n"
+ " configuration, so setting some values might add, remove, or change\n"
+ " the default values for other arguments. Specifying your exact\n"
+ " configuration allows the proper arguments to be displayed.\n"
+ "\n"
+ " Instead of specifying the out_dir, you can also use the\n"
+ " command-line flag to specify the build configuration:\n"
+ " --args=<exact list of args to use>\n"
+ "\n"
+ "Examples\n"
+ " gn args out/Debug\n"
+ " Opens an editor with the args for out/Debug.\n"
+ "\n"
+ " gn args out/Debug --list --short\n"
+ " Prints all arguments with their default values for the out/Debug\n"
+ " build.\n"
+ "\n"
+ " gn args out/Debug --list=target_cpu\n"
+ " Prints information about the \"target_cpu\" argument for the "
+ "out/Debug\n"
+ " build.\n"
+ "\n"
+ " gn args --list --args=\"os=\\\"android\\\" enable_doom_melon=true\"\n"
+ " Prints all arguments with the default values for a build with the\n"
+ " given arguments set (which may affect the values of other\n"
+ " arguments).\n";
+
+int RunArgs(const std::vector<std::string>& args) {
+ if (args.size() != 1) {
+ Err(Location(), "Exactly one build dir needed.",
+ "Usage: \"gn args <out_dir>\"\n"
+ "Or see \"gn help args\" for more variants.").PrintToStdout();
+ return 1;
+ }
+
+ if (base::CommandLine::ForCurrentProcess()->HasSwitch(kSwitchList))
+ return ListArgs(args[0]);
+ return EditArgsFile(args[0]);
+}
+
+} // namespace commands
diff --git a/chromium/tools/gn/command_check.cc b/chromium/tools/gn/command_check.cc
new file mode 100644
index 00000000000..40931a68709
--- /dev/null
+++ b/chromium/tools/gn/command_check.cc
@@ -0,0 +1,262 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include "base/command_line.h"
+#include "base/strings/stringprintf.h"
+#include "tools/gn/commands.h"
+#include "tools/gn/header_checker.h"
+#include "tools/gn/setup.h"
+#include "tools/gn/standard_out.h"
+#include "tools/gn/switches.h"
+#include "tools/gn/target.h"
+#include "tools/gn/trace.h"
+
+namespace commands {
+
+const char kNoGnCheck_Help[] =
+ "nogncheck: Skip an include line from checking.\n"
+ "\n"
+ " GN's header checker helps validate that the includes match the build\n"
+ " dependency graph. Sometimes an include might be conditional or\n"
+ " otherwise problematic, but you want to specifically allow it. In this\n"
+ " case, it can be whitelisted.\n"
+ "\n"
+ " Include lines containing the substring \"nogncheck\" will be excluded\n"
+ " from header checking. The most common case is a conditional include:\n"
+ "\n"
+ " #if defined(ENABLE_DOOM_MELON)\n"
+ " #include \"tools/doom_melon/doom_melon.h\" // nogncheck\n"
+ " #endif\n"
+ "\n"
+ " If the build file has a conditional dependency on the corresponding\n"
+ " target that matches the conditional include, everything will always\n"
+ " link correctly:\n"
+ "\n"
+ " source_set(\"mytarget\") {\n"
+ " ...\n"
+ " if (enable_doom_melon) {\n"
+ " defines = [ \"ENABLE_DOOM_MELON\" ]\n"
+ " deps += [ \"//tools/doom_melon\" ]\n"
+ " }\n"
+ "\n"
+ " But GN's header checker does not understand preprocessor directives,\n"
+ " won't know it matches the build dependencies, and will flag this\n"
+ " include as incorrect when the condition is false.\n"
+ "\n"
+ "More information\n"
+ "\n"
+ " The topic \"gn help check\" has general information on how checking\n"
+ " works and advice on fixing problems. Targets can also opt-out of\n"
+ " checking, see \"gn help check_includes\".\n";
+
+const char kCheck[] = "check";
+const char kCheck_HelpShort[] =
+ "check: Check header dependencies.";
+const char kCheck_Help[] =
+ "gn check <out_dir> [<label_pattern>] [--force]\n"
+ "\n"
+ " GN's include header checker validates that the includes for C-like\n"
+ " source files match the build dependency graph.\n"
+ "\n"
+ " \"gn check\" is the same thing as \"gn gen\" with the \"--check\" flag\n"
+ " except that this command does not write out any build files. It's\n"
+ " intended to be an easy way to manually trigger include file checking.\n"
+ "\n"
+ " The <label_pattern> can take exact labels or patterns that match more\n"
+ " than one (although not general regular expressions). If specified,\n"
+ " only those matching targets will be checked. See\n"
+ " \"gn help label_pattern\" for details.\n"
+ "\n"
+ "Command-specific switches\n"
+ "\n"
+ " --force\n"
+ " Ignores specifications of \"check_includes = false\" and checks\n"
+ " all target's files that match the target label.\n"
+ "\n"
+ "What gets checked\n"
+ "\n"
+ " The .gn file may specify a list of targets to be checked. Only these\n"
+ " targets will be checked if no label_pattern is specified on the\n"
+ " command line. Otherwise, the command-line list is used instead. See\n"
+ " \"gn help dotfile\".\n"
+ "\n"
+ " Targets can opt-out from checking with \"check_includes = false\"\n"
+ " (see \"gn help check_includes\").\n"
+ "\n"
+ " For targets being checked:\n"
+ "\n"
+ " - GN opens all C-like source files in the targets to be checked and\n"
+ " scans the top for includes.\n"
+ "\n"
+ " - Includes with a \"nogncheck\" annotation are skipped (see\n"
+ " \"gn help nogncheck\").\n"
+ "\n"
+ " - Only includes using \"quotes\" are checked. <brackets> are assumed\n"
+ " to be system includes.\n"
+ "\n"
+ " - Include paths are assumed to be relative to either the source root\n"
+ " or the \"root_gen_dir\" and must include all the path components.\n"
+ " (It might be nice in the future to incorporate GN's knowledge of\n"
+ " the include path to handle other include styles.)\n"
+ "\n"
+ " - GN does not run the preprocessor so will not understand\n"
+ " conditional includes.\n"
+ "\n"
+ " - Only includes matching known files in the build are checked:\n"
+ " includes matching unknown paths are ignored.\n"
+ "\n"
+ " For an include to be valid:\n"
+ "\n"
+ " - The included file must be in the current target, or there must\n"
+ " be a path following only public dependencies to a target with the\n"
+ " file in it (\"gn path\" is a good way to diagnose problems).\n"
+ "\n"
+ " - There can be multiple targets with an included file: only one\n"
+ " needs to be valid for the include to be allowed.\n"
+ "\n"
+ " - If there are only \"sources\" in a target, all are considered to\n"
+ " be public and can be included by other targets with a valid public\n"
+ " dependency path.\n"
+ "\n"
+ " - If a target lists files as \"public\", only those files are\n"
+ " able to be included by other targets. Anything in the sources\n"
+ " will be considered private and will not be includable regardless\n"
+ " of dependency paths.\n"
+ "\n"
+ " - Ouptuts from actions are treated like public sources on that\n"
+ " target.\n"
+ "\n"
+ " - A target can include headers from a target that depends on it\n"
+ " if the other target is annotated accordingly. See\n"
+ " \"gn help allow_circular_includes_from\".\n"
+ "\n"
+ "Advice on fixing problems\n"
+ "\n"
+ " If you have a third party project that uses relative includes,\n"
+ " it's generally best to exclude that target from checking altogether\n"
+ " via \"check_includes = false\".\n"
+ "\n"
+ " If you have conditional includes, make sure the build conditions\n"
+ " and the preprocessor conditions match, and annotate the line with\n"
+ " \"nogncheck\" (see \"gn help nogncheck\" for an example).\n"
+ "\n"
+ " If two targets are hopelessly intertwined, use the\n"
+ " \"allow_circular_includes_from\" annotation. Ideally each should have\n"
+ " identical dependencies so configs inherited from those dependencies\n"
+ " are consistent (see \"gn help allow_circular_includes_from\").\n"
+ "\n"
+ " If you have a standalone header file or files that need to be shared\n"
+ " between a few targets, you can consider making a source_set listing\n"
+ " only those headers as public sources. With only header files, the\n"
+ " source set will be a no-op from a build perspective, but will give a\n"
+ " central place to refer to those headers. That source set's files\n"
+ " will still need to pass \"gn check\" in isolation.\n"
+ "\n"
+ " In rare cases it makes sense to list a header in more than one\n"
+ " target if it could be considered conceptually a member of both.\n"
+ "\n"
+ "Examples\n"
+ "\n"
+ " gn check out/Debug\n"
+ " Check everything.\n"
+ "\n"
+ " gn check out/Default //foo:bar\n"
+ " Check only the files in the //foo:bar target.\n"
+ "\n"
+ " gn check out/Default \"//foo/*\n"
+ " Check only the files in targets in the //foo directory tree.\n";
+
+int RunCheck(const std::vector<std::string>& args) {
+ if (args.size() != 1 && args.size() != 2) {
+ Err(Location(), "You're holding it wrong.",
+ "Usage: \"gn check <out_dir> [<target_label>]\"").PrintToStdout();
+ return 1;
+ }
+
+ // Deliberately leaked to avoid expensive process teardown.
+ Setup* setup = new Setup();
+ if (!setup->DoSetup(args[0], false))
+ return 1;
+ if (!setup->Run())
+ return 1;
+
+ std::vector<const Target*> all_targets =
+ setup->builder()->GetAllResolvedTargets();
+
+ bool filtered_by_build_config = false;
+ std::vector<const Target*> targets_to_check;
+ if (args.size() > 1) {
+ // Compute the targets to check.
+ std::vector<std::string> inputs(args.begin() + 1, args.end());
+ UniqueVector<const Target*> target_matches;
+ UniqueVector<const Config*> config_matches;
+ UniqueVector<const Toolchain*> toolchain_matches;
+ UniqueVector<SourceFile> file_matches;
+ if (!ResolveFromCommandLineInput(setup, inputs, false,
+ &target_matches, &config_matches,
+ &toolchain_matches, &file_matches))
+ return 1;
+
+ if (target_matches.size() == 0) {
+ OutputString("No matching targets.\n");
+ return 1;
+ }
+ targets_to_check.insert(targets_to_check.begin(),
+ target_matches.begin(), target_matches.end());
+ } else {
+ // No argument means to check everything allowed by the filter in
+ // the build config file.
+ if (setup->check_patterns()) {
+ FilterTargetsByPatterns(all_targets, *setup->check_patterns(),
+ &targets_to_check);
+ filtered_by_build_config = targets_to_check.size() != all_targets.size();
+ } else {
+ // No global filter, check everything.
+ targets_to_check = all_targets;
+ }
+ }
+
+ const base::CommandLine* cmdline = base::CommandLine::ForCurrentProcess();
+ bool force = cmdline->HasSwitch("force");
+
+ if (!CheckPublicHeaders(&setup->build_settings(), all_targets,
+ targets_to_check, force))
+ return 1;
+
+ if (!base::CommandLine::ForCurrentProcess()->HasSwitch(switches::kQuiet)) {
+ if (filtered_by_build_config) {
+ // Tell the user about the implicit filtering since this is obscure.
+ OutputString(base::StringPrintf(
+ "%d targets out of %d checked based on the check_targets defined in"
+ " \".gn\".\n",
+ static_cast<int>(targets_to_check.size()),
+ static_cast<int>(all_targets.size())));
+ }
+ OutputString("Header dependency check OK\n", DECORATION_GREEN);
+ }
+ return 0;
+}
+
+bool CheckPublicHeaders(const BuildSettings* build_settings,
+ const std::vector<const Target*>& all_targets,
+ const std::vector<const Target*>& to_check,
+ bool force_check) {
+ ScopedTrace trace(TraceItem::TRACE_CHECK_HEADERS, "Check headers");
+
+ scoped_refptr<HeaderChecker> header_checker(
+ new HeaderChecker(build_settings, all_targets));
+
+ std::vector<Err> header_errors;
+ header_checker->Run(to_check, force_check, &header_errors);
+ for (size_t i = 0; i < header_errors.size(); i++) {
+ if (i > 0)
+ OutputString("___________________\n", DECORATION_YELLOW);
+ header_errors[i].PrintToStdout();
+ }
+ return header_errors.empty();
+}
+
+} // namespace commands
diff --git a/chromium/tools/gn/command_clean.cc b/chromium/tools/gn/command_clean.cc
new file mode 100644
index 00000000000..d14b22d84a3
--- /dev/null
+++ b/chromium/tools/gn/command_clean.cc
@@ -0,0 +1,150 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/files/file_path.h"
+#include "base/files/file_util.h"
+#include "base/strings/string_split.h"
+#include "base/strings/stringprintf.h"
+#include "tools/gn/commands.h"
+#include "tools/gn/err.h"
+#include "tools/gn/setup.h"
+
+namespace {
+
+// Extracts from a build.ninja the commands to run GN.
+//
+// The commands to run GN are the gn rule and build.ninja build step at the top
+// of the build.ninja file. We want to keep these when deleting GN builds since
+// we want to preserve the command-line flags to GN.
+//
+// On error, returns the empty string.
+std::string ExtractGNBuildCommands(const base::FilePath& build_ninja_file) {
+  std::string file_contents;
+  if (!base::ReadFileToString(build_ninja_file, &file_contents)) {
+    return std::string();
+  }
+
+  // Split on "\n" keeping empty entries so the blank separator lines are
+  // visible to the loop below.
+  std::vector<std::string> lines = base::SplitString(
+      file_contents, "\n", base::KEEP_WHITESPACE, base::SPLIT_WANT_ALL);
+
+  // Copy lines up to and including the second blank line. The gn rule block
+  // and the build.ninja build statement are each expected to be terminated
+  // by a blank line, so this captures exactly those two sections.
+  std::string result;
+  int num_blank_lines = 0;
+  for (const auto& line : lines) {
+    result += line;
+    result += "\n";
+    if (line.empty()) {
+      ++num_blank_lines;
+    }
+    if (num_blank_lines == 2)
+      break;
+  }
+
+  return result;
+}
+
+// Fallback build.ninja contents written when the existing build.ninja could
+// not be parsed. The %s is substituted with the name of the build directory
+// (its last path component, e.g. "Debug") by RunClean below.
+const char kDefaultNinjaFile[] =
+    "rule gn\n"
+    "  command = gn -q gen //out/%s/\n"
+    "  description = Regenerating ninja files\n"
+    "\n"
+    "build build.ninja: gn\n"
+    "  generator = 1\n"
+    "  depfile = build.ninja.d\n";
+
+} // namespace
+
+namespace commands {
+
+// Command name and help text for "gn clean" (looked up by the gn command
+// dispatcher).
+const char kClean[] = "clean";
+const char kClean_HelpShort[] =
+    "clean: Cleans the output directory.";
+const char kClean_Help[] =
+    "gn clean <out_dir>\n"
+    "\n"
+    "  Deletes the contents of the output directory except for args.gn and\n"
+    "  creates a Ninja build environment sufficient to regenerate the build.\n";
+
+// Implements "gn clean <out_dir>": wipes the output directory while
+// preserving args.gn, then recreates a minimal build.ninja/build.ninja.d
+// pair so the next Ninja invocation re-runs GN with the original flags.
+// Returns 0 on success, 1 on any error.
+int RunClean(const std::vector<std::string>& args) {
+  if (args.size() != 1) {
+    Err(Location(), "You're holding it wrong.",
+        "Usage: \"gn clean <out_dir>\"").PrintToStdout();
+    return 1;
+  }
+
+  // NOTE(review): |setup| is heap-allocated and never freed; this appears to
+  // be the same deliberate "leak to avoid expensive process teardown"
+  // pattern used by the other gn commands -- confirm.
+  Setup* setup = new Setup;
+  if (!setup->DoSetup(args[0], false))
+    return 1;
+
+  base::FilePath build_dir(setup->build_settings().GetFullPath(
+      SourceDir(setup->build_settings().build_dir().value())));
+
+  // NOTE: Not all GN builds have an args.gn file, hence we check whether a
+  // build.ninja.d file exists instead.
+  base::FilePath build_ninja_d_file = build_dir.AppendASCII("build.ninja.d");
+  if (!base::PathExists(build_ninja_d_file)) {
+    Err(Location(),
+        base::StringPrintf("%s does not look like a build directory.\n",
+                           build_ninja_d_file.DirName().value().c_str()))
+        .PrintToStdout();
+    return 1;
+  }
+
+  // Erase everything but the args file, and write a dummy build.ninja file that
+  // will automatically rerun GN the next time Ninja is run.
+  base::FilePath build_ninja_file = build_dir.AppendASCII("build.ninja");
+  std::string build_commands = ExtractGNBuildCommands(build_ninja_file);
+
+  // Read the args.gn file, if any. Not all GN builds have one. A read
+  // failure simply leaves |args_contents| empty.
+  base::FilePath gn_args_file = build_dir.AppendASCII("args.gn");
+  std::string args_contents;
+  base::ReadFileToString(gn_args_file, &args_contents);
+
+  // Recursively delete the whole output directory.
+  base::DeleteFile(build_dir, true);
+
+  // Put back the args.gn file (if any).
+  base::CreateDirectory(build_dir);
+  if (!args_contents.empty()) {
+    if (base::WriteFile(gn_args_file, args_contents.data(),
+                        static_cast<int>(args_contents.size())) == -1) {
+      Err(Location(), std::string("Failed to write args.gn.")).PrintToStdout();
+      return 1;
+    }
+  }
+
+  // Write the build.ninja file sufficiently to regenerate itself.
+  if (!build_commands.empty()) {
+    if (base::WriteFile(build_ninja_file, build_commands.data(),
+                        static_cast<int>(build_commands.size())) == -1) {
+      Err(Location(), std::string("Failed to write build.ninja."))
+          .PrintToStdout();
+      return 1;
+    }
+  } else {
+    // Couldn't parse the build.ninja file, write a default thing. The second
+    // to last path component is the build directory's name, which fills the
+    // %s in kDefaultNinjaFile.
+    // NOTE(review): base::FilePath::StringType is wide on Windows, so
+    // passing c_str() to a narrow "%s" looks suspect there -- confirm.
+    std::vector<base::FilePath::StringType> components;
+    build_ninja_file.GetComponents(&components);
+    std::string default_build_file = base::StringPrintf(
+        kDefaultNinjaFile, components[components.size() - 2].c_str());
+    if (base::WriteFile(build_ninja_file, default_build_file.data(),
+                        static_cast<int>(default_build_file.size())) == -1) {
+      Err(Location(), std::string("Failed to write build.ninja."))
+          .PrintToStdout();
+      return 1;
+    }
+  }
+
+  // Write a .d file for the build which references a nonexistent file.
+  // This will make Ninja always mark the build as dirty.
+  std::string dummy_content("build.ninja: nonexistant_file.gn\n");
+  if (base::WriteFile(build_ninja_d_file, dummy_content.data(),
+                      static_cast<int>(dummy_content.size())) == -1) {
+    Err(Location(), std::string("Failed to write build.ninja.d."))
+        .PrintToStdout();
+    return 1;
+  }
+
+  return 0;
+}
+
+} // namespace commands
diff --git a/chromium/tools/gn/command_desc.cc b/chromium/tools/gn/command_desc.cc
new file mode 100644
index 00000000000..15efe77cd6e
--- /dev/null
+++ b/chromium/tools/gn/command_desc.cc
@@ -0,0 +1,763 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include <algorithm>
+#include <set>
+#include <sstream>
+
+#include "base/command_line.h"
+#include "build/build_config.h"
+#include "tools/gn/commands.h"
+#include "tools/gn/config.h"
+#include "tools/gn/config_values_extractors.h"
+#include "tools/gn/deps_iterator.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/item.h"
+#include "tools/gn/label.h"
+#include "tools/gn/runtime_deps.h"
+#include "tools/gn/setup.h"
+#include "tools/gn/standard_out.h"
+#include "tools/gn/substitution_writer.h"
+#include "tools/gn/target.h"
+#include "tools/gn/variables.h"
+
+namespace commands {
+
+namespace {
+
+// Desc-specific command line switches.
+const char kBlame[] = "blame";
+const char kTree[] = "tree";
+
+// Prints the given directory in a nice way for the user to view.
+std::string FormatSourceDir(const SourceDir& dir) {
+#if defined(OS_WIN)
+  // On Windows we fix up system absolute paths to look like native ones.
+  // Internally, they'll look like "/C:\foo\bar/"
+  if (dir.is_system_absolute()) {
+    std::string buf = dir.value();
+    if (buf.size() > 3 && buf[2] == ':') {
+      buf.erase(buf.begin());  // Erase beginning slash.
+      return buf;
+    }
+  }
+#endif
+  // On other platforms (and for non-drive paths on Windows) the stored
+  // value is already displayable as-is.
+  return dir.value();
+}
+
+// Forward declaration; defined below.
+void RecursiveCollectChildDeps(const Target* target,
+                               std::set<const Target*>* result);
+
+// Adds |target| plus all of its transitive dependencies (every dependency
+// kind, Target::DEPS_ALL) to |result|. A target already present in |result|
+// is not revisited, which also makes this terminate on dependency cycles.
+void RecursiveCollectDeps(const Target* target,
+                          std::set<const Target*>* result) {
+  if (result->find(target) != result->end())
+    return;  // Already did this target.
+  result->insert(target);
+
+  RecursiveCollectChildDeps(target, result);
+}
+
+// Like RecursiveCollectDeps but does not add |target| itself, only its
+// dependencies (and theirs, recursively).
+void RecursiveCollectChildDeps(const Target* target,
+                               std::set<const Target*>* result) {
+  for (const auto& pair : target->GetDeps(Target::DEPS_ALL))
+    RecursiveCollectDeps(pair.ptr, result);
+}
+
+// Prints dependencies of the given target (not the target itself). If the
+// set is non-null, new targets encountered will be added to the set, and if
+// a dependency is in the set already, it will not be recursed into. When the
+// set is null, all dependencies will be printed.
+void RecursivePrintDeps(const Target* target,
+                        const Label& default_toolchain,
+                        std::set<const Target*>* seen_targets,
+                        int indent_level) {
+  // Combine all deps into one list, sorted by label.
+  std::vector<LabelTargetPair> sorted_deps;
+  for (const auto& pair : target->GetDeps(Target::DEPS_ALL))
+    sorted_deps.push_back(pair);
+  std::sort(sorted_deps.begin(), sorted_deps.end(),
+            LabelPtrLabelLess<Target>());
+
+  // Two spaces of indent per tree level.
+  std::string indent(indent_level * 2, ' ');
+  for (const auto& pair : sorted_deps) {
+    const Target* cur_dep = pair.ptr;
+
+    OutputString(indent +
+                 cur_dep->label().GetUserVisibleName(default_toolchain));
+    bool print_children = true;
+    if (seen_targets) {
+      if (seen_targets->find(cur_dep) == seen_targets->end()) {
+        // New target, mark it visited.
+        seen_targets->insert(cur_dep);
+      } else {
+        // Already seen.
+        print_children = false;
+        // Only print "..." if something is actually elided, which means that
+        // the current target has children.
+        if (!cur_dep->public_deps().empty() ||
+            !cur_dep->private_deps().empty() ||
+            !cur_dep->data_deps().empty())
+          OutputString("...");
+      }
+    }
+
+    OutputString("\n");
+    if (print_children) {
+      RecursivePrintDeps(cur_dep, default_toolchain, seen_targets,
+                         indent_level + 1);
+    }
+  }
+}
+
+// Prints |target|'s dependencies. --tree prints an indented tree (eliding
+// repeats unless --all is also given); --all prints a flat list of all
+// recursive deps; otherwise only direct deps are printed. The flat lists go
+// through the shared FilterAndPrint* helpers so the common target-filtering
+// switches apply.
+void PrintDeps(const Target* target, bool display_header) {
+  const base::CommandLine* cmdline = base::CommandLine::ForCurrentProcess();
+  Label toolchain_label = target->label().GetToolchainLabel();
+
+  // Tree mode is separate.
+  if (cmdline->HasSwitch(kTree)) {
+    if (display_header)
+      OutputString("\nDependency tree:\n");
+
+    if (cmdline->HasSwitch("all")) {
+      // Show all tree deps with no eliding.
+      RecursivePrintDeps(target, toolchain_label, nullptr, 1);
+    } else {
+      // Don't recurse into duplicates.
+      std::set<const Target*> seen_targets;
+      RecursivePrintDeps(target, toolchain_label, &seen_targets, 1);
+    }
+    return;
+  }
+
+  // Collect the deps to display.
+  if (cmdline->HasSwitch("all")) {
+    // Show all dependencies.
+    if (display_header)
+      OutputString("\nAll recursive dependencies:\n");
+
+    std::set<const Target*> all_deps;
+    RecursiveCollectChildDeps(target, &all_deps);
+    FilterAndPrintTargetSet(display_header, all_deps);
+  } else {
+    std::vector<const Target*> deps;
+    // Show direct dependencies only.
+    if (display_header) {
+      OutputString(
+          "\nDirect dependencies "
+          "(try also \"--all\", \"--tree\", or even \"--all --tree\"):\n");
+    }
+    for (const auto& pair : target->GetDeps(Target::DEPS_ALL))
+      deps.push_back(pair.ptr);
+    // NOTE(review): this sorts by pointer value, not by label; presumably
+    // FilterAndPrintTargets orders the output itself -- confirm.
+    std::sort(deps.begin(), deps.end());
+    FilterAndPrintTargets(display_header, &deps);
+  }
+}
+
+// libs and lib_dirs are special in that they're inherited. We don't currently
+// implement a blame feature for this since the bottom-up inheritance makes
+// this difficult.
+
+// Prints the aggregated library search directories, if any.
+void PrintLibDirs(const Target* target, bool display_header) {
+  const OrderedSet<SourceDir>& lib_dirs = target->all_lib_dirs();
+  if (lib_dirs.empty())
+    return;
+
+  if (display_header)
+    OutputString("\nlib_dirs\n");
+
+  for (size_t i = 0; i < lib_dirs.size(); i++)
+    OutputString("    " + FormatSourceDir(lib_dirs[i]) + "\n");
+}
+
+// Prints the aggregated linked libraries, if any.
+void PrintLibs(const Target* target, bool display_header) {
+  const OrderedSet<LibFile>& libs = target->all_libs();
+  if (libs.empty())
+    return;
+
+  if (display_header)
+    OutputString("\nlibs\n");
+
+  for (size_t i = 0; i < libs.size(); i++)
+    OutputString("    " + libs[i].value() + "\n");
+}
+
+// Prints the target's public header list, or a note when the target declares
+// all of its headers public.
+void PrintPublic(const Target* target, bool display_header) {
+  if (display_header)
+    OutputString("\npublic:\n");
+
+  if (target->all_headers_public()) {
+    OutputString("  [All headers listed in the sources are public.]\n");
+    return;
+  }
+
+  Target::FileList public_headers = target->public_headers();
+  std::sort(public_headers.begin(), public_headers.end());
+  for (const auto& hdr : public_headers)
+    OutputString("  " + hdr.value() + "\n");
+}
+
+// Prints whether "gn check" include checking is enabled for this target.
+void PrintCheckIncludes(const Target* target, bool display_header) {
+  if (display_header)
+    OutputString("\ncheck_includes:\n");
+
+  if (target->check_includes())
+    OutputString("  true\n");
+  else
+    OutputString("  false\n");
+}
+
+// Prints the labels of targets permitted to circularly include headers from
+// this one.
+void PrintAllowCircularIncludesFrom(const Target* target, bool display_header) {
+  if (display_header)
+    OutputString("\nallow_circular_includes_from:\n");
+
+  Label toolchain_label = target->label().GetToolchainLabel();
+  for (const auto& cur : target->allow_circular_includes_from())
+    OutputString("  " + cur.GetUserVisibleName(toolchain_label) + "\n");
+}
+
+// Prints the target's visibility specification (indented two spaces).
+void PrintVisibility(const Target* target, bool display_header) {
+  if (display_header)
+    OutputString("\nvisibility:\n");
+
+  OutputString(target->visibility().Describe(2, false));
+}
+
+// Prints whether the target is marked testonly.
+void PrintTestonly(const Target* target, bool display_header) {
+  if (display_header)
+    OutputString("\ntestonly:\n");
+
+  if (target->testonly())
+    OutputString("  true\n");
+  else
+    OutputString("  false\n");
+}
+
+// Recursively prints subconfigs of a config. |indent_level| is the current
+// nesting depth; each level indents the printed label by two spaces.
+void PrintSubConfigs(const Config* config, int indent_level) {
+  if (config->configs().empty())
+    return;
+
+  std::string indent(indent_level * 2, ' ');
+  Label toolchain_label = config->label().GetToolchainLabel();
+  for (const auto& pair : config->configs()) {
+    OutputString(
+        indent + pair.label.GetUserVisibleName(toolchain_label) + "\n");
+    PrintSubConfigs(pair.ptr, indent_level + 1);
+  }
+}
+
+// This allows configs stored as either std::vector<LabelConfigPair> or
+// UniqueVector<LabelConfigPair> to be printed. With --tree, each config's
+// subconfigs are printed recursively underneath it.
+template <class VectorType>
+void PrintConfigsVector(const Target* target,
+                        const VectorType& configs,
+                        const std::string& heading,
+                        bool display_header) {
+  if (configs.empty())
+    return;
+
+  bool tree = base::CommandLine::ForCurrentProcess()->HasSwitch(kTree);
+
+  // Don't sort since the order determines how things are processed.
+  if (display_header) {
+    if (tree)
+      OutputString("\n" + heading + " tree (in order applying):\n");
+    else
+      OutputString("\n" + heading + " (in order applying, try also --tree):\n");
+  }
+
+  Label toolchain_label = target->label().GetToolchainLabel();
+  for (const auto& config : configs) {
+    OutputString("  " + config.label.GetUserVisibleName(toolchain_label) +
+                 "\n");
+    if (tree)
+      PrintSubConfigs(config.ptr, 2);  // 2 = start with double-indent.
+  }
+}
+
+// Prints the target's "configs" list.
+void PrintConfigs(const Target* target, bool display_header) {
+  PrintConfigsVector(target, target->configs().vector(), "configs",
+                     display_header);
+}
+
+// Prints the target's "public_configs" list.
+void PrintPublicConfigs(const Target* target, bool display_header) {
+  PrintConfigsVector(target, target->public_configs(),
+                     "public_configs", display_header);
+}
+
+// Prints the target's "all_dependent_configs" list.
+void PrintAllDependentConfigs(const Target* target, bool display_header) {
+  PrintConfigsVector(target, target->all_dependent_configs(),
+                     "all_dependent_configs", display_header);
+}
+
+// Prints |files| sorted, under the given |header| (when |display_header| is
+// set). |indent_extra| selects the deeper four-space indent used when the
+// list is nested under another heading.
+void PrintFileList(const Target::FileList& files,
+                   const std::string& header,
+                   bool indent_extra,
+                   bool display_header) {
+  if (files.empty())
+    return;
+
+  if (display_header)
+    OutputString("\n" + header + ":\n");
+
+  std::string indent = indent_extra ? "    " : "  ";
+
+  Target::FileList sorted = files;
+  std::sort(sorted.begin(), sorted.end());
+  for (const auto& elem : sorted)
+    OutputString(indent + elem.value() + "\n");
+}
+
+// Prints the target's source files.
+void PrintSources(const Target* target, bool display_header) {
+  PrintFileList(target->sources(), "sources", false, display_header);
+}
+
+// Prints the target's additional input dependencies.
+void PrintInputs(const Target* target, bool display_header) {
+  PrintFileList(target->inputs(), "inputs", false, display_header);
+}
+
+// Prints the target's output files. Actions print their declared outputs
+// directly; create_bundle targets print the bundle's computed output files;
+// other types print the output substitution pattern (when substitutions are
+// present) plus the file list it resolves to when applied to the sources.
+void PrintOutputs(const Target* target, bool display_header) {
+  if (display_header)
+    OutputString("\noutputs:\n");
+
+  if (target->output_type() == Target::ACTION) {
+    // Action, print out outputs, don't apply sources to it.
+    for (const auto& elem : target->action_values().outputs().list()) {
+      OutputString("  " + elem.AsString() + "\n");
+    }
+  } else if (target->output_type() == Target::CREATE_BUNDLE) {
+    std::vector<SourceFile> output_files;
+    target->bundle_data().GetOutputsAsSourceFiles(target->settings(),
+                                                  &output_files);
+    PrintFileList(output_files, "", true, false);
+  } else {
+    const SubstitutionList& outputs = target->action_values().outputs();
+    if (!outputs.required_types().empty()) {
+      // Display the pattern and resolved pattern separately, since there are
+      // substitutions used.
+      OutputString("  Output pattern:\n");
+      for (const auto& elem : outputs.list())
+        OutputString("    " + elem.AsString() + "\n");
+
+      // Now display what that resolves to given the sources.
+      OutputString("\n  Resolved output file list:\n");
+    }
+
+    // Resolved output list.
+    std::vector<SourceFile> output_files;
+    SubstitutionWriter::ApplyListToSources(target->settings(), outputs,
+                                           target->sources(), &output_files);
+    PrintFileList(output_files, "", true, false);
+  }
+}
+
+// Prints the script an action runs.
+void PrintScript(const Target* target, bool display_header) {
+  if (display_header)
+    OutputString("\nscript:\n");
+  OutputString("  " + target->action_values().script().value() + "\n");
+}
+
+// Prints the command-line arguments passed to an action's script.
+void PrintArgs(const Target* target, bool display_header) {
+  if (display_header)
+    OutputString("\nargs:\n");
+  for (const auto& elem : target->action_values().args().list()) {
+    OutputString("  " + elem.AsString() + "\n");
+  }
+}
+
+// Prints an action's depfile, if one is declared.
+void PrintDepfile(const Target* target, bool display_header) {
+  if (target->action_values().depfile().empty())
+    return;
+  if (display_header)
+    OutputString("\ndepfile:\n");
+  OutputString("  " + target->action_values().depfile().AsString() + "\n");
+}
+
+// Prints the file:line location at which a dependency was added, in the
+// form " (Added by foo/BUILD.gn:12)". Does nothing if |origin| is null.
+void OutputSourceOfDep(const ParseNode* origin, std::ostream& out) {
+  if (!origin)
+    return;
+  Location location = origin->GetRange().begin();
+  out << "       (Added by " + location.file()->name().value() << ":"
+      << location.line_number() << ")\n";
+}
+
+// Templatized writer for writing out different config value types.
+template<typename T> struct DescValueWriter {};
+template<> struct DescValueWriter<std::string> {
+  // Plain string values are printed with a four-space indent.
+  void operator()(const std::string& str, std::ostream& out) const {
+    out << "    " << str << "\n";
+  }
+};
+template<> struct DescValueWriter<SourceDir> {
+  // Directories go through FormatSourceDir for platform-friendly display.
+  void operator()(const SourceDir& dir, std::ostream& out) const {
+    out << "    " << FormatSourceDir(dir) << "\n";
+  }
+};
+
+// Writes a given config value type to the string, optionally with attribution.
+// This should match RecursiveTargetConfigToStream in the order it traverses.
+// |getter| selects which ConfigValues member to print; --blame annotates
+// each group of values with the config (and where it was added) that
+// contributed them.
+template<typename T> void OutputRecursiveTargetConfig(
+    const Target* target,
+    const char* header_name,
+    const std::vector<T>& (ConfigValues::* getter)() const) {
+  bool display_blame =
+      base::CommandLine::ForCurrentProcess()->HasSwitch(kBlame);
+
+  DescValueWriter<T> writer;
+  std::ostringstream out;
+
+  // Iterates the target's own values plus those of every applying config.
+  for (ConfigValuesIterator iter(target); !iter.done(); iter.Next()) {
+    // Skip sources that contribute no values for this member.
+    if ((iter.cur().*getter)().empty())
+      continue;
+
+    // Optional blame sub-head.
+    if (display_blame) {
+      const Config* config = iter.GetCurrentConfig();
+      if (config) {
+        // Source of this value is a config.
+        out << "  From " << config->label().GetUserVisibleName(false) << "\n";
+        OutputSourceOfDep(iter.origin(), out);
+      } else {
+        // Source of this value is the target itself.
+        out << "  From " << target->label().GetUserVisibleName(false) << "\n";
+      }
+    }
+
+    // Actual values.
+    ConfigValuesToStream(iter.cur(), getter, writer, out);
+  }
+
+  // Only emit the section header when at least one value was written.
+  std::string out_str = out.str();
+  if (!out_str.empty()) {
+    OutputString("\n" + std::string(header_name) + "\n");
+    OutputString(out_str);
+  }
+}
+
+// Prints the computed runtime deps for |target|, one file name per line.
+// With --blame, each run of files contributed by the same dependency is
+// prefixed with a "From <target>" line.
+void PrintRuntimeDeps(const Target* target) {
+  bool display_blame =
+      base::CommandLine::ForCurrentProcess()->HasSwitch(kBlame);
+  Label toolchain = target->label().GetToolchainLabel();
+
+  // NOTE(review): NULL rather than nullptr; the rest of this file uses
+  // nullptr -- candidate cleanup.
+  const Target* previous_from = NULL;
+  for (const auto& pair : ComputeRuntimeDeps(target)) {
+    if (display_blame) {
+      // Generally a target's runtime deps will be listed sequentially, so
+      // group them and don't duplicate the "from" label for two in a row.
+      if (previous_from == pair.second) {
+        OutputString("  ");  // Just indent.
+      } else {
+        previous_from = pair.second;
+        OutputString("From ");
+        OutputString(pair.second->label().GetUserVisibleName(toolchain));
+        OutputString("\n  ");  // Make the file name indented.
+      }
+    }
+    OutputString(pair.first.value());
+    OutputString("\n");
+  }
+}
+
+} // namespace
+
+// desc ------------------------------------------------------------------------
+
+// Command name and help text for "gn desc" (looked up by the gn command
+// dispatcher). The TARGET_*_COMMAND_LINE_HELP tokens below are macros that
+// expand to help text shared with the other target-listing commands.
+const char kDesc[] = "desc";
+const char kDesc_HelpShort[] =
+    "desc: Show lots of insightful information about a target.";
+const char kDesc_Help[] =
+    "gn desc <out_dir> <target label> [<what to show>] [--blame]\n"
+    "\n"
+    "  Displays information about a given labeled target for the given build.\n"
+    "  The build parameters will be taken for the build in the given\n"
+    "  <out_dir>.\n"
+    "\n"
+    "Possibilities for <what to show>\n"
+    "  (If unspecified an overall summary will be displayed.)\n"
+    "\n"
+    "  sources\n"
+    "      Source files.\n"
+    "\n"
+    "  inputs\n"
+    "      Additional input dependencies.\n"
+    "\n"
+    "  public\n"
+    "      Public header files.\n"
+    "\n"
+    "  check_includes\n"
+    "      Whether \"gn check\" checks this target for include usage.\n"
+    "\n"
+    "  allow_circular_includes_from\n"
+    "      Permit includes from these targets.\n"
+    "\n"
+    "  visibility\n"
+    "      Prints which targets can depend on this one.\n"
+    "\n"
+    "  testonly\n"
+    "      Whether this target may only be used in tests.\n"
+    "\n"
+    "  configs\n"
+    "      Shows configs applied to the given target, sorted in the order\n"
+    "      they're specified. This includes both configs specified in the\n"
+    "      \"configs\" variable, as well as configs pushed onto this target\n"
+    "      via dependencies specifying \"all\" or \"direct\" dependent\n"
+    "      configs.\n"
+    "\n"
+    "  deps\n"
+    "      Show immediate or recursive dependencies. See below for flags that\n"
+    "      control deps printing.\n"
+    "\n"
+    "  public_configs\n"
+    "  all_dependent_configs\n"
+    "      Shows the labels of configs applied to targets that depend on this\n"
+    "      one (either directly or all of them).\n"
+    "\n"
+    "  script\n"
+    "  args\n"
+    "  depfile\n"
+    "      Actions only. The script and related values.\n"
+    "\n"
+    "  outputs\n"
+    "      Outputs for script and copy target types.\n"
+    "\n"
+    "  defines       [--blame]\n"
+    "  include_dirs  [--blame]\n"
+    "  cflags        [--blame]\n"
+    "  cflags_cc     [--blame]\n"
+    "  cflags_cxx    [--blame]\n"
+    "  ldflags       [--blame]\n"
+    "  lib_dirs\n"
+    "  libs\n"
+    "      Shows the given values taken from the target and all configs\n"
+    "      applying. See \"--blame\" below.\n"
+    "\n"
+    "  runtime_deps\n"
+    "      Compute all runtime deps for the given target. This is a\n"
+    "      computed list and does not correspond to any GN variable, unlike\n"
+    "      most other values here.\n"
+    "\n"
+    "      The output is a list of file names relative to the build\n"
+    "      directory. See \"gn help runtime_deps\" for how this is computed.\n"
+    "      This also works with \"--blame\" to see the source of the\n"
+    "      dependency.\n"
+    "\n"
+    "Shared flags\n"
+    "\n"
+    "  --blame\n"
+    "      Used with any value specified by a config, this will name\n"
+    "      the config that specified the value. This doesn't currently work\n"
+    "      for libs and lib_dirs because those are inherited and are more\n"
+    "      complicated to figure out the blame (patches welcome).\n"
+    "\n"
+    "Flags that control how deps are printed\n"
+    "\n"
+    "  --all\n"
+    "      Collects all recursive dependencies and prints a sorted flat list.\n"
+    "      Also usable with --tree (see below).\n"
+    "\n"
+    TARGET_PRINTING_MODE_COMMAND_LINE_HELP
+    "\n"
+    TARGET_TESTONLY_FILTER_COMMAND_LINE_HELP
+    "\n"
+    "  --tree\n"
+    "      Print a dependency tree. By default, duplicates will be elided\n"
+    "      with \"...\" but when --all and -tree are used together, no\n"
+    "      eliding will be performed.\n"
+    "\n"
+    "      The \"deps\", \"public_deps\", and \"data_deps\" will all be\n"
+    "      included in the tree.\n"
+    "\n"
+    "      Tree output can not be used with the filtering or output flags:\n"
+    "      --as, --type, --testonly.\n"
+    "\n"
+    TARGET_TYPE_FILTER_COMMAND_LINE_HELP
+    "\n"
+    "Note\n"
+    "\n"
+    "  This command will show the full name of directories and source files,\n"
+    "  but when directories and source paths are written to the build file,\n"
+    "  they will be adjusted to be relative to the build directory. So the\n"
+    "  values for paths displayed by this command won't match (but should\n"
+    "  mean the same thing).\n"
+    "\n"
+    "Examples\n"
+    "\n"
+    "  gn desc out/Debug //base:base\n"
+    "      Summarizes the given target.\n"
+    "\n"
+    "  gn desc out/Foo :base_unittests deps --tree\n"
+    "      Shows a dependency tree of the \"base_unittests\" project in\n"
+    "      the current directory.\n"
+    "\n"
+    "  gn desc out/Debug //base defines --blame\n"
+    "      Shows defines set for the //base:base target, annotated by where\n"
+    "      each one was set from.\n";
+
+// Expands to a statement that prints the ConfigValues member |name| (of C++
+// type |type|) aggregated from the target and every config that applies.
+#define OUTPUT_CONFIG_VALUE(name, type) \
+    OutputRecursiveTargetConfig<type>(target, #name, &ConfigValues::name);
+
+// Entry point for "gn desc". With two arguments prints a full summary of
+// the target; with a third argument prints only the requested section (see
+// kDesc_Help above). Returns 0 on success, 1 on any error.
+int RunDesc(const std::vector<std::string>& args) {
+  if (args.size() != 2 && args.size() != 3) {
+    Err(Location(), "You're holding it wrong.",
+        "Usage: \"gn desc <out_dir> <target_name> [<what to display>]\"")
+        .PrintToStdout();
+    return 1;
+  }
+
+  // Deliberately leaked to avoid expensive process teardown.
+  Setup* setup = new Setup;
+  setup->build_settings().set_check_for_bad_items(false);
+  if (!setup->DoSetup(args[0], false))
+    return 1;
+  if (!setup->Run())
+    return 1;
+
+  const Target* target = ResolveTargetFromCommandLineString(setup, args[1]);
+  if (!target)
+    return 1;
+
+// Extends the if/else-if chain below. Note that the leading "}" closes the
+// previous branch, so each expansion adds another "else if" case.
+#define CONFIG_VALUE_HANDLER(name, type) \
+    } else if (what == #name) { OUTPUT_CONFIG_VALUE(name, type)
+
+  if (args.size() == 3) {
+    // User specified one thing to display.
+    const std::string& what = args[2];
+    if (what == variables::kConfigs) {
+      PrintConfigs(target, false);
+    } else if (what == variables::kPublicConfigs) {
+      PrintPublicConfigs(target, false);
+    } else if (what == variables::kAllDependentConfigs) {
+      PrintAllDependentConfigs(target, false);
+    } else if (what == variables::kSources) {
+      PrintSources(target, false);
+    } else if (what == variables::kPublic) {
+      PrintPublic(target, false);
+    } else if (what == variables::kCheckIncludes) {
+      PrintCheckIncludes(target, false);
+    } else if (what == variables::kAllowCircularIncludesFrom) {
+      PrintAllowCircularIncludesFrom(target, false);
+    } else if (what == variables::kVisibility) {
+      PrintVisibility(target, false);
+    } else if (what == variables::kTestonly) {
+      PrintTestonly(target, false);
+    } else if (what == variables::kInputs) {
+      PrintInputs(target, false);
+    } else if (what == variables::kScript) {
+      PrintScript(target, false);
+    } else if (what == variables::kArgs) {
+      PrintArgs(target, false);
+    } else if (what == variables::kDepfile) {
+      PrintDepfile(target, false);
+    } else if (what == variables::kOutputs) {
+      PrintOutputs(target, false);
+    } else if (what == variables::kDeps) {
+      PrintDeps(target, false);
+    } else if (what == variables::kLibDirs) {
+      PrintLibDirs(target, false);
+    } else if (what == variables::kLibs) {
+      PrintLibs(target, false);
+    } else if (what == "runtime_deps") {
+      PrintRuntimeDeps(target);
+
+    CONFIG_VALUE_HANDLER(defines, std::string)
+    CONFIG_VALUE_HANDLER(include_dirs, SourceDir)
+    CONFIG_VALUE_HANDLER(asmflags, std::string)
+    CONFIG_VALUE_HANDLER(cflags, std::string)
+    CONFIG_VALUE_HANDLER(cflags_c, std::string)
+    CONFIG_VALUE_HANDLER(cflags_cc, std::string)
+    CONFIG_VALUE_HANDLER(cflags_objc, std::string)
+    CONFIG_VALUE_HANDLER(cflags_objcc, std::string)
+    CONFIG_VALUE_HANDLER(ldflags, std::string)
+
+    } else {
+      OutputString("Don't know how to display \"" + what + "\".\n");
+      return 1;
+    }
+
+#undef CONFIG_VALUE_HANDLER
+    return 0;
+  }
+
+  // Display summary.
+
+  // Some sections below only apply to binary targets (not groups, copies,
+  // actions, bundle data, or bundles).
+  bool is_binary_output =
+      target->output_type() != Target::GROUP &&
+      target->output_type() != Target::COPY_FILES &&
+      target->output_type() != Target::ACTION &&
+      target->output_type() != Target::ACTION_FOREACH &&
+      target->output_type() != Target::BUNDLE_DATA &&
+      target->output_type() != Target::CREATE_BUNDLE;
+
+  // Generally we only want to display toolchains on labels when the toolchain
+  // is different than the default one for this target (which we always print
+  // in the header).
+  Label target_toolchain = target->label().GetToolchainLabel();
+
+  // Header.
+  OutputString("Target: ", DECORATION_YELLOW);
+  OutputString(target->label().GetUserVisibleName(false) + "\n");
+  OutputString("Type: ", DECORATION_YELLOW);
+  OutputString(std::string(
+      Target::GetStringForOutputType(target->output_type())) + "\n");
+  OutputString("Toolchain: ", DECORATION_YELLOW);
+  OutputString(target_toolchain.GetUserVisibleName(false) + "\n");
+
+  PrintSources(target, true);
+  if (is_binary_output) {
+    PrintPublic(target, true);
+    PrintCheckIncludes(target, true);
+    PrintAllowCircularIncludesFrom(target, true);
+  }
+  PrintVisibility(target, true);
+  if (is_binary_output) {
+    PrintTestonly(target, true);
+    PrintConfigs(target, true);
+  }
+
+  PrintPublicConfigs(target, true);
+  PrintAllDependentConfigs(target, true);
+
+  PrintInputs(target, true);
+
+  if (is_binary_output) {
+    OUTPUT_CONFIG_VALUE(defines, std::string)
+    OUTPUT_CONFIG_VALUE(include_dirs, SourceDir)
+    OUTPUT_CONFIG_VALUE(asmflags, std::string)
+    OUTPUT_CONFIG_VALUE(cflags, std::string)
+    OUTPUT_CONFIG_VALUE(cflags_c, std::string)
+    OUTPUT_CONFIG_VALUE(cflags_cc, std::string)
+    OUTPUT_CONFIG_VALUE(cflags_objc, std::string)
+    OUTPUT_CONFIG_VALUE(cflags_objcc, std::string)
+    OUTPUT_CONFIG_VALUE(ldflags, std::string)
+  }
+
+  // Script-related sections only make sense for actions.
+  if (target->output_type() == Target::ACTION ||
+      target->output_type() == Target::ACTION_FOREACH) {
+    PrintScript(target, true);
+    PrintArgs(target, true);
+    PrintDepfile(target, true);
+  }
+
+  if (target->output_type() == Target::ACTION ||
+      target->output_type() == Target::ACTION_FOREACH ||
+      target->output_type() == Target::COPY_FILES ||
+      target->output_type() == Target::CREATE_BUNDLE) {
+    PrintOutputs(target, true);
+  }
+
+  // Libs can be part of any target and get recursively pushed up the chain,
+  // so always display them, even for groups and such.
+  PrintLibs(target, true);
+  PrintLibDirs(target, true);
+
+  PrintDeps(target, true);
+
+  return 0;
+}
+
+} // namespace commands
diff --git a/chromium/tools/gn/command_format.cc b/chromium/tools/gn/command_format.cc
new file mode 100644
index 00000000000..a054b21b9b9
--- /dev/null
+++ b/chromium/tools/gn/command_format.cc
@@ -0,0 +1,1077 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include <sstream>
+
+#include "base/command_line.h"
+#include "base/files/file_util.h"
+#include "base/macros.h"
+#include "base/strings/string_split.h"
+#include "base/strings/string_util.h"
+#include "tools/gn/commands.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/parser.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/setup.h"
+#include "tools/gn/source_file.h"
+#include "tools/gn/tokenizer.h"
+
+namespace commands {
+
// Command-line switch names for `gn format`.
const char kSwitchDryRun[] = "dry-run";
const char kSwitchDumpTree[] = "dump-tree";
const char kSwitchInPlace[] = "in-place";
const char kSwitchStdin[] = "stdin";

// Command name and help text shown by `gn help format`.
const char kFormat[] = "format";
const char kFormat_HelpShort[] =
    "format: Format .gn file.";
const char kFormat_Help[] =
    "gn format [--dump-tree] [--in-place] [--stdin] BUILD.gn\n"
    "\n"
    "  Formats .gn file to a standard format.\n"
    "\n"
    "  The contents of some lists ('sources', 'deps', etc.) will be sorted to\n"
    "  a canonical order. To suppress this, you can add a comment of the form\n"
    // Fixed typo in user-facing help: "preceeding" -> "preceding".
    "  \"# NOSORT\" immediately preceding the assignment. e.g.\n"
    "\n"
    "  # NOSORT\n"
    "  sources = [\n"
    "    \"z.cc\",\n"
    "    \"a.cc\",\n"
    "  ]\n"
    "\n"
    "Arguments\n"
    "  --dry-run\n"
    "      Does not change or output anything, but sets the process exit code\n"
    "      based on whether output would be different than what's on disk.\n"
    "      This is useful for presubmit/lint-type checks.\n"
    "      - Exit code 0: successful format, matches on disk.\n"
    "      - Exit code 1: general failure (parse error, etc.)\n"
    "      - Exit code 2: successful format, but differs from on disk.\n"
    "\n"
    "  --dump-tree\n"
    "      For debugging only, dumps the parse tree.\n"
    "\n"
    "  --in-place\n"
    "      Instead of writing the formatted file to stdout, replace the input\n"
    "      file with the formatted output. If no reformatting is required,\n"
    "      the input file will not be touched, and nothing printed.\n"
    "\n"
    "  --stdin\n"
    "      Read input from stdin (and write to stdout). Not compatible with\n"
    "      --in-place of course.\n"
    "\n"
    "Examples\n"
    "  gn format //some/BUILD.gn\n"
    "  gn format some\\BUILD.gn\n"
    "  gn format /abspath/some/BUILD.gn\n"
    "  gn format --stdin\n";
+
+namespace {
+
// Layout parameters: indentation step and the column limit the formatter
// tries to stay within.
const int kIndentSize = 2;
const int kMaximumWidth = 80;

// Relative costs used when scoring candidate layouts; lower total penalty
// wins. Exceeding the maximum width is by far the most expensive, so the
// formatter strongly prefers breaking lines over overflowing them.
const int kPenaltyLineBreak = 500;
const int kPenaltyHorizontalSeparation = 100;
const int kPenaltyExcess = 10000;
const int kPenaltyBrokenLineOnOneLiner = 5000;

// Operator precedence levels for GN expressions, lowest binding first.
// Used to decide when parentheses must be (re)inserted while printing.
enum Precedence {
  kPrecedenceLowest,
  kPrecedenceAssign,
  kPrecedenceOr,
  kPrecedenceAnd,
  kPrecedenceCompare,
  kPrecedenceAdd,
  kPrecedenceUnary,
  kPrecedenceSuffix,
};
+
+int CountLines(const std::string& str) {
+ return static_cast<int>(base::SplitStringPiece(
+ str, "\n", base::KEEP_WHITESPACE, base::SPLIT_WANT_ALL).size());
+}
+
// Pretty-printer for a GN parse tree. Builds the formatted text into an
// internal buffer; measurement of alternative layouts is done by cloning the
// printer (see InitializeSub) and scoring each candidate with penalties.
class Printer {
 public:
  Printer();
  ~Printer();

  // Prints a whole file (block of statements) into the output buffer.
  void Block(const ParseNode* file);

  // Returns the accumulated formatted output.
  std::string String() const { return output_; }

 private:
  // Format a list of values using the given style.
  enum SequenceStyle {
    kSequenceStyleList,         // Bracketed [ ... ] list.
    kSequenceStyleBlock,        // Bare sequence of statements.
    kSequenceStyleBracedBlock,  // Braced { ... } block.
  };

  // Measurements of a formatted chunk; -1 means "not computed".
  struct Metrics {
    Metrics() : first_length(-1), longest_length(-1), multiline(false) {}
    int first_length;
    int longest_length;
    bool multiline;
  };

  // Add to output.
  void Print(base::StringPiece str);

  // Add the current margin (as spaces) to the output.
  void PrintMargin();

  void TrimAndPrintToken(const Token& token);

  // End the current line, flushing end of line comments.
  void Newline();

  // Remove trailing spaces from the current line.
  void Trim();

  // Whether there's a blank separator line at the current position.
  bool HaveBlankLine();

  // Flag assignments to sources, deps, etc. to make their RHSs multiline.
  void AnnotatePreferredMultilineAssignment(const BinaryOpNode* binop);

  // Sort a list on the RHS if the LHS is 'sources', 'deps' or 'public_deps'.
  // The 'sources' are sorted alphabetically while the 'deps' and 'public_deps'
  // are sorted putting first the relative targets and then the global ones
  // (both sorted alphabetically).
  void SortIfSourcesOrDeps(const BinaryOpNode* binop);

  // Heuristics to decide if there should be a blank line added between two
  // items. For various "small" items, it doesn't look nice if there's too much
  // vertical whitespace added.
  bool ShouldAddBlankLineInBetween(const ParseNode* a, const ParseNode* b);

  // Get the 0-based x position on the current line.
  int CurrentColumn() const;

  // Get the current line in the output.
  int CurrentLine() const;

  // Adds an opening ( if prec is less than the outer's (to maintain evaluation
  // order for a subexpression). If an opening paren is emitted, *parenthesized
  // will be set so it can be closed at the end of the expression.
  void AddParen(int prec, int outer_prec, bool* parenthesized);

  // Print the expression to the output buffer. Returns the type of element
  // added to the output. The value of outer_prec gives the precedence of the
  // operator outside this Expr. If that operator binds tighter than root's,
  // Expr must introduce parentheses.
  int Expr(const ParseNode* root, int outer_prec, const std::string& suffix);

  // Generic penalties for exceeding maximum width, adding more lines, etc.
  int AssessPenalty(const std::string& output);

  // Tests if any lines exceed the maximum width.
  bool ExceedsMaximumWidth(const std::string& output);

  // Format a list of values using the given style.
  // |end| holds any trailing comments to be printed just before the closing
  // bracket.
  template <class PARSENODE>  // Just for const covariance.
  void Sequence(SequenceStyle style,
                const std::vector<PARSENODE*>& list,
                const ParseNode* end,
                bool force_multiline);

  // Returns the penalty.
  int FunctionCall(const FunctionCallNode* func_call,
                   const std::string& suffix);

  // Create a clone of this Printer in a similar state (other than the output,
  // but including margins, etc.) to be used for dry run measurements.
  void InitializeSub(Printer* sub);

  template <class PARSENODE>
  bool ListWillBeMultiline(const std::vector<PARSENODE*>& list,
                           const ParseNode* end);

  std::string output_;           // Output buffer.
  std::vector<Token> comments_;  // Pending end-of-line comments.
  int margin() const { return stack_.back().margin; }

  // Nesting depth of Expr() calls; scales the per-line-break penalty so
  // breaks deep inside an expression cost more.
  int penalty_depth_;
  int GetPenaltyForLineBreak() const {
    return penalty_depth_ * kPenaltyLineBreak;
  }

  struct IndentState {
    IndentState()
        : margin(0),
          continuation_requires_indent(false),
          parent_is_boolean_or(false) {}
    IndentState(int margin,
                bool continuation_requires_indent,
                bool parent_is_boolean_or)
        : margin(margin),
          continuation_requires_indent(continuation_requires_indent),
          parent_is_boolean_or(parent_is_boolean_or) {}

    // The left margin (number of spaces).
    int margin;

    // Whether a continuation line inside this scope gets an extra indent.
    bool continuation_requires_indent;

    // True when the enclosing binary operator is ||; used to add clarifying
    // parentheses around a nested &&.
    bool parent_is_boolean_or;
  };
  // Stack used to track the current indentation state; the innermost scope
  // is stack_.back().
  std::vector<IndentState> stack_;

  // Gives the precedence for operators in a BinaryOpNode.
  std::map<base::StringPiece, Precedence> precedence_;

  DISALLOW_COPY_AND_ASSIGN(Printer);
};
+
+Printer::Printer() : penalty_depth_(0) {
+ output_.reserve(100 << 10);
+ precedence_["="] = kPrecedenceAssign;
+ precedence_["+="] = kPrecedenceAssign;
+ precedence_["-="] = kPrecedenceAssign;
+ precedence_["||"] = kPrecedenceOr;
+ precedence_["&&"] = kPrecedenceAnd;
+ precedence_["<"] = kPrecedenceCompare;
+ precedence_[">"] = kPrecedenceCompare;
+ precedence_["=="] = kPrecedenceCompare;
+ precedence_["!="] = kPrecedenceCompare;
+ precedence_["<="] = kPrecedenceCompare;
+ precedence_[">="] = kPrecedenceCompare;
+ precedence_["+"] = kPrecedenceAdd;
+ precedence_["-"] = kPrecedenceAdd;
+ precedence_["!"] = kPrecedenceUnary;
+ stack_.push_back(IndentState());
+}
+
+Printer::~Printer() {
+}
+
+void Printer::Print(base::StringPiece str) {
+ str.AppendToString(&output_);
+}
+
+void Printer::PrintMargin() {
+ output_ += std::string(margin(), ' ');
+}
+
+void Printer::TrimAndPrintToken(const Token& token) {
+ std::string trimmed;
+ TrimWhitespaceASCII(token.value().as_string(), base::TRIM_ALL, &trimmed);
+ Print(trimmed);
+}
+
// Ends the current line: flushes any queued end-of-line comments (vertically
// aligned under the first one), trims trailing spaces, then emits the newline
// and the next line's margin.
void Printer::Newline() {
  if (!comments_.empty()) {
    Print(" ");
    // Save the margin, and temporarily set it to where the first comment
    // starts so that multiple suffix comments are vertically aligned. This
    // will need to be fancier once we enforce 80 col.
    stack_.push_back(IndentState(CurrentColumn(), false, false));
    int i = 0;
    for (const auto& c : comments_) {
      if (i > 0) {
        Trim();
        Print("\n");
        PrintMargin();
      }
      TrimAndPrintToken(c);
      ++i;
    }
    stack_.pop_back();
    comments_.clear();
  }
  Trim();
  Print("\n");
  PrintMargin();
}
+
+void Printer::Trim() {
+ size_t n = output_.size();
+ while (n > 0 && output_[n - 1] == ' ')
+ --n;
+ output_.resize(n);
+}
+
+bool Printer::HaveBlankLine() {
+ size_t n = output_.size();
+ while (n > 0 && output_[n - 1] == ' ')
+ --n;
+ return n > 2 && output_[n - 1] == '\n' && output_[n - 2] == '\n';
+}
+
+void Printer::AnnotatePreferredMultilineAssignment(const BinaryOpNode* binop) {
+ const IdentifierNode* ident = binop->left()->AsIdentifier();
+ const ListNode* list = binop->right()->AsList();
+ // This is somewhat arbitrary, but we include the 'deps'- and 'sources'-like
+ // things, but not flags things.
+ if (binop->op().value() == "=" && ident && list) {
+ const base::StringPiece lhs = ident->value().value();
+ if (lhs == "data" || lhs == "datadeps" || lhs == "data_deps" ||
+ lhs == "deps" || lhs == "inputs" || lhs == "outputs" ||
+ lhs == "public" || lhs == "public_deps" || lhs == "sources") {
+ const_cast<ListNode*>(list)->set_prefer_multiline(true);
+ }
+ }
+}
+
+void Printer::SortIfSourcesOrDeps(const BinaryOpNode* binop) {
+ if (binop && binop->comments() && !binop->comments()->before().empty() &&
+ binop->comments()->before()[0].value().as_string() == "# NOSORT") {
+ // Allow disabling of sort for specific actions that might be
+ // order-sensitive.
+ return;
+ }
+ const IdentifierNode* ident = binop->left()->AsIdentifier();
+ const ListNode* list = binop->right()->AsList();
+ if ((binop->op().value() == "=" || binop->op().value() == "+=" ||
+ binop->op().value() == "-=") &&
+ ident && list) {
+ const base::StringPiece lhs = ident->value().value();
+ if (lhs == "sources")
+ const_cast<ListNode*>(list)->SortAsStringsList();
+ else if (lhs == "deps" || lhs == "public_deps")
+ const_cast<ListNode*>(list)->SortAsDepsList();
+ }
+}
+
+bool Printer::ShouldAddBlankLineInBetween(const ParseNode* a,
+ const ParseNode* b) {
+ LocationRange a_range = a->GetRange();
+ LocationRange b_range = b->GetRange();
+ // If they're already separated by 1 or more lines, then we want to keep a
+ // blank line.
+ return (b_range.begin().line_number() > a_range.end().line_number() + 1) ||
+ // Always put a blank line before a block comment.
+ b->AsBlockComment();
+}
+
+int Printer::CurrentColumn() const {
+ int n = 0;
+ while (n < static_cast<int>(output_.size()) &&
+ output_[output_.size() - 1 - n] != '\n') {
+ ++n;
+ }
+ return n;
+}
+
+int Printer::CurrentLine() const {
+ int count = 1;
+ for (const char* p = output_.c_str(); (p = strchr(p, '\n')) != nullptr;) {
+ ++count;
+ ++p;
+ }
+ return count;
+}
+
// Prints a block of statements: leading comments, each statement (with its
// trailing comments), blank-line separation heuristics, and closing comments.
void Printer::Block(const ParseNode* root) {
  const BlockNode* block = root->AsBlock();

  if (block->comments()) {
    for (const auto& c : block->comments()->before()) {
      TrimAndPrintToken(c);
      Newline();
    }
  }

  size_t i = 0;
  for (const auto& stmt : block->statements()) {
    Expr(stmt, kPrecedenceLowest, std::string());
    Newline();
    if (stmt->comments()) {
      // Why are before() not printed here too? before() are handled inside
      // Expr(), as are suffix() which are queued to the next Newline().
      // However, because it's a general expression handler, it doesn't insert
      // the newline itself, which only happens between block statements. So,
      // the after are handled explicitly here.
      for (const auto& c : stmt->comments()->after()) {
        TrimAndPrintToken(c);
        Newline();
      }
    }
    if (i < block->statements().size() - 1 &&
        (ShouldAddBlankLineInBetween(block->statements()[i],
                                     block->statements()[i + 1]))) {
      Newline();
    }
    ++i;
  }

  if (block->comments()) {
    for (const auto& c : block->comments()->after()) {
      TrimAndPrintToken(c);
      Newline();
    }
  }
}
+
+int Printer::AssessPenalty(const std::string& output) {
+ int penalty = 0;
+ std::vector<std::string> lines = base::SplitString(
+ output, "\n", base::KEEP_WHITESPACE, base::SPLIT_WANT_ALL);
+ penalty += static_cast<int>(lines.size() - 1) * GetPenaltyForLineBreak();
+ for (const auto& line : lines) {
+ if (line.size() > kMaximumWidth)
+ penalty += static_cast<int>(line.size() - kMaximumWidth) * kPenaltyExcess;
+ }
+ return penalty;
+}
+
+bool Printer::ExceedsMaximumWidth(const std::string& output) {
+ for (const auto& line : base::SplitString(
+ output, "\n", base::KEEP_WHITESPACE, base::SPLIT_WANT_ALL)) {
+ if (line.size() > kMaximumWidth)
+ return true;
+ }
+ return false;
+}
+
+void Printer::AddParen(int prec, int outer_prec, bool* parenthesized) {
+ if (prec < outer_prec) {
+ Print("(");
+ *parenthesized = true;
+ }
+}
+
// Prints the expression rooted at |root|, appending |suffix| afterwards
// (suffixes carry trailing commas/terminators so they are measured together
// with the expression). Returns the accumulated layout penalty. |outer_prec|
// is the precedence of the enclosing operator; parens are inserted when the
// root binds looser.
int Printer::Expr(const ParseNode* root,
                  int outer_prec,
                  const std::string& suffix) {
  std::string at_end = suffix;
  int penalty = 0;
  penalty_depth_++;

  if (root->comments()) {
    if (!root->comments()->before().empty()) {
      Trim();
      // If there's already other text on the line, start a new line.
      if (CurrentColumn() > 0)
        Print("\n");
      // We're printing a line comment, so we need to be at the current margin.
      PrintMargin();
      for (const auto& c : root->comments()->before()) {
        TrimAndPrintToken(c);
        Newline();
      }
    }
  }

  bool parenthesized = false;

  if (const AccessorNode* accessor = root->AsAccessor()) {
    AddParen(kPrecedenceSuffix, outer_prec, &parenthesized);
    Print(accessor->base().value());
    if (accessor->member()) {
      Print(".");
      Expr(accessor->member(), kPrecedenceLowest, std::string());
    } else {
      CHECK(accessor->index());
      Print("[");
      Expr(accessor->index(), kPrecedenceLowest, "]");
    }
  } else if (const BinaryOpNode* binop = root->AsBinaryOp()) {
    CHECK(precedence_.find(binop->op().value()) != precedence_.end());
    AnnotatePreferredMultilineAssignment(binop);

    SortIfSourcesOrDeps(binop);

    Precedence prec = precedence_[binop->op().value()];

    // Since binary operators format left-to-right, it is ok for the left side
    // use the same operator without parentheses, so the left uses prec. For the
    // same reason, the right side cannot reuse the same operator, or else "x +
    // (y + z)" would format as "x + y + z" which means "(x + y) + z". So, treat
    // the right expression as appearing one precedence level higher.
    // However, because the source parens are not in the parse tree, as a
    // special case for && and || we insert strictly-redundant-but-helpful-for-
    // human-readers parentheses.
    int prec_left = prec;
    int prec_right = prec + 1;
    if (binop->op().value() == "&&" && stack_.back().parent_is_boolean_or) {
      Print("(");
      parenthesized = true;
    } else {
      AddParen(prec_left, outer_prec, &parenthesized);
    }

    int start_line = CurrentLine();
    int start_column = CurrentColumn();
    bool is_assignment = binop->op().value() == "=" ||
                         binop->op().value() == "+=" ||
                         binop->op().value() == "-=";
    // A sort of funny special case for the long lists that are common in .gn
    // files, don't indent them + 4, even though they're just continuations when
    // they're simple lists like "x = [ a, b, c, ... ]"
    const ListNode* right_as_list = binop->right()->AsList();
    int indent_column =
        (is_assignment &&
         (!right_as_list || (!right_as_list->prefer_multiline() &&
                             !ListWillBeMultiline(right_as_list->contents(),
                                                  right_as_list->End()))))
            ? margin() + kIndentSize * 2
            : start_column;
    if (stack_.back().continuation_requires_indent)
      indent_column += kIndentSize * 2;

    stack_.push_back(IndentState(indent_column,
                                 stack_.back().continuation_requires_indent,
                                 binop->op().value() == "||"));
    Printer sub_left;
    InitializeSub(&sub_left);
    sub_left.Expr(binop->left(),
                  prec_left,
                  std::string(" ") + binop->op().value().as_string());
    bool left_is_multiline = CountLines(sub_left.String()) > 1;
    // Avoid walking the whole left subtree redundantly (see timing of
    // Format.046), so pull the output and comments from the subprinter.
    Print(sub_left.String().substr(start_column));
    std::copy(sub_left.comments_.begin(),
              sub_left.comments_.end(),
              std::back_inserter(comments_));

    // Single line.
    Printer sub1;
    InitializeSub(&sub1);
    sub1.Print(" ");
    int penalty_current_line =
        sub1.Expr(binop->right(), prec_right, std::string());
    sub1.Print(suffix);
    penalty_current_line += AssessPenalty(sub1.String());
    if (!is_assignment && left_is_multiline) {
      // In e.g. xxx + yyy, if xxx is already multiline, then we want a penalty
      // for trying to continue as if this were one line.
      penalty_current_line +=
          (CountLines(sub1.String()) - 1) * kPenaltyBrokenLineOnOneLiner;
    }

    // Break after operator.
    Printer sub2;
    InitializeSub(&sub2);
    sub2.Newline();
    int penalty_next_line =
        sub2.Expr(binop->right(), prec_right, std::string());
    sub2.Print(suffix);
    penalty_next_line += AssessPenalty(sub2.String());

    // If in both cases it was forced past 80col, then we don't break to avoid
    // breaking after '=' in the case of:
    //   variable = "... very long string ..."
    // as breaking and indenting doesn't make things much more readable, even
    // though there's less characters past the maximum width.
    bool exceeds_maximum_either_way = ExceedsMaximumWidth(sub1.String()) &&
                                      ExceedsMaximumWidth(sub2.String());

    if (penalty_current_line < penalty_next_line ||
        exceeds_maximum_either_way) {
      Print(" ");
      Expr(binop->right(), prec_right, std::string());
    } else {
      // Otherwise, put first argument and op, and indent next.
      Newline();
      penalty += std::abs(CurrentColumn() - start_column) *
                 kPenaltyHorizontalSeparation;
      Expr(binop->right(), prec_right, std::string());
    }
    stack_.pop_back();
    penalty += (CurrentLine() - start_line) * GetPenaltyForLineBreak();
  } else if (const BlockNode* block = root->AsBlock()) {
    Sequence(
        kSequenceStyleBracedBlock, block->statements(), block->End(), false);
  } else if (const ConditionNode* condition = root->AsConditionNode()) {
    Print("if (");
    // TODO(scottmg): The { needs to be included in the suffix here.
    Expr(condition->condition(), kPrecedenceLowest, ") ");
    Sequence(kSequenceStyleBracedBlock,
             condition->if_true()->statements(),
             condition->if_true()->End(),
             false);
    if (condition->if_false()) {
      Print(" else ");
      // If it's a block it's a bare 'else', otherwise it's an 'else if'. See
      // ConditionNode::Execute.
      bool is_else_if = condition->if_false()->AsBlock() == nullptr;
      if (is_else_if) {
        Expr(condition->if_false(), kPrecedenceLowest, std::string());
      } else {
        Sequence(kSequenceStyleBracedBlock,
                 condition->if_false()->AsBlock()->statements(),
                 condition->if_false()->AsBlock()->End(),
                 false);
      }
    }
  } else if (const FunctionCallNode* func_call = root->AsFunctionCall()) {
    penalty += FunctionCall(func_call, at_end);
    at_end = "";
  } else if (const IdentifierNode* identifier = root->AsIdentifier()) {
    Print(identifier->value().value());
  } else if (const ListNode* list = root->AsList()) {
    bool force_multiline =
        list->prefer_multiline() && !list->contents().empty();
    Sequence(
        kSequenceStyleList, list->contents(), list->End(), force_multiline);
  } else if (const LiteralNode* literal = root->AsLiteral()) {
    Print(literal->value().value());
  } else if (const UnaryOpNode* unaryop = root->AsUnaryOp()) {
    Print(unaryop->op().value());
    Expr(unaryop->operand(), kPrecedenceUnary, std::string());
  } else if (const BlockCommentNode* block_comment = root->AsBlockComment()) {
    Print(block_comment->comment().value());
  } else if (const EndNode* end = root->AsEnd()) {
    Print(end->value().value());
  } else {
    CHECK(false) << "Unhandled case in Expr.";
  }

  if (parenthesized)
    Print(")");

  // Defer any end of line comment until we reach the newline.
  if (root->comments() && !root->comments()->suffix().empty()) {
    std::copy(root->comments()->suffix().begin(),
              root->comments()->suffix().end(),
              std::back_inserter(comments_));
  }

  Print(at_end);

  penalty_depth_--;
  return penalty;
}
+
// Prints |list| in the given sequence style: bracketed list, braced block, or
// bare statement sequence. Blocks are always multiline; lists go multiline
// when forced, when long, or when comments require it. |end| carries trailing
// comments printed just before the closing bracket/brace.
template <class PARSENODE>
void Printer::Sequence(SequenceStyle style,
                       const std::vector<PARSENODE*>& list,
                       const ParseNode* end,
                       bool force_multiline) {
  if (style == kSequenceStyleList)
    Print("[");
  else if (style == kSequenceStyleBracedBlock)
    Print("{");

  if (style == kSequenceStyleBlock || style == kSequenceStyleBracedBlock)
    force_multiline = true;

  force_multiline |= ListWillBeMultiline(list, end);

  if (list.size() == 0 && !force_multiline) {
    // No elements, and not forcing newlines, print nothing.
  } else if (list.size() == 1 && !force_multiline) {
    // Single element on one line: "[ x ]".
    Print(" ");
    Expr(list[0], kPrecedenceLowest, std::string());
    CHECK(!list[0]->comments() || list[0]->comments()->after().empty());
    Print(" ");
  } else {
    // Multiline: one element per line at margin + one indent step.
    stack_.push_back(IndentState(margin() + kIndentSize,
                                 style == kSequenceStyleList,
                                 false));
    size_t i = 0;
    for (const auto& x : list) {
      Newline();
      // If:
      // - we're going to output some comments, and;
      // - we haven't just started this multiline list, and;
      // - there isn't already a blank line here;
      // Then: insert one.
      if (i != 0 && x->comments() && !x->comments()->before().empty() &&
          !HaveBlankLine()) {
        Newline();
      }
      bool body_of_list = i < list.size() - 1 || style == kSequenceStyleList;
      bool want_comma =
          body_of_list && (style == kSequenceStyleList && !x->AsBlockComment());
      Expr(x, kPrecedenceLowest, want_comma ? "," : std::string());
      CHECK(!x->comments() || x->comments()->after().empty());
      if (body_of_list) {
        if (i < list.size() - 1 &&
            ShouldAddBlankLineInBetween(list[i], list[i + 1]))
          Newline();
      }
      ++i;
    }

    // Trailing comments.
    if (end->comments() && !end->comments()->before().empty()) {
      if (list.size() >= 2)
        Newline();
      for (const auto& c : end->comments()->before()) {
        Newline();
        TrimAndPrintToken(c);
      }
    }

    stack_.pop_back();
    Newline();

    // Defer any end of line comment until we reach the newline.
    if (end->comments() && !end->comments()->suffix().empty()) {
      std::copy(end->comments()->suffix().begin(),
                end->comments()->suffix().end(),
                std::back_inserter(comments_));
    }
  }

  if (style == kSequenceStyleList)
    Print("]");
  else if (style == kSequenceStyleBracedBlock)
    Print("}");
}
+
// Prints a function call (optionally followed by its block), choosing among
// three argument layouts by measuring each with a sub-printer and comparing
// penalties. Returns the layout penalty for the chosen form.
int Printer::FunctionCall(const FunctionCallNode* func_call,
                          const std::string& suffix) {
  int start_line = CurrentLine();
  int start_column = CurrentColumn();
  Print(func_call->function().value());
  Print("(");

  bool have_block = func_call->block() != nullptr;
  bool force_multiline = false;

  const std::vector<const ParseNode*>& list = func_call->args()->contents();
  const ParseNode* end = func_call->args()->End();

  if (end && end->comments() && !end->comments()->before().empty())
    force_multiline = true;

  // If there's before line comments, make sure we have a place to put them.
  for (const auto& i : list) {
    if (i->comments() && !i->comments()->before().empty())
      force_multiline = true;
  }

  // Calculate the penalties for 3 possible layouts:
  // 1. all on same line;
  // 2. starting on same line, broken at each comma but paren aligned;
  // 3. broken to next line + 4, broken at each comma.
  std::string terminator = ")";
  if (have_block)
    terminator += " {";
  terminator += suffix;

  // Special case to make function calls of one arg taking a long list of
  // boolean operators not indent.
  bool continuation_requires_indent =
      list.size() != 1 || !list[0]->AsBinaryOp();

  // 1: Same line.
  Printer sub1;
  InitializeSub(&sub1);
  sub1.stack_.push_back(
      IndentState(CurrentColumn(), continuation_requires_indent, false));
  int penalty_one_line = 0;
  for (size_t i = 0; i < list.size(); ++i) {
    penalty_one_line += sub1.Expr(list[i], kPrecedenceLowest,
                                  i < list.size() - 1 ? ", " : std::string());
  }
  sub1.Print(terminator);
  penalty_one_line += AssessPenalty(sub1.String());
  // This extra penalty prevents a short second argument from being squeezed in
  // after a first argument that went multiline (and instead preferring a
  // variant below).
  penalty_one_line +=
      (CountLines(sub1.String()) - 1) * kPenaltyBrokenLineOnOneLiner;

  // 2: Starting on same line, broken at commas.
  Printer sub2;
  InitializeSub(&sub2);
  sub2.stack_.push_back(
      IndentState(CurrentColumn(), continuation_requires_indent, false));
  int penalty_multiline_start_same_line = 0;
  for (size_t i = 0; i < list.size(); ++i) {
    penalty_multiline_start_same_line += sub2.Expr(
        list[i], kPrecedenceLowest, i < list.size() - 1 ? "," : std::string());
    if (i < list.size() - 1) {
      sub2.Newline();
    }
  }
  sub2.Print(terminator);
  penalty_multiline_start_same_line += AssessPenalty(sub2.String());

  // 3: Starting on next line, broken at commas.
  Printer sub3;
  InitializeSub(&sub3);
  sub3.stack_.push_back(IndentState(margin() + kIndentSize * 2,
                                    continuation_requires_indent, false));
  sub3.Newline();
  int penalty_multiline_start_next_line = 0;
  for (size_t i = 0; i < list.size(); ++i) {
    if (i == 0) {
      penalty_multiline_start_next_line +=
          std::abs(sub3.CurrentColumn() - start_column) *
          kPenaltyHorizontalSeparation;
    }
    penalty_multiline_start_next_line += sub3.Expr(
        list[i], kPrecedenceLowest, i < list.size() - 1 ? "," : std::string());
    if (i < list.size() - 1) {
      sub3.Newline();
    }
  }
  sub3.Print(terminator);
  penalty_multiline_start_next_line += AssessPenalty(sub3.String());

  // Pick the cheapest layout; ties favor breaking to the next line.
  int penalty = penalty_multiline_start_next_line;
  bool fits_on_current_line = false;
  if (penalty_one_line < penalty_multiline_start_next_line ||
      penalty_multiline_start_same_line < penalty_multiline_start_next_line) {
    fits_on_current_line = true;
    penalty = penalty_one_line;
    if (penalty_multiline_start_same_line < penalty_one_line) {
      penalty = penalty_multiline_start_same_line;
      force_multiline = true;
    }
  } else {
    force_multiline = true;
  }

  if (list.size() == 0 && !force_multiline) {
    // No elements, and not forcing newlines, print nothing.
  } else {
    // Re-print the arguments for real using the chosen layout.
    if (penalty_multiline_start_next_line < penalty_multiline_start_same_line) {
      stack_.push_back(IndentState(margin() + kIndentSize * 2,
                                   continuation_requires_indent,
                                   false));
      Newline();
    } else {
      stack_.push_back(
          IndentState(CurrentColumn(), continuation_requires_indent, false));
    }

    for (size_t i = 0; i < list.size(); ++i) {
      const auto& x = list[i];
      if (i > 0) {
        if (fits_on_current_line && !force_multiline)
          Print(" ");
        else
          Newline();
      }
      bool want_comma = i < list.size() - 1 && !x->AsBlockComment();
      Expr(x, kPrecedenceLowest, want_comma ? "," : std::string());
      CHECK(!x->comments() || x->comments()->after().empty());
      if (i < list.size() - 1) {
        if (!want_comma)
          Newline();
      }
    }

    // Trailing comments.
    if (end->comments() && !end->comments()->before().empty()) {
      if (!list.empty())
        Newline();
      for (const auto& c : end->comments()->before()) {
        Newline();
        TrimAndPrintToken(c);
      }
      Newline();
    }
    stack_.pop_back();
  }

  // Defer any end of line comment until we reach the newline.
  if (end->comments() && !end->comments()->suffix().empty()) {
    std::copy(end->comments()->suffix().begin(),
              end->comments()->suffix().end(), std::back_inserter(comments_));
  }

  Print(")");
  Print(suffix);

  if (have_block) {
    Print(" ");
    Sequence(kSequenceStyleBracedBlock,
             func_call->block()->statements(),
             func_call->block()->End(),
             false);
  }
  return penalty + (CurrentLine() - start_line) * GetPenaltyForLineBreak();
}
+
+void Printer::InitializeSub(Printer* sub) {
+ sub->stack_ = stack_;
+ sub->comments_ = comments_;
+ sub->penalty_depth_ = penalty_depth_;
+ sub->Print(std::string(CurrentColumn(), 'x'));
+}
+
+template <class PARSENODE>
+bool Printer::ListWillBeMultiline(const std::vector<PARSENODE*>& list,
+ const ParseNode* end) {
+ if (list.size() > 1)
+ return true;
+
+ if (end && end->comments() && !end->comments()->before().empty())
+ return true;
+
+ // If there's before line comments, make sure we have a place to put them.
+ for (const auto& i : list) {
+ if (i->comments() && !i->comments()->before().empty())
+ return true;
+ }
+
+ return false;
+}
+
+void DoFormat(const ParseNode* root, bool dump_tree, std::string* output) {
+ if (dump_tree) {
+ std::ostringstream os;
+ root->Print(os, 0);
+ printf("----------------------\n");
+ printf("-- PARSE TREE --------\n");
+ printf("----------------------\n");
+ printf("%s", os.str().c_str());
+ printf("----------------------\n");
+ }
+ Printer pr;
+ pr.Block(root);
+ *output = pr.String();
+}
+
// Reads all of stdin and returns it as a string. Stops at end-of-file; on a
// stream read error, returns whatever was read so far.
std::string ReadStdin() {
  static const int kBufferSize = 256;
  char buffer[kBufferSize];
  std::string result;
  while (true) {
    // fgets returns nullptr on both EOF and error. The previous code only
    // returned when feof() was set, then fell through to strlen() on a
    // buffer fgets had NOT written — stale data from the prior iteration, or
    // uninitialized memory on the first — looping forever on a stream error.
    if (fgets(buffer, kBufferSize, stdin) == nullptr)
      return result;
    size_t length = strlen(buffer);
    if (length == 0)
      return result;
    result.append(buffer, length);
  }
}
+
+} // namespace
+
+bool FormatFileToString(Setup* setup,
+ const SourceFile& file,
+ bool dump_tree,
+ std::string* output) {
+ Err err;
+ const ParseNode* parse_node =
+ setup->scheduler().input_file_manager()->SyncLoadFile(
+ LocationRange(), &setup->build_settings(), file, &err);
+ if (err.has_error()) {
+ err.PrintToStdout();
+ return false;
+ }
+ DoFormat(parse_node, dump_tree, output);
+ return true;
+}
+
+bool FormatStringToString(const std::string& input,
+ bool dump_tree,
+ std::string* output) {
+ SourceFile source_file;
+ InputFile file(source_file);
+ file.SetContents(input);
+ Err err;
+ // Tokenize.
+ std::vector<Token> tokens = Tokenizer::Tokenize(&file, &err);
+ if (err.has_error()) {
+ err.PrintToStdout();
+ return false;
+ }
+
+ // Parse.
+ std::unique_ptr<ParseNode> parse_node = Parser::Parse(tokens, &err);
+ if (err.has_error()) {
+ err.PrintToStdout();
+ return false;
+ }
+
+ DoFormat(parse_node.get(), dump_tree, output);
+ return true;
+}
+
// Entry point for `gn format`. Exit codes: 0 = success (and, with --dry-run,
// output matches disk); 1 = failure; 2 = --dry-run detected a difference.
int RunFormat(const std::vector<std::string>& args) {
  bool dry_run =
      base::CommandLine::ForCurrentProcess()->HasSwitch(kSwitchDryRun);
  bool dump_tree =
      base::CommandLine::ForCurrentProcess()->HasSwitch(kSwitchDumpTree);
  bool from_stdin =
      base::CommandLine::ForCurrentProcess()->HasSwitch(kSwitchStdin);
  bool in_place =
      base::CommandLine::ForCurrentProcess()->HasSwitch(kSwitchInPlace);

  if (dry_run) {
    // --dry-run only works with an actual file to compare to.
    from_stdin = false;
    in_place = true;
  }

  if (from_stdin) {
    if (args.size() != 0) {
      Err(Location(), "Expecting no arguments when reading from stdin.\n")
          .PrintToStdout();
      return 1;
    }
    // Format stdin to stdout; no file involved.
    std::string input = ReadStdin();
    std::string output;
    if (!FormatStringToString(input, dump_tree, &output))
      return 1;
    printf("%s", output.c_str());
    return 0;
  }

  // TODO(scottmg): Eventually, this should be a list/spec of files, and they
  // should all be done in parallel.
  if (args.size() != 1) {
    Err(Location(), "Expecting exactly one argument, see `gn help format`.\n")
        .PrintToStdout();
    return 1;
  }

  Setup setup;
  SourceDir source_dir =
      SourceDirForCurrentDirectory(setup.build_settings().root_path());

  Err err;
  SourceFile file = source_dir.ResolveRelativeFile(Value(nullptr, args[0]),
                                                   &err);
  if (err.has_error()) {
    err.PrintToStdout();
    return 1;
  }

  std::string output_string;
  if (FormatFileToString(&setup, file, dump_tree, &output_string)) {
    if (in_place) {
      // Compare against the on-disk contents; only rewrite when different so
      // an already-formatted file is left untouched.
      base::FilePath to_write = setup.build_settings().GetFullPath(file);
      std::string original_contents;
      if (!base::ReadFileToString(to_write, &original_contents)) {
        Err(Location(), std::string("Couldn't read \"") +
                            to_write.AsUTF8Unsafe() +
                            std::string("\" for comparison.")).PrintToStdout();
        return 1;
      }
      if (dry_run)
        return original_contents == output_string ? 0 : 2;
      if (original_contents != output_string) {
        if (base::WriteFile(to_write,
                            output_string.data(),
                            static_cast<int>(output_string.size())) == -1) {
          Err(Location(),
              std::string("Failed to write formatted output back to \"") +
                  to_write.AsUTF8Unsafe() + std::string("\".")).PrintToStdout();
          return 1;
        }
        printf("Wrote formatted to '%s'.\n", to_write.AsUTF8Unsafe().c_str());
      }
    } else {
      printf("%s", output_string.c_str());
    }
  }

  return 0;
}
+
+} // namespace commands
diff --git a/chromium/tools/gn/command_format_unittest.cc b/chromium/tools/gn/command_format_unittest.cc
new file mode 100644
index 00000000000..7c79d415a55
--- /dev/null
+++ b/chromium/tools/gn/command_format_unittest.cc
@@ -0,0 +1,106 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/files/file_util.h"
+#include "base/path_service.h"
+#include "base/strings/string_util.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/commands.h"
+#include "tools/gn/setup.h"
+
+namespace commands {
+bool FormatFileToString(Setup* setup,
+ const SourceFile& file,
+ bool dump_tree,
+ std::string* output);
+} // namespace commands
+
+#define FORMAT_TEST(n) \
+ TEST(Format, n) { \
+ ::Setup setup; \
+ std::string out; \
+ std::string expected; \
+ base::FilePath src_dir; \
+ PathService::Get(base::DIR_SOURCE_ROOT, &src_dir); \
+ base::SetCurrentDirectory(src_dir); \
+ EXPECT_TRUE(commands::FormatFileToString( \
+ &setup, SourceFile("//tools/gn/format_test_data/" #n ".gn"), false, \
+ &out)); \
+ ASSERT_TRUE(base::ReadFileToString( \
+ base::FilePath(FILE_PATH_LITERAL("tools/gn/format_test_data/") \
+ FILE_PATH_LITERAL(#n) \
+ FILE_PATH_LITERAL(".golden")), \
+ &expected)); \
+ EXPECT_EQ(expected, out); \
+ }
+
+// These are expanded out this way rather than a runtime loop so that
+// --gtest_filter works as expected for individual test running.
+FORMAT_TEST(001)
+FORMAT_TEST(002)
+FORMAT_TEST(003)
+FORMAT_TEST(004)
+FORMAT_TEST(005)
+FORMAT_TEST(006)
+FORMAT_TEST(007)
+FORMAT_TEST(008)
+FORMAT_TEST(009)
+FORMAT_TEST(010)
+FORMAT_TEST(011)
+FORMAT_TEST(012)
+FORMAT_TEST(013)
+FORMAT_TEST(014)
+FORMAT_TEST(015)
+FORMAT_TEST(016)
+FORMAT_TEST(017)
+FORMAT_TEST(018)
+FORMAT_TEST(019)
+FORMAT_TEST(020)
+FORMAT_TEST(021)
+FORMAT_TEST(022)
+FORMAT_TEST(023)
+FORMAT_TEST(024)
+FORMAT_TEST(025)
+FORMAT_TEST(026)
+FORMAT_TEST(027)
+FORMAT_TEST(028)
+FORMAT_TEST(029)
+FORMAT_TEST(030)
+FORMAT_TEST(031)
+FORMAT_TEST(032)
+FORMAT_TEST(033)
+// TODO(scottmg): args+rebase_path unnecessarily split: FORMAT_TEST(034)
+FORMAT_TEST(035)
+FORMAT_TEST(036)
+FORMAT_TEST(037)
+FORMAT_TEST(038)
+FORMAT_TEST(039)
+// TODO(scottmg): Bad break, exceeding 80 col: FORMAT_TEST(040)
+FORMAT_TEST(041)
+FORMAT_TEST(042)
+FORMAT_TEST(043)
+// TODO(scottmg): Dewrapped caused exceeding 80 col: FORMAT_TEST(044)
+FORMAT_TEST(045)
+FORMAT_TEST(046)
+FORMAT_TEST(047)
+FORMAT_TEST(048)
+// TODO(scottmg): Eval is broken (!) and comment output might have extra ,
+// FORMAT_TEST(049)
+FORMAT_TEST(050)
+FORMAT_TEST(051)
+FORMAT_TEST(052)
+FORMAT_TEST(053)
+FORMAT_TEST(054)
+FORMAT_TEST(055)
+FORMAT_TEST(056)
+FORMAT_TEST(057)
+FORMAT_TEST(058)
+FORMAT_TEST(059)
+FORMAT_TEST(060)
+FORMAT_TEST(061)
+FORMAT_TEST(062)
+FORMAT_TEST(063)
+FORMAT_TEST(064)
+FORMAT_TEST(065)
+FORMAT_TEST(066)
diff --git a/chromium/tools/gn/command_gen.cc b/chromium/tools/gn/command_gen.cc
new file mode 100644
index 00000000000..fee3f1a31f8
--- /dev/null
+++ b/chromium/tools/gn/command_gen.cc
@@ -0,0 +1,323 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/atomicops.h"
+#include "base/bind.h"
+#include "base/command_line.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/stringprintf.h"
+#include "base/timer/elapsed_timer.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/commands.h"
+#include "tools/gn/eclipse_writer.h"
+#include "tools/gn/ninja_target_writer.h"
+#include "tools/gn/ninja_writer.h"
+#include "tools/gn/runtime_deps.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/setup.h"
+#include "tools/gn/standard_out.h"
+#include "tools/gn/switches.h"
+#include "tools/gn/target.h"
+#include "tools/gn/visual_studio_writer.h"
+
+namespace commands {
+
+namespace {
+
+const char kSwitchCheck[] = "check";
+const char kSwitchFilters[] = "filters";
+const char kSwitchIde[] = "ide";
+const char kSwitchIdeValueEclipse[] = "eclipse";
+const char kSwitchIdeValueVs[] = "vs";
+const char kSwitchIdeValueVs2013[] = "vs2013";
+const char kSwitchIdeValueVs2015[] = "vs2015";
+const char kSwitchSln[] = "sln";
+
+// Called on worker thread to write the ninja file.
+void BackgroundDoWrite(const Target* target) {
+ NinjaTargetWriter::RunAndWriteFile(target);
+ g_scheduler->DecrementWorkCount();
+}
+
+// Called on the main thread.
+void ItemResolvedCallback(base::subtle::Atomic32* write_counter,
+ scoped_refptr<Builder> builder,
+ const BuilderRecord* record) {
+ base::subtle::NoBarrier_AtomicIncrement(write_counter, 1);
+
+ const Item* item = record->item();
+ const Target* target = item->AsTarget();
+ if (target) {
+ g_scheduler->IncrementWorkCount();
+ g_scheduler->ScheduleWork(base::Bind(&BackgroundDoWrite, target));
+ }
+}
+
+// Returns a pointer to the target with the given file as an output, or null
+// if no targets generate the file. This is brute force since this is an
+// error condition and performance shouldn't matter.
+const Target* FindTargetThatGeneratesFile(const Builder* builder,
+ const SourceFile& file) {
+ std::vector<const Target*> targets = builder->GetAllResolvedTargets();
+ if (targets.empty())
+ return nullptr;
+
+ OutputFile output_file(targets[0]->settings()->build_settings(), file);
+ for (const Target* target : targets) {
+ for (const auto& cur_output : target->computed_outputs()) {
+ if (cur_output == output_file)
+ return target;
+ }
+ }
+ return nullptr;
+}
+
+// Prints an error that the given file was present as a source or input in
+// the given target(s) but was not generated by any of its dependencies.
+void PrintInvalidGeneratedInput(const Builder* builder,
+ const SourceFile& file,
+ const std::vector<const Target*>& targets) {
+ std::string err;
+
+ // Only show the toolchain labels (which can be confusing) if something
+ // isn't the default.
+ bool show_toolchains = false;
+ const Label& default_toolchain =
+ targets[0]->settings()->default_toolchain_label();
+ for (const Target* target : targets) {
+ if (target->settings()->toolchain_label() != default_toolchain) {
+ show_toolchains = true;
+ break;
+ }
+ }
+
+ const Target* generator = FindTargetThatGeneratesFile(builder, file);
+ if (generator &&
+ generator->settings()->toolchain_label() != default_toolchain)
+ show_toolchains = true;
+
+ const std::string target_str = targets.size() > 1 ? "targets" : "target";
+ err += "The file:\n";
+ err += " " + file.value() + "\n";
+ err += "is listed as an input or source for the " + target_str + ":\n";
+ for (const Target* target : targets)
+ err += " " + target->label().GetUserVisibleName(show_toolchains) + "\n";
+
+ if (generator) {
+ err += "but this file was not generated by any dependencies of the " +
+ target_str + ". The target\nthat generates the file is:\n ";
+ err += generator->label().GetUserVisibleName(show_toolchains);
+ } else {
+ err += "but no targets in the build generate that file.";
+ }
+
+ Err(Location(), "Input to " + target_str + " not generated by a dependency.",
+ err).PrintToStdout();
+}
+
+bool CheckForInvalidGeneratedInputs(Setup* setup) {
+ std::multimap<SourceFile, const Target*> unknown_inputs =
+ g_scheduler->GetUnknownGeneratedInputs();
+ if (unknown_inputs.empty())
+ return true; // No bad files.
+
+ int errors_found = 0;
+ auto cur = unknown_inputs.begin();
+ while (cur != unknown_inputs.end()) {
+ errors_found++;
+ auto end_of_range = unknown_inputs.upper_bound(cur->first);
+
+ // Package the values more conveniently for printing.
+ SourceFile bad_input = cur->first;
+ std::vector<const Target*> targets;
+ while (cur != end_of_range)
+ targets.push_back((cur++)->second);
+
+ PrintInvalidGeneratedInput(setup->builder(), bad_input, targets);
+ OutputString("\n");
+ }
+
+ OutputString(
+ "If you have generated inputs, there needs to be a dependency path "
+ "between the\ntwo targets in addition to just listing the files. For "
+ "indirect dependencies,\nthe intermediate ones must be public_deps. "
+ "data_deps don't count since they're\nonly runtime dependencies. If "
+ "you think a dependency chain exists, it might be\nbecause the chain "
+ "is private. Try \"gn path\" to analyze.\n");
+
+ if (errors_found > 1) {
+ OutputString(base::StringPrintf("\n%d generated input errors found.\n",
+ errors_found), DECORATION_YELLOW);
+ }
+ return false;
+}
+
+bool RunIdeWriter(const std::string& ide,
+ const BuildSettings* build_settings,
+ Builder* builder,
+ Err* err) {
+ const base::CommandLine* command_line =
+ base::CommandLine::ForCurrentProcess();
+ base::ElapsedTimer timer;
+
+ if (ide == kSwitchIdeValueEclipse) {
+ bool res = EclipseWriter::RunAndWriteFile(build_settings, builder, err);
+ if (res && !command_line->HasSwitch(switches::kQuiet)) {
+ OutputString("Generating Eclipse settings took " +
+ base::Int64ToString(timer.Elapsed().InMilliseconds()) +
+ "ms\n");
+ }
+ return res;
+ } else if (ide == kSwitchIdeValueVs || ide == kSwitchIdeValueVs2013 ||
+ ide == kSwitchIdeValueVs2015) {
+ VisualStudioWriter::Version version =
+ ide == kSwitchIdeValueVs2013 ? VisualStudioWriter::Version::Vs2013
+ : VisualStudioWriter::Version::Vs2015;
+ std::string sln_name;
+ if (command_line->HasSwitch(kSwitchSln))
+ sln_name = command_line->GetSwitchValueASCII(kSwitchSln);
+ std::string filters;
+ if (command_line->HasSwitch(kSwitchFilters))
+ filters = command_line->GetSwitchValueASCII(kSwitchFilters);
+ bool res = VisualStudioWriter::RunAndWriteFiles(
+ build_settings, builder, version, sln_name, filters, err);
+ if (res && !command_line->HasSwitch(switches::kQuiet)) {
+ OutputString("Generating Visual Studio projects took " +
+ base::Int64ToString(timer.Elapsed().InMilliseconds()) +
+ "ms\n");
+ }
+ return res;
+ }
+
+ *err = Err(Location(), "Unknown IDE: " + ide);
+ return false;
+}
+
+} // namespace
+
+const char kGen[] = "gen";
+const char kGen_HelpShort[] =
+ "gen: Generate ninja files.";
+const char kGen_Help[] =
+ "gn gen: Generate ninja files.\n"
+ "\n"
+ " gn gen [<ide options>] <out_dir>\n"
+ "\n"
+ " Generates ninja files from the current tree and puts them in the given\n"
+ " output directory.\n"
+ "\n"
+ " The output directory can be a source-repo-absolute path name such as:\n"
+ " //out/foo\n"
+ " Or it can be a directory relative to the current directory such as:\n"
+ " out/foo\n"
+ "\n"
+ " See \"gn help switches\" for the common command-line switches.\n"
+ "\n"
+ "IDE options\n"
+ "\n"
+ " GN optionally generates files for IDE. Possibilities for <ide options>\n"
+ "\n"
+ " --ide=<ide_name>\n"
+ " Generate files for an IDE. Currently supported values:\n"
+ " \"eclipse\" - Eclipse CDT settings file.\n"
+ " \"vs\" - Visual Studio project/solution files.\n"
+ " (default Visual Studio version: 2015)\n"
+ " \"vs2013\" - Visual Studio 2013 project/solution files.\n"
+ " \"vs2015\" - Visual Studio 2015 project/solution files.\n"
+ "\n"
+ " --sln=<file_name>\n"
+ " Override default sln file name (\"all\"). Solution file is written\n"
+ " to the root build directory. Only for Visual Studio.\n"
+ "\n"
+ " --filters=<path_prefixes>\n"
+ " Semicolon-separated list of label patterns used to limit the set\n"
+ " of generated projects (see \"gn help label_pattern\"). Only\n"
+ " matching targets will be included to the solution. Only for Visual\n"
+ " Studio.\n"
+ "\n"
+ "Eclipse IDE Support\n"
+ "\n"
+ " GN DOES NOT generate Eclipse CDT projects. Instead, it generates a\n"
+ " settings file which can be imported into an Eclipse CDT project. The\n"
+ " XML file contains a list of include paths and defines. Because GN does\n"
+ " not generate a full .cproject definition, it is not possible to\n"
+ " properly define includes/defines for each file individually.\n"
+ " Instead, one set of includes/defines is generated for the entire\n"
+ " project. This works fairly well but may still result in a few indexer\n"
+ " issues here and there.\n";
+
+int RunGen(const std::vector<std::string>& args) {
+ base::ElapsedTimer timer;
+
+ if (args.size() != 1) {
+ Err(Location(), "Need exactly one build directory to generate.",
+ "I expected something more like \"gn gen out/foo\"\n"
+ "You can also see \"gn help gen\".").PrintToStdout();
+ return 1;
+ }
+
+ // Deliberately leaked to avoid expensive process teardown.
+ Setup* setup = new Setup();
+ if (!setup->DoSetup(args[0], true))
+ return 1;
+
+ const base::CommandLine* command_line =
+ base::CommandLine::ForCurrentProcess();
+ if (command_line->HasSwitch(kSwitchCheck))
+ setup->set_check_public_headers(true);
+
+ // Cause the load to also generate the ninja files for each target. We wrap
+ // the writing to maintain a counter.
+ base::subtle::Atomic32 write_counter = 0;
+ setup->builder()->set_resolved_callback(
+ base::Bind(&ItemResolvedCallback, &write_counter,
+ scoped_refptr<Builder>(setup->builder())));
+
+ // Do the actual load. This will also write out the target ninja files.
+ if (!setup->Run())
+ return 1;
+
+ Err err;
+ // Write the root ninja files.
+ if (!NinjaWriter::RunAndWriteFiles(&setup->build_settings(),
+ setup->builder(),
+ &err)) {
+ err.PrintToStdout();
+ return 1;
+ }
+
+ if (!WriteRuntimeDepsFilesIfNecessary(*setup->builder(), &err)) {
+ err.PrintToStdout();
+ return 1;
+ }
+
+ if (!CheckForInvalidGeneratedInputs(setup))
+ return 1;
+
+ if (command_line->HasSwitch(kSwitchIde) &&
+ !RunIdeWriter(command_line->GetSwitchValueASCII(kSwitchIde),
+ &setup->build_settings(), setup->builder(), &err)) {
+ err.PrintToStdout();
+ return 1;
+ }
+
+ base::TimeDelta elapsed_time = timer.Elapsed();
+
+ if (!command_line->HasSwitch(switches::kQuiet)) {
+ OutputString("Done. ", DECORATION_GREEN);
+
+ std::string stats = "Wrote " +
+ base::IntToString(static_cast<int>(write_counter)) +
+ " targets from " +
+ base::IntToString(
+ setup->scheduler().input_file_manager()->GetInputFileCount()) +
+ " files in " +
+ base::Int64ToString(elapsed_time.InMilliseconds()) + "ms\n";
+ OutputString(stats);
+ }
+
+ return 0;
+}
+
+} // namespace commands
diff --git a/chromium/tools/gn/command_help.cc b/chromium/tools/gn/command_help.cc
new file mode 100644
index 00000000000..9bac894f9d6
--- /dev/null
+++ b/chromium/tools/gn/command_help.cc
@@ -0,0 +1,276 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <algorithm>
+#include <iostream>
+
+#include "base/command_line.h"
+#include "tools/gn/args.h"
+#include "tools/gn/commands.h"
+#include "tools/gn/err.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/input_conversion.h"
+#include "tools/gn/label_pattern.h"
+#include "tools/gn/parser.h"
+#include "tools/gn/runtime_deps.h"
+#include "tools/gn/setup.h"
+#include "tools/gn/standard_out.h"
+#include "tools/gn/string_utils.h"
+#include "tools/gn/substitution_writer.h"
+#include "tools/gn/switches.h"
+#include "tools/gn/variables.h"
+
+namespace commands {
+
+namespace {
+
+void PrintToplevelHelp() {
+ OutputString("Commands (type \"gn help <command>\" for more details):\n");
+ for (const auto& cmd : commands::GetCommands())
+ PrintShortHelp(cmd.second.help_short);
+
+ // Target declarations.
+ OutputString("\nTarget declarations (type \"gn help <function>\" for more "
+ "details):\n");
+ for (const auto& func : functions::GetFunctions()) {
+ if (func.second.is_target)
+ PrintShortHelp(func.second.help_short);
+ }
+
+ // Functions.
+ OutputString("\nBuildfile functions (type \"gn help <function>\" for more "
+ "details):\n");
+ for (const auto& func : functions::GetFunctions()) {
+ if (!func.second.is_target)
+ PrintShortHelp(func.second.help_short);
+ }
+
+ // Built-in variables.
+ OutputString("\nBuilt-in predefined variables (type \"gn help <variable>\" "
+ "for more details):\n");
+ for (const auto& builtin : variables::GetBuiltinVariables())
+ PrintShortHelp(builtin.second.help_short);
+
+ // Target variables.
+ OutputString("\nVariables you set in targets (type \"gn help <variable>\" "
+ "for more details):\n");
+ for (const auto& target : variables::GetTargetVariables())
+ PrintShortHelp(target.second.help_short);
+
+ OutputString("\nOther help topics:\n");
+ PrintShortHelp("all: Print all the help at once");
+ PrintShortHelp("buildargs: How build arguments work.");
+ PrintShortHelp("dotfile: Info about the toplevel .gn file.");
+ PrintShortHelp("grammar: Formal grammar for GN build files.");
+ PrintShortHelp(
+ "input_conversion: Processing input from exec_script and read_file.");
+ PrintShortHelp("label_pattern: Matching more than one label.");
+ PrintShortHelp("nogncheck: Annotating includes for checking.");
+ PrintShortHelp("runtime_deps: How runtime dependency computation works.");
+ PrintShortHelp("source_expansion: Map sources to outputs for scripts.");
+ PrintShortHelp("switches: Show available command-line switches.");
+}
+
+void PrintSwitchHelp() {
+ const base::CommandLine* cmdline = base::CommandLine::ForCurrentProcess();
+ bool use_markdown = cmdline->HasSwitch(switches::kMarkdown);
+
+ OutputString("Available global switches\n", DECORATION_YELLOW);
+ OutputString(
+ " Do \"gn help --the_switch_you_want_help_on\" for more. Individual\n"
+ " commands may take command-specific switches not listed here. See the\n"
+ " help on your specific command for more.\n\n");
+
+ if (use_markdown)
+ OutputString("```\n\n", DECORATION_NONE);
+
+ for (const auto& s : switches::GetSwitches())
+ PrintShortHelp(s.second.short_help);
+
+ if (use_markdown)
+ OutputString("\n```\n", DECORATION_NONE);
+}
+
+void PrintAllHelp() {
+ const base::CommandLine* cmdline = base::CommandLine::ForCurrentProcess();
+ if (cmdline->HasSwitch(switches::kMarkdown)) {
+ OutputString("# GN Reference\n\n");
+
+ // TODO: https://code.google.com/p/gitiles/issues/detail?id=75
+ // Gitiles crashes when rendering the table of contents, so we must omit
+ // it until the bug is fixed.
+ // OutputString("[TOC]\n\n");
+ OutputString("*This page is automatically generated from* "
+ "`gn help --markdown all`.\n\n");
+ } else {
+ PrintToplevelHelp();
+ }
+
+ for (const auto& s : switches::GetSwitches())
+ PrintLongHelp(s.second.long_help);
+
+ for (const auto& c: commands::GetCommands())
+ PrintLongHelp(c.second.help);
+
+ for (const auto& f: functions::GetFunctions())
+ PrintLongHelp(f.second.help);
+
+ for (const auto& v: variables::GetBuiltinVariables())
+ PrintLongHelp(v.second.help);
+
+ for (const auto& v: variables::GetTargetVariables())
+ PrintLongHelp(v.second.help);
+
+ PrintLongHelp(kBuildArgs_Help);
+ PrintLongHelp(kDotfile_Help);
+ PrintLongHelp(kGrammar_Help);
+ PrintLongHelp(kInputConversion_Help);
+ PrintLongHelp(kLabelPattern_Help);
+ PrintLongHelp(kNoGnCheck_Help);
+ PrintLongHelp(kRuntimeDeps_Help);
+ PrintLongHelp(kSourceExpansion_Help);
+ PrintSwitchHelp();
+}
+
+// Prints help on the given switch. There should be no leading hyphens. Returns
+// true if the switch was found and help was printed. False means the switch is
+// unknown.
+bool PrintHelpOnSwitch(const std::string& what) {
+ const switches::SwitchInfoMap& all = switches::GetSwitches();
+ switches::SwitchInfoMap::const_iterator found =
+ all.find(base::StringPiece(what));
+ if (found == all.end())
+ return false;
+ PrintLongHelp(found->second.long_help);
+ return true;
+}
+
+} // namespace
+
+const char kHelp[] = "help";
+const char kHelp_HelpShort[] =
+ "help: Does what you think.";
+const char kHelp_Help[] =
+ "gn help <anything>\n"
+ "\n"
+ " Yo dawg, I heard you like help on your help so I put help on the help\n"
+ " in the help.\n"
+ "\n"
+ " You can also use \"all\" as the parameter to get all help at once.\n"
+ "\n"
+ "Switches\n"
+ "\n"
+ " --markdown\n"
+ " Format output in markdown syntax.\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " gn help --markdown all\n"
+ " Dump all help to stdout in markdown format.\n";
+
+int RunHelp(const std::vector<std::string>& args) {
+ std::string what;
+ if (args.size() == 0) {
+ // If no argument is specified, check for switches to allow things like
+ // "gn help --args" for help on the args switch.
+ const base::CommandLine::SwitchMap& switches =
+ base::CommandLine::ForCurrentProcess()->GetSwitches();
+ if (switches.empty()) {
+ // Still nothing, show help overview.
+ PrintToplevelHelp();
+ return 0;
+ }
+
+ // Switch help needs to be done separately. The CommandLine will strip the
+ // switch separators so --args will come out as "args" which is then
+ // ambiguous with the variable named "args".
+ if (!PrintHelpOnSwitch(switches.begin()->first))
+ PrintToplevelHelp();
+ return 0;
+ } else {
+ what = args[0];
+ }
+
+ std::vector<base::StringPiece> all_help_topics;
+
+ // Check commands.
+ const commands::CommandInfoMap& command_map = commands::GetCommands();
+ auto found_command = command_map.find(what);
+ if (found_command != command_map.end()) {
+ PrintLongHelp(found_command->second.help);
+ return 0;
+ }
+ for (const auto& entry : command_map)
+ all_help_topics.push_back(entry.first);
+
+ // Check functions.
+ const functions::FunctionInfoMap& function_map = functions::GetFunctions();
+ auto found_function = function_map.find(what);
+ if (found_function != function_map.end()) {
+ PrintLongHelp(found_function->second.help);
+ return 0;
+ }
+ for (const auto& entry : function_map)
+ all_help_topics.push_back(entry.first);
+
+ // Builtin variables.
+ const variables::VariableInfoMap& builtin_vars =
+ variables::GetBuiltinVariables();
+ auto found_builtin_var = builtin_vars.find(what);
+ if (found_builtin_var != builtin_vars.end()) {
+ PrintLongHelp(found_builtin_var->second.help);
+ return 0;
+ }
+ for (const auto& entry : builtin_vars)
+ all_help_topics.push_back(entry.first);
+
+ // Target variables.
+ const variables::VariableInfoMap& target_vars =
+ variables::GetTargetVariables();
+ auto found_target_var = target_vars.find(what);
+ if (found_target_var != target_vars.end()) {
+ PrintLongHelp(found_target_var->second.help);
+ return 0;
+ }
+ for (const auto& entry : target_vars)
+ all_help_topics.push_back(entry.first);
+
+ // Random other topics.
+ std::map<std::string, void(*)()> random_topics;
+ random_topics["all"] = PrintAllHelp;
+ random_topics["buildargs"] = []() { PrintLongHelp(kBuildArgs_Help); };
+ random_topics["dotfile"] = []() { PrintLongHelp(kDotfile_Help); };
+ random_topics["grammar"] = []() { PrintLongHelp(kGrammar_Help); };
+ random_topics["input_conversion"] = []() {
+ PrintLongHelp(kInputConversion_Help);
+ };
+ random_topics["label_pattern"] = []() { PrintLongHelp(kLabelPattern_Help); };
+ random_topics["nogncheck"] = []() { PrintLongHelp(kNoGnCheck_Help); };
+ random_topics["runtime_deps"] = []() { PrintLongHelp(kRuntimeDeps_Help); };
+ random_topics["source_expansion"] = []() {
+ PrintLongHelp(kSourceExpansion_Help);
+ };
+ random_topics["switches"] = PrintSwitchHelp;
+ auto found_random_topic = random_topics.find(what);
+ if (found_random_topic != random_topics.end()) {
+ found_random_topic->second();
+ return 0;
+ }
+ for (const auto& entry : random_topics)
+ all_help_topics.push_back(entry.first);
+
+ // No help on this.
+ Err(Location(), "No help on \"" + what + "\".").PrintToStdout();
+ base::StringPiece suggestion = SpellcheckString(what, all_help_topics);
+ if (suggestion.empty()) {
+ OutputString("Run `gn help` for a list of available topics.\n",
+ DECORATION_NONE);
+ } else {
+ OutputString("Did you mean `gn help " + suggestion.as_string() + "`?\n",
+ DECORATION_NONE);
+ }
+ return 1;
+}
+
+} // namespace commands
diff --git a/chromium/tools/gn/command_ls.cc b/chromium/tools/gn/command_ls.cc
new file mode 100644
index 00000000000..eddb51c3797
--- /dev/null
+++ b/chromium/tools/gn/command_ls.cc
@@ -0,0 +1,115 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <algorithm>
+#include <set>
+
+#include "base/command_line.h"
+#include "tools/gn/commands.h"
+#include "tools/gn/label_pattern.h"
+#include "tools/gn/setup.h"
+#include "tools/gn/standard_out.h"
+#include "tools/gn/target.h"
+
+namespace commands {
+
+const char kLs[] = "ls";
+const char kLs_HelpShort[] =
+ "ls: List matching targets.";
+const char kLs_Help[] =
+ "gn ls <out_dir> [<label_pattern>] [--all-toolchains] [--as=...]\n"
+ " [--type=...] [--testonly=...]\n"
+ "\n"
+ " Lists all targets matching the given pattern for the given build\n"
+ " directory. By default, only targets in the default toolchain will\n"
+ " be matched unless a toolchain is explicitly supplied.\n"
+ "\n"
+ " If the label pattern is unspecified, list all targets. The label\n"
+ " pattern is not a general regular expression (see\n"
+ " \"gn help label_pattern\"). If you need more complex expressions,\n"
+ " pipe the result through grep.\n"
+ "\n"
+ "Options\n"
+ "\n"
+ TARGET_PRINTING_MODE_COMMAND_LINE_HELP
+ "\n"
+ " --all-toolchains\n"
+ " Matches all toolchains. When set, if the label pattern does not\n"
+ " specify an explicit toolchain, labels from all toolchains will be\n"
+ " matched. When unset, only targets in the default toolchain will\n"
+ " be matched unless an explicit toolchain in the label is set.\n"
+ "\n"
+ TARGET_TESTONLY_FILTER_COMMAND_LINE_HELP
+ "\n"
+ TARGET_TYPE_FILTER_COMMAND_LINE_HELP
+ "\n"
+ "Examples\n"
+ "\n"
+ " gn ls out/Debug\n"
+ " Lists all targets in the default toolchain.\n"
+ "\n"
+ " gn ls out/Debug \"//base/*\"\n"
+ " Lists all targets in the directory base and all subdirectories.\n"
+ "\n"
+ " gn ls out/Debug \"//base:*\"\n"
+ " Lists all targets defined in //base/BUILD.gn.\n"
+ "\n"
+ " gn ls out/Debug //base --as=output\n"
+ " Lists the build output file for //base:base\n"
+ "\n"
+ " gn ls out/Debug --type=executable\n"
+ " Lists all executables produced by the build.\n"
+ "\n"
+ " gn ls out/Debug \"//base/*\" --as=output | xargs ninja -C out/Debug\n"
+ " Builds all targets in //base and all subdirectories.\n"
+ "\n"
+ " gn ls out/Debug //base --all-toolchains\n"
+ " Lists all variants of the target //base:base (it may be referenced\n"
+ " in multiple toolchains).\n";
+
+int RunLs(const std::vector<std::string>& args) {
+ if (args.size() == 0) {
+ Err(Location(), "You're holding it wrong.",
+ "Usage: \"gn ls <build dir> [<label_pattern>]*\"").PrintToStdout();
+ return 1;
+ }
+
+ Setup* setup = new Setup;
+ setup->build_settings().set_check_for_bad_items(false);
+ if (!setup->DoSetup(args[0], false) || !setup->Run())
+ return 1;
+
+ const base::CommandLine* cmdline = base::CommandLine::ForCurrentProcess();
+ bool all_toolchains = cmdline->HasSwitch("all-toolchains");
+
+ std::vector<const Target*> matches;
+ if (args.size() > 1) {
+ // Some patterns or explicit labels were specified.
+ std::vector<std::string> inputs(args.begin() + 1, args.end());
+
+ UniqueVector<const Target*> target_matches;
+ UniqueVector<const Config*> config_matches;
+ UniqueVector<const Toolchain*> toolchain_matches;
+ UniqueVector<SourceFile> file_matches;
+ if (!ResolveFromCommandLineInput(setup, inputs, all_toolchains,
+ &target_matches, &config_matches,
+ &toolchain_matches, &file_matches))
+ return 1;
+ matches.insert(matches.begin(),
+ target_matches.begin(), target_matches.end());
+ } else if (all_toolchains) {
+ // List all resolved targets.
+ matches = setup->builder()->GetAllResolvedTargets();
+ } else {
+ // List all resolved targets in the default toolchain.
+ for (const auto& target : setup->builder()->GetAllResolvedTargets()) {
+ if (target->settings()->is_default())
+ matches.push_back(target);
+ }
+ }
+ FilterAndPrintTargets(false, &matches);
+ return 0;
+}
+
+} // namespace commands
diff --git a/chromium/tools/gn/command_path.cc b/chromium/tools/gn/command_path.cc
new file mode 100644
index 00000000000..2368788d581
--- /dev/null
+++ b/chromium/tools/gn/command_path.cc
@@ -0,0 +1,320 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include <algorithm>
+
+#include "base/command_line.h"
+#include "base/containers/hash_tables.h"
+#include "base/strings/stringprintf.h"
+#include "tools/gn/commands.h"
+#include "tools/gn/setup.h"
+#include "tools/gn/standard_out.h"
+
+namespace commands {
+
+namespace {
+
+enum DepType {
+ DEP_NONE,
+ DEP_PUBLIC,
+ DEP_PRIVATE,
+ DEP_DATA
+};
+
+// As we do a depth-first search, this vector will store the current path
+// to the current target for printing when a match is found.
+using TargetDep = std::pair<const Target*, DepType>;
+using DepStack = std::vector<TargetDep>;
+
+// Note that this uses raw pointers. These need to be manually deleted (which
+// we won't normally bother with). This allows the vector to be resized
+// more quickly.
+using DepStackVector = std::vector<DepStack*>;
+
+using DepSet = base::hash_set<const Target*>;
+
+struct Options {
+ Options()
+ : all(false),
+ public_only(false),
+ with_data(false) {
+ }
+
+ bool all;
+ bool public_only;
+ bool with_data;
+};
+
+struct State {
+ State() : found_count(0) {
+ // Reserve fairly large buffers for the found vectors.
+ const size_t kReserveSize = 32768;
+ found_public.reserve(kReserveSize);
+ found_other.reserve(kReserveSize);
+ }
+
+ // Stores targets that do not have any paths to the destination. This is
+ // an optimization to avoid revisiting useless paths.
+ DepSet rejected;
+
+ // Total number of paths found.
+ int found_count;
+
+ // The pointers in these vectors are owned by this object, but are
+ // deliberately leaked. There can be a lot of them which can take a long time
+ // to free, and GN will just exit after this is used anyway.
+ DepStackVector found_public;
+ DepStackVector found_other;
+};
+
+void PrintDepStack(const DepStack& stack) {
+ // Don't print toolchains unless they differ from the first target.
+ const Label& default_toolchain = stack[0].first->label().GetToolchainLabel();
+
+ for (const auto& pair : stack) {
+ OutputString(pair.first->label().GetUserVisibleName(default_toolchain));
+ switch (pair.second) {
+ case DEP_NONE:
+ break;
+ case DEP_PUBLIC:
+ OutputString(" --[public]-->", DECORATION_DIM);
+ break;
+ case DEP_PRIVATE:
+ OutputString(" --[private]-->", DECORATION_DIM);
+ break;
+ case DEP_DATA:
+ OutputString(" --[data]-->", DECORATION_DIM);
+ break;
+ }
+ OutputString("\n");
+ }
+ OutputString("\n");
+}
+
+bool AreAllPublic(const DepStack& stack) {
+ // Don't check the type of the last one since that doesn't point to anything.
+ for (size_t i = 0; i < stack.size() - 1; i++) {
+ if (stack[i].second != DEP_PUBLIC)
+ return false;
+ }
+ return true;
+}
+
+// Increments state->found_count to reflect how many results are found, and
+// records each discovered path in state->found_public or state->found_other.
+//
+// As an optimization, targets that have no path to the destination are added
+// to state->rejected so this function doesn't waste time revisiting them.
+void RecursiveFindPath(const Options& options,
+ State* state,
+ const Target* current,
+ const Target* desired,
+ DepStack* stack) {
+ if (state->rejected.find(current) != state->rejected.end())
+ return;
+ int initial_found_count = state->found_count;
+
+ if (current == desired) {
+ // Found a path.
+ state->found_count++;
+ stack->push_back(TargetDep(current, DEP_NONE));
+ if (AreAllPublic(*stack))
+ state->found_public.push_back(new DepStack(*stack));
+ else
+ state->found_other.push_back(new DepStack(*stack));
+ stack->pop_back();
+ return;
+ }
+
+ stack->push_back(TargetDep(current, DEP_PUBLIC));
+ for (const auto& pair : current->public_deps())
+ RecursiveFindPath(options, state, pair.ptr, desired, stack);
+
+ if (!options.public_only) {
+ stack->back().second = DEP_PRIVATE;
+ for (const auto& pair : current->private_deps())
+ RecursiveFindPath(options, state, pair.ptr, desired, stack);
+ }
+
+ if (options.with_data) {
+ stack->back().second = DEP_DATA;
+ for (const auto& pair : current->data_deps())
+ RecursiveFindPath(options, state, pair.ptr, desired, stack);
+ }
+
+ stack->pop_back();
+
+ if (state->found_count == initial_found_count)
+ state->rejected.insert(current); // Eliminated this target.
+}
+
+bool StackLengthLess(const DepStack* a, const DepStack* b) {
+ return a->size() < b->size();
+}
+
+// Prints one result vector. The vector will be modified.
+void PrintResultVector(const Options& options, DepStackVector* result) {
+ if (!options.all && !result->empty()) {
+ // Just print the smallest one.
+ PrintDepStack(**std::min_element(result->begin(), result->end(),
+ &StackLengthLess));
+ return;
+ }
+
+ // Print all in order of increasing length.
+ std::sort(result->begin(), result->end(), &StackLengthLess);
+ for (const auto& stack : *result)
+ PrintDepStack(*stack);
+}
+
+void PrintResults(const Options& options, State* state) {
+ PrintResultVector(options, &state->found_public);
+
+ // Consider non-public paths only if all paths are requested or there were
+ // no public paths.
+ if (state->found_public.empty() || options.all)
+ PrintResultVector(options, &state->found_other);
+}
+
+} // namespace
+
+const char kPath[] = "path";
+const char kPath_HelpShort[] =
+ "path: Find paths between two targets.";
+const char kPath_Help[] =
+ "gn path <out_dir> <target_one> <target_two>\n"
+ "\n"
+ " Finds paths of dependencies between two targets. Each unique path\n"
+ " will be printed in one group, and groups will be separate by newlines.\n"
+ " The two targets can appear in either order: paths will be found going\n"
+ " in either direction.\n"
+ "\n"
+ " By default, a single path will be printed. If there is a path with\n"
+ " only public dependencies, the shortest public path will be printed.\n"
+ " Otherwise, the shortest path using either public or private\n"
+ " dependencies will be printed. If --with-data is specified, data deps\n"
+ " will also be considered. If there are multiple shortest paths, an\n"
+ " arbitrary one will be selected.\n"
+ "\n"
+ "Options\n"
+ "\n"
+ " --all\n"
+ " Prints all paths found rather than just the first one. Public paths\n"
+ " will be printed first in order of increasing length, followed by\n"
+ " non-public paths in order of increasing length.\n"
+ "\n"
+ " --public\n"
+ " Considers only public paths. Can't be used with --with-data.\n"
+ "\n"
+ " --with-data\n"
+ " Additionally follows data deps. Without this flag, only public and\n"
+ " private linked deps will be followed. Can't be used with --public.\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " gn path out/Default //base //tools/gn\n";
+
+int RunPath(const std::vector<std::string>& args) {
+ if (args.size() != 3) {
+ Err(Location(), "You're holding it wrong.",
+ "Usage: \"gn path <out_dir> <target_one> <target_two>\"")
+ .PrintToStdout();
+ return 1;
+ }
+
+ Setup* setup = new Setup;
+ if (!setup->DoSetup(args[0], false))
+ return 1;
+ if (!setup->Run())
+ return 1;
+
+ const Target* target1 = ResolveTargetFromCommandLineString(setup, args[1]);
+ if (!target1)
+ return 1;
+ const Target* target2 = ResolveTargetFromCommandLineString(setup, args[2]);
+ if (!target2)
+ return 1;
+
+ Options options;
+ options.all = base::CommandLine::ForCurrentProcess()->HasSwitch("all");
+ options.public_only =
+ base::CommandLine::ForCurrentProcess()->HasSwitch("public");
+ options.with_data =
+ base::CommandLine::ForCurrentProcess()->HasSwitch("with-data");
+ if (options.public_only && options.with_data) {
+ Err(Location(), "Can't use --public with --with-data for 'gn path'.",
+ "Your zealous over-use of arguments has inevitably resulted in an "
+ "invalid\ncombination of flags.").PrintToStdout();
+ return 1;
+ }
+
+ // If we don't find a path going "forwards", try the reverse direction. Deps
+ // can only go in one direction without having a cycle, which will have
+ // caused a run failure above.
+ State state;
+ DepStack stack;
+ RecursiveFindPath(options, &state, target1, target2, &stack);
+ if (state.found_count == 0) {
+ // Need to reset the rejected set for a new invocation since the reverse
+ // search will revisit the same targets looking for something else.
+ state.rejected.clear();
+ RecursiveFindPath(options, &state, target2, target1, &stack);
+ }
+
+ PrintResults(options, &state);
+
+ // This string is inserted in the results to annotate whether the result
+ // is only public or includes data deps or not.
+ const char* path_annotation = "";
+ if (options.public_only)
+ path_annotation = "public ";
+ else if (!options.with_data)
+ path_annotation = "non-data ";
+
+ if (state.found_count == 0) {
+ // No results.
+ OutputString(base::StringPrintf(
+ "No %spaths found between these two targets.\n", path_annotation),
+ DECORATION_YELLOW);
+ } else if (state.found_count == 1) {
+ // Exactly one result.
+ OutputString(base::StringPrintf("1 %spath found.", path_annotation),
+ DECORATION_YELLOW);
+ if (!options.public_only) {
+ if (state.found_public.empty())
+ OutputString(" It is not public.");
+ else
+ OutputString(" It is public.");
+ }
+ OutputString("\n");
+ } else {
+ if (options.all) {
+ // Showing all paths when there are many.
+ OutputString(base::StringPrintf("%d unique %spaths found.",
+ state.found_count, path_annotation),
+ DECORATION_YELLOW);
+ if (!options.public_only) {
+ OutputString(base::StringPrintf(" %d of them are public.",
+ static_cast<int>(state.found_public.size())));
+ }
+ OutputString("\n");
+ } else {
+ // Showing one path when there are many.
+ OutputString(
+ base::StringPrintf("Showing one of %d unique %spaths.",
+ state.found_count, path_annotation),
+ DECORATION_YELLOW);
+ if (!options.public_only) {
+ OutputString(base::StringPrintf(" %d of them are public.\n",
+ static_cast<int>(state.found_public.size())));
+ }
+ OutputString("Use --all to print all paths.\n");
+ }
+ }
+ return 0;
+}
+
+} // namespace commands
diff --git a/chromium/tools/gn/command_refs.cc b/chromium/tools/gn/command_refs.cc
new file mode 100644
index 00000000000..821fd88d7e9
--- /dev/null
+++ b/chromium/tools/gn/command_refs.cc
@@ -0,0 +1,491 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include <map>
+#include <set>
+
+#include "base/command_line.h"
+#include "base/files/file_util.h"
+#include "base/strings/string_split.h"
+#include "tools/gn/commands.h"
+#include "tools/gn/deps_iterator.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/item.h"
+#include "tools/gn/setup.h"
+#include "tools/gn/standard_out.h"
+#include "tools/gn/target.h"
+
+namespace commands {
+
+namespace {
+
+typedef std::set<const Target*> TargetSet;
+typedef std::vector<const Target*> TargetVector;
+
+// Maps targets to the list of targets that depend on them.
+typedef std::multimap<const Target*, const Target*> DepMap;
+
+// Populates the reverse dependency map for the targets in the Setup.
+void FillDepMap(Setup* setup, DepMap* dep_map) {
+ for (const auto& target : setup->builder()->GetAllResolvedTargets()) {
+ for (const auto& dep_pair : target->GetDeps(Target::DEPS_ALL))
+ dep_map->insert(std::make_pair(dep_pair.ptr, target));
+ }
+}
+
+// Forward declaration for function below.
+size_t RecursivePrintTargetDeps(const DepMap& dep_map,
+ const Target* target,
+ TargetSet* seen_targets,
+ int indent_level);
+
+// Prints the target and its dependencies in tree form. If the set is non-null,
+// new targets encountered will be added to the set, and if a ref is in the set
+// already, it will not be recursed into. When the set is null, all refs will be
+// printed.
+//
+// Returns the number of items printed.
+size_t RecursivePrintTarget(const DepMap& dep_map,
+ const Target* target,
+ TargetSet* seen_targets,
+ int indent_level) {
+ std::string indent(indent_level * 2, ' ');
+ size_t count = 1;
+
+ // Only print the toolchain for non-default-toolchain targets.
+ OutputString(indent + target->label().GetUserVisibleName(
+ !target->settings()->is_default()));
+
+ bool print_children = true;
+ if (seen_targets) {
+ if (seen_targets->find(target) == seen_targets->end()) {
+ // New target, mark it visited.
+ seen_targets->insert(target);
+ } else {
+ // Already seen.
+ print_children = false;
+ // Only print "..." if something is actually elided, which means that
+ // the current target has children.
+ if (dep_map.lower_bound(target) != dep_map.upper_bound(target))
+ OutputString("...");
+ }
+ }
+
+ OutputString("\n");
+ if (print_children) {
+ count += RecursivePrintTargetDeps(dep_map, target, seen_targets,
+ indent_level + 1);
+ }
+ return count;
+}
+
+// Prints refs of the given target (not the target itself). See
+// RecursivePrintTarget.
+size_t RecursivePrintTargetDeps(const DepMap& dep_map,
+ const Target* target,
+ TargetSet* seen_targets,
+ int indent_level) {
+ DepMap::const_iterator dep_begin = dep_map.lower_bound(target);
+ DepMap::const_iterator dep_end = dep_map.upper_bound(target);
+ size_t count = 0;
+ for (DepMap::const_iterator cur_dep = dep_begin;
+ cur_dep != dep_end; cur_dep++) {
+ count += RecursivePrintTarget(dep_map, cur_dep->second, seen_targets,
+ indent_level);
+ }
+ return count;
+}
+
+void RecursiveCollectChildRefs(const DepMap& dep_map,
+ const Target* target,
+ TargetSet* results);
+
+// Recursively finds all targets that reference the given one, and additionally
+// adds the current one to the list.
+void RecursiveCollectRefs(const DepMap& dep_map,
+ const Target* target,
+ TargetSet* results) {
+ if (results->find(target) != results->end())
+ return; // Already found this target.
+ results->insert(target);
+ RecursiveCollectChildRefs(dep_map, target, results);
+}
+
+// Recursively finds all targets that reference the given one.
+void RecursiveCollectChildRefs(const DepMap& dep_map,
+ const Target* target,
+ TargetSet* results) {
+ DepMap::const_iterator dep_begin = dep_map.lower_bound(target);
+ DepMap::const_iterator dep_end = dep_map.upper_bound(target);
+ for (DepMap::const_iterator cur_dep = dep_begin;
+ cur_dep != dep_end; cur_dep++)
+ RecursiveCollectRefs(dep_map, cur_dep->second, results);
+}
+
+bool TargetContainsFile(const Target* target, const SourceFile& file) {
+ for (const auto& cur_file : target->sources()) {
+ if (cur_file == file)
+ return true;
+ }
+ for (const auto& cur_file : target->public_headers()) {
+ if (cur_file == file)
+ return true;
+ }
+ for (const auto& cur_file : target->inputs()) {
+ if (cur_file == file)
+ return true;
+ }
+ for (const auto& cur_file : target->data()) {
+ if (cur_file == file.value())
+ return true;
+ }
+
+ std::vector<SourceFile> outputs;
+ target->action_values().GetOutputsAsSourceFiles(target, &outputs);
+ for (const auto& cur_file : outputs) {
+ if (cur_file == file)
+ return true;
+ }
+ return false;
+}
+
+void GetTargetsContainingFile(Setup* setup,
+ const std::vector<const Target*>& all_targets,
+ const SourceFile& file,
+ bool all_toolchains,
+ UniqueVector<const Target*>* matches) {
+ Label default_toolchain = setup->loader()->default_toolchain_label();
+ for (const auto& target : all_targets) {
+ if (!all_toolchains) {
+ // Only check targets in the default toolchain.
+ if (target->label().GetToolchainLabel() != default_toolchain)
+ continue;
+ }
+ if (TargetContainsFile(target, file))
+ matches->push_back(target);
+ }
+}
+
+bool TargetReferencesConfig(const Target* target, const Config* config) {
+ for (const LabelConfigPair& cur : target->configs()) {
+ if (cur.ptr == config)
+ return true;
+ }
+ for (const LabelConfigPair& cur : target->public_configs()) {
+ if (cur.ptr == config)
+ return true;
+ }
+ return false;
+}
+
+void GetTargetsReferencingConfig(Setup* setup,
+ const std::vector<const Target*>& all_targets,
+ const Config* config,
+ bool all_toolchains,
+ UniqueVector<const Target*>* matches) {
+ Label default_toolchain = setup->loader()->default_toolchain_label();
+ for (const auto& target : all_targets) {
+ if (!all_toolchains) {
+ // Only check targets in the default toolchain.
+ if (target->label().GetToolchainLabel() != default_toolchain)
+ continue;
+ }
+ if (TargetReferencesConfig(target, config))
+ matches->push_back(target);
+ }
+}
+
+// Returns the number of matches printed.
+size_t DoTreeOutput(const DepMap& dep_map,
+                    const UniqueVector<const Target*>& implicit_target_matches,
+                    const UniqueVector<const Target*>& explicit_target_matches,
+                    bool all) {
+  TargetSet seen_targets;
+  size_t count = 0;
+
+  // Implicit targets don't get printed themselves; only what refers to them.
+  for (const Target* target : implicit_target_matches) {
+    if (all)
+      count += RecursivePrintTargetDeps(dep_map, target, nullptr, 0);
+    else
+      count += RecursivePrintTargetDeps(dep_map, target, &seen_targets, 0);
+  }
+
+  // Explicit targets appear in the output themselves.
+  for (const Target* target : explicit_target_matches) {
+    if (all)
+      count += RecursivePrintTarget(dep_map, target, nullptr, 0);
+    else
+      count += RecursivePrintTarget(dep_map, target, &seen_targets, 0);
+  }
+
+  return count;
+}
+
+// Returns the number of matches printed.
+size_t DoAllListOutput(
+ const DepMap& dep_map,
+ const UniqueVector<const Target*>& implicit_target_matches,
+ const UniqueVector<const Target*>& explicit_target_matches) {
+ // Output recursive dependencies, uniquified and flattened.
+ TargetSet results;
+
+ for (const Target* target : implicit_target_matches)
+ RecursiveCollectChildRefs(dep_map, target, &results);
+ for (const Target* target : explicit_target_matches) {
+ // Explicit targets also get added to the output themselves.
+ results.insert(target);
+ RecursiveCollectChildRefs(dep_map, target, &results);
+ }
+
+ FilterAndPrintTargetSet(false, results);
+ return results.size();
+}
+
+// Returns the number of matches printed.
+size_t DoDirectListOutput(
+ const DepMap& dep_map,
+ const UniqueVector<const Target*>& implicit_target_matches,
+ const UniqueVector<const Target*>& explicit_target_matches) {
+ TargetSet results;
+
+ // Output everything that refers to the implicit ones.
+ for (const Target* target : implicit_target_matches) {
+ DepMap::const_iterator dep_begin = dep_map.lower_bound(target);
+ DepMap::const_iterator dep_end = dep_map.upper_bound(target);
+ for (DepMap::const_iterator cur_dep = dep_begin;
+ cur_dep != dep_end; cur_dep++)
+ results.insert(cur_dep->second);
+ }
+
+ // And just output the explicit ones directly (these are the target matches
+ // when referring to what references a file or config).
+ for (const Target* target : explicit_target_matches)
+ results.insert(target);
+
+ FilterAndPrintTargetSet(false, results);
+ return results.size();
+}
+
+} // namespace
+
+const char kRefs[] = "refs";
+const char kRefs_HelpShort[] =
+ "refs: Find stuff referencing a target or file.";
+const char kRefs_Help[] =
+ "gn refs <out_dir> (<label_pattern>|<label>|<file>|@<response_file>)* "
+ "[--all]\n"
+ " [--all-toolchains] [--as=...] [--testonly=...] [--type=...]\n"
+ "\n"
+ " Finds reverse dependencies (which targets reference something). The\n"
+ " input is a list containing:\n"
+ "\n"
+ " - Target label: The result will be which targets depend on it.\n"
+ "\n"
+ " - Config label: The result will be which targets list the given\n"
+ " config in its \"configs\" or \"public_configs\" list.\n"
+ "\n"
+ " - Label pattern: The result will be which targets depend on any\n"
+ " target matching the given pattern. Patterns will not match\n"
+ " configs. These are not general regular expressions, see\n"
+ " \"gn help label_pattern\" for details.\n"
+ "\n"
+ " - File name: The result will be which targets list the given file in\n"
+ " its \"inputs\", \"sources\", \"public\", \"data\", or \"outputs\".\n"
+ " Any input that does not contain wildcards and does not match a\n"
+ " target or a config will be treated as a file.\n"
+ "\n"
+ " - Response file: If the input starts with an \"@\", it will be\n"
+ " interpreted as a path to a file containing a list of labels or\n"
+ " file names, one per line. This allows us to handle long lists\n"
+ " of inputs without worrying about command line limits.\n"
+ "\n"
+ "Options\n"
+ "\n"
+ " --all\n"
+ " When used without --tree, will recurse and display all unique\n"
+ " dependencies of the given targets. For example, if the input is\n"
+ " a target, this will output all targets that depend directly or\n"
+ " indirectly on the input. If the input is a file, this will output\n"
+ " all targets that depend directly or indirectly on that file.\n"
+ "\n"
+ " When used with --tree, turns off eliding to show a complete tree.\n"
+ "\n"
+ " --all-toolchains\n"
+ " Normally only inputs in the default toolchain will be included.\n"
+ " This switch will turn on matching all toolchains.\n"
+ "\n"
+ " For example, a file is in a target might be compiled twice:\n"
+ " once in the default toolchain and once in a secondary one. Without\n"
+ " this flag, only the default toolchain one will be matched and\n"
+ " printed (potentially with its recursive dependencies, depending on\n"
+ " the other options). With this flag, both will be printed\n"
+ " (potentially with both of their recursive dependencies).\n"
+ "\n"
+ TARGET_PRINTING_MODE_COMMAND_LINE_HELP
+ "\n"
+ " -q\n"
+ " Quiet. If nothing matches, don't print any output. Without this\n"
+ " option, if there are no matches there will be an informational\n"
+ " message printed which might interfere with scripts processing the\n"
+ " output.\n"
+ "\n"
+ TARGET_TESTONLY_FILTER_COMMAND_LINE_HELP
+ "\n"
+ " --tree\n"
+ " Outputs a reverse dependency tree from the given target.\n"
+ " Duplicates will be elided. Combine with --all to see a full\n"
+ " dependency tree.\n"
+ "\n"
+ " Tree output can not be used with the filtering or output flags:\n"
+ " --as, --type, --testonly.\n"
+ "\n"
+ TARGET_TYPE_FILTER_COMMAND_LINE_HELP
+ "\n"
+ "Examples (target input)\n"
+ "\n"
+ " gn refs out/Debug //tools/gn:gn\n"
+ " Find all targets depending on the given exact target name.\n"
+ "\n"
+ " gn refs out/Debug //base:i18n --as=buildfiles | xargs gvim\n"
+ " Edit all .gn files containing references to //base:i18n\n"
+ "\n"
+ " gn refs out/Debug //base --all\n"
+ " List all targets depending directly or indirectly on //base:base.\n"
+ "\n"
+ " gn refs out/Debug \"//base/*\"\n"
+ " List all targets depending directly on any target in //base or\n"
+ " its subdirectories.\n"
+ "\n"
+ " gn refs out/Debug \"//base:*\"\n"
+ " List all targets depending directly on any target in\n"
+ " //base/BUILD.gn.\n"
+ "\n"
+ " gn refs out/Debug //base --tree\n"
+ " Print a reverse dependency tree of //base:base\n"
+ "\n"
+ "Examples (file input)\n"
+ "\n"
+ " gn refs out/Debug //base/macros.h\n"
+ " Print target(s) listing //base/macros.h as a source.\n"
+ "\n"
+ " gn refs out/Debug //base/macros.h --tree\n"
+ " Display a reverse dependency tree to get to the given file. This\n"
+ " will show how dependencies will reference that file.\n"
+ "\n"
+ " gn refs out/Debug //base/macros.h //base/at_exit.h --all\n"
+ " Display all unique targets with some dependency path to a target\n"
+ " containing either of the given files as a source.\n"
+ "\n"
+ " gn refs out/Debug //base/macros.h --testonly=true --type=executable\n"
+ " --all --as=output\n"
+ " Display the executable file names of all test executables\n"
+ " potentially affected by a change to the given file.\n";
+
+int RunRefs(const std::vector<std::string>& args) {
+ if (args.size() <= 1) {
+ Err(Location(), "You're holding it wrong.",
+ "Usage: \"gn refs <out_dir> (<label_pattern>|<file>)*\"")
+ .PrintToStdout();
+ return 1;
+ }
+
+ const base::CommandLine* cmdline = base::CommandLine::ForCurrentProcess();
+ bool tree = cmdline->HasSwitch("tree");
+ bool all = cmdline->HasSwitch("all");
+ bool all_toolchains = cmdline->HasSwitch("all-toolchains");
+
+ Setup* setup = new Setup;
+ setup->build_settings().set_check_for_bad_items(false);
+ if (!setup->DoSetup(args[0], false) || !setup->Run())
+ return 1;
+
+ // The inputs are everything but the first arg (which is the build dir).
+ std::vector<std::string> inputs;
+ for (size_t i = 1; i < args.size(); i++) {
+ if (args[i][0] == '@') {
+ // The argument is as a path to a response file.
+ std::string contents;
+ bool ret = base::ReadFileToString(UTF8ToFilePath(args[i].substr(1)),
+ &contents);
+ if (!ret) {
+ Err(Location(), "Response file " + args[i].substr(1) + " not found.")
+ .PrintToStdout();
+ return 1;
+ }
+ for (const std::string& line : base::SplitString(
+ contents, "\n", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL)) {
+ if (!line.empty())
+ inputs.push_back(line);
+ }
+ } else {
+ // The argument is a label or a path.
+ inputs.push_back(args[i]);
+ }
+ }
+
+ // Get the matches for the command-line input.
+ UniqueVector<const Target*> target_matches;
+ UniqueVector<const Config*> config_matches;
+ UniqueVector<const Toolchain*> toolchain_matches;
+ UniqueVector<SourceFile> file_matches;
+ if (!ResolveFromCommandLineInput(setup, inputs, all_toolchains,
+ &target_matches, &config_matches,
+ &toolchain_matches, &file_matches))
+ return 1;
+
+ // When you give a file or config as an input, you want the targets that are
+ // associated with it. We don't want to just append this to the
+ // target_matches, however, since these targets should actually be listed in
+ // the output, while for normal targets you don't want to see the inputs,
+ // only what refers to them.
+ std::vector<const Target*> all_targets =
+ setup->builder()->GetAllResolvedTargets();
+ UniqueVector<const Target*> explicit_target_matches;
+ for (const auto& file : file_matches) {
+ GetTargetsContainingFile(setup, all_targets, file, all_toolchains,
+ &explicit_target_matches);
+ }
+ for (const auto& config : config_matches) {
+ GetTargetsReferencingConfig(setup, all_targets, config, all_toolchains,
+ &explicit_target_matches);
+ }
+
+ // Tell the user if their input matches no files or labels. We need to check
+ // both that it matched no targets and no configs. File input will already
+ // have been converted to targets at this point. Configs will have been
+ // converted to targets also, but there could be no targets referencing the
+ // config, which is different than no config with that name.
+ bool quiet = cmdline->HasSwitch("q");
+ if (!quiet && config_matches.empty() &&
+ explicit_target_matches.empty() && target_matches.empty()) {
+ OutputString("The input matches no targets, configs, or files.\n",
+ DECORATION_YELLOW);
+ return 1;
+ }
+
+ // Construct the reverse dependency tree.
+ DepMap dep_map;
+ FillDepMap(setup, &dep_map);
+
+ size_t cnt = 0;
+ if (tree)
+ cnt = DoTreeOutput(dep_map, target_matches, explicit_target_matches, all);
+ else if (all)
+ cnt = DoAllListOutput(dep_map, target_matches, explicit_target_matches);
+ else
+ cnt = DoDirectListOutput(dep_map, target_matches, explicit_target_matches);
+
+ // If you ask for the references of a valid target, but that target has
+ // nothing referencing it, we'll get here without having printed anything.
+ if (!quiet && cnt == 0)
+ OutputString("Nothing references this.\n", DECORATION_YELLOW);
+
+ return 0;
+}
+
+} // namespace commands
diff --git a/chromium/tools/gn/commands.cc b/chromium/tools/gn/commands.cc
new file mode 100644
index 00000000000..ed7a009d6af
--- /dev/null
+++ b/chromium/tools/gn/commands.cc
@@ -0,0 +1,502 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/commands.h"
+
+#include "base/command_line.h"
+#include "tools/gn/builder.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/item.h"
+#include "tools/gn/label.h"
+#include "tools/gn/label_pattern.h"
+#include "tools/gn/setup.h"
+#include "tools/gn/standard_out.h"
+#include "tools/gn/target.h"
+
+namespace commands {
+
+namespace {
+
// Like above but the input string can be a pattern that matches multiple
// targets. If the input does not parse as a pattern, prints an error and
// returns false. If the pattern is valid, fills the vector (which might be
// empty if there are no matches) and returns true.
//
// If all_toolchains is false, a pattern with an unspecified toolchain will
// match the default toolchain only. If true, all toolchains will be matched.
bool ResolveTargetsFromCommandLinePattern(
    Setup* setup,
    const std::string& label_pattern,
    bool all_toolchains,
    std::vector<const Target*>* matches) {
  // Wrap the raw string in a Value so the pattern parser can attach error
  // context (command-line input has no real parse node, hence nullptr).
  Value pattern_value(nullptr, label_pattern);

  Err err;
  LabelPattern pattern = LabelPattern::GetPattern(
      SourceDirForCurrentDirectory(setup->build_settings().root_path()),
      pattern_value,
      &err);
  if (err.has_error()) {
    err.PrintToStdout();
    return false;
  }

  if (!all_toolchains) {
    // By default a pattern with an empty toolchain will match all toolchains.
    // If the caller wants to default to the main toolchain only, set it
    // explicitly.
    if (pattern.toolchain().is_null()) {
      // No explicit toolchain set.
      pattern.set_toolchain(setup->loader()->default_toolchain_label());
    }
  }

  // FilterTargetsByPatterns takes a vector of patterns; we have exactly one.
  std::vector<LabelPattern> pattern_vector;
  pattern_vector.push_back(pattern);
  FilterTargetsByPatterns(setup->builder()->GetAllResolvedTargets(),
                          pattern_vector, matches);
  return true;
}
+
+
// Resolves one command-line input string into the set of things it can name.
// A wildcard input is matched against targets only; otherwise the input is
// tried as a label naming a target/config/toolchain, and anything that is
// neither a valid label nor a known item falls through to being a file.
// If there's an error, it will be printed and false will be returned.
bool ResolveStringFromCommandLineInput(
    Setup* setup,
    const SourceDir& current_dir,
    const std::string& input,
    bool all_toolchains,
    UniqueVector<const Target*>* target_matches,
    UniqueVector<const Config*>* config_matches,
    UniqueVector<const Toolchain*>* toolchain_matches,
    UniqueVector<SourceFile>* file_matches) {
  if (LabelPattern::HasWildcard(input)) {
    // For now, only match patterns against targets. It might be nice in the
    // future to allow the user to specify which types of things they want to
    // match, but it should probably only match targets by default.
    std::vector<const Target*> target_match_vector;
    if (!ResolveTargetsFromCommandLinePattern(setup, input, all_toolchains,
                                              &target_match_vector))
      return false;
    for (const Target* target : target_match_vector)
      target_matches->push_back(target);
    return true;
  }

  // Try to figure out what this thing is.
  Err err;
  Label label = Label::Resolve(current_dir,
                               setup->loader()->default_toolchain_label(),
                               Value(nullptr, input), &err);
  if (err.has_error()) {
    // Not a valid label, assume this must be a file. Clear the label error
    // before reusing |err| for file resolution.
    err = Err();
    file_matches->push_back(current_dir.ResolveRelativeFile(
        Value(nullptr, input), &err, setup->build_settings().root_path_utf8()));
    if (err.has_error()) {
      err.PrintToStdout();
      return false;
    }
    return true;
  }

  const Item* item = setup->builder()->GetItem(label);
  if (item) {
    // Dispatch on the concrete item kind; exactly one AsXxx() is non-null.
    if (const Config* as_config = item->AsConfig())
      config_matches->push_back(as_config);
    else if (const Target* as_target = item->AsTarget())
      target_matches->push_back(as_target);
    else if (const Toolchain* as_toolchain = item->AsToolchain())
      toolchain_matches->push_back(as_toolchain);
  } else {
    // Not an item, assume this must be a file.
    file_matches->push_back(current_dir.ResolveRelativeFile(
        Value(nullptr, input), &err, setup->build_settings().root_path_utf8()));
    if (err.has_error()) {
      err.PrintToStdout();
      return false;
    }
  }

  return true;
}
+
// Output formats for printing a set of targets; selected via the --as switch.
enum TargetPrintingMode {
  TARGET_PRINT_BUILDFILE,  // Print the build file that declared the target.
  TARGET_PRINT_LABEL,      // Print the GN label (the default).
  TARGET_PRINT_OUTPUT,     // Print the target's output file.
};

// Retrieves the target printing mode based on the command line flags for the
// current process. Returns true on success. On error, prints a message to the
// console and returns false.
bool GetTargetPrintingMode(TargetPrintingMode* mode) {
  std::string switch_key = "as";
  const base::CommandLine* cmdline = base::CommandLine::ForCurrentProcess();

  if (!cmdline->HasSwitch(switch_key)) {
    // Default to labels.
    *mode = TARGET_PRINT_LABEL;
    return true;
  }

  std::string value = cmdline->GetSwitchValueASCII(switch_key);
  if (value == "buildfile") {
    *mode = TARGET_PRINT_BUILDFILE;
    return true;
  }
  if (value == "label") {
    *mode = TARGET_PRINT_LABEL;
    return true;
  }
  if (value == "output") {
    *mode = TARGET_PRINT_OUTPUT;
    return true;
  }

  // Unrecognized value: report it back to the user verbatim.
  Err(Location(), "Invalid value for \"--as\".",
      "I was expecting \"buildfile\", \"label\", or \"output\" but you\n"
      "said \"" + value + "\".").PrintToStdout();
  return false;
}
+
+// Returns the target type filter based on the command line flags for the
+// current process. Returns true on success. On error, prints a message to the
+// console and returns false.
+//
+// Target::UNKNOWN will be set if there is no filter. Target::ACTION_FOREACH
+// will never be returned. Code applying the filters should apply Target::ACTION
+// to both ACTION and ACTION_FOREACH.
+bool GetTargetTypeFilter(Target::OutputType* type) {
+ std::string switch_key = "type";
+ const base::CommandLine* cmdline = base::CommandLine::ForCurrentProcess();
+
+ if (!cmdline->HasSwitch(switch_key)) {
+ // Default to unknown -> no filtering.
+ *type = Target::UNKNOWN;
+ return true;
+ }
+
+ std::string value = cmdline->GetSwitchValueASCII(switch_key);
+ if (value == "group") {
+ *type = Target::GROUP;
+ return true;
+ }
+ if (value == "executable") {
+ *type = Target::EXECUTABLE;
+ return true;
+ }
+ if (value == "shared_library") {
+ *type = Target::SHARED_LIBRARY;
+ return true;
+ }
+ if (value == "loadable_module") {
+ *type = Target::LOADABLE_MODULE;
+ return true;
+ }
+ if (value == "static_library") {
+ *type = Target::STATIC_LIBRARY;
+ return true;
+ }
+ if (value == "source_set") {
+ *type = Target::SOURCE_SET;
+ return true;
+ }
+ if (value == "copy") {
+ *type = Target::COPY_FILES;
+ return true;
+ }
+ if (value == "action") {
+ *type = Target::ACTION;
+ return true;
+ }
+
+ Err(Location(), "Invalid value for \"--type\".").PrintToStdout();
+ return false;
+}
+
+
// Applies any testonly filtering specified on the command line to the given
// target set. On failure, prints an error and returns false.
bool ApplyTestonlyFilter(std::vector<const Target*>* targets) {
  const base::CommandLine* cmdline = base::CommandLine::ForCurrentProcess();
  std::string testonly_key = "testonly";

  // No input or no --testonly switch: nothing to filter, succeed unchanged.
  if (targets->empty() || !cmdline->HasSwitch(testonly_key))
    return true;

  // Only the exact strings "true" and "false" are accepted.
  std::string testonly_value = cmdline->GetSwitchValueASCII(testonly_key);
  bool testonly = false;
  if (testonly_value == "true") {
    testonly = true;
  } else if (testonly_value != "false") {
    Err(Location(), "Bad value for --testonly.",
        "I was expecting --testonly=true or --testonly=false.")
        .PrintToStdout();
    return false;
  }

  // Filter into a copy of the vector, then swap to output.
  std::vector<const Target*> result;
  result.reserve(targets->size());

  for (const Target* target : *targets) {
    if (target->testonly() == testonly)
      result.push_back(target);
  }

  targets->swap(result);
  return true;
}
+
// Applies any target type filtering specified on the command line to the given
// target set. On failure, prints an error and returns false.
bool ApplyTypeFilter(std::vector<const Target*>* targets) {
  Target::OutputType type = Target::UNKNOWN;
  if (!GetTargetTypeFilter(&type))
    return false;
  if (targets->empty() || type == Target::UNKNOWN)
    return true;  // Nothing to filter out.

  // Filter into a copy of the vector, then swap to output.
  std::vector<const Target*> result;
  result.reserve(targets->size());

  for (const Target* target : *targets) {
    // Make "action" also apply to ACTION_FOREACH.
    if (target->output_type() == type ||
        (type == Target::ACTION &&
         target->output_type() == Target::ACTION_FOREACH))
      result.push_back(target);
  }

  targets->swap(result);
  return true;
}
+
// Returns the file path generating this item: it chains from the item's
// defining parse node through its source range to the physical input file.
base::FilePath BuildFileForItem(const Item* item) {
  return item->defined_from()->GetRange().begin().file()->physical_name();
}
+
+void PrintTargetsAsBuildfiles(bool indent,
+ const std::vector<const Target*>& targets) {
+ // Output the set of unique source files.
+ std::set<std::string> unique_files;
+ for (const Target* target : targets)
+ unique_files.insert(FilePathToUTF8(BuildFileForItem(target)));
+
+ for (const std::string& file : unique_files) {
+ if (indent)
+ OutputString(" ");
+ OutputString(file + "\n");
+ }
+}
+
+void PrintTargetsAsLabels(bool indent,
+ const std::vector<const Target*>& targets) {
+ // Putting the labels into a set automatically sorts them for us.
+ std::set<Label> unique_labels;
+ for (const auto& target : targets)
+ unique_labels.insert(target->label());
+
+ // Grab the label of the default toolchain from the first target.
+ Label default_tc_label =
+ targets[0]->settings()->default_toolchain_label();
+
+ for (const Label& label : unique_labels) {
+ // Print toolchain only for ones not in the default toolchain.
+ if (indent)
+ OutputString(" ");
+ OutputString(label.GetUserVisibleName(
+ label.GetToolchainLabel() != default_tc_label));
+ OutputString("\n");
+ }
+}
+
// Prints the primary output file of each target, one per line, rebased to be
// relative to the root build directory. If |indent| is set, each line is
// indented two spaces.
void PrintTargetsAsOutputs(bool indent,
                           const std::vector<const Target*>& targets) {
  if (targets.empty())
    return;

  // Grab the build settings from a random target.
  const BuildSettings* build_settings =
      targets[0]->settings()->build_settings();

  for (const Target* target : targets) {
    // Use the link output file if there is one, otherwise fall back to the
    // dependency output file (for actions, for example).
    OutputFile output_file = target->link_output_file();
    if (output_file.value().empty())
      output_file = target->dependency_output_file();

    SourceFile output_as_source =
        output_file.AsSourceFile(build_settings);
    std::string result = RebasePath(output_as_source.value(),
                                    build_settings->build_dir(),
                                    build_settings->root_path_utf8());
    if (indent)
      OutputString("  ");
    OutputString(result);
    OutputString("\n");
  }
}
+
+} // namespace
+
// Zero-initializing constructor. Needed because CommandInfo is stored by
// value in a std::map (see GetCommands()), which default-constructs entries.
CommandInfo::CommandInfo()
    : help_short(nullptr),
      help(nullptr),
      runner(nullptr) {
}

// Constructs a fully-populated entry; the string pointers are expected to be
// the static kXxx_HelpShort/kXxx_Help constants declared in commands.h.
CommandInfo::CommandInfo(const char* in_help_short,
                         const char* in_help,
                         CommandRunner in_runner)
    : help_short(in_help_short),
      help(in_help),
      runner(in_runner) {
}
+
// Returns the singleton map of command name -> CommandInfo for all gn
// commands. The map is built lazily on first call and reused afterwards.
const CommandInfoMap& GetCommands() {
  static CommandInfoMap info_map;
  if (info_map.empty()) {
    // Expands to: info_map[kFoo] = CommandInfo(kFoo_HelpShort, kFoo_Help,
    // &RunFoo); relying on the naming convention declared in commands.h.
    #define INSERT_COMMAND(cmd) \
        info_map[k##cmd] = CommandInfo(k##cmd##_HelpShort, \
                                       k##cmd##_Help, \
                                       &Run##cmd);

    INSERT_COMMAND(Args)
    INSERT_COMMAND(Check)
    INSERT_COMMAND(Clean)
    INSERT_COMMAND(Desc)
    INSERT_COMMAND(Gen)
    INSERT_COMMAND(Format)
    INSERT_COMMAND(Help)
    INSERT_COMMAND(Ls)
    INSERT_COMMAND(Path)
    INSERT_COMMAND(Refs)

    #undef INSERT_COMMAND
  }
  return info_map;
}
+
// Resolves |label_string| (relative to the current directory) to a resolved
// Target. On failure — bad label, unknown label, or a label naming something
// that is not a target — prints the error and returns null.
const Target* ResolveTargetFromCommandLineString(
    Setup* setup,
    const std::string& label_string) {
  // Need to resolve the label after we know the default toolchain.
  Label default_toolchain = setup->loader()->default_toolchain_label();
  Value arg_value(nullptr, label_string);
  Err err;
  Label label = Label::Resolve(SourceDirForCurrentDirectory(
                                   setup->build_settings().root_path()),
                               default_toolchain, arg_value, &err);
  if (err.has_error()) {
    err.PrintToStdout();
    return nullptr;
  }

  const Item* item = setup->builder()->GetItem(label);
  if (!item) {
    Err(Location(), "Label not found.",
        label.GetUserVisibleName(false) + " not found.").PrintToStdout();
    return nullptr;
  }

  // The label may name a config or toolchain; only targets are supported.
  const Target* target = item->AsTarget();
  if (!target) {
    Err(Location(), "Not a target.",
        "The \"" + label.GetUserVisibleName(false) + "\" thing\n"
        "is not a target. Somebody should probably implement this command for "
        "other\nitem types.").PrintToStdout();
    return nullptr;
  }

  return target;
}
+
// Resolves each command-line input in turn (see commands.h for the full
// contract), accumulating matches into the output vectors. Stops and returns
// false at the first input that fails to resolve; errors are printed by the
// per-string helper.
bool ResolveFromCommandLineInput(
    Setup* setup,
    const std::vector<std::string>& input,
    bool all_toolchains,
    UniqueVector<const Target*>* target_matches,
    UniqueVector<const Config*>* config_matches,
    UniqueVector<const Toolchain*>* toolchain_matches,
    UniqueVector<SourceFile>* file_matches) {
  if (input.empty()) {
    Err(Location(), "You need to specify a label, file, or pattern.")
        .PrintToStdout();
    return false;
  }

  // Relative labels/files are resolved against the user's current directory.
  SourceDir cur_dir =
      SourceDirForCurrentDirectory(setup->build_settings().root_path());
  for (const auto& cur : input) {
    if (!ResolveStringFromCommandLineInput(setup, cur_dir, cur,
                                           all_toolchains, target_matches,
                                           config_matches, toolchain_matches,
                                           file_matches))
      return false;
  }
  return true;
}
+
+void FilterTargetsByPatterns(const std::vector<const Target*>& input,
+ const std::vector<LabelPattern>& filter,
+ std::vector<const Target*>* output) {
+ for (const auto& target : input) {
+ for (const auto& pattern : filter) {
+ if (pattern.Matches(target->label())) {
+ output->push_back(target);
+ break;
+ }
+ }
+ }
+}
+
+void FilterTargetsByPatterns(const std::vector<const Target*>& input,
+ const std::vector<LabelPattern>& filter,
+ UniqueVector<const Target*>* output) {
+ for (const auto& target : input) {
+ for (const auto& pattern : filter) {
+ if (pattern.Matches(target->label())) {
+ output->push_back(target);
+ break;
+ }
+ }
+ }
+}
+
// Applies the --testonly and --type command-line filters to |targets| (in
// place), then prints the survivors in the format chosen by --as. Filter or
// flag errors have already been printed by the helpers, so this just returns.
void FilterAndPrintTargets(bool indent, std::vector<const Target*>* targets) {
  if (targets->empty())
    return;

  if (!ApplyTestonlyFilter(targets))
    return;
  if (!ApplyTypeFilter(targets))
    return;

  TargetPrintingMode printing_mode = TARGET_PRINT_LABEL;
  // Re-check emptiness: the filters above may have removed everything.
  if (targets->empty() || !GetTargetPrintingMode(&printing_mode))
    return;
  switch (printing_mode) {
    case TARGET_PRINT_BUILDFILE:
      PrintTargetsAsBuildfiles(indent, *targets);
      break;
    case TARGET_PRINT_LABEL:
      PrintTargetsAsLabels(indent, *targets);
      break;
    case TARGET_PRINT_OUTPUT:
      PrintTargetsAsOutputs(indent, *targets);
      break;
  }
}
+
+void FilterAndPrintTargetSet(bool indent,
+ const std::set<const Target*>& targets) {
+ std::vector<const Target*> target_vector(targets.begin(), targets.end());
+ FilterAndPrintTargets(indent, &target_vector);
+}
+
+} // namespace commands
diff --git a/chromium/tools/gn/commands.h b/chromium/tools/gn/commands.h
new file mode 100644
index 00000000000..9d8af5cb7d0
--- /dev/null
+++ b/chromium/tools/gn/commands.h
@@ -0,0 +1,184 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_COMMANDS_H_
+#define TOOLS_GN_COMMANDS_H_
+
+#include <map>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "base/strings/string_piece.h"
+#include "tools/gn/target.h"
+#include "tools/gn/unique_vector.h"
+
+class BuildSettings;
+class Config;
+class LabelPattern;
+class Setup;
+class SourceFile;
+class Target;
+class Toolchain;
+
+// Each "Run" command returns the value we should return from main().
+
+namespace commands {
+
+typedef int (*CommandRunner)(const std::vector<std::string>&);
+
+extern const char kArgs[];
+extern const char kArgs_HelpShort[];
+extern const char kArgs_Help[];
+int RunArgs(const std::vector<std::string>& args);
+
+extern const char kCheck[];
+extern const char kCheck_HelpShort[];
+extern const char kCheck_Help[];
+int RunCheck(const std::vector<std::string>& args);
+
+extern const char kClean[];
+extern const char kClean_HelpShort[];
+extern const char kClean_Help[];
+int RunClean(const std::vector<std::string>& args);
+
+extern const char kDesc[];
+extern const char kDesc_HelpShort[];
+extern const char kDesc_Help[];
+int RunDesc(const std::vector<std::string>& args);
+
+extern const char kGen[];
+extern const char kGen_HelpShort[];
+extern const char kGen_Help[];
+int RunGen(const std::vector<std::string>& args);
+
+extern const char kFormat[];
+extern const char kFormat_HelpShort[];
+extern const char kFormat_Help[];
+int RunFormat(const std::vector<std::string>& args);
+
+extern const char kHelp[];
+extern const char kHelp_HelpShort[];
+extern const char kHelp_Help[];
+int RunHelp(const std::vector<std::string>& args);
+
+extern const char kLs[];
+extern const char kLs_HelpShort[];
+extern const char kLs_Help[];
+int RunLs(const std::vector<std::string>& args);
+
+extern const char kPath[];
+extern const char kPath_HelpShort[];
+extern const char kPath_Help[];
+int RunPath(const std::vector<std::string>& args);
+
+extern const char kRefs[];
+extern const char kRefs_HelpShort[];
+extern const char kRefs_Help[];
+int RunRefs(const std::vector<std::string>& args);
+
+// -----------------------------------------------------------------------------
+
// Metadata for one gn command: its help strings plus the function that
// implements it. Stored by command name in a CommandInfoMap (see GetCommands).
struct CommandInfo {
  CommandInfo();
  CommandInfo(const char* in_help_short,
              const char* in_help,
              CommandRunner in_runner);

  const char* help_short;  // One-line summary shown in command listings.
  const char* help;        // Full help text for "gn help <command>".
  CommandRunner runner;    // Entry point invoked to execute the command.
};
+
+typedef std::map<base::StringPiece, CommandInfo> CommandInfoMap;
+
+const CommandInfoMap& GetCommands();
+
+// Helper functions for some commands ------------------------------------------
+
+// Given a setup that has already been run and some command-line input,
+// resolves that input as a target label and returns the corresponding target.
+// On failure, returns null and prints the error to the standard output.
+const Target* ResolveTargetFromCommandLineString(
+ Setup* setup,
+ const std::string& label_string);
+
+// Resolves a vector of command line inputs and figures out the full set of
+// things they resolve to.
+//
+// Patterns with wildcards will only match targets. The file_matches aren't
+// validated that they are real files or referenced by any targets. They're just
+// the set of things that didn't match anything else.
+bool ResolveFromCommandLineInput(
+ Setup* setup,
+ const std::vector<std::string>& input,
+ bool all_toolchains,
+ UniqueVector<const Target*>* target_matches,
+ UniqueVector<const Config*>* config_matches,
+ UniqueVector<const Toolchain*>* toolchain_matches,
+ UniqueVector<SourceFile>* file_matches);
+
+// Runs the header checker. All targets in the build should be given in
+// all_targets, and the specific targets to check should be in to_check.
+//
+// force_check, if true, will override targets opting out of header checking
+// with "check_includes = false" and will check them anyway.
+//
+// On success, returns true. If the check fails, the error(s) will be printed
+// to stdout and false will be returned.
+bool CheckPublicHeaders(const BuildSettings* build_settings,
+ const std::vector<const Target*>& all_targets,
+ const std::vector<const Target*>& to_check,
+ bool force_check);
+
+// Filters the given list of targets by the given pattern list.
+void FilterTargetsByPatterns(const std::vector<const Target*>& input,
+ const std::vector<LabelPattern>& filter,
+ std::vector<const Target*>* output);
+void FilterTargetsByPatterns(const std::vector<const Target*>& input,
+ const std::vector<LabelPattern>& filter,
+ UniqueVector<const Target*>* output);
+
+// These are the documentation strings for the command-line flags used by
+// FilterAndPrintTargets. Commands that call that function should incorporate
+// these into their help.
+#define TARGET_PRINTING_MODE_COMMAND_LINE_HELP \
+ " --as=(buildfile|label|output)\n"\
+ " How to print targets.\n"\
+ "\n"\
+ " buildfile\n"\
+ " Prints the build files where the given target was declared as\n"\
+ " file names.\n"\
+ " label (default)\n"\
+ " Prints the label of the target.\n"\
+ " output\n"\
+ " Prints the first output file for the target relative to the\n"\
+ " root build directory.\n"
+#define TARGET_TYPE_FILTER_COMMAND_LINE_HELP \
+ " --type=(action|copy|executable|group|loadable_module|shared_library|\n"\
+ " source_set|static_library)\n"\
+ " Restrict outputs to targets matching the given type. If\n"\
+ " unspecified, no filtering will be performed.\n"
+#define TARGET_TESTONLY_FILTER_COMMAND_LINE_HELP \
+ " --testonly=(true|false)\n"\
+ " Restrict outputs to targets with the testonly flag set\n"\
+ " accordingly. When unspecified, the target's testonly flags are\n"\
+ " ignored.\n"
+
+// Applies any testonly and type filters specified on the command line,
+// and prints the targets as specified by the --as command line flag.
+//
+// If indent is true, the results will be indented two spaces.
+//
+// The vector will be modified so that only the printed targets will remain.
+void FilterAndPrintTargets(bool indent, std::vector<const Target*>* targets);
+void FilterAndPrintTargetSet(bool indent,
+ const std::set<const Target*>& targets);
+
+// Extra help from command_check.cc
+extern const char kNoGnCheck_Help[];
+
+} // namespace commands
+
+#endif // TOOLS_GN_COMMANDS_H_
diff --git a/chromium/tools/gn/config.cc b/chromium/tools/gn/config.cc
new file mode 100644
index 00000000000..c06632449a4
--- /dev/null
+++ b/chromium/tools/gn/config.cc
@@ -0,0 +1,51 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/config.h"
+
+#include "tools/gn/err.h"
+#include "tools/gn/input_file_manager.h"
+#include "tools/gn/scheduler.h"
+
// A config starts out unresolved; resolved_ is set by OnResolved() once the
// values of any sub-configs are known.
Config::Config(const Settings* settings, const Label& label)
    : Item(settings, label),
      resolved_(false) {
}

Config::~Config() {
}

// Item implementation: identify this Item as a Config for checked downcasts.
Config* Config::AsConfig() {
  return this;
}

const Config* Config::AsConfig() const {
  return this;
}
+
// Called once this config's dependencies are resolved. Flattens any
// sub-config values into composite_values_ (own values first, then each
// sub-config's resolved values in order) so consumers don't need to walk the
// config graph. Always succeeds; |err| is part of the Item interface.
bool Config::OnResolved(Err* err) {
  DCHECK(!resolved_);
  resolved_ = true;

  if (!configs_.empty()) {
    // Subconfigs, flatten.
    //
    // Implementation note for the future: Flattening these here means we
    // lose the ability to de-dupe subconfigs. If a subconfig is listed as
    // a separate config or a subconfig that also applies to the target, the
    // subconfig's flags will be duplicated.
    //
    // If we want to be able to de-dupe these, here's one idea. As a config is
    // resolved, inline any sub-sub configs so the configs_ vector is a flat
    // list, much the same way that libs and lib_dirs are pushed through
    // targets. Do the same for Target.configs_ when a target is resolved. This
    // will naturally de-dupe and also prevents recursive config walking to
    // compute every possible flag, although it will expand the configs list on
    // a target nontrivially (depending on build configuration).
    composite_values_ = own_values_;
    for (const auto& pair : configs_)
      composite_values_.AppendValues(pair.ptr->resolved_values());
  }
  return true;
}
diff --git a/chromium/tools/gn/config.h b/chromium/tools/gn/config.h
new file mode 100644
index 00000000000..20cfe7e40dc
--- /dev/null
+++ b/chromium/tools/gn/config.h
@@ -0,0 +1,65 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_CONFIG_H_
+#define TOOLS_GN_CONFIG_H_
+
+#include "base/logging.h"
+#include "base/macros.h"
+#include "tools/gn/config_values.h"
+#include "tools/gn/item.h"
+#include "tools/gn/label_ptr.h"
+#include "tools/gn/unique_vector.h"
+
// Represents a named config in the dependency graph.
//
// A config can list other configs. We track both the data assigned directly
// on the config, this list of sub-configs, and (when the config is resolved)
// the resulting values of everything merged together. The flatten step
// means we can avoid doing a recursive config walk for every target to compute
// flags.
class Config : public Item {
 public:
  Config(const Settings* settings, const Label& label);
  ~Config() override;

  // Item implementation.
  Config* AsConfig() override;
  const Config* AsConfig() const override;
  bool OnResolved(Err* err) override;

  // The values set directly on this config. This will not contain data from
  // sub-configs.
  ConfigValues& own_values() { return own_values_; }
  const ConfigValues& own_values() const { return own_values_; }

  // The values that represent this config and all sub-configs combined into
  // one. This is only valid after the config is resolved (when we know the
  // contents of the sub-configs).
  const ConfigValues& resolved_values() const {
    DCHECK(resolved_);
    if (configs_.empty())  // No sub configs, just use the regular values.
      return own_values_;
    return composite_values_;
  }

  // List of sub-configs.
  const UniqueVector<LabelConfigPair>& configs() const { return configs_; }
  UniqueVector<LabelConfigPair>& configs() { return configs_; }

 private:
  ConfigValues own_values_;

  // Contains the own_values combined with sub-configs. Most configs don't have
  // sub-configs. So as an optimization, this is not populated if there are no
  // items in configs_. The resolved_values() getter handles this.
  // resolved_ is set by OnResolved() and is only checked via DCHECK.
  bool resolved_;
  ConfigValues composite_values_;

  UniqueVector<LabelConfigPair> configs_;

  DISALLOW_COPY_AND_ASSIGN(Config);
};
+
+#endif // TOOLS_GN_CONFIG_H_
diff --git a/chromium/tools/gn/config_unittest.cc b/chromium/tools/gn/config_unittest.cc
new file mode 100644
index 00000000000..9a1e01df62f
--- /dev/null
+++ b/chromium/tools/gn/config_unittest.cc
@@ -0,0 +1,85 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/config.h"
+#include "tools/gn/test_with_scope.h"
+
// Tests that the "resolved" values are the same as "own" values when there
// are no subconfigs.
TEST(Config, ResolvedNoSub) {
  TestWithScope setup;
  Err err;

  Config config(setup.settings(), Label(SourceDir("//foo/"), "bar"));
  config.own_values().defines().push_back("FOO");
  ASSERT_TRUE(config.OnResolved(&err));

  // The resolved values should be the same as the value we put in to
  // own_values().
  ASSERT_EQ(1u, config.resolved_values().defines().size());
  EXPECT_EQ("FOO", config.resolved_values().defines()[0]);

  // As an optimization, resolved_values() should actually return a reference
  // to the same object as own_values() when there are no subconfigs. This
  // isn't required to pass for semantic correctness, though.
  EXPECT_TRUE(&config.own_values() == &config.resolved_values());
}
+
// Tests that subconfigs are resolved in the correct order.
TEST(Config, ResolvedSub) {
  TestWithScope setup;
  Err err;

  Config sub1(setup.settings(), Label(SourceDir("//foo/"), "1"));
  sub1.own_values().defines().push_back("ONE");
  ASSERT_TRUE(sub1.OnResolved(&err));

  Config sub2(setup.settings(), Label(SourceDir("//foo/"), "2"));
  sub2.own_values().defines().push_back("TWO");
  ASSERT_TRUE(sub2.OnResolved(&err));

  Config config(setup.settings(), Label(SourceDir("//foo/"), "bar"));
  config.own_values().defines().push_back("FOO");
  config.configs().push_back(LabelConfigPair(&sub1));
  config.configs().push_back(LabelConfigPair(&sub2));
  ASSERT_TRUE(config.OnResolved(&err));

  // The resolved values should be the own values followed by each subconfig's
  // values, in the order the subconfigs were listed.
  ASSERT_EQ(3u, config.resolved_values().defines().size());
  EXPECT_EQ("FOO", config.resolved_values().defines()[0]);
  EXPECT_EQ("ONE", config.resolved_values().defines()[1]);
  EXPECT_EQ("TWO", config.resolved_values().defines()[2]);

  // The "own" values should be unchanged.
  ASSERT_EQ(1u, config.own_values().defines().size());
  EXPECT_EQ("FOO", config.own_values().defines()[0]);
}
+
// Tests that subconfigs of subconfigs are resolved properly: "first"'s
// resolved values should include "middle"'s resolved (already-flattened)
// values, which in turn include "last"'s.
TEST(Config, SubSub) {
  TestWithScope setup;
  Err err;

  // Set up first -> middle -> last configs.
  Config last(setup.settings(), Label(SourceDir("//foo/"), "last"));
  last.own_values().defines().push_back("LAST");
  ASSERT_TRUE(last.OnResolved(&err));

  Config middle(setup.settings(), Label(SourceDir("//foo/"), "middle"));
  middle.own_values().defines().push_back("MIDDLE");
  middle.configs().push_back(LabelConfigPair(&last));
  ASSERT_TRUE(middle.OnResolved(&err));

  Config first(setup.settings(), Label(SourceDir("//foo/"), "first"));
  first.own_values().defines().push_back("FIRST");
  first.configs().push_back(LabelConfigPair(&middle));
  ASSERT_TRUE(first.OnResolved(&err));

  // Check final resolved defines on "first".
  ASSERT_EQ(3u, first.resolved_values().defines().size());
  EXPECT_EQ("FIRST", first.resolved_values().defines()[0]);
  EXPECT_EQ("MIDDLE", first.resolved_values().defines()[1]);
  EXPECT_EQ("LAST", first.resolved_values().defines()[2]);
}
diff --git a/chromium/tools/gn/config_values.cc b/chromium/tools/gn/config_values.cc
new file mode 100644
index 00000000000..73486cd84bf
--- /dev/null
+++ b/chromium/tools/gn/config_values.cc
@@ -0,0 +1,46 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/config_values.h"
+
+namespace {
+
// Concatenates |append_this| onto the end of |append_to|. The emptiness check
// is just a shortcut; inserting an empty range would also be a no-op.
template <typename T>
void VectorAppend(std::vector<T>* append_to, const std::vector<T>& append_this) {
  if (!append_this.empty())
    append_to->insert(append_to->end(), append_this.begin(), append_this.end());
}
+
+} // namespace
+
// Out-of-line constructor/destructor; all members are default-initialized
// (empty vectors/strings, null precompiled source).
ConfigValues::ConfigValues() {
}

ConfigValues::~ConfigValues() {
}
+
+void ConfigValues::AppendValues(const ConfigValues& append) {
+ VectorAppend(&asmflags_, append.asmflags_);
+ VectorAppend(&cflags_, append.cflags_);
+ VectorAppend(&cflags_c_, append.cflags_c_);
+ VectorAppend(&cflags_cc_, append.cflags_cc_);
+ VectorAppend(&cflags_objc_, append.cflags_objc_);
+ VectorAppend(&cflags_objcc_, append.cflags_objcc_);
+ VectorAppend(&defines_, append.defines_);
+ VectorAppend(&include_dirs_, append.include_dirs_);
+ VectorAppend(&ldflags_, append.ldflags_);
+ VectorAppend(&lib_dirs_, append.lib_dirs_);
+ VectorAppend(&libs_, append.libs_);
+
+ // Only append precompiled header if there isn't one. It might be nice to
+ // throw an error if there are conflicting precompiled headers, but that
+ // requires piping through some context of the actual configs involved, and
+ // conflicts here should be very unusual. Instead, use the first value.
+ if (!append.precompiled_header_.empty() && !precompiled_header_.empty())
+ precompiled_header_ = append.precompiled_header_;
+ if (!append.precompiled_source_.is_null() && !precompiled_source_.is_null())
+ precompiled_source_ = append.precompiled_source_;
+}
diff --git a/chromium/tools/gn/config_values.h b/chromium/tools/gn/config_values.h
new file mode 100644
index 00000000000..823f1df9fc4
--- /dev/null
+++ b/chromium/tools/gn/config_values.h
@@ -0,0 +1,85 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_CONFIG_VALUES_H_
+#define TOOLS_GN_CONFIG_VALUES_H_
+
+#include <string>
+#include <vector>
+
+#include "base/macros.h"
+#include "tools/gn/lib_file.h"
+#include "tools/gn/source_dir.h"
+#include "tools/gn/source_file.h"
+
+// Holds the values (include_dirs, defines, compiler flags, etc.) for a given
+// config or target.
+class ConfigValues {
+ public:
+ ConfigValues();
+ ~ConfigValues();
+
+ // Appends the values from the given config to this one.
+ void AppendValues(const ConfigValues& append);
+
+#define STRING_VALUES_ACCESSOR(name) \
+ const std::vector<std::string>& name() const { return name##_; } \
+ std::vector<std::string>& name() { return name##_; }
+#define DIR_VALUES_ACCESSOR(name) \
+ const std::vector<SourceDir>& name() const { return name##_; } \
+ std::vector<SourceDir>& name() { return name##_; }
+
+ STRING_VALUES_ACCESSOR(asmflags)
+ STRING_VALUES_ACCESSOR(cflags)
+ STRING_VALUES_ACCESSOR(cflags_c)
+ STRING_VALUES_ACCESSOR(cflags_cc)
+ STRING_VALUES_ACCESSOR(cflags_objc)
+ STRING_VALUES_ACCESSOR(cflags_objcc)
+ STRING_VALUES_ACCESSOR(defines)
+ DIR_VALUES_ACCESSOR (include_dirs)
+ STRING_VALUES_ACCESSOR(ldflags)
+ DIR_VALUES_ACCESSOR (lib_dirs)
+ // If you add a new one, be sure to update AppendValues().
+
+#undef STRING_VALUES_ACCESSOR
+#undef DIR_VALUES_ACCESSOR
+
+ const std::vector<LibFile>& libs() const { return libs_; }
+ std::vector<LibFile>& libs() { return libs_; }
+
+ bool has_precompiled_headers() const {
+ return !precompiled_header_.empty() || !precompiled_source_.is_null();
+ }
+ const std::string& precompiled_header() const {
+ return precompiled_header_;
+ }
+ void set_precompiled_header(const std::string& f) {
+ precompiled_header_ = f;
+ }
+ const SourceFile& precompiled_source() const {
+ return precompiled_source_;
+ }
+ void set_precompiled_source(const SourceFile& f) {
+ precompiled_source_ = f;
+ }
+
+ private:
+ std::vector<std::string> asmflags_;
+ std::vector<std::string> cflags_;
+ std::vector<std::string> cflags_c_;
+ std::vector<std::string> cflags_cc_;
+ std::vector<std::string> cflags_objc_;
+ std::vector<std::string> cflags_objcc_;
+ std::vector<std::string> defines_;
+ std::vector<SourceDir> include_dirs_;
+ std::vector<std::string> ldflags_;
+ std::vector<SourceDir> lib_dirs_;
+ std::vector<LibFile> libs_;
+ // If you add a new one, be sure to update AppendValues().
+
+ std::string precompiled_header_;
+ SourceFile precompiled_source_;
+};
+
+#endif // TOOLS_GN_CONFIG_VALUES_H_
diff --git a/chromium/tools/gn/config_values_extractors.cc b/chromium/tools/gn/config_values_extractors.cc
new file mode 100644
index 00000000000..18617db7970
--- /dev/null
+++ b/chromium/tools/gn/config_values_extractors.cc
@@ -0,0 +1,35 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/config_values_extractors.h"
+
+#include "tools/gn/escape.h"
+
+namespace {
+
+class EscapedStringWriter {
+ public:
+ explicit EscapedStringWriter(const EscapeOptions& escape_options)
+ : escape_options_(escape_options) {
+ }
+
+ void operator()(const std::string& s, std::ostream& out) const {
+ out << " ";
+ EscapeStringToStream(out, s, escape_options_);
+ }
+
+ private:
+ const EscapeOptions& escape_options_;
+};
+
+} // namespace
+
+void RecursiveTargetConfigStringsToStream(
+ const Target* target,
+ const std::vector<std::string>& (ConfigValues::* getter)() const,
+ const EscapeOptions& escape_options,
+ std::ostream& out) {
+ RecursiveTargetConfigToStream(target, getter,
+ EscapedStringWriter(escape_options), out);
+}
diff --git a/chromium/tools/gn/config_values_extractors.h b/chromium/tools/gn/config_values_extractors.h
new file mode 100644
index 00000000000..f87f52b42e4
--- /dev/null
+++ b/chromium/tools/gn/config_values_extractors.h
@@ -0,0 +1,107 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_CONFIG_VALUES_EXTRACTORS_H_
+#define TOOLS_GN_CONFIG_VALUES_EXTRACTORS_H_
+
+#include <stddef.h>
+
+#include <ostream>
+#include <string>
+#include <vector>
+
+#include "tools/gn/config.h"
+#include "tools/gn/config_values.h"
+#include "tools/gn/target.h"
+
+struct EscapeOptions;
+
+// Provides a way to iterate through all ConfigValues applying to a given
+// target. This is more complicated than normal because the target has a list
+// of configs applying to it, and also config values on the target itself.
+//
+// This iterator allows one to iterate through all of these in a defined order
+// in one convenient loop. The order is defined to be the ConfigValues on the
+// target itself first, then the applying configs, in order.
+//
+// Example:
+// for (ConfigValueIterator iter(target); !iter.done(); iter.Next())
+// DoSomething(iter.cur());
+class ConfigValuesIterator {
+ public:
+ explicit ConfigValuesIterator(const Target* target)
+ : target_(target),
+ cur_index_(-1) {
+ }
+
+ bool done() const {
+ return cur_index_ >= static_cast<int>(target_->configs().size());
+ }
+
+ const ConfigValues& cur() const {
+ if (cur_index_ == -1)
+ return target_->config_values();
+ return target_->configs()[cur_index_].ptr->resolved_values();
+ }
+
+ // Returns the origin of who added this config, if any. This will always be
+ // null for the config values of a target itself.
+ const ParseNode* origin() const {
+ if (cur_index_ == -1)
+ return nullptr;
+ return target_->configs()[cur_index_].origin;
+ }
+
+ void Next() {
+ cur_index_++;
+ }
+
+ // Returns the config holding the current config values, or NULL for those
+ // config values associated with the target itself.
+ const Config* GetCurrentConfig() const {
+ if (cur_index_ == -1)
+ return nullptr;
+ return target_->configs()[cur_index_].ptr;
+ }
+
+ private:
+ const Target* target_;
+
+ // Represents an index into the target_'s configs() or, when -1, the config
+ // values on the target itself.
+ int cur_index_;
+};
+
+template<typename T, class Writer>
+inline void ConfigValuesToStream(
+ const ConfigValues& values,
+ const std::vector<T>& (ConfigValues::* getter)() const,
+ const Writer& writer,
+ std::ostream& out) {
+ const std::vector<T>& v = (values.*getter)();
+ for (size_t i = 0; i < v.size(); i++)
+ writer(v[i], out);
+}
+
+// Writes a given config value that applies to a given target. This collects
+// all values from the target itself and all configs that apply, and writes
+// them in order.
+template<typename T, class Writer>
+inline void RecursiveTargetConfigToStream(
+    const Target* target,
+    const std::vector<T>& (ConfigValues::* getter)() const,
+    const Writer& writer,
+    std::ostream& out) {
+  for (ConfigValuesIterator iter(target); !iter.done(); iter.Next())
+    ConfigValuesToStream(iter.cur(), getter, writer, out);
+}
+
+// Writes the values out as strings with no transformation.
+void RecursiveTargetConfigStringsToStream(
+ const Target* target,
+ const std::vector<std::string>& (ConfigValues::* getter)() const,
+ const EscapeOptions& escape_options,
+ std::ostream& out);
+
+#endif // TOOLS_GN_CONFIG_VALUES_EXTRACTORS_H_
diff --git a/chromium/tools/gn/config_values_extractors_unittest.cc b/chromium/tools/gn/config_values_extractors_unittest.cc
new file mode 100644
index 00000000000..b8875eafa85
--- /dev/null
+++ b/chromium/tools/gn/config_values_extractors_unittest.cc
@@ -0,0 +1,138 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <sstream>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/config.h"
+#include "tools/gn/config_values_extractors.h"
+#include "tools/gn/target.h"
+#include "tools/gn/test_with_scope.h"
+
+namespace {
+
+struct FlagWriter {
+ void operator()(const std::string& dir, std::ostream& out) const {
+ out << dir << " ";
+ }
+};
+
+struct IncludeWriter {
+ void operator()(const SourceDir& dir, std::ostream& out) const {
+ out << dir.value() << " ";
+ }
+};
+
+} // namespace
+
+TEST(ConfigValuesExtractors, IncludeOrdering) {
+ TestWithScope setup;
+ Err err;
+
+ // Construct a chain of dependencies: target -> dep1 -> dep2
+ // Add representative values: cflags (opaque, always copied) and include_dirs
+ // (uniquified) to each one so we can check what comes out the other end.
+
+ // Set up dep2, direct and all dependent configs.
+ Config dep2_all(setup.settings(), Label(SourceDir("//dep2/"), "all"));
+ dep2_all.own_values().cflags().push_back("--dep2-all");
+ dep2_all.own_values().include_dirs().push_back(SourceDir("//dep2/all/"));
+ ASSERT_TRUE(dep2_all.OnResolved(&err));
+
+ Config dep2_direct(setup.settings(), Label(SourceDir("//dep2/"), "direct"));
+ dep2_direct.own_values().cflags().push_back("--dep2-direct");
+ dep2_direct.own_values().include_dirs().push_back(
+ SourceDir("//dep2/direct/"));
+ ASSERT_TRUE(dep2_direct.OnResolved(&err));
+
+ Target dep2(setup.settings(), Label(SourceDir("//dep2/"), "dep2"));
+ dep2.set_output_type(Target::SOURCE_SET);
+ dep2.visibility().SetPublic();
+ dep2.SetToolchain(setup.toolchain());
+ dep2.all_dependent_configs().push_back(LabelConfigPair(&dep2_all));
+ dep2.public_configs().push_back(LabelConfigPair(&dep2_direct));
+
+ // Set up dep1, direct and all dependent configs. Also set up a subconfig
+ // on "dep1_all" to test sub configs.
+  Config dep1_all_sub(setup.settings(), Label(SourceDir("//dep1/"), "allch"));
+  dep1_all_sub.own_values().cflags().push_back("--dep1-all-sub");
+  ASSERT_TRUE(dep1_all_sub.OnResolved(&err));
+
+ Config dep1_all(setup.settings(), Label(SourceDir("//dep1/"), "all"));
+ dep1_all.own_values().cflags().push_back("--dep1-all");
+ dep1_all.own_values().include_dirs().push_back(SourceDir("//dep1/all/"));
+ dep1_all.configs().push_back(LabelConfigPair(&dep1_all_sub));
+ ASSERT_TRUE(dep1_all.OnResolved(&err));
+
+ Config dep1_direct(setup.settings(), Label(SourceDir("//dep1/"), "direct"));
+ dep1_direct.own_values().cflags().push_back("--dep1-direct");
+ dep1_direct.own_values().include_dirs().push_back(
+ SourceDir("//dep1/direct/"));
+ ASSERT_TRUE(dep1_direct.OnResolved(&err));
+
+ Target dep1(setup.settings(), Label(SourceDir("//dep1/"), "dep1"));
+ dep1.set_output_type(Target::SOURCE_SET);
+ dep1.visibility().SetPublic();
+ dep1.SetToolchain(setup.toolchain());
+ dep1.all_dependent_configs().push_back(LabelConfigPair(&dep1_all));
+ dep1.public_configs().push_back(LabelConfigPair(&dep1_direct));
+ dep1.private_deps().push_back(LabelTargetPair(&dep2));
+
+ // Set up target, direct and all dependent configs.
+ Config target_all(setup.settings(), Label(SourceDir("//target/"), "all"));
+ target_all.own_values().cflags().push_back("--target-all");
+ target_all.own_values().include_dirs().push_back(SourceDir("//target/all/"));
+ ASSERT_TRUE(target_all.OnResolved(&err));
+
+ Config target_direct(setup.settings(),
+ Label(SourceDir("//target/"), "direct"));
+ target_direct.own_values().cflags().push_back("--target-direct");
+ target_direct.own_values().include_dirs().push_back(
+ SourceDir("//target/direct/"));
+ ASSERT_TRUE(target_direct.OnResolved(&err));
+
+ // This config is applied directly to target.
+ Config target_config(setup.settings(),
+ Label(SourceDir("//target/"), "config"));
+ target_config.own_values().cflags().push_back("--target-config");
+ target_config.own_values().include_dirs().push_back(
+ SourceDir("//target/config/"));
+ ASSERT_TRUE(target_config.OnResolved(&err));
+
+ Target target(setup.settings(), Label(SourceDir("//target/"), "target"));
+ target.set_output_type(Target::SOURCE_SET);
+ target.SetToolchain(setup.toolchain());
+ target.all_dependent_configs().push_back(LabelConfigPair(&target_all));
+ target.public_configs().push_back(LabelConfigPair(&target_direct));
+ target.configs().push_back(LabelConfigPair(&target_config));
+ target.private_deps().push_back(LabelTargetPair(&dep1));
+
+ // Additionally add some values directly on "target".
+ target.config_values().cflags().push_back("--target");
+ target.config_values().include_dirs().push_back(
+ SourceDir("//target/"));
+
+ // Mark targets resolved. This should push dependent configs.
+ ASSERT_TRUE(dep2.OnResolved(&err));
+ ASSERT_TRUE(dep1.OnResolved(&err));
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ // Verify cflags by serializing.
+ std::ostringstream flag_out;
+ FlagWriter flag_writer;
+ RecursiveTargetConfigToStream<std::string, FlagWriter>(
+ &target, &ConfigValues::cflags, flag_writer, flag_out);
+ EXPECT_EQ(flag_out.str(),
+ "--target --target-config --target-all --target-direct "
+ "--dep1-all --dep1-all-sub --dep2-all --dep1-direct ");
+
+ // Verify include dirs by serializing.
+ std::ostringstream include_out;
+ IncludeWriter include_writer;
+ RecursiveTargetConfigToStream<SourceDir, IncludeWriter>(
+ &target, &ConfigValues::include_dirs, include_writer, include_out);
+ EXPECT_EQ(include_out.str(),
+ "//target/ //target/config/ //target/all/ //target/direct/ "
+ "//dep1/all/ //dep2/all/ //dep1/direct/ ");
+}
diff --git a/chromium/tools/gn/config_values_generator.cc b/chromium/tools/gn/config_values_generator.cc
new file mode 100644
index 00000000000..3cc8235face
--- /dev/null
+++ b/chromium/tools/gn/config_values_generator.cc
@@ -0,0 +1,120 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/config_values_generator.h"
+
+#include "base/strings/string_util.h"
+#include "tools/gn/config_values.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/value.h"
+#include "tools/gn/value_extractors.h"
+#include "tools/gn/variables.h"
+
+namespace {
+
+void GetStringList(
+ Scope* scope,
+ const char* var_name,
+ ConfigValues* config_values,
+ std::vector<std::string>& (ConfigValues::* accessor)(),
+ Err* err) {
+ const Value* value = scope->GetValue(var_name, true);
+ if (!value)
+ return; // No value, empty input and succeed.
+
+ ExtractListOfStringValues(*value, &(config_values->*accessor)(), err);
+}
+
+void GetDirList(
+ Scope* scope,
+ const char* var_name,
+ ConfigValues* config_values,
+ const SourceDir input_dir,
+ std::vector<SourceDir>& (ConfigValues::* accessor)(),
+ Err* err) {
+ const Value* value = scope->GetValue(var_name, true);
+ if (!value)
+ return; // No value, empty input and succeed.
+
+ std::vector<SourceDir> result;
+ ExtractListOfRelativeDirs(scope->settings()->build_settings(),
+ *value, input_dir, &result, err);
+ (config_values->*accessor)().swap(result);
+}
+
+} // namespace
+
+ConfigValuesGenerator::ConfigValuesGenerator(
+ ConfigValues* dest_values,
+ Scope* scope,
+ const SourceDir& input_dir,
+ Err* err)
+ : config_values_(dest_values),
+ scope_(scope),
+ input_dir_(input_dir),
+ err_(err) {
+}
+
+ConfigValuesGenerator::~ConfigValuesGenerator() {
+}
+
+void ConfigValuesGenerator::Run() {
+#define FILL_STRING_CONFIG_VALUE(name) \
+ GetStringList(scope_, #name, config_values_, &ConfigValues::name, err_);
+#define FILL_DIR_CONFIG_VALUE(name) \
+ GetDirList(scope_, #name, config_values_, input_dir_, \
+ &ConfigValues::name, err_);
+
+ FILL_STRING_CONFIG_VALUE(asmflags)
+ FILL_STRING_CONFIG_VALUE(cflags)
+ FILL_STRING_CONFIG_VALUE(cflags_c)
+ FILL_STRING_CONFIG_VALUE(cflags_cc)
+ FILL_STRING_CONFIG_VALUE(cflags_objc)
+ FILL_STRING_CONFIG_VALUE(cflags_objcc)
+ FILL_STRING_CONFIG_VALUE(defines)
+ FILL_DIR_CONFIG_VALUE( include_dirs)
+ FILL_STRING_CONFIG_VALUE(ldflags)
+ FILL_DIR_CONFIG_VALUE( lib_dirs)
+
+#undef FILL_STRING_CONFIG_VALUE
+#undef FILL_DIR_CONFIG_VALUE
+
+ // Libs
+ const Value* libs_value = scope_->GetValue("libs", true);
+ if (libs_value) {
+ ExtractListOfLibs(scope_->settings()->build_settings(), *libs_value,
+ input_dir_, &config_values_->libs(), err_);
+ }
+
+ // Precompiled headers.
+ const Value* precompiled_header_value =
+ scope_->GetValue(variables::kPrecompiledHeader, true);
+ if (precompiled_header_value) {
+ if (!precompiled_header_value->VerifyTypeIs(Value::STRING, err_))
+ return;
+
+ // Check for common errors. This is a string and not a file.
+ const std::string& pch_string = precompiled_header_value->string_value();
+ if (base::StartsWith(pch_string, "//", base::CompareCase::SENSITIVE)) {
+ *err_ = Err(*precompiled_header_value,
+ "This precompiled_header value is wrong.",
+ "You need to specify a string that the compiler will match against\n"
+ "the #include lines rather than a GN-style file name.\n");
+ return;
+ }
+ config_values_->set_precompiled_header(pch_string);
+ }
+
+ const Value* precompiled_source_value =
+ scope_->GetValue(variables::kPrecompiledSource, true);
+ if (precompiled_source_value) {
+ config_values_->set_precompiled_source(
+ input_dir_.ResolveRelativeFile(
+ *precompiled_source_value, err_,
+ scope_->settings()->build_settings()->root_path_utf8()));
+ if (err_->has_error())
+ return;
+ }
+}
diff --git a/chromium/tools/gn/config_values_generator.h b/chromium/tools/gn/config_values_generator.h
new file mode 100644
index 00000000000..6087878618f
--- /dev/null
+++ b/chromium/tools/gn/config_values_generator.h
@@ -0,0 +1,46 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_CONFIG_VALUES_GENERATOR_H_
+#define TOOLS_GN_CONFIG_VALUES_GENERATOR_H_
+
+#include "base/macros.h"
+#include "tools/gn/source_dir.h"
+
+class ConfigValues;
+class Err;
+class Scope;
+class Token;
+
+// This class fills in the config values from a given scope. It's shared
+// between the "config" function call and all the different binary target types
+// (shared library, static library, etc.) since all of these support the
+// various flags stored in the ConfigValues class.
+class ConfigValuesGenerator {
+ public:
+ ConfigValuesGenerator(ConfigValues* dest_values,
+ Scope* scope,
+ const SourceDir& input_dir,
+ Err* err);
+ ~ConfigValuesGenerator();
+
+ // Sets the error passed to the constructor on failure.
+ void Run();
+
+ private:
+ ConfigValues* config_values_;
+ Scope* scope_;
+ const SourceDir input_dir_;
+ Err* err_;
+
+ DISALLOW_COPY_AND_ASSIGN(ConfigValuesGenerator);
+};
+
+// For using in documentation for functions which use this.
+#define CONFIG_VALUES_VARS_HELP \
+ " Flags: cflags, cflags_c, cflags_cc, cflags_objc, cflags_objcc,\n" \
+ " asmflags, defines, include_dirs, ldflags, lib_dirs, libs,\n" \
+ " precompiled_header, precompiled_source\n"
+
+#endif // TOOLS_GN_CONFIG_VALUES_GENERATOR_H_
diff --git a/chromium/tools/gn/copy_target_generator.cc b/chromium/tools/gn/copy_target_generator.cc
new file mode 100644
index 00000000000..994011e824e
--- /dev/null
+++ b/chromium/tools/gn/copy_target_generator.cc
@@ -0,0 +1,44 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/copy_target_generator.h"
+
+#include "tools/gn/build_settings.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/value.h"
+
+CopyTargetGenerator::CopyTargetGenerator(Target* target,
+ Scope* scope,
+ const FunctionCallNode* function_call,
+ Err* err)
+ : TargetGenerator(target, scope, function_call, err) {
+}
+
+CopyTargetGenerator::~CopyTargetGenerator() {
+}
+
+void CopyTargetGenerator::DoRun() {
+ target_->set_output_type(Target::COPY_FILES);
+
+ if (!FillSources())
+ return;
+ if (!FillOutputs(true))
+ return;
+
+ if (target_->sources().empty()) {
+ *err_ = Err(function_call_, "Empty sources for copy command.",
+ "You have to specify at least one file to copy in the \"sources\".");
+ return;
+ }
+ if (target_->action_values().outputs().list().size() != 1) {
+ *err_ = Err(function_call_, "Copy command must have exactly one output.",
+ "You must specify exactly one value in the \"outputs\" array for the "
+ "destination of the copy\n(see \"gn help copy\"). If there are "
+ "multiple sources to copy, use source expansion\n(see \"gn help "
+ "source_expansion\").");
+ return;
+ }
+}
diff --git a/chromium/tools/gn/copy_target_generator.h b/chromium/tools/gn/copy_target_generator.h
new file mode 100644
index 00000000000..b05855f6ae5
--- /dev/null
+++ b/chromium/tools/gn/copy_target_generator.h
@@ -0,0 +1,28 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_COPY_TARGET_GENERATOR_H_
+#define TOOLS_GN_COPY_TARGET_GENERATOR_H_
+
+#include "base/macros.h"
+#include "tools/gn/target_generator.h"
+
+// Populates a Target with the values from a copy rule.
+class CopyTargetGenerator : public TargetGenerator {
+ public:
+ CopyTargetGenerator(Target* target,
+ Scope* scope,
+ const FunctionCallNode* function_call,
+ Err* err);
+ ~CopyTargetGenerator() override;
+
+ protected:
+ void DoRun() override;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(CopyTargetGenerator);
+};
+
+#endif // TOOLS_GN_COPY_TARGET_GENERATOR_H_
+
diff --git a/chromium/tools/gn/create_bundle_target_generator.cc b/chromium/tools/gn/create_bundle_target_generator.cc
new file mode 100644
index 00000000000..206a91861e9
--- /dev/null
+++ b/chromium/tools/gn/create_bundle_target_generator.cc
@@ -0,0 +1,69 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/create_bundle_target_generator.h"
+
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/substitution_type.h"
+#include "tools/gn/target.h"
+#include "tools/gn/value.h"
+#include "tools/gn/variables.h"
+
+CreateBundleTargetGenerator::CreateBundleTargetGenerator(
+ Target* target,
+ Scope* scope,
+ const FunctionCallNode* function_call,
+ Err* err)
+ : TargetGenerator(target, scope, function_call, err) {}
+
+CreateBundleTargetGenerator::~CreateBundleTargetGenerator() {}
+
+void CreateBundleTargetGenerator::DoRun() {
+ target_->set_output_type(Target::CREATE_BUNDLE);
+
+ BundleData& bundle_data = target_->bundle_data();
+ if (!GetBundleDir(std::string(),
+ variables::kBundleRootDir,
+ &bundle_data.root_dir()))
+ return;
+ if (!GetBundleDir(bundle_data.root_dir(),
+ variables::kBundleResourcesDir,
+ &bundle_data.resources_dir()))
+ return;
+ if (!GetBundleDir(bundle_data.root_dir(),
+ variables::kBundleExecutableDir,
+ &bundle_data.executable_dir()))
+ return;
+ if (!GetBundleDir(bundle_data.root_dir(),
+ variables::kBundlePlugInsDir,
+ &bundle_data.plugins_dir()))
+ return;
+}
+
+bool CreateBundleTargetGenerator::GetBundleDir(
+ const std::string& bundle_root_dir,
+ const base::StringPiece& name,
+ std::string* bundle_dir) {
+ const Value* value = scope_->GetValue(name, true);
+ if (!value)
+ return true;
+ if (!value->VerifyTypeIs(Value::STRING, err_))
+ return false;
+ const std::string& str = value->string_value();
+ if (!EnsureStringIsInOutputDir(GetBuildSettings()->build_dir(), str,
+ value->origin(), err_))
+ return false;
+ if (str != bundle_root_dir &&
+ !IsStringInOutputDir(SourceDir(bundle_root_dir), str)) {
+ *err_ = Err(value->origin(), "Path is not in bundle root dir.",
+ "The given file should be in the bundle root directory or below.\n"
+ "Normally you would do \"$bundle_root_dir/foo\". I interpreted this\n"
+ "as \"" + str + "\".");
+ return false;
+ }
+ bundle_dir->assign(value->string_value());
+ return true;
+}
diff --git a/chromium/tools/gn/create_bundle_target_generator.h b/chromium/tools/gn/create_bundle_target_generator.h
new file mode 100644
index 00000000000..db82b8a4c0b
--- /dev/null
+++ b/chromium/tools/gn/create_bundle_target_generator.h
@@ -0,0 +1,31 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_CREATE_BUNDLE_TARGET_GENERATOR_H_
+#define TOOLS_GN_CREATE_BUNDLE_TARGET_GENERATOR_H_
+
+#include "base/macros.h"
+#include "tools/gn/target_generator.h"
+
+// Populates a Target with the values from a create_bundle rule.
+class CreateBundleTargetGenerator : public TargetGenerator {
+ public:
+ CreateBundleTargetGenerator(Target* target,
+ Scope* scope,
+ const FunctionCallNode* function_call,
+ Err* err);
+ ~CreateBundleTargetGenerator() override;
+
+ protected:
+ void DoRun() override;
+
+ private:
+ bool GetBundleDir(const std::string& bundle_root_dir,
+ const base::StringPiece& name,
+ std::string* bundle_dir);
+
+ DISALLOW_COPY_AND_ASSIGN(CreateBundleTargetGenerator);
+};
+
+#endif // TOOLS_GN_CREATE_BUNDLE_TARGET_GENERATOR_H_
diff --git a/chromium/tools/gn/deps_iterator.cc b/chromium/tools/gn/deps_iterator.cc
new file mode 100644
index 00000000000..8bbb760e59a
--- /dev/null
+++ b/chromium/tools/gn/deps_iterator.cc
@@ -0,0 +1,56 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/deps_iterator.h"
+
+#include "tools/gn/target.h"
+
+DepsIterator::DepsIterator() : current_index_(0) {
+ vect_stack_[0] = nullptr;
+ vect_stack_[1] = nullptr;
+ vect_stack_[2] = nullptr;
+}
+
+DepsIterator::DepsIterator(const LabelTargetVector* a,
+ const LabelTargetVector* b,
+ const LabelTargetVector* c)
+ : current_index_(0) {
+ vect_stack_[0] = a;
+ vect_stack_[1] = b;
+ vect_stack_[2] = c;
+
+ if (vect_stack_[0] && vect_stack_[0]->empty())
+ operator++();
+}
+
+// Advance to the next position. This assumes there are more vectors.
+//
+// For internal use, this function tolerates an initial index equal to the
+// length of the current vector. In this case, it will advance to the next
+// one.
+DepsIterator& DepsIterator::operator++() {
+ DCHECK(vect_stack_[0]);
+
+ current_index_++;
+ if (current_index_ >= vect_stack_[0]->size()) {
+ // Advance to next vect. Shift the elements left by one.
+ vect_stack_[0] = vect_stack_[1];
+ vect_stack_[1] = vect_stack_[2];
+ vect_stack_[2] = nullptr;
+
+ current_index_ = 0;
+
+ if (vect_stack_[0] && vect_stack_[0]->empty())
+ operator++();
+ }
+ return *this;
+}
+
+DepsIteratorRange::DepsIteratorRange(const DepsIterator& b)
+ : begin_(b),
+ end_() {
+}
+
+DepsIteratorRange::~DepsIteratorRange() {
+}
diff --git a/chromium/tools/gn/deps_iterator.h b/chromium/tools/gn/deps_iterator.h
new file mode 100644
index 00000000000..58f2b342258
--- /dev/null
+++ b/chromium/tools/gn/deps_iterator.h
@@ -0,0 +1,74 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_DEPS_ITERATOR_H_
+#define TOOLS_GN_DEPS_ITERATOR_H_
+
+#include <stddef.h>
+
+#include "tools/gn/label_ptr.h"
+
+class Target;
+
+// Provides an iterator for iterating over multiple LabelTargetVectors to
+// make it convenient to iterate over all deps of a target.
+//
+// This works by maintaining a simple stack of vectors (since we have a fixed
+// number of deps types). When the stack is empty, we've reached the end. This
+// means that the default-constructed iterator == end() for any sequence.
+class DepsIterator {
+ public:
+ // Creates an empty iterator.
+ DepsIterator();
+
+ // Iterate over the deps in the given vectors. If passing less than three,
+ // pad with nulls.
+ DepsIterator(const LabelTargetVector* a,
+ const LabelTargetVector* b,
+ const LabelTargetVector* c);
+
+ // Prefix increment operator. This assumes there are more items (i.e.
+ // *this != DepsIterator()).
+ //
+ // For internal use, this function tolerates an initial index equal to the
+ // length of the current vector. In this case, it will advance to the next
+ // one.
+ DepsIterator& operator++();
+
+ // Comparison for STL-based loops.
+ bool operator!=(const DepsIterator& other) {
+ return current_index_ != other.current_index_ ||
+ vect_stack_[0] != other.vect_stack_[0] ||
+ vect_stack_[1] != other.vect_stack_[1] ||
+ vect_stack_[2] != other.vect_stack_[2];
+ }
+
+ // Dereference operator for STL-compatible iterators.
+ const LabelTargetPair& operator*() const {
+ DCHECK_LT(current_index_, vect_stack_[0]->size());
+ return (*vect_stack_[0])[current_index_];
+ }
+
+ private:
+ const LabelTargetVector* vect_stack_[3];
+
+ size_t current_index_;
+};
+
+// Provides a virtual container implementing begin() and end() for a
+// sequence of deps. This can then be used in range-based for loops.
+class DepsIteratorRange {
+ public:
+ explicit DepsIteratorRange(const DepsIterator& b);
+ ~DepsIteratorRange();
+
+ const DepsIterator& begin() const { return begin_; }
+ const DepsIterator& end() const { return end_; }
+
+ private:
+ DepsIterator begin_;
+ DepsIterator end_;
+};
+
+#endif // TOOLS_GN_DEPS_ITERATOR_H_
diff --git a/chromium/tools/gn/docs/check.md b/chromium/tools/gn/docs/check.md
new file mode 100644
index 00000000000..e99f6a4879a
--- /dev/null
+++ b/chromium/tools/gn/docs/check.md
@@ -0,0 +1,112 @@
+# GN Check
+
+GN has several different ways to check dependencies. Many of them are checked by
+the `gn check` command. Running checks involve opening and scanning all source
+files so this isn't run every time a build is updated. To run check on an
+existing build:
+
+ gn check out/mybuild
+
+To run the check as part of the "gen" command to update the build (this is what
+the bots do):
+
+ gn gen out/mybuild --check
+
+[TOC]
+
+## Concepts
+
+### Visibility
+
+Targets can control which other targets may depend on them by specifying
+`visibility`. Visibility is always checked when running any GN command (not just
+`gn check`).
+
+By default, targets are "public" meaning any target can depend on them. If you
+supply a list, visibility will be limited to those targets (possibly including
+wildcards):
+
+```
+visibility = [
+ ":*", # All targets in this file.
+ "//content/*", # All targets in content and any subdirectory thereof.
+ "//tools:doom_melon", # This specific target.
+]
+```
+
+See `gn help visibility` for more details and examples.
+
+### Public header files
+
+Targets can control which headers may be included by dependent targets so as to
+define a public API. If your target specifies only `sources`, then all headers
+listed there are public and can be included by all dependents.
+
+If your target defines a `public` variable, only the files listed in that list
+will be public. Files in `sources` but not `public` (they can be in both or only
+one) may not be included by dependent targets.
+
+```
+source_set("foo") {
+ public = [
+ "foo.h",
+ "foo_config.h",
+ ]
+ sources = [
+ "foo.cc",
+ "foo.h",
+ "bar.cc",
+ "bar.h",
+ ]
+}
+```
+
+### Public dependencies
+
+In order to include files from your target, that target must be listed in your
+target's dependencies. By default, transitively depending on a target doesn't
+give your files this privilege.
+
+If a target exposes a dependency as part of its public API, then it can list
+that dependency as a `public_deps`:
+
+```
+source_set("foo") {
+ sources = [ ... ]
+ public_deps = [
+ "//base",
+ ]
+ deps = [
+ "//tools/doom_melon",
+ ]
+}
+```
+
+Targets that depend on `foo` can include files from `base` but not from
+`doom_melon`. To include public headers from `doom_melon`, a target would need
+to depend directly on it.
+
+Public dependencies work transitively, so listing a target as a public
+dependency also exposes that target's public dependencies. Along with the
+ability to include headers, public dependencies forward the `public_configs`
+which allow settings like defines and include directories to apply to
+dependents.
+
+## Putting it all together
+
+In order to include a header from target Y in a file that is part of target X:
+
+* X must be in Y's `visibility` list (or Y must have no `visibility` defined).
+* The header must be in Y's `public` headers (or Y must have no `public`
+ variable defined).
+* X must depend directly on Y, or there must be a path from X to Y following
+ only public dependencies.
+
+### What gets checked
+
+Chrome currently doesn't come close to passing a `gn check` pass. You can check
+specific targets or subtrees for issues:
+
+ gn check out/mybuild //base
+
+ gn check out/mybuild "//mojo/*"
diff --git a/chromium/tools/gn/docs/cookbook.md b/chromium/tools/gn/docs/cookbook.md
new file mode 100644
index 00000000000..febc16c17a6
--- /dev/null
+++ b/chromium/tools/gn/docs/cookbook.md
@@ -0,0 +1,680 @@
+# GYP->GN Conversion Cookbook
+
+[TOC]
+
+## Targets
+
+| *GYP* | *GN* |
+|:-------------------------------------------------|:---------------------------------------------------|
+| `'type': 'static_library', 'name': 'foo',` | `static_library("foo") {` or `source_set("foo") {` |
+| `'type': 'shared_library', 'name': 'foo',` | `shared_library("foo") {` |
+| `'type': '<(component)', 'name': 'foo',` | `component("foo") {` |
+| `'type': 'executable', 'name': 'foo',` | `executable("foo") {` |
+| `'type': '<(gtest_target_type)', 'name': 'foo',` | `test("foo") {` |
+| `'type': 'none', 'name': 'foo',` | `group("foo") {` |
+
+### Note on static libraries
+
+A source\_set is basically a transparent static\_library. The source files
+are compiled with the given options but not linked into anything.
+Targets that depend on a source set get the source set's object files
+linked into it. This saves the overhead of creating a static library on
+disk, avoids weird linker issues when a static library has no source
+files, and you can link source sets into shared libraries and have
+symbols exported from the shared library.
+
+The last issue is a cause of a lot of headaches in the GYP build. If you
+annotate a symbol as exported (i.e. `BASE_EXPORT`) then you can't have
+it in a file that goes into a static library because the function might
+be [stripped out](http://blogs.msdn.com/b/oldnewthing/archive/2014/03/21/10509670.aspx)
+if it's not called from within the static library. This
+prevents composing components of static libraries and exporting their
+symbols. A source set avoids this issue and `EXPORT` has the desired
+meaning of "export from the component this gets linked into" with no
+surprising dead code stripping behavior.
+
+A disadvantage of source sets is that if an object file is completely
+unused, it will still be linked into the result, which is not the case
+for static libraries. A few places in the build depend on this behavior
+(deliberately or accidentally). In general, small libraries that we
+expect to be entirely used, test helpers, etc. can be source sets. There
+is slightly less risk of subtle issues if you keep static libraries
+static libraries, however.
+
+### Actions
+
+GYP
+
+```
+{
+ 'action_name': 'foo',
+ 'inputs': [ 'bar.py' ],
+ 'outputs': [ '<(SHARED_INTERMEDIATE_DIR)/bar.out' ],
+ 'action': ['python', 'bar.py', '--la_dee_da' ],
+},
+```
+
+Unlike GYP, where an action is a part of a target, GN actions are
+separate targets that you then depend on via deps from other targets:
+
+```
+action("foo") {
+ script = "bar.py"
+ outputs = [ "$target_gen_dir/bar.out" ]
+ args = [ "--la_dee_da" ]
+}
+
+executable("foo.exe") {
+ ...
+ deps = [ ":foo" ] # Depend on the action to make sure it runs.
+}
+```
+
+Rules in GYP become `action_foreach` in GN which work like actions but
+iterate over a set of sources.
+
+### Copies
+
+GYP
+
+```
+'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/',
+ 'files': [
+ '../build/win/dbghelp_xp/dbghelp.dll',
+ ],
+ },
+],
+```
+
+Unlike GYP, where copies are part of a target, GN copies are
+separate targets that you then depend on via deps from other targets:
+
+```
+copy("bar") {
+ sources = [ "../path/to/secret.dll" ]
+ outputs = [ "$root_out_dir/{{source_file_part}}" ]
+}
+
+component("base") {
+ ...
+ deps = [ ":bar" ] # Depend on the copy to make sure it runs.
+}
+```
+
+## Platform checking
+
+| *GYP* | *GN* |
+|:-------------------------------------|:---------------------|
+| `'conditions': [['OS=="win"', {` | `if (is_win) {` |
+| `'conditions': [['OS=="linux"', {` | `if (is_linux) {` |
+| `'conditions': [['OS=="android"', {` | `if (is_android) {` |
+| `'conditions': [['OS=="mac"', {` | `if (is_mac) {` |
+| `'conditions': [['OS=="ios"', {` | `if (is_ios) {` |
+| `'conditions': [['chromeos==1', {` | `if (is_chromeos) {` |
+
+## Typical sources and deps modifications
+
+### GYP
+
+```
+'sources': [
+ 'a.cc',
+ 'b.cc',
+],
+'dependencies': [
+ '<(DEPTH)/base/base.gyp:foo',
+],
+'conditions': [
+ ['OS=="win"': {
+ 'sources!': [
+ 'a.cc',
+ ],
+ 'sources': [
+ 'foo.cc',
+ ],
+ 'dependencies': [
+ '<(DEPTH)/base/base.gyp:bar',
+ ],
+ }, {
+ 'sources/': [
+ ['exclude', '^b\\.cc$'],
+ ],
+ }],
+],
+```
+
+### GN
+
+```
+sources = [
+ "a.cc",
+ "b.cc",
+]
+deps = [
+ "//base:foo",
+]
+
+if (is_win) {
+ sources -= [ "a.cc" ]
+ sources += [ "foo.cc" ]
+ deps += [ "//base:bar" ]
+} else {
+ sources -= [ "b.cc" ]
+}
+```
+
+## Variable mappings
+
+### Build configuration
+
+Build configuration and feature flags are usually global in GYP. In GN
+we try to limit global variables and instead put variables used by only
+some files into `.gni` files. These files are then imported into your
+buildfile by specifying at the top:
+
+```
+import("//build/config/features.gni")
+
+# ... now you can use the variables declared in features.gni.
+if (is_tsan) {
+ # ...
+}
+if (cld_version == 2) {
+ # ...
+}
+```
+
+Other flags only apply to one `BUILD.gn` file and those flags are
+declared directly in that file (so other files can't use them). These
+places are noted in the table below.
+
+| *GYP* | *GN* | *GN import* |
+|:------------------------------------------------|:-------------------------------------------|:-----------------------------------------------|
+| `arm_float_abi` | `arm_float_abi` | `//build/config/arm.gni` |
+| `arm_neon` (0/1) | `arm_use_neon` (true/false) | `//build/config/arm.gni` |
+| `arm_neon_optional` (0/1) | `arm_optionally_use_neon` (true/false) | `//build/config/arm.gni` |
+| `arm_version` | `arm_version` | `//build/config/arm.gni` |
+| `asan` (0/1) | `is_asan` (true/false) | `//build/config/sanitizers/sanitizers.gni` |
+| `branding` ("Chromium"/"Chrome") | `is_chrome_branded` (true/false) | `//build/config/chrome_build.gni` |
+| `build_for_tool=="drmemory"` | `enable_iterator_debugging=false` | (internal to `//build/config/BUILD.gn`) |
+| `build_for_tool=="tsan"` | `enable_iterator_debugging=false` | (internal to `//build/config/BUILD.gn`) |
+| `buildtype` ("Official"/"Dev") | `is_official_build` (true/false) | `//build/config/chrome_build.gni` |
+| `chrome_multiple_dll` (0/1) | `is_multi_dll_chrome` (true/false) | `//build/config/chrome_build.gni` |
+| `clang` (0/1) | `is_clang` (true/false) | (global) |
+| `clang_use_chrome_plugins` (0/1) | `clang_use_chrome_plugins` (true/false) | (internal to `//build/config/clang/BUILD.gn`) |
+| `component` ("shared_library"/"static_library") | `is_component_build` (true/false) | (global) |
+| `desktop_linux` (0/1) | `is_desktop_linux` (true/false) | (global) |
+| `disable_glibcxx_debug` (0/1) | `enable_iterator_debugging` (true/false) | (internal to `//build/config/BUILD.gn`) |
+| `fastbuild` (0/1/2) | `symbol_level` (2/1/0 — values inverted) | `//build/config/compiler/compiler.gni` |
+| `gomadir` | `goma_dir` | `//build/toolchain/goma.gni` |
+| `ios_deployment_target` (string) | `ios_deployment_target` | `//build/config/ios/ios_sdk.gni` |
+| `GYP_MSVS_OVERRIDE_PATH` environment variable | `visual_studio_path` | `//build/config/win/visual_studio_version.gni` |
+| `GYP_MSVS_VERSION` environment variable | (none) | |
+| `ios_sdk_path` | `ios_sdk_path` and `use_ios_simulator` | `//build/config/ios/ios_sdk.gni` |
+| `lsan` (0/1) | `is_lsan` (true/false) | `//build/config/sanitizers/sanitizers.gni` |
+| `mac_sdk_min` | `mac_sdk_min` | `//build/config/mac/mac_sdk.gni` |
+| `mac_sdk_path` | `mac_sdk_path` | `//build/config/mac/mac_sdk.gni` |
+| `mac_sdk` | `mac_sdk_version` | `//build/config/mac/mac_sdk.gni` |
+| `msan` (0/1) | `is_msan` (true/false) | `//build/config/sanitizers/sanitizers.gni` |
+| `SDKROOT` (Mac) | `sysroot` | `//build/config/sysroot.gni` |
+| `sysroot` | `sysroot` | `//build/config/sysroot.gni` |
+| `target_arch` ("ia32"/"x64"/"arm"/"mipsel") | `target_cpu` ("x86"/"x64"/"arm"/"mipsel") | (global) |
+| `toolkit_views` (0/1) | `toolkit_views` | `//build/config/ui.gni` |
+| `tsan` (0/1) | `is_tsan` (true/false) | `//build/config/sanitizers/sanitizers.gni` |
+| `windows_sdk_path` | `windows_sdk_path` | (internal to `//build/config/win/BUILD.gn`) |
+
+### Feature flags
+
+| *GYP* | *GN* | *GN import* |
+|:----------------------------------------|:-----------------------------------------------|:------------------------------|
+| `cld_version` (number) | `cld_version` (number) | `//build/config/features.gni` |
+| `configuration_policy` (0/1) | `enable_configuration_policy` (true/false) | `//build/config/features.gni` |
+| `debug_devtools` (0/1) | `debug_devtools` (true/false) | `//build/config/features.gni` |
+| `disable_ftp_support` (0/1) | `disable_ftp_support` (true/false) | `//build/config/features.gni` |
+| `disable_nacl` (0/1) | `enable_nacl` (true/false) | `//build/config/features.gni` |
+| `enable_app_list` (0/1) | `enable_app_list` (true/false) | `//build/config/features.gni` |
+| `enable_autofill_dialog` (0/1) | `enable_autofill_dialog` (true/false) | `//build/config/features.gni` |
+| `enable_background` (0/1) | `enable_background` (true/false) | `//build/config/features.gni` |
+| `enable_captive_portal_detection` (0/1) | `enable_captive_portal_detection` (true/false) | `//build/config/features.gni` |
+| `enable_chromevox_next` (0/1) | `enable_chromevox_next` (true/false) | `//build/config/features.gni` |
+| `enable_extensions` (0/1) | `enable_extensions` (true/false) | `//build/config/features.gni` |
+| `enable_google_now` (0/1) | `enable_google_now` (true/false) | `//build/config/features.gni` |
+| `enable_hidpi` (0/1) | `enable_hidpi` (true/false) | `//ui/base/ui_features.gni` |
+| `enable_managed_users` (0/1) | `enable_managed_users` (true/false) | `//build/config/features.gni` |
+| `enable_mdns` (0/1) | `enable_mdns` (true/false) | `//build/config/features.gni` |
+| `enable_one_click_signin` (0/1) | `enable_one_click_signin` (true/false) | `//chrome/common/features.gni` |
+| `enable_pepper_cdms` (0/1) | `enable_pepper_cdms` (true/false) | `//build/config/features.gni` |
+| `enable_plugins` (0/1) | `enable_plugins` (true/false) | `//build/config/features.gni` |
+| `enable_plugin_installation` (0/1) | `enable_plugin_installation` (true/false) | `//build/config/features.gni` |
+| `enable_basic_printing` (0/1) | `enable_basic_printing` (true/false) | `//build/config/features.gni` |
+| `enable_print_preview` (0/1) | `enable_print_preview` (true/false) | `//build/config/features.gni` |
+| `enable_rlz` (0/1) | `enable_rlz` (true/false) | `//build/config/features.gni` |
+| `enable_service_discovery` (0/1) | `enable_service_discovery` (true/false) | `//build/config/features.gni` |
+| `enable_spellcheck` (0/1) | `enable_spellcheck` (true/false) | `//build/config/features.gni` |
+| `enable_session_service` (0/1) | `enable_session_service` (true/false) | `//build/config/features.gni` |
+| `enable_settings_app` (0/1) | `enable_settings_app` (true/false) | `//build/config/features.gni` |
+| `enable_task_manager` (0/1) | `enable_task_manager` (true/false) | `//build/config/features.gni` |
+| `enable_themes` (0/1) | `enable_themes` (true/false) | `//build/config/features.gni` |
+| `enable_webrtc` (0/1) | `enable_webrtc` (true/false) | `//build/config/features.gni` |
+| `image_loader_extension` (0/1) | `enable_image_loader_extension` (true/false) | `//build/config/features.gni` |
+| `input_speech` (0/1) | `enable_speech_input` (true/false) | `//build/config/features.gni` |
+| `notifications` (0/1) | `enable_notifications` (true/false) | `//build/config/features.gni` |
+| `ozone_platform_dri` (0/1) | `ozone_platform_dri` (true/false) | `//build/config/ui.gni` |
+| `remoting` (0/1) | `enable_remoting` (true/false) | `//build/config/features.gni` |
+| `safe_browsing` (0/1/2) | `safe_browsing_mode` (0/1/2) | `//build/config/features.gni` |
+| `use_allocator` (`'none'`/`'tcmalloc'`) | `use_allocator` (`"none"`/`"tcmalloc"`) | (See "Allocator" below) |
+| `ui_compositor_image_transport` (0/1) | `ui_compositor_image_transport` (true/false) | `//build/config/ui.gni` |
+| `use_ash` (0/1) | `use_ash` (true/false) | `//build/config/ui.gni` |
+| `use_athena` (0/1) | `use_athena` (true/false) | `//build/config/ui.gni` |
+| `use_aura` (0/1) | `use_aura` (true/false) | `//build/config/ui.gni` |
+| `use_brlapi` (0/1) | `use_brlapi` (true/false) | `//build/config/features.gni` |
+| `use_cairo` (0/1) | `use_cairo` (true/false) | `//build/config/ui.gni` |
+| `use_clipboard_aurax11` (0/1) | `use_aura && use_x11` | |
+| `use_cups` (0/1) | `use_cups` (true/false) | `//build/config/features.gni` |
+| `use_dbus` (0/1) | `use_dbus` (true/false) | `//build/config/features.gni` |
+| `use_gconf` (0/1) | `use_gconf` (true/false) | `//build/config/features.gni` |
+| `use_glib` (0/1) | `is_linux` (true/false) | (global) |
+| `use_gnome_keyring` (0/1) | `is_desktop_linux` (true/false) | |
+| `use_goma` (0/1) | `use_goma` (true/false) | `//build/toolchain/goma.gni` |
+| `use_nss_certs` (0/1) | `use_nss_certs` (true/false) | `//build/config/crypto.gni` (Many of these conditions can be deleted, see the "SSL" notes on targets below.) |
+| `use_nss_verifier` (0/1) | `use_nss_verifier` (true/false) | `//build/config/crypto.gni` (Many of these conditions can be deleted, see the "SSL" notes on targets below.) |
+| `use_openssl` (0/1) | `use_openssl` (true/false) | `//build/config/crypto.gni` (Many of these conditions can be deleted, see the "SSL" notes on targets below.) |
+| `use_pango` (0/1) | `use_pango` (true/false) | `//build/config/ui.gni` |
+| `use_ozone` (0/1) | `use_ozone` (true/false) | `//build/config/ui.gni` |
+| `use_seccomp_bpf` (0/1) | `use_seccomp_bpf` (true/false) | `//build/config/features.gni` |
+| `use_udev` (0/1) | `use_udev` (true/false) | `//build/config/features.gni` |
+| `use_x11` (0/1) | `use_x11` (true/false) | `//build/config/ui.gni` |
+| `use_xi2_mt` (0/1) | `use_xi2_mt` (true/false) | `//build/config/ui.gni` |
+| `win_use_allocator_shim` (0/1) | | (See "Allocator" below) |
+
+### Common target conversion
+
+Some targets that lots of projects depend on and how the GN ones
+correspond to GYP ones. (This is for commonly-depended-on or weird
+targets only, don't add stuff here just because you converted it.)
+
+| *GYP* | *GN* | *Notes* (see below) |
+|:-----------------------------------------------------------------------------------|:-----------------------------------------|:---------------------|
+| `base/base.gyp:base` | `//base` | |
+| `base/base.gyp:base_i18n` | `//base:i18n` | |
+| `base/base.gyp:run_all_unittests` | `//base/test:run_all_unittests` | |
+| `base/base.gyp:test_support_base` | `//base/test:test_support` | |
+| `base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations` | `//base/third_party/dynamic_annotations` | |
+| `build/linux/system.gyp:*` (except ssl) | `//build/config/linux:*` | Linux system targets |
+| `build/linux/system.gyp:ssl` | `//crypto:platform` | SSL |
+| `net/third_party/nss/ssl.gyp:libssl` | `//crypto:platform` | SSL |
+| `skia/skia.gyp:skia` | `//skia` | |
+| `testing/gmock.gyp:gmock` | `//testing/gmock` | Secondary tree |
+| `testing/gtest.gyp:gtest` | `//testing/gtest` | Secondary tree |
+| `third_party/icu/icu.gyp:icui18n` | `//third_party/icu` | Secondary tree, ICU |
+| `third_party/icu/icu.gyp:icuuc` | `//third_party/icu` | Secondary tree, ICU |
+| `url/url.gyp:url_lib` | `//url` | |
+
+Notes:
+
+ * *ICU:* GN has separate `//third_party/icu:icuuc` and
+ `//third_party/icu:icui18n` targets just like GYP. You can use these
+ if you only need one of them. Most targets want both, so GN made a
+ meta target that's just `//third_party/icu` which you can use that
+ redirects to both "uc" and "i18n".
+
+ * *Linux system targets:* Generally the names in GN match the GYP
+ names for the Linux system-related stuff. However, most of them are
+ configs instead of actual targets (in GYP they're all targets). For
+ example, since "x11" is just a list of libraries and include
+ directories, and includes no sources it's a config that just adds
+ this configuration to your target. To use a config, do `configs += [
+ "//build/config/linux:x11" ]`
+
+ * *Secondary tree:* Some projects are DEPSed in and we want it to look
+ like a BUILD.gn file is in that directory without checking it in to
+ the upstream repo. The directory `build/secondary` mirrors the main
+ tree and is checked for BUILD.gn files if an expected file in the
+ main tree wasn't found.
+
+ * *SSL:* In GYP there are lots of conditions around NSS vs. OpenSSL
+ and different platforms that determine which of the different SSL
+ libraries is linked. In GN, there is a meta-target
+ `//crypto:platform` that will "do the right thing" according to the
+current build platform and flags. Generally it's safe to replace any
+ conditional reference to a SSL library with this one.
+
+## Visibility and header file issues
+
+GN is much more strict about header file checking. You may encounter
+errors that your target doesn't depend on the target containing a
+certain header file. The most common example is including
+`base/macros.h` without having `//base` in your project's dependency
+list. The solution is to just add the missing dependency.
+
+The dependency tree must be a DAG. Some components might share headers
+between a number of internal targets that makes adding the "proper"
+dependencies impossible. In this case, you can separate out a
+`source_set` type target containing just the header(s) in question, and
+make the targets that use that header depend on that source set to break
+the cycle.
+
+## Other stuff
+
+### Target conditions
+
+`target_conditions` are like normal conditions but expanded in a
+different phase of GYP. You can generally just convert the conditions
+inside and not worry about the `conditions`/`target_conditions`
+difference.
+
+### xcode_settings
+
+Some xcode settings are obvious:
+
+```
+ 'xcode_settings': {'OTHER_LDFLAGS': ['-foo']},
+```
+
+Should just expand to:
+
+```
+ ldflags = [ "-foo" ]
+```
+
+Other flags are less obvious:
+
+```
+ 'xcode_settings': { 'GCC_SYMBOLS_PRIVATE_EXTERN': 'NO', },
+```
+
+These all correspond to various flags that get passed to the compiler or
+linker. You can use your favorite search engine to see what it
+corresponds to, but many of them are not well documented. You can also
+search for the string in
+[tools/gyp/pylib/gyp/xcode_emulation.py](https://code.google.com/p/chromium/codesearch#chromium/src/tools/gyp/pylib/gyp/xcode_emulation.py). GYP uses this file to decode
+the Xcode settings into command line flags for the ninja build.
+
+### wexit-time destructors
+
+Replace
+
+```
+'enable_wexit_time_destructors': 1,
+```
+
+with
+
+```
+configs += [ "//build/config/compiler:wexit_time_destructors" ]
+```
+
+### Chromium code
+
+In GYP code is "non-Chromium" by default, and you opt into higher warning levels using:
+
+```
+'chromium_code': 1,
+```
+
+In GN, all code is Chromium code by default. If you're compiling a
+static library that needs more lax warnings, opt out of the
+Chromium-code settings with:
+
+```
+configs -= [ "//build/config/compiler:chromium_code" ]
+configs += [ "//build/config/compiler:no_chromium_code" ]
+```
+
+### -fvisibility
+
+All symbols in the build have "hidden" visibility by default (this means
+that symbols aren't exported from shared libraries, a concept different
+than GN's target visibility). If you needed to export all symbols (for a
+third party library) by default in GYP you would do:
+
+```
+'xcode_settings': [
+ 'GCC_SYMBOLS_PRIVATE_EXTERN': 'NO', # no -fvisibility=hidden
+],
+'cflags!': [
+ '-fvisibility=hidden',
+],
+```
+
+In GN the equivalent is:
+
+```
+if (!is_win) {
+ configs -= [ "//build/config/gcc:symbol_visibility_hidden" ]
+}
+```
+
+### Dependent settings
+
+In GYP you'll see stuff like this, especially in third-party code.
+
+```
+'direct_dependent_settings': {
+ 'include_dirs': [
+ '.', # This directory.
+ '../..', # Root "src" path.
+ ],
+ 'defines': [
+ 'FOO',
+ ],
+},
+```
+
+Note that many of the includes are trying to add the root "src"
+directory to the include path. This is always present in GN so you can
+remove these.
+
+GYP also requires you to duplicate these settings, once for the target
+itself, and once for the direct/all dependent settings. In GN,
+public/all dependent configs also apply to the current target so you
+only need to specify it once.
+
+In GN, put the settings in a config (declared above your target), and
+then reference that as a public config in your target:
+
+```
+config("foo_config") {
+ include_dirs = [ "." ]
+ defines = [ "FOO" ]
+}
+
+component("foo") {
+ ...
+ public_configs = [ ":foo_config" ]
+}
+```
+
+Targets that depend on `foo` will now get `foo_config` applied.
+
+GYP would say `export_dependent_settings` to forward
+`direct_dependent_settings` up the dependency chain. In GN, put the
+dependency in the `public_deps` section and this will happen
+automatically.
+
+### MSVS disabled warnings
+
+In GYP you'll see for third-party code:
+
+```
+'msvs_disabled_warnings': [ 4018, 4244, 4267, ],
+```
+
+At least half of the warnings in these blocks are already disabled
+globally (we added more global ones later). From the command line, do:
+
+```
+$ cd src/build/config
+$ git grep 4018
+compiler/BUILD.gn: "/wd4018", # Comparing signed and unsigned values.
+```
+
+tells us that warning 4018 is already disabled globally from the
+`default_warning_flags` variable in `//build/config/compiler`, and the same
+for 4244. So ignore these.
+
+Always comment what the warning is. Use your favorite search engine and
+type "vc warning 4267" to look it up. You'll end up with:
+
+```
+if (is_win) {
+ cflags += [
+ "/wd4267", # Conversion from size_t to 'type'.
+ ]
+}
+```
+
+(Use `=` instead of `+=` if you haven't already defined a `cflags` variable.)
+
+### Mac frameworks
+
+GN knows to convert `.framework` files in the `libs` list to the right
+thing on Mac. You don't need to specify the directories either. So
+convert this:
+
+```
+'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Accelerate.framework',
+ ],
+},
+```
+
+to this:
+
+```
+libs = [ "Accelerate.framework" ]
+```
+
+### hard_dependency
+
+GYP code sometimes sets
+
+```
+ 'hard_dependency': 1,
+```
+
+to indicate that the current target must be built before its dependents.
+GN can deduce this internally, so you can ignore this directive.
+
+### Allocator
+
+GYP has `win_use_allocator_shim` and `use_allocator`. In GN, these are
+merged into `use_allocator` which is defined in
+`//build/config/allocator.gni`. _However_ you should almost never need
+to use this flag. The `//base/allocator` target will change depending on
+the current allocator flag, so you can unconditionally depend on this
+target to pick up the current build defaults.
+
+This:
+
+```
+ ['use_allocator!="none"', {
+ 'dependencies': [ '../base/allocator/allocator.gyp:allocator' ]
+ }],
+ ['win_use_allocator_shim==1', {
+ 'dependencies': [ '<(allocator_target)' ],
+ }],
+```
+
+Becomes:
+
+```
+ deps = [ "//base/allocator" ]
+```
+
+As in GYP, the allocator should only be depended on by executables (and
+tests). Libraries should not set the allocator.
+
+### optimize: max
+
+In Gyp:
+
+```
+'optimize': 'max',
+```
+
+only affects Windows and will optimize for speed rather than size. To
+get the same behavior in GN, do:
+
+```
+if (!is_debug && is_win) {
+ configs -= [ "//build/config/compiler:default_optimization" ]
+ configs += [ "//build/config/compiler:optimize_max" ]
+}
+```
+
+The `is_win` check is needed because the `optimize_max` config also
+affects Posix systems. Some components might additionally specify `-O2`
+on Posix to further optimize, in which case you can remove the `is_win`
+check.
+
+### Protobufs
+
+```
+import("//third_party/protobuf/proto_library.gni")
+
+proto_library("myproto") {
+ sources = [ "foo.proto" ]
+}
+```
+
+See the `third_party/protobuf/proto_library.gni` file for full
+documentation and extra flags.
+
+### Java stuff
+
+JNI generator in GYP:
+
+```
+{
+ 'target_name': 'foo_headers',
+ 'type': 'none',
+ 'sources': [ <java files> ]
+ 'variables': { 'jni_gen_package': 'foobar' }
+ 'includes': [ 'build/jni_generator.gypi' ]
+}
+```
+
+JNI generator in GN:
+
+```
+# At top of file:
+if (is_android) {
+ import("//build/config/android/rules.gni")
+}
+
+# Later:
+if (is_android) {
+ generate_jni("foo_headers") {
+ sources = [ <java files> ]
+ jni_package = "foobar"
+ }
+}
+```
+
+### Grit
+
+```
+import("//tools/grit/grit_rule.gni")
+
+grit("resources") {
+ source = "my_resources.grd"
+}
+```
+
+See `src/build/secondary/tools/grit/grit_rule.gni` for more documentation.
+
+### Mojo
+
+```
+import("//mojo/public/tools/bindings/mojom.gni")
+
+mojom("mojo_bindings") {
+ sources = [
+ "foo.mojom",
+ ]
+}
+```
diff --git a/chromium/tools/gn/docs/cross_compiles.md b/chromium/tools/gn/docs/cross_compiles.md
new file mode 100644
index 00000000000..68d9bcb8570
--- /dev/null
+++ b/chromium/tools/gn/docs/cross_compiles.md
@@ -0,0 +1,96 @@
+# How GN handles cross-compiling
+
+## As a GN user
+
+GN has robust support for doing cross compiles and building things for
+multiple architectures in a single build (e.g., to build some things to
+run locally and some things to run on an embedded device). In fact,
+there is no limit on the number of different architectures you can build
+at once; the Chromium build uses at least four in some configurations.
+
+To start, GN has the concepts of a _host_ and a _target_. The host is
+the platform that the build is run on, and the target is the platform
+where the code will actually run (This is different from
+[autotools](http://www.gnu.org/software/automake/manual/html_node/Cross_002dCompilation.html)'
+terminology, but uses the more common terminology for cross
+compiling).
+
+(Confusingly, GN also refers to each build artifact -- an executable,
+library, etc. -- as a target. On this page, we will use "target" only to
+refer to the system you want to run your code on, and use "rule" or some
+other synonym to refer to a specific build artifact).
+
+When GN starts up, the `host_os` and `host_cpu` variables are set
+automatically to match the operating system (they can be overridden in
+args files, which can be useful in weird corner cases). The user can
+specify that they want to do a cross-compile by setting either or both
+of `target_os` and `target_cpu`; if they are not set, the build config
+files will usually set them to the host's values, though the Chromium
+build will set target\_cpu to "arm" if target\_os is set to "android".
+
+So, for example, running on an x64 Linux machine:
+
+```
+gn gen out/Default
+```
+
+is equivalent to:
+
+```
+gn gen out/Default --args='target_os="linux" target_cpu="x64"'
+```
+
+To do a 32-bit ARM Android cross-compile, do:
+
+```
+gn gen out/Default --args='target_os="android"'
+```
+
+(We don't have to specify target\_cpu because of the conditionals
+mentioned above).
+
+And, to do a 64-bit MIPS ChromeOS cross-compile:
+
+```
+gn gen out/Default --args='target_os="chromeos" target_cpu="mips64el"'
+```
+
+## As a BUILD.gn author
+
+If you are editing build files outside of the //build directory (i.e.,
+not directly working on toolchains, compiler configs, etc.), generally
+you only need to worry about a few things:
+
+The `current_toolchain`, `current_cpu`, and `current_os` variables
+reflect the settings that are **currently** in effect in a given rule.
+The `is_linux`, `is_win` etc. variables are updated to reflect the
+current settings, and changes to `cflags`, `ldflags` and so forth also
+only apply to the current toolchain and the current thing being built.
+
+You can also refer to the `target_cpu` and `target_os` variables. This
+is useful if you need to do something different on the host depending on
+which target\_arch is requested; the values are constant across all
+toolchains. You can do similar things for the `host_cpu` and `host_os`
+variables, but should generally never need to.
+
+By default, dependencies listed in the `deps` variable of a rule use the
+same (currently active) toolchain. You may specify a different toolchain
+using the `foo(bar)` label notation as described in
+[GNLanguage#Labels](language.md#Labels).
+
+## As a //build/config or //build/toolchain author
+
+As described in
+[GNLanguage#Overall-build-flow](language.md#Overall-build-flow), the
+`default_toolchain` is declared in the `//build/config/BUILDCONFIG.gn`
+file. Usually the `default_toolchain` should be the toolchain for the
+`target_os` and `target_cpu`. The `current_toolchain` reflects the
+toolchain that is currently in effect for a rule.
+
+Be sure you understand the differences between `host_cpu`, `target_cpu`,
+`current_cpu`, and `toolchain_cpu` (and the os equivalents). The first
+two are set as described above. You are responsible for making sure that
+`current_cpu` is set appropriately in your toolchain definitions; if you
+are using the stock templates like `gcc_toolchain` and `msvc_toolchain`,
+that means you are responsible for making sure that `toolchain_cpu` and
+`toolchain_os` are set as appropriate in the template invocations.
diff --git a/chromium/tools/gn/docs/faq.md b/chromium/tools/gn/docs/faq.md
new file mode 100644
index 00000000000..5a4bb07d5e9
--- /dev/null
+++ b/chromium/tools/gn/docs/faq.md
@@ -0,0 +1,113 @@
+# GN Frequently Asked Questions
+
+[TOC]
+
+## How will the build be converted?
+
+We intend to build a second independent build in parallel to the GYP
+build. Previous efforts to generate GYP as an intermediate stage proved
+difficult. There will be some smaller set of bots compiling this build,
+and we'll keep the GN build running on these configurations.
+
+## What is unsupported in GN?
+
+The main features not supported in GN yet are:
+ * Mac/iOS bundles
+
+## Where is the GN documentation?
+
+Rather than on a separate wiki, it is versioned with the tool. Run `gn
+help`. See also the [quick start](quick_start.md) guide and the
+[language and operation details](language.md).
+
+## What is likely to break?
+
+Since common.gypi is not used for GN-generated GYP files, any rules
+there will no longer apply. There is a _lot_ of logic in there for many
+build configurations and various conditions where certain flags should
+or should not be used. Some of these build configurations aren't even
+supported any more. Some are run on other waterfalls or are used by
+individuals manually setting GYP\_DEFINES on their local system.
+
+## Will XCode/Visual Studio still be supported?
+
+Visual Studio is supported. Visual Studio can be used as an IDE for code
+browsing or debugging but Ninja is used for building.
+Run `gn help gen` for more details.
+
+XCode is not supported yet. We need help!
+
+## I'm weird. Will my uncommon build mode be supported?
+
+One of the main benefits of the build changeover is that it will
+encourage us to refactor the build system. The project has generally not
+been as strict with build complexity and maintenance as we have with
+source code, and a lot of cruft has accumulated.
+
+In most cases, we will want to rethink how build flags are supported. We
+want to be more modular rather than throwing everything in the
+common.gypi equivalent. The bar for additions at this level will be very
+high, and we will need to figure out how to design certain build
+features. If you are interested in some weird configurations, this will
+likely make your life more difficult in the short term, but will
+hopefully bring long-term benefits for everybody.
+
+In some cases, we may decide that the overhead of supporting your build
+for BeOS running on a DEC Alpha is not in the interests of the project.
+There will likely be discussions about where to draw the line, and how
+to allow those who want to do weird things to do them cleanly without
+negatively affecting the rest of the Chromium community.
+
+## I'm only a little bit weird, will my development build flag be supported?
+
+Only if you do it yourself!
+
+Some features have build flags that turn on a debugging mode or switch
+between internal/external builds. This can be supported, but as with
+GYP, your team will have to add and maintain the support.
+
+## I use supplement.gypi, what's the GN equivalent?
+
+Some projects use files called `supplement.gypi` to set build flags. GYP
+looks in each directory under `src` and merges these files into the
+build. The model is that adding something to your gclient to add
+something to your build (e.g. `src-internal`) automatically sets flags.
+
+This behavior is fragile and mysterious. Some people get build flags and
+they don't know why. If you remove the entry from your `.gclient` and
+don't remove the directory you'll be stuck on an old version of the
+flags/code and not know why. You can't have builds in the same checkout
+with the corresponding flags on and off. Some people and projects were
+abusing this behavior.
+
+In GN, such things should be done with build arguments (`gn args`) and
+configured on your build directory when you set it up. For some people,
+this will be an extra step. But it is explicit and clear, and you can
+have multiple builds in the same checkout with different flags.
+
+## How do I generate common build variants?
+
+In GN, args go with a build directory rather than being global in the
+environment. To edit the args for your `out/Default` build directory:
+
+```
+gn args out/Default
+```
+
+You can set variables in that file:
+
+ * The default is a debug build. To do a release build add
+ `is_debug = false`
+ * The default is a static build. To do a component build add
+ `is_component_build = true`
+ * The default is a developer build. To do an official build, set
+ `is_official_build = true`
+ * The default is Chromium branding. To do Chrome branding, set
+ `is_chrome_branded = true`
+
+## How do I do cross-compiles?
+
+GN has robust support for doing cross compiles and building things for
+multiple architectures in a single build.
+
+See [GNCrossCompiles](cross_compiles.md) for more info.
diff --git a/chromium/tools/gn/docs/hacking.md b/chromium/tools/gn/docs/hacking.md
new file mode 100644
index 00000000000..ab94b58c927
--- /dev/null
+++ b/chromium/tools/gn/docs/hacking.md
@@ -0,0 +1,23 @@
+# Hacking on the GN binary itself
+
+## Building GN itself
+
+GN is part of the Chromium tree. If you have a Chromium checkout, you
+already have the source and you can do `ninja -C out/Debug gn` to
+build it.
+
+To build gn using gn, run (in the root `src` directory):
+
+```
+gn gen out/Default
+ninja -C out/Default gn
+```
+
+Change `out/Default` as necessary to put the build directory where you
+want.
+
+## Running GN's unit tests
+
+```
+ninja -C out/Default gn_unittests && out/Default/gn_unittests
+```
diff --git a/chromium/tools/gn/docs/language.md b/chromium/tools/gn/docs/language.md
new file mode 100644
index 00000000000..6032329227b
--- /dev/null
+++ b/chromium/tools/gn/docs/language.md
@@ -0,0 +1,810 @@
+# GN Language and Operation
+
+[TOC]
+
+## Introduction
+
+This page describes many of the language details and behaviors.
+
+### Use the built-in help!
+
+GN has an extensive built-in help system which provides a reference for
+every function and built-in variable. This page is more high-level.
+
+```
+gn help
+```
+
+You can also see the
+[slides](https://docs.google.com/presentation/d/15Zwb53JcncHfEwHpnG_PoIbbzQ3GQi_cpujYwbpcbZo/edit?usp=sharing)
+from a March, 2016 introduction to GN. The speaker notes contain the full
+content.
+
+### Design philosophy
+
+ * Writing build files should not be a creative endeavour. Ideally two
+ people should produce the same buildfile given the same
+ requirements. There should be no flexibility unless it's absolutely
+ needed. As many things should be fatal errors as possible.
+
+ * The definition should read more like code than rules. I don't want
+ to write or debug Prolog. But everybody on our team can write and
+ debug C++ and Python.
+
+ * The build language should be opinionated as to how the build should
+ work. It should not necessarily be easy or even possible to express
+ arbitrary things. We should be changing source and tooling to make
+ the build simpler rather than making everything more complicated to
+ conform to external requirements (within reason).
+
+ * Be like Blaze when it makes sense (see "Differences and similarities
+ to Blaze" below).
+
+## Language
+
+GN uses an extremely simple, dynamically typed language. The types are:
+
+ * Boolean (`true`, `false`).
+ * 64-bit signed integers.
+ * Strings.
+ * Lists (of any other types).
+ * Scopes (sort of like a dictionary, only for built-in stuff).
+
+There are some built-in variables whose values depend on the current
+environment. See `gn help` for more.
+
+There are purposefully many omissions in the language. There are no
+user-defined function calls, for example (templates are the closest thing). As
+per the above design philosophy, if you need this kind of thing you're probably
+doing it wrong.
+
+The variable `sources` has a special rule: when assigning to it, a list
+of exclusion patterns is applied to it. This is designed to
+automatically filter out some types of files. See `gn help
+set_sources_assignment_filter` and `gn help label_pattern` for more.
+
+The full grammar for language nerds is available in `gn help grammar`.
+
+### Strings
+
+Strings are enclosed in double-quotes and use backslash as the escape
+character. The only escape sequences supported are:
+
+ * `\"` (for literal quote)
+  * `\$` (for literal dollar sign)
+ * `\\` (for literal backslash)
+
+Any other use of a backslash is treated as a literal backslash. So, for
+example, `\b` used in patterns does not need to be escaped, nor do most Windows
+paths like `"C:\foo\bar.h"`.
+
+Simple variable substitution is supported via `$`, where the word
+following the dollar sign is replaced with the value of the variable.
+You can optionally surround the name with `{}` if there is not a
+non-variable-name character to terminate the variable name. More complex
+expressions are not supported, only variable name substitution.
+
+```
+a = "mypath"
+b = "$a/foo.cc" # b -> "mypath/foo.cc"
+c = "foo${a}bar.cc" # c -> "foomypathbar.cc"
+```
+
+You can encode 8-bit characters using "$0xFF" syntax, so a string with newlines
+(hex 0A) would look like `"look$0x0Alike$0x0Athis"`.
+
+### Lists
+
+There is no way to get the length of a list. If you find yourself
+wanting to do this kind of thing, you're trying to do too much work in
+the build.
+
+Lists support appending:
+
+```
+a = [ "first" ]
+a += [ "second" ] # [ "first", "second" ]
+a += [ "third", "fourth" ] # [ "first", "second", "third", "fourth" ]
+b = a + [ "fifth" ] # [ "first", "second", "third", "fourth", "fifth" ]
+```
+
+Appending a list to another list appends the items in the second list
+rather than appending the list as a nested member.
+
+You can remove items from a list:
+
+```
+a = [ "first", "second", "third", "first" ]
+b = a - [ "first" ] # [ "second", "third" ]
+a -= [ "second" ]  # [ "first", "third", "first" ]
+```
+
+The - operator on a list searches for matches and removes all matching
+items. Subtracting a list from another list will remove each item in the
+second list.
+
+If no matching items are found, an error will be thrown, so you need to
+know in advance that the item is there before removing it. Given that
+there is no way to test for inclusion, the main use-case is to set up a
+master list of files or flags, and to remove ones that don't apply to
+the current build based on various conditions.
+
+Stylistically, prefer to only add to lists and have each source file or
+dependency appear once. This is the opposite of the advice Chrome-team used to
+give for GYP (GYP would prefer to list all files, and then remove the ones you
+didn't want in conditionals).
+
+Lists support zero-based subscripting to extract values:
+
+```
+a = [ "first", "second", "third" ]
+b = a[1] # -> "second"
+```
+
+The \[\] operator is read-only and can not be used to mutate the
+list. The primary use-case of this is when an external script returns
+several known values and you want to extract them.
+
+There are some cases where it's easy to overwrite a list when you mean
+to append to it instead. To help catch this case, it is an error to
+assign a nonempty list to a variable containing an existing nonempty
+list. If you want to get around this restriction, first assign the
+destination variable to the empty list.
+
+```
+a = [ "one" ]
+a = [ "two" ] # Error: overwriting nonempty list with a nonempty list.
+a = [] # OK
+a = [ "two" ] # OK
+```
+
+Note that execution of the build script is done without intrinsic
+knowledge of the meaning of the underlying data. This means that it
+doesn't know that `sources` is a list of file names, for example. So if
+you remove an item, it must match the literal string rather than
+specifying a different name that will resolve to the same file name.
+
+### Conditionals
+
+Conditionals look like C:
+
+```
+ if (is_linux || (is_win && target_cpu == "x86")) {
+ sources -= [ "something.cc" ]
+ } else if (...) {
+ ...
+ } else {
+ ...
+ }
+```
+
+You can use them in most places, even around entire targets if the
+target should only be declared in certain circumstances.
+
+### Looping
+
+You can iterate over a list with `foreach`. This is discouraged. Most things
+the build should do can normally be expressed without doing this, and if you
+find it necessary it may be an indication you're doing too much work in the
+metabuild.
+
+```
+foreach(i, mylist) {
+ print(i) # Note: i is a copy of each element, not a reference to it.
+}
+```
+
+### Function calls
+
+Simple function calls look like most other languages:
+
+```
+print("hello, world")
+assert(is_win, "This should only be executed on Windows")
+```
+
+Such functions are built-in and the user can not define new ones.
+
+Some functions take a block of code enclosed by `{ }` following them:
+
+```
+static_library("mylibrary") {
+ sources = [ "a.cc" ]
+}
+```
+
+Most of these define targets. The user can define new functions like this
+with the template mechanism discussed below.
+
+Precisely, this expression means that the block becomes an argument to the
+function for the function to execute. Most of the block-style functions execute
+the block and treat the resulting scope as a dictionary of variables to read.
+
+### Scoping and execution
+
+Files and function calls followed by `{ }` blocks introduce new scopes. Scopes
+are nested. When you read a variable, the containing scopes will be searched in
+reverse order until a matching name is found. Variable writes always go to the
+innermost scope.
+
+There is no way to modify any enclosing scope other than the innermost
+one. This means that when you define a target, for example, nothing you
+do inside of the block will "leak out" into the rest of the file.
+
+`if`/`else`/`foreach` statements, even though they use `{ }`, do not introduce
+a new scope so changes will persist outside of the statement.
+
+## Naming things
+
+### File and directory names
+
+File and directory names are strings and are interpreted as relative to
+the current build file's directory. There are three possible forms:
+
+Relative names:
+
+```
+"foo.cc"
+"src/foo.cc"
+"../src/foo.cc"
+```
+
+Source-tree absolute names:
+
+```
+"//net/foo.cc"
+"//base/test/foo.cc"
+```
+
+System absolute names (rare, normally used for include directories):
+
+```
+"/usr/local/include/"
+"/C:/Program Files/Windows Kits/Include"
+```
+
+### Labels
+
+Everything that can participate in the dependency graph (targets,
+configs, and toolchains) are identified by labels which are strings of a
+defined format. A common label looks like this:
+
+```
+"//base/test:test_support"
+```
+
+which consists of a source-root-absolute path, a colon, and a name. This
+means to look for the thing named "test\_support" in
+`src/base/test/BUILD.gn`.
+
+When loading a build file, if it doesn't exist in the given location
+relative to the source root, GN will look in the secondary tree in
+`build/secondary`. The structure of this tree mirrors the main
+repository and is a way to add build files for directories that may be
+pulled from other repositories where we can't easily check in BUILD
+files. The secondary tree is a fallback rather than an override, so a file in
+the normal location always takes precedence.
+
+A canonical label also includes the label of the toolchain being used.
+Normally, the toolchain label is implicitly inherited, but you can
+include it to specify cross-toolchain dependencies (see "Toolchains"
+below).
+
+```
+"//base/test:test_support(//build/toolchain/win:msvc)"
+```
+
+In this case it will look for the toolchain definition called "msvc"
+in the file `//build/toolchain/win` to know how to compile this target.
+
+If you want to refer to something in the same buildfile, you can omit
+the path name and just start with a colon.
+
+```
+":base"
+```
+
+Labels can be specified as being relative to the current directory.
+Stylistically, we prefer to use absolute paths for all non-file-local
+references unless a build file needs to be run in different contexts (like
+a project needs to be both standalone and pulled into other projects in
+different places in the directory hierarchy).
+
+```
+"source/plugin:myplugin" # Prefer not to do these.
+"../net:url_request"
+```
+
+If a name is unspecified, it will inherit the directory name. Stylistically, we
+prefer to omit the colon and name in these cases.
+
+```
+"//net" = "//net:net"
+"//tools/gn" = "//tools/gn:gn"
+```
+
+## Build configuration
+
+### Overall build flow
+
+ 1. Look for `.gn` file in the current directory and walk up the
+ directory tree until one is found. Set this directory to be the
+ "source root" and interpret this file to find the name of the build
+ config file.
+ 2. Execute the build config file (this is the default toolchain). In Chrome
+ this is `//build/config/BUILDCONFIG.gn`.
+ 3. Load the `BUILD.gn` file in the root directory.
+ 4. Recursively load `BUILD.gn` in other directories to resolve all
+ current dependencies. If a BUILD file isn't found in the specified
+ location, GN will look in the corresponding location inside
+ `build/secondary`.
+ 5. When a target's dependencies are resolved, write out the `.ninja`
+ file to disk.
+ 6. When all targets are resolved, write out the root `build.ninja`
+ file.
+
+### The build config file
+
+The first file executed is the build config file. The name of this file
+is specified in the `.gn` file that marks the root of the repository. In
+Chrome it is `//build/config/BUILDCONFIG.gn`. There is only one build
+config file.
+
+This file sets up the scope in which all other build files will execute.
+Any arguments, variables, defaults, etc. set up in this file will be
+visible to all files in the build.
+
+It is executed once for each toolchain (see "Toolchains").
+
+### Build arguments
+
+Arguments can be passed in from the command line (and from other
+toolchains, see "Toolchains" below). You declare which arguments you
+accept and specify default values via `declare_args`.
+
+See `gn help buildargs` for an overview of how this works. See `gn help
+declare_args` for specifics on declaring them.
+
+It is an error to declare a given argument more than once in a given
+scope. Typically arguments would be declared in an imported file (to
+share them among some subset of the build) or in the main build config
+file (to make them global).
+
+### Target defaults
+
+You can set up some default values for a given target type. This is
+normally done in the build config file to set a list of default configs
+that defines the build flags and other setup information for each target
+type.
+
+See `gn help set_defaults`.
+
+For example, when you declare a `static_library`, the target defaults
+for a static library are applied. These values can be overwritten,
+modified, or preserved by a target.
+
+```
+# This call is typically in the build config file (see above).
+set_defaults("static_library") {
+ configs = [ "//build:rtti_setup", "//build:extra_warnings" ]
+}
+
+# This would be in your directory's BUILD.gn file.
+static_library("mylib") {
+ # At this point configs is set to [ "//build:rtti_setup", "//build:extra_warnings" ]
+ # by default but may be modified.
+  configs -= [ "//build:extra_warnings" ]  # Don't want these warnings.
+  configs += [ ":mylib_config" ]  # Add some more configs.
+}
+```
+
+The other use-case for setting target defaults is when you define your
+own target type via `template` and want to specify certain default
+values.
+
+## Targets
+
+A target is a node in the build graph. It usually represents some kind
+of executable or library file that will be generated. Targets depend on
+other targets. The built-in target types (see `gn help <targettype>` for
+more help) are:
+
+ * `action`: Run a script to generate a file.
+ * `action_foreach`: Run a script once for each source file.
+ * `bundle_data`: Declare data to go into a Mac/iOS bundle.
+ * `create_bundle`: Creates a Mac/iOS bundle.
+ * `executable`: Generates an executable file.
+ * `group`: A virtual dependency node that refers to one or more other
+ targets.
+ * `shared_library`: A .dll or .so.
+ * `loadable_module`: A .dll or .so loadable only at runtime.
+ * `source_set`: A lightweight virtual static library (usually
+    preferable over a real static library since it will build faster).
+ * `static_library`: A .lib or .a file (normally you'll want a
+ `source_set` instead).
+
+You can extend this to make custom target types using templates (see below). In
+Chrome some of the more commonly-used templates are:
+
+ * `component`: Either a source set or shared library, depending on the
+ build type.
+  * `test`: A test executable. On mobile this will create the appropriate
+ native app type for tests.
+ * `app`: Executable or Mac/iOS application.
+ * `android_apk`: Make an APK. There are a _lot_ of other Android ones, see
+ `//build/config/android/rules.gni`.
+
+## Configs
+
+Configs are named objects that specify sets of flags, include
+directories, and defines. They can be applied to a target and pushed to
+dependent targets.
+
+To define a config:
+
+```
+config("myconfig") {
+  include_dirs = [ "src/include" ]
+ defines = [ "ENABLE_DOOM_MELON" ]
+}
+```
+
+To apply a config to a target:
+
+```
+executable("doom_melon") {
+ configs = [ ":myconfig" ]
+}
+```
+
+It is common for the build config file to specify target defaults that
+set a default list of configs. Targets can add or remove to this list as
+needed. So in practice you would usually use `configs += [ ":myconfig" ]` to
+append to the list of defaults.
+
+See `gn help config` for more information about how configs are declared
+and applied.
+
+### Public configs
+
+A target can apply settings to other targets that depend on it. The most
+common example is a third party target that requires some defines or
+include directories for its headers to compile properly. You want these
+settings to apply both to the compile of the third party library itself,
+as well as all targets that use the library.
+
+To do this, you write a config with the settings you want to apply:
+
+```
+config("my_external_library_config") {
+  include_dirs = [ "." ]
+ defines = [ "DISABLE_JANK" ]
+}
+```
+
+Then this config is added to the target as a "public" config. It will
+apply both to the target as well as targets that directly depend on it.
+
+```
+shared_library("my_external_library") {
+ ...
+ # Targets that depend on this get this config applied.
+ public_configs = [ ":my_external_library_config" ]
+}
+```
+
+Dependent targets can in turn forward this up the dependency tree
+another level by adding your target as a "public" dependency.
+
+```
+static_library("intermediate_library") {
+ ...
+ # Targets that depend on this one also get the configs from "my external library".
+ public_deps = [ ":my_external_library" ]
+}
+```
+
+A target can forward a config to all dependents until a link boundary is
+reached by setting it as an `all_dependent_config`. This is strongly
+discouraged as it can spray flags and defines over more of the build than
+necessary. Instead, use public_deps to control which flags apply where.
+
+In Chrome, prefer the build flag header system (`build/buildflag_header.gni`)
+for defines which prevents most screw-ups with compiler defines.
+
+## Toolchains
+
+A toolchain is a set of build commands to run for different types of
+input files and link tasks.
+
+You can have multiple toolchains in the build. It's easiest to think
+about each one as completely separate builds that can additionally have
+dependencies between them. This means, for example, that the 32-bit
+Windows build might depend on a 64-bit helper target. Each of them can
+depend on `"//base:base"` which will be the 32-bit base in the context
+of the 32-bit toolchain, and the 64-bit base in the context of the
+64-bit toolchain.
+
+When a target specifies a dependency on another target, the current
+toolchain is inherited unless it is explicitly overridden (see "Labels"
+above).
+
+### Toolchains and the build configuration
+
+When you have a simple build with only one toolchain, the build config
+file is loaded only once at the beginning of the build. It must call
+`set_default_toolchain` to tell GN the label of the toolchain definition
+to use. This toolchain definition has the commands to use for the
+compiler and linker. The `toolchain_args` section of the toolchain
+definition is ignored.
+
+When a target has a dependency on a target using a different toolchain, GN
+will start a build using that secondary toolchain to resolve the target.
+GN will load the build config file with the arguments specified in the
+toolchain definition. Since the toolchain is already known, calls to
+`set_default_toolchain` are ignored.
+
+So the toolchain configuration is two-way. In the default toolchain
+(i.e. the main build target) the configuration flows from the build
+config file to the toolchain: the build config file looks at the state
+of the build (OS type, CPU architecture, etc.) and decides which
+toolchain to use (via `set_default_toolchain`). In secondary toolchains,
+the configuration flows from the toolchain to the build config file: the
+`toolchain_args` in the toolchain definition specifies the arguments to
+re-invoke the build.
+
+### Toolchain example
+
+Say the default build is a 64-bit build. Either this is the default CPU
+architecture based on the current system, or the user has passed
+`target_cpu="x64"` on the command line. The build config file might look
+like this to set up the default toolchain:
+
+```
+# Set default toolchain only has an effect when run in the context of
+# the default toolchain. Pick the right one according to the current CPU
+# architecture.
+if (target_cpu == "x64") {
+ set_default_toolchain("//toolchains:64")
+} else if (target_cpu == "x86") {
+ set_default_toolchain("//toolchains:32")
+}
+```
+
+If a 64-bit target wants to depend on a 32-bit binary, it would specify
+a dependency using `data_deps` (data deps are like deps that are only
+needed at runtime and aren't linked, since you can't link a 32-bit and a
+64-bit library).
+
+```
+executable("my_program") {
+ ...
+ if (target_cpu == "x64") {
+ # The 64-bit build needs this 32-bit helper.
+ data_deps = [ ":helper(//toolchains:32)" ]
+ }
+}
+
+if (target_cpu == "x86") {
+ # Our helper library is only compiled in 32-bits.
+ shared_library("helper") {
+ ...
+ }
+}
+```
+
+The toolchain file referenced above (`toolchains/BUILD.gn`) would define
+two toolchains:
+
+```
+toolchain("32") {
+ tool("cc") {
+ ...
+ }
+ ... more tools ...
+
+ # Arguments to the build when re-invoking as a secondary toolchain.
+ toolchain_args() {
+ toolchain_cpu = "x86"
+ }
+}
+
+toolchain("64") {
+ tool("cc") {
+ ...
+ }
+ ... more tools ...
+
+ # Arguments to the build when re-invoking as a secondary toolchain.
+ toolchain_args() {
+ toolchain_cpu = "x64"
+ }
+}
+```
+
+The toolchain args specifies the CPU architecture explicitly, so if a
+target depends on something using that toolchain, that cpu architecture
+will be set when re-invoking the build. These args are ignored for the
+default toolchain since by the time they're known the build config has
+already been run. In general, the toolchain args and the conditions used
+to set the default toolchain should agree.
+
+The nice thing about the multiple-build setup is that you can write
+conditionals in your targets referencing the current toolchain state.
+The build files will be re-run with different state for each toolchain.
+For the `my_program` example above, you can see it queries the CPU
+architecture, adding a dependency only for the 64-bit build of the
+program. The 32-bit build would not get this dependency.
+
+### Declaring a toolchain
+
+Toolchains are declared with the `toolchain` command, which sets the
+commands to use for each compile and link operation. The toolchain also
+specifies a set of arguments to pass to the build config file when
+executing. This allows you to pass configuration information to the
+alternate toolchain.
+
+## Templates
+
+Templates are GN's primary way to re-use code. Typically, a template
+would expand to one or more other target types.
+
+```
+# Declares a script that compiles IDL files to source, and then compiles those
+# source files.
+template("idl") {
+ # Always base helper targets on target_name so they're unique. Target name
+ # will be the string passed as the name when the template is invoked.
+ idl_target_name = "${target_name}_generate"
+ action_foreach(idl_target_name) {
+ ...
+ }
+
+ # Your template should always define a target with the name target_name.
+ # When other targets depend on your template invocation, this will be the
+ # destination of that dependency.
+ source_set(target_name) {
+ ...
+ deps = [ ":$idl_target_name" ] # Require the sources to be compiled.
+ }
+}
+```
+
+Typically your template definition would go in a `.gni` file and users
+would import that file to see the template definition:
+
+```
+import("//tools/idl_compiler.gni")
+
+idl("my_interfaces") {
+ sources = [ "a.idl", "b.idl" ]
+}
+```
+
+Declaring a template creates a closure around the variables in scope at
+that time. When the template is invoked, the magic variable `invoker` is
+used to read variables out of the invoking scope. The template would
+generally copy the values it's interested in into its own scope:
+
+```
+template("idl") {
+ source_set(target_name) {
+ sources = invoker.sources
+ }
+}
+```
+
+The current directory when a template executes will be that of the
+invoking build file rather than the template source file. This is so
+files passed in from the template invoker will be correct (this
+generally accounts for most file handling in a template). However, if
+the template has files itself (perhaps it generates an action that runs
+a script), you will want to use absolute paths ("//foo/...") to refer to
+these files to account for the fact that the current directory will be
+unpredictable during invocation. See `gn help template` for more
+information and more complete examples.
+
+## Other features
+
+### Imports
+
+You can import `.gni` files into the current scope with the `import`
+function. This is _not_ an include in the C++ sense. The imported file is
+executed independently and the resulting scope is copied into the current file
+(C++ executes the included file in the current context of when the
+include directive appeared). This allows the results of the import to be
+cached, and also prevents some of the more "creative" uses of includes like
+multiply-included files.
+
+Typically, a `.gni` would define build arguments and templates. See `gn
+help import` for more.
+
+Your `.gni` file can define temporary variables that are not exported to
+files that import it by using a preceding underscore in the name like `_this`.
+
+### Path processing
+
+Often you will want to make a file name or a list of file names relative
+to a different directory. This is especially common when running
+scripts, which are executed with the build output directory as the
+current directory, while build files usually refer to files relative to
+their containing directory.
+
+You can use `rebase_path` to convert directories. See `gn help
+rebase_path` for more help and examples. Typical usage to convert a file
+name relative to the current directory to be relative to the root build
+directory would be:
+
+```
+new_paths = rebase_path("myfile.c", root_build_dir)
+```
+
+### Patterns
+
+Patterns are used to generate the output file names for a given set of
+inputs for custom target types, and to automatically remove files from
+the `sources` variable (see `gn help set_sources_assignment_filter`).
+
+They are like simple regular expressions. See `gn help label_pattern`
+for more.
+
+### Executing scripts
+
+There are two ways to execute scripts. All external scripts in GN are in
+Python. The first way is as a build step. Such a script would take some
+input and generate some output as part of the build. Targets that invoke
+scripts are declared with the "action" target type (see `gn help
+action`).
+
+The second way to execute scripts is synchronously during build file
+execution. This is necessary in some cases to determine the set of files
+to compile, or to get certain system configurations that the build file
+might depend on. The build file can read the stdout of the script and
+act on it in different ways.
+
+Synchronous script execution is done by the `exec_script` function (see
+`gn help exec_script` for details and examples). Because synchronously
+executing a script requires that the current buildfile execution be
+suspended until a Python process completes execution, relying on
+external scripts is slow and should be minimized.
+
+To prevent abuse, files permitted to call `exec_script` can be whitelisted in
+the toplevel `.gn` file. Chrome does this to require additional code review
+for such additions. See `gn help dotfile`.
+
+You can synchronously read and write files which is discouraged but
+occasionally necessary when synchronously running scripts. The typical use-case
+would be to pass a list of file names longer than the command-line limits of
+the current platform. See `gn help read_file` and `gn help write_file` for how
+to read and write files. These functions should be avoided if at all possible.
+
+Actions that exceed command-line length limits can use response files to
+get around this limitation without synchronously writing files. See
+`gn help response_file_contents`.
+
+# Differences and similarities to Blaze
+
+Blaze is Google's internal build system, now publicly released as
+[Bazel](http://bazel.io/). It has inspired a number of other systems such as
+[Pants](https://github.com/twitter/commons/tree/master/src/python/twitter/pants)
+and [Buck](http://facebook.github.io/buck/).
+
+In Google's homogeneous environment, the need for conditionals is very
+low and they can get by with a few hacks (`abi_deps`). Chrome uses
+conditionals all over the place and the need to add these is the main
+reason for the files looking different.
+
+GN also adds the concept of "configs" to manage some of the trickier
+dependency and configuration problems which likewise don't arise on the
+server. Blaze has a concept of a "configuration" which is like a GN
+toolchain, but built into the tool itself. The way that toolchains work
+in GN is a result of trying to separate this concept out into the build
+files in a clean way.
+
+GN keeps some GYP concepts like "all dependent" settings which work a bit
+differently in Blaze. This is partially to make conversion from the existing
+GYP code easier, and the GYP constructs generally offer more fine-grained
+control (which is either good or bad, depending on the situation).
+
+GN also uses GYP names like "sources" instead of "srcs" since
+abbreviating this seems needlessly obscure, although it uses Blaze's
+"deps" since "dependencies" is so hard to type. Chromium also compiles
+multiple languages in one target so specifying the language type on the
+target name prefix was dropped (e.g. from `cc_library`).
diff --git a/chromium/tools/gn/docs/quick_start.md b/chromium/tools/gn/docs/quick_start.md
new file mode 100644
index 00000000000..8b064deb18d
--- /dev/null
+++ b/chromium/tools/gn/docs/quick_start.md
@@ -0,0 +1,366 @@
+# GN Quick Start guide
+
+[TOC]
+
+## Running GN
+
+You just run `gn` from the command line. There is a script in
+depot\_tools (which is presumably on your path) with this name. The
+script will find the binary in the source tree containing the current
+directory and run it.
+
+## Setting up a build
+
+In GYP, the system would generate `Debug` and `Release` build
+directories for you and configure them accordingly. GN doesn't do this.
+Instead, you set up whatever build directory you want with whatever
+configuration you want. The Ninja files will be automatically
+regenerated if they're out of date when you build in that directory.
+
+To make a build directory:
+
+```
+gn gen out/my_build
+```
+
+## Passing build arguments
+
+Set build arguments on your build directory by running:
+
+```
+gn args out/my_build
+```
+
+This will bring up an editor. Type build args into that file like this:
+
+```
+is_component_build = true
+is_debug = false
+```
+
+You can see the list of available arguments and their default values by
+typing
+
+```
+gn args --list out/my_build
+```
+
+on the command line. See "Taking build arguments" below for information
+on how to use these in your code. (Note that you have to specify the
+build directory for this command because the available arguments can
+change according to what's set.)
+
+Chrome developers can also read the [Chrome-specific build
+configuration](http://www.chromium.org/developers/gn-build-configuration)
+instructions for more information.
+
+## Cross-compiling to a target OS or architecture
+
+Run `gn args out/Default` (substituting your build directory as needed) and
+add one or more of the following lines for common cross-compiling options.
+
+```
+target_os = "chromeos"
+target_os = "android"
+
+target_cpu = "arm"
+target_cpu = "x86"
+target_cpu = "x64"
+```
+
+See [GNCrossCompiles](cross_compiles.md) for more info.
+
+## Configuring goma
+
+Run `gn args out/Default` (substituting your build directory as needed).
+Add:
+
+```
+use_goma = true
+goma_dir = "~/foo/bar/goma"
+```
+
+If your goma is in the default location (`~/goma`) then you can omit the
+`goma_dir` line.
+
+## Configuring component mode
+
+This is a build arg like the goma flags. Run `gn args out/Default` and add:
+
+```
+is_component_build = true
+```
+
+## Step-by-step
+
+### Adding a build file
+
+Create a `tools/gn/tutorial/BUILD.gn` file and enter the following:
+
+```
+executable("hello_world") {
+ sources = [
+ "hello_world.cc",
+ ]
+}
+```
+
+There should already be a `hello_world.cc` file in that directory,
+containing what you expect. That's it! Now we just need to tell the
+build about this file. Open the `BUILD.gn` file in the root directory
+and add the label of this target to the dependencies of one of the root
+groups (a "group" target is a meta-target that is just a collection of
+other targets):
+
+```
+group("root") {
+ deps = [
+ ...
+ "//url",
+ "//tools/gn/tutorial:hello_world",
+ ]
+}
+```
+
+You can see the label of your target is "//" (indicating the source
+root), followed by the directory name, a colon, and the target name.
+
+### Testing your addition
+
+From the command line in the source root directory:
+
+```
+gn gen out/Default
+ninja -C out/Default hello_world
+out/Default/hello_world
+```
+
+GN encourages target names for static libraries that aren't globally
+unique. To build one of these, you can pass the label with no leading
+"//" to ninja:
+
+```
+ninja -C out/Default tools/gn/tutorial:hello_world
+```
+
+### Declaring dependencies
+
+Let's make a static library that has a function to say hello to random
+people. There is a source file `hello.cc` in that directory which has a
+function to do this. Open the `tools/gn/tutorial/BUILD.gn` file and add
+the static library to the bottom of the existing file:
+
+```
+static_library("hello") {
+ sources = [
+ "hello.cc",
+ ]
+}
+```
+
+Now let's add an executable that depends on this library:
+
+```
+executable("say_hello") {
+ sources = [
+ "say_hello.cc",
+ ]
+ deps = [
+ ":hello",
+ ]
+}
+```
+
+This executable includes one source file and depends on the previous
+static library. The static library is referenced by its label in the
+`deps`. You could have used the full label `//tools/gn/tutorial:hello`
+but if you're referencing a target in the same build file, you can use
+the shortcut `:hello`.
+
+### Test the static library version
+
+From the command line in the source root directory:
+
+```
+ninja -C out/Default say_hello
+out/Default/say_hello
+```
+
+Note that you **didn't** need to re-run GN. GN will automatically rebuild
+the ninja files when any build file has changed. You know this happens
+when ninja prints `[1/1] Regenerating ninja files` at the beginning of
+execution.
+
+### Compiler settings
+
+Our hello library has a new feature, the ability to say hello to two
+people at once. This feature is controlled by defining `TWO_PEOPLE`. We
+can add defines like so:
+
+```
+static_library("hello") {
+ sources = [
+ "hello.cc",
+ ]
+ defines = [
+ "TWO_PEOPLE",
+ ]
+}
+```
+
+### Putting settings in a config
+
+However, users of the library also need to know about this define, and
+putting it in the static library target defines it only for the files
+there. If somebody else includes `hello.h`, they won't see the new
+definition. To see the new definition, everybody will have to define
+`TWO_PEOPLE`.
+
+GN has a concept called a "config" which encapsulates settings. Let's
+create one that defines our preprocessor define:
+
+```
+config("hello_config") {
+ defines = [
+ "TWO_PEOPLE",
+ ]
+}
+```
+
+To apply these settings to your target, you only need to add the
+config's label to the list of configs in the target:
+
+```
+static_library("hello") {
+ ...
+ configs += [
+ ":hello_config",
+ ]
+}
+```
+
+Note that you need "+=" here instead of "=" since the build
+configuration has a default set of configs applied to each target that
+set up the default build stuff. You want to add to this list rather than
+overwrite it. To see the default configs, you can use the `print`
+function in the build file or the `desc` command-line subcommand (see
+below for examples of both).
+
+### Dependent configs
+
+This nicely encapsulates our settings, but still requires everybody that
+uses our library to set the config on themselves. It would be nice if
+everybody that depends on our `hello` library can get this
+automatically. Change your library definition to:
+
+```
+static_library("hello") {
+ sources = [
+ "hello.cc",
+ ]
+ all_dependent_configs = [
+ ":hello_config"
+ ]
+}
+```
+
+This applies the `hello_config` to the `hello` target itself, plus all
+targets that transitively depend on the current one. Now everybody that
+depends on us will get our settings. You can also set `public_configs`
+which applies only to targets that directly depend on your target (not
+transitively).
+
+Now if you compile and run, you'll see the new version with two people:
+
+```
+> ninja -C out/Default say_hello
+ninja: Entering directory 'out/Default'
+[1/1] Regenerating ninja files
+[4/4] LINK say_hello
+> out/Default/say_hello
+Hello, Bill and Joy.
+```
+
+## Add a new build argument
+
+You declare which arguments you accept and specify default values via
+`declare_args`.
+
+```
+declare_args() {
+ enable_teleporter = true
+ enable_doom_melon = false
+}
+```
+
+See `gn help buildargs` for an overview of how this works.
+See `gn help declare_args` for specifics on declaring them.
+
+It is an error to declare a given argument more than once in a given scope, so
+care should be used in scoping and naming arguments.
+
+## Don't know what's going on?
+
+You can run GN in verbose mode to see lots of messages about what it's
+doing. Use `-v` for this.
+
+### Print debugging
+
+There is a `print` command which just writes to stdout:
+
+```
+static_library("hello") {
+ ...
+ print(configs)
+}
+```
+
+This will print all of the configs applying to your target (including
+the default ones).
+
+### The "desc" command
+
+You can run `gn desc <build_dir> <targetname>` to get information about
+a given target:
+
+```
+gn desc out/Default //tools/gn/tutorial:say_hello
+```
+
+will print out lots of exciting information. You can also print just one
+section. Let's say you wanted to know where your `TWO_PEOPLE` define
+came from on the `say_hello` target:
+
+```
+> gn desc out/Default //tools/gn/tutorial:say_hello defines --blame
+...lots of other stuff omitted...
+ From //tools/gn/tutorial:hello_config
+ (Added by //tools/gn/tutorial/BUILD.gn:12)
+ TWO_PEOPLE
+```
+
+You can see that `TWO_PEOPLE` was defined by a config, and you can also
+see which line caused that config to be applied to your target (in
+this case, the `all_dependent_configs` line).
+
+Another particularly interesting variation:
+
+```
+gn desc out/Default //base:base_i18n deps --tree
+```
+
+See `gn help desc` for more.
+
+### Performance
+
+You can see what took a long time by running it with the --time command
+line flag. This will output a summary of timings for various things.
+
+You can also make a trace of how the build files were executed:
+
+```
+gn --tracelog=mylog.trace
+```
+
+and you can load the resulting file in Chrome's `about:tracing` page to
+look at everything.
diff --git a/chromium/tools/gn/docs/reference.md b/chromium/tools/gn/docs/reference.md
new file mode 100644
index 00000000000..22cbee0aa68
--- /dev/null
+++ b/chromium/tools/gn/docs/reference.md
@@ -0,0 +1,5838 @@
+# GN Reference
+
+*This page is automatically generated from* `gn help --markdown all`.
+
+## **\--args**: Specifies build arguments overrides.
+
+```
+ See "gn help buildargs" for an overview of how build arguments work.
+
+ Most operations take a build directory. The build arguments are taken
+ from the previous build done in that directory. If a command specifies
+ --args, it will override the previous arguments stored in the build
+ directory, and use the specified ones.
+
+ The args specified will be saved to the build directory for subsequent
+ commands. Specifying --args="" will clear all build arguments.
+
+```
+
+### **Formatting**
+
+```
+ The value of the switch is interpreted in GN syntax. For typical usage
+ of string arguments, you will need to be careful about escaping of
+ quotes.
+
+```
+
+### **Examples**
+
+```
+ gn gen out/Default --args="foo=\"bar\""
+
+ gn gen out/Default --args='foo="bar" enable=true blah=7'
+
+ gn check out/Default --args=""
+ Clears existing build args from the directory.
+
+ gn desc out/Default --args="some_list=[1, false, \"foo\"]"
+
+
+```
+## **\--[no]color**: Forces colored output on or off.
+
+```
+ Normally GN will try to detect whether it is outputting to a terminal
+ and will enable or disable color accordingly. Use of these switches
+ will override the default.
+
+```
+
+### **Examples**
+
+```
+ gn gen out/Default --color
+
+ gn gen out/Default --nocolor
+
+
+```
+## **\--dotfile**: Override the name of the ".gn" file.
+
+```
+  Normally GN loads the ".gn" file from the source root for some basic
+ configuration (see "gn help dotfile"). This flag allows you to
+ use a different file.
+
+ Note that this interacts with "--root" in a possibly incorrect way.
+ It would be nice to test the edge cases and document or fix.
+
+
+```
+## **\--markdown**: write the output in the Markdown format.
+
+## **\--[no]color**: Forces colored output on or off.
+
+```
+ Normally GN will try to detect whether it is outputting to a terminal
+ and will enable or disable color accordingly. Use of these switches
+ will override the default.
+
+```
+
+### **Examples**
+
+```
+ gn gen out/Default --color
+
+ gn gen out/Default --nocolor
+
+
+```
+## **-q**: Quiet mode. Don't print output on success.
+
+```
+ This is useful when running as a part of another script.
+
+
+```
+## **\--root**: Explicitly specify source root.
+
+```
+ Normally GN will look up in the directory tree from the current
+ directory to find a ".gn" file. The source root directory specifies
+ the meaning of "//" beginning with paths, and the BUILD.gn file
+ in that directory will be the first thing loaded.
+
+ Specifying --root allows GN to do builds in a specific directory
+ regardless of the current directory.
+
+```
+
+### **Examples**
+
+```
+ gn gen //out/Default --root=/home/baracko/src
+
+ gn desc //out/Default --root="C:\Users\BObama\My Documents\foo"
+
+
+```
+## **\--runtime-deps-list-file**: Save runtime dependencies for targets in file.
+
+```
+ --runtime-deps-list-file=<filename>
+
+ Where <filename> is a text file consisting of the labels, one per
+ line, of the targets for which runtime dependencies are desired.
+
+ See "gn help runtime_deps" for a description of how runtime
+ dependencies are computed.
+
+```
+
+### **Runtime deps output file**
+
+```
+ For each target requested, GN will write a separate runtime dependency
+ file. The runtime dependency file will be in the output directory
+ alongside the output file of the target, with a ".runtime_deps"
+ extension. For example, if the target "//foo:bar" is listed in the
+ input file, and that target produces an output file "bar.so", GN
+ will create a file "bar.so.runtime_deps" in the build directory.
+
+ If a source set, action, copy, or group is listed, the runtime deps
+ file will correspond to the .stamp file corresponding to that target.
+ This is probably not useful; the use-case for this feature is
+ generally executable targets.
+
+ The runtime dependency file will list one file per line, with no
+ escaping. The files will be relative to the root_build_dir. The first
+ line of the file will be the main output file of the target itself
+ (in the above example, "bar.so").
+
+
+```
+## **\--threads**: Specify number of worker threads.
+
+```
+ GN runs many threads to load and run build files. This can make
+ debugging challenging. Or you may want to experiment with different
+ values to see how it affects performance.
+
+ The parameter is the number of worker threads. This does not count the
+ main thread (so there are always at least two).
+
+```
+
+### **Examples**
+
+```
+  gn gen out/Default --threads=1
+
+
+```
+## **\--time**: Outputs a summary of how long everything took.
+
+```
+ Hopefully self-explanatory.
+
+```
+
+### **Examples**
+
+```
+ gn gen out/Default --time
+
+
+```
+## **\--tracelog**: Writes a Chrome-compatible trace log to the given file.
+
+```
+ The trace log will show file loads, executions, scripts, and writes.
+ This allows performance analysis of the generation step.
+
+ To view the trace, open Chrome and navigate to "chrome://tracing/",
+ then press "Load" and specify the file you passed to this parameter.
+
+```
+
+### **Examples**
+
+```
+ gn gen out/Default --tracelog=mytrace.trace
+
+
+```
+## **-v**: Verbose logging.
+
+```
+ This will spew logging events to the console for debugging issues.
+ Good luck!
+
+
+```
+## **gn args <out_dir> [\--list] [\--short] [\--args]**
+
+```
+ See also "gn help buildargs" for a more high-level overview of how
+ build arguments work.
+
+```
+
+### **Usage**
+```
+ gn args <out_dir>
+ Open the arguments for the given build directory in an editor
+ (as specified by the EDITOR environment variable). If the given
+ build directory doesn't exist, it will be created and an empty
+ args file will be opened in the editor. You would type something
+ like this into that file:
+ enable_doom_melon=false
+ os="android"
+
+ Note: you can edit the build args manually by editing the file
+ "args.gn" in the build directory and then running
+ "gn gen <out_dir>".
+
+ gn args <out_dir> --list[=<exact_arg>] [--short]
+ Lists all build arguments available in the current configuration,
+ or, if an exact_arg is specified for the list flag, just that one
+ build argument.
+
+ The output will list the declaration location, default value, and
+      comment preceding the declaration. If --short is specified,
+ only the names and values will be printed.
+
+ If the out_dir is specified, the build configuration will be
+ taken from that build directory. The reason this is needed is that
+ the definition of some arguments is dependent on the build
+ configuration, so setting some values might add, remove, or change
+ the default values for other arguments. Specifying your exact
+ configuration allows the proper arguments to be displayed.
+
+ Instead of specifying the out_dir, you can also use the
+ command-line flag to specify the build configuration:
+ --args=<exact list of args to use>
+
+```
+
+### **Examples**
+```
+ gn args out/Debug
+ Opens an editor with the args for out/Debug.
+
+ gn args out/Debug --list --short
+ Prints all arguments with their default values for the out/Debug
+ build.
+
+ gn args out/Debug --list=target_cpu
+ Prints information about the "target_cpu" argument for the out/Debug
+ build.
+
+ gn args --list --args="os=\"android\" enable_doom_melon=true"
+ Prints all arguments with the default values for a build with the
+ given arguments set (which may affect the values of other
+ arguments).
+
+
+```
+## **gn check <out_dir> [<label_pattern>] [\--force]**
+
+```
+ GN's include header checker validates that the includes for C-like
+ source files match the build dependency graph.
+
+ "gn check" is the same thing as "gn gen" with the "--check" flag
+ except that this command does not write out any build files. It's
+ intended to be an easy way to manually trigger include file checking.
+
+ The <label_pattern> can take exact labels or patterns that match more
+ than one (although not general regular expressions). If specified,
+ only those matching targets will be checked. See
+ "gn help label_pattern" for details.
+
+```
+
+### **Command-specific switches**
+
+```
+ --force
+ Ignores specifications of "check_includes = false" and checks
+ all target's files that match the target label.
+
+```
+
+### **What gets checked**
+
+```
+ The .gn file may specify a list of targets to be checked. Only these
+ targets will be checked if no label_pattern is specified on the
+ command line. Otherwise, the command-line list is used instead. See
+ "gn help dotfile".
+
+ Targets can opt-out from checking with "check_includes = false"
+ (see "gn help check_includes").
+
+ For targets being checked:
+
+ - GN opens all C-like source files in the targets to be checked and
+ scans the top for includes.
+
+ - Includes with a "nogncheck" annotation are skipped (see
+ "gn help nogncheck").
+
+ - Only includes using "quotes" are checked. <brackets> are assumed
+ to be system includes.
+
+ - Include paths are assumed to be relative to either the source root
+ or the "root_gen_dir" and must include all the path components.
+ (It might be nice in the future to incorporate GN's knowledge of
+ the include path to handle other include styles.)
+
+ - GN does not run the preprocessor so will not understand
+ conditional includes.
+
+ - Only includes matching known files in the build are checked:
+ includes matching unknown paths are ignored.
+
+ For an include to be valid:
+
+ - The included file must be in the current target, or there must
+ be a path following only public dependencies to a target with the
+ file in it ("gn path" is a good way to diagnose problems).
+
+ - There can be multiple targets with an included file: only one
+ needs to be valid for the include to be allowed.
+
+ - If there are only "sources" in a target, all are considered to
+ be public and can be included by other targets with a valid public
+ dependency path.
+
+ - If a target lists files as "public", only those files are
+ able to be included by other targets. Anything in the sources
+ will be considered private and will not be includable regardless
+ of dependency paths.
+
+    - Outputs from actions are treated like public sources on that
+ target.
+
+ - A target can include headers from a target that depends on it
+ if the other target is annotated accordingly. See
+ "gn help allow_circular_includes_from".
+
+```
+
+### **Advice on fixing problems**
+
+```
+ If you have a third party project that uses relative includes,
+ it's generally best to exclude that target from checking altogether
+ via "check_includes = false".
+
+ If you have conditional includes, make sure the build conditions
+ and the preprocessor conditions match, and annotate the line with
+ "nogncheck" (see "gn help nogncheck" for an example).
+
+ If two targets are hopelessly intertwined, use the
+ "allow_circular_includes_from" annotation. Ideally each should have
+ identical dependencies so configs inherited from those dependencies
+ are consistent (see "gn help allow_circular_includes_from").
+
+ If you have a standalone header file or files that need to be shared
+ between a few targets, you can consider making a source_set listing
+ only those headers as public sources. With only header files, the
+ source set will be a no-op from a build perspective, but will give a
+ central place to refer to those headers. That source set's files
+ will still need to pass "gn check" in isolation.
+
+ In rare cases it makes sense to list a header in more than one
+ target if it could be considered conceptually a member of both.
+
+```
+
+### **Examples**
+
+```
+ gn check out/Debug
+ Check everything.
+
+ gn check out/Default //foo:bar
+ Check only the files in the //foo:bar target.
+
+  gn check out/Default "//foo/*"
+ Check only the files in targets in the //foo directory tree.
+
+
+```
+## **gn clean <out_dir>**
+
+```
+ Deletes the contents of the output directory except for args.gn and
+ creates a Ninja build environment sufficient to regenerate the build.
+
+
+```
+## **gn desc <out_dir> <target label> [<what to show>] [\--blame]**
+
+```
+ Displays information about a given labeled target for the given build.
+ The build parameters will be taken for the build in the given
+ <out_dir>.
+
+```
+
+### **Possibilities for <what to show>**
+```
+ (If unspecified an overall summary will be displayed.)
+
+ sources
+ Source files.
+
+ inputs
+ Additional input dependencies.
+
+ public
+ Public header files.
+
+ check_includes
+ Whether "gn check" checks this target for include usage.
+
+ allow_circular_includes_from
+ Permit includes from these targets.
+
+ visibility
+ Prints which targets can depend on this one.
+
+ testonly
+ Whether this target may only be used in tests.
+
+ configs
+ Shows configs applied to the given target, sorted in the order
+ they're specified. This includes both configs specified in the
+ "configs" variable, as well as configs pushed onto this target
+ via dependencies specifying "all" or "direct" dependent
+ configs.
+
+ deps
+ Show immediate or recursive dependencies. See below for flags that
+ control deps printing.
+
+ public_configs
+ all_dependent_configs
+ Shows the labels of configs applied to targets that depend on this
+ one (either directly or all of them).
+
+ script
+ args
+ depfile
+ Actions only. The script and related values.
+
+ outputs
+ Outputs for script and copy target types.
+
+ defines [--blame]
+ include_dirs [--blame]
+ cflags [--blame]
+ cflags_cc [--blame]
+ cflags_cxx [--blame]
+ ldflags [--blame]
+ lib_dirs
+ libs
+ Shows the given values taken from the target and all configs
+ applying. See "--blame" below.
+
+ runtime_deps
+ Compute all runtime deps for the given target. This is a
+ computed list and does not correspond to any GN variable, unlike
+ most other values here.
+
+ The output is a list of file names relative to the build
+ directory. See "gn help runtime_deps" for how this is computed.
+ This also works with "--blame" to see the source of the
+ dependency.
+
+```
+
+### **Shared flags**
+
+```
+ --blame
+ Used with any value specified by a config, this will name
+ the config that specified the value. This doesn't currently work
+ for libs and lib_dirs because those are inherited and are more
+ complicated to figure out the blame (patches welcome).
+
+```
+
+### **Flags that control how deps are printed**
+
+```
+ --all
+ Collects all recursive dependencies and prints a sorted flat list.
+ Also usable with --tree (see below).
+
+ --as=(buildfile|label|output)
+ How to print targets.
+
+ buildfile
+ Prints the build files where the given target was declared as
+ file names.
+ label (default)
+ Prints the label of the target.
+ output
+ Prints the first output file for the target relative to the
+ root build directory.
+
+ --testonly=(true|false)
+ Restrict outputs to targets with the testonly flag set
+ accordingly. When unspecified, the target's testonly flags are
+ ignored.
+
+ --tree
+ Print a dependency tree. By default, duplicates will be elided
+      with "..." but when --all and --tree are used together, no
+ eliding will be performed.
+
+ The "deps", "public_deps", and "data_deps" will all be
+ included in the tree.
+
+ Tree output can not be used with the filtering or output flags:
+ --as, --type, --testonly.
+
+ --type=(action|copy|executable|group|loadable_module|shared_library|
+ source_set|static_library)
+ Restrict outputs to targets matching the given type. If
+ unspecified, no filtering will be performed.
+
+```
+
+### **Note**
+
+```
+ This command will show the full name of directories and source files,
+ but when directories and source paths are written to the build file,
+ they will be adjusted to be relative to the build directory. So the
+ values for paths displayed by this command won't match (but should
+ mean the same thing).
+
+```
+
+### **Examples**
+
+```
+ gn desc out/Debug //base:base
+ Summarizes the given target.
+
+ gn desc out/Foo :base_unittests deps --tree
+ Shows a dependency tree of the "base_unittests" project in
+ the current directory.
+
+ gn desc out/Debug //base defines --blame
+ Shows defines set for the //base:base target, annotated by where
+ each one was set from.
+
+
+```
+## **gn format [\--dump-tree] [\--in-place] [\--stdin] BUILD.gn**
+
+```
+ Formats .gn file to a standard format.
+
+ The contents of some lists ('sources', 'deps', etc.) will be sorted to
+ a canonical order. To suppress this, you can add a comment of the form
+  "# NOSORT" immediately preceding the assignment. e.g.
+
+ # NOSORT
+ sources = [
+ "z.cc",
+ "a.cc",
+ ]
+
+```
+
+### **Arguments**
+```
+ --dry-run
+ Does not change or output anything, but sets the process exit code
+ based on whether output would be different than what's on disk.
+ This is useful for presubmit/lint-type checks.
+ - Exit code 0: successful format, matches on disk.
+ - Exit code 1: general failure (parse error, etc.)
+ - Exit code 2: successful format, but differs from on disk.
+
+ --dump-tree
+ For debugging only, dumps the parse tree.
+
+ --in-place
+ Instead of writing the formatted file to stdout, replace the input
+ file with the formatted output. If no reformatting is required,
+ the input file will not be touched, and nothing printed.
+
+ --stdin
+ Read input from stdin (and write to stdout). Not compatible with
+ --in-place of course.
+
+```
+
+### **Examples**
+```
+ gn format //some/BUILD.gn
+ gn format some\BUILD.gn
+ gn format /abspath/some/BUILD.gn
+ gn format --stdin
+
+
+```
+## **gn gen**: Generate ninja files.
+
+```
+ gn gen [<ide options>] <out_dir>
+
+ Generates ninja files from the current tree and puts them in the given
+ output directory.
+
+ The output directory can be a source-repo-absolute path name such as:
+ //out/foo
+ Or it can be a directory relative to the current directory such as:
+ out/foo
+
+ See "gn help switches" for the common command-line switches.
+
+```
+
+### **IDE options**
+
+```
+ GN optionally generates files for IDE. Possibilities for <ide options>
+
+ --ide=<ide_name>
+ Generate files for an IDE. Currently supported values:
+ "eclipse" - Eclipse CDT settings file.
+ "vs" - Visual Studio project/solution files.
+ (default Visual Studio version: 2015)
+ "vs2013" - Visual Studio 2013 project/solution files.
+ "vs2015" - Visual Studio 2015 project/solution files.
+
+ --sln=<file_name>
+ Override default sln file name ("all"). Solution file is written
+ to the root build directory. Only for Visual Studio.
+
+ --filters=<path_prefixes>
+ Semicolon-separated list of label patterns used to limit the set
+ of generated projects (see "gn help label_pattern"). Only
+ matching targets will be included to the solution. Only for Visual
+ Studio.
+
+```
+
+### **Eclipse IDE Support**
+
+```
+ GN DOES NOT generate Eclipse CDT projects. Instead, it generates a
+ settings file which can be imported into an Eclipse CDT project. The
+ XML file contains a list of include paths and defines. Because GN does
+ not generate a full .cproject definition, it is not possible to
+ properly define includes/defines for each file individually.
+ Instead, one set of includes/defines is generated for the entire
+ project. This works fairly well but may still result in a few indexer
+ issues here and there.
+
+
+```
+## **gn help <anything>**
+
+```
+ Yo dawg, I heard you like help on your help so I put help on the help
+ in the help.
+
+ You can also use "all" as the parameter to get all help at once.
+
+```
+
+### **Switches**
+
+```
+ --markdown
+ Format output in markdown syntax.
+
+```
+
+### **Example**
+
+```
+ gn help --markdown all
+ Dump all help to stdout in markdown format.
+
+
+```
+## **gn ls <out_dir> [<label_pattern>] [\--all-toolchains] [\--as=...]**
+```
+ [--type=...] [--testonly=...]
+
+ Lists all targets matching the given pattern for the given build
+ directory. By default, only targets in the default toolchain will
+ be matched unless a toolchain is explicitly supplied.
+
+ If the label pattern is unspecified, list all targets. The label
+ pattern is not a general regular expression (see
+ "gn help label_pattern"). If you need more complex expressions,
+ pipe the result through grep.
+
+```
+
+### **Options**
+
+```
+ --as=(buildfile|label|output)
+ How to print targets.
+
+ buildfile
+ Prints the build files where the given target was declared as
+ file names.
+ label (default)
+ Prints the label of the target.
+ output
+ Prints the first output file for the target relative to the
+ root build directory.
+
+ --all-toolchains
+ Matches all toolchains. When set, if the label pattern does not
+ specify an explicit toolchain, labels from all toolchains will be
+ matched. When unset, only targets in the default toolchain will
+ be matched unless an explicit toolchain in the label is set.
+
+ --testonly=(true|false)
+ Restrict outputs to targets with the testonly flag set
+ accordingly. When unspecified, the target's testonly flags are
+ ignored.
+
+ --type=(action|copy|executable|group|loadable_module|shared_library|
+ source_set|static_library)
+ Restrict outputs to targets matching the given type. If
+ unspecified, no filtering will be performed.
+
+```
+
+### **Examples**
+
+```
+ gn ls out/Debug
+ Lists all targets in the default toolchain.
+
+ gn ls out/Debug "//base/*"
+ Lists all targets in the directory base and all subdirectories.
+
+ gn ls out/Debug "//base:*"
+ Lists all targets defined in //base/BUILD.gn.
+
+ gn ls out/Debug //base --as=output
+ Lists the build output file for //base:base
+
+ gn ls out/Debug --type=executable
+ Lists all executables produced by the build.
+
+ gn ls out/Debug "//base/*" --as=output | xargs ninja -C out/Debug
+ Builds all targets in //base and all subdirectories.
+
+ gn ls out/Debug //base --all-toolchains
+ Lists all variants of the target //base:base (it may be referenced
+ in multiple toolchains).
+
+
+```
+## **gn path <out_dir> <target_one> <target_two>**
+
+```
+ Finds paths of dependencies between two targets. Each unique path
+ will be printed in one group, and groups will be separate by newlines.
+ The two targets can appear in either order: paths will be found going
+ in either direction.
+
+ By default, a single path will be printed. If there is a path with
+ only public dependencies, the shortest public path will be printed.
+ Otherwise, the shortest path using either public or private
+ dependencies will be printed. If --with-data is specified, data deps
+ will also be considered. If there are multiple shortest paths, an
+ arbitrary one will be selected.
+
+```
+
+### **Options**
+
+```
+ --all
+ Prints all paths found rather than just the first one. Public paths
+ will be printed first in order of increasing length, followed by
+ non-public paths in order of increasing length.
+
+ --public
+ Considers only public paths. Can't be used with --with-data.
+
+ --with-data
+ Additionally follows data deps. Without this flag, only public and
+ private linked deps will be followed. Can't be used with --public.
+
+```
+
+### **Example**
+
+```
+ gn path out/Default //base //tools/gn
+
+
+```
+## **gn refs <out_dir> (<label_pattern>|<label>|<file>|@<response_file>)* [\--all]**
+```
+ [--all-toolchains] [--as=...] [--testonly=...] [--type=...]
+
+ Finds reverse dependencies (which targets reference something). The
+ input is a list containing:
+
+ - Target label: The result will be which targets depend on it.
+
+ - Config label: The result will be which targets list the given
+ config in its "configs" or "public_configs" list.
+
+ - Label pattern: The result will be which targets depend on any
+ target matching the given pattern. Patterns will not match
+ configs. These are not general regular expressions, see
+ "gn help label_pattern" for details.
+
+ - File name: The result will be which targets list the given file in
+ its "inputs", "sources", "public", "data", or "outputs".
+ Any input that does not contain wildcards and does not match a
+ target or a config will be treated as a file.
+
+ - Response file: If the input starts with an "@", it will be
+ interpreted as a path to a file containing a list of labels or
+ file names, one per line. This allows us to handle long lists
+ of inputs without worrying about command line limits.
+
+```
+
+### **Options**
+
+```
+ --all
+ When used without --tree, will recurse and display all unique
+ dependencies of the given targets. For example, if the input is
+ a target, this will output all targets that depend directly or
+ indirectly on the input. If the input is a file, this will output
+ all targets that depend directly or indirectly on that file.
+
+ When used with --tree, turns off eliding to show a complete tree.
+
+ --all-toolchains
+ Normally only inputs in the default toolchain will be included.
+ This switch will turn on matching all toolchains.
+
+      For example, a file in a target might be compiled twice:
+ once in the default toolchain and once in a secondary one. Without
+ this flag, only the default toolchain one will be matched and
+ printed (potentially with its recursive dependencies, depending on
+ the other options). With this flag, both will be printed
+ (potentially with both of their recursive dependencies).
+
+ --as=(buildfile|label|output)
+ How to print targets.
+
+ buildfile
+ Prints the build files where the given target was declared as
+ file names.
+ label (default)
+ Prints the label of the target.
+ output
+ Prints the first output file for the target relative to the
+ root build directory.
+
+ -q
+ Quiet. If nothing matches, don't print any output. Without this
+ option, if there are no matches there will be an informational
+ message printed which might interfere with scripts processing the
+ output.
+
+ --testonly=(true|false)
+ Restrict outputs to targets with the testonly flag set
+ accordingly. When unspecified, the target's testonly flags are
+ ignored.
+
+ --tree
+ Outputs a reverse dependency tree from the given target.
+ Duplicates will be elided. Combine with --all to see a full
+ dependency tree.
+
+ Tree output can not be used with the filtering or output flags:
+ --as, --type, --testonly.
+
+ --type=(action|copy|executable|group|loadable_module|shared_library|
+ source_set|static_library)
+ Restrict outputs to targets matching the given type. If
+ unspecified, no filtering will be performed.
+
+```
+
+### **Examples (target input)**
+
+```
+ gn refs out/Debug //tools/gn:gn
+ Find all targets depending on the given exact target name.
+
+ gn refs out/Debug //base:i18n --as=buildfiles | xargs gvim
+ Edit all .gn files containing references to //base:i18n
+
+ gn refs out/Debug //base --all
+ List all targets depending directly or indirectly on //base:base.
+
+ gn refs out/Debug "//base/*"
+ List all targets depending directly on any target in //base or
+ its subdirectories.
+
+ gn refs out/Debug "//base:*"
+ List all targets depending directly on any target in
+ //base/BUILD.gn.
+
+ gn refs out/Debug //base --tree
+ Print a reverse dependency tree of //base:base
+
+```
+
+### **Examples (file input)**
+
+```
+ gn refs out/Debug //base/macros.h
+ Print target(s) listing //base/macros.h as a source.
+
+ gn refs out/Debug //base/macros.h --tree
+ Display a reverse dependency tree to get to the given file. This
+ will show how dependencies will reference that file.
+
+ gn refs out/Debug //base/macros.h //base/at_exit.h --all
+ Display all unique targets with some dependency path to a target
+ containing either of the given files as a source.
+
+ gn refs out/Debug //base/macros.h --testonly=true --type=executable
+ --all --as=output
+ Display the executable file names of all test executables
+ potentially affected by a change to the given file.
+
+
+```
+## **action**: Declare a target that runs a script a single time.
+
+```
+  This target type allows you to run a script a single time to produce
+  one or more output files. If you want to run a script once for each
+  of a set of input files, see "gn help action_foreach".
+
+```
+
+### **Inputs**
+
+```
+ In an action the "sources" and "inputs" are treated the same:
+ they're both input dependencies on script execution with no special
+ handling. If you want to pass the sources to your script, you must do
+ so explicitly by including them in the "args". Note also that this
+ means there is no special handling of paths since GN doesn't know
+  which of the args are paths and which are not. You will want to use
+ rebase_path() to convert paths to be relative to the root_build_dir.
+
+ You can dynamically write input dependencies (for incremental rebuilds
+ if an input file changes) by writing a depfile when the script is run
+ (see "gn help depfile"). This is more flexible than "inputs".
+
+ If the command line length is very long, you can use response files
+ to pass args to your script. See "gn help response_file_contents".
+
+ It is recommended you put inputs to your script in the "sources"
+ variable, and stuff like other Python files required to run your
+ script in the "inputs" variable.
+
+ The "deps" and "public_deps" for an action will always be
+ completed before any part of the action is run so it can depend on
+ the output of previous steps. The "data_deps" will be built if the
+ action is built, but may not have completed before all steps of the
+ action are started. This can give additional parallelism in the build
+ for runtime-only dependencies.
+
+```
+
+### **Outputs**
+
+```
+ You should specify files created by your script by specifying them in
+ the "outputs".
+
+ The script will be executed with the given arguments with the current
+ directory being that of the root build directory. If you pass files
+ to your script, see "gn help rebase_path" for how to convert
+ file names to be relative to the build directory (file names in the
+ sources, outputs, and inputs will be all treated as relative to the
+ current build file and converted as needed automatically).
+
+```
+
+### **File name handling**
+
+```
+ All output files must be inside the output directory of the build.
+ You would generally use |$target_out_dir| or |$target_gen_dir| to
+ reference the output or generated intermediate file directories,
+ respectively.
+
+```
+
+### **Variables**
+
+```
+ args, console, data, data_deps, depfile, deps, inputs, outputs*,
+ response_file_contents, script*, sources
+ * = required
+
+```
+
+### **Example**
+
+```
+ action("run_this_guy_once") {
+ script = "doprocessing.py"
+ sources = [ "my_configuration.txt" ]
+ outputs = [ "$target_gen_dir/insightful_output.txt" ]
+
+ # Our script imports this Python file so we want to rebuild if it
+ # changes.
+ inputs = [ "helper_library.py" ]
+
+ # Note that we have to manually pass the sources to our script if
+ # the script needs them as inputs.
+ args = [ "--out", rebase_path(target_gen_dir, root_build_dir) ] +
+ rebase_path(sources, root_build_dir)
+ }
+
+
+```
+## **action_foreach**: Declare a target that runs a script over a set of files.
+
+```
+ This target type allows you to run a script once-per-file over a set
+ of sources. If you want to run a script once that takes many files as
+ input, see "gn help action".
+
+```
+
+### **Inputs**
+
+```
+ The script will be run once per file in the "sources" variable. The
+ "outputs" variable should specify one or more files with a source
+ expansion pattern in it (see "gn help source_expansion"). The output
+ file(s) for each script invocation should be unique. Normally you
+ use "{{source_name_part}}" in each output file.
+
+ If your script takes additional data as input, such as a shared
+ configuration file or a Python module it uses, those files should be
+ listed in the "inputs" variable. These files are treated as
+ dependencies of each script invocation.
+
+ If the command line length is very long, you can use response files
+ to pass args to your script. See "gn help response_file_contents".
+
+ You can dynamically write input dependencies (for incremental rebuilds
+ if an input file changes) by writing a depfile when the script is run
+ (see "gn help depfile"). This is more flexible than "inputs".
+
+ The "deps" and "public_deps" for an action will always be
+ completed before any part of the action is run so it can depend on
+ the output of previous steps. The "data_deps" will be built if the
+ action is built, but may not have completed before all steps of the
+ action are started. This can give additional parallelism in the build
+ for runtime-only dependencies.
+
+```
+
+### **Outputs**
+
+```
+ The script will be executed with the given arguments with the current
+ directory being that of the root build directory. If you pass files
+ to your script, see "gn help rebase_path" for how to convert
+ file names to be relative to the build directory (file names in the
+ sources, outputs, and inputs will be all treated as relative to the
+ current build file and converted as needed automatically).
+
+```
+
+### **File name handling**
+
+```
+ All output files must be inside the output directory of the build.
+ You would generally use |$target_out_dir| or |$target_gen_dir| to
+ reference the output or generated intermediate file directories,
+ respectively.
+
+```
+
+### **Variables**
+
+```
+ args, console, data, data_deps, depfile, deps, inputs, outputs*,
+ response_file_contents, script*, sources*
+ * = required
+
+```
+
+### **Example**
+
+```
+ # Runs the script over each IDL file. The IDL script will generate
+ # both a .cc and a .h file for each input.
+ action_foreach("my_idl") {
+ script = "idl_processor.py"
+ sources = [ "foo.idl", "bar.idl" ]
+
+    # Our script reads this file each time, so we need to list it as a
+ # dependency so we can rebuild if it changes.
+ inputs = [ "my_configuration.txt" ]
+
+ # Transformation from source file name to output file names.
+ outputs = [ "$target_gen_dir/{{source_name_part}}.h",
+ "$target_gen_dir/{{source_name_part}}.cc" ]
+
+ # Note that since "args" is opaque to GN, if you specify paths
+ # here, you will need to convert it to be relative to the build
+ # directory using "rebase_path()".
+ args = [
+ "{{source}}",
+ "-o",
+ rebase_path(relative_target_gen_dir, root_build_dir) +
+ "/{{source_name_part}}.h" ]
+ }
+
+
+
+```
+## **assert**: Assert an expression is true at generation time.
+
+```
+ assert(<condition> [, <error string>])
+
+ If the condition is false, the build will fail with an error. If the
+ optional second argument is provided, that string will be printed
+ with the error message.
+
+```
+
+### **Examples**:
+```
+ assert(is_win)
+ assert(defined(sources), "Sources must be defined")
+
+
+```
+## **bundle_data**: [iOS/OS X] Declare a target without output.
+
+```
+  This target type allows you to declare data that is required at runtime.
+ It is used to inform "create_bundle" targets of the files to copy
+ into generated bundle, see "gn help create_bundle" for help.
+
+ The target must define a list of files as "sources" and a single
+ "outputs". If there are multiple files, source expansions must be
+ used to express the output. The output must reference a file inside
+ of {{bundle_root_dir}}.
+
+ This target can be used on all platforms though it is designed only to
+ generate iOS/OS X bundle. In cross-platform projects, it is advised to
+ put it behind iOS/Mac conditionals.
+
+ See "gn help create_bundle" for more information.
+
+```
+
+### **Variables**
+
+```
+ sources*, outputs*, deps, data_deps, public_deps, visibility
+ * = required
+
+```
+
+### **Examples**
+
+```
+ bundle_data("icudata") {
+ sources = [ "sources/data/in/icudtl.dat" ]
+ outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ]
+ }
+
+ bundle_data("base_unittests_bundle_data]") {
+ sources = [ "test/data" ]
+ outputs = [
+ "{{bundle_resources_dir}}/{{source_root_relative_dir}}/" +
+ "{{source_file_part}}"
+ ]
+ }
+
+ bundle_data("material_typography_bundle_data") {
+ sources = [
+ "src/MaterialTypography.bundle/Roboto-Bold.ttf",
+ "src/MaterialTypography.bundle/Roboto-Italic.ttf",
+ "src/MaterialTypography.bundle/Roboto-Regular.ttf",
+ "src/MaterialTypography.bundle/Roboto-Thin.ttf",
+ ]
+ outputs = [
+ "{{bundle_resources_dir}}/MaterialTypography.bundle/"
+ "{{source_file_part}}"
+ ]
+ }
+
+
+```
+## **config**: Defines a configuration object.
+
+```
+ Configuration objects can be applied to targets and specify sets of
+ compiler flags, includes, defines, etc. They provide a way to
+ conveniently group sets of this configuration information.
+
+ A config is referenced by its label just like a target.
+
+ The values in a config are additive only. If you want to remove a flag
+ you need to remove the corresponding config that sets it. The final
+ set of flags, defines, etc. for a target is generated in this order:
+
+    1. The values specified directly on the target (rather than using a
+       config).
+ 2. The configs specified in the target's "configs" list, in order.
+ 3. Public_configs from a breadth-first traversal of the dependency
+ tree in the order that the targets appear in "deps".
+ 4. All dependent configs from a breadth-first traversal of the
+ dependency tree in the order that the targets appear in "deps".
+
+```
+
+### **Variables valid in a config definition**
+
+```
+ Flags: cflags, cflags_c, cflags_cc, cflags_objc, cflags_objcc,
+ asmflags, defines, include_dirs, ldflags, lib_dirs, libs,
+ precompiled_header, precompiled_source
+ Nested configs: configs
+
+```
+
+### **Variables on a target used to apply configs**
+
+```
+ all_dependent_configs, configs, public_configs
+
+```
+
+### **Example**
+
+```
+ config("myconfig") {
+ includes = [ "include/common" ]
+ defines = [ "ENABLE_DOOM_MELON" ]
+ }
+
+ executable("mything") {
+ configs = [ ":myconfig" ]
+ }
+
+
+```
+## **copy**: Declare a target that copies files.
+
+### **File name handling**
+
+```
+ All output files must be inside the output directory of the build.
+ You would generally use |$target_out_dir| or |$target_gen_dir| to
+ reference the output or generated intermediate file directories,
+ respectively.
+
+ Both "sources" and "outputs" must be specified. Sources can include
+ as many files as you want, but there can only be one item in the
+ outputs list (plural is used for the name for consistency with
+ other target types).
+
+ If there is more than one source file, your output name should specify
+ a mapping from each source file to an output file name using source
+ expansion (see "gn help source_expansion"). The placeholders will
+ look like "{{source_name_part}}", for example.
+
+```
+
+### **Examples**
+
+```
+ # Write a rule that copies a checked-in DLL to the output directory.
+ copy("mydll") {
+ sources = [ "mydll.dll" ]
+ outputs = [ "$target_out_dir/mydll.dll" ]
+ }
+
+ # Write a rule to copy several files to the target generated files
+ # directory.
+ copy("myfiles") {
+ sources = [ "data1.dat", "data2.dat", "data3.dat" ]
+
+ # Use source expansion to generate output files with the
+ # corresponding file names in the gen dir. This will just copy each
+ # file.
+ outputs = [ "$target_gen_dir/{{source_file_part}}" ]
+ }
+
+
+```
+## **create_bundle**: [iOS/OS X] Build an OS X / iOS bundle.
+
+```
+ This target generates an iOS/OS X bundle (which is a directory with a
+  well-known structure). This target does not define any sources, instead
+  they are computed from all "bundle_data" targets this one depends on
+ transitively (the recursion stops at "create_bundle" targets).
+
+ The "bundle_*_dir" properties must be defined. They will be used for
+ the expansion of {{bundle_*_dir}} rules in "bundle_data" outputs.
+
+ This target can be used on all platforms though it is designed only to
+ generate iOS/OS X bundle. In cross-platform projects, it is advised to
+ put it behind iOS/Mac conditionals.
+
+```
+
+### **Variables**
+
+```
+ bundle_root_dir*, bundle_resources_dir*, bundle_executable_dir*,
+ bundle_plugins_dir*, deps, data_deps, public_deps, visibility
+ * = required
+
+```
+
+### **Example**
+
+```
+  # Defines a template to create an application. On most platforms, this
+ # is just an alias for an "executable" target, but on iOS/OS X, it
+ # builds an application bundle.
+ template("app") {
+ if (!is_ios && !is_mac) {
+ executable(target_name) {
+ forward_variables_from(invoker, "*")
+ }
+ } else {
+ app_name = target_name
+ gen_path = target_gen_dir
+
+ action("${app_name}_generate_info_plist") {
+ script = [ "//build/ios/ios_gen_plist.py" ]
+ sources = [ "templates/Info.plist" ]
+ outputs = [ "$gen_path/Info.plist" ]
+ args = rebase_path(sources, root_build_dir) +
+ rebase_path(outputs, root_build_dir)
+ }
+
+ bundle_data("${app_name}_bundle_info_plist") {
+ deps = [ ":${app_name}_generate_info_plist" ]
+ sources = [ "$gen_path/Info.plist" ]
+ outputs = [ "{{bundle_root_dir}}/Info.plist" ]
+ }
+
+ executable("${app_name}_generate_executable") {
+ forward_variables_from(invoker, "*", [
+ "output_name",
+ "visibility",
+ ])
+ output_name =
+ rebase_path("$gen_path/$app_name", root_build_dir)
+ }
+
+ bundle_data("${app_name}_bundle_executable") {
+ deps = [ ":${app_name}_generate_executable" ]
+ sources = [ "$gen_path/$app_name" ]
+ outputs = [ "{{bundle_executable_dir}}/$app_name" ]
+ }
+
+ create_bundle("${app_name}.app") {
+ deps = [
+ ":${app_name}_bundle_executable",
+ ":${app_name}_bundle_info_plist",
+ ]
+ if (is_ios) {
+ bundle_root_dir = "${root_build_dir}/$target_name"
+ bundle_resources_dir = bundle_root_dir
+ bundle_executable_dir = bundle_root_dir
+ bundle_plugins_dir = bundle_root_dir + "/Plugins"
+ } else {
+ bundle_root_dir = "${root_build_dir}/target_name/Contents"
+ bundle_resources_dir = bundle_root_dir + "/Resources"
+ bundle_executable_dir = bundle_root_dir + "/MacOS"
+ bundle_plugins_dir = bundle_root_dir + "/Plugins"
+ }
+ }
+
+ group(target_name) {
+ forward_variables_from(invoker, ["visibility"])
+ deps = [ ":${app_name}.app" ]
+ }
+ }
+ }
+
+
+```
+## **declare_args**: Declare build arguments.
+
+```
+ Introduces the given arguments into the current scope. If they are
+ not specified on the command line or in a toolchain's arguments,
+ the default values given in the declare_args block will be used.
+ However, these defaults will not override command-line values.
+
+ See also "gn help buildargs" for an overview.
+
+ The precise behavior of declare args is:
+
+  1. The declare_args block executes. Any variables in the enclosing
+ scope are available for reading.
+
+ 2. At the end of executing the block, any variables set within that
+ scope are saved globally as build arguments, with their current
+ values being saved as the "default value" for that argument.
+
+ 3. User-defined overrides are applied. Anything set in "gn args"
+ now overrides any default values. The resulting set of variables
+ is promoted to be readable from the following code in the file.
+
+ This has some ramifications that may not be obvious:
+
+ - You should not perform difficult work inside a declare_args block
+ since this only sets a default value that may be discarded. In
+ particular, don't use the result of exec_script() to set the
+ default value. If you want to have a script-defined default, set
+ some default "undefined" value like [], "", or -1, and after
+ the declare_args block, call exec_script if the value is unset by
+ the user.
+
+ - Any code inside of the declare_args block will see the default
+ values of previous variables defined in the block rather than
+ the user-overridden value. This can be surprising because you will
+ be used to seeing the overridden value. If you need to make the
+ default value of one arg dependent on the possibly-overridden
+ value of another, write two separate declare_args blocks:
+
+ declare_args() {
+ enable_foo = true
+ }
+ declare_args() {
+ # Bar defaults to same user-overridden state as foo.
+ enable_bar = enable_foo
+ }
+
+```
+
+### **Example**
+
+```
+ declare_args() {
+ enable_teleporter = true
+ enable_doom_melon = false
+ }
+
+ If you want to override the (default disabled) Doom Melon:
+ gn --args="enable_doom_melon=true enable_teleporter=false"
+ This also sets the teleporter, but it's already defaulted to on so
+ it will have no effect.
+
+
+```
+## **defined**: Returns whether an identifier is defined.
+
+```
+ Returns true if the given argument is defined. This is most useful in
+ templates to assert that the caller set things up properly.
+
+ You can pass an identifier:
+ defined(foo)
+ which will return true or false depending on whether foo is defined in
+ the current scope.
+
+ You can also check a named scope:
+ defined(foo.bar)
+ which will return true or false depending on whether bar is defined in
+ the named scope foo. It will throw an error if foo is not defined or
+ is not a scope.
+
+```
+
+### **Example**:
+
+```
+ template("mytemplate") {
+ # To help users call this template properly...
+ assert(defined(invoker.sources), "Sources must be defined")
+
+ # If we want to accept an optional "values" argument, we don't
+ # want to dereference something that may not be defined.
+ if (defined(invoker.values)) {
+ values = invoker.values
+ } else {
+ values = "some default value"
+ }
+ }
+
+
+```
+## **exec_script**: Synchronously run a script and return the output.
+
+```
+ exec_script(filename,
+ arguments = [],
+ input_conversion = "",
+ file_dependencies = [])
+
+ Runs the given script, returning the stdout of the script. The build
+ generation will fail if the script does not exist or returns a nonzero
+ exit code.
+
+ The current directory when executing the script will be the root
+ build directory. If you are passing file names, you will want to use
+ the rebase_path() function to make file names relative to this
+ path (see "gn help rebase_path").
+
+```
+
+### **Arguments**:
+
+```
+ filename:
+ File name of python script to execute. Non-absolute names will
+ be treated as relative to the current build file.
+
+ arguments:
+ A list of strings to be passed to the script as arguments.
+ May be unspecified or the empty list which means no arguments.
+
+ input_conversion:
+ Controls how the file is read and parsed.
+ See "gn help input_conversion".
+
+ If unspecified, defaults to the empty string which causes the
+      script result to be discarded. exec_script will return None.
+
+  file_dependencies:
+ (Optional) A list of files that this script reads or otherwise
+ depends on. These dependencies will be added to the build result
+ such that if any of them change, the build will be regenerated and
+ the script will be re-run.
+
+ The script itself will be an implicit dependency so you do not
+ need to list it.
+
+```
+
+### **Example**:
+
+```
+ all_lines = exec_script(
+ "myscript.py", [some_input], "list lines",
+ [ rebase_path("data_file.txt", root_build_dir) ])
+
+ # This example just calls the script with no arguments and discards
+ # the result.
+ exec_script("//foo/bar/myscript.py")
+
+
+```
+## **executable**: Declare an executable target.
+
+### **Variables**
+
+```
+ Flags: cflags, cflags_c, cflags_cc, cflags_objc, cflags_objcc,
+ asmflags, defines, include_dirs, ldflags, lib_dirs, libs,
+ precompiled_header, precompiled_source
+ Deps: data_deps, deps, public_deps
+ Dependent configs: all_dependent_configs, public_configs
+ General: check_includes, configs, data, inputs, output_name,
+ output_extension, public, sources, testonly, visibility
+
+
+```
+## **foreach**: Iterate over a list.
+
+```
+ foreach(<loop_var>, <list>) {
+ <loop contents>
+ }
+
+ Executes the loop contents block over each item in the list,
+ assigning the loop_var to each item in sequence. The loop_var will be
+ a copy so assigning to it will not mutate the list.
+
+ The block does not introduce a new scope, so that variable assignments
+ inside the loop will be visible once the loop terminates.
+
+ The loop variable will temporarily shadow any existing variables with
+ the same name for the duration of the loop. After the loop terminates
+ the loop variable will no longer be in scope, and the previous value
+ (if any) will be restored.
+
+```
+
+### **Example**
+
+```
+ mylist = [ "a", "b", "c" ]
+ foreach(i, mylist) {
+ print(i)
+ }
+
+ Prints:
+ a
+ b
+ c
+
+
+```
+## **forward_variables_from**: Copies variables from a different scope.
+
+```
+ forward_variables_from(from_scope, variable_list_or_star,
+ variable_to_not_forward_list = [])
+
+ Copies the given variables from the given scope to the local scope
+ if they exist. This is normally used in the context of templates to
+ use the values of variables defined in the template invocation to
+ a template-defined target.
+
+ The variables in the given variable_list will be copied if they exist
+ in the given scope or any enclosing scope. If they do not exist,
+  nothing will happen and they will be left undefined in the current scope.
+
+ As a special case, if the variable_list is a string with the value of
+ "*", all variables from the given scope will be copied. "*" only
+ copies variables set directly on the from_scope, not enclosing ones.
+ Otherwise it would duplicate all global variables.
+
+ When an explicit list of variables is supplied, if the variable exists
+ in the current (destination) scope already, an error will be thrown.
+ If "*" is specified, variables in the current scope will be
+ clobbered (the latter is important because most targets have an
+ implicit configs list, which means it wouldn't work at all if it
+ didn't clobber).
+
+ The sources assignment filter (see "gn help set_sources_assignment_filter")
+  is never applied by this function. It's assumed that any desired
+ filtering was already done when sources was set on the from_scope.
+
+  If variable_to_not_forward_list is non-empty, then it must contain
+ a list of variable names that will not be forwarded. This is mostly
+ useful when variable_list_or_star has a value of "*".
+
+```
+
+### **Examples**
+
+```
+ # This is a common action template. It would invoke a script with
+ # some given parameters, and wants to use the various types of deps
+ # and the visibility from the invoker if it's defined. It also injects
+ # an additional dependency to all targets.
+ template("my_test") {
+ action(target_name) {
+ forward_variables_from(invoker, [ "data_deps", "deps",
+ "public_deps", "visibility" ])
+ # Add our test code to the dependencies.
+ # "deps" may or may not be defined at this point.
+ if (defined(deps)) {
+ deps += [ "//tools/doom_melon" ]
+ } else {
+ deps = [ "//tools/doom_melon" ]
+ }
+ }
+ }
+
+ # This is a template around either a target whose type depends on a
+ # global variable. It forwards all values from the invoker.
+ template("my_wrapper") {
+ target(my_wrapper_target_type, target_name) {
+ forward_variables_from(invoker, "*")
+ }
+ }
+
+ # A template that wraps another. It adds behavior based on one
+ # variable, and forwards all others to the nested target.
+ template("my_ios_test_app") {
+ ios_test_app(target_name) {
+ forward_variables_from(invoker, "*", ["test_bundle_name"])
+ if (!defined(extra_substitutions)) {
+ extra_substitutions = []
+ }
+ extra_substitutions += [ "BUNDLE_ID_TEST_NAME=$test_bundle_name" ]
+ }
+ }
+
+
+```
+## **get_label_info**: Get an attribute from a target's label.
+
+```
+ get_label_info(target_label, what)
+
+ Given the label of a target, returns some attribute of that target.
+ The target need not have been previously defined in the same file,
+ since none of the attributes depend on the actual target definition,
+ only the label itself.
+
+ See also "gn help get_target_outputs".
+
+```
+
+### **Possible values for the "what" parameter**
+
+```
+ "name"
+ The short name of the target. This will match the value of the
+ "target_name" variable inside that target's declaration. For the
+ label "//foo/bar:baz" this will return "baz".
+
+ "dir"
+ The directory containing the target's definition, with no slash at
+ the end. For the label "//foo/bar:baz" this will return
+ "//foo/bar".
+
+ "target_gen_dir"
+ The generated file directory for the target. This will match the
+ value of the "target_gen_dir" variable when inside that target's
+ declaration.
+
+ "root_gen_dir"
+ The root of the generated file tree for the target. This will
+ match the value of the "root_gen_dir" variable when inside that
+ target's declaration.
+
+ "target_out_dir
+ The output directory for the target. This will match the
+ value of the "target_out_dir" variable when inside that target's
+ declaration.
+
+ "root_out_dir"
+ The root of the output file tree for the target. This will
+ match the value of the "root_out_dir" variable when inside that
+ target's declaration.
+
+ "label_no_toolchain"
+ The fully qualified version of this label, not including the
+ toolchain. For the input ":bar" it might return
+ "//foo:bar".
+
+ "label_with_toolchain"
+ The fully qualified version of this label, including the
+ toolchain. For the input ":bar" it might return
+ "//foo:bar(//toolchain:x64)".
+
+ "toolchain"
+ The label of the toolchain. This will match the value of the
+ "current_toolchain" variable when inside that target's
+ declaration.
+
+```
+
+### **Examples**
+
+```
+ get_label_info(":foo", "name")
+ # Returns string "foo".
+
+ get_label_info("//foo/bar:baz", "gen_dir")
+ # Returns string "//out/Debug/gen/foo/bar".
+
+
+```
+## **get_path_info**: Extract parts of a file or directory name.
+
+```
+ get_path_info(input, what)
+
+ The first argument is either a string representing a file or
+ directory name, or a list of such strings. If the input is a list
+ the return value will be a list containing the result of applying the
+ rule to each item in the input.
+
+```
+
+### **Possible values for the "what" parameter**
+
+```
+ "file"
+ The substring after the last slash in the path, including the name
+ and extension. If the input ends in a slash, the empty string will
+ be returned.
+ "foo/bar.txt" => "bar.txt"
+ "bar.txt" => "bar.txt"
+ "foo/" => ""
+ "" => ""
+
+ "name"
+ The substring of the file name not including the extension.
+ "foo/bar.txt" => "bar"
+ "foo/bar" => "bar"
+ "foo/" => ""
+
+ "extension"
+ The substring following the last period following the last slash,
+ or the empty string if not found. The period is not included.
+ "foo/bar.txt" => "txt"
+ "foo/bar" => ""
+
+ "dir"
+ The directory portion of the name, not including the slash.
+ "foo/bar.txt" => "foo"
+ "//foo/bar" => "//foo"
+ "foo" => "."
+
+      The result will never end in a slash, so if the result is
+      empty, or is the system ("/") or source ("//") root, a "."
+ will be appended such that it is always legal to append a slash
+ and a filename and get a valid path.
+
+ "out_dir"
+ The output file directory corresponding to the path of the
+ given file, not including a trailing slash.
+ "//foo/bar/baz.txt" => "//out/Default/obj/foo/bar"
+ "gen_dir"
+ The generated file directory corresponding to the path of the
+ given file, not including a trailing slash.
+ "//foo/bar/baz.txt" => "//out/Default/gen/foo/bar"
+
+ "abspath"
+ The full absolute path name to the file or directory. It will be
+ resolved relative to the current directory, and then the source-
+ absolute version will be returned. If the input is system-
+ absolute, the same input will be returned.
+ "foo/bar.txt" => "//mydir/foo/bar.txt"
+ "foo/" => "//mydir/foo/"
+ "//foo/bar" => "//foo/bar" (already absolute)
+ "/usr/include" => "/usr/include" (already absolute)
+
+ If you want to make the path relative to another directory, or to
+ be system-absolute, see rebase_path().
+
+```
+
+### **Examples**
+```
+ sources = [ "foo.cc", "foo.h" ]
+  result = get_path_info(sources, "abspath")
+ # result will be [ "//mydir/foo.cc", "//mydir/foo.h" ]
+
+ result = get_path_info("//foo/bar/baz.cc", "dir")
+ # result will be "//foo/bar"
+
+ # Extract the source-absolute directory name,
+ result = get_path_info(get_path_info(path, "dir"), "abspath")
+
+
+```
+## **get_target_outputs**: [file list] Get the list of outputs from a target.
+
+```
+ get_target_outputs(target_label)
+
+ Returns a list of output files for the named target. The named target
+ must have been previously defined in the current file before this
+ function is called (it can't reference targets in other files because
+ there isn't a defined execution order, and it obviously can't
+ reference targets that are defined after the function call).
+
+ Only copy and action targets are supported. The outputs from binary
+ targets will depend on the toolchain definition which won't
+ necessarily have been loaded by the time a given line of code has run,
+ and source sets and groups have no useful output file.
+
+```
+
+### **Return value**
+
+```
+ The names in the resulting list will be absolute file paths (normally
+ like "//out/Debug/bar.exe", depending on the build directory).
+
+ action targets: this will just return the files specified in the
+ "outputs" variable of the target.
+
+ action_foreach targets: this will return the result of applying
+ the output template to the sources (see "gn help source_expansion").
+ This will be the same result (though with guaranteed absolute file
+ paths), as process_file_template will return for those inputs
+ (see "gn help process_file_template").
+
+ binary targets (executables, libraries): this will return a list
+ of the resulting binary file(s). The "main output" (the actual
+ binary or library) will always be the 0th element in the result.
+ Depending on the platform and output type, there may be other output
+ files as well (like import libraries) which will follow.
+
+ source sets and groups: this will return a list containing the path of
+ the "stamp" file that Ninja will produce once all outputs are
+ generated. This probably isn't very useful.
+
+```
+
+### **Example**
+
+```
+ # Say this action generates a bunch of C source files.
+ action_foreach("my_action") {
+ sources = [ ... ]
+ outputs = [ ... ]
+ }
+
+ # Compile the resulting source files into a source set.
+ source_set("my_lib") {
+ sources = get_target_outputs(":my_action")
+ }
+
+
+```
+## **getenv**: Get an environment variable.
+
+```
+ value = getenv(env_var_name)
+
+  Returns the value of the given environment variable. If the value is
+ not found, it will try to look up the variable with the "opposite"
+ case (based on the case of the first letter of the variable), but
+ is otherwise case-sensitive.
+
+ If the environment variable is not found, the empty string will be
+ returned. Note: it might be nice to extend this if we had the concept
+ of "none" in the language to indicate lookup failure.
+
+```
+
+### **Example**:
+
+```
+ home_dir = getenv("HOME")
+
+
+```
+## **group**: Declare a named group of targets.
+
+```
+ This target type allows you to create meta-targets that just collect a
+ set of dependencies into one named target. Groups can additionally
+ specify configs that apply to their dependents.
+
+ Depending on a group is exactly like depending directly on that
+ group's deps.
+
+```
+
+### **Variables**
+
+```
+ Deps: data_deps, deps, public_deps
+ Dependent configs: all_dependent_configs, public_configs
+
+```
+
+### **Example**
+
+```
+ group("all") {
+ deps = [
+ "//project:runner",
+ "//project:unit_tests",
+ ]
+ }
+
+
+```
+## **import**: Import a file into the current scope.
+
+```
+ The import command loads the rules and variables resulting from
+ executing the given file into the current scope.
+
+ By convention, imported files are named with a .gni extension.
+
+ An import is different than a C++ "include". The imported file is
+ executed in a standalone environment from the caller of the import
+ command. The results of this execution are cached for other files that
+ import the same .gni file.
+
+ Note that you can not import a BUILD.gn file that's otherwise used
+ in the build. Files must either be imported or implicitly loaded as
+ a result of deps rules, but not both.
+
+ The imported file's scope will be merged with the scope at the point
+ import was called. If there is a conflict (both the current scope and
+ the imported file define some variable or rule with the same name but
+ different value), a runtime error will be thrown. Therefore, it's good
+ practice to minimize the stuff that an imported file defines.
+
+ Variables and templates beginning with an underscore '_' are
+ considered private and will not be imported. Imported files can use
+ such variables for internal computation without affecting other files.
+
+```
+
+### **Examples**:
+
+```
+ import("//build/rules/idl_compilation_rule.gni")
+
+ # Looks in the current directory.
+ import("my_vars.gni")
+
+
+```
+## **loadable_module**: Declare a loadable module target.
+
+```
+ This target type allows you to create an object file that is (and can
+ only be) loaded and unloaded at runtime.
+
+ A loadable module will be specified on the linker line for targets
+ listing the loadable module in its "deps". If you don't want this
+ (if you don't need to dynamically load the library at runtime), then
+ you should use a "shared_library" target type instead.
+
+```
+
+### **Variables**
+
+```
+ Flags: cflags, cflags_c, cflags_cc, cflags_objc, cflags_objcc,
+ asmflags, defines, include_dirs, ldflags, lib_dirs, libs,
+ precompiled_header, precompiled_source
+ Deps: data_deps, deps, public_deps
+ Dependent configs: all_dependent_configs, public_configs
+ General: check_includes, configs, data, inputs, output_name,
+ output_extension, public, sources, testonly, visibility
+
+
+```
+## **print**: Prints to the console.
+
+```
+ Prints all arguments to the console separated by spaces. A newline is
+ automatically appended to the end.
+
+ This function is intended for debugging. Note that build files are run
+ in parallel so you may get interleaved prints. A buildfile may also
+ be executed more than once in parallel in the context of different
+ toolchains so the prints from one file may be duplicated or
+ interleaved with itself.
+
+```
+
+### **Examples**:
+```
+ print("Hello world")
+
+ print(sources, deps)
+
+
+```
+## **process_file_template**: Do template expansion over a list of files.
+
+```
+ process_file_template(source_list, template)
+
+ process_file_template applies a template list to a source file list,
+ returning the result of applying each template to each source. This is
+ typically used for computing output file names from input files.
+
+ In most cases, get_target_outputs() will give the same result with
+ shorter, more maintainable code. This function should only be used
+ when that function can't be used (like there's no target or the target
+ is defined in another build file).
+
+```
+
+### **Arguments**:
+
+```
+ The source_list is a list of file names.
+
+ The template can be a string or a list. If it is a list, multiple
+ output strings are generated for each input.
+
+ The template should contain source expansions to which each name in
+ the source list is applied. See "gn help source_expansion".
+
+```
+
+### **Example**:
+
+```
+ sources = [
+ "foo.idl",
+ "bar.idl",
+ ]
+ myoutputs = process_file_template(
+ sources,
+ [ "$target_gen_dir/{{source_name_part}}.cc",
+ "$target_gen_dir/{{source_name_part}}.h" ])
+
+ The result in this case will be:
+ [ "//out/Debug/foo.cc"
+ "//out/Debug/foo.h"
+ "//out/Debug/bar.cc"
+ "//out/Debug/bar.h" ]
+
+
+```
+## **read_file**: Read a file into a variable.
+
+```
+ read_file(filename, input_conversion)
+
+ Whitespace will be trimmed from the end of the file. Throws an error
+ if the file can not be opened.
+
+```
+
+### **Arguments**:
+
+```
+ filename
+ Filename to read, relative to the build file.
+
+ input_conversion
+ Controls how the file is read and parsed.
+ See "gn help input_conversion".
+
+```
+
+### **Example**
+```
+ lines = read_file("foo.txt", "list lines")
+
+
+```
+## **rebase_path**: Rebase a file or directory to another location.
+
+```
+ converted = rebase_path(input,
+ new_base = "",
+ current_base = ".")
+
+ Takes a string argument representing a file name, or a list of such
+ strings and converts it/them to be relative to a different base
+ directory.
+
+ When invoking the compiler or scripts, GN will automatically convert
+ sources and include directories to be relative to the build directory.
+ However, if you're passing files directly in the "args" array or
+ doing other manual manipulations where GN doesn't know something is
+ a file name, you will need to convert paths to be relative to what
+ your tool is expecting.
+
+ The common case is to use this to convert paths relative to the
+ current directory to be relative to the build directory (which will
+ be the current directory when executing scripts).
+
+ If you want to convert a file path to be source-absolute (that is,
+ beginning with a double slash like "//foo/bar"), you should use
+ the get_path_info() function. This function won't work because it will
+ always make relative paths, and it needs to support making paths
+ relative to the source root, so can't also generate source-absolute
+ paths without more special-cases.
+
+```
+
+### **Arguments**
+
+```
+ input
+      A string or list of strings representing file or directory names.
+ These can be relative paths ("foo/bar.txt"), system absolute
+ paths ("/foo/bar.txt"), or source absolute paths
+ ("//foo/bar.txt").
+
+ new_base
+ The directory to convert the paths to be relative to. This can be
+ an absolute path or a relative path (which will be treated
+ as being relative to the current BUILD-file's directory).
+
+ As a special case, if new_base is the empty string (the default),
+ all paths will be converted to system-absolute native style paths
+ with system path separators. This is useful for invoking external
+ programs.
+
+ current_base
+ Directory representing the base for relative paths in the input.
+ If this is not an absolute path, it will be treated as being
+ relative to the current build file. Use "." (the default) to
+ convert paths from the current BUILD-file's directory.
+
+```
+
+### **Return value**
+
+```
+ The return value will be the same type as the input value (either a
+ string or a list of strings). All relative and source-absolute file
+  names will be converted to be relative to the requested output
+  directory. System-absolute paths will be unchanged.
+
+ Whether an output path will end in a slash will match whether the
+ corresponding input path ends in a slash. It will return "." or
+ "./" (depending on whether the input ends in a slash) to avoid
+ returning empty strings. This means if you want a root path
+ ("//" or "/") not ending in a slash, you can add a dot ("//.").
+
+```
+
+### **Example**
+
+```
+ # Convert a file in the current directory to be relative to the build
+ # directory (the current dir when executing compilers and scripts).
+ foo = rebase_path("myfile.txt", root_build_dir)
+ # might produce "../../project/myfile.txt".
+
+ # Convert a file to be system absolute:
+ foo = rebase_path("myfile.txt")
+ # Might produce "D:\source\project\myfile.txt" on Windows or
+ # "/home/you/source/project/myfile.txt" on Linux.
+
+ # Typical usage for converting to the build directory for a script.
+ action("myscript") {
+ # Don't convert sources, GN will automatically convert these to be
+ # relative to the build directory when it constructs the command
+ # line for your script.
+ sources = [ "foo.txt", "bar.txt" ]
+
+ # Extra file args passed manually need to be explicitly converted
+ # to be relative to the build directory:
+ args = [
+ "--data",
+ rebase_path("//mything/data/input.dat", root_build_dir),
+ "--rel",
+ rebase_path("relative_path.txt", root_build_dir)
+ ] + rebase_path(sources, root_build_dir)
+ }
+
+
+```
+## **set_default_toolchain**: Sets the default toolchain name.
+
+```
+ set_default_toolchain(toolchain_label)
+
+ The given label should identify a toolchain definition (see
+ "help toolchain"). This toolchain will be used for all targets
+ unless otherwise specified.
+
+ This function is only valid to call during the processing of the build
+ configuration file. Since the build configuration file is processed
+ separately for each toolchain, this function will be a no-op when
+ called under any non-default toolchains.
+
+ For example, the default toolchain should be appropriate for the
+ current environment. If the current environment is 32-bit and
+ somebody references a target with a 64-bit toolchain, we wouldn't
+ want processing of the build config file for the 64-bit toolchain to
+ reset the default toolchain to 64-bit, we want to keep it 32-bits.
+
+```
+
+### **Argument**:
+
+```
+ toolchain_label
+ Toolchain name.
+
+```
+
+### **Example**:
+
+```
+ set_default_toolchain("//build/config/win:vs32")
+
+```
+## **set_defaults**: Set default values for a target type.
+
+```
+ set_defaults(<target_type_name>) { <values...> }
+
+ Sets the default values for a given target type. Whenever
+ target_type_name is seen in the future, the values specified in
+ set_default's block will be copied into the current scope.
+
+ When the target type is used, the variable copying is very strict.
+ If a variable with that name is already in scope, the build will fail
+ with an error.
+
+ set_defaults can be used for built-in target types ("executable",
+ "shared_library", etc.) and custom ones defined via the "template"
+ command.
+
+```
+
+### **Example**:
+```
+ set_defaults("static_library") {
+ configs = [ "//tools/mything:settings" ]
+ }
+
+  static_library("mylib") {
+    # The configs will be auto-populated as above. You can remove it if
+    # you don't want the default for a particular target:
+ configs -= "//tools/mything:settings"
+ }
+
+
+```
+## **set_sources_assignment_filter**: Set a pattern to filter source files.
+
+```
+ The sources assignment filter is a list of patterns that remove files
+ from the list implicitly whenever the "sources" variable is
+ assigned to. This is intended to be used to globally filter out files
+ with platform-specific naming schemes when they don't apply, for
+ example, you may want to filter out all "*_win.cc" files on non-
+ Windows platforms.
+
+ Typically this will be called once in the master build config script
+ to set up the filter for the current platform. Subsequent calls will
+ overwrite the previous values.
+
+ If you want to bypass the filter and add a file even if it might
+ be filtered out, call set_sources_assignment_filter([]) to clear the
+  list of filters. This will apply until the current scope exits.
+
+```
+
+### **How to use patterns**
+
+```
+ File patterns are VERY limited regular expressions. They must match
+ the entire input string to be counted as a match. In regular
+ expression parlance, there is an implicit "^...$" surrounding your
+ input. If you want to match a substring, you need to use wildcards at
+ the beginning and end.
+
+ There are only two special tokens understood by the pattern matcher.
+ Everything else is a literal.
+
+ * Matches zero or more of any character. It does not depend on the
+ preceding character (in regular expression parlance it is
+ equivalent to ".*").
+
+ \b Matches a path boundary. This will match the beginning or end of
+ a string, or a slash.
+
+```
+
+### **Pattern examples**
+
+```
+ "*asdf*"
+ Matches a string containing "asdf" anywhere.
+
+ "asdf"
+ Matches only the exact string "asdf".
+
+ "*.cc"
+ Matches strings ending in the literal ".cc".
+
+ "\bwin/*"
+ Matches "win/foo" and "foo/win/bar.cc" but not "iwin/foo".
+
+```
+
+### **Sources assignment example**
+
+```
+ # Filter out all _win files.
+ set_sources_assignment_filter([ "*_win.cc", "*_win.h" ])
+ sources = [ "a.cc", "b_win.cc" ]
+ print(sources)
+ # Will print [ "a.cc" ]. b_win one was filtered out.
+
+
+```
+## **shared_library**: Declare a shared library target.
+
+```
+ A shared library will be specified on the linker line for targets
+ listing the shared library in its "deps". If you don't want this
+ (say you dynamically load the library at runtime), then you should
+ depend on the shared library via "data_deps" or, on Darwin
+ platforms, use a "loadable_module" target type instead.
+
+```
+
+### **Variables**
+
+```
+ Flags: cflags, cflags_c, cflags_cc, cflags_objc, cflags_objcc,
+ asmflags, defines, include_dirs, ldflags, lib_dirs, libs,
+ precompiled_header, precompiled_source
+ Deps: data_deps, deps, public_deps
+ Dependent configs: all_dependent_configs, public_configs
+ General: check_includes, configs, data, inputs, output_name,
+ output_extension, public, sources, testonly, visibility
+
+
+```
+## **source_set**: Declare a source set target.
+
+```
+ A source set is a collection of sources that get compiled, but are not
+ linked to produce any kind of library. Instead, the resulting object
+ files are implicitly added to the linker line of all targets that
+ depend on the source set.
+
+ In most cases, a source set will behave like a static library, except
+ no actual library file will be produced. This will make the build go
+ a little faster by skipping creation of a large static library, while
+ maintaining the organizational benefits of focused build targets.
+
+ The main difference between a source set and a static library is
+ around handling of exported symbols. Most linkers assume declaring
+ a function exported means exported from the static library. The linker
+ can then do dead code elimination to delete code not reachable from
+ exported functions.
+
+ A source set will not do this code elimination since there is no link
+  step. This allows you to link many source sets into a shared library
+ and have the "exported symbol" notation indicate "export from the
+ final shared library and not from the intermediate targets." There is
+ no way to express this concept when linking multiple static libraries
+ into a shared library.
+
+```
+
+### **Variables**
+
+```
+ Flags: cflags, cflags_c, cflags_cc, cflags_objc, cflags_objcc,
+ asmflags, defines, include_dirs, ldflags, lib_dirs, libs,
+ precompiled_header, precompiled_source
+ Deps: data_deps, deps, public_deps
+ Dependent configs: all_dependent_configs, public_configs
+ General: check_includes, configs, data, inputs, output_name,
+ output_extension, public, sources, testonly, visibility
+
+
+```
+## **static_library**: Declare a static library target.
+
+```
+ Make a ".a" / ".lib" file.
+
+ If you only need the static library for intermediate results in the
+ build, you should consider a source_set instead since it will skip
+ the (potentially slow) step of creating the intermediate library file.
+
+```
+
+### **Variables**
+
+```
+ Flags: cflags, cflags_c, cflags_cc, cflags_objc, cflags_objcc,
+ asmflags, defines, include_dirs, ldflags, lib_dirs, libs,
+ precompiled_header, precompiled_source
+ Deps: data_deps, deps, public_deps
+ Dependent configs: all_dependent_configs, public_configs
+ General: check_includes, configs, data, inputs, output_name,
+ output_extension, public, sources, testonly, visibility
+
+
+```
+## **target**: Declare a target with the given programmatic type.
+
+```
+ target(target_type_string, target_name_string) { ... }
+
+ The target() function is a way to invoke a built-in target or template
+ with a type determined at runtime. This is useful for cases where the
+ type of a target might not be known statically.
+
+ Only templates and built-in target functions are supported for the
+ target_type_string parameter. Arbitrary functions, configs, and
+ toolchains are not supported.
+
+ The call:
+ target("source_set", "doom_melon") {
+ Is equivalent to:
+ source_set("doom_melon") {
+
+```
+
+### **Example**
+
+```
+ if (foo_build_as_shared) {
+ my_type = "shared_library"
+ } else {
+ my_type = "source_set"
+ }
+
+ target(my_type, "foo") {
+ ...
+ }
+
+
+```
+## **template**: Define a template rule.
+
+```
+ A template defines a custom name that acts like a function. It
+ provides a way to add to the built-in target types.
+
+ The template() function is used to declare a template. To invoke the
+ template, just use the name of the template like any other target
+ type.
+
+ Often you will want to declare your template in a special file that
+ other files will import (see "gn help import") so your template
+ rule can be shared across build files.
+
+```
+
+### **Variables and templates**:
+
+```
+ When you call template() it creates a closure around all variables
+ currently in scope with the code in the template block. When the
+ template is invoked, the closure will be executed.
+
+ When the template is invoked, the code in the caller is executed and
+ passed to the template code as an implicit "invoker" variable. The
+ template uses this to read state out of the invoking code.
+
+ One thing explicitly excluded from the closure is the "current
+ directory" against which relative file names are resolved. The
+ current directory will be that of the invoking code, since typically
+ that code specifies the file names. This means all files internal
+ to the template should use absolute names.
+
+ A template will typically forward some or all variables from the
+ invoking scope to a target that it defines. Often, such variables
+ might be optional. Use the pattern:
+
+ if (defined(invoker.deps)) {
+ deps = invoker.deps
+ }
+
+ The function forward_variables_from() provides a shortcut to forward
+ one or more or possibly all variables in this manner:
+
+ forward_variables_from(invoker, ["deps", "public_deps"])
+
+```
+
+### **Target naming**:
+
+```
+ Your template should almost always define a built-in target with the
+ name the template invoker specified. For example, if you have an IDL
+ template and somebody does:
+ idl("foo") {...
+ you will normally want this to expand to something defining a
+ source_set or static_library named "foo" (among other things you may
+ need). This way, when another target specifies a dependency on
+ "foo", the static_library or source_set will be linked.
+
+ It is also important that any other targets your template expands to
+ have globally unique names, or you will get collisions.
+
+ Access the invoking name in your template via the implicit
+ "target_name" variable. This should also be the basis for how other
+ targets that a template expands to ensure uniqueness.
+
+ A typical example would be a template that defines an action to
+ generate some source files, and a source_set to compile that source.
+ Your template would name the source_set "target_name" because
+ that's what you want external targets to depend on to link your code.
+ And you would name the action something like "${target_name}_action"
+ to make it unique. The source set would have a dependency on the
+ action to make it run.
+
+```
+
+### **Example of defining a template**:
+
+```
+ template("my_idl") {
+ # Be nice and help callers debug problems by checking that the
+ # variables the template requires are defined. This gives a nice
+ # message rather than giving the user an error about an
+ # undefined variable in the file defining the template
+ #
+ # You can also use defined() to give default values to variables
+ # unspecified by the invoker.
+ assert(defined(invoker.sources),
+ "Need sources in $target_name listing the idl files.")
+
+ # Name of the intermediate target that does the code gen. This must
+ # incorporate the target name so it's unique across template
+ # instantiations.
+ code_gen_target_name = target_name + "_code_gen"
+
+ # Intermediate target to convert IDL to C source. Note that the name
+ # is based on the name the invoker of the template specified. This
+ # way, each time the template is invoked we get a unique
+ # intermediate action name (since all target names are in the global
+ # scope).
+ action_foreach(code_gen_target_name) {
+ # Access the scope defined by the invoker via the implicit
+ # "invoker" variable.
+ sources = invoker.sources
+
+ # Note that we need an absolute path for our script file name.
+ # The current directory when executing this code will be that of
+ # the invoker (this is why we can use the "sources" directly
+ # above without having to rebase all of the paths). But if we need
+ # to reference a script relative to the template file, we'll need
+ # to use an absolute path instead.
+ script = "//tools/idl/idl_code_generator.py"
+
+ # Tell GN how to expand output names given the sources.
+ # See "gn help source_expansion" for more.
+ outputs = [ "$target_gen_dir/{{source_name_part}}.cc",
+ "$target_gen_dir/{{source_name_part}}.h" ]
+ }
+
+ # Name the source set the same as the template invocation so
+ # instancing this template produces something that other targets
+ # can link to in their deps.
+ source_set(target_name) {
+ # Generates the list of sources, we get these from the
+ # action_foreach above.
+ sources = get_target_outputs(":$code_gen_target_name")
+
+ # This target depends on the files produced by the above code gen
+ # target.
+ deps = [ ":$code_gen_target_name" ]
+ }
+ }
+
+```
+
+### **Example of invoking the resulting template**:
+
+```
+ # This calls the template code above, defining target_name to be
+ # "foo_idl_files" and "invoker" to be the set of stuff defined in
+ # the curly brackets.
+ my_idl("foo_idl_files") {
+ # Goes into the template as "invoker.sources".
+ sources = [ "foo.idl", "bar.idl" ]
+ }
+
+ # Here is a target that depends on our template.
+ executable("my_exe") {
+ # Depend on the name we gave the template call above. Internally,
+ # this will produce a dependency from executable to the source_set
+ # inside the template (since it has this name), which will in turn
+ # depend on the code gen action.
+ deps = [ ":foo_idl_files" ]
+ }
+
+
+```
+## **tool**: Specify arguments to a toolchain tool.
+
+### **Usage**:
+
+```
+ tool(<tool type>) {
+ <tool variables...>
+ }
+
+```
+
+### **Tool types**
+
+```
+ Compiler tools:
+ "cc": C compiler
+ "cxx": C++ compiler
+ "objc": Objective C compiler
+ "objcxx": Objective C++ compiler
+ "rc": Resource compiler (Windows .rc files)
+ "asm": Assembler
+
+ Linker tools:
+ "alink": Linker for static libraries (archives)
+ "solink": Linker for shared libraries
+ "link": Linker for executables
+
+ Other tools:
+ "stamp": Tool for creating stamp files
+ "copy": Tool to copy files.
+
+ Platform specific tools:
+ "copy_bundle_data": [iOS, OS X] Tool to copy files in a bundle.
+ "compile_xcassets": [iOS, OS X] Tool to compile asset catalogs.
+
+```
+
+### **Tool variables**
+
+```
+ command [string with substitutions]
+ Valid for: all tools (required)
+
+ The command to run.
+
+ default_output_extension [string]
+ Valid for: linker tools
+
+ Extension for the main output of a linkable tool. It includes
+ the leading dot. This will be the default value for the
+ {{output_extension}} expansion (discussed below) but will be
+          overridden by the "output extension" variable in a target,
+ if one is specified. Empty string means no extension.
+
+ GN doesn't actually do anything with this extension other than
+ pass it along, potentially with target-specific overrides. One
+ would typically use the {{output_extension}} value in the
+ "outputs" to read this value.
+
+ Example: default_output_extension = ".exe"
+
+ depfile [string]
+ Valid for: compiler tools (optional)
+
+ If the tool can write ".d" files, this specifies the name of
+ the resulting file. These files are used to list header file
+ dependencies (or other implicit input dependencies) that are
+ discovered at build time. See also "depsformat".
+
+ Example: depfile = "{{output}}.d"
+
+ depsformat [string]
+ Valid for: compiler tools (when depfile is specified)
+
+ Format for the deps outputs. This is either "gcc" or "msvc".
+ See the ninja documentation for "deps" for more information.
+
+ Example: depsformat = "gcc"
+
+ description [string with substitutions, optional]
+ Valid for: all tools
+
+ What to print when the command is run.
+
+ Example: description = "Compiling {{source}}"
+
+ lib_switch [string, optional, link tools only]
+ lib_dir_switch [string, optional, link tools only]
+ Valid for: Linker tools except "alink"
+
+ These strings will be prepended to the libraries and library
+ search directories, respectively, because linkers differ on how
+          to specify them. If you specified:
+ lib_switch = "-l"
+ lib_dir_switch = "-L"
+ then the "{{libs}}" expansion for [ "freetype", "expat"]
+ would be "-lfreetype -lexpat".
+
+ outputs [list of strings with substitutions]
+ Valid for: Linker and compiler tools (required)
+
+ An array of names for the output files the tool produces. These
+ are relative to the build output directory. There must always be
+ at least one output file. There can be more than one output (a
+ linker might produce a library and an import library, for
+ example).
+
+ This array just declares to GN what files the tool will
+ produce. It is your responsibility to specify the tool command
+ that actually produces these files.
+
+ If you specify more than one output for shared library links,
+ you should consider setting link_output, depend_output, and
+ runtime_link_output. Otherwise, the first entry in the
+ outputs list should always be the main output which will be
+ linked to.
+
+ Example for a compiler tool that produces .obj files:
+ outputs = [
+ "{{source_out_dir}}/{{source_name_part}}.obj"
+ ]
+
+ Example for a linker tool that produces a .dll and a .lib. The
+ use of {{output_extension}} rather than hardcoding ".dll"
+ allows the extension of the library to be overridden on a
+ target-by-target basis, but in this example, it always
+ produces a ".lib" import library:
+ outputs = [
+ "{{root_out_dir}}/{{target_output_name}}{{output_extension}}",
+ "{{root_out_dir}}/{{target_output_name}}.lib",
+ ]
+
+ link_output [string with substitutions]
+ depend_output [string with substitutions]
+ runtime_link_output [string with substitutions]
+ Valid for: "solink" only (optional)
+
+ These three files specify which of the outputs from the solink
+ tool should be used for linking and dependency tracking. These
+ should match entries in the "outputs". If unspecified, the
+ first item in the "outputs" array will be used for all. See
+ "Separate linking and dependencies for shared libraries"
+ below for more. If link_output is set but runtime_link_output
+ is not set, runtime_link_output defaults to link_output.
+
+ On Windows, where the tools produce a .dll shared library and
+ a .lib import library, you will want the first two to be the
+ import library and the third one to be the .dll file.
+ On Linux, if you're not doing the separate linking/dependency
+ optimization, all of these should be the .so output.
+
+ output_prefix [string]
+ Valid for: Linker tools (optional)
+
+ Prefix to use for the output name. Defaults to empty. This
+ prefix will be prepended to the name of the target (or the
+ output_name if one is manually specified for it) if the prefix
+ is not already there. The result will show up in the
+ {{output_name}} substitution pattern.
+
+ Individual targets can opt-out of the output prefix by setting:
+ output_prefix_override = true
+ (see "gn help output_prefix_override").
+
+ This is typically used to prepend "lib" to libraries on
+ Posix systems:
+ output_prefix = "lib"
+
+ precompiled_header_type [string]
+ Valid for: "cc", "cxx", "objc", "objcxx"
+
+ Type of precompiled headers. If undefined or the empty string,
+ precompiled headers will not be used for this tool. Otherwise
+ use "gcc" or "msvc".
+
+ For precompiled headers to be used for a given target, the
+ target (or a config applied to it) must also specify a
+ "precompiled_header" and, for "msvc"-style headers, a
+ "precompiled_source" value. If the type is "gcc", then both
+ "precompiled_header" and "precompiled_source" must resolve
+ to the same file, despite the different formats required for each.
+ See "gn help precompiled_header" for more.
+
+ restat [boolean]
+ Valid for: all tools (optional, defaults to false)
+
+ Requests that Ninja check the file timestamp after this tool has
+ run to determine if anything changed. Set this if your tool has
+ the ability to skip writing output if the output file has not
+ changed.
+
+      Normally, Ninja will assume that when a tool runs the output
+      will be new and downstream dependents must be rebuilt. When this
+      is set to true, Ninja can skip rebuilding downstream dependents
+      for input changes that don't actually affect the output.
+
+ Example:
+ restat = true
+
+ rspfile [string with substitutions]
+ Valid for: all tools (optional)
+
+ Name of the response file. If empty, no response file will be
+ used. See "rspfile_content".
+
+ rspfile_content [string with substitutions]
+ Valid for: all tools (required when "rspfile" is specified)
+
+ The contents to be written to the response file. This may
+ include all or part of the command to send to the tool which
+ allows you to get around OS command-line length limits.
+
+ This example adds the inputs and libraries to a response file,
+ but passes the linker flags directly on the command line:
+ tool("link") {
+ command = "link -o {{output}} {{ldflags}} @{{output}}.rsp"
+ rspfile = "{{output}}.rsp"
+ rspfile_content = "{{inputs}} {{solibs}} {{libs}}"
+ }
+
+```
+
+### **Expansions for tool variables**
+
+```
+ All paths are relative to the root build directory, which is the
+ current directory for running all tools. These expansions are
+ available to all tools:
+
+ {{label}}
+ The label of the current target. This is typically used in the
+ "description" field for link tools. The toolchain will be
+ omitted from the label for targets in the default toolchain, and
+ will be included for targets in other toolchains.
+
+ {{label_name}}
+ The short name of the label of the target. This is the part
+ after the colon. For "//foo/bar:baz" this will be "baz".
+ Unlike {{target_output_name}}, this is not affected by the
+ "output_prefix" in the tool or the "output_name" set
+ on the target.
+
+ {{output}}
+ The relative path and name of the output(s) of the current
+ build step. If there is more than one output, this will expand
+ to a list of all of them.
+ Example: "out/base/my_file.o"
+
+ {{target_gen_dir}}
+ {{target_out_dir}}
+ The directory of the generated file and output directories,
+ respectively, for the current target. There is no trailing
+ slash.
+ Example: "out/base/test"
+
+ {{target_output_name}}
+ The short name of the current target with no path information,
+ or the value of the "output_name" variable if one is specified
+ in the target. This will include the "output_prefix" if any.
+ See also {{label_name}}.
+ Example: "libfoo" for the target named "foo" and an
+ output prefix for the linker tool of "lib".
+
+ Compiler tools have the notion of a single input and a single output,
+ along with a set of compiler-specific flags. The following expansions
+ are available:
+
+ {{asmflags}}
+ {{cflags}}
+ {{cflags_c}}
+ {{cflags_cc}}
+ {{cflags_objc}}
+ {{cflags_objcc}}
+ {{defines}}
+ {{include_dirs}}
+      Strings that correspond to the processed flags/defines/include
+      directories specified for the target.
+ Example: "--enable-foo --enable-bar"
+
+ Defines will be prefixed by "-D" and include directories will
+ be prefixed by "-I" (these work with Posix tools as well as
+ Microsoft ones).
+
+ {{source}}
+ The relative path and name of the current input file.
+ Example: "../../base/my_file.cc"
+
+ {{source_file_part}}
+ The file part of the source including the extension (with no
+ directory information).
+ Example: "foo.cc"
+
+ {{source_name_part}}
+ The filename part of the source file with no directory or
+ extension.
+ Example: "foo"
+
+ {{source_gen_dir}}
+ {{source_out_dir}}
+ The directory in the generated file and output directories,
+ respectively, for the current input file. If the source file
+      is in the same directory as the target is declared in, they
+      will be the same as the "target" versions above.
+ Example: "gen/base/test"
+
+  Linker tools have multiple inputs and (potentially) multiple outputs.
+  The following expansions are available:
+
+ {{inputs}}
+ {{inputs_newline}}
+ Expands to the inputs to the link step. This will be a list of
+ object files and static libraries.
+ Example: "obj/foo.o obj/bar.o obj/somelibrary.a"
+
+ The "_newline" version will separate the input files with
+ newlines instead of spaces. This is useful in response files:
+ some linkers can take a "-filelist" flag which expects newline
+ separated files, and some Microsoft tools have a fixed-sized
+ buffer for parsing each line of a response file.
+
+ {{ldflags}}
+ Expands to the processed set of ldflags and library search paths
+ specified for the target.
+ Example: "-m64 -fPIC -pthread -L/usr/local/mylib"
+
+ {{libs}}
+ Expands to the list of system libraries to link to. Each will
+ be prefixed by the "lib_prefix".
+
+      As a special case to support Mac, libraries with names ending in
+      ".framework" will be added to the {{libs}} with "-framework"
+      preceding it, and the lib prefix will be ignored.
+
+ Example: "-lfoo -lbar"
+
+ {{output_extension}}
+ The value of the "output_extension" variable in the target,
+ or the value of the "default_output_extension" value in the
+ tool if the target does not specify an output extension.
+ Example: ".so"
+
+  {{solibs}}
+      Extra libraries from shared library dependencies not specified
+ in the {{inputs}}. This is the list of link_output files from
+ shared libraries (if the solink tool specifies a "link_output"
+ variable separate from the "depend_output").
+
+ These should generally be treated the same as libs by your tool.
+ Example: "libfoo.so libbar.so"
+
+ The copy tool allows the common compiler/linker substitutions, plus
+ {{source}} which is the source of the copy. The stamp tool allows
+ only the common tool substitutions.
+
+  The copy_bundle_data and compile_xcassets tools only allow the common
+  tool substitutions. Both tools are required to create iOS/OS X bundles
+  and need only be defined on those platforms.
+
+ The copy_bundle_data tool will be called with one source and needs to
+ copy (optionally optimizing the data representation) to its output. It
+ may be called with a directory as input and it needs to be recursively
+ copied.
+
+  The compile_xcassets tool will be called with one or more sources
+  (each an asset catalog) that need to be compiled to a single output.
+
+```
+
+### **Separate linking and dependencies for shared libraries**
+
+```
+ Shared libraries are special in that not all changes to them require
+ that dependent targets be re-linked. If the shared library is changed
+ but no imports or exports are different, dependent code needn't be
+ relinked, which can speed up the build.
+
+ If your link step can output a list of exports from a shared library
+ and writes the file only if the new one is different, the timestamp of
+ this file can be used for triggering re-links, while the actual shared
+ library would be used for linking.
+
+ You will need to specify
+ restat = true
+ in the linker tool to make this work, so Ninja will detect if the
+ timestamp of the dependency file has changed after linking (otherwise
+ it will always assume that running a command updates the output):
+
+ tool("solink") {
+ command = "..."
+ outputs = [
+ "{{root_out_dir}}/{{target_output_name}}{{output_extension}}",
+ "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.TOC",
+ ]
+ link_output =
+ "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"
+ depend_output =
+ "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.TOC"
+ restat = true
+ }
+
+```
+
+### **Example**
+
+```
+ toolchain("my_toolchain") {
+ # Put these at the top to apply to all tools below.
+ lib_prefix = "-l"
+ lib_dir_prefix = "-L"
+
+ tool("cc") {
+ command = "gcc {{source}} -o {{output}}"
+ outputs = [ "{{source_out_dir}}/{{source_name_part}}.o" ]
+ description = "GCC {{source}}"
+ }
+ tool("cxx") {
+ command = "g++ {{source}} -o {{output}}"
+ outputs = [ "{{source_out_dir}}/{{source_name_part}}.o" ]
+ description = "G++ {{source}}"
+ }
+ }
+
+
+```
+## **toolchain**: Defines a toolchain.
+
+```
+ A toolchain is a set of commands and build flags used to compile the
+ source code. You can have more than one toolchain in use at once in
+ a build.
+
+```
+
+### **Functions and variables**
+
+```
+  tool()
+    The tool() function call specifies the commands to run for
+    a given step. See "gn help tool".
+
+ toolchain_args()
+ List of arguments to pass to the toolchain when invoking this
+ toolchain. This applies only to non-default toolchains. See
+ "gn help toolchain_args" for more.
+
+ deps
+ Dependencies of this toolchain. These dependencies will be resolved
+ before any target in the toolchain is compiled. To avoid circular
+ dependencies these must be targets defined in another toolchain.
+
+ This is expressed as a list of targets, and generally these targets
+ will always specify a toolchain:
+ deps = [ "//foo/bar:baz(//build/toolchain:bootstrap)" ]
+
+ This concept is somewhat inefficient to express in Ninja (it
+ requires a lot of duplicate of rules) so should only be used when
+ absolutely necessary.
+
+  concurrent_links
+    An integer expressing the number of links that Ninja will perform in
+ parallel. GN will create a pool for shared library and executable
+ link steps with this many processes. Since linking is memory- and
+ I/O-intensive, projects with many large targets may want to limit
+ the number of parallel steps to avoid overloading the computer.
+ Since creating static libraries is generally not as intensive
+ there is no limit to "alink" steps.
+
+ Defaults to 0 which Ninja interprets as "no limit".
+
+ The value used will be the one from the default toolchain of the
+ current build.
+
+```
+
+### **Invoking targets in toolchains**:
+
+```
+ By default, when a target depends on another, there is an implicit
+ toolchain label that is inherited, so the dependee has the same one
+ as the dependent.
+
+ You can override this and refer to any other toolchain by explicitly
+ labeling the toolchain to use. For example:
+ data_deps = [ "//plugins:mine(//toolchains:plugin_toolchain)" ]
+  The string "//toolchains:plugin_toolchain" is a label that
+  identifies the toolchain declaration for compiling the sources.
+
+ To load a file in an alternate toolchain, GN does the following:
+
+ 1. Loads the file with the toolchain definition in it (as determined
+ by the toolchain label).
+ 2. Re-runs the master build configuration file, applying the
+ arguments specified by the toolchain_args section of the toolchain
+ definition (see "gn help toolchain_args").
+ 3. Loads the destination build file in the context of the
+ configuration file in the previous step.
+
+```
+
+### **Example**:
+```
+ toolchain("plugin_toolchain") {
+ concurrent_links = 8
+
+ tool("cc") {
+ command = "gcc {{source}}"
+ ...
+ }
+
+ toolchain_args() {
+ is_plugin = true
+ is_32bit = true
+ is_64bit = false
+ }
+ }
+
+
+```
+## **toolchain_args**: Set build arguments for toolchain build setup.
+
+```
+ Used inside a toolchain definition to pass arguments to an alternate
+ toolchain's invocation of the build.
+
+ When you specify a target using an alternate toolchain, the master
+ build configuration file is re-interpreted in the context of that
+ toolchain (see "gn help toolchain"). The toolchain_args function
+ allows you to control the arguments passed into this alternate
+ invocation of the build.
+
+ Any default system arguments or arguments passed in on the command-
+ line will also be passed to the alternate invocation unless explicitly
+ overridden by toolchain_args.
+
+ The toolchain_args will be ignored when the toolchain being defined
+ is the default. In this case, it's expected you want the default
+ argument values.
+
+ See also "gn help buildargs" for an overview of these arguments.
+
+```
+
+### **Example**:
+```
+ toolchain("my_weird_toolchain") {
+ ...
+ toolchain_args() {
+ # Override the system values for a generic Posix system.
+ is_win = false
+ is_posix = true
+
+ # Pass this new value for specific setup for my toolchain.
+ is_my_weird_system = true
+ }
+ }
+
+
+```
+## **write_file**: Write a file to disk.
+
+```
+ write_file(filename, data)
+
+ If data is a list, the list will be written one-item-per-line with no
+ quoting or brackets.
+
+ If the file exists and the contents are identical to that being
+ written, the file will not be updated. This will prevent unnecessary
+ rebuilds of targets that depend on this file.
+
+  One use for write_file is to write a list of inputs to a script
+  that might be too long for the command line. However, it is
+  preferable to use response files for this purpose. See
+  "gn help response_file_contents".
+
+ TODO(brettw) we probably need an optional third argument to control
+ list formatting.
+
+```
+
+### **Arguments**
+
+```
+ filename
+ Filename to write. This must be within the output directory.
+
+ data:
+ The list or string to write.
+
+
+```
+## **current_cpu**: The processor architecture of the current toolchain.
+
+```
+ The build configuration usually sets this value based on the value
+ of "host_cpu" (see "gn help host_cpu") and then threads
+ this through the toolchain definitions to ensure that it always
+ reflects the appropriate value.
+
+  This value is not used internally by GN for any purpose. It is
+  set to the empty string ("") by default but is declared so
+  that it can be overridden on the command line if so desired.
+
+ See "gn help target_cpu" for a list of common values returned.
+
+
+```
+## **current_os**: The operating system of the current toolchain.
+
+```
+ The build configuration usually sets this value based on the value
+ of "target_os" (see "gn help target_os"), and then threads this
+ through the toolchain definitions to ensure that it always reflects
+ the appropriate value.
+
+  This value is not used internally by GN for any purpose. It is
+  set to the empty string ("") by default but is declared so
+  that it can be overridden on the command line if so desired.
+
+ See "gn help target_os" for a list of common values returned.
+
+
+```
+## **current_toolchain**: Label of the current toolchain.
+
+```
+ A fully-qualified label representing the current toolchain. You can
+ use this to make toolchain-related decisions in the build. See also
+ "default_toolchain".
+
+```
+
+### **Example**
+
+```
+ if (current_toolchain == "//build:64_bit_toolchain") {
+ executable("output_thats_64_bit_only") {
+ ...
+
+
+```
+## **default_toolchain**: [string] Label of the default toolchain.
+
+```
+ A fully-qualified label representing the default toolchain, which may
+ not necessarily be the current one (see "current_toolchain").
+
+
+```
+## **host_cpu**: The processor architecture that GN is running on.
+
+```
+  This value is exposed so that cross-compile toolchains can
+  access the host architecture when needed.
+
+  The value should generally be considered read-only, but it can be
+  overridden in order to handle unusual cases where there might
+ be multiple plausible values for the host architecture (e.g., if
+ you can do either 32-bit or 64-bit builds). The value is not used
+ internally by GN for any purpose.
+
+```
+
+### **Some possible values**:
+```
+ - "x64"
+ - "x86"
+
+
+```
+## **host_os**: [string] The operating system that GN is running on.
+
+```
+ This value is exposed so that cross-compiles can access the host
+ build system's settings.
+
+ This value should generally be treated as read-only. It, however,
+ is not used internally by GN for any purpose.
+
+```
+
+### **Some possible values**:
+```
+ - "linux"
+ - "mac"
+ - "win"
+
+
+```
+## **python_path**: Absolute path of Python.
+
+```
+ Normally used in toolchain definitions if running some command
+ requires Python. You will normally not need this when invoking scripts
+ since GN automatically finds it for you.
+
+
+```
+## **root_build_dir**: [string] Directory where build commands are run.
+
+```
+ This is the root build output directory which will be the current
+ directory when executing all compilers and scripts.
+
+ Most often this is used with rebase_path (see "gn help rebase_path")
+ to convert arguments to be relative to a script's current directory.
+
+
+```
+## **root_gen_dir**: Directory for the toolchain's generated files.
+
+```
+ Absolute path to the root of the generated output directory tree for
+ the current toolchain. An example would be "//out/Debug/gen" for the
+ default toolchain, or "//out/Debug/arm/gen" for the "arm"
+ toolchain.
+
+ This is primarily useful for setting up include paths for generated
+ files. If you are passing this to a script, you will want to pass it
+ through rebase_path() (see "gn help rebase_path") to convert it
+ to be relative to the build directory.
+
+ See also "target_gen_dir" which is usually a better location for
+ generated files. It will be inside the root generated dir.
+
+
+```
+## **root_out_dir**: [string] Root directory for toolchain output files.
+
+```
+ Absolute path to the root of the output directory tree for the current
+ toolchain. It will not have a trailing slash.
+
+ For the default toolchain this will be the same as the root_build_dir.
+ An example would be "//out/Debug" for the default toolchain, or
+ "//out/Debug/arm" for the "arm" toolchain.
+
+ This is primarily useful for setting up script calls. If you are
+ passing this to a script, you will want to pass it through
+ rebase_path() (see "gn help rebase_path") to convert it
+ to be relative to the build directory.
+
+ See also "target_out_dir" which is usually a better location for
+ output files. It will be inside the root output dir.
+
+```
+
+### **Example**
+
+```
+ action("myscript") {
+ # Pass the output dir to the script.
+ args = [ "-o", rebase_path(root_out_dir, root_build_dir) ]
+ }
+
+
+```
+## **target_cpu**: The desired cpu architecture for the build.
+
+```
+ This value should be used to indicate the desired architecture for
+ the primary objects of the build. It will match the cpu architecture
+ of the default toolchain.
+
+ In many cases, this is the same as "host_cpu", but in the case
+ of cross-compiles, this can be set to something different. This
+ value is different from "current_cpu" in that it can be referenced
+ from inside any toolchain. This value can also be ignored if it is
+ not needed or meaningful for a project.
+
+ This value is not used internally by GN for any purpose, so it
+ may be set to whatever value is needed for the build.
+ GN defaults this value to the empty string ("") and the
+ configuration files should set it to an appropriate value
+ (e.g., setting it to the value of "host_cpu") if it is not
+ overridden on the command line or in the args.gn file.
+
+ Where practical, use one of the following list of common values:
+
+```
+
+### **Possible values**:
+```
+ - "x86"
+ - "x64"
+ - "arm"
+ - "arm64"
+ - "mipsel"
+
+
+```
+## **target_gen_dir**: Directory for a target's generated files.
+
+```
+ Absolute path to the target's generated file directory. This will be
+ the "root_gen_dir" followed by the relative path to the current
+ build file. If your file is in "//tools/doom_melon" then
+ target_gen_dir would be "//out/Debug/gen/tools/doom_melon". It will
+ not have a trailing slash.
+
+ This is primarily useful for setting up include paths for generated
+ files. If you are passing this to a script, you will want to pass it
+ through rebase_path() (see "gn help rebase_path") to convert it
+ to be relative to the build directory.
+
+ See also "gn help root_gen_dir".
+
+```
+
+### **Example**
+
+```
+ action("myscript") {
+ # Pass the generated output dir to the script.
+ args = [ "-o", rebase_path(target_gen_dir, root_build_dir) ]
+ }
+
+
+```
+## **target_os**: The desired operating system for the build.
+
+```
+ This value should be used to indicate the desired operating system
+ for the primary object(s) of the build. It will match the OS of
+ the default toolchain.
+
+ In many cases, this is the same as "host_os", but in the case of
+ cross-compiles, it may be different. This variable differs from
+ "current_os" in that it can be referenced from inside any
+ toolchain and will always return the initial value.
+
+ This should be set to the most specific value possible. So,
+ "android" or "chromeos" should be used instead of "linux"
+ where applicable, even though Android and ChromeOS are both Linux
+ variants. This can mean that one needs to write
+
+ if (target_os == "android" || target_os == "linux") {
+ # ...
+ }
+
+ and so forth.
+
+ This value is not used internally by GN for any purpose, so it
+ may be set to whatever value is needed for the build.
+ GN defaults this value to the empty string ("") and the
+ configuration files should set it to an appropriate value
+ (e.g., setting it to the value of "host_os") if it is not
+ set via the command line or in the args.gn file.
+
+ Where practical, use one of the following list of common values:
+
+```
+
+### **Possible values**:
+```
+ - "android"
+ - "chromeos"
+ - "ios"
+ - "linux"
+ - "nacl"
+ - "mac"
+ - "win"
+
+
+```
+## **target_out_dir**: [string] Directory for target output files.
+
+```
+ Absolute path to the target's generated file directory. If your
+ current target is in "//tools/doom_melon" then this value might be
+ "//out/Debug/obj/tools/doom_melon". It will not have a trailing
+ slash.
+
+ This is primarily useful for setting up arguments for calling
+ scripts. If you are passing this to a script, you will want to pass it
+ through rebase_path() (see "gn help rebase_path") to convert it
+ to be relative to the build directory.
+
+ See also "gn help root_out_dir".
+
+```
+
+### **Example**
+
+```
+ action("myscript") {
+ # Pass the output dir to the script.
+ args = [ "-o", rebase_path(target_out_dir, root_build_dir) ]
+ }
+
+
+```
+## **all_dependent_configs**: Configs to be forced on dependents.
+
+```
+ A list of config labels.
+
+ All targets depending on this one, and recursively, all targets
+ depending on those, will have the configs listed in this variable
+ added to them. These configs will also apply to the current target.
+
+  This addition happens in a second phase once a target and all of its
+  dependencies have been resolved. Therefore, a target will not see
+  these force-added configs in their "configs" variable while the
+  script is running, and they cannot be removed. As a result, this
+  capability should generally only be used to add defines and include
+  directories necessary to compile a target's headers.
+
+ See also "public_configs".
+
+```
+
+### **Ordering of flags and values**
+
+```
+ 1. Those set on the current target (not in a config).
+ 2. Those set on the "configs" on the target in order that the
+ configs appear in the list.
+ 3. Those set on the "all_dependent_configs" on the target in order
+ that the configs appear in the list.
+ 4. Those set on the "public_configs" on the target in order that
+ those configs appear in the list.
+  5. all_dependent_configs pulled from dependencies, in the order of
+     the "deps" list. This is done recursively. If a config appears
+     more than once, only the first occurrence will be used.
+ 6. public_configs pulled from dependencies, in the order of the
+ "deps" list. If a dependency is public, they will be applied
+ recursively.
+
+
+```
+## **allow_circular_includes_from**: Permit includes from deps.
+
+```
+ A list of target labels. Must be a subset of the target's "deps".
+ These targets will be permitted to include headers from the current
+ target despite the dependency going in the opposite direction.
+
+ When you use this, both targets must be included in a final binary
+ for it to link. To keep linker errors from happening, it is good
+ practice to have all external dependencies depend only on one of
+ the two targets, and to set the visibility on the other to enforce
+ this. Thus the targets will always be linked together in any output.
+
+```
+
+### **Details**
+
+```
+ Normally, for a file in target A to include a file from target B,
+ A must list B as a dependency. This invariant is enforced by the
+ "gn check" command (and the --check flag to "gn gen" -- see
+ "gn help check").
+
+ Sometimes, two targets might be the same unit for linking purposes
+ (two source sets or static libraries that would always be linked
+ together in a final executable or shared library) and they each
+ include headers from the other: you want A to be able to include B's
+ headers, and B to include A's headers. This is not an ideal situation
+ but is sometimes unavoidable.
+
+ This list, if specified, lists which of the dependencies of the
+ current target can include header files from the current target.
+ That is, if A depends on B, B can only include headers from A if it is
+ in A's allow_circular_includes_from list. Normally includes must
+ follow the direction of dependencies, this flag allows them to go
+ in the opposite direction.
+
+```
+
+### **Danger**
+
+```
+ In the above example, A's headers are likely to include headers from
+ A's dependencies. Those dependencies may have public_configs that
+ apply flags, defines, and include paths that make those headers work
+ properly.
+
+ With allow_circular_includes_from, B can include A's headers, and
+ transitively from A's dependencies, without having the dependencies
+ that would bring in the public_configs those headers need. The result
+ may be errors or inconsistent builds.
+
+ So when you use allow_circular_includes_from, make sure that any
+ compiler settings, flags, and include directories are the same between
+ both targets (consider putting such things in a shared config they can
+ both reference). Make sure the dependencies are also the same (you
+ might consider a group to collect such dependencies they both
+ depend on).
+
+```
+
+### **Example**
+
+```
+ source_set("a") {
+ deps = [ ":b", ":a_b_shared_deps" ]
+ allow_circular_includes_from = [ ":b" ]
+ ...
+ }
+
+ source_set("b") {
+ deps = [ ":a_b_shared_deps" ]
+ # Sources here can include headers from a despite lack of deps.
+ ...
+ }
+
+ group("a_b_shared_deps") {
+ public_deps = [ ":c" ]
+ }
+
+
+```
+## **args**: Arguments passed to an action.
+
+```
+ For action and action_foreach targets, args is the list of arguments
+ to pass to the script. Typically you would use source expansion (see
+ "gn help source_expansion") to insert the source file names.
+
+ See also "gn help action" and "gn help action_foreach".
+
+
+```
+## **asmflags**: Flags passed to the assembler.
+
+```
+ A list of strings.
+
+ "asmflags" are passed to any invocation of a tool that takes an
+ .asm or .S file as input.
+
+```
+
+### **Ordering of flags and values**
+
+```
+ 1. Those set on the current target (not in a config).
+ 2. Those set on the "configs" on the target in order that the
+ configs appear in the list.
+ 3. Those set on the "all_dependent_configs" on the target in order
+ that the configs appear in the list.
+ 4. Those set on the "public_configs" on the target in order that
+ those configs appear in the list.
+  5. all_dependent_configs pulled from dependencies, in the order of
+     the "deps" list. This is done recursively. If a config appears
+     more than once, only the first occurrence will be used.
+ 6. public_configs pulled from dependencies, in the order of the
+ "deps" list. If a dependency is public, they will be applied
+ recursively.
+
+
+```
+## **assert_no_deps**: Ensure no deps on these targets.
+
+```
+ A list of label patterns.
+
+ This list is a list of patterns that must not match any of the
+ transitive dependencies of the target. These include all public,
+ private, and data dependencies, and cross shared library boundaries.
+ This allows you to express that undesirable code isn't accidentally
+ added to downstream dependencies in a way that might otherwise be
+ difficult to notice.
+
+ Checking does not cross executable boundaries. If a target depends on
+ an executable, it's assumed that the executable is a tool that is
+ producing part of the build rather than something that is linked and
+ distributed. This allows assert_no_deps to express what is distributed
+ in the final target rather than depend on the internal build steps
+ (which may include non-distributable code).
+
+ See "gn help label_pattern" for the format of the entries in the
+ list. These patterns allow blacklisting individual targets or whole
+ directory hierarchies.
+
+ Sometimes it is desirable to enforce that many targets have no
+ dependencies on a target or set of targets. One efficient way to
+ express this is to create a group with the assert_no_deps rule on
+ it, and make that group depend on all targets you want to apply that
+ assertion to.
+
+```
+
+### **Example**
+
+```
+ executable("doom_melon") {
+ deps = [ "//foo:bar" ]
+ ...
+ assert_no_deps = [
+ "//evil/*", # Don't link any code from the evil directory.
+ "//foo:test_support", # This target is also disallowed.
+ ]
+ }
+
+
+```
+## **bundle_executable_dir**: Expansion of {{bundle_executable_dir}} in create_bundle.
+
+```
+ A string corresponding to a path in $root_build_dir.
+
+ This string is used by the "create_bundle" target to expand the
+ {{bundle_executable_dir}} of the "bundle_data" target it depends on.
+ This must correspond to a path under "bundle_root_dir".
+
+ See "gn help bundle_root_dir" for examples.
+
+
+```
+## **bundle_plugins_dir**: Expansion of {{bundle_plugins_dir}} in create_bundle.
+
+```
+ A string corresponding to a path in $root_build_dir.
+
+ This string is used by the "create_bundle" target to expand the
+ {{bundle_plugins_dir}} of the "bundle_data" target it depends on.
+ This must correspond to a path under "bundle_root_dir".
+
+ See "gn help bundle_root_dir" for examples.
+
+
+```
+## **bundle_resources_dir**: Expansion of {{bundle_resources_dir}} in create_bundle.
+
+```
+ A string corresponding to a path in $root_build_dir.
+
+ This string is used by the "create_bundle" target to expand the
+ {{bundle_resources_dir}} of the "bundle_data" target it depends on.
+ This must correspond to a path under "bundle_root_dir".
+
+ See "gn help bundle_root_dir" for examples.
+
+
+```
+## **bundle_root_dir**: Expansion of {{bundle_root_dir}} in create_bundle.
+
+```
+ A string corresponding to a path in root_build_dir.
+
+ This string is used by the "create_bundle" target to expand the
+ {{bundle_root_dir}} of the "bundle_data" target it depends on.
+ This must correspond to a path under root_build_dir.
+
+```
+
+### **Example**
+
+```
+ bundle_data("info_plist") {
+ sources = [ "Info.plist" ]
+ outputs = [ "{{bundle_root_dir}}/Info.plist" ]
+ }
+
+ create_bundle("doom_melon.app") {
+ deps = [ ":info_plist" ]
+ bundle_root_dir = root_build_dir + "/doom_melon.app/Contents"
+ bundle_resources_dir = bundle_root_dir + "/Resources"
+ bundle_executable_dir = bundle_root_dir + "/MacOS"
+ bundle_plugins_dir = bundle_root_dir + "/PlugIns"
+ }
+
+
+```
+## **cflags***: Flags passed to the C compiler.
+
+```
+ A list of strings.
+
+ "cflags" are passed to all invocations of the C, C++, Objective C,
+ and Objective C++ compilers.
+
+ To target one of these variants individually, use "cflags_c",
+ "cflags_cc", "cflags_objc", and "cflags_objcc",
+ respectively. These variant-specific versions of cflags* will be
+ appended on the compiler command line after "cflags".
+
+ See also "asmflags" for flags for assembly-language files.
+
+```
+
+### **Ordering of flags and values**
+
+```
+ 1. Those set on the current target (not in a config).
+ 2. Those set on the "configs" on the target in order that the
+ configs appear in the list.
+ 3. Those set on the "all_dependent_configs" on the target in order
+ that the configs appear in the list.
+ 4. Those set on the "public_configs" on the target in order that
+ those configs appear in the list.
+ 5. all_dependent_configs pulled from dependencies, in the order of
+ the "deps" list. This is done recursively. If a config appears
+         more than once, only the first occurrence will be used.
+ 6. public_configs pulled from dependencies, in the order of the
+ "deps" list. If a dependency is public, they will be applied
+ recursively.
+
+
+```
+## **cflags***: Flags passed to the C compiler.
+
+```
+ A list of strings.
+
+ "cflags" are passed to all invocations of the C, C++, Objective C,
+ and Objective C++ compilers.
+
+ To target one of these variants individually, use "cflags_c",
+ "cflags_cc", "cflags_objc", and "cflags_objcc",
+ respectively. These variant-specific versions of cflags* will be
+ appended on the compiler command line after "cflags".
+
+ See also "asmflags" for flags for assembly-language files.
+
+```
+
+### **Ordering of flags and values**
+
+```
+ 1. Those set on the current target (not in a config).
+ 2. Those set on the "configs" on the target in order that the
+ configs appear in the list.
+ 3. Those set on the "all_dependent_configs" on the target in order
+ that the configs appear in the list.
+ 4. Those set on the "public_configs" on the target in order that
+ those configs appear in the list.
+ 5. all_dependent_configs pulled from dependencies, in the order of
+ the "deps" list. This is done recursively. If a config appears
+         more than once, only the first occurrence will be used.
+ 6. public_configs pulled from dependencies, in the order of the
+ "deps" list. If a dependency is public, they will be applied
+ recursively.
+
+
+```
+## **cflags***: Flags passed to the C compiler.
+
+```
+ A list of strings.
+
+ "cflags" are passed to all invocations of the C, C++, Objective C,
+ and Objective C++ compilers.
+
+ To target one of these variants individually, use "cflags_c",
+ "cflags_cc", "cflags_objc", and "cflags_objcc",
+ respectively. These variant-specific versions of cflags* will be
+ appended on the compiler command line after "cflags".
+
+ See also "asmflags" for flags for assembly-language files.
+
+```
+
+### **Ordering of flags and values**
+
+```
+ 1. Those set on the current target (not in a config).
+ 2. Those set on the "configs" on the target in order that the
+ configs appear in the list.
+ 3. Those set on the "all_dependent_configs" on the target in order
+ that the configs appear in the list.
+ 4. Those set on the "public_configs" on the target in order that
+ those configs appear in the list.
+ 5. all_dependent_configs pulled from dependencies, in the order of
+ the "deps" list. This is done recursively. If a config appears
+         more than once, only the first occurrence will be used.
+ 6. public_configs pulled from dependencies, in the order of the
+ "deps" list. If a dependency is public, they will be applied
+ recursively.
+
+
+```
+## **cflags***: Flags passed to the C compiler.
+
+```
+ A list of strings.
+
+ "cflags" are passed to all invocations of the C, C++, Objective C,
+ and Objective C++ compilers.
+
+ To target one of these variants individually, use "cflags_c",
+ "cflags_cc", "cflags_objc", and "cflags_objcc",
+ respectively. These variant-specific versions of cflags* will be
+ appended on the compiler command line after "cflags".
+
+ See also "asmflags" for flags for assembly-language files.
+
+```
+
+### **Ordering of flags and values**
+
+```
+ 1. Those set on the current target (not in a config).
+ 2. Those set on the "configs" on the target in order that the
+ configs appear in the list.
+ 3. Those set on the "all_dependent_configs" on the target in order
+ that the configs appear in the list.
+ 4. Those set on the "public_configs" on the target in order that
+ those configs appear in the list.
+ 5. all_dependent_configs pulled from dependencies, in the order of
+ the "deps" list. This is done recursively. If a config appears
+         more than once, only the first occurrence will be used.
+ 6. public_configs pulled from dependencies, in the order of the
+ "deps" list. If a dependency is public, they will be applied
+ recursively.
+
+
+```
+## **cflags***: Flags passed to the C compiler.
+
+```
+ A list of strings.
+
+ "cflags" are passed to all invocations of the C, C++, Objective C,
+ and Objective C++ compilers.
+
+ To target one of these variants individually, use "cflags_c",
+ "cflags_cc", "cflags_objc", and "cflags_objcc",
+ respectively. These variant-specific versions of cflags* will be
+ appended on the compiler command line after "cflags".
+
+ See also "asmflags" for flags for assembly-language files.
+
+```
+
+### **Ordering of flags and values**
+
+```
+ 1. Those set on the current target (not in a config).
+ 2. Those set on the "configs" on the target in order that the
+ configs appear in the list.
+ 3. Those set on the "all_dependent_configs" on the target in order
+ that the configs appear in the list.
+ 4. Those set on the "public_configs" on the target in order that
+ those configs appear in the list.
+ 5. all_dependent_configs pulled from dependencies, in the order of
+ the "deps" list. This is done recursively. If a config appears
+         more than once, only the first occurrence will be used.
+ 6. public_configs pulled from dependencies, in the order of the
+ "deps" list. If a dependency is public, they will be applied
+ recursively.
+
+
+```
+## **check_includes**: [boolean] Controls whether a target's files are checked.
+
+```
+ When true (the default), the "gn check" command (as well as
+ "gn gen" with the --check flag) will check this target's sources
+ and headers for proper dependencies.
+
+ When false, the files in this target will be skipped by default.
+ This does not affect other targets that depend on the current target,
+ it just skips checking the includes of the current target's files.
+
+ If there are a few conditionally included headers that trip up
+ checking, you can exclude headers individually by annotating them with
+ "nogncheck" (see "gn help nogncheck").
+
+ The topic "gn help check" has general information on how checking
+ works and advice on how to pass a check in problematic cases.
+
+```
+
+### **Example**
+
+```
+ source_set("busted_includes") {
+ # This target's includes are messed up, exclude it from checking.
+ check_includes = false
+ ...
+ }
+
+
+```
+## **complete_static_lib**: [boolean] Links all deps into a static library.
+
+```
+ A static library normally doesn't include code from dependencies, but
+ instead forwards the static libraries and source sets in its deps up
+ the dependency chain until a linkable target (an executable or shared
+ library) is reached. The final linkable target only links each static
+ library once, even if it appears more than once in its dependency
+ graph.
+
+ In some cases the static library might be the final desired output.
+ For example, you may be producing a static library for distribution to
+ third parties. In this case, the static library should include code
+ for all dependencies in one complete package. Since GN does not unpack
+ static libraries to forward their contents up the dependency chain,
+  it is an error for complete static libraries to depend on other static
+  libraries.
+
+```
+
+### **Example**
+
+```
+ static_library("foo") {
+ complete_static_lib = true
+ deps = [ "bar" ]
+ }
+
+
+```
+## **configs**: Configs applying to this target or config.
+
+```
+ A list of config labels.
+
+```
+
+### **Configs on a target**
+
+```
+ When used on a target, the include_dirs, defines, etc. in each config
+ are appended in the order they appear to the compile command for each
+ file in the target. They will appear after the include_dirs, defines,
+ etc. that the target sets directly.
+
+ Since configs apply after the values set on a target, directly setting
+ a compiler flag will prepend it to the command line. If you want to
+ append a flag instead, you can put that flag in a one-off config and
+ append that config to the target's configs list.
+
+ The build configuration script will generally set up the default
+ configs applying to a given target type (see "set_defaults").
+ When a target is being defined, it can add to or remove from this
+ list.
+
+```
+
+### **Configs on a config**
+
+```
+ It is possible to create composite configs by specifying configs on a
+ config. One might do this to forward values, or to factor out blocks
+ of settings from very large configs into more manageable named chunks.
+
+ In this case, the composite config is expanded to be the concatenation
+ of its own values, and in order, the values from its sub-configs
+ *before* anything else happens. This has some ramifications:
+
+ - A target has no visibility into a config's sub-configs. Target
+ code only sees the name of the composite config. It can't remove
+ sub-configs or opt in to only parts of it. The composite config may
+ not even be defined before the target is.
+
+ - You can get duplication of values if a config is listed twice, say,
+ on a target and in a sub-config that also applies. In other cases,
+ the configs applying to a target are de-duped. It's expected that
+ if a config is listed as a sub-config that it is only used in that
+ context. (Note that it's possible to fix this and de-dupe, but it's
+ not normally relevant and complicates the implementation.)
+
+```
+
+### **Ordering of flags and values**
+
+```
+ 1. Those set on the current target (not in a config).
+ 2. Those set on the "configs" on the target in order that the
+ configs appear in the list.
+ 3. Those set on the "all_dependent_configs" on the target in order
+ that the configs appear in the list.
+ 4. Those set on the "public_configs" on the target in order that
+ those configs appear in the list.
+ 5. all_dependent_configs pulled from dependencies, in the order of
+ the "deps" list. This is done recursively. If a config appears
+         more than once, only the first occurrence will be used.
+ 6. public_configs pulled from dependencies, in the order of the
+ "deps" list. If a dependency is public, they will be applied
+ recursively.
+
+```
+
+### **Example**
+
+```
+ # Configs on a target.
+ source_set("foo") {
+ # Don't use the default RTTI config that BUILDCONFIG applied to us.
+ configs -= [ "//build:no_rtti" ]
+
+ # Add some of our own settings.
+ configs += [ ":mysettings" ]
+ }
+
+ # Create a default_optimization config that forwards to one of a set
+ # of more specialized configs depending on build flags. This pattern
+ # is useful because it allows a target to opt in to either a default
+ # set, or a more specific set, while avoid duplicating the settings in
+ # two places.
+ config("super_optimization") {
+ cflags = [ ... ]
+ }
+ config("default_optimization") {
+ if (optimize_everything) {
+ configs = [ ":super_optimization" ]
+ } else {
+ configs = [ ":no_optimization" ]
+ }
+ }
+
+
+```
+## **console**: Run this action in the console pool.
+
+```
+ Boolean. Defaults to false.
+
+ Actions marked "console = true" will be run in the built-in ninja
+ "console" pool. They will have access to real stdin and stdout, and
+ output will not be buffered by ninja. This can be useful for
+ long-running actions with progress logs, or actions that require user
+ input.
+
+ Only one console pool target can run at any one time in Ninja. Refer
+ to the Ninja documentation on the console pool for more info.
+
+```
+
+### **Example**
+
+```
+ action("long_action_with_progress_logs") {
+ console = true
+ }
+
+
+```
+## **data**: Runtime data file dependencies.
+
+```
+ Lists files or directories required to run the given target. These are
+ typically data files or directories of data files. The paths are
+ interpreted as being relative to the current build file. Since these
+ are runtime dependencies, they do not affect which targets are built
+ or when. To declare input files to a script, use "inputs".
+
+ Appearing in the "data" section does not imply any special handling
+ such as copying them to the output directory. This is just used for
+ declaring runtime dependencies. Runtime dependencies can be queried
+ using the "runtime_deps" category of "gn desc" or written during
+ build generation via "--runtime-deps-list-file".
+
+ GN doesn't require data files to exist at build-time. So actions that
+ produce files that are in turn runtime dependencies can list those
+ generated files both in the "outputs" list as well as the "data"
+ list.
+
+  By convention, directories should be listed with a trailing slash:
+ data = [ "test/data/" ]
+ However, no verification is done on these so GN doesn't enforce this.
+ The paths are just rebased and passed along when requested.
+
+ See "gn help runtime_deps" for how these are used.
+
+
+```
+## **data_deps**: Non-linked dependencies.
+
+```
+ A list of target labels.
+
+ Specifies dependencies of a target that are not actually linked into
+ the current target. Such dependencies will be built and will be
+ available at runtime.
+
+ This is normally used for things like plugins or helper programs that
+ a target needs at runtime.
+
+ See also "gn help deps" and "gn help data".
+
+```
+
+### **Example**
+
+```
+ executable("foo") {
+ deps = [ "//base" ]
+ data_deps = [ "//plugins:my_runtime_plugin" ]
+ }
+
+
+```
+## **defines**: C preprocessor defines.
+
+```
+ A list of strings
+
+ These strings will be passed to the C/C++ compiler as #defines. The
+ strings may or may not include an "=" to assign a value.
+
+```
+
+### **Ordering of flags and values**
+
+```
+ 1. Those set on the current target (not in a config).
+ 2. Those set on the "configs" on the target in order that the
+ configs appear in the list.
+ 3. Those set on the "all_dependent_configs" on the target in order
+ that the configs appear in the list.
+ 4. Those set on the "public_configs" on the target in order that
+ those configs appear in the list.
+ 5. all_dependent_configs pulled from dependencies, in the order of
+ the "deps" list. This is done recursively. If a config appears
+         more than once, only the first occurrence will be used.
+ 6. public_configs pulled from dependencies, in the order of the
+ "deps" list. If a dependency is public, they will be applied
+ recursively.
+
+```
+
+### **Example**
+
+```
+ defines = [ "AWESOME_FEATURE", "LOG_LEVEL=3" ]
+
+
+```
+## **depfile**: [string] File name for input dependencies for actions.
+
+```
+ If nonempty, this string specifies that the current action or
+ action_foreach target will generate the given ".d" file containing
+ the dependencies of the input. Empty or unset means that the script
+ doesn't generate the files.
+
+ The .d file should go in the target output directory. If you have more
+ than one source file that the script is being run over, you can use
+ the output file expansions described in "gn help action_foreach" to
+ name the .d file according to the input.
+ The format is that of a Makefile, and all of the paths should be
+ relative to the root build directory.
+
+```
+
+### **Example**
+
+```
+ action_foreach("myscript_target") {
+ script = "myscript.py"
+ sources = [ ... ]
+
+ # Locate the depfile in the output directory named like the
+ # inputs but with a ".d" appended.
+ depfile = "$relative_target_output_dir/{{source_name}}.d"
+
+ # Say our script uses "-o <d file>" to indicate the depfile.
+ args = [ "{{source}}", "-o", depfile ]
+ }
+
+
+```
+## **deps**: Private linked dependencies.
+
+```
+ A list of target labels.
+
+ Specifies private dependencies of a target. Private dependencies are
+  propagated up the dependency tree and linked to dependent targets, but
+ do not grant the ability to include headers from the dependency.
+ Public configs are not forwarded.
+
+```
+
+### **Details of dependency propagation**
+
+```
+ Source sets, shared libraries, and non-complete static libraries
+ will be propagated up the dependency tree across groups, non-complete
+ static libraries and source sets.
+
+ Executables, shared libraries, and complete static libraries will
+ link all propagated targets and stop propagation. Actions and copy
+ steps also stop propagation, allowing them to take a library as an
+  input but not force dependents to link to it.
+
+ Propagation of all_dependent_configs and public_configs happens
+ independently of target type. all_dependent_configs are always
+ propagated across all types of targets, and public_configs
+ are always propagated across public deps of all types of targets.
+
+ Data dependencies are propagated differently. See
+ "gn help data_deps" and "gn help runtime_deps".
+
+ See also "public_deps".
+
+
+```
+## **include_dirs**: Additional include directories.
+
+```
+ A list of source directories.
+
+ The directories in this list will be added to the include path for
+ the files in the affected target.
+
+```
+
+### **Ordering of flags and values**
+
+```
+ 1. Those set on the current target (not in a config).
+ 2. Those set on the "configs" on the target in order that the
+ configs appear in the list.
+ 3. Those set on the "all_dependent_configs" on the target in order
+ that the configs appear in the list.
+ 4. Those set on the "public_configs" on the target in order that
+ those configs appear in the list.
+ 5. all_dependent_configs pulled from dependencies, in the order of
+ the "deps" list. This is done recursively. If a config appears
+         more than once, only the first occurrence will be used.
+ 6. public_configs pulled from dependencies, in the order of the
+ "deps" list. If a dependency is public, they will be applied
+ recursively.
+
+```
+
+### **Example**
+
+```
+ include_dirs = [ "src/include", "//third_party/foo" ]
+
+
+```
+## **inputs**: Additional compile-time dependencies.
+
+```
+ Inputs are compile-time dependencies of the current target. This means
+ that all inputs must be available before compiling any of the sources
+ or executing any actions.
+
+ Inputs are typically only used for action and action_foreach targets.
+
+```
+
+### **Inputs for actions**
+
+```
+ For action and action_foreach targets, inputs should be the inputs to
+    the script that don't vary. These should be all .py files that the script
+ uses via imports (the main script itself will be an implicit dependency
+ of the action so need not be listed).
+
+ For action targets, inputs and sources are treated the same, but from
+ a style perspective, it's recommended to follow the same rule as
+ action_foreach and put helper files in the inputs, and the data used
+ by the script (if any) in sources.
+
+ Note that another way to declare input dependencies from an action
+ is to have the action write a depfile (see "gn help depfile"). This
+ allows the script to dynamically write input dependencies, that might
+ not be known until actually executing the script. This is more
+ efficient than doing processing while running GN to determine the
+ inputs, and is easier to keep in-sync than hardcoding the list.
+
+```
+
+### **Script input gotchas**
+
+```
+ It may be tempting to write a script that enumerates all files in a
+ directory as inputs. Don't do this! Even if you specify all the files
+ in the inputs or sources in the GN target (or worse, enumerate the
+ files in an exec_script call when running GN, which will be slow), the
+ dependencies will be broken.
+
+ The problem happens if a file is ever removed because the inputs are
+ not listed on the command line to the script. Because the script
+ hasn't changed and all inputs are up-to-date, the script will not
+ re-run and you will get a stale build. Instead, either list all
+ inputs on the command line to the script, or if there are many, create
+ a separate list file that the script reads. As long as this file is
+ listed in the inputs, the build will detect when it has changed in any
+ way and the action will re-run.
+
+```
+
+### **Inputs for binary targets**
+
+```
+ Any input dependencies will be resolved before compiling any sources.
+ Normally, all actions that a target depends on will be run before any
+ files in a target are compiled. So if you depend on generated headers,
+ you do not typically need to list them in the inputs section.
+
+ Inputs for binary targets will be treated as order-only dependencies,
+  meaning that they will be forced up-to-date before compiling or linking
+  any files in the target, but changes in the inputs will not
+ necessarily force the target to compile. This is because it is
+ expected that the compiler will report the precise list of input
+ dependencies required to recompile each file once the initial build
+ is done.
+
+```
+
+### **Example**
+
+```
+ action("myscript") {
+ script = "domything.py"
+ inputs = [ "input.data" ]
+ }
+
+
+```
+## **ldflags**: Flags passed to the linker.
+
+```
+ A list of strings.
+
+ These flags are passed on the command-line to the linker and generally
+ specify various linking options. Most targets will not need these and
+ will use "libs" and "lib_dirs" instead.
+
+ ldflags are NOT pushed to dependents, so applying ldflags to source
+ sets or static libraries will be a no-op. If you want to apply ldflags
+ to dependent targets, put them in a config and set it in the
+ all_dependent_configs or public_configs.
+
+```
+
+### **Ordering of flags and values**
+
+```
+ 1. Those set on the current target (not in a config).
+ 2. Those set on the "configs" on the target in order that the
+ configs appear in the list.
+ 3. Those set on the "all_dependent_configs" on the target in order
+ that the configs appear in the list.
+ 4. Those set on the "public_configs" on the target in order that
+ those configs appear in the list.
+ 5. all_dependent_configs pulled from dependencies, in the order of
+ the "deps" list. This is done recursively. If a config appears
+         more than once, only the first occurrence will be used.
+ 6. public_configs pulled from dependencies, in the order of the
+ "deps" list. If a dependency is public, they will be applied
+ recursively.
+
+
+```
+## **lib_dirs**: Additional library directories.
+
+```
+ A list of directories.
+
+ Specifies additional directories passed to the linker for searching
+ for the required libraries. If an item is not an absolute path, it
+ will be treated as being relative to the current build file.
+
+ libs and lib_dirs work differently than other flags in two respects.
+  First, they are inherited across static library boundaries until a
+ shared library or executable target is reached. Second, they are
+ uniquified so each one is only passed once (the first instance of it
+ will be the one used).
+
+```
+
+### **Ordering of flags and values**
+
+```
+ 1. Those set on the current target (not in a config).
+ 2. Those set on the "configs" on the target in order that the
+ configs appear in the list.
+ 3. Those set on the "all_dependent_configs" on the target in order
+ that the configs appear in the list.
+ 4. Those set on the "public_configs" on the target in order that
+ those configs appear in the list.
+ 5. all_dependent_configs pulled from dependencies, in the order of
+ the "deps" list. This is done recursively. If a config appears
+         more than once, only the first occurrence will be used.
+ 6. public_configs pulled from dependencies, in the order of the
+ "deps" list. If a dependency is public, they will be applied
+ recursively.
+
+ For "libs" and "lib_dirs" only, the values propagated from
+ dependencies (as described above) are applied last assuming they
+ are not already in the list.
+
+```
+
+### **Example**
+
+```
+ lib_dirs = [ "/usr/lib/foo", "lib/doom_melon" ]
+
+
+```
+## **libs**: Additional libraries to link.
+
+```
+ A list of library names or library paths.
+
+ These libraries will be linked into the final binary (executable or
+ shared library) containing the current target.
+
+ libs and lib_dirs work differently than other flags in two respects.
+  First, they are inherited across static library boundaries until a
+ shared library or executable target is reached. Second, they are
+ uniquified so each one is only passed once (the first instance of it
+ will be the one used).
+
+```
+
+### **Types of libs**
+
+```
+ There are several different things that can be expressed in libs:
+
+ File paths
+ Values containing '/' will be treated as references to files in
+ the checkout. They will be rebased to be relative to the build
+ directory and specified in the "libs" for linker tools. This
+ facility should be used for libraries that are checked in to the
+ version control. For libraries that are generated by the build,
+ use normal GN deps to link them.
+
+ System libraries
+ Values not containing '/' will be treated as system library names.
+ These will be passed unmodified to the linker and prefixed with
+ the "lib_prefix" attribute of the linker tool. Generally you
+ would set the "lib_dirs" so the given library is found. Your
+ BUILD.gn file should not specify the switch (like "-l"): this
+ will be encoded in the "lib_prefix" of the tool.
+
+ Apple frameworks
+ System libraries ending in ".framework" will be special-cased:
+ the switch "-framework" will be prepended instead of the
+ lib_prefix, and the ".framework" suffix will be trimmed. This is
+ to support the way Mac links framework dependencies.
+
+```
+
+### **Ordering of flags and values**
+
+```
+ 1. Those set on the current target (not in a config).
+ 2. Those set on the "configs" on the target in order that the
+ configs appear in the list.
+ 3. Those set on the "all_dependent_configs" on the target in order
+ that the configs appear in the list.
+ 4. Those set on the "public_configs" on the target in order that
+ those configs appear in the list.
+ 5. all_dependent_configs pulled from dependencies, in the order of
+ the "deps" list. This is done recursively. If a config appears
+         more than once, only the first occurrence will be used.
+ 6. public_configs pulled from dependencies, in the order of the
+ "deps" list. If a dependency is public, they will be applied
+ recursively.
+
+ For "libs" and "lib_dirs" only, the values propagated from
+ dependencies (as described above) are applied last assuming they
+ are not already in the list.
+
+```
+
+### **Examples**
+
+```
+ On Windows:
+ libs = [ "ctl3d.lib" ]
+
+ On Linux:
+ libs = [ "ld" ]
+
+
+```
+## **output_extension**: Value to use for the output's file extension.
+
+```
+ Normally the file extension for a target is based on the target
+ type and the operating system, but in rare cases you will need to
+ override the name (for example to use "libfreetype.so.6" instead
+ of libfreetype.so on Linux).
+
+ This value should not include a leading dot. If undefined, the default
+ specified on the tool will be used. If set to the empty string, no
+ output extension will be used.
+
+ The output_extension will be used to set the "{{output_extension}}"
+ expansion which the linker tool will generally use to specify the
+ output file name. See "gn help tool".
+
+```
+
+### **Example**
+
+```
+ shared_library("freetype") {
+ if (is_linux) {
+ # Call the output "libfreetype.so.6"
+ output_extension = "so.6"
+ }
+ ...
+ }
+
+ # On Windows, generate a "mysettings.cpl" control panel applet.
+ # Control panel applets are actually special shared libraries.
+ if (is_win) {
+ shared_library("mysettings") {
+ output_extension = "cpl"
+ ...
+ }
+ }
+
+
+```
+## **output_name**: Define a name for the output file other than the default.
+
+```
+ Normally the output name of a target will be based on the target name,
+ so the target "//foo/bar:bar_unittests" will generate an output
+ file such as "bar_unittests.exe" (using Windows as an example).
+
+ Sometimes you will want an alternate name to avoid collisions or
+ if the internal name isn't appropriate for public distribution.
+
+ The output name should have no extension or prefixes, these will be
+ added using the default system rules. For example, on Linux an output
+ name of "foo" will produce a shared library "libfoo.so". There
+ is no way to override the output prefix of a linker tool on a per-
+ target basis. If you need more flexibility, create a copy target
+ to produce the file you want.
+
+ This variable is valid for all binary output target types.
+
+```
+
+### **Example**
+
+```
+ static_library("doom_melon") {
+ output_name = "fluffy_bunny"
+ }
+
+
+```
+## **output_prefix_override**: Don't use prefix for output name.
+
+```
+ A boolean that overrides the output prefix for a target. Defaults to
+ false.
+
+ Some systems use prefixes for the names of the final target output
+ file. The normal example is "libfoo.so" on Linux for a target
+ named "foo".
+
+ The output prefix for a given target type is specified on the linker
+ tool (see "gn help tool"). Sometimes this prefix is undesired.
+
+ See also "gn help output_extension".
+
+```
+
+### **Example**
+
+```
+ shared_library("doom_melon") {
+    # Normally this will produce "libdoom_melon.so" on Linux.
+    # Setting this flag will produce "doom_melon.so".
+ output_prefix_override = true
+ ...
+ }
+
+
+```
+## **outputs**: Output files for actions and copy targets.
+
+```
+ Outputs is valid for "copy", "action", and "action_foreach"
+ target types and indicates the resulting files. Outputs must always
+ refer to files in the build directory.
+
+ copy
+ Copy targets should have exactly one entry in the outputs list. If
+ there is exactly one source, this can be a literal file name or a
+ source expansion. If there is more than one source, this must
+ contain a source expansion to map a single input name to a single
+ output name. See "gn help copy".
+
+ action_foreach
+ Action_foreach targets must always use source expansions to map
+ input files to output files. There can be more than one output,
+ which means that each invocation of the script will produce a set of
+ files (presumably based on the name of the input file). See
+ "gn help action_foreach".
+
+ action
+ Action targets (excluding action_foreach) must list literal output
+ file(s) with no source expansions. See "gn help action".
+
+
+```
+## **precompiled_header**: [string] Header file to precompile.
+
+```
+ Precompiled headers will be used when a target specifies this
+ value, or a config applying to this target specifies this value.
+ In addition, the tool corresponding to the source files must also
+ specify precompiled headers (see "gn help tool"). The tool
+ will also specify what type of precompiled headers to use.
+
+ The precompiled header/source variables can be specified on a target
+ or a config, but must be the same for all configs applying to a given
+ target since a target can only have one precompiled header.
+
+```
+
+### **MSVC precompiled headers**
+
+```
+ When using MSVC-style precompiled headers, the "precompiled_header"
+ value is a string corresponding to the header. This is NOT a path
+ to a file that GN recognises, but rather the exact string that appears
+ in quotes after an #include line in source code. The compiler will
+ match this string against includes or forced includes (/FI).
+
+ MSVC also requires a source file to compile the header with. This must
+ be specified by the "precompiled_source" value. In contrast to the
+ header value, this IS a GN-style file name, and tells GN which source
+ file to compile to make the .pch file used for subsequent compiles.
+
+ If you use both C and C++ sources, the precompiled header and source
+ file will be compiled using both tools. You will want to make sure
+ to wrap C++ includes in __cplusplus #ifdefs so the file will compile
+ in C mode.
+
+ For example, if the toolchain specifies MSVC headers:
+
+ toolchain("vc_x64") {
+ ...
+ tool("cxx") {
+ precompiled_header_type = "msvc"
+ ...
+
+ You might make a config like this:
+
+ config("use_precompiled_headers") {
+ precompiled_header = "build/precompile.h"
+ precompiled_source = "//build/precompile.cc"
+
+ # Either your source files should #include "build/precompile.h"
+ # first, or you can do this to force-include the header.
+ cflags = [ "/FI$precompiled_header" ]
+ }
+
+ And then define a target that uses the config:
+
+ executable("doom_melon") {
+ configs += [ ":use_precompiled_headers" ]
+ ...
+
+
+
+```
+## **precompiled_source**: [file name] Source file to precompile.
+
+```
+ The source file that goes along with the precompiled_header when
+ using "msvc"-style precompiled headers. It will be implicitly added
+ to the sources of the target. See "gn help precompiled_header".
+
+
+```
+## **public**: Declare public header files for a target.
+
+```
+ A list of files that other targets can include. These permissions are
+ checked via the "check" command (see "gn help check").
+
+  If no public files are declared, other targets (assuming they have
+  visibility to depend on this target) can include any file in the
+ sources list. If this variable is defined on a target, dependent
+ targets may only include files on this whitelist.
+
+ Header file permissions are also subject to visibility. A target
+ must be visible to another target to include any files from it at all
+ and the public headers indicate which subset of those files are
+ permitted. See "gn help visibility" for more.
+
+ Public files are inherited through the dependency tree. So if there is
+ a dependency A -> B -> C, then A can include C's public headers.
+ However, the same is NOT true of visibility, so unless A is in C's
+ visibility list, the include will be rejected.
+
+ GN only knows about files declared in the "sources" and "public"
+ sections of targets. If a file is included that is not known to the
+ build, it will be allowed.
+
+```
+
+### **Examples**
+
+```
+ These exact files are public:
+ public = [ "foo.h", "bar.h" ]
+
+ No files are public (no targets may include headers from this one):
+ public = []
+
+
+```
+## **public_configs**: Configs to be applied on dependents.
+
+```
+ A list of config labels.
+
+ Targets directly depending on this one will have the configs listed in
+ this variable added to them. These configs will also apply to the
+ current target.
+
+ This addition happens in a second phase once a target and all of its
+ dependencies have been resolved. Therefore, a target will not see
+ these force-added configs in their "configs" variable while the
+  script is running, and they cannot be removed later. As a result, this
+ capability should generally only be used to add defines and include
+ directories necessary to compile a target's headers.
+
+ See also "all_dependent_configs".
+
+```
+
+### **Ordering of flags and values**
+
+```
+ 1. Those set on the current target (not in a config).
+ 2. Those set on the "configs" on the target in order that the
+ configs appear in the list.
+ 3. Those set on the "all_dependent_configs" on the target in order
+ that the configs appear in the list.
+ 4. Those set on the "public_configs" on the target in order that
+ those configs appear in the list.
+ 5. all_dependent_configs pulled from dependencies, in the order of
+ the "deps" list. This is done recursively. If a config appears
+     more than once, only the first occurrence will be used.
+ 6. public_configs pulled from dependencies, in the order of the
+ "deps" list. If a dependency is public, they will be applied
+ recursively.
+
+
+```
+## **public_deps**: Declare public dependencies.
+
+```
+ Public dependencies are like private dependencies (see
+ "gn help deps") but additionally express that the current target
+ exposes the listed deps as part of its public API.
+
+ This has several ramifications:
+
+ - public_configs that are part of the dependency are forwarded
+ to direct dependents.
+
+ - Public headers in the dependency are usable by dependents
+ (includes do not require a direct dependency or visibility).
+
+ - If the current target is a shared library, other shared libraries
+ that it publicly depends on (directly or indirectly) are
+ propagated up the dependency tree to dependents for linking.
+
+```
+
+### **Discussion**
+
+```
+ Say you have three targets: A -> B -> C. C's visibility may allow
+ B to depend on it but not A. Normally, this would prevent A from
+ including any headers from C, and C's public_configs would apply
+ only to B.
+
+ If B lists C in its public_deps instead of regular deps, A will now
+ inherit C's public_configs and the ability to include C's public
+ headers.
+
+ Generally if you are writing a target B and you include C's headers
+ as part of B's public headers, or targets depending on B should
+ consider B and C to be part of a unit, you should use public_deps
+ instead of deps.
+
+```
+
+### **Example**
+
+```
+ # This target can include files from "c" but not from
+ # "super_secret_implementation_details".
+ executable("a") {
+ deps = [ ":b" ]
+ }
+
+ shared_library("b") {
+ deps = [ ":super_secret_implementation_details" ]
+ public_deps = [ ":c" ]
+ }
+
+
+```
+## **response_file_contents**: Contents of a response file for actions.
+
+```
+ Sometimes the arguments passed to a script can be too long for the
+ system's command-line capabilities. This is especially the case on
+ Windows where the maximum command-line length is less than 8K. A
+ response file allows you to pass an unlimited amount of data to a
+ script in a temporary file for an action or action_foreach target.
+
+ If the response_file_contents variable is defined and non-empty, the
+ list will be treated as script args (including possibly substitution
+ patterns) that will be written to a temporary file at build time.
+ The name of the temporary file will be substituted for
+ "{{response_file_name}}" in the script args.
+
+ The response file contents will always be quoted and escaped
+ according to Unix shell rules. To parse the response file, the Python
+ script should use "shlex.split(file_contents)".
+
+```
+
+### **Example**
+
+```
+ action("process_lots_of_files") {
+ script = "process.py",
+ inputs = [ ... huge list of files ... ]
+
+ # Write all the inputs to a response file for the script. Also,
+ # make the paths relative to the script working directory.
+ response_file_contents = rebase_path(inputs, root_build_dir)
+
+ # The script expects the name of the response file in --file-list.
+ args = [
+ "--enable-foo",
+ "--file-list={{response_file_name}}",
+ ]
+ }
+
+
+```
+## **script**: Script file for actions.
+
+```
+ An absolute or buildfile-relative file name of a Python script to run
+  for action and action_foreach targets (see "gn help action" and
+ "gn help action_foreach").
+
+
+```
+## **sources**: Source files for a target
+
+```
+ A list of files. Non-absolute paths will be resolved relative to the
+ current build file.
+
+```
+
+### **Sources for binary targets**
+
+```
+ For binary targets (source sets, executables, and libraries), the
+ known file types will be compiled with the associated tools. Unknown
+ file types and headers will be skipped. However, you should still
+  list all C/C++ header files so GN knows about the existence of those
+ files for the purposes of include checking.
+
+ As a special case, a file ending in ".def" will be treated as a
+ Windows module definition file. It will be appended to the link
+  line with a preceding "/DEF:" string. There must be at most one
+ .def file in a target and they do not cross dependency boundaries
+ (so specifying a .def file in a static library or source set will have
+ no effect on the executable or shared library they're linked into).
+
+```
+
+### **Sources for non-binary targets**
+
+```
+ action_foreach
+ The sources are the set of files that the script will be executed
+ over. The script will run once per file.
+
+ action
+ The sources will be treated the same as inputs. See "gn help inputs"
+ for more information and usage advice.
+
+ copy
+    The sources are the source files to copy.
+
+
+```
+## **testonly**: Declares a target must only be used for testing.
+
+```
+ Boolean. Defaults to false.
+
+ When a target is marked "testonly = true", it must only be depended
+ on by other test-only targets. Otherwise, GN will issue an error
+  that the dependency is not allowed.
+
+ This feature is intended to prevent accidentally shipping test code
+ in a final product.
+
+```
+
+### **Example**
+
+```
+ source_set("test_support") {
+ testonly = true
+ ...
+ }
+
+
+```
+## **visibility**: A list of labels that can depend on a target.
+
+```
+ A list of labels and label patterns that define which targets can
+ depend on the current one. These permissions are checked via the
+ "check" command (see "gn help check").
+
+ If visibility is not defined, it defaults to public ("*").
+
+ If visibility is defined, only the targets with labels that match it
+ can depend on the current target. The empty list means no targets
+ can depend on the current target.
+
+ Tip: Often you will want the same visibility for all targets in a
+ BUILD file. In this case you can just put the definition at the top,
+ outside of any target, and the targets will inherit that scope and see
+ the definition.
+
+```
+
+### **Patterns**
+
+```
+ See "gn help label_pattern" for more details on what types of
+ patterns are supported. If a toolchain is specified, only targets
+ in that toolchain will be matched. If a toolchain is not specified on
+ a pattern, targets in all toolchains will be matched.
+
+```
+
+### **Examples**
+
+```
+ Only targets in the current buildfile ("private"):
+ visibility = [ ":*" ]
+
+ No targets (used for targets that should be leaf nodes):
+ visibility = []
+
+ Any target ("public", the default):
+ visibility = [ "*" ]
+
+ All targets in the current directory and any subdirectory:
+ visibility = [ "./*" ]
+
+ Any target in "//bar/BUILD.gn":
+ visibility = [ "//bar:*" ]
+
+ Any target in "//bar/" or any subdirectory thereof:
+ visibility = [ "//bar/*" ]
+
+ Just these specific targets:
+ visibility = [ ":mything", "//foo:something_else" ]
+
+ Any target in the current directory and any subdirectory thereof, plus
+ any targets in "//bar/" and any subdirectory thereof.
+ visibility = [ "./*", "//bar/*" ]
+
+
+```
+## **write_runtime_deps**: Writes the target's runtime_deps to the given path.
+
+```
+ Does not synchronously write the file, but rather schedules it
+ to be written at the end of generation.
+
+ If the file exists and the contents are identical to that being
+ written, the file will not be updated. This will prevent unnecessary
+ rebuilds of targets that depend on this file.
+
+ Path must be within the output directory.
+
+ See "gn help runtime_deps" for how the runtime dependencies are
+ computed.
+
+ The format of this file will list one file per line with no escaping.
+ The files will be relative to the root_build_dir. The first line of
+ the file will be the main output file of the target itself. The file
+ contents will be the same as requesting the runtime deps be written on
+ the command line (see "gn help --runtime-deps-list-file").
+
+
+```
+## **Build Arguments Overview**
+
+```
+ Build arguments are variables passed in from outside of the build
+ that build files can query to determine how the build works.
+
+```
+
+### **How build arguments are set**
+
+```
+ First, system default arguments are set based on the current system.
+ The built-in arguments are:
+ - host_cpu
+ - host_os
+ - current_cpu
+ - current_os
+ - target_cpu
+ - target_os
+
+ If specified, arguments from the --args command line flag are used. If
+ that flag is not specified, args from previous builds in the build
+ directory will be used (this is in the file args.gn in the build
+ directory).
+
+ Last, for targets being compiled with a non-default toolchain, the
+ toolchain overrides are applied. These are specified in the
+ toolchain_args section of a toolchain definition. The use-case for
+ this is that a toolchain may be building code for a different
+ platform, and that it may want to always specify Posix, for example.
+ See "gn help toolchain_args" for more.
+
+ If you specify an override for a build argument that never appears in
+ a "declare_args" call, a nonfatal error will be displayed.
+
+```
+
+### **Examples**
+
+```
+ gn args out/FooBar
+ Create the directory out/FooBar and open an editor. You would type
+ something like this into that file:
+ enable_doom_melon=false
+ os="android"
+
+ gn gen out/FooBar --args="enable_doom_melon=true os=\"android\""
+ This will overwrite the build directory with the given arguments.
+ (Note that the quotes inside the args command will usually need to
+    be escaped for your shell to pass through string values.)
+
+```
+
+### **How build arguments are used**
+
+```
+ If you want to use an argument, you use declare_args() and specify
+ default values. These default values will apply if none of the steps
+ listed in the "How build arguments are set" section above apply to
+ the given argument, but the defaults will not override any of these.
+
+ Often, the root build config file will declare global arguments that
+ will be passed to all buildfiles. Individual build files can also
+ specify arguments that apply only to those files. It is also useful
+ to specify build args in an "import"-ed file if you want such
+ arguments to apply to multiple buildfiles.
+
+
+```
+## **.gn file**
+
+```
+ When gn starts, it will search the current directory and parent
+ directories for a file called ".gn". This indicates the source root.
+ You can override this detection by using the --root command-line
+  argument.
+
+ The .gn file in the source root will be executed. The syntax is the
+ same as a buildfile, but with very limited build setup-specific
+ meaning.
+
+ If you specify --root, by default GN will look for the file .gn in
+ that directory. If you want to specify a different file, you can
+ additionally pass --dotfile:
+
+ gn gen out/Debug --root=/home/build --dotfile=/home/my_gn_file.gn
+
+```
+
+### **Variables**
+
+```
+ buildconfig [required]
+ Label of the build config file. This file will be used to set up
+ the build file execution environment for each toolchain.
+
+ check_targets [optional]
+ A list of labels and label patterns that should be checked when
+ running "gn check" or "gn gen --check". If unspecified, all
+ targets will be checked. If it is the empty list, no targets will
+ be checked.
+
+ The format of this list is identical to that of "visibility"
+ so see "gn help visibility" for examples.
+
+ exec_script_whitelist [optional]
+ A list of .gn/.gni files (not labels) that have permission to call
+ the exec_script function. If this list is defined, calls to
+ exec_script will be checked against this list and GN will fail if
+ the current file isn't in the list.
+
+      This is to allow the use of exec_script to be restricted since
+      it is easy to use inappropriately. Wildcards are not supported.
+ Files in the secondary_source tree (if defined) should be
+ referenced by ignoring the secondary tree and naming them as if
+ they are in the main tree.
+
+ If unspecified, the ability to call exec_script is unrestricted.
+
+ Example:
+ exec_script_whitelist = [
+ "//base/BUILD.gn",
+ "//build/my_config.gni",
+ ]
+
+ root [optional]
+ Label of the root build target. The GN build will start by loading
+ the build file containing this target name. This defaults to
+ "//:" which will cause the file //BUILD.gn to be loaded.
+
+ secondary_source [optional]
+ Label of an alternate directory tree to find input files. When
+ searching for a BUILD.gn file (or the build config file discussed
+ above), the file will first be looked for in the source root.
+ If it's not found, the secondary source root will be checked
+ (which would contain a parallel directory hierarchy).
+
+ This behavior is intended to be used when BUILD.gn files can't be
+ checked in to certain source directories for whatever reason.
+
+ The secondary source root must be inside the main source tree.
+
+```
+
+### **Example .gn file contents**
+
+```
+ buildconfig = "//build/config/BUILDCONFIG.gn"
+
+ check_targets = [
+ "//doom_melon/*", # Check everything in this subtree.
+ "//tools:mind_controlling_ant", # Check this specific target.
+ ]
+
+ root = "//:root"
+
+ secondary_source = "//build/config/temporary_buildfiles/"
+
+
+```
+## **GN build language grammar**
+
+### **Tokens**
+
+```
+ GN build files are read as sequences of tokens. While splitting the
+ file into tokens, the next token is the longest sequence of characters
+ that form a valid token.
+
+```
+
+### **White space and comments**
+
+```
+ White space is comprised of spaces (U+0020), horizontal tabs (U+0009),
+ carriage returns (U+000D), and newlines (U+000A).
+
+ Comments start at the character "#" and stop at the next newline.
+
+ White space and comments are ignored except that they may separate
+ tokens that would otherwise combine into a single token.
+
+```
+
+### **Identifiers**
+
+```
+ Identifiers name variables and functions.
+
+ identifier = letter { letter | digit } .
+ letter = "A" ... "Z" | "a" ... "z" | "_" .
+ digit = "0" ... "9" .
+
+```
+
+### **Keywords**
+
+```
+ The following keywords are reserved and may not be used as
+ identifiers:
+
+ else false if true
+
+```
+
+### **Integer literals**
+
+```
+ An integer literal represents a decimal integer value.
+
+ integer = [ "-" ] digit { digit } .
+
+ Leading zeros and negative zero are disallowed.
+
+```
+
+### **String literals**
+
+```
+ A string literal represents a string value consisting of the quoted
+ characters with possible escape sequences and variable expansions.
+
+ string = `"` { char | escape | expansion } `"` .
+ escape = `\` ( "$" | `"` | char ) .
+ BracketExpansion = "{" ( identifier | ArrayAccess | ScopeAccess ) "}" .
+ Hex = "0x" [0-9A-Fa-f][0-9A-Fa-f]
+ expansion = "$" ( identifier | BracketExpansion | Hex ) .
+ char = /* any character except "$", `"`, or newline */ .
+
+ After a backslash, certain sequences represent special characters:
+
+ \" U+0022 quotation mark
+ \$ U+0024 dollar sign
+ \\ U+005C backslash
+
+ All other backslashes represent themselves.
+
+ To insert an arbitrary byte value, use $0xFF. For example, to
+ insert a newline character: "Line one$0x0ALine two".
+
+```
+
+### **Punctuation**
+
+```
+ The following character sequences represent punctuation:
+
+ + += == != ( )
+ - -= < <= [ ]
+ ! = > >= { }
+ && || . ,
+
+```
+
+### **Grammar**
+
+```
+ The input tokens form a syntax tree following a context-free grammar:
+
+ File = StatementList .
+
+ Statement = Assignment | Call | Condition .
+ Assignment = identifier AssignOp Expr .
+ Call = identifier "(" [ ExprList ] ")" [ Block ] .
+ Condition = "if" "(" Expr ")" Block
+ [ "else" ( Condition | Block ) ] .
+ Block = "{" StatementList "}" .
+ StatementList = { Statement } .
+
+ ArrayAccess = identifier "[" { identifier | integer } "]" .
+ ScopeAccess = identifier "." identifier .
+ Expr = UnaryExpr | Expr BinaryOp Expr .
+ UnaryExpr = PrimaryExpr | UnaryOp UnaryExpr .
+ PrimaryExpr = identifier | integer | string | Call
+ | ArrayAccess | ScopeAccess
+ | "(" Expr ")"
+ | "[" [ ExprList [ "," ] ] "]" .
+ ExprList = Expr { "," Expr } .
+
+ AssignOp = "=" | "+=" | "-=" .
+ UnaryOp = "!" .
+ BinaryOp = "+" | "-" // highest priority
+ | "<" | "<=" | ">" | ">="
+ | "==" | "!="
+ | "&&"
+ | "||" . // lowest priority
+
+ All binary operators are left-associative.
+
+
+```
+## **input_conversion**: Specifies how to transform input to a variable.
+
+```
+ input_conversion is an argument to read_file and exec_script that
+ specifies how the result of the read operation should be converted
+ into a variable.
+
+ "" (the default)
+ Discard the result and return None.
+
+ "list lines"
+ Return the file contents as a list, with a string for each line.
+ The newlines will not be present in the result. The last line may
+ or may not end in a newline.
+
+ After splitting, each individual line will be trimmed of
+ whitespace on both ends.
+
+ "scope"
+ Execute the block as GN code and return a scope with the
+ resulting values in it. If the input was:
+ a = [ "hello.cc", "world.cc" ]
+ b = 26
+ and you read the result into a variable named "val", then you
+   could access the contents using the "." operator on "val":
+ sources = val.a
+ some_count = val.b
+
+ "string"
+ Return the file contents into a single string.
+
+ "value"
+ Parse the input as if it was a literal rvalue in a buildfile.
+ Examples of typical program output using this mode:
+ [ "foo", "bar" ] (result will be a list)
+ or
+ "foo bar" (result will be a string)
+ or
+ 5 (result will be an integer)
+
+ Note that if the input is empty, the result will be a null value
+ which will produce an error if assigned to a variable.
+
+ "trim ..."
+ Prefixing any of the other transformations with the word "trim"
+ will result in whitespace being trimmed from the beginning and end
+ of the result before processing.
+
+ Examples: "trim string" or "trim list lines"
+
+ Note that "trim value" is useless because the value parser skips
+ whitespace anyway.
+
+
+```
+## **Label patterns**
+
+```
+ A label pattern is a way of expressing one or more labels in a portion
+ of the source tree. They are not general regular expressions.
+
+ They can take the following forms only:
+
+ - Explicit (no wildcard):
+ "//foo/bar:baz"
+ ":baz"
+
+ - Wildcard target names:
+ "//foo/bar:*" (all targets in the //foo/bar/BUILD.gn file)
+ ":*" (all targets in the current build file)
+
+ - Wildcard directory names ("*" is only supported at the end)
+ "*" (all targets)
+ "//foo/bar/*" (all targets in any subdir of //foo/bar)
+ "./*" (all targets in the current build file or sub dirs)
+
+ Any of the above forms can additionally take an explicit toolchain.
+ In this case, the toolchain must be fully qualified (no wildcards
+ are supported in the toolchain name).
+
+    "//foo:bar(//build/toolchain:mac)"
+ An explicit target in an explicit toolchain.
+
+ ":*(//build/toolchain/linux:32bit)"
+ All targets in the current build file using the 32-bit Linux
+ toolchain.
+
+ "//foo/*(//build/toolchain:win)"
+ All targets in //foo and any subdirectory using the Windows
+ toolchain.
+
+
+```
+## **nogncheck**: Skip an include line from checking.
+
+```
+ GN's header checker helps validate that the includes match the build
+ dependency graph. Sometimes an include might be conditional or
+ otherwise problematic, but you want to specifically allow it. In this
+ case, it can be whitelisted.
+
+ Include lines containing the substring "nogncheck" will be excluded
+ from header checking. The most common case is a conditional include:
+
+ #if defined(ENABLE_DOOM_MELON)
+ #include "tools/doom_melon/doom_melon.h" // nogncheck
+ #endif
+
+ If the build file has a conditional dependency on the corresponding
+ target that matches the conditional include, everything will always
+ link correctly:
+
+ source_set("mytarget") {
+ ...
+ if (enable_doom_melon) {
+ defines = [ "ENABLE_DOOM_MELON" ]
+ deps += [ "//tools/doom_melon" ]
+ }
+
+ But GN's header checker does not understand preprocessor directives,
+ won't know it matches the build dependencies, and will flag this
+ include as incorrect when the condition is false.
+
+```
+
+### **More information**
+
+```
+ The topic "gn help check" has general information on how checking
+ works and advice on fixing problems. Targets can also opt-out of
+ checking, see "gn help check_includes".
+
+
+```
+## **Runtime dependencies**
+
+```
+ Runtime dependencies of a target are exposed via the "runtime_deps"
+ category of "gn desc" (see "gn help desc") or they can be written
+ at build generation time via write_runtime_deps(), or
+ --runtime-deps-list-file (see "gn help --runtime-deps-list-file").
+
+ To a first approximation, the runtime dependencies of a target are
+ the set of "data" files, data directories, and the shared libraries
+ from all transitive dependencies. Executables, shared libraries, and
+ loadable modules are considered runtime dependencies of themselves.
+
+```
+
+### **Executables**
+
+```
+ Executable targets and those executable targets' transitive
+ dependencies are not considered unless that executable is listed in
+ "data_deps". Otherwise, GN assumes that the executable (and
+ everything it requires) is a build-time dependency only.
+
+```
+
+### **Actions and copies**
+
+```
+ Action and copy targets that are listed as "data_deps" will have all
+ of their outputs and data files considered as runtime dependencies.
+ Action and copy targets that are "deps" or "public_deps" will have
+ only their data files considered as runtime dependencies. These
+ targets can list an output file in both the "outputs" and "data"
+ lists to force an output file as a runtime dependency in all cases.
+
+ The different rules for deps and data_deps are to express build-time
+ (deps) vs. run-time (data_deps) outputs. If GN counted all build-time
+ copy steps as data dependencies, there would be a lot of extra stuff,
+ and if GN counted all run-time dependencies as regular deps, the
+ build's parallelism would be unnecessarily constrained.
+
+ This rule can sometimes lead to unintuitive results. For example,
+ given the three targets:
+ A --[data_deps]--> B --[deps]--> ACTION
+ GN would say that A does not have runtime deps on the result of the
+ ACTION, which is often correct. But the purpose of the B target might
+  be to collect many actions into one logical unit, and the "data"-ness
+ of A's dependency is lost. Solutions:
+
+   - List the outputs of the action in its data section (if the
+ results of that action are always runtime files).
+ - Have B list the action in data_deps (if the outputs of the actions
+ are always runtime files).
+ - Have B list the action in both deps and data deps (if the outputs
+ might be used in both contexts and you don't care about unnecessary
+ entries in the list of files required at runtime).
+ - Split B into run-time and build-time versions with the appropriate
+ "deps" for each.
+
+```
+
+### **Static libraries and source sets**
+
+```
+ The results of static_library or source_set targets are not considered
+ runtime dependencies since these are assumed to be intermediate
+ targets only. If you need to list a static library as a runtime
+ dependency, you can manually compute the .a/.lib file name for the
+ current platform and list it in the "data" list of a target
+ (possibly on the static library target itself).
+
+```
+
+### **Multiple outputs**
+
+```
+ When a tool produces more than one output, only the first output
+ is considered. For example, a shared library target may produce a
+ .dll and a .lib file on Windows. Only the .dll file will be considered
+  a runtime dependency. This applies only to linker tools; scripts and
+  copy steps with multiple outputs will have all outputs listed.
+
+
+```
+## **How Source Expansion Works**
+
+```
+ Source expansion is used for the action_foreach and copy target types
+ to map source file names to output file names or arguments.
+
+ To perform source expansion in the outputs, GN maps every entry in the
+ sources to every entry in the outputs list, producing the cross
+ product of all combinations, expanding placeholders (see below).
+
+ Source expansion in the args works similarly, but performing the
+ placeholder substitution produces a different set of arguments for
+ each invocation of the script.
+
+ If no placeholders are found, the outputs or args list will be treated
+ as a static list of literal file names that do not depend on the
+ sources.
+
+ See "gn help copy" and "gn help action_foreach" for more on how
+ this is applied.
+
+```
+
+### **Placeholders**
+
+```
+ {{source}}
+ The name of the source file including directory (*). This will
+ generally be used for specifying inputs to a script in the
+ "args" variable.
+ "//foo/bar/baz.txt" => "../../foo/bar/baz.txt"
+
+ {{source_file_part}}
+ The file part of the source including the extension.
+ "//foo/bar/baz.txt" => "baz.txt"
+
+ {{source_name_part}}
+ The filename part of the source file with no directory or
+ extension. This will generally be used for specifying a
+ transformation from a source file to a destination file with the
+ same name but different extension.
+ "//foo/bar/baz.txt" => "baz"
+
+ {{source_dir}}
+ The directory (*) containing the source file with no
+ trailing slash.
+ "//foo/bar/baz.txt" => "../../foo/bar"
+
+ {{source_root_relative_dir}}
+ The path to the source file's directory relative to the source
+ root, with no leading "//" or trailing slashes. If the path is
+ system-absolute, (beginning in a single slash) this will just
+ return the path with no trailing slash. This value will always
+ be the same, regardless of whether it appears in the "outputs"
+ or "args" section.
+ "//foo/bar/baz.txt" => "foo/bar"
+
+ {{source_gen_dir}}
+ The generated file directory (*) corresponding to the source
+ file's path. This will be different than the target's generated
+ file directory if the source file is in a different directory
+ than the BUILD.gn file.
+ "//foo/bar/baz.txt" => "gen/foo/bar"
+
+ {{source_out_dir}}
+ The object file directory (*) corresponding to the source file's
+    path, relative to the build directory. This will be different than
+ the target's out directory if the source file is in a different
+ directory than the build.gn file.
+ "//foo/bar/baz.txt" => "obj/foo/bar"
+
+```
+
+### **(*) Note on directories**
+
+```
+ Paths containing directories (except the source_root_relative_dir)
+ will be different depending on what context the expansion is evaluated
+ in. Generally it should "just work" but it means you can't
+ concatenate strings containing these values with reasonable results.
+
+ Details: source expansions can be used in the "outputs" variable,
+ the "args" variable, and in calls to "process_file_template". The
+ "args" are passed to a script which is run from the build directory,
+  so these directories will be relative to the build directory for the
+ script to find. In the other cases, the directories will be source-
+ absolute (begin with a "//") because the results of those expansions
+ will be handled by GN internally.
+
+```
+
+### **Examples**
+
+```
+ Non-varying outputs:
+ action("hardcoded_outputs") {
+ sources = [ "input1.idl", "input2.idl" ]
+ outputs = [ "$target_out_dir/output1.dat",
+ "$target_out_dir/output2.dat" ]
+ }
+ The outputs in this case will be the two literal files given.
+
+ Varying outputs:
+ action_foreach("varying_outputs") {
+ sources = [ "input1.idl", "input2.idl" ]
+ outputs = [ "{{source_gen_dir}}/{{source_name_part}}.h",
+ "{{source_gen_dir}}/{{source_name_part}}.cc" ]
+ }
+ Performing source expansion will result in the following output names:
+ //out/Debug/obj/mydirectory/input1.h
+ //out/Debug/obj/mydirectory/input1.cc
+ //out/Debug/obj/mydirectory/input2.h
+ //out/Debug/obj/mydirectory/input2.cc
+
+
+```
+**Available global switches
+** Do "gn help --the_switch_you_want_help_on" for more. Individual
+ commands may take command-specific switches not listed here. See the
+ help on your specific command for more.
+
+```
+
+** \--args**: Specifies build arguments overrides.
+** \--color**: Force colored output.
+** \--dotfile**: Override the name of the ".gn" file.
+** \--markdown**: write the output in the Markdown format.
+** \--nocolor**: Force non-colored output.
+** -q**: Quiet mode. Don't print output on success.
+** \--root**: Explicitly specify source root.
+** \--runtime-deps-list-file**: Save runtime dependencies for targets in file.
+** \--threads**: Specify number of worker threads.
+** \--time**: Outputs a summary of how long everything took.
+** \--tracelog**: Writes a Chrome-compatible trace log to the given file.
+** -v**: Verbose logging.
+** \--version**: Prints the GN version number and exits.
+
+```
diff --git a/chromium/tools/gn/docs/standalone.md b/chromium/tools/gn/docs/standalone.md
new file mode 100644
index 00000000000..faeb4260d7d
--- /dev/null
+++ b/chromium/tools/gn/docs/standalone.md
@@ -0,0 +1,41 @@
+# Introduction
+
+This page is about how to design a project that can build independently
+with GN but also be brought into the Chrome build.
+
+GN is in principle no different than GYP in that there is some core
+configuration that must be the same between both the standalone build
+and the Chrome build. However, GN is much more explicit in its naming
+and configuration, so the similarities between the two builds are also
+much more explicit and there is less flexibility in how things are
+configured.
+
+# What you need for a minimal GN build
+
+Requirements:
+
+ * A master build config file. Chrome's is `//build/config/BUILDCONFIG.gn`
+ * A separate build file for the toolchain definition. It's not a good idea
+ to put these in a BUILD.gn file shared with any target definitions for
+ complex reasons. Chrome's are in `//build/toolchain/<platform>/BUILD.gn`.
+ * A `BUILD.gn` file in the root directory. This will be loaded after the
+ build config file to start the build.
+
+You may want a `.gn` file in the root directory. When you run GN it
+recursively looks up the directory tree until it finds this file, and it
+treats the containing directory as the "source root". This file also
+defines the location of the master build config file:
+
+ * See Chrome's `src/.gn` file.
+ * Unlike Chrome, you probably don't need to define a secondary root.
+ * See `gn help dotfile` for more.
+
+Adding a `.gn` file in a repository that is pulled into Chrome means
+that then running GN in your subdirectory will configure a build for
+your subproject rather than for all of Chrome. This could be an
+advantage or a disadvantage.
+
+If you would rather avoid using this file, you can use the command-line
+flags `--root` and `--dotfile` to set these values.
+
+# How the standalone and Chrome builds interact
diff --git a/chromium/tools/gn/docs/style_guide.md b/chromium/tools/gn/docs/style_guide.md
new file mode 100644
index 00000000000..cdb04ff7286
--- /dev/null
+++ b/chromium/tools/gn/docs/style_guide.md
@@ -0,0 +1,214 @@
+# GN Style Guide
+
+[TOC]
+## Naming and ordering within the file
+
+### Location of build files
+
+It usually makes sense to have more build files closer to the code than
+fewer ones at the toplevel (this is in contrast with what we did with
+GYP). This makes things easier to find and owners reviews easier since
+changes are more focused.
+
+### Targets
+
+ * Most BUILD files should have a target with the same name as the
+ directory. This target should be the first target.
+ * Other targets should be in "some order that makes sense." Usually
+ more important targets will be first, and unit tests will follow the
+ corresponding target. If there's no clear ordering, consider
+ alphabetical order.
+ * Test support libraries should be source sets named "test\_support".
+ So "//ui/compositor:test\_support". Test support libraries should
+ include as public deps the non-test-support version of the library
+ so tests need only depend on the test\_support target (rather than
+ both).
+
+Naming advice
+
+ * Targets and configs should be named using lowercase with underscores
+ separating words, unless there is a strong reason to do otherwise.
+ * Source sets, groups, and static libraries do not need globally unique names.
+ Prefer to give such targets short, non-redundant names without worrying
+ about global uniqueness. For example, it looks much better to write a
+ dependency as `"//mojo/public/bindings"` rather than
+ `"//mojo/public/bindings:mojo_bindings"`.
+ * Shared libraries (and by extension, components) must have globally unique
+ output names. Give such targets short non-unique names above, and then
+ provide a globally unique `output_name` for that target.
+ * Executables and tests should be given a globally unique name. Technically
+ only the output names must be unique, but since only the output names
+ appear in the shell and on bots, it's much less confusing if the name
+ matches the other places the executable appears.
+
+### Configs
+
+ * A config associated with a single target should be named the same as
+ the target with `_config` following it.
+ * A config should appear immediately before the corresponding target
+ that uses it.
+
+### Example
+
+Example for the `src/foo/BUILD.gn` file:
+
+```
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Config for foo is named foo_config and immediately precedes it in the file.
+config("foo_config") {
+}
+
+# Target matching path name is the first target.
+executable("foo") {
+}
+
+# Test for foo follows it.
+test("foo_unittests") {
+}
+
+config("bar_config") {
+}
+
+source_set("bar") {
+}
+```
+
+## Ordering within a target
+
+ 1. `output_name` / `visibility` / `testonly`
+ 2. `sources`
+ 3. `cflags`, `include_dirs`, `defines`, `configs` etc. in whatever
+ order makes sense to you.
+ 4. `public_deps`
+ 5. `deps`
+
+### Conditions
+
+Simple conditions affecting just one variable (e.g. adding a single
+source or adding a flag for one particular OS) can go beneath the
+variable they affect. More complicated conditions affecting more than
+one thing should go at the bottom.
+
+Conditions should be written to minimize the number of conditional blocks.
+
+## Formatting and indenting
+
+GN contains a built-in code formatter which defines the formatting style.
+Some additional notes:
+
+ * Variables are `lower_case_with_underscores`
+ * Comments should be complete sentences with periods at the end.
+ * Compiler flags and such should always be commented with what they do
+ and why the flag is needed.
+
+### Sources
+
+Prefer to list sources only once. It is OK to conditionally include sources
+rather than listing them all at the top and then conditionally excluding them
+when they don't apply. Conditional inclusion is often clearer since a file is
+only listed once and it's easier to reason about when reading.
+
+```
+ sources = [
+ "main.cc",
+ ]
+ if (use_aura) {
+ sources += [ "thing_aura.cc" ]
+ }
+ if (use_gtk) {
+ sources += [ "thing_gtk.cc" ]
+ }
+```
+
+### Deps
+
+ * Deps should be in alphabetical order.
+ * Deps within the current file should be written first and not
+ qualified with the file name (just `:foo`).
+ * Other deps should always use fully-qualified path names unless
+ relative ones are required for some reason.
+
+```
+ deps = [
+ ":a_thing",
+ ":mystatic",
+ "//foo/bar:other_thing",
+ "//foo/baz:that_thing",
+ ]
+```
+
+### Import
+
+Use fully-qualified paths for imports:
+
+```
+import("//foo/bar/baz.gni") # Even if this file is in the foo/bar directory
+```
+
+## Usage
+
+Use `source_set` rather than `static_library` unless you have a reason
+to do otherwise. A static library is a standalone library which can be
+slow to generate. A source set just links all the object files from that
+target into the targets depending on it, which saves the "lib" step.
+
+## Build arguments
+
+### Scope
+
+Build arguments should be scoped to a unit of behavior, e.g. enabling a feature.
+Typically an argument would be declared in an imported file to share it with
+the subset of the build that could make use of it.
+
+Chrome has many legacy flags in `//build/config/features.gni`,
+`//build/config/ui.gni`. These locations are deprecated. Feature flags should
+go along with the code for the feature. Many browser-level features can go
+somewhere in `//chrome/` without lower-level code knowing about it. Some
+UI environment flags can go into `//ui/`, and many flags can also go with
+the corresponding code in `//components/`. You can write a `.gni` file in
+components and have build files in chrome or content import it if necessary.
+
+The way to think about things in the `//build` directory is that this is
+DEPSed into various projects like V8 and WebRTC. Build flags specific to
+code outside of the build directory shouldn't be in the build directory, and
+V8 shouldn't get feature defines for Chrome features.
+
+New feature defines should use the buildflag system. See
+`//build/buildflag_header.gni` which allows preprocessor defines to be
+modularized without many of the disadvantages that made us use global defines
+in the past.
+
+### Type
+
+Arguments support all the [GN language types](language.md#Language).
+
+In the vast majority of cases `boolean` is the preferred type, since most
+arguments are enabling or disabling features or includes.
+
+`String`s are typically used for filepaths. They are also used for enumerated
+types, though `integer`s are sometimes used as well.
+
+### Naming conventions
+
+While there are no hard and fast rules around argument naming there are
+many common conventions. If you ever want to see the current list of argument
+names and default values for your current checkout use
+`gn args out/Debug --list --short`.
+
+`use_foo` - indicates dependencies or major codepaths to include (e.g.
+`use_open_ssl`, `use_ozone`, `use_cups`)
+
+`enable_foo` - indicates feature or tools to be enabled (e.g.
+`enable_google_now`, `enable_nacl`, `enable_remoting`, `enable_pdf`)
+
+`disable_foo` - _NOT_ recommended, use `enable_foo` instead with swapped default
+value
+
+`is_foo` - usually a global state descriptor (e.g. `is_chrome_branded`,
+`is_desktop_linux`); poor choice for non-globals
+
+`foo_use_bar` - prefixes can be used to indicate a limited scope for an argument
+(e.g. `rtc_use_h264`, `v8_use_snapshot`)
diff --git a/chromium/tools/gn/docs/update_binaries.md b/chromium/tools/gn/docs/update_binaries.md
new file mode 100644
index 00000000000..738da3c020a
--- /dev/null
+++ b/chromium/tools/gn/docs/update_binaries.md
@@ -0,0 +1,4 @@
+# How to update the [GN binaries](gn.md) that Chromium uses.
+
+Any committer should be able to do a roll by running //tools/gn/bin/roll_gn.py
+on linux or mac.
diff --git a/chromium/tools/gn/eclipse_writer.cc b/chromium/tools/gn/eclipse_writer.cc
new file mode 100644
index 00000000000..ee91f61dfb0
--- /dev/null
+++ b/chromium/tools/gn/eclipse_writer.cc
@@ -0,0 +1,172 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/eclipse_writer.h"
+
+#include <fstream>
+#include <memory>
+
+#include "base/files/file_path.h"
+#include "tools/gn/builder.h"
+#include "tools/gn/config_values_extractors.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/loader.h"
+#include "tools/gn/xml_element_writer.h"
+
+namespace {
+
+// Escapes |unescaped| for use in XML element content.
+std::string EscapeForXML(const std::string& unescaped) {
+ std::string result;
+ result.reserve(unescaped.length());
+ for (const char c : unescaped) {
+ if (c == '<')
+ result += "&lt;";
+ else if (c == '>')
+ result += "&gt;";
+ else if (c == '&')
+ result += "&amp;";
+ else
+ result.push_back(c);
+ }
+ return result;
+}
+
+} // namespace
+
+EclipseWriter::EclipseWriter(const BuildSettings* build_settings,
+ const Builder* builder,
+ std::ostream& out)
+ : build_settings_(build_settings), builder_(builder), out_(out) {
+ languages_.push_back("C++ Source File");
+ languages_.push_back("C Source File");
+ languages_.push_back("Assembly Source File");
+ languages_.push_back("GNU C++");
+ languages_.push_back("GNU C");
+ languages_.push_back("Assembly");
+}
+
+EclipseWriter::~EclipseWriter() {}
+
+// static
+bool EclipseWriter::RunAndWriteFile(
+ const BuildSettings* build_settings,
+ const Builder* builder,
+ Err* err) {
+ base::FilePath file = build_settings->GetFullPath(build_settings->build_dir())
+ .AppendASCII("eclipse-cdt-settings.xml");
+ std::ofstream file_out;
+ file_out.open(FilePathToUTF8(file).c_str(),
+ std::ios_base::out | std::ios_base::binary);
+ if (file_out.fail()) {
+ *err =
+ Err(Location(), "Couldn't open eclipse-cdt-settings.xml for writing");
+ return false;
+ }
+
+ EclipseWriter gen(build_settings, builder, file_out);
+ gen.Run();
+ return true;
+}
+
+void EclipseWriter::Run() {
+ GetAllIncludeDirs();
+ GetAllDefines();
+ WriteCDTSettings();
+}
+
+void EclipseWriter::GetAllIncludeDirs() {
+ std::vector<const Target*> targets = builder_->GetAllResolvedTargets();
+ for (const Target* target : targets) {
+ if (!UsesDefaultToolchain(target))
+ continue;
+
+ for (ConfigValuesIterator it(target); !it.done(); it.Next()) {
+ for (const SourceDir& include_dir : it.cur().include_dirs()) {
+ include_dirs_.insert(
+ FilePathToUTF8(build_settings_->GetFullPath(include_dir)));
+ }
+ }
+ }
+}
+
+void EclipseWriter::GetAllDefines() {
+ std::vector<const Target*> targets = builder_->GetAllResolvedTargets();
+ for (const Target* target : targets) {
+ if (!UsesDefaultToolchain(target))
+ continue;
+
+ for (ConfigValuesIterator it(target); !it.done(); it.Next()) {
+ for (const std::string& define : it.cur().defines()) {
+ size_t equal_pos = define.find('=');
+ std::string define_key;
+ std::string define_value;
+ if (equal_pos == std::string::npos) {
+ define_key = define;
+ } else {
+ define_key = define.substr(0, equal_pos);
+ define_value = define.substr(equal_pos + 1);
+ }
+ defines_[define_key] = define_value;
+ }
+ }
+ }
+}
+
+bool EclipseWriter::UsesDefaultToolchain(const Target* target) const {
+ return target->toolchain()->label() ==
+ builder_->loader()->GetDefaultToolchain();
+}
+
+void EclipseWriter::WriteCDTSettings() {
+ out_ << "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" << std::endl;
+ XmlElementWriter cdt_properties_element(out_, "cdtprojectproperties",
+ XmlAttributes());
+
+ {
+ const char* kIncludesSectionName =
+ "org.eclipse.cdt.internal.ui.wizards.settingswizards.IncludePaths";
+ std::unique_ptr<XmlElementWriter> section_element =
+ cdt_properties_element.SubElement(
+ "section", XmlAttributes("name", kIncludesSectionName));
+
+ section_element->SubElement(
+ "language", XmlAttributes("name", "holder for library settings"));
+
+ for (const std::string& language : languages_) {
+ std::unique_ptr<XmlElementWriter> language_element =
+ section_element->SubElement("language",
+ XmlAttributes("name", language));
+ for (const std::string& include_dir : include_dirs_) {
+ language_element
+ ->SubElement("includepath",
+ XmlAttributes("workspace_path", "false"))
+ ->Text(EscapeForXML(include_dir));
+ }
+ }
+ }
+
+ {
+ const char* kMacrosSectionName =
+ "org.eclipse.cdt.internal.ui.wizards.settingswizards.Macros";
+ std::unique_ptr<XmlElementWriter> section_element =
+ cdt_properties_element.SubElement(
+ "section", XmlAttributes("name", kMacrosSectionName));
+
+ section_element->SubElement(
+ "language", XmlAttributes("name", "holder for library settings"));
+
+ for (const std::string& language : languages_) {
+ std::unique_ptr<XmlElementWriter> language_element =
+ section_element->SubElement("language",
+ XmlAttributes("name", language));
+ for (const auto& key_val : defines_) {
+ std::unique_ptr<XmlElementWriter> macro_element =
+ language_element->SubElement("macro");
+ macro_element->SubElement("name")->Text(EscapeForXML(key_val.first));
+ macro_element->SubElement("value")->Text(EscapeForXML(key_val.second));
+ }
+ }
+ }
+}
diff --git a/chromium/tools/gn/eclipse_writer.h b/chromium/tools/gn/eclipse_writer.h
new file mode 100644
index 00000000000..560b8729e02
--- /dev/null
+++ b/chromium/tools/gn/eclipse_writer.h
@@ -0,0 +1,67 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_ECLIPSE_WRITER_H_
+#define TOOLS_GN_ECLIPSE_WRITER_H_
+
+#include <iosfwd>
+#include <map>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "base/macros.h"
+
+class BuildSettings;
+class Builder;
+class Err;
+class Target;
+
+class EclipseWriter {
+ public:
+ static bool RunAndWriteFile(const BuildSettings* build_settings,
+ const Builder* builder,
+ Err* err);
+
+ private:
+ EclipseWriter(const BuildSettings* build_settings,
+ const Builder* builder,
+ std::ostream& out);
+ ~EclipseWriter();
+
+ void Run();
+
+ // Populates |include_dirs_| with the include dirs of all the targets for the
+ // default toolchain.
+ void GetAllIncludeDirs();
+
+ // Populates |defines_| with the defines of all the targets for the default
+ // toolchain.
+ void GetAllDefines();
+
+ // Returns true if |target| uses the default toolchain.
+ bool UsesDefaultToolchain(const Target* target) const;
+
+ // Writes the XML settings file.
+ void WriteCDTSettings();
+
+ const BuildSettings* build_settings_;
+ const Builder* builder_;
+
+ // The output stream for the settings file.
+ std::ostream& out_;
+
+ // Eclipse languages for which the include dirs and defines apply.
+ std::vector<std::string> languages_;
+
+ // The include dirs of all the targets which use the default toolchain.
+ std::set<std::string> include_dirs_;
+
+ // The defines of all the targets which use the default toolchain.
+ std::map<std::string, std::string> defines_;
+
+ DISALLOW_COPY_AND_ASSIGN(EclipseWriter);
+};
+
+#endif // TOOLS_GN_ECLIPSE_WRITER_H_
diff --git a/chromium/tools/gn/err.cc b/chromium/tools/gn/err.cc
new file mode 100644
index 00000000000..56e93bd5e96
--- /dev/null
+++ b/chromium/tools/gn/err.cc
@@ -0,0 +1,195 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/err.h"
+
+#include <stddef.h>
+
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/string_util.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/standard_out.h"
+#include "tools/gn/tokenizer.h"
+#include "tools/gn/value.h"
+
+namespace {
+
+std::string GetNthLine(const base::StringPiece& data, int n) {
+ size_t line_off = Tokenizer::ByteOffsetOfNthLine(data, n);
+ size_t end = line_off + 1;
+ while (end < data.size() && !Tokenizer::IsNewline(data, end))
+ end++;
+ return data.substr(line_off, end - line_off).as_string();
+}
+
+void FillRangeOnLine(const LocationRange& range, int line_number,
+ std::string* line) {
+ // Only bother if the range's begin or end overlaps the line. If the entire
+ // line is highlighted as a result of this range, it's not very helpful.
+ if (range.begin().line_number() != line_number &&
+ range.end().line_number() != line_number)
+ return;
+
+ // Watch out, the char offsets in the location are 1-based, so we have to
+ // subtract 1.
+ int begin_char;
+ if (range.begin().line_number() < line_number)
+ begin_char = 0;
+ else
+ begin_char = range.begin().column_number() - 1;
+
+ int end_char;
+ if (range.end().line_number() > line_number)
+ end_char = static_cast<int>(line->size()); // Ending is non-inclusive.
+ else
+ end_char = range.end().column_number() - 1;
+
+ CHECK(end_char >= begin_char);
+ CHECK(begin_char >= 0 && begin_char <= static_cast<int>(line->size()));
+ CHECK(end_char >= 0 && end_char <= static_cast<int>(line->size()));
+ for (int i = begin_char; i < end_char; i++)
+ line->at(i) = '-';
+}
+
+// The line length is used to clip the maximum length of the markers we'll
+// make if the error spans more than one line (like unterminated literals).
+void OutputHighlighedPosition(const Location& location,
+ const Err::RangeList& ranges,
+ size_t line_length) {
+ // Make a buffer of the line in spaces.
+ std::string highlight;
+ highlight.resize(line_length);
+ for (size_t i = 0; i < line_length; i++)
+ highlight[i] = ' ';
+
+ // Highlight all the ranges on the line.
+ for (const auto& range : ranges)
+ FillRangeOnLine(range, location.line_number(), &highlight);
+
+ // Allow the marker to be one past the end of the line for marking the end.
+ highlight.push_back(' ');
+ CHECK(location.column_number() - 1 >= 0 &&
+ location.column_number() - 1 < static_cast<int>(highlight.size()));
+ highlight[location.column_number() - 1] = '^';
+
+ // Trim unused spaces from end of line.
+ while (!highlight.empty() && highlight[highlight.size() - 1] == ' ')
+ highlight.resize(highlight.size() - 1);
+
+ highlight += "\n";
+ OutputString(highlight, DECORATION_BLUE);
+}
+
+} // namespace
+
+Err::Err() : has_error_(false) {
+}
+
+Err::Err(const Location& location,
+ const std::string& msg,
+ const std::string& help)
+ : has_error_(true),
+ location_(location),
+ message_(msg),
+ help_text_(help) {
+}
+
+Err::Err(const LocationRange& range,
+ const std::string& msg,
+ const std::string& help)
+ : has_error_(true),
+ location_(range.begin()),
+ message_(msg),
+ help_text_(help) {
+ ranges_.push_back(range);
+}
+
+Err::Err(const Token& token,
+ const std::string& msg,
+ const std::string& help)
+ : has_error_(true),
+ location_(token.location()),
+ message_(msg),
+ help_text_(help) {
+ ranges_.push_back(token.range());
+}
+
+Err::Err(const ParseNode* node,
+ const std::string& msg,
+ const std::string& help_text)
+ : has_error_(true),
+ message_(msg),
+ help_text_(help_text) {
+ // Node will be null in certain tests.
+ if (node) {
+ LocationRange range = node->GetRange();
+ location_ = range.begin();
+ ranges_.push_back(range);
+ }
+}
+
+Err::Err(const Value& value,
+ const std::string msg,
+ const std::string& help_text)
+ : has_error_(true),
+ message_(msg),
+ help_text_(help_text) {
+ if (value.origin()) {
+ LocationRange range = value.origin()->GetRange();
+ location_ = range.begin();
+ ranges_.push_back(range);
+ }
+}
+
+Err::Err(const Err& other) = default;
+
+Err::~Err() {
+}
+
+void Err::PrintToStdout() const {
+ InternalPrintToStdout(false);
+}
+
+void Err::AppendSubErr(const Err& err) {
+ sub_errs_.push_back(err);
+}
+
+void Err::InternalPrintToStdout(bool is_sub_err) const {
+ DCHECK(has_error_);
+
+ if (!is_sub_err)
+ OutputString("ERROR ", DECORATION_RED);
+
+ // File name and location.
+ const InputFile* input_file = location_.file();
+ std::string loc_str = location_.Describe(true);
+ if (!loc_str.empty()) {
+ if (is_sub_err)
+ loc_str.insert(0, "See ");
+ else
+ loc_str.insert(0, "at ");
+ loc_str.append(": ");
+ }
+ OutputString(loc_str + message_ + "\n");
+
+ // Quoted line.
+ if (input_file) {
+ std::string line = GetNthLine(input_file->contents(),
+ location_.line_number());
+ if (!base::ContainsOnlyChars(line, base::kWhitespaceASCII)) {
+ OutputString(line + "\n", DECORATION_DIM);
+ OutputHighlighedPosition(location_, ranges_, line.size());
+ }
+ }
+
+ // Optional help text.
+ if (!help_text_.empty())
+ OutputString(help_text_ + "\n");
+
+ // Sub errors.
+ for (const auto& sub_err : sub_errs_)
+ sub_err.InternalPrintToStdout(true);
+}
diff --git a/chromium/tools/gn/err.h b/chromium/tools/gn/err.h
new file mode 100644
index 00000000000..eeec31abc6e
--- /dev/null
+++ b/chromium/tools/gn/err.h
@@ -0,0 +1,87 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_ERR_H_
+#define TOOLS_GN_ERR_H_
+
+#include <string>
+#include <vector>
+
+#include "tools/gn/location.h"
+#include "tools/gn/token.h"
+
+class ParseNode;
+class Value;
+
+// Result of doing some operation. Check has_error() to see if an error
+// occurred.
+//
+// An error has a location and a message. Below that, is some optional help
+// text to go with the annotation of the location.
+//
+// An error can also have sub-errors which are additionally printed out
+// below. They can provide additional context.
+class Err {
+ public:
+ typedef std::vector<LocationRange> RangeList;
+
+ // Indicates no error.
+ Err();
+
+ // Error at a single point.
+ Err(const Location& location,
+ const std::string& msg,
+ const std::string& help = std::string());
+
+ // Error at a given range.
+ Err(const LocationRange& range,
+ const std::string& msg,
+ const std::string& help = std::string());
+
+ // Error at a given token.
+ Err(const Token& token,
+ const std::string& msg,
+ const std::string& help_text = std::string());
+
+ // Error at a given node.
+ Err(const ParseNode* node,
+ const std::string& msg,
+ const std::string& help_text = std::string());
+
+ // Error at a given value.
+ Err(const Value& value,
+ const std::string msg,
+ const std::string& help_text = std::string());
+
+ Err(const Err& other);
+
+ ~Err();
+
+ bool has_error() const { return has_error_; }
+ const Location& location() const { return location_; }
+ const std::string& message() const { return message_; }
+ const std::string& help_text() const { return help_text_; }
+
+ void AppendRange(const LocationRange& range) { ranges_.push_back(range); }
+ const RangeList& ranges() const { return ranges_; }
+
+ void AppendSubErr(const Err& err);
+
+ void PrintToStdout() const;
+
+ private:
+ void InternalPrintToStdout(bool is_sub_err) const;
+
+ bool has_error_;
+ Location location_;
+
+ std::vector<LocationRange> ranges_;
+
+ std::string message_;
+ std::string help_text_;
+
+ std::vector<Err> sub_errs_;
+};
+
+#endif // TOOLS_GN_ERR_H_
diff --git a/chromium/tools/gn/escape.cc b/chromium/tools/gn/escape.cc
new file mode 100644
index 00000000000..5f275234646
--- /dev/null
+++ b/chromium/tools/gn/escape.cc
@@ -0,0 +1,209 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/escape.h"
+
+#include <stddef.h>
+
+#include "base/containers/stack_container.h"
+#include "base/logging.h"
+#include "build/build_config.h"
+
+namespace {
+
+// A "1" in this lookup table means that char is valid in the Posix shell.
+const char kShellValid[0x80] = {
+// 00-1f: all are invalid
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+// ' ' ! " # $ % & ' ( ) * + , - . /
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1,
+// 0 1 2 3 4 5 6 7 8 9 : ; < = > ?
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 0, 0,
+// @ A B C D E F G H I J K L M N O
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+// P Q R S T U V W X Y Z [ \ ] ^ _
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1,
+// ` a b c d e f g h i j k l m n o
+ 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
+// p q r s t u v w x y z { | } ~
+ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0 };
+
+// Append one character to the given string, escaping it for Ninja.
+//
+// Ninja's escaping rules are very simple. We always escape colons even
+// though they're OK in many places, in case the resulting string is used on
+// the left-hand-side of a rule.
+template<typename DestString>
+inline void NinjaEscapeChar(char ch, DestString* dest) {
+ if (ch == '$' || ch == ' ' || ch == ':')
+ dest->push_back('$');
+ dest->push_back(ch);
+}
+
+template<typename DestString>
+void EscapeStringToString_Ninja(const base::StringPiece& str,
+ const EscapeOptions& options,
+ DestString* dest,
+ bool* needed_quoting) {
+ for (const auto& elem : str)
+ NinjaEscapeChar(elem, dest);
+}
+
+template<typename DestString>
+void EscapeStringToString_NinjaPreformatted(const base::StringPiece& str,
+ DestString* dest) {
+ // Only Ninja-escape $.
+ for (const auto& elem : str) {
+ if (elem == '$')
+ dest->push_back('$');
+ dest->push_back(elem);
+ }
+}
+
+// Escape for CommandLineToArgvW and additionally escape Ninja characters.
+//
+// The basic algorithm is if the string doesn't contain any parse-affecting
+// characters, don't do anything (other than the Ninja processing). If it does,
+// quote the string, and backslash-escape all quotes and backslashes.
+// See:
+// http://blogs.msdn.com/b/twistylittlepassagesallalike/archive/2011/04/23/everyone-quotes-arguments-the-wrong-way.aspx
+// http://blogs.msdn.com/b/oldnewthing/archive/2010/09/17/10063629.aspx
+template<typename DestString>
+void EscapeStringToString_WindowsNinjaFork(const base::StringPiece& str,
+ const EscapeOptions& options,
+ DestString* dest,
+ bool* needed_quoting) {
+ // We assume we don't have any whitespace chars that aren't spaces.
+ DCHECK(str.find_first_of("\r\n\v\t") == std::string::npos);
+
+ if (str.find_first_of(" \"") == std::string::npos) {
+ // Simple case, don't quote.
+ EscapeStringToString_Ninja(str, options, dest, needed_quoting);
+ } else {
+ if (!options.inhibit_quoting)
+ dest->push_back('"');
+
+ for (size_t i = 0; i < str.size(); i++) {
+ // Count backslashes in case they're followed by a quote.
+ size_t backslash_count = 0;
+ while (i < str.size() && str[i] == '\\') {
+ i++;
+ backslash_count++;
+ }
+ if (i == str.size()) {
+ // Backslashes at end of string. Backslash-escape all of them since
+ // they'll be followed by a quote.
+ dest->append(backslash_count * 2, '\\');
+ } else if (str[i] == '"') {
+ // 0 or more backslashes followed by a quote. Backslash-escape the
+ // backslashes, then backslash-escape the quote.
+ dest->append(backslash_count * 2 + 1, '\\');
+ dest->push_back('"');
+ } else {
+ // Non-special Windows character, just escape for Ninja. Also, add any
+ // backslashes we read previously, these are literals.
+ dest->append(backslash_count, '\\');
+ NinjaEscapeChar(str[i], dest);
+ }
+ }
+
+ if (!options.inhibit_quoting)
+ dest->push_back('"');
+ if (needed_quoting)
+ *needed_quoting = true;
+ }
+}
+
+template<typename DestString>
+void EscapeStringToString_PosixNinjaFork(const base::StringPiece& str,
+ const EscapeOptions& options,
+ DestString* dest,
+ bool* needed_quoting) {
+ for (const auto& elem : str) {
+ if (elem == '$' || elem == ' ') {
+ // Space and $ are special to both Ninja and the shell. '$' escape for
+ // Ninja, then backslash-escape for the shell.
+ dest->push_back('\\');
+ dest->push_back('$');
+ dest->push_back(elem);
+ } else if (elem == ':') {
+ // Colon is the only other Ninja special char, which is not special to
+ // the shell.
+ dest->push_back('$');
+ dest->push_back(':');
+ } else if (static_cast<unsigned>(elem) >= 0x80 ||
+ !kShellValid[static_cast<int>(elem)]) {
+ // All other invalid shell chars get backslash-escaped.
+ dest->push_back('\\');
+ dest->push_back(elem);
+ } else {
+ // Everything else is a literal.
+ dest->push_back(elem);
+ }
+ }
+}
+
+template<typename DestString>
+void EscapeStringToString(const base::StringPiece& str,
+ const EscapeOptions& options,
+ DestString* dest,
+ bool* needed_quoting) {
+ switch (options.mode) {
+ case ESCAPE_NONE:
+ dest->append(str.data(), str.size());
+ break;
+ case ESCAPE_NINJA:
+ EscapeStringToString_Ninja(str, options, dest, needed_quoting);
+ break;
+ case ESCAPE_NINJA_COMMAND:
+ switch (options.platform) {
+ case ESCAPE_PLATFORM_CURRENT:
+#if defined(OS_WIN)
+ EscapeStringToString_WindowsNinjaFork(str, options, dest,
+ needed_quoting);
+#else
+ EscapeStringToString_PosixNinjaFork(str, options, dest,
+ needed_quoting);
+#endif
+ break;
+ case ESCAPE_PLATFORM_WIN:
+ EscapeStringToString_WindowsNinjaFork(str, options, dest,
+ needed_quoting);
+ break;
+ case ESCAPE_PLATFORM_POSIX:
+ EscapeStringToString_PosixNinjaFork(str, options, dest,
+ needed_quoting);
+ break;
+ default:
+ NOTREACHED();
+ }
+ break;
+ case ESCAPE_NINJA_PREFORMATTED_COMMAND:
+ EscapeStringToString_NinjaPreformatted(str, dest);
+ break;
+ default:
+ NOTREACHED();
+ }
+}
+
+} // namespace
+
+std::string EscapeString(const base::StringPiece& str,
+ const EscapeOptions& options,
+ bool* needed_quoting) {
+ std::string result;
+ result.reserve(str.size() + 4); // Guess we'll add a couple of extra chars.
+ EscapeStringToString(str, options, &result, needed_quoting);
+ return result;
+}
+
+void EscapeStringToStream(std::ostream& out,
+ const base::StringPiece& str,
+ const EscapeOptions& options) {
+ base::StackString<256> escaped;
+ EscapeStringToString(str, options, &escaped.container(), nullptr);
+ if (!escaped->empty())
+ out.write(escaped->data(), escaped->size());
+}
diff --git a/chromium/tools/gn/escape.h b/chromium/tools/gn/escape.h
new file mode 100644
index 00000000000..838de943709
--- /dev/null
+++ b/chromium/tools/gn/escape.h
@@ -0,0 +1,80 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_ESCAPE_H_
+#define TOOLS_GN_ESCAPE_H_
+
+#include <iosfwd>
+
+#include "base/strings/string_piece.h"
+
+enum EscapingMode {
+ // No escaping.
+ ESCAPE_NONE,
+
+ // Ninja string escaping.
+ ESCAPE_NINJA,
+
+ // For writing commands to ninja files. This assumes the output is "one
+ // thing" like a filename, so will escape or quote spaces as necessary for
+ // both Ninja and the shell to keep that thing together.
+ ESCAPE_NINJA_COMMAND,
+
+  // For writing preformatted shell commands to Ninja files. This assumes the
+  // output already has the proper quoting and may include special shell
+  // characters which we want to pass to the shell (like when writing
+  // tool commands). Only Ninja "$" characters are escaped.
+ ESCAPE_NINJA_PREFORMATTED_COMMAND,
+};
+
+enum EscapingPlatform {
+ // Do escaping for the current platform.
+ ESCAPE_PLATFORM_CURRENT,
+
+ // Force escaping for the given platform.
+ ESCAPE_PLATFORM_POSIX,
+ ESCAPE_PLATFORM_WIN,
+};
+
+struct EscapeOptions {
+ EscapeOptions()
+ : mode(ESCAPE_NONE),
+ platform(ESCAPE_PLATFORM_CURRENT),
+ inhibit_quoting(false) {
+ }
+
+ EscapingMode mode;
+
+ // Controls how "fork" escaping is done. You will generally want to keep the
+ // default "current" platform.
+ EscapingPlatform platform;
+
+ // When the escaping mode is ESCAPE_SHELL, the escaper will normally put
+ // quotes around things with spaces. If this value is set to true, we'll
+ // disable the quoting feature and just add the spaces.
+ //
+ // This mode is for when quoting is done at some higher-level. Defaults to
+ // false. Note that Windows has strange behavior where the meaning of the
+ // backslashes changes according to if it is followed by a quote. The
+ // escaping rules assume that a double-quote will be appended to the result.
+ bool inhibit_quoting;
+};
+
+// Escapes the given input, returning the result.
+//
+// If needed_quoting is non-null, whether the string was or should have been
+// (if inhibit_quoting was set) quoted will be written to it. This value should
+// be initialized to false by the caller and will be written to only if it's
+// true (the common use-case is for chaining calls).
+std::string EscapeString(const base::StringPiece& str,
+ const EscapeOptions& options,
+ bool* needed_quoting);
+
+// Same as EscapeString but writes the results to the given stream, saving a
+// copy.
+void EscapeStringToStream(std::ostream& out,
+ const base::StringPiece& str,
+ const EscapeOptions& options);
+
+#endif // TOOLS_GN_ESCAPE_H_
diff --git a/chromium/tools/gn/escape_unittest.cc b/chromium/tools/gn/escape_unittest.cc
new file mode 100644
index 00000000000..f2b2eae1974
--- /dev/null
+++ b/chromium/tools/gn/escape_unittest.cc
@@ -0,0 +1,60 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/escape.h"
+
+TEST(Escape, Ninja) {
+ EscapeOptions opts;
+ opts.mode = ESCAPE_NINJA;
+ std::string result = EscapeString("asdf: \"$\\bar", opts, nullptr);
+ EXPECT_EQ("asdf$:$ \"$$\\bar", result);
+}
+
+TEST(Escape, WindowsCommand) {
+ EscapeOptions opts;
+ opts.mode = ESCAPE_NINJA_COMMAND;
+ opts.platform = ESCAPE_PLATFORM_WIN;
+
+ // Regular string is passed, even if it has backslashes.
+ EXPECT_EQ("foo\\bar", EscapeString("foo\\bar", opts, nullptr));
+
+  // Spaces mean the string is quoted; normal backslashes untouched.
+ bool needs_quoting = false;
+ EXPECT_EQ("\"foo\\$ bar\"", EscapeString("foo\\ bar", opts, &needs_quoting));
+ EXPECT_TRUE(needs_quoting);
+
+ // Inhibit quoting.
+ needs_quoting = false;
+ opts.inhibit_quoting = true;
+ EXPECT_EQ("foo\\$ bar", EscapeString("foo\\ bar", opts, &needs_quoting));
+ EXPECT_TRUE(needs_quoting);
+ opts.inhibit_quoting = false;
+
+ // Backslashes at the end of the string get escaped.
+ EXPECT_EQ("\"foo$ bar\\\\\\\\\"", EscapeString("foo bar\\\\", opts, nullptr));
+
+  // Backslashes preceding quotes are escaped, and the quote is escaped.
+ EXPECT_EQ("\"foo\\\\\\\"$ bar\"", EscapeString("foo\\\" bar", opts, nullptr));
+}
+
+TEST(Escape, PosixCommand) {
+ EscapeOptions opts;
+ opts.mode = ESCAPE_NINJA_COMMAND;
+ opts.platform = ESCAPE_PLATFORM_POSIX;
+
+ // : and $ ninja escaped with $. Then Shell-escape backslashes and quotes.
+ EXPECT_EQ("a$:\\$ \\\"\\$$\\\\b", EscapeString("a: \"$\\b", opts, nullptr));
+
+ // Some more generic shell chars.
+ EXPECT_EQ("a_\\;\\<\\*b", EscapeString("a_;<*b", opts, nullptr));
+}
+
+TEST(Escape, NinjaPreformatted) {
+ EscapeOptions opts;
+ opts.mode = ESCAPE_NINJA_PREFORMATTED_COMMAND;
+
+ // Only $ is escaped.
+ EXPECT_EQ("a: \"$$\\b<;", EscapeString("a: \"$\\b<;", opts, nullptr));
+}
diff --git a/chromium/tools/gn/example/.gn b/chromium/tools/gn/example/.gn
new file mode 100644
index 00000000000..e5b6d4a3db7
--- /dev/null
+++ b/chromium/tools/gn/example/.gn
@@ -0,0 +1,2 @@
+# The location of the build configuration file.
+buildconfig = "//build/BUILDCONFIG.gn"
diff --git a/chromium/tools/gn/example/BUILD.gn b/chromium/tools/gn/example/BUILD.gn
new file mode 100644
index 00000000000..a18390e6489
--- /dev/null
+++ b/chromium/tools/gn/example/BUILD.gn
@@ -0,0 +1,30 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+executable("hello") {
+ sources = [
+ "hello.cc",
+ ]
+
+ deps = [
+ ":hello_shared",
+ ":hello_static",
+ ]
+}
+
+shared_library("hello_shared") {
+ sources = [
+ "hello_shared.cc",
+ "hello_shared.h",
+ ]
+
+ defines = [ "HELLO_SHARED_IMPLEMENTATION" ]
+}
+
+static_library("hello_static") {
+ sources = [
+ "hello_static.cc",
+ "hello_static.h",
+ ]
+}
diff --git a/chromium/tools/gn/example/README.txt b/chromium/tools/gn/example/README.txt
new file mode 100644
index 00000000000..d0ddeed8b47
--- /dev/null
+++ b/chromium/tools/gn/example/README.txt
@@ -0,0 +1,4 @@
+This is an example directory structure that compiles some simple targets using
+gcc. It is intended to show how to set up a simple GN build.
+
+Don't miss the ".gn" file in this directory!
diff --git a/chromium/tools/gn/example/build/BUILD.gn b/chromium/tools/gn/example/build/BUILD.gn
new file mode 100644
index 00000000000..8eae46a1aad
--- /dev/null
+++ b/chromium/tools/gn/example/build/BUILD.gn
@@ -0,0 +1,19 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+config("compiler_defaults") {
+ if (current_os == "linux") {
+ cflags = [
+ "-fPIC",
+ "-pthread",
+ ]
+ }
+}
+
+config("executable_ldconfig") {
+ ldflags = [
+ "-Wl,-rpath=\$ORIGIN/",
+ "-Wl,-rpath-link=",
+ ]
+}
diff --git a/chromium/tools/gn/example/build/BUILDCONFIG.gn b/chromium/tools/gn/example/build/BUILDCONFIG.gn
new file mode 100644
index 00000000000..e419fd96a8a
--- /dev/null
+++ b/chromium/tools/gn/example/build/BUILDCONFIG.gn
@@ -0,0 +1,38 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (target_os == "") {
+ target_os = host_os
+}
+if (target_cpu == "") {
+ target_cpu = host_cpu
+}
+if (current_cpu == "") {
+ current_cpu = target_cpu
+}
+if (current_os == "") {
+ current_os = target_os
+}
+
+# All binary targets will get this list of configs by default.
+_shared_binary_target_configs = [ "//build:compiler_defaults" ]
+
+# Apply that default list to the binary target types.
+set_defaults("executable") {
+ configs = _shared_binary_target_configs
+
+ # Executables get this additional configuration.
+ configs += [ "//build:executable_ldconfig" ]
+}
+set_defaults("static_library") {
+ configs = _shared_binary_target_configs
+}
+set_defaults("shared_library") {
+ configs = _shared_binary_target_configs
+}
+set_defaults("source_set") {
+ configs = _shared_binary_target_configs
+}
+
+set_default_toolchain("//build/toolchain:gcc")
diff --git a/chromium/tools/gn/example/build/toolchain/BUILD.gn b/chromium/tools/gn/example/build/toolchain/BUILD.gn
new file mode 100644
index 00000000000..77e33fecf61
--- /dev/null
+++ b/chromium/tools/gn/example/build/toolchain/BUILD.gn
@@ -0,0 +1,80 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+toolchain("gcc") {
+ tool("cc") {
+ depfile = "{{output}}.d"
+ command = "gcc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "CC {{output}}"
+ outputs = [
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+ ]
+ }
+
+ tool("cxx") {
+ depfile = "{{output}}.d"
+ command = "g++ -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}"
+ depsformat = "gcc"
+ description = "CXX {{output}}"
+ outputs = [
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o",
+ ]
+ }
+
+ tool("alink") {
+ rspfile = "{{output}}.rsp"
+ command = "rm -f {{output}} && ar rcs {{output}} @$rspfile"
+ description = "AR {{target_output_name}}{{output_extension}}"
+ rspfile_content = "{{inputs}}"
+ outputs = [
+ "{{target_out_dir}}/{{target_output_name}}{{output_extension}}",
+ ]
+ default_output_extension = ".a"
+ output_prefix = "lib"
+ }
+
+ tool("solink") {
+ soname = "{{target_output_name}}{{output_extension}}" # e.g. "libfoo.so".
+ rspfile = soname + ".rsp"
+
+ command = "g++ -shared {{ldflags}} -o $soname -Wl,-soname=$soname @$rspfile"
+ rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive {{libs}}"
+
+ description = "SOLINK $soname"
+
+ # Use this for {{output_extension}} expansions unless a target manually
+ # overrides it (in which case {{output_extension}} will be what the target
+ # specifies).
+ default_output_extension = ".so"
+
+ outputs = [
+ soname,
+ ]
+ link_output = soname
+ depend_output = soname
+ output_prefix = "lib"
+ }
+
+ tool("link") {
+ outfile = "{{target_output_name}}{{output_extension}}"
+ rspfile = "$outfile.rsp"
+ command = "g++ {{ldflags}} -o $outfile -Wl,--start-group @$rspfile {{solibs}} -Wl,--end-group {{libs}}"
+ description = "LINK $outfile"
+ rspfile_content = "{{inputs}}"
+ outputs = [
+ outfile,
+ ]
+ }
+
+ tool("stamp") {
+ command = "touch {{output}}"
+ description = "STAMP {{output}}"
+ }
+
+ tool("copy") {
+ command = "cp -af {{source}} {{output}}"
+ description = "COPY {{source}} {{output}}"
+ }
+}
diff --git a/chromium/tools/gn/example/hello.cc b/chromium/tools/gn/example/hello.cc
new file mode 100644
index 00000000000..c4aa4482949
--- /dev/null
+++ b/chromium/tools/gn/example/hello.cc
@@ -0,0 +1,13 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+#include "hello_shared.h"
+#include "hello_static.h"
+
+int main(int argc, char* argv[]) {
+ printf("%s, %s\n", GetStaticText(), GetSharedText());
+ return 0;
+}
diff --git a/chromium/tools/gn/example/hello_shared.cc b/chromium/tools/gn/example/hello_shared.cc
new file mode 100644
index 00000000000..58be84c27b4
--- /dev/null
+++ b/chromium/tools/gn/example/hello_shared.cc
@@ -0,0 +1,9 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "hello_shared.h"
+
+const char* GetSharedText() {
+ return "world";
+}
diff --git a/chromium/tools/gn/example/hello_shared.h b/chromium/tools/gn/example/hello_shared.h
new file mode 100644
index 00000000000..7af804b3be4
--- /dev/null
+++ b/chromium/tools/gn/example/hello_shared.h
@@ -0,0 +1,32 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_EXAMPLE_HELLO_SHARED_H_
+#define TOOLS_GN_EXAMPLE_HELLO_SHARED_H_
+
+#if defined(WIN32)
+
+#if defined(HELLO_SHARED_IMPLEMENTATION)
+#define HELLO_EXPORT __declspec(dllexport)
+#define HELLO_EXPORT_PRIVATE __declspec(dllexport)
+#else
+#define HELLO_EXPORT __declspec(dllimport)
+#define HELLO_EXPORT_PRIVATE __declspec(dllimport)
+#endif // defined(HELLO_SHARED_IMPLEMENTATION)
+
+#else
+
+#if defined(HELLO_SHARED_IMPLEMENTATION)
+#define HELLO_EXPORT __attribute__((visibility("default")))
+#define HELLO_EXPORT_PRIVATE __attribute__((visibility("default")))
+#else
+#define HELLO_EXPORT
+#define HELLO_EXPORT_PRIVATE
+#endif // defined(HELLO_SHARED_IMPLEMENTATION)
+
+#endif
+
+HELLO_EXPORT const char* GetSharedText();
+
+#endif // TOOLS_GN_EXAMPLE_HELLO_SHARED_H_
diff --git a/chromium/tools/gn/example/hello_static.cc b/chromium/tools/gn/example/hello_static.cc
new file mode 100644
index 00000000000..cdf4e67b05c
--- /dev/null
+++ b/chromium/tools/gn/example/hello_static.cc
@@ -0,0 +1,9 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "hello_static.h"
+
+const char* GetStaticText() {
+ return "Hello";
+}
diff --git a/chromium/tools/gn/example/hello_static.h b/chromium/tools/gn/example/hello_static.h
new file mode 100644
index 00000000000..f15a6336d2a
--- /dev/null
+++ b/chromium/tools/gn/example/hello_static.h
@@ -0,0 +1,10 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_EXAMPLE_HELLO_STATIC_H_
+#define TOOLS_GN_EXAMPLE_HELLO_STATIC_H_
+
+const char* GetStaticText();
+
+#endif // TOOLS_GN_EXAMPLE_HELLO_STATIC_H_
diff --git a/chromium/tools/gn/exec_process.cc b/chromium/tools/gn/exec_process.cc
new file mode 100644
index 00000000000..8a47fbe2053
--- /dev/null
+++ b/chromium/tools/gn/exec_process.cc
@@ -0,0 +1,260 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/exec_process.h"
+
+#include <stddef.h>
+
+#include <memory>
+
+#include "base/command_line.h"
+#include "base/files/file_util.h"
+#include "base/logging.h"
+#include "base/process/kill.h"
+#include "base/process/launch.h"
+#include "base/process/process.h"
+#include "build/build_config.h"
+
+#if defined(OS_WIN)
+#include <windows.h>
+
+#include "base/win/scoped_handle.h"
+#include "base/win/scoped_process_information.h"
+#else
+#include <errno.h>
+#include <fcntl.h>
+#include <unistd.h>
+
+#include "base/posix/eintr_wrapper.h"
+#include "base/posix/file_descriptor_shuffle.h"
+#endif
+
+namespace internal {
+
+#if defined(OS_WIN)
+bool ExecProcess(const base::CommandLine& cmdline,
+ const base::FilePath& startup_dir,
+ std::string* std_out,
+ std::string* std_err,
+ int* exit_code) {
+ SECURITY_ATTRIBUTES sa_attr;
+ // Set the bInheritHandle flag so pipe handles are inherited.
+ sa_attr.nLength = sizeof(SECURITY_ATTRIBUTES);
+ sa_attr.bInheritHandle = TRUE;
+ sa_attr.lpSecurityDescriptor = nullptr;
+
+ // Create the pipe for the child process's STDOUT.
+ HANDLE out_read = nullptr;
+ HANDLE out_write = nullptr;
+ if (!CreatePipe(&out_read, &out_write, &sa_attr, 0)) {
+ NOTREACHED() << "Failed to create pipe";
+ return false;
+ }
+ base::win::ScopedHandle scoped_out_read(out_read);
+ base::win::ScopedHandle scoped_out_write(out_write);
+
+ // Create the pipe for the child process's STDERR.
+ HANDLE err_read = nullptr;
+ HANDLE err_write = nullptr;
+ if (!CreatePipe(&err_read, &err_write, &sa_attr, 0)) {
+ NOTREACHED() << "Failed to create pipe";
+ return false;
+ }
+ base::win::ScopedHandle scoped_err_read(err_read);
+ base::win::ScopedHandle scoped_err_write(err_write);
+
+ // Ensure the read handle to the pipe for STDOUT/STDERR is not inherited.
+ if (!SetHandleInformation(out_read, HANDLE_FLAG_INHERIT, 0)) {
+ NOTREACHED() << "Failed to disabled pipe inheritance";
+ return false;
+ }
+ if (!SetHandleInformation(err_read, HANDLE_FLAG_INHERIT, 0)) {
+ NOTREACHED() << "Failed to disabled pipe inheritance";
+ return false;
+ }
+
+ base::FilePath::StringType cmdline_str(cmdline.GetCommandLineString());
+
+ STARTUPINFO start_info = {};
+
+ start_info.cb = sizeof(STARTUPINFO);
+ start_info.hStdOutput = out_write;
+ // Keep the normal stdin.
+ start_info.hStdInput = GetStdHandle(STD_INPUT_HANDLE);
+ // FIXME(brettw) set stderr here when we actually read it below.
+ //start_info.hStdError = err_write;
+ start_info.hStdError = GetStdHandle(STD_ERROR_HANDLE);
+ start_info.dwFlags |= STARTF_USESTDHANDLES;
+
+ // Create the child process.
+ PROCESS_INFORMATION temp_process_info = {};
+ if (!CreateProcess(nullptr,
+ &cmdline_str[0],
+ nullptr, nullptr,
+ TRUE, // Handles are inherited.
+ 0, nullptr,
+ startup_dir.value().c_str(),
+ &start_info, &temp_process_info)) {
+ return false;
+ }
+ base::win::ScopedProcessInformation proc_info(temp_process_info);
+
+ // Close our writing end of pipes now. Otherwise later read would not be able
+ // to detect end of child's output.
+ scoped_out_write.Close();
+ scoped_err_write.Close();
+
+ // Read output from the child process's pipe for STDOUT
+ const int kBufferSize = 1024;
+ char buffer[kBufferSize];
+
+ // FIXME(brettw) read from stderr here! This is complicated because we want
+ // to read both of them at the same time, probably need overlapped I/O.
+ // Also uncomment start_info code above.
+ for (;;) {
+ DWORD bytes_read = 0;
+ BOOL success =
+ ReadFile(out_read, buffer, kBufferSize, &bytes_read, nullptr);
+ if (!success || bytes_read == 0)
+ break;
+ std_out->append(buffer, bytes_read);
+ }
+
+ // Let's wait for the process to finish.
+ WaitForSingleObject(proc_info.process_handle(), INFINITE);
+
+ DWORD dw_exit_code;
+ GetExitCodeProcess(proc_info.process_handle(), &dw_exit_code);
+ *exit_code = static_cast<int>(dw_exit_code);
+
+ return true;
+}
+#else
+// Reads from the provided file descriptor and appends to output. Returns false
+// if the fd is closed or there is an unexpected error (not
+// EINTR/EAGAIN/EWOULDBLOCK).
+bool ReadFromPipe(int fd, std::string* output) {
+ char buffer[256];
+ int bytes_read = HANDLE_EINTR(read(fd, buffer, sizeof(buffer)));
+ if (bytes_read == -1) {
+ return errno == EAGAIN || errno == EWOULDBLOCK;
+ } else if (bytes_read <= 0) {
+ return false;
+ }
+ output->append(buffer, bytes_read);
+ return true;
+}
+
+bool ExecProcess(const base::CommandLine& cmdline,
+ const base::FilePath& startup_dir,
+ std::string* std_out,
+ std::string* std_err,
+ int* exit_code) {
+ *exit_code = EXIT_FAILURE;
+
+ std::vector<std::string> argv = cmdline.argv();
+
+ int out_fd[2], err_fd[2];
+ pid_t pid;
+ base::InjectiveMultimap fd_shuffle1, fd_shuffle2;
+ std::unique_ptr<char* []> argv_cstr(new char*[argv.size() + 1]);
+
+ fd_shuffle1.reserve(3);
+ fd_shuffle2.reserve(3);
+
+ if (pipe(out_fd) < 0)
+ return false;
+ base::ScopedFD out_read(out_fd[0]), out_write(out_fd[1]);
+
+ if (pipe(err_fd) < 0)
+ return false;
+ base::ScopedFD err_read(err_fd[0]), err_write(err_fd[1]);
+
+ if (out_read.get() >= FD_SETSIZE || err_read.get() >= FD_SETSIZE)
+ return false;
+
+ switch (pid = fork()) {
+ case -1: // error
+ return false;
+ case 0: // child
+ {
+ // DANGER: no calls to malloc are allowed from now on:
+ // http://crbug.com/36678
+ //
+ // STL iterators are also not allowed (including those implied
+ // by range-based for loops), since debug iterators use locks.
+
+ // Obscure fork() rule: in the child, if you don't end up doing exec*(),
+ // you call _exit() instead of exit(). This is because _exit() does not
+ // call any previously-registered (in the parent) exit handlers, which
+ // might do things like block waiting for threads that don't even exist
+ // in the child.
+ int dev_null = open("/dev/null", O_WRONLY);
+ if (dev_null < 0)
+ _exit(127);
+
+ fd_shuffle1.push_back(
+ base::InjectionArc(out_write.get(), STDOUT_FILENO, true));
+ fd_shuffle1.push_back(
+ base::InjectionArc(err_write.get(), STDERR_FILENO, true));
+ fd_shuffle1.push_back(
+ base::InjectionArc(dev_null, STDIN_FILENO, true));
+      // Adding another element here? Remember to increase the argument to
+ // reserve(), above.
+
+ // DANGER: Do NOT convert to range-based for loop!
+ for (size_t i = 0; i < fd_shuffle1.size(); ++i)
+ fd_shuffle2.push_back(fd_shuffle1[i]);
+
+ if (!ShuffleFileDescriptors(&fd_shuffle1))
+ _exit(127);
+
+ base::SetCurrentDirectory(startup_dir);
+
+ // TODO(brettw) the base version GetAppOutput does a
+ // CloseSuperfluousFds call here. Do we need this?
+
+ // DANGER: Do NOT convert to range-based for loop!
+ for (size_t i = 0; i < argv.size(); i++)
+ argv_cstr[i] = const_cast<char*>(argv[i].c_str());
+ argv_cstr[argv.size()] = nullptr;
+ execvp(argv_cstr[0], argv_cstr.get());
+ _exit(127);
+ }
+ default: // parent
+ {
+ // Close our writing end of pipe now. Otherwise later read would not
+ // be able to detect end of child's output (in theory we could still
+ // write to the pipe).
+ out_write.reset();
+ err_write.reset();
+
+ bool out_open = true, err_open = true;
+ while (out_open || err_open) {
+ fd_set read_fds;
+ FD_ZERO(&read_fds);
+ FD_SET(out_read.get(), &read_fds);
+ FD_SET(err_read.get(), &read_fds);
+ int res =
+ HANDLE_EINTR(select(std::max(out_read.get(), err_read.get()) + 1,
+ &read_fds, nullptr, nullptr, nullptr));
+ if (res <= 0)
+ break;
+ if (FD_ISSET(out_read.get(), &read_fds))
+ out_open = ReadFromPipe(out_read.get(), std_out);
+ if (FD_ISSET(err_read.get(), &read_fds))
+ err_open = ReadFromPipe(err_read.get(), std_err);
+ }
+
+ base::Process process(pid);
+ return process.WaitForExit(exit_code);
+ }
+ }
+
+ return false;
+}
+#endif
+
+} // namespace internal
+
diff --git a/chromium/tools/gn/exec_process.h b/chromium/tools/gn/exec_process.h
new file mode 100644
index 00000000000..4d010d519f9
--- /dev/null
+++ b/chromium/tools/gn/exec_process.h
@@ -0,0 +1,25 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_EXEC_PROCESS_H_
+#define TOOLS_GN_EXEC_PROCESS_H_
+
+#include <string>
+
+namespace base {
+class CommandLine;
+class FilePath;
+}
+
+namespace internal {
+
+bool ExecProcess(const base::CommandLine& cmdline,
+ const base::FilePath& startup_dir,
+ std::string* std_out,
+ std::string* std_err,
+ int* exit_code);
+
+} // namespace internal
+
+#endif // TOOLS_GN_EXEC_PROCESS_H_
diff --git a/chromium/tools/gn/exec_process_unittest.cc b/chromium/tools/gn/exec_process_unittest.cc
new file mode 100644
index 00000000000..70a208c923d
--- /dev/null
+++ b/chromium/tools/gn/exec_process_unittest.cc
@@ -0,0 +1,130 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/exec_process.h"
+
+#include "base/command_line.h"
+#include "base/files/scoped_temp_dir.h"
+#include "base/strings/string_util.h"
+#include "build/build_config.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+#if defined(OS_WIN)
+#include "base/strings/utf_string_conversions.h"
+#endif
+
+namespace internal {
+
+// TODO(cjhopman): Enable these tests when windows ExecProcess handles stderr.
+// 'python' is not runnable on Windows. Adding ["cmd", "/c"] fails because
+// CommandLine does unusual reordering of args.
+#if !defined(OS_WIN)
+namespace {
+bool ExecPython(const std::string& command,
+ std::string* std_out,
+ std::string* std_err,
+ int* exit_code) {
+ base::ScopedTempDir temp_dir;
+ base::CommandLine::StringVector args;
+#if defined(OS_WIN)
+ args.push_back(L"python");
+ args.push_back(L"-c");
+ args.push_back(base::UTF8ToUTF16(command));
+#else
+ args.push_back("python");
+ args.push_back("-c");
+ args.push_back(command);
+#endif
+ return ExecProcess(
+ base::CommandLine(args), temp_dir.path(), std_out, std_err, exit_code);
+}
+} // namespace
+
+TEST(ExecProcessTest, TestExitCode) {
+ std::string std_out, std_err;
+ int exit_code;
+
+ ASSERT_TRUE(
+ ExecPython("import sys; sys.exit(0)", &std_out, &std_err, &exit_code));
+ EXPECT_EQ(0, exit_code);
+
+ ASSERT_TRUE(
+ ExecPython("import sys; sys.exit(1)", &std_out, &std_err, &exit_code));
+ EXPECT_EQ(1, exit_code);
+
+ ASSERT_TRUE(
+ ExecPython("import sys; sys.exit(253)", &std_out, &std_err, &exit_code));
+ EXPECT_EQ(253, exit_code);
+
+ ASSERT_TRUE(
+ ExecPython("throw Exception()", &std_out, &std_err, &exit_code));
+ EXPECT_EQ(1, exit_code);
+}
+
+// Test that large output is handled correctly. There are various ways that this
+// could potentially fail. For example, non-blocking Linux pipes have a 65536
+// byte buffer and, if stdout is non-blocking, python will throw an IOError when
+// a write exceeds the buffer size.
+TEST(ExecProcessTest, TestLargeOutput) {
+ base::ScopedTempDir temp_dir;
+ std::string std_out, std_err;
+ int exit_code;
+
+ ASSERT_TRUE(ExecPython(
+ "import sys; print 'o' * 1000000", &std_out, &std_err, &exit_code));
+ EXPECT_EQ(0, exit_code);
+ EXPECT_EQ(1000001u, std_out.size());
+}
+
+TEST(ExecProcessTest, TestStdoutAndStderrOutput) {
+ base::ScopedTempDir temp_dir;
+ std::string std_out, std_err;
+ int exit_code;
+
+ ASSERT_TRUE(ExecPython(
+ "import sys; print 'o' * 10000; print >>sys.stderr, 'e' * 10000",
+ &std_out,
+ &std_err,
+ &exit_code));
+ EXPECT_EQ(0, exit_code);
+ EXPECT_EQ(10001u, std_out.size());
+ EXPECT_EQ(10001u, std_err.size());
+
+ std_out.clear();
+ std_err.clear();
+ ASSERT_TRUE(ExecPython(
+ "import sys; print >>sys.stderr, 'e' * 10000; print 'o' * 10000",
+ &std_out,
+ &std_err,
+ &exit_code));
+ EXPECT_EQ(0, exit_code);
+ EXPECT_EQ(10001u, std_out.size());
+ EXPECT_EQ(10001u, std_err.size());
+}
+
+TEST(ExecProcessTest, TestOneOutputClosed) {
+ std::string std_out, std_err;
+ int exit_code;
+
+ ASSERT_TRUE(ExecPython("import sys; sys.stderr.close(); print 'o' * 10000",
+ &std_out,
+ &std_err,
+ &exit_code));
+ EXPECT_EQ(0, exit_code);
+ EXPECT_EQ(10001u, std_out.size());
+ EXPECT_EQ(std_err.size(), 0u);
+
+ std_out.clear();
+ std_err.clear();
+ ASSERT_TRUE(ExecPython(
+ "import sys; sys.stdout.close(); print >>sys.stderr, 'e' * 10000",
+ &std_out,
+ &std_err,
+ &exit_code));
+ EXPECT_EQ(0, exit_code);
+ EXPECT_EQ(0u, std_out.size());
+ EXPECT_EQ(10001u, std_err.size());
+}
+#endif
+} // namespace internal
diff --git a/chromium/tools/gn/filesystem_utils.cc b/chromium/tools/gn/filesystem_utils.cc
new file mode 100644
index 00000000000..facf442e88d
--- /dev/null
+++ b/chromium/tools/gn/filesystem_utils.cc
@@ -0,0 +1,937 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/filesystem_utils.h"
+
+#include <algorithm>
+
+#include "base/files/file_util.h"
+#include "base/logging.h"
+#include "base/strings/string_util.h"
+#include "base/strings/utf_string_conversions.h"
+#include "build/build_config.h"
+#include "tools/gn/location.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/source_dir.h"
+
+namespace {
+
+enum DotDisposition {
+ // The given dot is just part of a filename and is not special.
+ NOT_A_DIRECTORY,
+
+ // The given dot is the current directory.
+ DIRECTORY_CUR,
+
+ // The given dot is the first of a double dot that should take us up one.
+ DIRECTORY_UP
+};
+
+// When we find a dot, this function is called with the character following
+// that dot to see what it is. The return value indicates what type this dot is
+// (see above). This code handles the case where the dot is at the end of the
+// input.
+//
+// |*consumed_len| will contain the number of characters in the input that
+// express what we found.
+DotDisposition ClassifyAfterDot(const std::string& path,
+ size_t after_dot,
+ size_t* consumed_len) {
+ if (after_dot == path.size()) {
+ // Single dot at the end.
+ *consumed_len = 1;
+ return DIRECTORY_CUR;
+ }
+ if (IsSlash(path[after_dot])) {
+ // Single dot followed by a slash.
+ *consumed_len = 2; // Consume the slash
+ return DIRECTORY_CUR;
+ }
+
+ if (path[after_dot] == '.') {
+ // Two dots.
+ if (after_dot + 1 == path.size()) {
+ // Double dot at the end.
+ *consumed_len = 2;
+ return DIRECTORY_UP;
+ }
+ if (IsSlash(path[after_dot + 1])) {
+      // Double dot followed by a slash.
+ *consumed_len = 3;
+ return DIRECTORY_UP;
+ }
+ }
+
+ // The dots are followed by something else, not a directory.
+ *consumed_len = 1;
+ return NOT_A_DIRECTORY;
+}
+
+#if defined(OS_WIN)
+inline char NormalizeWindowsPathChar(char c) {
+ if (c == '/')
+ return '\\';
+ return base::ToLowerASCII(c);
+}
+
+// Attempts to do a case and slash-insensitive comparison of two 8-bit Windows
+// paths.
+bool AreAbsoluteWindowsPathsEqual(const base::StringPiece& a,
+ const base::StringPiece& b) {
+ if (a.size() != b.size())
+ return false;
+
+ // For now, just do a case-insensitive ASCII comparison. We could convert to
+ // UTF-16 and use ICU if necessary.
+ for (size_t i = 0; i < a.size(); i++) {
+ if (NormalizeWindowsPathChar(a[i]) != NormalizeWindowsPathChar(b[i]))
+ return false;
+ }
+ return true;
+}
+
+bool DoesBeginWindowsDriveLetter(const base::StringPiece& path) {
+ if (path.size() < 3)
+ return false;
+
+ // Check colon first, this will generally fail fastest.
+ if (path[1] != ':')
+ return false;
+
+ // Check drive letter.
+ if (!base::IsAsciiAlpha(path[0]))
+ return false;
+
+ if (!IsSlash(path[2]))
+ return false;
+ return true;
+}
+#endif
+
+// A wrapper around FilePath.GetComponents that works the way we need. This is
+// not super efficient since it does some O(n) transformations on the path. If
+// this is called a lot, we might want to optimize.
+std::vector<base::FilePath::StringType> GetPathComponents(
+ const base::FilePath& path) {
+ std::vector<base::FilePath::StringType> result;
+ path.GetComponents(&result);
+
+ if (result.empty())
+ return result;
+
+ // GetComponents will preserve the "/" at the beginning, which confuses us.
+ // We don't expect to have relative paths in this function.
+ // Don't use IsSeparator since we always want to allow backslashes.
+ if (result[0] == FILE_PATH_LITERAL("/") ||
+ result[0] == FILE_PATH_LITERAL("\\"))
+ result.erase(result.begin());
+
+#if defined(OS_WIN)
+ // On Windows, GetComponents will give us [ "C:", "/", "foo" ], and we
+ // don't want the slash in there. This doesn't support input like "C:foo"
+ // which means foo relative to the current directory of the C drive but
+ // that's basically legacy DOS behavior we don't need to support.
+ if (result.size() >= 2 && result[1].size() == 1 &&
+ IsSlash(static_cast<char>(result[1][0])))
+ result.erase(result.begin() + 1);
+#endif
+
+ return result;
+}
+
+// Provides the equivalent of == for filesystem strings, trying to do
+// approximately the right thing with case.
+bool FilesystemStringsEqual(const base::FilePath::StringType& a,
+ const base::FilePath::StringType& b) {
+#if defined(OS_WIN)
+ // Assume case-insensitive filesystems on Windows. We use the CompareString
+ // function to do a case-insensitive comparison based on the current locale
+ // (we don't want GN to depend on ICU which is large and requires data
+ // files). This isn't perfect, but getting this perfectly right is very
+ // difficult and requires I/O, and this comparison should cover 99.9999% of
+ // all cases.
+ //
+ // Note: The documentation for CompareString says it runs fastest on
+ // null-terminated strings with -1 passed for the length, so we do that here.
+ // There should not be embedded nulls in filesystem strings.
+ return ::CompareString(LOCALE_USER_DEFAULT, LINGUISTIC_IGNORECASE,
+ a.c_str(), -1, b.c_str(), -1) == CSTR_EQUAL;
+#else
+ // Assume case-sensitive filesystems on non-Windows.
+ return a == b;
+#endif
+}
+
+} // namespace
+
+std::string FilePathToUTF8(const base::FilePath::StringType& str) {
+#if defined(OS_WIN)
+ return base::WideToUTF8(str);
+#else
+ return str;
+#endif
+}
+
+base::FilePath UTF8ToFilePath(const base::StringPiece& sp) {
+#if defined(OS_WIN)
+ return base::FilePath(base::UTF8ToWide(sp));
+#else
+ return base::FilePath(sp.as_string());
+#endif
+}
+
+size_t FindExtensionOffset(const std::string& path) {
+ for (int i = static_cast<int>(path.size()); i >= 0; i--) {
+ if (IsSlash(path[i]))
+ break;
+ if (path[i] == '.')
+ return i + 1;
+ }
+ return std::string::npos;
+}
+
+base::StringPiece FindExtension(const std::string* path) {
+ size_t extension_offset = FindExtensionOffset(*path);
+ if (extension_offset == std::string::npos)
+ return base::StringPiece();
+ return base::StringPiece(&path->data()[extension_offset],
+ path->size() - extension_offset);
+}
+
+size_t FindFilenameOffset(const std::string& path) {
+ for (int i = static_cast<int>(path.size()) - 1; i >= 0; i--) {
+ if (IsSlash(path[i]))
+ return i + 1;
+ }
+ return 0; // No filename found means everything was the filename.
+}
+
+base::StringPiece FindFilename(const std::string* path) {
+ size_t filename_offset = FindFilenameOffset(*path);
+ if (filename_offset == 0)
+ return base::StringPiece(*path); // Everything is the file name.
+ return base::StringPiece(&(*path).data()[filename_offset],
+ path->size() - filename_offset);
+}
+
+base::StringPiece FindFilenameNoExtension(const std::string* path) {
+ if (path->empty())
+ return base::StringPiece();
+ size_t filename_offset = FindFilenameOffset(*path);
+ size_t extension_offset = FindExtensionOffset(*path);
+
+ size_t name_len;
+ if (extension_offset == std::string::npos)
+ name_len = path->size() - filename_offset;
+ else
+ name_len = extension_offset - filename_offset - 1;
+
+ return base::StringPiece(&(*path).data()[filename_offset], name_len);
+}
+
+void RemoveFilename(std::string* path) {
+ path->resize(FindFilenameOffset(*path));
+}
+
+bool EndsWithSlash(const std::string& s) {
+ return !s.empty() && IsSlash(s[s.size() - 1]);
+}
+
+base::StringPiece FindDir(const std::string* path) {
+ size_t filename_offset = FindFilenameOffset(*path);
+ if (filename_offset == 0u)
+ return base::StringPiece();
+ return base::StringPiece(path->data(), filename_offset);
+}
+
+base::StringPiece FindLastDirComponent(const SourceDir& dir) {
+ const std::string& dir_string = dir.value();
+
+ if (dir_string.empty())
+ return base::StringPiece();
+ int cur = static_cast<int>(dir_string.size()) - 1;
+ DCHECK(dir_string[cur] == '/');
+ int end = cur;
+ cur--; // Skip before the last slash.
+
+ for (; cur >= 0; cur--) {
+ if (dir_string[cur] == '/')
+ return base::StringPiece(&dir_string[cur + 1], end - cur - 1);
+ }
+ return base::StringPiece(&dir_string[0], end);
+}
+
+bool IsStringInOutputDir(const SourceDir& output_dir, const std::string& str) {
+  // This check will be wrong for all proper prefixes, e.g. "/output" will
+ // match "/out" but we don't really care since this is just a sanity check.
+ const std::string& dir_str = output_dir.value();
+ return str.compare(0, dir_str.length(), dir_str) == 0;
+}
+
+bool EnsureStringIsInOutputDir(const SourceDir& output_dir,
+ const std::string& str,
+ const ParseNode* origin,
+ Err* err) {
+ if (IsStringInOutputDir(output_dir, str))
+ return true; // Output directory is hardcoded.
+
+ *err = Err(origin, "File is not inside output directory.",
+ "The given file should be in the output directory. Normally you would "
+ "specify\n\"$target_out_dir/foo\" or "
+ "\"$target_gen_dir/foo\". I interpreted this as\n\""
+ + str + "\".");
+ return false;
+}
+
+bool IsPathAbsolute(const base::StringPiece& path) {
+ if (path.empty())
+ return false;
+
+ if (!IsSlash(path[0])) {
+#if defined(OS_WIN)
+ // Check for Windows system paths like "C:\foo".
+ if (path.size() > 2 && path[1] == ':' && IsSlash(path[2]))
+ return true;
+#endif
+ return false; // Doesn't begin with a slash, is relative.
+ }
+
+ // Double forward slash at the beginning means source-relative (we don't
+ // allow backslashes for denoting this).
+ if (path.size() > 1 && path[1] == '/')
+ return false;
+
+ return true;
+}
+
+bool MakeAbsolutePathRelativeIfPossible(const base::StringPiece& source_root,
+ const base::StringPiece& path,
+ std::string* dest) {
+ DCHECK(IsPathAbsolute(source_root));
+ DCHECK(IsPathAbsolute(path));
+
+ dest->clear();
+
+ if (source_root.size() > path.size())
+ return false; // The source root is longer: the path can never be inside.
+
+#if defined(OS_WIN)
+ // Source root should be canonical on Windows. Note that the initial slash
+ // must be forward slash, but that the other ones can be either forward or
+ // backward.
+ DCHECK(source_root.size() > 2 && source_root[0] != '/' &&
+ source_root[1] == ':' && IsSlash(source_root[2]));
+
+ size_t after_common_index = std::string::npos;
+ if (DoesBeginWindowsDriveLetter(path)) {
+ // Handle "C:\foo"
+ if (AreAbsoluteWindowsPathsEqual(source_root,
+ path.substr(0, source_root.size())))
+ after_common_index = source_root.size();
+ else
+ return false;
+ } else if (path[0] == '/' && source_root.size() <= path.size() - 1 &&
+ DoesBeginWindowsDriveLetter(path.substr(1))) {
+ // Handle "/C:/foo"
+ if (AreAbsoluteWindowsPathsEqual(source_root,
+ path.substr(1, source_root.size())))
+ after_common_index = source_root.size() + 1;
+ else
+ return false;
+ } else {
+ return false;
+ }
+
+ // If we get here, there's a match and after_common_index identifies the
+ // part after it.
+
+ // The base may or may not have a trailing slash, so skip all slashes from
+ // the path after our prefix match.
+ size_t first_after_slash = after_common_index;
+ while (first_after_slash < path.size() && IsSlash(path[first_after_slash]))
+ first_after_slash++;
+
+ dest->assign("//"); // Result is source root relative.
+ dest->append(&path.data()[first_after_slash],
+ path.size() - first_after_slash);
+ return true;
+
+#else
+
+ // On non-Windows this is easy. Since we know both are absolute, just do a
+ // prefix check.
+ if (path.substr(0, source_root.size()) == source_root) {
+ // The base may or may not have a trailing slash, so skip all slashes from
+ // the path after our prefix match.
+ size_t first_after_slash = source_root.size();
+ while (first_after_slash < path.size() && IsSlash(path[first_after_slash]))
+ first_after_slash++;
+
+ dest->assign("//"); // Result is source root relative.
+ dest->append(&path.data()[first_after_slash],
+ path.size() - first_after_slash);
+ return true;
+ }
+ return false;
+#endif
+}
+
+void NormalizePath(std::string* path, const base::StringPiece& source_root) {
+ char* pathbuf = path->empty() ? nullptr : &(*path)[0];
+
+ // top_index is the first character we can modify in the path. Anything
+ // before this indicates where the path is relative to.
+ size_t top_index = 0;
+ bool is_relative = true;
+ if (!path->empty() && pathbuf[0] == '/') {
+ is_relative = false;
+
+ if (path->size() > 1 && pathbuf[1] == '/') {
+ // Two leading slashes, this is a path into the source dir.
+ top_index = 2;
+ } else {
+ // One leading slash, this is a system-absolute path.
+ top_index = 1;
+ }
+ }
+
+ size_t dest_i = top_index;
+ for (size_t src_i = top_index; src_i < path->size(); /* nothing */) {
+ if (pathbuf[src_i] == '.') {
+ if (src_i == 0 || IsSlash(pathbuf[src_i - 1])) {
+ // Slash followed by a dot, see if it's something special.
+ size_t consumed_len;
+ switch (ClassifyAfterDot(*path, src_i + 1, &consumed_len)) {
+ case NOT_A_DIRECTORY:
+ // Copy the dot to the output, it means nothing special.
+ pathbuf[dest_i++] = pathbuf[src_i++];
+ break;
+ case DIRECTORY_CUR:
+ // Current directory, just skip the input.
+ src_i += consumed_len;
+ break;
+ case DIRECTORY_UP:
+ // Back up over previous directory component. If we're already
+ // at the top, preserve the "..".
+ if (dest_i > top_index) {
+ // The previous char was a slash, remove it.
+ dest_i--;
+ }
+
+ if (dest_i == top_index) {
+ if (is_relative) {
+ // We're already at the beginning of a relative input, copy the
+ // ".." and continue. We need the trailing slash if there was
+ // one before (otherwise we're at the end of the input).
+ pathbuf[dest_i++] = '.';
+ pathbuf[dest_i++] = '.';
+ if (consumed_len == 3)
+ pathbuf[dest_i++] = '/';
+
+ // This also makes a new "root" that we can't delete by going
+ // up more levels. Otherwise "../.." would collapse to
+ // nothing.
+ top_index = dest_i;
+ } else if (top_index == 2 && !source_root.empty()) {
+ // |path| was passed in as a source-absolute path. Prepend
+ // |source_root| to make |path| absolute. |source_root| must not
+ // end with a slash unless we are at root.
+ DCHECK(source_root.size() == 1u ||
+ !IsSlash(source_root[source_root.size() - 1u]));
+ size_t source_root_len = source_root.size();
+
+#if defined(OS_WIN)
+ // On Windows, if the source_root does not start with a slash,
+ // append one here for consistency.
+ if (!IsSlash(source_root[0])) {
+ path->insert(0, "/" + source_root.as_string());
+ source_root_len++;
+ } else {
+ path->insert(0, source_root.data(), source_root_len);
+ }
+
+ // Normalize slashes in source root portion.
+ for (size_t i = 0; i < source_root_len; ++i) {
+ if ((*path)[i] == '\\')
+ (*path)[i] = '/';
+ }
+#else
+ path->insert(0, source_root.data(), source_root_len);
+#endif
+
+ // |path| is now absolute, so |top_index| is 1. |dest_i| and
+ // |src_i| should be incremented to keep the same relative
+                  // position. Consume the leading "//" by decrementing |dest_i|.
+ top_index = 1;
+ pathbuf = &(*path)[0];
+ dest_i += source_root_len - 2;
+ src_i += source_root_len;
+
+ // Just find the previous slash or the beginning of input.
+ while (dest_i > 0 && !IsSlash(pathbuf[dest_i - 1]))
+ dest_i--;
+ }
+ // Otherwise we're at the beginning of a system-absolute path, or
+ // a source-absolute path for which we don't know the absolute
+ // path. Don't allow ".." to go up another level, and just eat it.
+ } else {
+ // Just find the previous slash or the beginning of input.
+ while (dest_i > 0 && !IsSlash(pathbuf[dest_i - 1]))
+ dest_i--;
+ }
+ src_i += consumed_len;
+ }
+ } else {
+        // Dot not preceded by a slash, copy it literally.
+ pathbuf[dest_i++] = pathbuf[src_i++];
+ }
+ } else if (IsSlash(pathbuf[src_i])) {
+ if (src_i > 0 && IsSlash(pathbuf[src_i - 1])) {
+ // Two slashes in a row, skip over it.
+ src_i++;
+ } else {
+        // Just one slash, copy it, normalizing to forward slash.
+ pathbuf[dest_i] = '/';
+ dest_i++;
+ src_i++;
+ }
+ } else {
+ // Input nothing special, just copy it.
+ pathbuf[dest_i++] = pathbuf[src_i++];
+ }
+ }
+ path->resize(dest_i);
+}
+
+void ConvertPathToSystem(std::string* path) {
+#if defined(OS_WIN)
+ for (size_t i = 0; i < path->size(); i++) {
+ if ((*path)[i] == '/')
+ (*path)[i] = '\\';
+ }
+#endif
+}
+
+std::string MakeRelativePath(const std::string& input,
+ const std::string& dest) {
+#if defined(OS_WIN)
+ // Make sure that absolute |input| path starts with a slash if |dest| path
+ // does. Otherwise skipping common prefixes won't work properly. Ensure the
+ // same for |dest| path too.
+ if (IsPathAbsolute(input) && !IsSlash(input[0]) && IsSlash(dest[0])) {
+ std::string corrected_input(1, dest[0]);
+ corrected_input.append(input);
+ return MakeRelativePath(corrected_input, dest);
+ }
+ if (IsPathAbsolute(dest) && !IsSlash(dest[0]) && IsSlash(input[0])) {
+ std::string corrected_dest(1, input[0]);
+ corrected_dest.append(dest);
+ return MakeRelativePath(input, corrected_dest);
+ }
+
+ // Make sure that both absolute paths use the same drive letter case.
+ if (IsPathAbsolute(input) && IsPathAbsolute(dest) && input.size() > 1 &&
+ dest.size() > 1) {
+ int letter_pos = base::IsAsciiAlpha(input[0]) ? 0 : 1;
+ if (input[letter_pos] != dest[letter_pos] &&
+ base::ToUpperASCII(input[letter_pos]) ==
+ base::ToUpperASCII(dest[letter_pos])) {
+ std::string corrected_input = input;
+ corrected_input[letter_pos] = dest[letter_pos];
+ return MakeRelativePath(corrected_input, dest);
+ }
+ }
+#endif
+
+ std::string ret;
+
+ // Skip the common prefixes of the source and dest as long as they end in
+ // a [back]slash.
+ size_t common_prefix_len = 0;
+ size_t max_common_length = std::min(input.size(), dest.size());
+ for (size_t i = common_prefix_len; i < max_common_length; i++) {
+ if (IsSlash(input[i]) && IsSlash(dest[i]))
+ common_prefix_len = i + 1;
+ else if (input[i] != dest[i])
+ break;
+ }
+
+ // Invert the dest dir starting from the end of the common prefix.
+ for (size_t i = common_prefix_len; i < dest.size(); i++) {
+ if (IsSlash(dest[i]))
+ ret.append("../");
+ }
+
+ // Append any remaining unique input.
+ ret.append(&input[common_prefix_len], input.size() - common_prefix_len);
+
+ // If the result is still empty, the paths are the same.
+ if (ret.empty())
+ ret.push_back('.');
+
+ return ret;
+}
+
+std::string RebasePath(const std::string& input,
+ const SourceDir& dest_dir,
+ const base::StringPiece& source_root) {
+ std::string ret;
+ DCHECK(source_root.empty() || !source_root.ends_with("/"));
+
+ bool input_is_source_path = (input.size() >= 2 &&
+ input[0] == '/' && input[1] == '/');
+
+ if (!source_root.empty() &&
+ (!input_is_source_path || !dest_dir.is_source_absolute())) {
+ std::string input_full;
+ std::string dest_full;
+ if (input_is_source_path) {
+ source_root.AppendToString(&input_full);
+ input_full.push_back('/');
+ input_full.append(input, 2, std::string::npos);
+ } else {
+ input_full.append(input);
+ }
+ if (dest_dir.is_source_absolute()) {
+ source_root.AppendToString(&dest_full);
+ dest_full.push_back('/');
+ dest_full.append(dest_dir.value(), 2, std::string::npos);
+ } else {
+#if defined(OS_WIN)
+ // On Windows, SourceDir system-absolute paths start
+ // with /, e.g. "/C:/foo/bar".
+ const std::string& value = dest_dir.value();
+ if (value.size() > 2 && value[2] == ':')
+ dest_full.append(dest_dir.value().substr(1));
+ else
+ dest_full.append(dest_dir.value());
+#else
+ dest_full.append(dest_dir.value());
+#endif
+ }
+ bool remove_slash = false;
+ if (!EndsWithSlash(input_full)) {
+ input_full.push_back('/');
+ remove_slash = true;
+ }
+ ret = MakeRelativePath(input_full, dest_full);
+ if (remove_slash && ret.size() > 1)
+ ret.resize(ret.size() - 1);
+ return ret;
+ }
+
+ ret = MakeRelativePath(input, dest_dir.value());
+ return ret;
+}
+
+std::string DirectoryWithNoLastSlash(const SourceDir& dir) {
+ std::string ret;
+
+ if (dir.value().empty()) {
+ // Just keep input the same.
+ } else if (dir.value() == "/") {
+ ret.assign("/.");
+ } else if (dir.value() == "//") {
+ ret.assign("//.");
+ } else {
+ ret.assign(dir.value());
+ ret.resize(ret.size() - 1);
+ }
+ return ret;
+}
+
+SourceDir SourceDirForPath(const base::FilePath& source_root,
+ const base::FilePath& path) {
+ std::vector<base::FilePath::StringType> source_comp =
+ GetPathComponents(source_root);
+ std::vector<base::FilePath::StringType> path_comp =
+ GetPathComponents(path);
+
+ // See if path is inside the source root by looking for each of source root's
+ // components at the beginning of path.
+ bool is_inside_source;
+ if (path_comp.size() < source_comp.size() || source_root.empty()) {
+ // Too small to fit.
+ is_inside_source = false;
+ } else {
+ is_inside_source = true;
+ for (size_t i = 0; i < source_comp.size(); i++) {
+ if (!FilesystemStringsEqual(source_comp[i], path_comp[i])) {
+ is_inside_source = false;
+ break;
+ }
+ }
+ }
+
+ std::string result_str;
+ size_t initial_path_comp_to_use;
+ if (is_inside_source) {
+ // Construct a source-relative path beginning in // and skip all of the
+ // shared directories.
+ result_str = "//";
+ initial_path_comp_to_use = source_comp.size();
+ } else {
+ // Not inside source code, construct a system-absolute path.
+ result_str = "/";
+ initial_path_comp_to_use = 0;
+ }
+
+ for (size_t i = initial_path_comp_to_use; i < path_comp.size(); i++) {
+ result_str.append(FilePathToUTF8(path_comp[i]));
+ result_str.push_back('/');
+ }
+ return SourceDir(result_str);
+}
+
+SourceDir SourceDirForCurrentDirectory(const base::FilePath& source_root) {
+ base::FilePath cd;
+ base::GetCurrentDirectory(&cd);
+ return SourceDirForPath(source_root, cd);
+}
+
+std::string GetOutputSubdirName(const Label& toolchain_label, bool is_default) {
+ // The default toolchain has no subdir.
+ if (is_default)
+ return std::string();
+
+ // For now just assume the toolchain name is always a valid dir name. We may
+  // want to clean this up in the future.
+ return toolchain_label.name() + "/";
+}
+
+bool ContentsEqual(const base::FilePath& file_path, const std::string& data) {
+ // Compare file and stream sizes first. Quick and will save us some time if
+ // they are different sizes.
+ int64_t file_size;
+ if (!base::GetFileSize(file_path, &file_size) ||
+ static_cast<size_t>(file_size) != data.size()) {
+ return false;
+ }
+
+ std::string file_data;
+ file_data.resize(file_size);
+ if (!base::ReadFileToString(file_path, &file_data))
+ return false;
+
+ return file_data == data;
+}
+
+bool WriteFileIfChanged(const base::FilePath& file_path,
+ const std::string& data,
+ Err* err) {
+ if (ContentsEqual(file_path, data))
+ return true;
+
+ // Create the directory if necessary.
+ if (!base::CreateDirectory(file_path.DirName())) {
+ if (err) {
+ *err =
+ Err(Location(), "Unable to create directory.",
+ "I was using \"" + FilePathToUTF8(file_path.DirName()) + "\".");
+ }
+ return false;
+ }
+
+ int size = static_cast<int>(data.size());
+ bool write_success = false;
+
+#if defined(OS_WIN)
+ // On Windows, provide a custom implementation of base::WriteFile. Sometimes
+ // the base version fails, especially on the bots. The guess is that Windows
+ // Defender or other antivirus programs still have the file open (after
+ // checking for the read) when the write happens immediately after. This
+ // version opens with FILE_SHARE_READ (normally not what you want when
+ // replacing the entire contents of the file) which lets us continue even if
+ // another program has the file open for reading. See http://crbug.com/468437
+ base::win::ScopedHandle file(::CreateFile(file_path.value().c_str(),
+ GENERIC_WRITE, FILE_SHARE_READ,
+ NULL, CREATE_ALWAYS, 0, NULL));
+ if (file.IsValid()) {
+ DWORD written;
+ BOOL result = ::WriteFile(file.Get(), data.c_str(), size, &written, NULL);
+ if (result) {
+ if (static_cast<int>(written) == size) {
+ write_success = true;
+ } else {
+ // Didn't write all the bytes.
+ LOG(ERROR) << "wrote" << written << " bytes to "
+ << base::UTF16ToUTF8(file_path.value()) << " expected "
+ << size;
+ }
+ } else {
+ // WriteFile failed.
+ PLOG(ERROR) << "writing file " << base::UTF16ToUTF8(file_path.value())
+ << " failed";
+ }
+ } else {
+ PLOG(ERROR) << "CreateFile failed for path "
+ << base::UTF16ToUTF8(file_path.value());
+ }
+#else
+ write_success = base::WriteFile(file_path, data.c_str(), size) == size;
+#endif
+
+ if (!write_success && err) {
+ *err = Err(Location(), "Unable to write file.",
+ "I was writing \"" + FilePathToUTF8(file_path) + "\".");
+ }
+
+ return write_success;
+}
+
+SourceDir GetToolchainOutputDir(const Settings* settings) {
+ return settings->toolchain_output_subdir().AsSourceDir(
+ settings->build_settings());
+}
+
+SourceDir GetToolchainOutputDir(const BuildSettings* build_settings,
+ const Label& toolchain_label, bool is_default) {
+ std::string result = build_settings->build_dir().value();
+ result.append(GetOutputSubdirName(toolchain_label, is_default));
+ return SourceDir(SourceDir::SWAP_IN, &result);
+}
+
+SourceDir GetToolchainGenDir(const Settings* settings) {
+ return GetToolchainGenDirAsOutputFile(settings).AsSourceDir(
+ settings->build_settings());
+}
+
+OutputFile GetToolchainGenDirAsOutputFile(const Settings* settings) {
+ OutputFile result(settings->toolchain_output_subdir());
+ result.value().append("gen/");
+ return result;
+}
+
+SourceDir GetToolchainGenDir(const BuildSettings* build_settings,
+ const Label& toolchain_label, bool is_default) {
+ std::string result = GetToolchainOutputDir(
+ build_settings, toolchain_label, is_default).value();
+ result.append("gen/");
+ return SourceDir(SourceDir::SWAP_IN, &result);
+}
+
+SourceDir GetOutputDirForSourceDir(const Settings* settings,
+ const SourceDir& source_dir) {
+ return GetOutputDirForSourceDir(
+ settings->build_settings(), source_dir,
+ settings->toolchain_label(), settings->is_default());
+}
+
+void AppendFixedAbsolutePathSuffix(const BuildSettings* build_settings,
+ const SourceDir& source_dir,
+ OutputFile* result) {
+ const std::string& build_dir = build_settings->build_dir().value();
+
+ if (base::StartsWith(source_dir.value(), build_dir,
+ base::CompareCase::SENSITIVE)) {
+ size_t build_dir_size = build_dir.size();
+ result->value().append(&source_dir.value()[build_dir_size],
+ source_dir.value().size() - build_dir_size);
+ } else {
+ result->value().append("ABS_PATH");
+#if defined(OS_WIN)
+ // Windows absolute path contains ':' after drive letter. Remove it to
+ // avoid inserting ':' in the middle of path (eg. "ABS_PATH/C:/").
+ std::string src_dir_value = source_dir.value();
+ const auto colon_pos = src_dir_value.find(':');
+ if (colon_pos != std::string::npos)
+ src_dir_value.erase(src_dir_value.begin() + colon_pos);
+#else
+ const std::string& src_dir_value = source_dir.value();
+#endif
+ result->value().append(src_dir_value);
+ }
+}
+
+SourceDir GetOutputDirForSourceDir(
+ const BuildSettings* build_settings,
+ const SourceDir& source_dir,
+ const Label& toolchain_label,
+ bool is_default_toolchain) {
+ return GetOutputDirForSourceDirAsOutputFile(
+ build_settings, source_dir, toolchain_label, is_default_toolchain)
+ .AsSourceDir(build_settings);
+}
+
+OutputFile GetOutputDirForSourceDirAsOutputFile(
+ const BuildSettings* build_settings,
+ const SourceDir& source_dir,
+ const Label& toolchain_label,
+ bool is_default_toolchain) {
+ OutputFile result(GetOutputSubdirName(toolchain_label, is_default_toolchain));
+ result.value().append("obj/");
+
+ if (source_dir.is_source_absolute()) {
+ // The source dir is source-absolute, so we trim off the two leading
+ // slashes to append to the toolchain object directory.
+ result.value().append(&source_dir.value()[2],
+ source_dir.value().size() - 2);
+ } else {
+ // System-absolute.
+ AppendFixedAbsolutePathSuffix(build_settings, source_dir, &result);
+ }
+ return result;
+}
+
+OutputFile GetOutputDirForSourceDirAsOutputFile(const Settings* settings,
+ const SourceDir& source_dir) {
+ return GetOutputDirForSourceDirAsOutputFile(
+ settings->build_settings(), source_dir,
+ settings->toolchain_label(), settings->is_default());
+}
+
+SourceDir GetGenDirForSourceDir(const Settings* settings,
+ const SourceDir& source_dir) {
+ return GetGenDirForSourceDirAsOutputFile(settings, source_dir).AsSourceDir(
+ settings->build_settings());
+}
+
+OutputFile GetGenDirForSourceDirAsOutputFile(const Settings* settings,
+ const SourceDir& source_dir) {
+ OutputFile result = GetToolchainGenDirAsOutputFile(settings);
+
+ if (source_dir.is_source_absolute()) {
+ // The source dir should be source-absolute, so we trim off the two leading
+ // slashes to append to the toolchain object directory.
+ DCHECK(source_dir.is_source_absolute());
+ result.value().append(&source_dir.value()[2],
+ source_dir.value().size() - 2);
+ } else {
+ // System-absolute.
+ AppendFixedAbsolutePathSuffix(settings->build_settings(), source_dir,
+ &result);
+ }
+ return result;
+}
+
+SourceDir GetTargetOutputDir(const Target* target) {
+ return GetOutputDirForSourceDirAsOutputFile(
+ target->settings(), target->label().dir()).AsSourceDir(
+ target->settings()->build_settings());
+}
+
+OutputFile GetTargetOutputDirAsOutputFile(const Target* target) {
+ return GetOutputDirForSourceDirAsOutputFile(
+ target->settings(), target->label().dir());
+}
+
+SourceDir GetTargetGenDir(const Target* target) {
+ return GetTargetGenDirAsOutputFile(target).AsSourceDir(
+ target->settings()->build_settings());
+}
+
+OutputFile GetTargetGenDirAsOutputFile(const Target* target) {
+ return GetGenDirForSourceDirAsOutputFile(
+ target->settings(), target->label().dir());
+}
+
+SourceDir GetCurrentOutputDir(const Scope* scope) {
+ return GetOutputDirForSourceDirAsOutputFile(
+ scope->settings(), scope->GetSourceDir()).AsSourceDir(
+ scope->settings()->build_settings());
+}
+
+SourceDir GetCurrentGenDir(const Scope* scope) {
+ return GetGenDirForSourceDir(scope->settings(), scope->GetSourceDir());
+}
diff --git a/chromium/tools/gn/filesystem_utils.h b/chromium/tools/gn/filesystem_utils.h
new file mode 100644
index 00000000000..aaa08babde9
--- /dev/null
+++ b/chromium/tools/gn/filesystem_utils.h
@@ -0,0 +1,218 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_FILESYSTEM_UTILS_H_
+#define TOOLS_GN_FILESYSTEM_UTILS_H_
+
+#include <stddef.h>
+
+#include <string>
+
+#include "base/files/file_path.h"
+#include "base/strings/string_piece.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/target.h"
+
+class Err;
+class Location;
+class Value;
+
+std::string FilePathToUTF8(const base::FilePath::StringType& str);
+inline std::string FilePathToUTF8(const base::FilePath& path) {
+ return FilePathToUTF8(path.value());
+}
+base::FilePath UTF8ToFilePath(const base::StringPiece& sp);
+
+// Extensions -----------------------------------------------------------------
+
+// Returns the index of the extension (character after the last dot not after a
+// slash). Returns std::string::npos if not found. Returns path.size() if the
+// file ends with a dot.
+size_t FindExtensionOffset(const std::string& path);
+
+// Returns a string piece pointing into the input string identifying the
+// extension. Note that the input pointer must outlive the output.
+base::StringPiece FindExtension(const std::string* path);
+
+// Filename parts -------------------------------------------------------------
+
+// Returns the offset of the character following the last slash, or
+// 0 if no slash was found. Returns path.size() if the path ends with a slash.
+// Note that the input pointer must outlive the output.
+size_t FindFilenameOffset(const std::string& path);
+
+// Returns a string piece pointing into the input string identifying the
+// file name (following the last slash, including the extension). Note that the
+// input pointer must outlive the output.
+base::StringPiece FindFilename(const std::string* path);
+
+// Like FindFilename but does not include the extension.
+base::StringPiece FindFilenameNoExtension(const std::string* path);
+
+// Removes everything after the last slash. The last slash, if any, will be
+// preserved.
+void RemoveFilename(std::string* path);
+
+// Returns if the given character is a slash. This allows both slashes and
+// backslashes for consistency between Posix and Windows (as opposed to
+// FilePath::IsSeparator which is based on the current platform).
+inline bool IsSlash(const char ch) {
+ return ch == '/' || ch == '\\';
+}
+
+// Returns true if the given path ends with a slash.
+bool EndsWithSlash(const std::string& s);
+
+// Path parts -----------------------------------------------------------------
+
+// Returns a string piece pointing into the input string identifying the
+// directory name of the given path, including the last slash. Note that the
+// input pointer must outlive the output.
+base::StringPiece FindDir(const std::string* path);
+
+// Returns the substring identifying the last component of the dir, or the
+// empty substring if none. For example "//foo/bar/" -> "bar".
+base::StringPiece FindLastDirComponent(const SourceDir& dir);
+
+// Returns true if the given string is in the given output dir. This is pretty
+// stupid and doesn't handle "." and "..", etc., it is designed for a sanity
+// check to keep people from writing output files to the source directory
+// accidentally.
+bool IsStringInOutputDir(const SourceDir& output_dir, const std::string& str);
+
+// Verifies that the given string references a file inside of the given
+// directory. This just uses IsStringInOutputDir above.
+//
+// The origin will be blamed in the error.
+//
+// If the file isn't in the dir, returns false and sets the error. Otherwise
+// returns true and leaves the error untouched.
+bool EnsureStringIsInOutputDir(const SourceDir& output_dir,
+ const std::string& str,
+ const ParseNode* origin,
+ Err* err);
+
+// ----------------------------------------------------------------------------
+
+// Returns true if the input string is absolute. Double-slashes at the
+// beginning are treated as source-relative paths. On Windows, this handles
+// paths of both the native format: "C:/foo" and ours "/C:/foo"
+bool IsPathAbsolute(const base::StringPiece& path);
+
+// Given an absolute path, checks to see if is it is inside the source root.
+// If it is, fills a source-absolute path into the given output and returns
+// true. If it isn't, clears the dest and returns false.
+//
+// The source_root should be a base::FilePath converted to UTF-8. On Windows,
+// it should begin with a "C:/" rather than being our SourceFile's style
+// ("/C:/"). The source root can end with a slash or not.
+//
+// Note that this does not attempt to normalize slashes in the output.
+bool MakeAbsolutePathRelativeIfPossible(const base::StringPiece& source_root,
+ const base::StringPiece& path,
+ std::string* dest);
+
+// Collapses "." and sequential "/"s and evaluates "..". |path| may be
+// system-absolute, source-absolute, or relative. If |path| is source-absolute
+// and |source_root| is non-empty, |path| may be system absolute after this
+// function returns, if |path| references the filesystem outside of
+// |source_root| (ex. path = "//.."). In this case on Windows, |path| will have
+// a leading slash. Otherwise, |path| will retain its relativity. |source_root|
+// must not end with a slash.
+void NormalizePath(std::string* path,
+ const base::StringPiece& source_root = base::StringPiece());
+
+// Converts slashes to backslashes for Windows. Keeps the string unchanged
+// for other systems.
+void ConvertPathToSystem(std::string* path);
+
+// Takes a path, |input|, and makes it relative to the given directory
+// |dest_dir|. Both inputs may be source-relative (e.g. begins with
+// with "//") or may be absolute.
+//
+// If supplied, the |source_root| parameter is the absolute path to
+// the source root and not end in a slash. Unless you know that the
+// inputs are always source relative, this should be supplied.
+std::string RebasePath(
+ const std::string& input,
+ const SourceDir& dest_dir,
+ const base::StringPiece& source_root = base::StringPiece());
+
+// Returns the given directory with no terminating slash at the end, such that
+// appending a slash and more stuff will produce a valid path.
+//
+// If the directory refers to either the source or system root, we'll append
+// a "." so this remains valid.
+std::string DirectoryWithNoLastSlash(const SourceDir& dir);
+
+// Returns the "best" SourceDir representing the given path. If it's inside the
+// given source_root, a source-relative directory will be returned (e.g.
+// "//foo/bar.cc". If it's outside of the source root or the source root is
+// empty, a system-absolute directory will be returned.
+SourceDir SourceDirForPath(const base::FilePath& source_root,
+ const base::FilePath& path);
+
+// Like SourceDirForPath but returns the SourceDir representing the current
+// directory.
+SourceDir SourceDirForCurrentDirectory(const base::FilePath& source_root);
+
+// Given the label of a toolchain and whether that toolchain is the default
+// toolchain, returns the name of the subdirectory for that toolchain's
+// output. This will be the empty string to indicate that the toolchain outputs
+// go in the root build directory. Otherwise, the result will end in a slash.
+std::string GetOutputSubdirName(const Label& toolchain_label, bool is_default);
+
+// Returns true if the contents of the file and stream given are equal, false
+// otherwise.
+bool ContentsEqual(const base::FilePath& file_path, const std::string& data);
+
+// Writes given stream contents to the given file if it differs from existing
+// file contents. Returns true if new contents was successfully written or
+// existing file contents doesn't need updating, false on write error. |err| is
+// set on write error if not nullptr.
+bool WriteFileIfChanged(const base::FilePath& file_path,
+ const std::string& data,
+ Err* err);
+
+// -----------------------------------------------------------------------------
+
+// These functions return the various flavors of output and gen directories.
+SourceDir GetToolchainOutputDir(const Settings* settings);
+SourceDir GetToolchainOutputDir(const BuildSettings* build_settings,
+ const Label& label, bool is_default);
+
+SourceDir GetToolchainGenDir(const Settings* settings);
+OutputFile GetToolchainGenDirAsOutputFile(const Settings* settings);
+SourceDir GetToolchainGenDir(const BuildSettings* build_settings,
+ const Label& toolchain_label,
+ bool is_default);
+
+SourceDir GetOutputDirForSourceDir(const Settings* settings,
+ const SourceDir& source_dir);
+SourceDir GetOutputDirForSourceDir(const BuildSettings* build_settings,
+ const SourceDir& source_dir,
+ const Label& toolchain_label,
+ bool is_default_toolchain);
+OutputFile GetOutputDirForSourceDirAsOutputFile(
+ const BuildSettings* build_settings,
+ const SourceDir& source_dir,
+ const Label& toolchain_label,
+ bool is_default_toolchain);
+OutputFile GetOutputDirForSourceDirAsOutputFile(const Settings* settings,
+ const SourceDir& source_dir);
+
+SourceDir GetGenDirForSourceDir(const Settings* settings,
+ const SourceDir& source_dir);
+OutputFile GetGenDirForSourceDirAsOutputFile(const Settings* settings,
+ const SourceDir& source_dir);
+
+SourceDir GetTargetOutputDir(const Target* target);
+OutputFile GetTargetOutputDirAsOutputFile(const Target* target);
+SourceDir GetTargetGenDir(const Target* target);
+OutputFile GetTargetGenDirAsOutputFile(const Target* target);
+
+SourceDir GetCurrentOutputDir(const Scope* scope);
+SourceDir GetCurrentGenDir(const Scope* scope);
+
+#endif // TOOLS_GN_FILESYSTEM_UTILS_H_
diff --git a/chromium/tools/gn/filesystem_utils_unittest.cc b/chromium/tools/gn/filesystem_utils_unittest.cc
new file mode 100644
index 00000000000..894b3da7d23
--- /dev/null
+++ b/chromium/tools/gn/filesystem_utils_unittest.cc
@@ -0,0 +1,805 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/files/file_path.h"
+#include "base/files/file_util.h"
+#include "base/files/scoped_temp_dir.h"
+#include "base/strings/string_util.h"
+#include "base/strings/utf_string_conversions.h"
+#include "base/threading/platform_thread.h"
+#include "build/build_config.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/target.h"
+
+TEST(FilesystemUtils, FileExtensionOffset) {
+ EXPECT_EQ(std::string::npos, FindExtensionOffset(""));
+ EXPECT_EQ(std::string::npos, FindExtensionOffset("foo/bar/baz"));
+ EXPECT_EQ(4u, FindExtensionOffset("foo."));
+ EXPECT_EQ(4u, FindExtensionOffset("f.o.bar"));
+ EXPECT_EQ(std::string::npos, FindExtensionOffset("foo.bar/"));
+ EXPECT_EQ(std::string::npos, FindExtensionOffset("foo.bar/baz"));
+}
+
+TEST(FilesystemUtils, FindExtension) {
+ std::string input;
+ EXPECT_EQ("", FindExtension(&input).as_string());
+ input = "foo/bar/baz";
+ EXPECT_EQ("", FindExtension(&input).as_string());
+ input = "foo.";
+ EXPECT_EQ("", FindExtension(&input).as_string());
+ input = "f.o.bar";
+ EXPECT_EQ("bar", FindExtension(&input).as_string());
+ input = "foo.bar/";
+ EXPECT_EQ("", FindExtension(&input).as_string());
+ input = "foo.bar/baz";
+ EXPECT_EQ("", FindExtension(&input).as_string());
+}
+
+TEST(FilesystemUtils, FindFilenameOffset) {
+ EXPECT_EQ(0u, FindFilenameOffset(""));
+ EXPECT_EQ(0u, FindFilenameOffset("foo"));
+ EXPECT_EQ(4u, FindFilenameOffset("foo/"));
+ EXPECT_EQ(4u, FindFilenameOffset("foo/bar"));
+}
+
+TEST(FilesystemUtils, RemoveFilename) {
+ std::string s;
+
+ RemoveFilename(&s);
+ EXPECT_STREQ("", s.c_str());
+
+ s = "foo";
+ RemoveFilename(&s);
+ EXPECT_STREQ("", s.c_str());
+
+ s = "/";
+ RemoveFilename(&s);
+ EXPECT_STREQ("/", s.c_str());
+
+ s = "foo/bar";
+ RemoveFilename(&s);
+ EXPECT_STREQ("foo/", s.c_str());
+
+ s = "foo/bar/baz.cc";
+ RemoveFilename(&s);
+ EXPECT_STREQ("foo/bar/", s.c_str());
+}
+
+TEST(FilesystemUtils, FindDir) {
+ std::string input;
+ EXPECT_EQ("", FindDir(&input));
+ input = "/";
+ EXPECT_EQ("/", FindDir(&input));
+ input = "foo/";
+ EXPECT_EQ("foo/", FindDir(&input));
+ input = "foo/bar/baz";
+ EXPECT_EQ("foo/bar/", FindDir(&input));
+}
+
+TEST(FilesystemUtils, FindLastDirComponent) {
+ SourceDir empty;
+ EXPECT_EQ("", FindLastDirComponent(empty));
+
+ SourceDir root("/");
+ EXPECT_EQ("", FindLastDirComponent(root));
+
+ SourceDir srcroot("//");
+ EXPECT_EQ("", FindLastDirComponent(srcroot));
+
+ SourceDir regular1("//foo/");
+ EXPECT_EQ("foo", FindLastDirComponent(regular1));
+
+ SourceDir regular2("//foo/bar/");
+ EXPECT_EQ("bar", FindLastDirComponent(regular2));
+}
+
+TEST(FilesystemUtils, EnsureStringIsInOutputDir) {
+ SourceDir output_dir("//out/Debug/");
+
+ // Some outside.
+ Err err;
+ EXPECT_FALSE(EnsureStringIsInOutputDir(output_dir, "//foo", nullptr, &err));
+ EXPECT_TRUE(err.has_error());
+ err = Err();
+ EXPECT_FALSE(
+ EnsureStringIsInOutputDir(output_dir, "//out/Debugit", nullptr, &err));
+ EXPECT_TRUE(err.has_error());
+
+ // Some inside.
+ err = Err();
+ EXPECT_TRUE(
+ EnsureStringIsInOutputDir(output_dir, "//out/Debug/", nullptr, &err));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_TRUE(
+ EnsureStringIsInOutputDir(output_dir, "//out/Debug/foo", nullptr, &err));
+ EXPECT_FALSE(err.has_error());
+
+ // Pattern but no template expansions are allowed.
+ EXPECT_FALSE(EnsureStringIsInOutputDir(output_dir, "{{source_gen_dir}}",
+ nullptr, &err));
+ EXPECT_TRUE(err.has_error());
+}
+
+TEST(FilesystemUtils, IsPathAbsolute) {
+ EXPECT_TRUE(IsPathAbsolute("/foo/bar"));
+ EXPECT_TRUE(IsPathAbsolute("/"));
+ EXPECT_FALSE(IsPathAbsolute(""));
+ EXPECT_FALSE(IsPathAbsolute("//"));
+ EXPECT_FALSE(IsPathAbsolute("//foo/bar"));
+
+#if defined(OS_WIN)
+ EXPECT_TRUE(IsPathAbsolute("C:/foo"));
+ EXPECT_TRUE(IsPathAbsolute("C:/"));
+ EXPECT_TRUE(IsPathAbsolute("C:\\foo"));
+ EXPECT_TRUE(IsPathAbsolute("C:\\"));
+ EXPECT_TRUE(IsPathAbsolute("/C:/foo"));
+ EXPECT_TRUE(IsPathAbsolute("/C:\\foo"));
+#endif
+}
+
+TEST(FilesystemUtils, MakeAbsolutePathRelativeIfPossible) {
+ std::string dest;
+
+#if defined(OS_WIN)
+ EXPECT_TRUE(MakeAbsolutePathRelativeIfPossible("C:\\base", "C:\\base\\foo",
+ &dest));
+ EXPECT_EQ("//foo", dest);
+ EXPECT_TRUE(MakeAbsolutePathRelativeIfPossible("C:\\base", "/C:/base/foo",
+ &dest));
+ EXPECT_EQ("//foo", dest);
+ EXPECT_TRUE(MakeAbsolutePathRelativeIfPossible("c:\\base", "C:\\base\\foo\\",
+ &dest));
+ EXPECT_EQ("//foo\\", dest);
+
+ EXPECT_FALSE(MakeAbsolutePathRelativeIfPossible("C:\\base", "C:\\ba", &dest));
+ EXPECT_FALSE(MakeAbsolutePathRelativeIfPossible("C:\\base",
+ "C:\\/notbase/foo",
+ &dest));
+#else
+
+ EXPECT_TRUE(MakeAbsolutePathRelativeIfPossible("/base", "/base/foo/", &dest));
+ EXPECT_EQ("//foo/", dest);
+ EXPECT_TRUE(MakeAbsolutePathRelativeIfPossible("/base", "/base/foo", &dest));
+ EXPECT_EQ("//foo", dest);
+ EXPECT_TRUE(MakeAbsolutePathRelativeIfPossible("/base/", "/base/foo/",
+ &dest));
+ EXPECT_EQ("//foo/", dest);
+
+ EXPECT_FALSE(MakeAbsolutePathRelativeIfPossible("/base", "/ba", &dest));
+ EXPECT_FALSE(MakeAbsolutePathRelativeIfPossible("/base", "/notbase/foo",
+ &dest));
+#endif
+}
+
+TEST(FilesystemUtils, NormalizePath) {
+ std::string input;
+
+ NormalizePath(&input);
+ EXPECT_EQ("", input);
+
+ input = "foo/bar.txt";
+ NormalizePath(&input);
+ EXPECT_EQ("foo/bar.txt", input);
+
+ input = ".";
+ NormalizePath(&input);
+ EXPECT_EQ("", input);
+
+ input = "..";
+ NormalizePath(&input);
+ EXPECT_EQ("..", input);
+
+ input = "foo//bar";
+ NormalizePath(&input);
+ EXPECT_EQ("foo/bar", input);
+
+ input = "//foo";
+ NormalizePath(&input);
+ EXPECT_EQ("//foo", input);
+
+ input = "foo/..//bar";
+ NormalizePath(&input);
+ EXPECT_EQ("bar", input);
+
+ input = "foo/../../bar";
+ NormalizePath(&input);
+ EXPECT_EQ("../bar", input);
+
+ input = "/../foo"; // Don't go above the root dir.
+ NormalizePath(&input);
+ EXPECT_EQ("/foo", input);
+
+ input = "//../foo"; // Don't go above the root dir.
+ NormalizePath(&input);
+ EXPECT_EQ("//foo", input);
+
+ input = "../foo";
+ NormalizePath(&input);
+ EXPECT_EQ("../foo", input);
+
+ input = "..";
+ NormalizePath(&input);
+ EXPECT_EQ("..", input);
+
+ input = "./././.";
+ NormalizePath(&input);
+ EXPECT_EQ("", input);
+
+ input = "../../..";
+ NormalizePath(&input);
+ EXPECT_EQ("../../..", input);
+
+ input = "../";
+ NormalizePath(&input);
+ EXPECT_EQ("../", input);
+
+ // Backslash normalization.
+ input = "foo\\..\\..\\bar";
+ NormalizePath(&input);
+ EXPECT_EQ("../bar", input);
+
+ // Trailing slashes should get preserved.
+ input = "//foo/bar/";
+ NormalizePath(&input);
+ EXPECT_EQ("//foo/bar/", input);
+
+#if defined(OS_WIN)
+ // Go above and outside of the source root.
+ input = "//../foo";
+ NormalizePath(&input, "/C:/source/root");
+ EXPECT_EQ("/C:/source/foo", input);
+
+ input = "//../foo";
+ NormalizePath(&input, "C:\\source\\root");
+ EXPECT_EQ("/C:/source/foo", input);
+
+ input = "//../";
+ NormalizePath(&input, "/C:/source/root");
+ EXPECT_EQ("/C:/source/", input);
+
+ input = "//../foo.txt";
+ NormalizePath(&input, "/C:/source/root");
+ EXPECT_EQ("/C:/source/foo.txt", input);
+
+ input = "//../foo/bar/";
+ NormalizePath(&input, "/C:/source/root");
+ EXPECT_EQ("/C:/source/foo/bar/", input);
+
+ // Go above and back into the source root. This should return a system-
+ // absolute path. We could arguably return this as a source-absolute path,
+ // but that would require additional handling to account for a rare edge
+ // case.
+ input = "//../root/foo";
+ NormalizePath(&input, "/C:/source/root");
+ EXPECT_EQ("/C:/source/root/foo", input);
+
+ input = "//../root/foo/bar/";
+ NormalizePath(&input, "/C:/source/root");
+ EXPECT_EQ("/C:/source/root/foo/bar/", input);
+
+ // Stay inside the source root
+ input = "//foo/bar";
+ NormalizePath(&input, "/C:/source/root");
+ EXPECT_EQ("//foo/bar", input);
+
+ input = "//foo/bar/";
+ NormalizePath(&input, "/C:/source/root");
+ EXPECT_EQ("//foo/bar/", input);
+
+ // The path should not go above the system root. Note that on Windows, this
+ // will consume the drive (C:).
+ input = "//../../../../../foo/bar";
+ NormalizePath(&input, "/C:/source/root");
+ EXPECT_EQ("/foo/bar", input);
+
+ // Test when the source root is the letter drive.
+ input = "//../foo";
+ NormalizePath(&input, "/C:");
+ EXPECT_EQ("/foo", input);
+
+ input = "//../foo";
+ NormalizePath(&input, "C:");
+ EXPECT_EQ("/foo", input);
+
+ input = "//../foo";
+ NormalizePath(&input, "/");
+ EXPECT_EQ("/foo", input);
+
+ input = "//../";
+ NormalizePath(&input, "\\C:");
+ EXPECT_EQ("/", input);
+
+ input = "//../foo.txt";
+ NormalizePath(&input, "/C:");
+ EXPECT_EQ("/foo.txt", input);
+#else
+ // Go above and outside of the source root.
+ input = "//../foo";
+ NormalizePath(&input, "/source/root");
+ EXPECT_EQ("/source/foo", input);
+
+ input = "//../";
+ NormalizePath(&input, "/source/root");
+ EXPECT_EQ("/source/", input);
+
+ input = "//../foo.txt";
+ NormalizePath(&input, "/source/root");
+ EXPECT_EQ("/source/foo.txt", input);
+
+ input = "//../foo/bar/";
+ NormalizePath(&input, "/source/root");
+ EXPECT_EQ("/source/foo/bar/", input);
+
+ // Go above and back into the source root. This should return a system-
+ // absolute path. We could arguably return this as a source-absolute path,
+ // but that would require additional handling to account for a rare edge
+ // case.
+ input = "//../root/foo";
+ NormalizePath(&input, "/source/root");
+ EXPECT_EQ("/source/root/foo", input);
+
+ input = "//../root/foo/bar/";
+ NormalizePath(&input, "/source/root");
+ EXPECT_EQ("/source/root/foo/bar/", input);
+
+ // Stay inside the source root
+ input = "//foo/bar";
+ NormalizePath(&input, "/source/root");
+ EXPECT_EQ("//foo/bar", input);
+
+ input = "//foo/bar/";
+ NormalizePath(&input, "/source/root");
+ EXPECT_EQ("//foo/bar/", input);
+
+ // The path should not go above the system root.
+ input = "//../../../../../foo/bar";
+ NormalizePath(&input, "/source/root");
+ EXPECT_EQ("/foo/bar", input);
+
+ // Test when the source root is the system root.
+ input = "//../foo/bar/";
+ NormalizePath(&input, "/");
+ EXPECT_EQ("/foo/bar/", input);
+
+ input = "//../";
+ NormalizePath(&input, "/");
+ EXPECT_EQ("/", input);
+
+ input = "//../foo.txt";
+ NormalizePath(&input, "/");
+ EXPECT_EQ("/foo.txt", input);
+
+#endif
+}
+
+TEST(FilesystemUtils, RebasePath) {
+ base::StringPiece source_root("/source/root");
+
+ // Degenerate case.
+ EXPECT_EQ(".", RebasePath("//", SourceDir("//"), source_root));
+ EXPECT_EQ(".", RebasePath("//foo/bar/", SourceDir("//foo/bar/"),
+ source_root));
+
+ // Going up the tree.
+ EXPECT_EQ("../foo", RebasePath("//foo", SourceDir("//bar/"), source_root));
+ EXPECT_EQ("../foo/", RebasePath("//foo/", SourceDir("//bar/"), source_root));
+ EXPECT_EQ("../../foo", RebasePath("//foo", SourceDir("//bar/moo"),
+ source_root));
+ EXPECT_EQ("../../foo/", RebasePath("//foo/", SourceDir("//bar/moo"),
+ source_root));
+
+ // Going down the tree.
+ EXPECT_EQ("foo/bar", RebasePath("//foo/bar", SourceDir("//"), source_root));
+ EXPECT_EQ("foo/bar/", RebasePath("//foo/bar/", SourceDir("//"),
+ source_root));
+
+ // Going up and down the tree.
+ EXPECT_EQ("../../foo/bar", RebasePath("//foo/bar", SourceDir("//a/b/"),
+ source_root));
+ EXPECT_EQ("../../foo/bar/", RebasePath("//foo/bar/", SourceDir("//a/b/"),
+ source_root));
+
+ // Sharing prefix.
+ EXPECT_EQ("foo", RebasePath("//a/foo", SourceDir("//a/"), source_root));
+ EXPECT_EQ("foo/", RebasePath("//a/foo/", SourceDir("//a/"), source_root));
+ EXPECT_EQ("foo", RebasePath("//a/b/foo", SourceDir("//a/b/"), source_root));
+ EXPECT_EQ("foo/", RebasePath("//a/b/foo/", SourceDir("//a/b/"),
+ source_root));
+ EXPECT_EQ("foo/bar", RebasePath("//a/b/foo/bar", SourceDir("//a/b/"),
+ source_root));
+ EXPECT_EQ("foo/bar/", RebasePath("//a/b/foo/bar/", SourceDir("//a/b/"),
+ source_root));
+
+ // One could argue about this case. Since the input doesn't have a slash it
+ // would normally not be treated like a directory and we'd go up, which is
+ // simpler. However, since it matches the output directory's name, we could
+ // potentially infer that it's the same and return "." for this.
+ EXPECT_EQ("../bar", RebasePath("//foo/bar", SourceDir("//foo/bar/"),
+ source_root));
+
+ // Check when only |input| is system-absolute
+ EXPECT_EQ("foo", RebasePath("/source/root/foo", SourceDir("//"),
+ base::StringPiece("/source/root")));
+ EXPECT_EQ("foo/", RebasePath("/source/root/foo/", SourceDir("//"),
+ base::StringPiece("/source/root")));
+ EXPECT_EQ("../../builddir/Out/Debug",
+ RebasePath("/builddir/Out/Debug", SourceDir("//"),
+ base::StringPiece("/source/root")));
+ EXPECT_EQ("../../../builddir/Out/Debug",
+ RebasePath("/builddir/Out/Debug", SourceDir("//"),
+ base::StringPiece("/source/root/foo")));
+ EXPECT_EQ("../../../builddir/Out/Debug/",
+ RebasePath("/builddir/Out/Debug/", SourceDir("//"),
+ base::StringPiece("/source/root/foo")));
+ EXPECT_EQ("../../path/to/foo",
+ RebasePath("/path/to/foo", SourceDir("//"),
+ base::StringPiece("/source/root")));
+ EXPECT_EQ("../../../path/to/foo",
+ RebasePath("/path/to/foo", SourceDir("//a"),
+ base::StringPiece("/source/root")));
+ EXPECT_EQ("../../../../path/to/foo",
+ RebasePath("/path/to/foo", SourceDir("//a/b"),
+ base::StringPiece("/source/root")));
+
+ // Check when only |dest_dir| is system-absolute.
+ EXPECT_EQ(".",
+ RebasePath("//", SourceDir("/source/root"),
+ base::StringPiece("/source/root")));
+ EXPECT_EQ("foo",
+ RebasePath("//foo", SourceDir("/source/root"),
+ base::StringPiece("/source/root")));
+ EXPECT_EQ("../foo",
+ RebasePath("//foo", SourceDir("/source/root/bar"),
+ base::StringPiece("/source/root")));
+ EXPECT_EQ("../../../source/root/foo",
+ RebasePath("//foo", SourceDir("/other/source/root"),
+ base::StringPiece("/source/root")));
+ EXPECT_EQ("../../../../source/root/foo",
+ RebasePath("//foo", SourceDir("/other/source/root/bar"),
+ base::StringPiece("/source/root")));
+
+ // Check when |input| and |dest_dir| are both system-absolute. Also,
+ // in this case |source_root| is never used so set it to a dummy
+ // value.
+ EXPECT_EQ("foo",
+ RebasePath("/source/root/foo", SourceDir("/source/root"),
+ base::StringPiece("/x/y/z")));
+ EXPECT_EQ("foo/",
+ RebasePath("/source/root/foo/", SourceDir("/source/root"),
+ base::StringPiece("/x/y/z")));
+ EXPECT_EQ("../../builddir/Out/Debug",
+ RebasePath("/builddir/Out/Debug",SourceDir("/source/root"),
+ base::StringPiece("/x/y/z")));
+ EXPECT_EQ("../../../builddir/Out/Debug",
+ RebasePath("/builddir/Out/Debug", SourceDir("/source/root/foo"),
+ base::StringPiece("/source/root/foo")));
+ EXPECT_EQ("../../../builddir/Out/Debug/",
+ RebasePath("/builddir/Out/Debug/", SourceDir("/source/root/foo"),
+ base::StringPiece("/source/root/foo")));
+ EXPECT_EQ("../../path/to/foo",
+ RebasePath("/path/to/foo", SourceDir("/source/root"),
+ base::StringPiece("/x/y/z")));
+ EXPECT_EQ("../../../path/to/foo",
+ RebasePath("/path/to/foo", SourceDir("/source/root/a"),
+ base::StringPiece("/x/y/z")));
+ EXPECT_EQ("../../../../path/to/foo",
+ RebasePath("/path/to/foo", SourceDir("/source/root/a/b"),
+ base::StringPiece("/x/y/z")));
+
+#if defined(OS_WIN)
+ // Test corrections while rebasing Windows-style absolute paths.
+ EXPECT_EQ("../../../../path/to/foo",
+ RebasePath("C:/path/to/foo", SourceDir("//a/b"),
+ base::StringPiece("/C:/source/root")));
+ EXPECT_EQ("../../../../path/to/foo",
+ RebasePath("/C:/path/to/foo", SourceDir("//a/b"),
+ base::StringPiece("C:/source/root")));
+ EXPECT_EQ("../../../../path/to/foo",
+ RebasePath("/C:/path/to/foo", SourceDir("//a/b"),
+ base::StringPiece("/c:/source/root")));
+ EXPECT_EQ("../../../../path/to/foo",
+ RebasePath("/c:/path/to/foo", SourceDir("//a/b"),
+ base::StringPiece("c:/source/root")));
+ EXPECT_EQ("../../../../path/to/foo",
+ RebasePath("/c:/path/to/foo", SourceDir("//a/b"),
+ base::StringPiece("C:/source/root")));
+#endif
+}
+
+TEST(FilesystemUtils, DirectoryWithNoLastSlash) {
+ EXPECT_EQ("", DirectoryWithNoLastSlash(SourceDir()));
+ EXPECT_EQ("/.", DirectoryWithNoLastSlash(SourceDir("/")));
+ EXPECT_EQ("//.", DirectoryWithNoLastSlash(SourceDir("//")));
+ EXPECT_EQ("//foo", DirectoryWithNoLastSlash(SourceDir("//foo/")));
+ EXPECT_EQ("/bar", DirectoryWithNoLastSlash(SourceDir("/bar/")));
+}
+
+TEST(FilesystemUtils, SourceDirForPath) {
+#if defined(OS_WIN)
+ base::FilePath root(L"C:\\source\\foo\\");
+ EXPECT_EQ("/C:/foo/bar/", SourceDirForPath(root,
+ base::FilePath(L"C:\\foo\\bar")).value());
+ EXPECT_EQ("/", SourceDirForPath(root,
+ base::FilePath(L"/")).value());
+ EXPECT_EQ("//", SourceDirForPath(root,
+ base::FilePath(L"C:\\source\\foo")).value());
+ EXPECT_EQ("//bar/", SourceDirForPath(root,
+ base::FilePath(L"C:\\source\\foo\\bar\\")). value());
+ EXPECT_EQ("//bar/baz/", SourceDirForPath(root,
+ base::FilePath(L"C:\\source\\foo\\bar\\baz")).value());
+
+ // Should be case-and-slash-insensitive.
+ EXPECT_EQ("//baR/", SourceDirForPath(root,
+ base::FilePath(L"c:/SOURCE\\Foo/baR/")).value());
+
+ // Some "weird" Windows paths.
+ EXPECT_EQ("/foo/bar/", SourceDirForPath(root,
+ base::FilePath(L"/foo/bar/")).value());
+ EXPECT_EQ("/C:/foo/bar/", SourceDirForPath(root,
+ base::FilePath(L"C:foo/bar/")).value());
+
+ // Also allow absolute GN-style Windows paths.
+ EXPECT_EQ("/C:/foo/bar/", SourceDirForPath(root,
+ base::FilePath(L"/C:/foo/bar")).value());
+ EXPECT_EQ("//bar/", SourceDirForPath(root,
+ base::FilePath(L"/C:/source/foo/bar")).value());
+
+ // Empty source dir.
+ base::FilePath empty;
+ EXPECT_EQ(
+ "/C:/source/foo/",
+ SourceDirForPath(empty, base::FilePath(L"C:\\source\\foo")).value());
+#else
+ base::FilePath root("/source/foo/");
+ EXPECT_EQ("/foo/bar/", SourceDirForPath(root,
+ base::FilePath("/foo/bar/")).value());
+ EXPECT_EQ("/", SourceDirForPath(root,
+ base::FilePath("/")).value());
+ EXPECT_EQ("//", SourceDirForPath(root,
+ base::FilePath("/source/foo")).value());
+ EXPECT_EQ("//bar/", SourceDirForPath(root,
+ base::FilePath("/source/foo/bar/")).value());
+ EXPECT_EQ("//bar/baz/", SourceDirForPath(root,
+ base::FilePath("/source/foo/bar/baz/")).value());
+
+ // Should be case-sensitive.
+ EXPECT_EQ("/SOURCE/foo/bar/", SourceDirForPath(root,
+ base::FilePath("/SOURCE/foo/bar/")).value());
+
+ // Empty source dir.
+ base::FilePath empty;
+ EXPECT_EQ("/source/foo/",
+ SourceDirForPath(empty, base::FilePath("/source/foo")).value());
+#endif
+}
+
+TEST(FilesystemUtils, ContentsEqual) {
+ base::ScopedTempDir temp_dir;
+ ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
+
+ std::string data = "foo";
+
+ base::FilePath file_path = temp_dir.path().AppendASCII("foo.txt");
+ base::WriteFile(file_path, data.c_str(), static_cast<int>(data.size()));
+
+ EXPECT_TRUE(ContentsEqual(file_path, data));
+
+ // Different length and contents.
+ data += "bar";
+ EXPECT_FALSE(ContentsEqual(file_path, data));
+
+ // The same length, different contents.
+ EXPECT_FALSE(ContentsEqual(file_path, "bar"));
+}
+
+TEST(FilesystemUtils, WriteFileIfChanged) {
+ base::ScopedTempDir temp_dir;
+ ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
+
+ std::string data = "foo";
+
+ // Write if file doesn't exist. Create also directory.
+ base::FilePath file_path =
+ temp_dir.path().AppendASCII("bar").AppendASCII("foo.txt");
+ EXPECT_TRUE(WriteFileIfChanged(file_path, data, nullptr));
+
+ base::File::Info file_info;
+ ASSERT_TRUE(base::GetFileInfo(file_path, &file_info));
+ base::Time last_modified = file_info.last_modified;
+
+#if defined(OS_MACOSX)
+ // Modification times are in seconds in HFS on Mac.
+ base::TimeDelta sleep_time = base::TimeDelta::FromSeconds(1);
+#else
+ base::TimeDelta sleep_time = base::TimeDelta::FromMilliseconds(1);
+#endif
+ base::PlatformThread::Sleep(sleep_time);
+
+ // Don't write if contents is the same.
+ EXPECT_TRUE(WriteFileIfChanged(file_path, data, nullptr));
+ ASSERT_TRUE(base::GetFileInfo(file_path, &file_info));
+ EXPECT_EQ(last_modified, file_info.last_modified);
+
+ // Write if contents changed.
+ EXPECT_TRUE(WriteFileIfChanged(file_path, "bar", nullptr));
+ std::string file_data;
+ ASSERT_TRUE(base::ReadFileToString(file_path, &file_data));
+ EXPECT_EQ("bar", file_data);
+}
+
+TEST(FilesystemUtils, GetToolchainDirs) {
+ BuildSettings build_settings;
+ build_settings.SetBuildDir(SourceDir("//out/Debug/"));
+
+ // The default toolchain.
+ Settings default_settings(&build_settings, "");
+ Label default_toolchain_label(SourceDir("//toolchain/"), "default");
+ default_settings.set_toolchain_label(default_toolchain_label);
+ default_settings.set_default_toolchain_label(default_toolchain_label);
+
+ // Default toolchain out dir.
+ EXPECT_EQ("//out/Debug/",
+ GetToolchainOutputDir(&default_settings).value());
+ EXPECT_EQ("//out/Debug/",
+ GetToolchainOutputDir(&build_settings, default_toolchain_label,
+ true).value());
+
+ // Default toolchain gen dir.
+ EXPECT_EQ("//out/Debug/gen/",
+ GetToolchainGenDir(&default_settings).value());
+ EXPECT_EQ("gen/",
+ GetToolchainGenDirAsOutputFile(&default_settings).value());
+ EXPECT_EQ("//out/Debug/gen/",
+ GetToolchainGenDir(&build_settings, default_toolchain_label,
+ true).value());
+
+ // Check a secondary toolchain.
+ Settings other_settings(&build_settings, "two/");
+ Label other_toolchain_label(SourceDir("//toolchain/"), "two");
+ default_settings.set_toolchain_label(other_toolchain_label);
+ default_settings.set_default_toolchain_label(default_toolchain_label);
+
+ // Secondary toolchain out dir.
+ EXPECT_EQ("//out/Debug/two/",
+ GetToolchainOutputDir(&other_settings).value());
+ EXPECT_EQ("//out/Debug/two/",
+ GetToolchainOutputDir(&build_settings, other_toolchain_label,
+ false).value());
+
+ // Secondary toolchain gen dir.
+ EXPECT_EQ("//out/Debug/two/gen/",
+ GetToolchainGenDir(&other_settings).value());
+ EXPECT_EQ("two/gen/",
+ GetToolchainGenDirAsOutputFile(&other_settings).value());
+ EXPECT_EQ("//out/Debug/two/gen/",
+ GetToolchainGenDir(&build_settings, other_toolchain_label,
+ false).value());
+}
+
+TEST(FilesystemUtils, GetOutDirForSourceDir) {
+ BuildSettings build_settings;
+ build_settings.SetBuildDir(SourceDir("//out/Debug/"));
+
+ // Test the default toolchain.
+ Label default_toolchain_label(SourceDir("//toolchain/"), "default");
+ Settings default_settings(&build_settings, "");
+ default_settings.set_toolchain_label(default_toolchain_label);
+ default_settings.set_default_toolchain_label(default_toolchain_label);
+ EXPECT_EQ("//out/Debug/obj/",
+ GetOutputDirForSourceDir(
+ &default_settings, SourceDir("//")).value());
+ EXPECT_EQ("obj/",
+ GetOutputDirForSourceDirAsOutputFile(
+ &default_settings, SourceDir("//")).value());
+
+ EXPECT_EQ("//out/Debug/obj/foo/bar/",
+ GetOutputDirForSourceDir(
+ &default_settings, SourceDir("//foo/bar/")).value());
+ EXPECT_EQ("obj/foo/bar/",
+ GetOutputDirForSourceDirAsOutputFile(
+ &default_settings, SourceDir("//foo/bar/")).value());
+
+ // Secondary toolchain.
+ Settings other_settings(&build_settings, "two/");
+ other_settings.set_toolchain_label(Label(SourceDir("//toolchain/"), "two"));
+ other_settings.set_default_toolchain_label(default_toolchain_label);
+ EXPECT_EQ("//out/Debug/two/obj/",
+ GetOutputDirForSourceDir(
+ &other_settings, SourceDir("//")).value());
+ EXPECT_EQ("two/obj/",
+ GetOutputDirForSourceDirAsOutputFile(
+ &other_settings, SourceDir("//")).value());
+
+ EXPECT_EQ("//out/Debug/two/obj/foo/bar/",
+ GetOutputDirForSourceDir(&other_settings,
+ SourceDir("//foo/bar/")).value());
+ EXPECT_EQ("two/obj/foo/bar/",
+ GetOutputDirForSourceDirAsOutputFile(
+ &other_settings, SourceDir("//foo/bar/")).value());
+
+ // Absolute source path
+ EXPECT_EQ("//out/Debug/obj/ABS_PATH/abs/",
+ GetOutputDirForSourceDir(
+ &default_settings, SourceDir("/abs")).value());
+ EXPECT_EQ("obj/ABS_PATH/abs/",
+ GetOutputDirForSourceDirAsOutputFile(
+ &default_settings, SourceDir("/abs")).value());
+#if defined(OS_WIN)
+ EXPECT_EQ("//out/Debug/obj/ABS_PATH/C/abs/",
+ GetOutputDirForSourceDir(
+ &default_settings, SourceDir("/C:/abs")).value());
+ EXPECT_EQ("obj/ABS_PATH/C/abs/",
+ GetOutputDirForSourceDirAsOutputFile(
+ &default_settings, SourceDir("/C:/abs")).value());
+#endif
+}
+
+TEST(FilesystemUtils, GetGenDirForSourceDir) {
+ BuildSettings build_settings;
+ build_settings.SetBuildDir(SourceDir("//out/Debug/"));
+
+ // Test the default toolchain.
+ Settings default_settings(&build_settings, "");
+ EXPECT_EQ("//out/Debug/gen/",
+ GetGenDirForSourceDir(
+ &default_settings, SourceDir("//")).value());
+ EXPECT_EQ("gen/",
+ GetGenDirForSourceDirAsOutputFile(
+ &default_settings, SourceDir("//")).value());
+
+ EXPECT_EQ("//out/Debug/gen/foo/bar/",
+ GetGenDirForSourceDir(
+ &default_settings, SourceDir("//foo/bar/")).value());
+ EXPECT_EQ("gen/foo/bar/",
+ GetGenDirForSourceDirAsOutputFile(
+ &default_settings, SourceDir("//foo/bar/")).value());
+
+ // Secondary toolchain.
+ Settings other_settings(&build_settings, "two/");
+ EXPECT_EQ("//out/Debug/two/gen/",
+ GetGenDirForSourceDir(
+ &other_settings, SourceDir("//")).value());
+ EXPECT_EQ("two/gen/",
+ GetGenDirForSourceDirAsOutputFile(
+ &other_settings, SourceDir("//")).value());
+
+ EXPECT_EQ("//out/Debug/two/gen/foo/bar/",
+ GetGenDirForSourceDir(
+ &other_settings, SourceDir("//foo/bar/")).value());
+ EXPECT_EQ("two/gen/foo/bar/",
+ GetGenDirForSourceDirAsOutputFile(
+ &other_settings, SourceDir("//foo/bar/")).value());
+}
+
+TEST(FilesystemUtils, GetTargetDirs) {
+ BuildSettings build_settings;
+ build_settings.SetBuildDir(SourceDir("//out/Debug/"));
+ Settings settings(&build_settings, "");
+
+ Target a(&settings, Label(SourceDir("//foo/bar/"), "baz"));
+ EXPECT_EQ("//out/Debug/obj/foo/bar/", GetTargetOutputDir(&a).value());
+ EXPECT_EQ("obj/foo/bar/", GetTargetOutputDirAsOutputFile(&a).value());
+ EXPECT_EQ("//out/Debug/gen/foo/bar/", GetTargetGenDir(&a).value());
+ EXPECT_EQ("gen/foo/bar/", GetTargetGenDirAsOutputFile(&a).value());
+}
+
+// Tests handling of output dirs when build dir is the same as the root.
+TEST(FilesystemUtils, GetDirForEmptyBuildDir) {
+ BuildSettings build_settings;
+ build_settings.SetBuildDir(SourceDir("//"));
+ Settings settings(&build_settings, "");
+
+ EXPECT_EQ("//", GetToolchainOutputDir(&settings).value());
+ EXPECT_EQ("//gen/", GetToolchainGenDir(&settings).value());
+ EXPECT_EQ("gen/", GetToolchainGenDirAsOutputFile(&settings).value());
+ EXPECT_EQ("//obj/",
+ GetOutputDirForSourceDir(&settings, SourceDir("//")).value());
+ EXPECT_EQ("obj/",
+ GetOutputDirForSourceDirAsOutputFile(
+ &settings, SourceDir("//")).value());
+ EXPECT_EQ("gen/",
+ GetGenDirForSourceDirAsOutputFile(
+ &settings, SourceDir("//")).value());
+}
diff --git a/chromium/tools/gn/format_test_data/001.gn b/chromium/tools/gn/format_test_data/001.gn
new file mode 100644
index 00000000000..c35c27951fd
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/001.gn
@@ -0,0 +1,2 @@
+# Test.
+executable("test"){}
diff --git a/chromium/tools/gn/format_test_data/001.golden b/chromium/tools/gn/format_test_data/001.golden
new file mode 100644
index 00000000000..31c9069d2b2
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/001.golden
@@ -0,0 +1,3 @@
+# Test.
+executable("test") {
+}
diff --git a/chromium/tools/gn/format_test_data/002.gn b/chromium/tools/gn/format_test_data/002.gn
new file mode 100644
index 00000000000..34043b5ed30
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/002.gn
@@ -0,0 +1,6 @@
+executable("test") {
+ sources = [
+ "stuff.cc",
+ "things.cc"
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/002.golden b/chromium/tools/gn/format_test_data/002.golden
new file mode 100644
index 00000000000..cd8f38810c4
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/002.golden
@@ -0,0 +1,6 @@
+executable("test") {
+ sources = [
+ "stuff.cc",
+ "things.cc",
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/003.gn b/chromium/tools/gn/format_test_data/003.gn
new file mode 100644
index 00000000000..429165a285c
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/003.gn
@@ -0,0 +1,10 @@
+executable("test") {
+ sources = [
+ "stuff.cc",
+ "things.cc"
+ ]
+
+ deps = [
+ "//base",
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/003.golden b/chromium/tools/gn/format_test_data/003.golden
new file mode 100644
index 00000000000..2fdcb9e38ba
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/003.golden
@@ -0,0 +1,10 @@
+executable("test") {
+ sources = [
+ "stuff.cc",
+ "things.cc",
+ ]
+
+ deps = [
+ "//base",
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/004.gn b/chromium/tools/gn/format_test_data/004.gn
new file mode 100644
index 00000000000..f36d039cf4c
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/004.gn
@@ -0,0 +1,10 @@
+# This is a block comment that goes at the top of the file and is attached to
+# the top level target.
+executable("test") {
+ sources = [
+ "stuff.cc",# Comment attached to list item.
+ "things.cc"
+ ]
+ # Comment attached to statement.
+ deps = [ "//base", ]
+}
diff --git a/chromium/tools/gn/format_test_data/004.golden b/chromium/tools/gn/format_test_data/004.golden
new file mode 100644
index 00000000000..68373d10144
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/004.golden
@@ -0,0 +1,13 @@
+# This is a block comment that goes at the top of the file and is attached to
+# the top level target.
+executable("test") {
+ sources = [
+ "stuff.cc", # Comment attached to list item.
+ "things.cc",
+ ]
+
+ # Comment attached to statement.
+ deps = [
+ "//base",
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/005.gn b/chromium/tools/gn/format_test_data/005.gn
new file mode 100644
index 00000000000..07b4ef372c0
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/005.gn
@@ -0,0 +1,5 @@
+# This is a separated block comment that mustn't be attached the to target
+# below, and should be separated by a single blank line.
+
+executable("test") {
+}
diff --git a/chromium/tools/gn/format_test_data/005.golden b/chromium/tools/gn/format_test_data/005.golden
new file mode 100644
index 00000000000..07b4ef372c0
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/005.golden
@@ -0,0 +1,5 @@
+# This is a separated block comment that mustn't be attached the to target
+# below, and should be separated by a single blank line.
+
+executable("test") {
+}
diff --git a/chromium/tools/gn/format_test_data/006.gn b/chromium/tools/gn/format_test_data/006.gn
new file mode 100644
index 00000000000..737fceaae0e
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/006.gn
@@ -0,0 +1,9 @@
+# This is a separated block comment that mustn't be attached the to target
+# below, and should be separated by a single blank line.
+
+
+
+
+
+executable("test") {
+}
diff --git a/chromium/tools/gn/format_test_data/006.golden b/chromium/tools/gn/format_test_data/006.golden
new file mode 100644
index 00000000000..07b4ef372c0
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/006.golden
@@ -0,0 +1,5 @@
+# This is a separated block comment that mustn't be attached the to target
+# below, and should be separated by a single blank line.
+
+executable("test") {
+}
diff --git a/chromium/tools/gn/format_test_data/007.gn b/chromium/tools/gn/format_test_data/007.gn
new file mode 100644
index 00000000000..3cd002d8606
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/007.gn
@@ -0,0 +1,9 @@
+executable("test") {
+ sources = ["a.cc"]
+
+ # This is an unusual comment that's a header for the stuff that comes after
+ # it. We want to make sure that it's not connected to the next element in
+ # the file.
+
+ cflags = ["-Wee"]
+}
diff --git a/chromium/tools/gn/format_test_data/007.golden b/chromium/tools/gn/format_test_data/007.golden
new file mode 100644
index 00000000000..610fa3fead5
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/007.golden
@@ -0,0 +1,11 @@
+executable("test") {
+ sources = [
+ "a.cc",
+ ]
+
+ # This is an unusual comment that's a header for the stuff that comes after
+ # it. We want to make sure that it's not connected to the next element in
+ # the file.
+
+ cflags = [ "-Wee" ]
+}
diff --git a/chromium/tools/gn/format_test_data/008.gn b/chromium/tools/gn/format_test_data/008.gn
new file mode 100644
index 00000000000..16b4a8e32fb
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/008.gn
@@ -0,0 +1 @@
+if (is_win) { sources = ["win.cc"] }
diff --git a/chromium/tools/gn/format_test_data/008.golden b/chromium/tools/gn/format_test_data/008.golden
new file mode 100644
index 00000000000..aec281ab952
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/008.golden
@@ -0,0 +1,5 @@
+if (is_win) {
+ sources = [
+ "win.cc",
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/009.gn b/chromium/tools/gn/format_test_data/009.gn
new file mode 100644
index 00000000000..e47b62175ae
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/009.gn
@@ -0,0 +1,2 @@
+if (is_win) { sources = ["win.cc"] }
+else { sources = ["linux.cc"] }
diff --git a/chromium/tools/gn/format_test_data/009.golden b/chromium/tools/gn/format_test_data/009.golden
new file mode 100644
index 00000000000..af27d797005
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/009.golden
@@ -0,0 +1,9 @@
+if (is_win) {
+ sources = [
+ "win.cc",
+ ]
+} else {
+ sources = [
+ "linux.cc",
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/010.gn b/chromium/tools/gn/format_test_data/010.gn
new file mode 100644
index 00000000000..70004a75158
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/010.gn
@@ -0,0 +1,2 @@
+if (is_win) { sources = ["win.cc"] }
+else if (is_linux) { sources = ["linux.cc"] }
diff --git a/chromium/tools/gn/format_test_data/010.golden b/chromium/tools/gn/format_test_data/010.golden
new file mode 100644
index 00000000000..64fabb9ebc5
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/010.golden
@@ -0,0 +1,9 @@
+if (is_win) {
+ sources = [
+ "win.cc",
+ ]
+} else if (is_linux) {
+ sources = [
+ "linux.cc",
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/011.gn b/chromium/tools/gn/format_test_data/011.gn
new file mode 100644
index 00000000000..c2c5bf755ea
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/011.gn
@@ -0,0 +1,4 @@
+if (is_win) { sources = ["win.cc"] }
+else if (is_linux) { sources = ["linux.cc"] }
+else { sources = ["wha.cc"] }
+
diff --git a/chromium/tools/gn/format_test_data/011.golden b/chromium/tools/gn/format_test_data/011.golden
new file mode 100644
index 00000000000..4ce009ea1dc
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/011.golden
@@ -0,0 +1,13 @@
+if (is_win) {
+ sources = [
+ "win.cc",
+ ]
+} else if (is_linux) {
+ sources = [
+ "linux.cc",
+ ]
+} else {
+ sources = [
+ "wha.cc",
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/012.gn b/chromium/tools/gn/format_test_data/012.gn
new file mode 100644
index 00000000000..1da385c00e8
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/012.gn
@@ -0,0 +1,16 @@
+# (A sample top level block comment)
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (is_win) {
+ # This is some special stuff for Windows
+ sources = ["win.cc"] } else if (is_linux) {
+
+ # This is a block comment inside the linux block, but not attached.
+
+ sources = ["linux.cc"]
+} else {
+ # A comment with trailing spaces
+sources = ["wha.cc"] }
+
diff --git a/chromium/tools/gn/format_test_data/012.golden b/chromium/tools/gn/format_test_data/012.golden
new file mode 100644
index 00000000000..a0049b2d55d
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/012.golden
@@ -0,0 +1,22 @@
+# (A sample top level block comment)
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (is_win) {
+ # This is some special stuff for Windows
+ sources = [
+ "win.cc",
+ ]
+} else if (is_linux) {
+ # This is a block comment inside the linux block, but not attached.
+
+ sources = [
+ "linux.cc",
+ ]
+} else {
+ # A comment with trailing spaces
+ sources = [
+ "wha.cc",
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/013.gn b/chromium/tools/gn/format_test_data/013.gn
new file mode 100644
index 00000000000..06183a82d48
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/013.gn
@@ -0,0 +1,7 @@
+defines = [
+ # Separate comment inside expression.
+
+ # Connected comment.
+ "WEE",
+ "BLORPY",
+]
diff --git a/chromium/tools/gn/format_test_data/013.golden b/chromium/tools/gn/format_test_data/013.golden
new file mode 100644
index 00000000000..d84b7d20f8e
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/013.golden
@@ -0,0 +1,7 @@
+defines = [
+ # Separate comment inside expression.
+
+ # Connected comment.
+ "WEE",
+ "BLORPY",
+]
diff --git a/chromium/tools/gn/format_test_data/014.gn b/chromium/tools/gn/format_test_data/014.gn
new file mode 100644
index 00000000000..2d0170d862a
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/014.gn
@@ -0,0 +1,6 @@
+defines = [
+
+ # Connected comment.
+ "WEE",
+ "BLORPY",
+]
diff --git a/chromium/tools/gn/format_test_data/014.golden b/chromium/tools/gn/format_test_data/014.golden
new file mode 100644
index 00000000000..c0ba5bb3981
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/014.golden
@@ -0,0 +1,5 @@
+defines = [
+ # Connected comment.
+ "WEE",
+ "BLORPY",
+]
diff --git a/chromium/tools/gn/format_test_data/015.gn b/chromium/tools/gn/format_test_data/015.gn
new file mode 100644
index 00000000000..f065095f656
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/015.gn
@@ -0,0 +1,4 @@
+if (is_win) {
+ sources = ["a.cc"]
+ # Some comment at end.
+}
diff --git a/chromium/tools/gn/format_test_data/015.golden b/chromium/tools/gn/format_test_data/015.golden
new file mode 100644
index 00000000000..553f01ce105
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/015.golden
@@ -0,0 +1,6 @@
+if (is_win) {
+ sources = [
+ "a.cc",
+ ]
+ # Some comment at end.
+}
diff --git a/chromium/tools/gn/format_test_data/016.gn b/chromium/tools/gn/format_test_data/016.gn
new file mode 100644
index 00000000000..00a79922828
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/016.gn
@@ -0,0 +1 @@
+something = !is_win && is_linux || is_mac && !(is_freebsd || is_ios)
diff --git a/chromium/tools/gn/format_test_data/016.golden b/chromium/tools/gn/format_test_data/016.golden
new file mode 100644
index 00000000000..3f4f15bc507
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/016.golden
@@ -0,0 +1 @@
+something = (!is_win && is_linux) || (is_mac && !(is_freebsd || is_ios))
diff --git a/chromium/tools/gn/format_test_data/017.gn b/chromium/tools/gn/format_test_data/017.gn
new file mode 100644
index 00000000000..225ae2fe30c
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/017.gn
@@ -0,0 +1,15 @@
+executable("win" # Suffix comment on arg.
+ # And a strangely positioned line comment for some reason
+ ) {
+ defines = [ # Defines comment, suffix at end position.
+ ]
+
+ deps = [
+ # Deps comment, should be forced multiline.
+ ]
+ sources = [
+ "a.cc"
+ # End of single sources comment, should be forced multiline.
+ ]
+ # End of block comment.
+}
diff --git a/chromium/tools/gn/format_test_data/017.golden b/chromium/tools/gn/format_test_data/017.golden
new file mode 100644
index 00000000000..d9d7ad9a472
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/017.golden
@@ -0,0 +1,16 @@
+executable("win" # Suffix comment on arg.
+
+ # And a strangely positioned line comment for some reason
+ ) {
+ defines = [] # Defines comment, suffix at end position.
+
+ deps = [
+ # Deps comment, should be forced multiline.
+ ]
+ sources = [
+ "a.cc",
+ # End of single sources comment, should be forced multiline.
+ ]
+
+ # End of block comment.
+}
diff --git a/chromium/tools/gn/format_test_data/018.gn b/chromium/tools/gn/format_test_data/018.gn
new file mode 100644
index 00000000000..e63ef9def15
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/018.gn
@@ -0,0 +1,3 @@
+# Don't crash when no block on a function call.
+
+import("wee.gni")
diff --git a/chromium/tools/gn/format_test_data/018.golden b/chromium/tools/gn/format_test_data/018.golden
new file mode 100644
index 00000000000..e63ef9def15
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/018.golden
@@ -0,0 +1,3 @@
+# Don't crash when no block on a function call.
+
+import("wee.gni")
diff --git a/chromium/tools/gn/format_test_data/019.gn b/chromium/tools/gn/format_test_data/019.gn
new file mode 100644
index 00000000000..48772b9ded8
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/019.gn
@@ -0,0 +1,23 @@
+# Make sure blank lines are maintained before comments in lists.
+
+deps = [
+ "//pdf", # Not compiled on Android in GYP yet, either.
+ "//ppapi:ppapi_c",
+ "//third_party/libusb",
+ "//ui/keyboard", # Blocked on content.
+
+ # Seems to not be compiled on Android. Otherwise it will need a config.h.
+ "//third_party/libxslt",
+
+ # Not relevant to Android.
+ "//ash",
+ "//tools/gn",
+
+ # Multiple line
+ # comment
+ # here.
+ "//ui/aura",
+ "//ui/display",
+ "//ui/views",
+ "//ui/views/controls/webview",
+]
diff --git a/chromium/tools/gn/format_test_data/019.golden b/chromium/tools/gn/format_test_data/019.golden
new file mode 100644
index 00000000000..c800ed1fe42
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/019.golden
@@ -0,0 +1,23 @@
+# Make sure blank lines are maintained before comments in lists.
+
+deps = [
+ "//pdf", # Not compiled on Android in GYP yet, either.
+ "//ppapi:ppapi_c",
+ "//third_party/libusb",
+ "//ui/keyboard", # Blocked on content.
+
+ # Seems to not be compiled on Android. Otherwise it will need a config.h.
+ "//third_party/libxslt",
+
+ # Not relevant to Android.
+ "//ash",
+ "//tools/gn",
+
+ # Multiple line
+ # comment
+ # here.
+ "//ui/aura",
+ "//ui/display",
+ "//ui/views",
+ "//ui/views/controls/webview",
+]
diff --git a/chromium/tools/gn/format_test_data/020.gn b/chromium/tools/gn/format_test_data/020.gn
new file mode 100644
index 00000000000..96de16448bc
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/020.gn
@@ -0,0 +1,5 @@
+cflags = [
+ "/wd4267", # size_t -> int.
+ "/wd4324", # Structure was padded due to __declspec(align()), which is
+ # uninteresting.
+]
diff --git a/chromium/tools/gn/format_test_data/020.golden b/chromium/tools/gn/format_test_data/020.golden
new file mode 100644
index 00000000000..96de16448bc
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/020.golden
@@ -0,0 +1,5 @@
+cflags = [
+ "/wd4267", # size_t -> int.
+ "/wd4324", # Structure was padded due to __declspec(align()), which is
+ # uninteresting.
+]
diff --git a/chromium/tools/gn/format_test_data/021.gn b/chromium/tools/gn/format_test_data/021.gn
new file mode 100644
index 00000000000..355735db135
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/021.gn
@@ -0,0 +1,33 @@
+f(aaaaaaaaaaaaaaaaaaa)
+
+f(aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaa)
+
+# Exactly 80 wide.
+f(aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaa, aaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaa, aaaaa)
+
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa)
+
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa)
+
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa, aaaaaaaaa)
+
+aaaaaaaaaa(aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaa + aaaaaaaaaa.aaaaaaaaaaaaaaa)
+
+# 80 ---------------------------------------------------------------------------
+f(aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaa)
+
+aaaaaaa(aaaaaaaaaaaaa, aaaaaaaaaaaaa, aaaaaaaaaaaaa(aaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaa))
+
+# 80 ---------------------------------------------------------------------------
+f(aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa)
+
+aaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaa, aaaaaaaaaaaa)
+
+# 80 ---------------------------------------------------------------------------
+aaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaa, aaaaaaaaaaaa)
+
+aaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaa)
+
+aaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaa)
+
+somefunction(someotherFunction(ddddddddddddddddddddddddddddddddddd, ddddddddddddddddddddddddddddd), test)
diff --git a/chromium/tools/gn/format_test_data/021.golden b/chromium/tools/gn/format_test_data/021.golden
new file mode 100644
index 00000000000..f17c5fd83c6
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/021.golden
@@ -0,0 +1,61 @@
+f(aaaaaaaaaaaaaaaaaaa)
+
+f(aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaa)
+
+# Exactly 80 wide.
+f(aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaa, aaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaa, aaaaa)
+
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa)
+
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa)
+
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa,
+ aaaaaaaaa)
+
+aaaaaaaaaa(
+ aaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaa + aaaaaaaaaa.aaaaaaaaaaaaaaa)
+
+# 80 ---------------------------------------------------------------------------
+f(aaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaa)
+
+aaaaaaa(aaaaaaaaaaaaa,
+ aaaaaaaaaaaaa,
+ aaaaaaaaaaaaa(aaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaa))
+
+# 80 ---------------------------------------------------------------------------
+f(aaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaa +
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa)
+
+aaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaa,
+ aaaaaaaaaaaa)
+
+# 80 ---------------------------------------------------------------------------
+aaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaa,
+ aaaaaaaaaaaa)
+
+aaaaaaaaaaaa(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaa)
+
+aaaaaaaaaaaa(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaa)
+
+somefunction(someotherFunction(ddddddddddddddddddddddddddddddddddd,
+ ddddddddddddddddddddddddddddd),
+ test)
diff --git a/chromium/tools/gn/format_test_data/022.gn b/chromium/tools/gn/format_test_data/022.gn
new file mode 100644
index 00000000000..a67ed24d6bb
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/022.gn
@@ -0,0 +1,6 @@
+executable(something[0]) {
+ if (weeeeee.stuff) {
+ x = a.b
+ y = a[8]
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/022.golden b/chromium/tools/gn/format_test_data/022.golden
new file mode 100644
index 00000000000..a67ed24d6bb
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/022.golden
@@ -0,0 +1,6 @@
+executable(something[0]) {
+ if (weeeeee.stuff) {
+ x = a.b
+ y = a[8]
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/023.gn b/chromium/tools/gn/format_test_data/023.gn
new file mode 100644
index 00000000000..8a1b51926b7
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/023.gn
@@ -0,0 +1,38 @@
+f(aaaaaaaaaaaaaaaaaaa) {}
+
+f(aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaa) {}
+
+# 80 ---------------------------------------------------------------------------
+f(aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaa, aaaaaaaaaaaaaaaa, aaaaaaaaaaaaa, aaaaaa) {}
+
+# 80 ---------------------------------------------------------------------------
+f(aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaa, aaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaa, aaaaa) {}
+
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa) {}
+
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa) {}
+
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa) {}
+
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa, aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa, aaaaaaaaa) {}
+
+aaaaaaaaaa(aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaa + aaaaaaaaaa.aaaaaaaaaaaaaaa) {}
+
+# 80 ---------------------------------------------------------------------------
+f(aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaa) {}
+
+aaaaaaa(aaaaaaaaaaaaa, aaaaaaaaaaaaa, aaaaaaaaaaaaa(aaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaa)) {}
+
+# 80 ---------------------------------------------------------------------------
+f(aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa) {}
+
+aaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaa, aaaaaaaaaaaa) {}
+
+# 80 ---------------------------------------------------------------------------
+aaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaa, aaaaaaaaaaaa) {}
+
+aaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaa) {}
+
+aaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaa) {}
+
+somefunction(someotherFunction(ddddddddddddddddddddddddddddddddddd, ddddddddddddddddddddddddddddd), test) {}
diff --git a/chromium/tools/gn/format_test_data/023.golden b/chromium/tools/gn/format_test_data/023.golden
new file mode 100644
index 00000000000..bab6e45eda3
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/023.golden
@@ -0,0 +1,88 @@
+f(aaaaaaaaaaaaaaaaaaa) {
+}
+
+f(aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaa) {
+}
+
+# 80 ---------------------------------------------------------------------------
+f(aaaaaaaaaaaaaaaaaaa, aaaaaaaaaaaaa, aaaaaaaaaaaaaaaa, aaaaaaaaaaaaa, aaaaaa) {
+}
+
+# 80 ---------------------------------------------------------------------------
+f(aaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaa,
+ aaaaa) {
+}
+
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa) {
+}
+
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa) {
+}
+
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa) {
+}
+
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaa,
+ aaaaaaaaa) {
+}
+
+aaaaaaaaaa(
+ aaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaa + aaaaaaaaaa.aaaaaaaaaaaaaaa) {
+}
+
+# 80 ---------------------------------------------------------------------------
+f(aaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaa) {
+}
+
+aaaaaaa(aaaaaaaaaaaaa,
+ aaaaaaaaaaaaa,
+ aaaaaaaaaaaaa(aaaaaaaaaaaaaaaaa, aaaaaaaaaaaaaaaa)) {
+}
+
+# 80 ---------------------------------------------------------------------------
+f(aaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaaaaaaaaaaaa +
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa) {
+}
+
+aaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaa + aaaaaaaaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaa,
+ aaaaaaaaaaaa) {
+}
+
+# 80 ---------------------------------------------------------------------------
+aaaaaaaaaaaa(aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa +
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaa,
+ aaaaaaaaaaaa) {
+}
+
+aaaaaaaaaaaa(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaa) {
+}
+
+aaaaaaaaaaaa(
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,
+ aaaaaaaaaaaa) {
+}
+
+somefunction(someotherFunction(ddddddddddddddddddddddddddddddddddd,
+ ddddddddddddddddddddddddddddd),
+ test) {
+}
diff --git a/chromium/tools/gn/format_test_data/024.gn b/chromium/tools/gn/format_test_data/024.gn
new file mode 100644
index 00000000000..5034cdc84e7
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/024.gn
@@ -0,0 +1 @@
+somefunc(){}
diff --git a/chromium/tools/gn/format_test_data/024.golden b/chromium/tools/gn/format_test_data/024.golden
new file mode 100644
index 00000000000..f2c755dcb8b
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/024.golden
@@ -0,0 +1,2 @@
+somefunc() {
+}
diff --git a/chromium/tools/gn/format_test_data/025.gn b/chromium/tools/gn/format_test_data/025.gn
new file mode 100644
index 00000000000..959ec8a3c52
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/025.gn
@@ -0,0 +1,5 @@
+# Various parenthesis maintenance/trimming.
+if ((a.b && c[d] + ((x < 4 || ((z + b)))))) {
+ y = z - (y - (x - !(b-d)))
+ a += ["a", "b", "c"] - (["x"] - ["y"])
+}
diff --git a/chromium/tools/gn/format_test_data/025.golden b/chromium/tools/gn/format_test_data/025.golden
new file mode 100644
index 00000000000..e6d08692a4f
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/025.golden
@@ -0,0 +1,9 @@
+# Various parenthesis maintenance/trimming.
+if (a.b && c[d] + (x < 4 || z + b)) {
+ y = z - (y - (x - !(b - d)))
+ a += [
+ "a",
+ "b",
+ "c",
+ ] - ([ "x" ] - [ "y" ])
+}
diff --git a/chromium/tools/gn/format_test_data/026.gn b/chromium/tools/gn/format_test_data/026.gn
new file mode 100644
index 00000000000..8cf2028a72a
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/026.gn
@@ -0,0 +1,6 @@
+# 80 ---------------------------------------------------------------------------
+args = [
+ rebase_path("$target_gen_dir/experimental-libraries.cc", root_build_dir),
+ "EXPERIMENTAL",
+ v8_compress_startup_data
+] + rebase_path(sources, root_build_dir)
diff --git a/chromium/tools/gn/format_test_data/026.golden b/chromium/tools/gn/format_test_data/026.golden
new file mode 100644
index 00000000000..a1d1d3f936d
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/026.golden
@@ -0,0 +1,7 @@
+# 80 ---------------------------------------------------------------------------
+args =
+ [
+ rebase_path("$target_gen_dir/experimental-libraries.cc", root_build_dir),
+ "EXPERIMENTAL",
+ v8_compress_startup_data,
+ ] + rebase_path(sources, root_build_dir)
diff --git a/chromium/tools/gn/format_test_data/027.gn b/chromium/tools/gn/format_test_data/027.gn
new file mode 100644
index 00000000000..cc5fe5fa942
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/027.gn
@@ -0,0 +1,3 @@
+# 80 ---------------------------------------------------------------------------
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa = [
+"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "aaaaaaaaaaaaaaaaaaaaaaaaa"]
diff --git a/chromium/tools/gn/format_test_data/027.golden b/chromium/tools/gn/format_test_data/027.golden
new file mode 100644
index 00000000000..05f0eb57931
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/027.golden
@@ -0,0 +1,5 @@
+# 80 ---------------------------------------------------------------------------
+aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa = [
+ "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
+ "aaaaaaaaaaaaaaaaaaaaaaaaa",
+]
diff --git a/chromium/tools/gn/format_test_data/028.gn b/chromium/tools/gn/format_test_data/028.gn
new file mode 100644
index 00000000000..d84e1f8c957
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/028.gn
@@ -0,0 +1,9 @@
+# Don't separate these.
+import("wee.gni")
+import("waa.gni")
+
+import("woo.gni")
+
+
+
+import("blah.gni")
diff --git a/chromium/tools/gn/format_test_data/028.golden b/chromium/tools/gn/format_test_data/028.golden
new file mode 100644
index 00000000000..a1d54c550a2
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/028.golden
@@ -0,0 +1,7 @@
+# Don't separate these.
+import("wee.gni")
+import("waa.gni")
+
+import("woo.gni")
+
+import("blah.gni")
diff --git a/chromium/tools/gn/format_test_data/029.gn b/chromium/tools/gn/format_test_data/029.gn
new file mode 100644
index 00000000000..ac67830164e
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/029.gn
@@ -0,0 +1,9 @@
+# Don't separate small simple statements.
+is_android = false
+is_chromeos = false
+is_ios = false
+is_linux -= false
+is_mac = true
+is_nacl = false
+is_posix += true
+is_win = false
diff --git a/chromium/tools/gn/format_test_data/029.golden b/chromium/tools/gn/format_test_data/029.golden
new file mode 100644
index 00000000000..ac67830164e
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/029.golden
@@ -0,0 +1,9 @@
+# Don't separate small simple statements.
+is_android = false
+is_chromeos = false
+is_ios = false
+is_linux -= false
+is_mac = true
+is_nacl = false
+is_posix += true
+is_win = false
diff --git a/chromium/tools/gn/format_test_data/030.gn b/chromium/tools/gn/format_test_data/030.gn
new file mode 100644
index 00000000000..adac9a82416
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/030.gn
@@ -0,0 +1,12 @@
+# Don't separate simple statements in a scope.
+
+import("//testing/test.gni")
+
+test("something") {
+ if (is_linux) {
+ sources -= [ "file_version_info_unittest.cc" ]
+ sources += [ "nix/xdg_util_unittest.cc" ]
+ defines = [ "USE_SYMBOLIZE" ]
+ configs += [ "//build/config/linux:glib" ]
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/030.golden b/chromium/tools/gn/format_test_data/030.golden
new file mode 100644
index 00000000000..adac9a82416
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/030.golden
@@ -0,0 +1,12 @@
+# Don't separate simple statements in a scope.
+
+import("//testing/test.gni")
+
+test("something") {
+ if (is_linux) {
+ sources -= [ "file_version_info_unittest.cc" ]
+ sources += [ "nix/xdg_util_unittest.cc" ]
+ defines = [ "USE_SYMBOLIZE" ]
+ configs += [ "//build/config/linux:glib" ]
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/031.gn b/chromium/tools/gn/format_test_data/031.gn
new file mode 100644
index 00000000000..d83d4234a70
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/031.gn
@@ -0,0 +1,8 @@
+deps += [
+ ":packed_extra_resources",
+ ":packed_resources",
+
+ # This shouldn't crash.
+
+ # This shouldn't crash 2.
+ ]
diff --git a/chromium/tools/gn/format_test_data/031.golden b/chromium/tools/gn/format_test_data/031.golden
new file mode 100644
index 00000000000..6653951f16a
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/031.golden
@@ -0,0 +1,8 @@
+deps += [
+ ":packed_extra_resources",
+ ":packed_resources",
+
+ # This shouldn't crash.
+
+ # This shouldn't crash 2.
+]
diff --git a/chromium/tools/gn/format_test_data/032.gn b/chromium/tools/gn/format_test_data/032.gn
new file mode 100644
index 00000000000..d7ea7e5f8f4
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/032.gn
@@ -0,0 +1,6 @@
+# Make sure continued conditions are aligned.
+if (something) {
+ if (false) {
+ } else if (is_linux && !is_android && current_cpu == "x64" && !disable_iterator_debugging) {
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/032.golden b/chromium/tools/gn/format_test_data/032.golden
new file mode 100644
index 00000000000..aeca8963933
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/032.golden
@@ -0,0 +1,7 @@
+# Make sure continued conditions are aligned.
+if (something) {
+ if (false) {
+ } else if (is_linux && !is_android && current_cpu == "x64" &&
+ !disable_iterator_debugging) {
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/033.gn b/chromium/tools/gn/format_test_data/033.gn
new file mode 100644
index 00000000000..6767acd6b93
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/033.gn
@@ -0,0 +1,8 @@
+# Don't attach trailing comments too far back.
+if (!is_android) {
+ source_set("tcmalloc") {
+ if (is_win) {
+ ldflags = [ "/ignore:4006:4221" ]
+ } # is_win
+ } # source_set
+} # !is_android
diff --git a/chromium/tools/gn/format_test_data/033.golden b/chromium/tools/gn/format_test_data/033.golden
new file mode 100644
index 00000000000..6767acd6b93
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/033.golden
@@ -0,0 +1,8 @@
+# Don't attach trailing comments too far back.
+if (!is_android) {
+ source_set("tcmalloc") {
+ if (is_win) {
+ ldflags = [ "/ignore:4006:4221" ]
+ } # is_win
+ } # source_set
+} # !is_android
diff --git a/chromium/tools/gn/format_test_data/034.gn b/chromium/tools/gn/format_test_data/034.gn
new file mode 100644
index 00000000000..33f5eadceb5
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/034.gn
@@ -0,0 +1,13 @@
+# Special case for 'args': If args[N] starts with '-' and args[N+1] is a call to
+# rebase_path, keep them as a pair, rather than breaking into individual items.
+action("wee") {
+ if (something) {
+ args = [
+ "--depfile", rebase_path(depfile, root_build_dir),
+ "--android-sdk", rebase_path(android_sdk, root_build_dir),
+ "--android-sdk-tools",
+ rebase_path(android_sdk_build_tools, root_build_dir),
+ "--android-manifest", rebase_path(android_manifest, root_build_dir),
+ ]
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/035.gn b/chromium/tools/gn/format_test_data/035.gn
new file mode 100644
index 00000000000..70bc1a9eca0
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/035.gn
@@ -0,0 +1 @@
+import("//build/config/sysroot.gni") # Imports android/config.gni.
diff --git a/chromium/tools/gn/format_test_data/035.golden b/chromium/tools/gn/format_test_data/035.golden
new file mode 100644
index 00000000000..70bc1a9eca0
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/035.golden
@@ -0,0 +1 @@
+import("//build/config/sysroot.gni") # Imports android/config.gni.
diff --git a/chromium/tools/gn/format_test_data/036.gn b/chromium/tools/gn/format_test_data/036.gn
new file mode 100644
index 00000000000..5a5eca8455c
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/036.gn
@@ -0,0 +1,9 @@
+import("a")
+import("b")
+
+assert(x)
+assert(y)
+assert(z)
+
+source_set("stuff") {
+}
diff --git a/chromium/tools/gn/format_test_data/036.golden b/chromium/tools/gn/format_test_data/036.golden
new file mode 100644
index 00000000000..5a5eca8455c
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/036.golden
@@ -0,0 +1,9 @@
+import("a")
+import("b")
+
+assert(x)
+assert(y)
+assert(z)
+
+source_set("stuff") {
+}
diff --git a/chromium/tools/gn/format_test_data/037.gn b/chromium/tools/gn/format_test_data/037.gn
new file mode 100644
index 00000000000..ebbf0f8fa9a
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/037.gn
@@ -0,0 +1,5 @@
+if (true) {
+ if (true) {
+ args = rebase_path(sources, root_build_dir) + rebase_path(outputs, root_build_dir)
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/037.golden b/chromium/tools/gn/format_test_data/037.golden
new file mode 100644
index 00000000000..71e95a4b581
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/037.golden
@@ -0,0 +1,6 @@
+if (true) {
+ if (true) {
+ args = rebase_path(sources, root_build_dir) +
+ rebase_path(outputs, root_build_dir)
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/038.gn b/chromium/tools/gn/format_test_data/038.gn
new file mode 100644
index 00000000000..ea06d93da70
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/038.gn
@@ -0,0 +1,4 @@
+if (stuff) {
+ # Blank line at EOF.
+}
+
diff --git a/chromium/tools/gn/format_test_data/038.golden b/chromium/tools/gn/format_test_data/038.golden
new file mode 100644
index 00000000000..f8c9a1b92b0
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/038.golden
@@ -0,0 +1,3 @@
+if (stuff) {
+ # Blank line at EOF.
+}
diff --git a/chromium/tools/gn/format_test_data/039.gn b/chromium/tools/gn/format_test_data/039.gn
new file mode 100644
index 00000000000..662b8cdb108
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/039.gn
@@ -0,0 +1,6 @@
+if (true) {
+ assert(arm_float_abi == "" ||
+ arm_float_abi == "hard" ||
+ arm_float_abi == "soft" ||
+ arm_float_abi == "softfp")
+}
diff --git a/chromium/tools/gn/format_test_data/039.golden b/chromium/tools/gn/format_test_data/039.golden
new file mode 100644
index 00000000000..b92b1ae838b
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/039.golden
@@ -0,0 +1,4 @@
+if (true) {
+ assert(arm_float_abi == "" || arm_float_abi == "hard" ||
+ arm_float_abi == "soft" || arm_float_abi == "softfp")
+}
diff --git a/chromium/tools/gn/format_test_data/040.gn b/chromium/tools/gn/format_test_data/040.gn
new file mode 100644
index 00000000000..1f4754e2794
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/040.gn
@@ -0,0 +1,9 @@
+# Dewrapping shouldn't cause 80 col to be exceed.
+# 80 ---------------------------------------------------------------------------
+if (true) {
+ if (is_win) {
+ cflags = [
+ "/wd4267", # TODO(jschuh): crbug.com/167187 fix size_t to int truncations.
+ ]
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/041.gn b/chromium/tools/gn/format_test_data/041.gn
new file mode 100644
index 00000000000..fa39b4923ff
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/041.gn
@@ -0,0 +1,12 @@
+if (true) {
+ a = [ "wee" ]
+
+ b = [
+ "x",
+ "y",
+ "z",
+ ]
+ c = [ "x" ]
+
+ d = [ "x" ]
+}
diff --git a/chromium/tools/gn/format_test_data/041.golden b/chromium/tools/gn/format_test_data/041.golden
new file mode 100644
index 00000000000..fa39b4923ff
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/041.golden
@@ -0,0 +1,12 @@
+if (true) {
+ a = [ "wee" ]
+
+ b = [
+ "x",
+ "y",
+ "z",
+ ]
+ c = [ "x" ]
+
+ d = [ "x" ]
+}
diff --git a/chromium/tools/gn/format_test_data/042.gn b/chromium/tools/gn/format_test_data/042.gn
new file mode 100644
index 00000000000..b827f29e070
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/042.gn
@@ -0,0 +1,44 @@
+# Test zero, one, and multiple element for specifically named LHSs.
+if (true) {
+ cflags = []
+ cflags_c = []
+ cflags_cc = []
+ data = []
+ datadeps = []
+ defines = []
+ deps = []
+ include_dirs = []
+ inputs = []
+ ldflags = []
+ outputs = []
+ public_deps = []
+ sources = []
+} else if (true) {
+ cflags = [ "x" ]
+ cflags_c = [ "x" ]
+ cflags_cc = [ "x" ]
+ data = [ "x" ]
+ datadeps = [ "x" ]
+ defines = [ "x" ]
+ deps = [ "x" ]
+ include_dirs = [ "x" ]
+ inputs = [ "x" ]
+ ldflags = [ "x" ]
+ outputs = [ "x" ]
+ public_deps = [ "x" ]
+ sources = [ "x" ]
+} else {
+ cflags = [ "x", "y", "z"]
+ cflags_c = [ "x", "y", "z"]
+ cflags_cc = [ "x", "y", "z"]
+ data = [ "x", "y", "z"]
+ datadeps = [ "x", "y", "z"]
+ defines = [ "x", "y", "z"]
+ deps = [ "x", "y", "z"]
+ include_dirs = [ "x", "y", "z"]
+ inputs = [ "x", "y", "z"]
+ ldflags = [ "x", "y", "z"]
+ outputs = [ "x", "y", "z"]
+ public_deps = [ "x", "y", "z"]
+ sources = [ "x", "y", "z"]
+}
diff --git a/chromium/tools/gn/format_test_data/042.golden b/chromium/tools/gn/format_test_data/042.golden
new file mode 100644
index 00000000000..1968d0acd94
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/042.golden
@@ -0,0 +1,110 @@
+# Test zero, one, and multiple element for specifically named LHSs.
+if (true) {
+ cflags = []
+ cflags_c = []
+ cflags_cc = []
+ data = []
+ datadeps = []
+ defines = []
+ deps = []
+ include_dirs = []
+ inputs = []
+ ldflags = []
+ outputs = []
+ public_deps = []
+ sources = []
+} else if (true) {
+ cflags = [ "x" ]
+ cflags_c = [ "x" ]
+ cflags_cc = [ "x" ]
+ data = [
+ "x",
+ ]
+ datadeps = [
+ "x",
+ ]
+ defines = [ "x" ]
+ deps = [
+ "x",
+ ]
+ include_dirs = [ "x" ]
+ inputs = [
+ "x",
+ ]
+ ldflags = [ "x" ]
+ outputs = [
+ "x",
+ ]
+ public_deps = [
+ "x",
+ ]
+ sources = [
+ "x",
+ ]
+} else {
+ cflags = [
+ "x",
+ "y",
+ "z",
+ ]
+ cflags_c = [
+ "x",
+ "y",
+ "z",
+ ]
+ cflags_cc = [
+ "x",
+ "y",
+ "z",
+ ]
+ data = [
+ "x",
+ "y",
+ "z",
+ ]
+ datadeps = [
+ "x",
+ "y",
+ "z",
+ ]
+ defines = [
+ "x",
+ "y",
+ "z",
+ ]
+ deps = [
+ "x",
+ "y",
+ "z",
+ ]
+ include_dirs = [
+ "x",
+ "y",
+ "z",
+ ]
+ inputs = [
+ "x",
+ "y",
+ "z",
+ ]
+ ldflags = [
+ "x",
+ "y",
+ "z",
+ ]
+ outputs = [
+ "x",
+ "y",
+ "z",
+ ]
+ public_deps = [
+ "x",
+ "y",
+ "z",
+ ]
+ sources = [
+ "x",
+ "y",
+ "z",
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/043.gn b/chromium/tools/gn/format_test_data/043.gn
new file mode 100644
index 00000000000..b95c6a52593
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/043.gn
@@ -0,0 +1,6 @@
+# Don't break and indent when it's hopeless.
+# 80 ---------------------------------------------------------------------------
+android_java_prebuilt("android_support_v13_java") {
+ jar_path = "$android_sdk_root/extras/android/support/v7/appcompat/libs/android-support-v7-appcompat.jar"
+ jar_path = "$android_sdk_root/extras/android/support/v13/android-support-v13.jar"
+}
diff --git a/chromium/tools/gn/format_test_data/043.golden b/chromium/tools/gn/format_test_data/043.golden
new file mode 100644
index 00000000000..336ec2fff1e
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/043.golden
@@ -0,0 +1,7 @@
+# Don't break and indent when it's hopeless.
+# 80 ---------------------------------------------------------------------------
+android_java_prebuilt("android_support_v13_java") {
+ jar_path = "$android_sdk_root/extras/android/support/v7/appcompat/libs/android-support-v7-appcompat.jar"
+ jar_path =
+ "$android_sdk_root/extras/android/support/v13/android-support-v13.jar"
+}
diff --git a/chromium/tools/gn/format_test_data/044.gn b/chromium/tools/gn/format_test_data/044.gn
new file mode 100644
index 00000000000..fe09617912d
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/044.gn
@@ -0,0 +1,10 @@
+config("compiler") {
+ if (is_win) {
+ if (is_component_build) {
+ cflags += [
+ "/EHsc", # Assume C functions can't throw exceptions and don't catch
+ # structured exceptions (only C++ ones).
+ ]
+ }
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/044.golden b/chromium/tools/gn/format_test_data/044.golden
new file mode 100644
index 00000000000..030c5dd5dc7
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/044.golden
@@ -0,0 +1,11 @@
+# 80 ---------------------------------------------------------------------------
+config("compiler") {
+ if (is_win) {
+ if (is_component_build) {
+ cflags += [
+ "/EHsc", # Assume C functions can't throw exceptions and don't catch
+ # structured exceptions (only C++ ones).
+ ]
+ }
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/045.gn b/chromium/tools/gn/format_test_data/045.gn
new file mode 100644
index 00000000000..28a7280d3ba
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/045.gn
@@ -0,0 +1,10 @@
+static_library("browser") {
+ if (!is_ios) {
+ sources += rebase_path(gypi_values.chrome_browser_predictor_sources,
+ ".", "//chrome")
+ sources += rebase_path(gypi_values.chrome_browser_predictor_sourcesaaaaaaaa,
+ ".", "//chrome")
+ sources += rebase_path(gypi_values.chrome_browser_predictor_sourcesaaaaaaaaa,
+ ".", "//chrome")
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/045.golden b/chromium/tools/gn/format_test_data/045.golden
new file mode 100644
index 00000000000..21c560a80f4
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/045.golden
@@ -0,0 +1,14 @@
+static_library("browser") {
+ if (!is_ios) {
+ sources += rebase_path(gypi_values.chrome_browser_predictor_sources,
+ ".",
+ "//chrome")
+ sources += rebase_path(gypi_values.chrome_browser_predictor_sourcesaaaaaaaa,
+ ".",
+ "//chrome")
+ sources +=
+ rebase_path(gypi_values.chrome_browser_predictor_sourcesaaaaaaaaa,
+ ".",
+ "//chrome")
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/046.gn b/chromium/tools/gn/format_test_data/046.gn
new file mode 100644
index 00000000000..28df74be66c
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/046.gn
@@ -0,0 +1,22 @@
+# 80 ---------------------------------------------------------------------------
+if (true) {
+ # The JavaScript files required by main.html.
+ remoting_webapp_main_html_js_files =
+ # Include the core files first as it is required by the other files.
+ # Otherwise, Jscompile will complain.
+ remoting_webapp_js_core_files +
+ remoting_webapp_js_auth_client2host_files +
+ remoting_webapp_js_auth_google_files +
+ remoting_webapp_js_client_files +
+ remoting_webapp_js_gnubby_auth_files +
+ remoting_webapp_js_cast_extension_files +
+ remoting_webapp_js_host_files +
+ remoting_webapp_js_logging_files +
+ remoting_webapp_js_ui_files +
+ remoting_webapp_js_ui_host_control_files +
+ remoting_webapp_js_ui_host_display_files +
+ remoting_webapp_js_wcs_container_files
+ # Uncomment this line to include browser test files in the web app
+ # to expedite debugging or local development.
+ #+ remoting_webapp_js_browser_test_files
+}
diff --git a/chromium/tools/gn/format_test_data/046.golden b/chromium/tools/gn/format_test_data/046.golden
new file mode 100644
index 00000000000..7d2a679c18d
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/046.golden
@@ -0,0 +1,19 @@
+# 80 ---------------------------------------------------------------------------
+if (true) {
+ # The JavaScript files required by main.html.
+ remoting_webapp_main_html_js_files =
+ # Include the core files first as it is required by the other files.
+ # Otherwise, Jscompile will complain.
+ remoting_webapp_js_core_files +
+ remoting_webapp_js_auth_client2host_files +
+ remoting_webapp_js_auth_google_files + remoting_webapp_js_client_files +
+ remoting_webapp_js_gnubby_auth_files +
+ remoting_webapp_js_cast_extension_files + remoting_webapp_js_host_files +
+ remoting_webapp_js_logging_files + remoting_webapp_js_ui_files +
+ remoting_webapp_js_ui_host_control_files +
+ remoting_webapp_js_ui_host_display_files +
+ remoting_webapp_js_wcs_container_files
+ # Uncomment this line to include browser test files in the web app
+ # to expedite debugging or local development.
+ #+ remoting_webapp_js_browser_test_files
+}
diff --git a/chromium/tools/gn/format_test_data/047.gn b/chromium/tools/gn/format_test_data/047.gn
new file mode 100644
index 00000000000..f1fdbeca8b5
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/047.gn
@@ -0,0 +1,7 @@
+if (true) {
+ args += [ "--template" ] +
+ rebase_path(remoting_webapp_template_files, template_rel_dir)
+ args += [ "--dir-for-templates",
+ rebase_path(template_rel_dir, root_build_dir) ]
+ args += ["--js"] + rebase_path(remoting_webapp_main_html_js_files, template_rel_dir_and_some_more)
+}
diff --git a/chromium/tools/gn/format_test_data/047.golden b/chromium/tools/gn/format_test_data/047.golden
new file mode 100644
index 00000000000..5217e762580
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/047.golden
@@ -0,0 +1,10 @@
+if (true) {
+ args += [ "--template" ] +
+ rebase_path(remoting_webapp_template_files, template_rel_dir)
+ args += [
+ "--dir-for-templates",
+ rebase_path(template_rel_dir, root_build_dir),
+ ]
+ args += [ "--js" ] + rebase_path(remoting_webapp_main_html_js_files,
+ template_rel_dir_and_some_more)
+}
diff --git a/chromium/tools/gn/format_test_data/048.gn b/chromium/tools/gn/format_test_data/048.gn
new file mode 100644
index 00000000000..7d39efb68c4
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/048.gn
@@ -0,0 +1,19 @@
+# No blank inserted after libs (caused by trailing comment on 'else').
+component("google_toolbox_for_mac") {
+ if (!is_ios) {
+ sources += [
+ "src/AddressBook/GTMABAddressBook.h",
+ "src/AddressBook/GTMABAddressBook.m",
+ ]
+
+ libs = [
+ "AddressBook.framework",
+ "AppKit.framework",
+ ]
+ } else { # is_ios
+ sources += [
+ "src/iPhone/GTMFadeTruncatingLabel.h",
+ "src/iPhone/GTMFadeTruncatingLabel.m",
+ ]
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/048.golden b/chromium/tools/gn/format_test_data/048.golden
new file mode 100644
index 00000000000..7d39efb68c4
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/048.golden
@@ -0,0 +1,19 @@
+# No blank inserted after libs (caused by trailing comment on 'else').
+component("google_toolbox_for_mac") {
+ if (!is_ios) {
+ sources += [
+ "src/AddressBook/GTMABAddressBook.h",
+ "src/AddressBook/GTMABAddressBook.m",
+ ]
+
+ libs = [
+ "AddressBook.framework",
+ "AppKit.framework",
+ ]
+ } else { # is_ios
+ sources += [
+ "src/iPhone/GTMFadeTruncatingLabel.h",
+ "src/iPhone/GTMFadeTruncatingLabel.m",
+ ]
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/049.gn b/chromium/tools/gn/format_test_data/049.gn
new file mode 100644
index 00000000000..fe793d2898a
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/049.gn
@@ -0,0 +1,14 @@
+func(aaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbb,
+ # Comment about function arg.
+ ccccccccccccccccccccccccccccc,
+ dddddddddddddddddddd)
+
+func(aaaaaaaaaaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbbbb,
+
+ # Block comment
+ # Comment about function arg.
+
+ ccccccccccccccccccccccccccccc,
+ dddddddddddddddddddd)
diff --git a/chromium/tools/gn/format_test_data/050.gn b/chromium/tools/gn/format_test_data/050.gn
new file mode 100644
index 00000000000..92a6d6d7d4b
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/050.gn
@@ -0,0 +1,10 @@
+# 80 ---------------------------------------------------------------------------
+zippy = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa - (bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb - cccccccccccccccccccccccc)
+
+zippy = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb + cccccccccccccccccccccccc
+
+zippy = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb + cccccccccccccccccccccccc + ddddddddddddddddddddd + eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee + ffffffffffffff(aaaaaaaaaaaaa, bbbbbbbbbbbbbbbbbbbbbbbb, ccccccccccccccccccc, ddddddddddddddddddd)
+
+zippy = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb + cccccccccccccccccccccccc + ddddddddddddddddddddd + eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee + ffffffffffffff(aaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbb + ccccccccccccccccccc + ddddddddddddddddddd)
+
+zippy = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb + cccccccccccccccccccccccc + ddddddddddddddddddddd + eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee + ffffffffffffff(aaaaaaaaaaaaa - (bbbbbbbbbbbbbbbbbbbbbbbb + ccccccccccccccccccc + ddddddddddddddddddd))
diff --git a/chromium/tools/gn/format_test_data/050.golden b/chromium/tools/gn/format_test_data/050.golden
new file mode 100644
index 00000000000..2645600c738
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/050.golden
@@ -0,0 +1,27 @@
+# 80 ---------------------------------------------------------------------------
+zippy = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa -
+ (bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb - cccccccccccccccccccccccc)
+
+zippy = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb +
+ cccccccccccccccccccccccc
+
+zippy = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb +
+ cccccccccccccccccccccccc + ddddddddddddddddddddd +
+ eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee +
+ ffffffffffffff(aaaaaaaaaaaaa,
+ bbbbbbbbbbbbbbbbbbbbbbbb,
+ ccccccccccccccccccc,
+ ddddddddddddddddddd)
+
+zippy = aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb +
+ cccccccccccccccccccccccc + ddddddddddddddddddddd +
+ eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee +
+ ffffffffffffff(aaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbb +
+ ccccccccccccccccccc + ddddddddddddddddddd)
+
+zippy =
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa + bbbbbbbbbbbbbbbbbbbbbbbbbbbbbb +
+ cccccccccccccccccccccccc + ddddddddddddddddddddd +
+ eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee +
+ ffffffffffffff(aaaaaaaaaaaaa - (bbbbbbbbbbbbbbbbbbbbbbbb +
+ ccccccccccccccccccc + ddddddddddddddddddd))
diff --git a/chromium/tools/gn/format_test_data/051.gn b/chromium/tools/gn/format_test_data/051.gn
new file mode 100644
index 00000000000..8076ec696d4
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/051.gn
@@ -0,0 +1,6 @@
+# 80 ---------------------------------------------------------------------------
+if (true) {
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa =
+ bbbbbbbbbbbbbbbbbbbb - bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb +
+ cccccccccccccccccccccccccccccccccccc
+}
diff --git a/chromium/tools/gn/format_test_data/051.golden b/chromium/tools/gn/format_test_data/051.golden
new file mode 100644
index 00000000000..37a12124013
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/051.golden
@@ -0,0 +1,7 @@
+# 80 ---------------------------------------------------------------------------
+if (true) {
+ aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa =
+ bbbbbbbbbbbbbbbbbbbb -
+ bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb +
+ cccccccccccccccccccccccccccccccccccc
+}
diff --git a/chromium/tools/gn/format_test_data/052.gn b/chromium/tools/gn/format_test_data/052.gn
new file mode 100644
index 00000000000..1f6a10f5159
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/052.gn
@@ -0,0 +1,11 @@
+# 80 ---------------------------------------------------------------------------
+if (true) {
+ if (true) {
+ sources += rebase_path(
+ gypi_values.browser_chromeos_non_athena_sources,
+ ".", "//chrome") +
+ rebase_path(gypi_values.browser_chromeos_extension_non_athena_sources,
+ ".", "//chrome")
+ }
+}
+
diff --git a/chromium/tools/gn/format_test_data/052.golden b/chromium/tools/gn/format_test_data/052.golden
new file mode 100644
index 00000000000..880c5639f2c
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/052.golden
@@ -0,0 +1,12 @@
+# 80 ---------------------------------------------------------------------------
+if (true) {
+ if (true) {
+ sources +=
+ rebase_path(gypi_values.browser_chromeos_non_athena_sources,
+ ".",
+ "//chrome") +
+ rebase_path(gypi_values.browser_chromeos_extension_non_athena_sources,
+ ".",
+ "//chrome")
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/053.gn b/chromium/tools/gn/format_test_data/053.gn
new file mode 100644
index 00000000000..42a3e0899ba
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/053.gn
@@ -0,0 +1,7 @@
+# 80 ---------------------------------------------------------------------------
+if (true) {
+ check_internal_result = exec_script(
+ "build/check_internal.py",
+ [ rebase_path("internal/google_chrome_api_keys.h", root_build_dir) ],
+ "value")
+}
diff --git a/chromium/tools/gn/format_test_data/053.golden b/chromium/tools/gn/format_test_data/053.golden
new file mode 100644
index 00000000000..76179cc1953
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/053.golden
@@ -0,0 +1,8 @@
+# 80 ---------------------------------------------------------------------------
+if (true) {
+ check_internal_result =
+ exec_script("build/check_internal.py",
+ [ rebase_path("internal/google_chrome_api_keys.h",
+ root_build_dir) ],
+ "value")
+}
diff --git a/chromium/tools/gn/format_test_data/054.gn b/chromium/tools/gn/format_test_data/054.gn
new file mode 100644
index 00000000000..dca2ace3b32
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/054.gn
@@ -0,0 +1,7 @@
+# 80 ---------------------------------------------------------------------------
+if (true) {
+ args = [
+ "{{source}}",
+ rebase_path("${target_gen_dir}/{{source_name_part}}-inc.cc", root_build_dir)
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/054.golden b/chromium/tools/gn/format_test_data/054.golden
new file mode 100644
index 00000000000..7dfe5be7f26
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/054.golden
@@ -0,0 +1,8 @@
+# 80 ---------------------------------------------------------------------------
+if (true) {
+ args = [
+ "{{source}}",
+ rebase_path("${target_gen_dir}/{{source_name_part}}-inc.cc",
+ root_build_dir),
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/055.gn b/chromium/tools/gn/format_test_data/055.gn
new file mode 100644
index 00000000000..7e467247694
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/055.gn
@@ -0,0 +1,10 @@
+# 80 ---------------------------------------------------------------------------
+if (true) {
+ if (true) {
+ use_system_harfbuzz = exec_script(
+ pkg_config_script,
+ pkg_config_args + [ "--atleast-version=1.31.0", "pangoft2" ],
+ "value")
+ }
+}
+
diff --git a/chromium/tools/gn/format_test_data/055.golden b/chromium/tools/gn/format_test_data/055.golden
new file mode 100644
index 00000000000..b2bfdf28654
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/055.golden
@@ -0,0 +1,11 @@
+# 80 ---------------------------------------------------------------------------
+if (true) {
+ if (true) {
+ use_system_harfbuzz = exec_script(pkg_config_script,
+ pkg_config_args + [
+ "--atleast-version=1.31.0",
+ "pangoft2",
+ ],
+ "value")
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/056.gn b/chromium/tools/gn/format_test_data/056.gn
new file mode 100644
index 00000000000..2082a770d87
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/056.gn
@@ -0,0 +1,45 @@
+# 80 ---------------------------------------------------------------------------
+java_files = [
+ "test/android/java/src/org/chromium/base/ContentUriTestUtils.java"
+]
+
+defines = [
+ "test/android/java/src/org/chromium/base/ContentUriTestUtils.java"
+]
+
+defines = [
+ "abc/test/android/java/src/org/chromium/base/ContentUriTestUtils.java"
+]
+
+cflags += [
+ # WEE
+ "/a",
+ "/b",
+ "/c",
+]
+
+sources = [ "/a", "/b", "/c" ]
+
+sources = [
+ # WEE
+ "/a",
+ "/b",
+ "/c",
+]
+
+sources += [
+ # WEE
+ "/a",
+ "/b",
+ "/c",
+]
+
+configs -= [
+ # Something!
+ "//build/config/win:nominmax",
+]
+
+cflags = [
+ "/wd4267", # size_t -> int
+ "/wd4324", # structure was padded
+]
diff --git a/chromium/tools/gn/format_test_data/056.golden b/chromium/tools/gn/format_test_data/056.golden
new file mode 100644
index 00000000000..df088fc06b5
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/056.golden
@@ -0,0 +1,45 @@
+# 80 ---------------------------------------------------------------------------
+java_files =
+ [ "test/android/java/src/org/chromium/base/ContentUriTestUtils.java" ]
+
+defines = [ "test/android/java/src/org/chromium/base/ContentUriTestUtils.java" ]
+
+defines =
+ [ "abc/test/android/java/src/org/chromium/base/ContentUriTestUtils.java" ]
+
+cflags += [
+ # WEE
+ "/a",
+ "/b",
+ "/c",
+]
+
+sources = [
+ "/a",
+ "/b",
+ "/c",
+]
+
+sources = [
+ # WEE
+ "/a",
+ "/b",
+ "/c",
+]
+
+sources += [
+ # WEE
+ "/a",
+ "/b",
+ "/c",
+]
+
+configs -= [
+ # Something!
+ "//build/config/win:nominmax",
+]
+
+cflags = [
+ "/wd4267", # size_t -> int
+ "/wd4324", # structure was padded
+]
diff --git a/chromium/tools/gn/format_test_data/057.gn b/chromium/tools/gn/format_test_data/057.gn
new file mode 100644
index 00000000000..858e3115b11
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/057.gn
@@ -0,0 +1,24 @@
+# 80 ---------------------------------------------------------------------------
+# Because there is a difference in precedence level between || and &&
+# a || b || c && d
+# is equivalent to
+# a || b || (c && d)
+# Because parens are not stored in the parse tree, the formatter recreates the
+# minimally required set to maintain meaning. However, this particular case can
+# be confusing for human readers, so we special case these ones and add
+# strictly-unnecessary parens.
+
+supports_android = (is_apk || is_android_resources ||
+ (is_java_library && defined(invoker.supports_android) &&
+ invoker.supports_android))
+
+enable_one_click_signin = is_win || is_mac || (is_linux && !is_chromeos)
+enable_one_click_signin = (is_linux && !is_chromeos) || is_win || is_mac
+
+x = c || (a&&b)
+x = (a&&b) || c
+x = a&&b || c
+
+x = c && (a||b)
+x = (a||b) && c
+x = a||b && c
diff --git a/chromium/tools/gn/format_test_data/057.golden b/chromium/tools/gn/format_test_data/057.golden
new file mode 100644
index 00000000000..d0daa6c8e72
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/057.golden
@@ -0,0 +1,24 @@
+# 80 ---------------------------------------------------------------------------
+# Because there is a difference in precedence level between || and &&
+# a || b || c && d
+# is equivalent to
+# a || b || (c && d)
+# Because parens are not stored in the parse tree, the formatter recreates the
+# minimally required set to maintain meaning. However, this particular case can
+# be confusing for human readers, so we special case these ones and add
+# strictly-unnecessary parens.
+
+supports_android = is_apk || is_android_resources ||
+ (is_java_library && defined(invoker.supports_android) &&
+ invoker.supports_android)
+
+enable_one_click_signin = is_win || is_mac || (is_linux && !is_chromeos)
+enable_one_click_signin = (is_linux && !is_chromeos) || is_win || is_mac
+
+x = c || (a && b)
+x = (a && b) || c
+x = (a && b) || c
+
+x = c && (a || b)
+x = (a || b) && c
+x = a || (b && c)
diff --git a/chromium/tools/gn/format_test_data/058.gn b/chromium/tools/gn/format_test_data/058.gn
new file mode 100644
index 00000000000..568074a634b
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/058.gn
@@ -0,0 +1,2 @@
+if (!defined(invoker.ignore_libs) || !invoker.ignore_libs) {
+}
diff --git a/chromium/tools/gn/format_test_data/058.golden b/chromium/tools/gn/format_test_data/058.golden
new file mode 100644
index 00000000000..568074a634b
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/058.golden
@@ -0,0 +1,2 @@
+if (!defined(invoker.ignore_libs) || !invoker.ignore_libs) {
+}
diff --git a/chromium/tools/gn/format_test_data/059.gn b/chromium/tools/gn/format_test_data/059.gn
new file mode 100644
index 00000000000..ea6fb8e3dc8
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/059.gn
@@ -0,0 +1,10 @@
+assert(type == "android_apk" || type == "java_library" ||
+ type == "android_resources" || things == stuff && stuff != 432)
+
+assert(type == "android_apk" || type == "java_library" ||
+ type == "android_resources",
+ type == "android_apk" || type == "java_library" ||
+ type == "android_resources")
+
+
+if (type == "android_apk" || type == "java_library" || type == "android_resources" || things == stuff && stuff != 432) {}
diff --git a/chromium/tools/gn/format_test_data/059.golden b/chromium/tools/gn/format_test_data/059.golden
new file mode 100644
index 00000000000..423e88882e1
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/059.golden
@@ -0,0 +1,11 @@
+assert(type == "android_apk" || type == "java_library" ||
+ type == "android_resources" || (things == stuff && stuff != 432))
+
+assert(type == "android_apk" || type == "java_library" ||
+ type == "android_resources",
+ type == "android_apk" || type == "java_library" ||
+ type == "android_resources")
+
+if (type == "android_apk" || type == "java_library" ||
+ type == "android_resources" || (things == stuff && stuff != 432)) {
+}
diff --git a/chromium/tools/gn/format_test_data/060.gn b/chromium/tools/gn/format_test_data/060.gn
new file mode 100644
index 00000000000..2b0da79f7cb
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/060.gn
@@ -0,0 +1,2 @@
+some_variable = "this is a very long string that is going to exceed 80 col and will never under any circumstance fit"
+another_variable = [ "this is a very long string that is going to exceed 80 col and will never under any circumstance fit" ]
diff --git a/chromium/tools/gn/format_test_data/060.golden b/chromium/tools/gn/format_test_data/060.golden
new file mode 100644
index 00000000000..2b0da79f7cb
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/060.golden
@@ -0,0 +1,2 @@
+some_variable = "this is a very long string that is going to exceed 80 col and will never under any circumstance fit"
+another_variable = [ "this is a very long string that is going to exceed 80 col and will never under any circumstance fit" ]
diff --git a/chromium/tools/gn/format_test_data/061.gn b/chromium/tools/gn/format_test_data/061.gn
new file mode 100644
index 00000000000..5948037fe56
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/061.gn
@@ -0,0 +1,9 @@
+action("generate_gl_bindings") {
+ args = [
+ "--header-paths=" + rebase_path("//third_party/khronos", root_build_dir) +
+ ":" + rebase_path("//third_party/mesa/src/include", root_build_dir) + ":" +
+ rebase_path("//ui/gl", root_build_dir) + ":" +
+ rebase_path("//gpu", root_build_dir),
+ rebase_path(gl_binding_output_dir, root_build_dir),
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/061.golden b/chromium/tools/gn/format_test_data/061.golden
new file mode 100644
index 00000000000..edbf43dc6ad
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/061.golden
@@ -0,0 +1,9 @@
+action("generate_gl_bindings") {
+ args = [
+ "--header-paths=" + rebase_path("//third_party/khronos", root_build_dir) +
+ ":" + rebase_path("//third_party/mesa/src/include", root_build_dir) +
+ ":" + rebase_path("//ui/gl", root_build_dir) + ":" +
+ rebase_path("//gpu", root_build_dir),
+ rebase_path(gl_binding_output_dir, root_build_dir),
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/062.gn b/chromium/tools/gn/format_test_data/062.gn
new file mode 100644
index 00000000000..d7fbb3cc7f1
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/062.gn
@@ -0,0 +1,112 @@
+# Sorting, making sure we don't detach comments.
+
+sources = []
+
+sources = ["x.cc"]
+
+sources = [
+ "/a",
+ "/b",
+ "/c",
+ # End of block.
+]
+
+sources += [
+ # Start of block, separate.
+
+ "c",
+ "a",
+ "b",
+]
+
+sources += [
+ "z",
+ "z2",
+ # Attached comment.
+ "y.h",
+ "y.cc",
+ "y.mm",
+ "y.rc",
+ "a"
+]
+
+sources += [
+ "z",
+ "z2",
+
+ # Block comment.
+
+ "y.h",
+ "y.cc",
+ "y.mm",
+ "y.rc",
+ "a"
+]
+
+sources += [
+ "z",
+ "z2",
+
+ #
+ # Multiline block comment.
+ #
+
+ "y.h",
+ "y.cc",
+ "y.mm",
+ "y.rc",
+ "a"
+]
+
+# With identifiers.
+sources += [
+ "a",
+ "b",
+ "c",
+ some_other_thing,
+ abcd,
+]
+
+# With accessors.
+sources += [
+ "a",
+ wee[0],
+ "b",
+ invoker.stuff,
+ "c",
+]
+
+# Various separated blocks.
+sources -= [
+ # Fix this test to build on Windows.
+ "focus_cycler_unittest.cc",
+
+ # All tests for multiple displays: not supported on Windows Ash.
+ "wm/drag_window_resizer_unittest.cc",
+
+ # Accelerometer is only available on Chrome OS.
+ "wm/maximize_mode/maximize_mode_controller_unittest.cc",
+
+ # Can't resize on Windows Ash. http://crbug.com/165962
+ "autoclick/autoclick_unittest.cc",
+ "magnifier/magnification_controller_unittest.cc",
+ # Attached 1.
+ # Attached 2.
+ "wm/workspace/workspace_window_resizer_unittest.cc",
+ "sticky_keys/sticky_keys_overlay_unittest.cc",
+ "system/tray/media_security/multi_profile_media_tray_item_unittest.cc",
+ "virtual_keyboard_controller_unittest.cc",
+
+ # Separated at end.
+ "zzzzzzzzzzzzzz.cc",
+]
+
+sources += [
+ "srtp/crypto/include/xfm.h",
+
+ # sources
+ "srtp/srtp/ekt.c",
+ "srtp/srtp/srtp.c",
+ "srtp/crypto/rng/prng.c",
+ "srtp/crypto/rng/rand_source.c",
+]
diff --git a/chromium/tools/gn/format_test_data/062.golden b/chromium/tools/gn/format_test_data/062.golden
new file mode 100644
index 00000000000..e939e449a05
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/062.golden
@@ -0,0 +1,117 @@
+# Sorting, making sure we don't detach comments.
+
+sources = []
+
+sources = [
+ "x.cc",
+]
+
+sources = [
+ "/a",
+ "/b",
+ "/c",
+
+ # End of block.
+]
+
+sources += [
+ # Start of block, separate.
+
+ "a",
+ "b",
+ "c",
+]
+
+sources += [
+ "a",
+ "y.cc",
+
+ # Attached comment.
+ "y.h",
+ "y.mm",
+ "y.rc",
+ "z",
+ "z2",
+]
+
+sources += [
+ "z",
+ "z2",
+
+ # Block comment.
+
+ "a",
+ "y.cc",
+ "y.h",
+ "y.mm",
+ "y.rc",
+]
+
+sources += [
+ "z",
+ "z2",
+
+ #
+ # Multiline block comment.
+ #
+
+ "a",
+ "y.cc",
+ "y.h",
+ "y.mm",
+ "y.rc",
+]
+
+# With identifiers.
+sources += [
+ "a",
+ "b",
+ "c",
+ abcd,
+ some_other_thing,
+]
+
+# With accessors.
+sources += [
+ "a",
+ "b",
+ "c",
+ invoker.stuff,
+ wee[0],
+]
+
+# Various separated blocks.
+sources -= [
+ # Fix this test to build on Windows.
+ "focus_cycler_unittest.cc",
+
+ # All tests for multiple displays: not supported on Windows Ash.
+ "wm/drag_window_resizer_unittest.cc",
+
+ # Accelerometer is only available on Chrome OS.
+ "wm/maximize_mode/maximize_mode_controller_unittest.cc",
+
+ # Can't resize on Windows Ash. http://crbug.com/165962
+ "autoclick/autoclick_unittest.cc",
+ "magnifier/magnification_controller_unittest.cc",
+ "sticky_keys/sticky_keys_overlay_unittest.cc",
+ "system/tray/media_security/multi_profile_media_tray_item_unittest.cc",
+ "virtual_keyboard_controller_unittest.cc",
+
+ # Attached 1.
+ # Attached 2.
+ "wm/workspace/workspace_window_resizer_unittest.cc",
+
+ # Separated at end.
+ "zzzzzzzzzzzzzz.cc",
+]
+
+sources += [
+ "srtp/crypto/include/xfm.h",
+
+ # sources
+ "srtp/crypto/rng/prng.c",
+ "srtp/crypto/rng/rand_source.c",
+ "srtp/srtp/ekt.c",
+ "srtp/srtp/srtp.c",
+]
diff --git a/chromium/tools/gn/format_test_data/063.gn b/chromium/tools/gn/format_test_data/063.gn
new file mode 100644
index 00000000000..9fd8211b692
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/063.gn
@@ -0,0 +1,36 @@
+source_set("test") {
+ a = "a"
+ b = "b"
+ deps = [
+ "//a",
+ "//a/a",
+ "//a/b",
+ "//a:a",
+ "//a:b",
+ "//b",
+ ":a",
+ ":b",
+ "a",
+ "a/a",
+ "a/b",
+ "a:a",
+ "a:b",
+ "b",
+ a,
+ b,
+ ]
+
+ public_deps = []
+ if (condition) {
+ public_deps += [
+ "//a",
+ "//a/a",
+ "//a:a",
+ ":a",
+ "a",
+ "a/a",
+ "a:a",
+ a,
+ ]
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/063.golden b/chromium/tools/gn/format_test_data/063.golden
new file mode 100644
index 00000000000..3dc4bedfae7
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/063.golden
@@ -0,0 +1,36 @@
+source_set("test") {
+ a = "a"
+ b = "b"
+ deps = [
+ ":a",
+ ":b",
+ "a",
+ "a:a",
+ "a:b",
+ "a/a",
+ "a/b",
+ "b",
+ "//a",
+ "//a:a",
+ "//a:b",
+ "//a/a",
+ "//a/b",
+ "//b",
+ a,
+ b,
+ ]
+
+ public_deps = []
+ if (condition) {
+ public_deps += [
+ ":a",
+ "a",
+ "a:a",
+ "a/a",
+ "//a",
+ "//a:a",
+ "//a/a",
+ a,
+ ]
+ }
+}
diff --git a/chromium/tools/gn/format_test_data/064.gn b/chromium/tools/gn/format_test_data/064.gn
new file mode 100644
index 00000000000..c1867250fe5
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/064.gn
@@ -0,0 +1,3 @@
+source_set("test") {
+ deps = [ rebase_path(sdk_dep, ".", mojo_root) ]
+}
diff --git a/chromium/tools/gn/format_test_data/064.golden b/chromium/tools/gn/format_test_data/064.golden
new file mode 100644
index 00000000000..3ff56f6490f
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/064.golden
@@ -0,0 +1,5 @@
+source_set("test") {
+ deps = [
+ rebase_path(sdk_dep, ".", mojo_root),
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/065.gn b/chromium/tools/gn/format_test_data/065.gn
new file mode 100644
index 00000000000..a4489092232
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/065.gn
@@ -0,0 +1,4 @@
+source_set("test") {
+ some_target_name = ":some_target"
+ deps = [ some_target_name, "//last_target", ":another_target" ]
+}
diff --git a/chromium/tools/gn/format_test_data/065.golden b/chromium/tools/gn/format_test_data/065.golden
new file mode 100644
index 00000000000..5df85fda4df
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/065.golden
@@ -0,0 +1,8 @@
+source_set("test") {
+ some_target_name = ":some_target"
+ deps = [
+ ":another_target",
+ "//last_target",
+ some_target_name,
+ ]
+}
diff --git a/chromium/tools/gn/format_test_data/066.gn b/chromium/tools/gn/format_test_data/066.gn
new file mode 100644
index 00000000000..c62eb2ae6ec
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/066.gn
@@ -0,0 +1,30 @@
+# Suppress sorting based on comment.
+
+# NOSORT
+sources = []
+
+# NOSORT
+sources = [
+ "a",
+]
+
+# NOSORT
+sources += [
+ "a",
+]
+
+# NOSORT
+sources = [
+ "z",
+ "z2",
+ "a",
+ "y.cc",
+]
+
+# NOSORT
+sources += [
+ "z",
+ "z2",
+ "a",
+ "y.cc",
+]
diff --git a/chromium/tools/gn/format_test_data/066.golden b/chromium/tools/gn/format_test_data/066.golden
new file mode 100644
index 00000000000..45467b880f5
--- /dev/null
+++ b/chromium/tools/gn/format_test_data/066.golden
@@ -0,0 +1,28 @@
+# Suppress sorting based on comment.
+
+# NOSORT
+sources = []
+
+# NOSORT
+sources = [
+ "a",
+]
+
+# NOSORT
+sources += [ "a" ]
+
+# NOSORT
+sources = [
+ "z",
+ "z2",
+ "a",
+ "y.cc",
+]
+
+# NOSORT
+sources += [
+ "z",
+ "z2",
+ "a",
+ "y.cc",
+]
diff --git a/chromium/tools/gn/function_exec_script.cc b/chromium/tools/gn/function_exec_script.cc
new file mode 100644
index 00000000000..231693b5d2e
--- /dev/null
+++ b/chromium/tools/gn/function_exec_script.cc
@@ -0,0 +1,258 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/command_line.h"
+#include "base/files/file_util.h"
+#include "base/logging.h"
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/utf_string_conversions.h"
+#include "base/time/time.h"
+#include "build/build_config.h"
+#include "tools/gn/err.h"
+#include "tools/gn/exec_process.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/input_conversion.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/trace.h"
+#include "tools/gn/value.h"
+
+namespace functions {
+
+namespace {
+
+bool CheckExecScriptPermissions(const BuildSettings* build_settings,
+ const FunctionCallNode* function,
+ Err* err) {
+ const std::set<SourceFile>* whitelist =
+ build_settings->exec_script_whitelist();
+ if (!whitelist)
+ return true; // No whitelist specified, don't check.
+
+ LocationRange function_range = function->GetRange();
+ if (!function_range.begin().file())
+ return true; // No file, might be some internal thing, implicitly pass.
+
+ if (whitelist->find(function_range.begin().file()->name()) !=
+ whitelist->end())
+ return true; // Whitelisted, this is OK.
+
+ // Disallowed case.
+ *err = Err(function, "Disallowed exec_script call.",
+ "The use of exec_script use is restricted in this build. exec_script\n"
+ "is discouraged because it can slow down the GN run and is easily\n"
+ "abused.\n"
+ "\n"
+ "Generally nontrivial work should be done as build steps rather than\n"
+ "when GN is run. For example, if you need to compute a nontrivial\n"
+ "preprocessor define, it will be better to have an action target\n"
+ "generate a header containing the define rather than blocking the GN\n"
+ "run to compute the value.\n"
+ "\n"
+ "The allowed callers of exec_script is maintained in the \"//.gn\" file\n"
+ "if you need to modify the whitelist.");
+ return false;
+}
+
+} // namespace
+
+const char kExecScript[] = "exec_script";
+const char kExecScript_HelpShort[] =
+ "exec_script: Synchronously run a script and return the output.";
+const char kExecScript_Help[] =
+ "exec_script: Synchronously run a script and return the output.\n"
+ "\n"
+ " exec_script(filename,\n"
+ " arguments = [],\n"
+ " input_conversion = \"\",\n"
+ " file_dependencies = [])\n"
+ "\n"
+ " Runs the given script, returning the stdout of the script. The build\n"
+ " generation will fail if the script does not exist or returns a nonzero\n"
+ " exit code.\n"
+ "\n"
+ " The current directory when executing the script will be the root\n"
+ " build directory. If you are passing file names, you will want to use\n"
+ " the rebase_path() function to make file names relative to this\n"
+ " path (see \"gn help rebase_path\").\n"
+ "\n"
+ "Arguments:\n"
+ "\n"
+ " filename:\n"
+ " File name of python script to execute. Non-absolute names will\n"
+ " be treated as relative to the current build file.\n"
+ "\n"
+ " arguments:\n"
+ " A list of strings to be passed to the script as arguments.\n"
+ " May be unspecified or the empty list which means no arguments.\n"
+ "\n"
+ " input_conversion:\n"
+ " Controls how the file is read and parsed.\n"
+ " See \"gn help input_conversion\".\n"
+ "\n"
+ " If unspecified, defaults to the empty string which causes the\n"
+ " script result to be discarded. exec script will return None.\n"
+ "\n"
+ " dependencies:\n"
+ " (Optional) A list of files that this script reads or otherwise\n"
+ " depends on. These dependencies will be added to the build result\n"
+ " such that if any of them change, the build will be regenerated and\n"
+ " the script will be re-run.\n"
+ "\n"
+ " The script itself will be an implicit dependency so you do not\n"
+ " need to list it.\n"
+ "\n"
+ "Example:\n"
+ "\n"
+ " all_lines = exec_script(\n"
+ " \"myscript.py\", [some_input], \"list lines\",\n"
+ " [ rebase_path(\"data_file.txt\", root_build_dir) ])\n"
+ "\n"
+ " # This example just calls the script with no arguments and discards\n"
+ " # the result.\n"
+ " exec_script(\"//foo/bar/myscript.py\")\n";
+
+Value RunExecScript(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err) {
+ if (args.size() < 1 || args.size() > 4) {
+ *err = Err(function->function(), "Wrong number of arguments to exec_script",
+ "I expected between one and four arguments.");
+ return Value();
+ }
+
+ const Settings* settings = scope->settings();
+ const BuildSettings* build_settings = settings->build_settings();
+ const SourceDir& cur_dir = scope->GetSourceDir();
+
+ if (!CheckExecScriptPermissions(build_settings, function, err))
+ return Value();
+
+ // Find the python script to run.
+ SourceFile script_source =
+ cur_dir.ResolveRelativeFile(args[0], err,
+ scope->settings()->build_settings()->root_path_utf8());
+ if (err->has_error())
+ return Value();
+ base::FilePath script_path = build_settings->GetFullPath(script_source);
+ if (!build_settings->secondary_source_path().empty() &&
+ !base::PathExists(script_path)) {
+ // Fall back to secondary source root when the file doesn't exist.
+ script_path = build_settings->GetFullPathSecondary(script_source);
+ }
+
+ ScopedTrace trace(TraceItem::TRACE_SCRIPT_EXECUTE, script_source.value());
+ trace.SetToolchain(settings->toolchain_label());
+
+ // Add all dependencies of this script, including the script itself, to the
+ // build deps.
+ g_scheduler->AddGenDependency(script_path);
+ if (args.size() == 4) {
+ const Value& deps_value = args[3];
+ if (!deps_value.VerifyTypeIs(Value::LIST, err))
+ return Value();
+
+ for (const auto& dep : deps_value.list_value()) {
+ if (!dep.VerifyTypeIs(Value::STRING, err))
+ return Value();
+ g_scheduler->AddGenDependency(
+ build_settings->GetFullPath(cur_dir.ResolveRelativeFile(
+ dep, err,
+ scope->settings()->build_settings()->root_path_utf8())));
+ if (err->has_error())
+ return Value();
+ }
+ }
+
+ // Make the command line.
+ const base::FilePath& python_path = build_settings->python_path();
+ base::CommandLine cmdline(python_path);
+
+ // CommandLine tries to interpret arguments by default. Passing "--" disables
+ // this for everything following the "--", so pass this as the very first
+ // thing to python. Python ignores a -- before the .py file, and this makes
+ // CommandLine let through arguments without modifying them.
+ cmdline.AppendArg("--");
+
+ cmdline.AppendArgPath(script_path);
+
+ if (args.size() >= 2) {
+ // Optional command-line arguments to the script.
+ const Value& script_args = args[1];
+ if (!script_args.VerifyTypeIs(Value::LIST, err))
+ return Value();
+ for (const auto& arg : script_args.list_value()) {
+ if (!arg.VerifyTypeIs(Value::STRING, err))
+ return Value();
+ cmdline.AppendArg(arg.string_value());
+ }
+ }
+
+ // Log command line for debugging help.
+ trace.SetCommandLine(cmdline);
+ base::TimeTicks begin_exec;
+ if (g_scheduler->verbose_logging()) {
+#if defined(OS_WIN)
+ g_scheduler->Log("Pythoning",
+ base::UTF16ToUTF8(cmdline.GetCommandLineString()));
+#else
+ g_scheduler->Log("Pythoning", cmdline.GetCommandLineString());
+#endif
+ begin_exec = base::TimeTicks::Now();
+ }
+
+ base::FilePath startup_dir =
+ build_settings->GetFullPath(build_settings->build_dir());
+ // The first time a build is run, no targets will have been written so the
+ // build output directory won't exist. We need to make sure it does before
+ // running any scripts with this as its startup directory, although it will
+ // be relatively rare that the directory won't exist by the time we get here.
+ //
+ // If this shows up on benchmarks, we can cache whether we've done this
+ // or not and skip creating the directory.
+ base::CreateDirectory(startup_dir);
+
+ // Execute the process.
+ // TODO(brettw) set the environment block.
+ std::string output;
+ std::string stderr_output;
+ int exit_code = 0;
+ if (!internal::ExecProcess(
+ cmdline, startup_dir, &output, &stderr_output, &exit_code)) {
+ *err = Err(function->function(), "Could not execute python.",
+ "I was trying to execute \"" + FilePathToUTF8(python_path) + "\".");
+ return Value();
+ }
+ if (g_scheduler->verbose_logging()) {
+ g_scheduler->Log("Pythoning", script_source.value() + " took " +
+ base::Int64ToString(
+ (base::TimeTicks::Now() - begin_exec).InMilliseconds()) +
+ "ms");
+ }
+
+ if (exit_code != 0) {
+ std::string msg = "Current dir: " + FilePathToUTF8(startup_dir) +
+ "\nCommand: " + FilePathToUTF8(cmdline.GetCommandLineString()) +
+ "\nReturned " + base::IntToString(exit_code);
+ if (!output.empty())
+ msg += " and printed out:\n\n" + output;
+ else
+ msg += ".";
+ if (!stderr_output.empty())
+ msg += "\nstderr:\n\n" + stderr_output;
+
+ *err = Err(function->function(), "Script returned non-zero exit code.",
+ msg);
+ return Value();
+ }
+
+ // Default to None value for the input conversion if unspecified.
+ return ConvertInputToValue(scope->settings(), output, function,
+ args.size() >= 3 ? args[2] : Value(), err);
+}
+
+} // namespace functions
diff --git a/chromium/tools/gn/function_foreach.cc b/chromium/tools/gn/function_foreach.cc
new file mode 100644
index 00000000000..549ebe407c1
--- /dev/null
+++ b/chromium/tools/gn/function_foreach.cc
@@ -0,0 +1,121 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/err.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+
+namespace functions {
+
+const char kForEach[] = "foreach";
+const char kForEach_HelpShort[] =
+ "foreach: Iterate over a list.";
+const char kForEach_Help[] =
+ "foreach: Iterate over a list.\n"
+ "\n"
+ " foreach(<loop_var>, <list>) {\n"
+ " <loop contents>\n"
+ " }\n"
+ "\n"
+ " Executes the loop contents block over each item in the list,\n"
+ " assigning the loop_var to each item in sequence. The loop_var will be\n"
+ " a copy so assigning to it will not mutate the list.\n"
+ "\n"
+ " The block does not introduce a new scope, so that variable assignments\n"
+ " inside the loop will be visible once the loop terminates.\n"
+ "\n"
+ " The loop variable will temporarily shadow any existing variables with\n"
+ " the same name for the duration of the loop. After the loop terminates\n"
+ " the loop variable will no longer be in scope, and the previous value\n"
+ " (if any) will be restored.\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " mylist = [ \"a\", \"b\", \"c\" ]\n"
+ " foreach(i, mylist) {\n"
+ " print(i)\n"
+ " }\n"
+ "\n"
+ " Prints:\n"
+ " a\n"
+ " b\n"
+ " c\n";
+
+Value RunForEach(Scope* scope,
+ const FunctionCallNode* function,
+ const ListNode* args_list,
+ Err* err) {
+ const std::vector<const ParseNode*>& args_vector = args_list->contents();
+ if (args_vector.size() != 2) {
+ *err = Err(function, "Wrong number of arguments to foreach().",
+ "Expecting exactly two.");
+ return Value();
+ }
+
+ // Extract the loop variable.
+ const IdentifierNode* identifier = args_vector[0]->AsIdentifier();
+ if (!identifier) {
+ *err = Err(args_vector[0], "Expected an identifier for the loop var.");
+ return Value();
+ }
+ base::StringPiece loop_var(identifier->value().value());
+
+ // Extract the list, avoid a copy if it's an identifier (common case).
+ Value value_storage_for_exec; // Backing for list_value when we need to exec.
+ const Value* list_value = nullptr;
+ const IdentifierNode* list_identifier = args_vector[1]->AsIdentifier();
+ if (list_identifier) {
+ list_value = scope->GetValue(list_identifier->value().value(), true);
+ if (!list_value) {
+ *err = Err(args_vector[1], "Undefined identifier.");
+ return Value();
+ }
+ } else {
+ // Not an identifier, evaluate the node to get the result.
+ Scope list_exec_scope(scope);
+ value_storage_for_exec = args_vector[1]->Execute(scope, err);
+ if (err->has_error())
+ return Value();
+ list_value = &value_storage_for_exec;
+ }
+ if (!list_value->VerifyTypeIs(Value::LIST, err))
+ return Value();
+ const std::vector<Value>& list = list_value->list_value();
+
+ // Block to execute.
+ const BlockNode* block = function->block();
+ if (!block) {
+ *err = Err(function, "Expected { after foreach.");
+ return Value();
+ }
+
+ // If the loop variable was previously defined in this scope, save it so we
+ // can put it back after the loop is done.
+ const Value* old_loop_value_ptr = scope->GetValue(loop_var);
+ Value old_loop_value;
+ if (old_loop_value_ptr)
+ old_loop_value = *old_loop_value_ptr;
+
+ for (const auto& cur : list) {
+ scope->SetValue(loop_var, cur, function);
+ block->Execute(scope, err);
+ if (err->has_error())
+ return Value();
+ }
+
+ // Put back loop var.
+ if (old_loop_value_ptr) {
+ // Put back old value. Use the copy we made, rather than use the pointer,
+ // which will probably point to the new value now in the scope.
+ scope->SetValue(loop_var, old_loop_value, old_loop_value.origin());
+ } else {
+ // Loop variable was undefined before loop, delete it.
+ scope->RemoveIdentifier(loop_var);
+ }
+
+ return Value();
+}
+
+} // namespace functions
diff --git a/chromium/tools/gn/function_foreach_unittest.cc b/chromium/tools/gn/function_foreach_unittest.cc
new file mode 100644
index 00000000000..462d7142dfb
--- /dev/null
+++ b/chromium/tools/gn/function_foreach_unittest.cc
@@ -0,0 +1,75 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/test_with_scope.h"
+
+TEST(FunctionForeach, CollisionOnLoopVar) {
+ TestWithScope setup;
+ TestParseInput input(
+ "a = 5\n"
+ "i = 6\n"
+ "foreach(i, [1, 2, 3]) {\n" // Use same loop var name previously defined.
+ " print(\"$a $i\")\n"
+ " a = a + 1\n" // Test for side effects inside loop.
+ "}\n"
+ "print(\"$a $i\")"); // Make sure that i goes back to original value.
+ ASSERT_FALSE(input.has_error());
+
+ Err err;
+ input.parsed()->Execute(setup.scope(), &err);
+ ASSERT_FALSE(err.has_error()) << err.message();
+
+ EXPECT_EQ("5 1\n6 2\n7 3\n8 6\n", setup.print_output());
+}
+
+TEST(FunctionForeach, UniqueLoopVar) {
+ TestWithScope setup;
+ TestParseInput input_good(
+ "foreach(i, [1, 2, 3]) {\n"
+ " print(i)\n"
+ "}\n");
+ ASSERT_FALSE(input_good.has_error());
+
+ Err err;
+ input_good.parsed()->Execute(setup.scope(), &err);
+ ASSERT_FALSE(err.has_error()) << err.message();
+
+ EXPECT_EQ("1\n2\n3\n", setup.print_output());
+ setup.print_output().clear();
+
+ // Same thing but try to use the loop var after loop is done. It should be
+ // undefined and throw an error.
+ TestParseInput input_bad(
+ "foreach(i, [1, 2, 3]) {\n"
+ " print(i)\n"
+ "}\n"
+ "print(i)");
+ ASSERT_FALSE(input_bad.has_error()); // Should parse OK.
+
+ input_bad.parsed()->Execute(setup.scope(), &err);
+ ASSERT_TRUE(err.has_error()); // Shouldn't actually run.
+}
+
+// Checks that the identifier used as the list is marked as "used".
+TEST(FunctionForeach, MarksIdentAsUsed) {
+ TestWithScope setup;
+ TestParseInput input_good(
+ "a = [1, 2]\n"
+ "foreach(i, a) {\n"
+ " print(i)\n"
+ "}\n");
+ ASSERT_FALSE(input_good.has_error());
+
+ Err err;
+ input_good.parsed()->Execute(setup.scope(), &err);
+ ASSERT_FALSE(err.has_error()) << err.message();
+
+ EXPECT_EQ("1\n2\n", setup.print_output());
+ setup.print_output().clear();
+
+ // Check for unused vars.
+ EXPECT_TRUE(setup.scope()->CheckForUnusedVars(&err));
+ EXPECT_FALSE(err.has_error());
+}
diff --git a/chromium/tools/gn/function_forward_variables_from.cc b/chromium/tools/gn/function_forward_variables_from.cc
new file mode 100644
index 00000000000..ee141287715
--- /dev/null
+++ b/chromium/tools/gn/function_forward_variables_from.cc
@@ -0,0 +1,227 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/err.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+
+namespace functions {
+
+namespace {
+
+// Implements the "*" form of forward_variables_from(): merges every
+// non-private value set directly on |source| into |dest|, except names in
+// |exclusion_set|. On failure, |err| is set and |function| is blamed.
+// Everything in |source| is marked used afterwards so the unused-variable
+// check does not fire on forwarded-from scopes.
+void ForwardAllValues(const FunctionCallNode* function,
+                      Scope* source,
+                      Scope* dest,
+                      const std::set<std::string>& exclusion_set,
+                      Err* err) {
+  Scope::MergeOptions options;
+  // This function needs to clobber existing for it to be useful. It will be
+  // called in a template to forward all values, but there will be some
+  // default stuff like configs set up in both scopes, so it would always
+  // fail if it didn't clobber.
+  options.clobber_existing = true;
+  options.skip_private_vars = true;
+  options.mark_dest_used = false;
+  options.excluded_values = exclusion_set;
+  source->NonRecursiveMergeTo(dest, options, function,
+                              "source scope", err);
+  source->MarkAllUsed();
+}
+
+// Implements the explicit-list form of forward_variables_from(): copies only
+// the values named in |list| (each entry must be a string) from |source| into
+// |dest|. Names in |exclusion_set| and names undefined in |source| are
+// silently skipped. Sets |err| for non-string entries or non-forwardable
+// (built-in) values.
+void ForwardValuesFromList(Scope* source,
+                           Scope* dest,
+                           const std::vector<Value>& list,
+                           const std::set<std::string>& exclusion_set,
+                           Err* err) {
+  for (const Value& cur : list) {
+    if (!cur.VerifyTypeIs(Value::STRING, err))
+      return;
+    if (exclusion_set.find(cur.string_value()) != exclusion_set.end())
+      continue;
+    const Value* value = source->GetValue(cur.string_value(), true);
+    if (value) {
+      // Use the storage key for the original value rather than the string in
+      // "cur" because "cur" is a temporary that will be deleted, and Scopes
+      // expect a persistent StringPiece (it won't copy). Not doing this will
+      // lead the scope's key to point to invalid memory after this returns.
+      base::StringPiece storage_key = source->GetStorageKey(cur.string_value());
+      if (storage_key.empty()) {
+        // Programmatic value, don't allow copying.
+        *err = Err(cur, "This value can't be forwarded.",
+            "The variable \"" + cur.string_value() + "\" is a built-in.");
+        return;
+      }
+
+      // Keep the origin information from the original value. The normal
+      // usage is for this to be used in a template, and if there's an error,
+      // the user expects to see the line where they set the variable
+      // blamed, rather than a template call to forward_variables_from().
+      dest->SetValue(storage_key, *value, value->origin());
+    }
+  }
+}
+
+} // namespace
+
+// Function name and user-facing help text for forward_variables_from.
+// Fixes in the help text: "assumed than" -> "assumed that", "must contains"
+// -> "must contain", missing "will" before "be left undefined", the
+// parameter name in the prose now matches the signature
+// (variable_to_not_forward_list), and a garbled "around either a target"
+// sentence.
+const char kForwardVariablesFrom[] = "forward_variables_from";
+const char kForwardVariablesFrom_HelpShort[] =
+    "forward_variables_from: Copies variables from a different scope.";
+const char kForwardVariablesFrom_Help[] =
+    "forward_variables_from: Copies variables from a different scope.\n"
+    "\n"
+    "  forward_variables_from(from_scope, variable_list_or_star,\n"
+    "                         variable_to_not_forward_list = [])\n"
+    "\n"
+    "  Copies the given variables from the given scope to the local scope\n"
+    "  if they exist. This is normally used in the context of templates to\n"
+    "  use the values of variables defined in the template invocation to\n"
+    "  a template-defined target.\n"
+    "\n"
+    "  The variables in the given variable_list will be copied if they exist\n"
+    "  in the given scope or any enclosing scope. If they do not exist,\n"
+    "  nothing will happen and they will be left undefined in the current\n"
+    "  scope.\n"
+    "\n"
+    "  As a special case, if the variable_list is a string with the value of\n"
+    "  \"*\", all variables from the given scope will be copied. \"*\" only\n"
+    "  copies variables set directly on the from_scope, not enclosing ones.\n"
+    "  Otherwise it would duplicate all global variables.\n"
+    "\n"
+    "  When an explicit list of variables is supplied, if the variable exists\n"
+    "  in the current (destination) scope already, an error will be thrown.\n"
+    "  If \"*\" is specified, variables in the current scope will be\n"
+    "  clobbered (the latter is important because most targets have an\n"
+    "  implicit configs list, which means it wouldn't work at all if it\n"
+    "  didn't clobber).\n"
+    "\n"
+    "  The sources assignment filter (see \"gn help "
+        "set_sources_assignment_filter\")\n"
+    "  is never applied by this function. It's assumed that any desired\n"
+    "  filtering was already done when sources was set on the from_scope.\n"
+    "\n"
+    "  If variable_to_not_forward_list is non-empty, then it must contain\n"
+    "  a list of variable names that will not be forwarded. This is mostly\n"
+    "  useful when variable_list_or_star has a value of \"*\".\n"
+    "\n"
+    "Examples\n"
+    "\n"
+    "  # This is a common action template. It would invoke a script with\n"
+    "  # some given parameters, and wants to use the various types of deps\n"
+    "  # and the visibility from the invoker if it's defined. It also injects\n"
+    "  # an additional dependency to all targets.\n"
+    "  template(\"my_test\") {\n"
+    "    action(target_name) {\n"
+    "      forward_variables_from(invoker, [ \"data_deps\", \"deps\",\n"
+    "                                        \"public_deps\", \"visibility\" "
+        "])\n"
+    "      # Add our test code to the dependencies.\n"
+    "      # \"deps\" may or may not be defined at this point.\n"
+    "      if (defined(deps)) {\n"
+    "        deps += [ \"//tools/doom_melon\" ]\n"
+    "      } else {\n"
+    "        deps = [ \"//tools/doom_melon\" ]\n"
+    "      }\n"
+    "    }\n"
+    "  }\n"
+    "\n"
+    "  # This is a template around a target whose type depends on a\n"
+    "  # global variable. It forwards all values from the invoker.\n"
+    "  template(\"my_wrapper\") {\n"
+    "    target(my_wrapper_target_type, target_name) {\n"
+    "      forward_variables_from(invoker, \"*\")\n"
+    "    }\n"
+    "  }\n"
+    "\n"
+    "  # A template that wraps another. It adds behavior based on one\n"
+    "  # variable, and forwards all others to the nested target.\n"
+    "  template(\"my_ios_test_app\") {\n"
+    "    ios_test_app(target_name) {\n"
+    "      forward_variables_from(invoker, \"*\", [\"test_bundle_name\"])\n"
+    "      if (!defined(extra_substitutions)) {\n"
+    "        extra_substitutions = []\n"
+    "      }\n"
+    "      extra_substitutions += [ \"BUNDLE_ID_TEST_NAME=$test_bundle_name\" "
+        "]\n"
+    "    }\n"
+    "  }\n";
+
+// This function takes a ListNode rather than a resolved vector of values
+// to both avoid copying the potentially-large source scope, and so the
+// variables in the source scope can be marked as used.
+//
+// Arguments: (from_scope, variable_list_or_star [, exclusion_list]).
+// Returns an empty Value; on failure sets |err|.
+Value RunForwardVariablesFrom(Scope* scope,
+                              const FunctionCallNode* function,
+                              const ListNode* args_list,
+                              Err* err) {
+  const std::vector<const ParseNode*>& args_vector = args_list->contents();
+  if (args_vector.size() != 2 && args_vector.size() != 3) {
+    *err = Err(function, "Wrong number of arguments.",
+               "Expecting two or three arguments.");
+    return Value();
+  }
+
+  // Extract the scope identifier. This assumes the first parameter is an
+  // identifier. It is difficult to write code where this is not the case, and
+  // this saves an expensive scope copy. If necessary, this could be expanded
+  // to execute the ParseNode and get the value out if it's not an identifier.
+  const IdentifierNode* identifier = args_vector[0]->AsIdentifier();
+  if (!identifier) {
+    *err = Err(args_vector[0], "Expected an identifier for the scope.");
+    return Value();
+  }
+
+  // Extract the source scope.
+  Value* value = scope->GetMutableValue(identifier->value().value(), true);
+  if (!value) {
+    *err = Err(identifier, "Undefined identifier.");
+    return Value();
+  }
+  if (!value->VerifyTypeIs(Value::SCOPE, err))
+    return Value();
+  Scope* source = value->scope_value();
+
+  // Extract the exclusion list if defined.
+  std::set<std::string> exclusion_set;
+  if (args_vector.size() == 3) {
+    Value exclusion_value = args_vector[2]->Execute(scope, err);
+    if (err->has_error())
+      return Value();
+
+    if (exclusion_value.type() != Value::LIST) {
+      *err = Err(exclusion_value, "Not a valid list of variables to exclude.",
+                 "Expecting a list of strings.");
+      return Value();
+    }
+
+    for (const Value& cur : exclusion_value.list_value()) {
+      if (!cur.VerifyTypeIs(Value::STRING, err))
+        return Value();
+
+      exclusion_set.insert(cur.string_value());
+    }
+  }
+
+  // Extract the list. If all_values is not set, the what_value will be a list.
+  Value what_value = args_vector[1]->Execute(scope, err);
+  if (err->has_error())
+    return Value();
+  if (what_value.type() == Value::STRING) {
+    if (what_value.string_value() == "*") {
+      ForwardAllValues(function, source, scope, exclusion_set, err);
+      return Value();
+    }
+  } else {
+    if (what_value.type() == Value::LIST) {
+      ForwardValuesFromList(source, scope, what_value.list_value(),
+                            exclusion_set, err);
+      return Value();
+    }
+  }
+
+  // Not the right type of argument.
+  *err = Err(what_value, "Not a valid list of variables to copy.",
+             "Expecting either the string \"*\" or a list of strings.");
+  return Value();
+}
+
+} // namespace functions
diff --git a/chromium/tools/gn/function_forward_variables_from_unittest.cc b/chromium/tools/gn/function_forward_variables_from_unittest.cc
new file mode 100644
index 00000000000..e5137a48b26
--- /dev/null
+++ b/chromium/tools/gn/function_forward_variables_from_unittest.cc
@@ -0,0 +1,208 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/test_with_scope.h"
+
+// Explicit-list form: forwards x, y and z from the invoker. The invoker only
+// sets x and y, so z must stay undefined in the template (missing variables
+// are silently skipped).
+TEST(FunctionForwardVariablesFrom, List) {
+  Scheduler scheduler;
+  TestWithScope setup;
+
+  // Defines a template that forwards x, y and z out of the invocation.
+  TestParseInput input(
+      "template(\"a\") {\n"
+      "  forward_variables_from(invoker, [\"x\", \"y\", \"z\"])\n"
+      "  assert(!defined(z))\n"  // "z" should still be undefined.
+      "  print(\"$target_name, $x, $y\")\n"
+      "}\n"
+      "a(\"target\") {\n"
+      "  x = 1\n"
+      "  y = 2\n"
+      "}\n");
+
+  ASSERT_FALSE(input.has_error());
+
+  Err err;
+  input.parsed()->Execute(setup.scope(), &err);
+  ASSERT_FALSE(err.has_error()) << err.message();
+
+  EXPECT_EQ("target, 1, 2\n", setup.print_output());
+  setup.print_output().clear();
+}
+
+// Explicit-list form with an exclusion list: z is both requested and
+// excluded, so only x and y are copied and z stays undefined inside the
+// template even though the invoker set it.
+TEST(FunctionForwardVariablesFrom, ListWithExclusion) {
+  Scheduler scheduler;
+  TestWithScope setup;
+
+  // Forwards x, y, z but excludes z; the invoker prints z itself so it
+  // doesn't trip the unused-variable check.
+  TestParseInput input(
+      "template(\"a\") {\n"
+      "  forward_variables_from(invoker, [\"x\", \"y\", \"z\"], [\"z\"])\n"
+      "  assert(!defined(z))\n"  // "z" should still be undefined.
+      "  print(\"$target_name, $x, $y\")\n"
+      "}\n"
+      "a(\"target\") {\n"
+      "  x = 1\n"
+      "  y = 2\n"
+      "  z = 3\n"
+      "  print(\"$z\")\n"
+      "}\n");
+
+  ASSERT_FALSE(input.has_error());
+
+  Err err;
+  input.parsed()->Execute(setup.scope(), &err);
+  ASSERT_FALSE(err.has_error()) << err.message();
+
+  EXPECT_EQ("3\ntarget, 1, 2\n", setup.print_output());
+  setup.print_output().clear();
+}
+
+// Exercises each failure mode of forward_variables_from() and checks the
+// exact error message: non-identifier scope, non-list variable list,
+// non-list exclusion list, built-in (programmatic) values, and wrong
+// argument counts.
+TEST(FunctionForwardVariablesFrom, ErrorCases) {
+  Scheduler scheduler;
+  TestWithScope setup;
+
+  // Type check the source scope.
+  TestParseInput invalid_source(
+      "template(\"a\") {\n"
+      "  forward_variables_from(42, [\"x\"])\n"
+      "  print(\"$target_name\")\n"  // Prevent unused var error.
+      "}\n"
+      "a(\"target\") {\n"
+      "}\n");
+  ASSERT_FALSE(invalid_source.has_error());
+  Err err;
+  invalid_source.parsed()->Execute(setup.scope(), &err);
+  EXPECT_TRUE(err.has_error());
+  EXPECT_EQ("Expected an identifier for the scope.", err.message());
+
+  // Type check the list. We need to use a new template name each time since
+  // all of these invocations are executing in sequence in the same scope.
+  TestParseInput invalid_list(
+      "template(\"b\") {\n"
+      "  forward_variables_from(invoker, 42)\n"
+      "  print(\"$target_name\")\n"
+      "}\n"
+      "b(\"target\") {\n"
+      "}\n");
+  ASSERT_FALSE(invalid_list.has_error());
+  err = Err();
+  invalid_list.parsed()->Execute(setup.scope(), &err);
+  EXPECT_TRUE(err.has_error());
+  EXPECT_EQ("Not a valid list of variables to copy.", err.message());
+
+  // Type check the exclusion list.
+  TestParseInput invalid_exclusion_list(
+      "template(\"c\") {\n"
+      "  forward_variables_from(invoker, \"*\", 42)\n"
+      "  print(\"$target_name\")\n"
+      "}\n"
+      "c(\"target\") {\n"
+      "}\n");
+  ASSERT_FALSE(invalid_exclusion_list.has_error());
+  err = Err();
+  invalid_exclusion_list.parsed()->Execute(setup.scope(), &err);
+  EXPECT_TRUE(err.has_error());
+  EXPECT_EQ("Not a valid list of variables to exclude.", err.message());
+
+  // Programmatic values should error.
+  TestParseInput prog(
+      "template(\"d\") {\n"
+      "  forward_variables_from(invoker, [\"root_out_dir\"])\n"
+      "  print(\"$target_name\")\n"
+      "}\n"
+      "d(\"target\") {\n"
+      "}\n");
+  ASSERT_FALSE(prog.has_error());
+  err = Err();
+  prog.parsed()->Execute(setup.scope(), &err);
+  EXPECT_TRUE(err.has_error());
+  EXPECT_EQ("This value can't be forwarded.", err.message());
+
+  // Not enough arguments.
+  TestParseInput not_enough_arguments(
+      "template(\"e\") {\n"
+      "  forward_variables_from(invoker)\n"
+      "  print(\"$target_name\")\n"
+      "}\n"
+      "e(\"target\") {\n"
+      "}\n");
+  ASSERT_FALSE(not_enough_arguments.has_error());
+  err = Err();
+  not_enough_arguments.parsed()->Execute(setup.scope(), &err);
+  EXPECT_TRUE(err.has_error());
+  EXPECT_EQ("Wrong number of arguments.", err.message());
+
+  // Too many arguments.
+  TestParseInput too_many_arguments(
+      "template(\"f\") {\n"
+      "  forward_variables_from(invoker, \"*\", [], [])\n"
+      "  print(\"$target_name\")\n"
+      "}\n"
+      "f(\"target\") {\n"
+      "}\n");
+  ASSERT_FALSE(too_many_arguments.has_error());
+  err = Err();
+  too_many_arguments.parsed()->Execute(setup.scope(), &err);
+  EXPECT_TRUE(err.has_error());
+  EXPECT_EQ("Wrong number of arguments.", err.message());
+}
+
+// "*" form: everything set directly on the invoker is forwarded, and an
+// existing variable of the same name in the destination is clobbered.
+TEST(FunctionForwardVariablesFrom, Star) {
+  Scheduler scheduler;
+  TestWithScope setup;
+
+  // Defines a template and copy the two x and y values out. The "*" behavior
+  // should clobber existing variables with the same name.
+  TestParseInput input(
+      "template(\"a\") {\n"
+      "  x = 1000000\n"  // Should be clobbered.
+      "  forward_variables_from(invoker, \"*\")\n"
+      "  print(\"$target_name, $x, $y\")\n"
+      "}\n"
+      "a(\"target\") {\n"
+      "  x = 1\n"
+      "  y = 2\n"
+      "}\n");
+
+  ASSERT_FALSE(input.has_error());
+
+  Err err;
+  input.parsed()->Execute(setup.scope(), &err);
+  ASSERT_FALSE(err.has_error()) << err.message();
+
+  EXPECT_EQ("target, 1, 2\n", setup.print_output());
+  setup.print_output().clear();
+}
+
+
+// "*" form combined with an exclusion list: everything except z is
+// forwarded; the invoker prints z itself to keep it "used".
+TEST(FunctionForwardVariablesFrom, StarWithExclusion) {
+  Scheduler scheduler;
+  TestWithScope setup;
+
+  // Defines a template and copy all values except z value. The "*" behavior
+  // should clobber existing variables with the same name.
+  TestParseInput input(
+      "template(\"a\") {\n"
+      "  x = 1000000\n"  // Should be clobbered.
+      "  forward_variables_from(invoker, \"*\", [\"z\"])\n"
+      "  print(\"$target_name, $x, $y\")\n"
+      "}\n"
+      "a(\"target\") {\n"
+      "  x = 1\n"
+      "  y = 2\n"
+      "  z = 3\n"
+      "  print(\"$z\")\n"
+      "}\n");
+
+  ASSERT_FALSE(input.has_error());
+
+  Err err;
+  input.parsed()->Execute(setup.scope(), &err);
+  ASSERT_FALSE(err.has_error()) << err.message();
+
+  EXPECT_EQ("3\ntarget, 1, 2\n", setup.print_output());
+  setup.print_output().clear();
+}
diff --git a/chromium/tools/gn/function_get_label_info.cc b/chromium/tools/gn/function_get_label_info.cc
new file mode 100644
index 00000000000..be22cca2ffb
--- /dev/null
+++ b/chromium/tools/gn/function_get_label_info.cc
@@ -0,0 +1,162 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/err.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/label.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/value.h"
+
+namespace functions {
+
+namespace {
+
+// Returns true when |toolchain_label| names the default toolchain of the
+// build that |scope| belongs to.
+bool ToolchainIsDefault(const Scope* scope, const Label& toolchain_label) {
+  return scope->settings()->default_toolchain_label() == toolchain_label;
+}
+
+} // namespace
+
+// Function name and user-facing help text for get_label_info. Keep the
+// "what" list in sync with RunGetLabelInfo() below. Fixes: the
+// "target_out_dir" heading was missing its closing quote, and the second
+// example used "gen_dir", which RunGetLabelInfo() rejects — the accepted
+// value is "target_gen_dir".
+const char kGetLabelInfo[] = "get_label_info";
+const char kGetLabelInfo_HelpShort[] =
+    "get_label_info: Get an attribute from a target's label.";
+const char kGetLabelInfo_Help[] =
+    "get_label_info: Get an attribute from a target's label.\n"
+    "\n"
+    "  get_label_info(target_label, what)\n"
+    "\n"
+    "  Given the label of a target, returns some attribute of that target.\n"
+    "  The target need not have been previously defined in the same file,\n"
+    "  since none of the attributes depend on the actual target definition,\n"
+    "  only the label itself.\n"
+    "\n"
+    "  See also \"gn help get_target_outputs\".\n"
+    "\n"
+    "Possible values for the \"what\" parameter\n"
+    "\n"
+    "  \"name\"\n"
+    "      The short name of the target. This will match the value of the\n"
+    "      \"target_name\" variable inside that target's declaration. For the\n"
+    "      label \"//foo/bar:baz\" this will return \"baz\".\n"
+    "\n"
+    "  \"dir\"\n"
+    "      The directory containing the target's definition, with no slash at\n"
+    "      the end. For the label \"//foo/bar:baz\" this will return\n"
+    "      \"//foo/bar\".\n"
+    "\n"
+    "  \"target_gen_dir\"\n"
+    "      The generated file directory for the target. This will match the\n"
+    "      value of the \"target_gen_dir\" variable when inside that target's\n"
+    "      declaration.\n"
+    "\n"
+    "  \"root_gen_dir\"\n"
+    "      The root of the generated file tree for the target. This will\n"
+    "      match the value of the \"root_gen_dir\" variable when inside that\n"
+    "      target's declaration.\n"
+    "\n"
+    "  \"target_out_dir\"\n"
+    "      The output directory for the target. This will match the\n"
+    "      value of the \"target_out_dir\" variable when inside that target's\n"
+    "      declaration.\n"
+    "\n"
+    "  \"root_out_dir\"\n"
+    "      The root of the output file tree for the target. This will\n"
+    "      match the value of the \"root_out_dir\" variable when inside that\n"
+    "      target's declaration.\n"
+    "\n"
+    "  \"label_no_toolchain\"\n"
+    "      The fully qualified version of this label, not including the\n"
+    "      toolchain. For the input \":bar\" it might return\n"
+    "      \"//foo:bar\".\n"
+    "\n"
+    "  \"label_with_toolchain\"\n"
+    "      The fully qualified version of this label, including the\n"
+    "      toolchain. For the input \":bar\" it might return\n"
+    "      \"//foo:bar(//toolchain:x64)\".\n"
+    "\n"
+    "  \"toolchain\"\n"
+    "      The label of the toolchain. This will match the value of the\n"
+    "      \"current_toolchain\" variable when inside that target's\n"
+    "      declaration.\n"
+    "\n"
+    "Examples\n"
+    "\n"
+    "  get_label_info(\":foo\", \"name\")\n"
+    "  # Returns string \"foo\".\n"
+    "\n"
+    "  get_label_info(\"//foo/bar:baz\", \"target_gen_dir\")\n"
+    "  # Returns string \"//out/Debug/gen/foo/bar\".\n";
+
+// Implements get_label_info(). Resolves |args[0]| as a label relative to the
+// current scope's directory and toolchain, then returns the string attribute
+// selected by |args[1]| ("what"). Sets |err| and returns an empty Value on
+// bad argument count, an unresolvable label, or an unknown "what" value.
+Value RunGetLabelInfo(Scope* scope,
+                      const FunctionCallNode* function,
+                      const std::vector<Value>& args,
+                      Err* err) {
+  if (args.size() != 2) {
+    *err = Err(function, "Expected two arguments.");
+    return Value();
+  }
+
+  // Resolve the requested label.
+  Label label = Label::Resolve(scope->GetSourceDir(),
+                               ToolchainLabelForScope(scope), args[0], err);
+  if (label.is_null())
+    return Value();
+
+  // Extract the "what" parameter.
+  if (!args[1].VerifyTypeIs(Value::STRING, err))
+    return Value();
+  const std::string& what = args[1].string_value();
+
+  Value result(function, Value::STRING);
+  if (what == "name") {
+    result.string_value() = label.name();
+
+  } else if (what == "dir") {
+    result.string_value() = DirectoryWithNoLastSlash(label.dir());
+
+  } else if (what == "target_gen_dir") {
+    result.string_value() = DirectoryWithNoLastSlash(
+        GetGenDirForSourceDir(scope->settings(), label.dir()));
+
+  } else if (what == "root_gen_dir") {
+    Label toolchain_label = label.GetToolchainLabel();
+    result.string_value() = DirectoryWithNoLastSlash(
+        GetToolchainGenDir(scope->settings()->build_settings(),
+                           toolchain_label,
+                           ToolchainIsDefault(scope, toolchain_label)));
+
+  } else if (what == "target_out_dir") {
+    Label toolchain_label = label.GetToolchainLabel();
+    result.string_value() = DirectoryWithNoLastSlash(
+        GetOutputDirForSourceDir(scope->settings()->build_settings(),
+                                 label.dir(), toolchain_label,
+                                 ToolchainIsDefault(scope, toolchain_label)));
+
+  } else if (what == "root_out_dir") {
+    Label toolchain_label = label.GetToolchainLabel();
+    result.string_value() = DirectoryWithNoLastSlash(
+        GetToolchainOutputDir(scope->settings()->build_settings(),
+                              toolchain_label,
+                              ToolchainIsDefault(scope, toolchain_label)));
+
+  } else if (what == "toolchain") {
+    result.string_value() = label.GetToolchainLabel().GetUserVisibleName(false);
+
+  } else if (what == "label_no_toolchain") {
+    result.string_value() =
+        label.GetWithNoToolchain().GetUserVisibleName(false);
+
+  } else if (what == "label_with_toolchain") {
+    result.string_value() = label.GetUserVisibleName(true);
+
+  } else {
+    *err = Err(args[1], "Unknown value for \"what\" parameter.");
+    return Value();
+  }
+
+  return result;
+}
+
+} // namespace functions
diff --git a/chromium/tools/gn/function_get_label_info_unittest.cc b/chromium/tools/gn/function_get_label_info_unittest.cc
new file mode 100644
index 00000000000..ddec8302c9a
--- /dev/null
+++ b/chromium/tools/gn/function_get_label_info_unittest.cc
@@ -0,0 +1,101 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/test_with_scope.h"
+
+namespace {
+
+// Test fixture whose scope's source dir is //src/foo/, so relative labels
+// like ":name" resolve under that directory.
+class GetLabelInfoTest : public testing::Test {
+ public:
+  GetLabelInfoTest() : testing::Test() {
+    setup_.scope()->set_source_dir(SourceDir("//src/foo/"));
+  }
+
+  // Convenience wrapper to call GetLabelInfo. Returns the result string, or
+  // the empty string when the call reports an error (in which case the
+  // result is also expected to be NONE).
+  std::string Call(const std::string& label, const std::string& what) {
+    FunctionCallNode function;
+
+    std::vector<Value> args;
+    args.push_back(Value(nullptr, label));
+    args.push_back(Value(nullptr, what));
+
+    Err err;
+    Value result = functions::RunGetLabelInfo(setup_.scope(), &function,
+                                              args, &err);
+    if (err.has_error()) {
+      EXPECT_TRUE(result.type() == Value::NONE);
+      return std::string();
+    }
+    return result.string_value();
+  }
+
+ protected:
+  // Note: TestWithScope's default toolchain is "//toolchain:default" and
+  // output dir is "//out/Debug".
+  TestWithScope setup_;
+};
+
+} // namespace
+
+// An unknown "what" value or an unresolvable label yields the empty string.
+TEST_F(GetLabelInfoTest, BadInput) {
+  EXPECT_EQ("", Call(":name", "incorrect_value"));
+  EXPECT_EQ("", Call("", "name"));
+}
+
+// "name" returns the part after the colon, ignoring dir and toolchain.
+TEST_F(GetLabelInfoTest, Name) {
+  EXPECT_EQ("name", Call(":name", "name"));
+  EXPECT_EQ("name", Call("//foo/bar:name", "name"));
+  EXPECT_EQ("name", Call("//foo/bar:name(//other:tc)", "name"));
+}
+
+// "dir" resolves relative labels against //src/foo and has no trailing slash.
+TEST_F(GetLabelInfoTest, Dir) {
+  EXPECT_EQ("//src/foo", Call(":name", "dir"));
+  EXPECT_EQ("//foo/bar", Call("//foo/bar:baz", "dir"));
+  EXPECT_EQ("//foo/bar", Call("//foo/bar:baz(//other:tc)", "dir"));
+}
+
+// "root_out_dir" honors an explicit non-default toolchain in the label.
+TEST_F(GetLabelInfoTest, RootOutDir) {
+  EXPECT_EQ("//out/Debug", Call(":name", "root_out_dir"));
+  EXPECT_EQ("//out/Debug/random",
+            Call(":name(//toolchain:random)", "root_out_dir"));
+}
+
+// "root_gen_dir": the default toolchain (implicit or explicit) maps to
+// gen/ directly under the out dir; other toolchains get a subdirectory.
+TEST_F(GetLabelInfoTest, RootGenDir) {
+  EXPECT_EQ("//out/Debug/gen", Call(":name", "root_gen_dir"));
+  EXPECT_EQ("//out/Debug/gen",
+            Call(":name(//toolchain:default)", "root_gen_dir"));
+  EXPECT_EQ("//out/Debug/random/gen",
+            Call(":name(//toolchain:random)", "root_gen_dir"));
+}
+
+// "target_out_dir" appends obj/<label dir> under the toolchain's out dir.
+TEST_F(GetLabelInfoTest, TargetOutDir) {
+  EXPECT_EQ("//out/Debug/obj/src/foo", Call(":name", "target_out_dir"));
+  EXPECT_EQ("//out/Debug", Call(":name", "root_out_dir"));
+
+  EXPECT_EQ("//out/Debug/obj/foo",
+            Call("//foo:name(//toolchain:default)", "target_out_dir"));
+  EXPECT_EQ("//out/Debug/random/obj/foo",
+            Call("//foo:name(//toolchain:random)", "target_out_dir"));
+}
+
+// "label_no_toolchain" is fully qualified but strips any toolchain suffix.
+TEST_F(GetLabelInfoTest, LabelNoToolchain) {
+  EXPECT_EQ("//src/foo:name", Call(":name", "label_no_toolchain"));
+  EXPECT_EQ("//src/foo:name",
+            Call("//src/foo:name(//toolchain:random)", "label_no_toolchain"));
+}
+
+// "label_with_toolchain" always includes a toolchain, defaulting it when the
+// input label had none.
+TEST_F(GetLabelInfoTest, LabelWithToolchain) {
+  EXPECT_EQ("//src/foo:name(//toolchain:default)",
+            Call(":name", "label_with_toolchain"));
+  EXPECT_EQ("//src/foo:name(//toolchain:random)",
+            Call(":name(//toolchain:random)", "label_with_toolchain"));
+}
+
+// "toolchain" returns just the toolchain label, defaulted when omitted.
+TEST_F(GetLabelInfoTest, Toolchain) {
+  EXPECT_EQ("//toolchain:default", Call(":name", "toolchain"));
+  EXPECT_EQ("//toolchain:random",
+            Call(":name(//toolchain:random)", "toolchain"));
+}
diff --git a/chromium/tools/gn/function_get_path_info.cc b/chromium/tools/gn/function_get_path_info.cc
new file mode 100644
index 00000000000..d994f06bde1
--- /dev/null
+++ b/chromium/tools/gn/function_get_path_info.cc
@@ -0,0 +1,253 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include "tools/gn/err.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/value.h"
+
+namespace functions {
+
+namespace {
+
+// Corresponds to the various values of "what" in the function call.
+// Each enumerator maps 1:1 to one of the "what" strings parsed in
+// RunGetPathInfo() below.
+enum What {
+  WHAT_FILE,
+  WHAT_NAME,
+  WHAT_EXTENSION,
+  WHAT_DIR,
+  WHAT_ABSPATH,
+  WHAT_GEN_DIR,
+  WHAT_OUT_DIR,
+};
+
+// Returns the directory containing the input (resolving it against the
+// |current_dir|), regardless of whether the input is a directory or a file.
+// A trailing slash distinguishes the two cases.
+SourceDir DirForInput(const Settings* settings,
+                      const SourceDir& current_dir,
+                      const Value& input,
+                      Err* err) {
+  // Input should already have been validated as a string.
+  const std::string& input_string = input.string_value();
+
+  if (!input_string.empty() && input_string[input_string.size() - 1] == '/') {
+    // Input is a directory.
+    return current_dir.ResolveRelativeDir(input, err,
+        settings->build_settings()->root_path_utf8());
+  }
+
+  // Input is a file; resolve it and return the directory containing it.
+  return current_dir.ResolveRelativeFile(input, err,
+      settings->build_settings()->root_path_utf8()).GetDir();
+}
+
+// Computes the requested path attribute |what| for a single string |input|,
+// resolving relative paths against |current_dir|. Returns the empty string
+// and sets |err| if |input| is not a non-empty string.
+std::string GetOnePathInfo(const Settings* settings,
+                           const SourceDir& current_dir,
+                           What what,
+                           const Value& input,
+                           Err* err) {
+  if (!input.VerifyTypeIs(Value::STRING, err))
+    return std::string();
+  const std::string& input_string = input.string_value();
+  if (input_string.empty()) {
+    *err = Err(input, "Calling get_path_info on an empty string.");
+    return std::string();
+  }
+
+  switch (what) {
+    case WHAT_FILE: {
+      return FindFilename(&input_string).as_string();
+    }
+    case WHAT_NAME: {
+      std::string file = FindFilename(&input_string).as_string();
+      size_t extension_offset = FindExtensionOffset(file);
+      if (extension_offset == std::string::npos)
+        return file;
+      // Trim extension and dot.
+      return file.substr(0, extension_offset - 1);
+    }
+    case WHAT_EXTENSION: {
+      return FindExtension(&input_string).as_string();
+    }
+    case WHAT_DIR: {
+      base::StringPiece dir_incl_slash = FindDir(&input_string);
+      if (dir_incl_slash.empty())
+        return std::string(".");
+      // Trim slash since this function doesn't return trailing slashes. The
+      // times we don't do this are if the result is "/" and "//" since those
+      // slashes can't be trimmed.
+      if (dir_incl_slash == "/")
+        return std::string("/.");
+      if (dir_incl_slash == "//")
+        return std::string("//.");
+      return dir_incl_slash.substr(0, dir_incl_slash.size() - 1).as_string();
+    }
+    case WHAT_GEN_DIR: {
+      return DirectoryWithNoLastSlash(
+          GetGenDirForSourceDir(settings,
+                                DirForInput(settings, current_dir,
+                                            input, err)));
+    }
+    case WHAT_OUT_DIR: {
+      return DirectoryWithNoLastSlash(
+          GetOutputDirForSourceDir(settings,
+                                   DirForInput(settings, current_dir,
+                                               input, err)));
+    }
+    case WHAT_ABSPATH: {
+      // A trailing slash means the input names a directory; preserve it.
+      if (!input_string.empty() &&
+          input_string[input_string.size() - 1] == '/') {
+        return current_dir.ResolveRelativeDir(input, err,
+            settings->build_settings()->root_path_utf8()).value();
+      } else {
+        return current_dir.ResolveRelativeFile(input, err,
+            settings->build_settings()->root_path_utf8()).value();
+      }
+    }
+    default:
+      NOTREACHED();
+      return std::string();
+  }
+}
+
+} // namespace
+
+// Function name and user-facing help text for get_path_info. Fixes: the
+// "\n" separator between the "out_dir" and "gen_dir" sections was missing
+// (the line was an empty literal continuation), the "dir" note read "if the
+// resulting is empty" (missing "path"), and the first example called
+// get_path_info(source, ...) while the variable defined above it is
+// "sources".
+const char kGetPathInfo[] = "get_path_info";
+const char kGetPathInfo_HelpShort[] =
+    "get_path_info: Extract parts of a file or directory name.";
+const char kGetPathInfo_Help[] =
+    "get_path_info: Extract parts of a file or directory name.\n"
+    "\n"
+    "  get_path_info(input, what)\n"
+    "\n"
+    "  The first argument is either a string representing a file or\n"
+    "  directory name, or a list of such strings. If the input is a list\n"
+    "  the return value will be a list containing the result of applying the\n"
+    "  rule to each item in the input.\n"
+    "\n"
+    "Possible values for the \"what\" parameter\n"
+    "\n"
+    "  \"file\"\n"
+    "      The substring after the last slash in the path, including the name\n"
+    "      and extension. If the input ends in a slash, the empty string will\n"
+    "      be returned.\n"
+    "        \"foo/bar.txt\" => \"bar.txt\"\n"
+    "        \"bar.txt\" => \"bar.txt\"\n"
+    "        \"foo/\" => \"\"\n"
+    "        \"\" => \"\"\n"
+    "\n"
+    "  \"name\"\n"
+    "      The substring of the file name not including the extension.\n"
+    "        \"foo/bar.txt\" => \"bar\"\n"
+    "        \"foo/bar\" => \"bar\"\n"
+    "        \"foo/\" => \"\"\n"
+    "\n"
+    "  \"extension\"\n"
+    "      The substring following the last period following the last slash,\n"
+    "      or the empty string if not found. The period is not included.\n"
+    "        \"foo/bar.txt\" => \"txt\"\n"
+    "        \"foo/bar\" => \"\"\n"
+    "\n"
+    "  \"dir\"\n"
+    "      The directory portion of the name, not including the slash.\n"
+    "        \"foo/bar.txt\" => \"foo\"\n"
+    "        \"//foo/bar\" => \"//foo\"\n"
+    "        \"foo\" => \".\"\n"
+    "\n"
+    "      The result will never end in a slash, so if the resulting path\n"
+    "      is empty, or is the system (\"/\") or source (\"//\") root, a \".\"\n"
+    "      will be appended such that it is always legal to append a slash\n"
+    "      and a filename and get a valid path.\n"
+    "\n"
+    "  \"out_dir\"\n"
+    "      The output file directory corresponding to the path of the\n"
+    "      given file, not including a trailing slash.\n"
+    "        \"//foo/bar/baz.txt\" => \"//out/Default/obj/foo/bar\"\n"
+    "\n"
+    "  \"gen_dir\"\n"
+    "      The generated file directory corresponding to the path of the\n"
+    "      given file, not including a trailing slash.\n"
+    "        \"//foo/bar/baz.txt\" => \"//out/Default/gen/foo/bar\"\n"
+    "\n"
+    "  \"abspath\"\n"
+    "      The full absolute path name to the file or directory. It will be\n"
+    "      resolved relative to the current directory, and then the source-\n"
+    "      absolute version will be returned. If the input is system-\n"
+    "      absolute, the same input will be returned.\n"
+    "        \"foo/bar.txt\" => \"//mydir/foo/bar.txt\"\n"
+    "        \"foo/\" => \"//mydir/foo/\"\n"
+    "        \"//foo/bar\" => \"//foo/bar\" (already absolute)\n"
+    "        \"/usr/include\" => \"/usr/include\" (already absolute)\n"
+    "\n"
+    "      If you want to make the path relative to another directory, or to\n"
+    "      be system-absolute, see rebase_path().\n"
+    "\n"
+    "Examples\n"
+    "  sources = [ \"foo.cc\", \"foo.h\" ]\n"
+    "  result = get_path_info(sources, \"abspath\")\n"
+    "  # result will be [ \"//mydir/foo.cc\", \"//mydir/foo.h\" ]\n"
+    "\n"
+    "  result = get_path_info(\"//foo/bar/baz.cc\", \"dir\")\n"
+    "  # result will be \"//foo/bar\"\n"
+    "\n"
+    "  # Extract the source-absolute directory name,\n"
+    "  result = get_path_info(get_path_info(path, \"dir\"), \"abspath\")\n";
+
+// Implements get_path_info(). |args[0]| may be a single string or a list of
+// strings; for a list, the result is a list with the rule applied
+// element-wise. |args[1]| selects the attribute ("what"). Sets |err| and
+// returns an empty Value on bad arguments.
+Value RunGetPathInfo(Scope* scope,
+                     const FunctionCallNode* function,
+                     const std::vector<Value>& args,
+                     Err* err) {
+  if (args.size() != 2) {
+    *err = Err(function, "Expecting two arguments to get_path_info.");
+    return Value();
+  }
+
+  // Extract the "what".
+  if (!args[1].VerifyTypeIs(Value::STRING, err))
+    return Value();
+  What what;
+  if (args[1].string_value() == "file") {
+    what = WHAT_FILE;
+  } else if (args[1].string_value() == "name") {
+    what = WHAT_NAME;
+  } else if (args[1].string_value() == "extension") {
+    what = WHAT_EXTENSION;
+  } else if (args[1].string_value() == "dir") {
+    what = WHAT_DIR;
+  } else if (args[1].string_value() == "out_dir") {
+    what = WHAT_OUT_DIR;
+  } else if (args[1].string_value() == "gen_dir") {
+    what = WHAT_GEN_DIR;
+  } else if (args[1].string_value() == "abspath") {
+    what = WHAT_ABSPATH;
+  } else {
+    *err = Err(args[1], "Unknown value for 'what'.");
+    return Value();
+  }
+
+  const SourceDir& current_dir = scope->GetSourceDir();
+  if (args[0].type() == Value::STRING) {
+    return Value(function, GetOnePathInfo(scope->settings(), current_dir, what,
+                                          args[0], err));
+  } else if (args[0].type() == Value::LIST) {
+    const std::vector<Value>& input_list = args[0].list_value();
+    Value result(function, Value::LIST);
+    for (const auto& cur : input_list) {
+      result.list_value().push_back(Value(function,
+          GetOnePathInfo(scope->settings(), current_dir, what, cur, err)));
+      if (err->has_error())
+        return Value();
+    }
+    return result;
+  }
+
+  *err = Err(args[0], "Path must be a string or a list of strings.");
+  return Value();
+}
diff --git a/chromium/tools/gn/function_get_path_info_unittest.cc b/chromium/tools/gn/function_get_path_info_unittest.cc
new file mode 100644
index 00000000000..79020df0824
--- /dev/null
+++ b/chromium/tools/gn/function_get_path_info_unittest.cc
@@ -0,0 +1,120 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "build/build_config.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/test_with_scope.h"
+
+namespace {
+
+class GetPathInfoTest : public testing::Test {
+ public:
+ GetPathInfoTest() : testing::Test() {
+ setup_.scope()->set_source_dir(SourceDir("//src/foo/"));
+ }
+
+ // Convenience wrapper to call RunGetPathInfo.
+ std::string Call(const std::string& input, const std::string& what) {
+ FunctionCallNode function;
+
+ std::vector<Value> args;
+ args.push_back(Value(nullptr, input));
+ args.push_back(Value(nullptr, what));
+
+ Err err;
+ Value result = functions::RunGetPathInfo(setup_.scope(), &function,
+ args, &err);
+ if (err.has_error()) {
+ EXPECT_TRUE(result.type() == Value::NONE);
+ return std::string();
+ }
+ return result.string_value();
+ }
+
+ protected:
+ TestWithScope setup_;
+};
+
+} // namespace
+
+TEST_F(GetPathInfoTest, File) {
+ EXPECT_EQ("bar.txt", Call("foo/bar.txt", "file"));
+ EXPECT_EQ("bar.txt", Call("bar.txt", "file"));
+ EXPECT_EQ("bar.txt", Call("/bar.txt", "file"));
+ EXPECT_EQ("", Call("foo/", "file"));
+ EXPECT_EQ("", Call("//", "file"));
+ EXPECT_EQ("", Call("/", "file"));
+}
+
+TEST_F(GetPathInfoTest, Name) {
+ EXPECT_EQ("bar", Call("foo/bar.txt", "name"));
+ EXPECT_EQ("bar", Call("bar.", "name"));
+ EXPECT_EQ("", Call("/.txt", "name"));
+ EXPECT_EQ("", Call("foo/", "name"));
+ EXPECT_EQ("", Call("//", "name"));
+ EXPECT_EQ("", Call("/", "name"));
+}
+
+TEST_F(GetPathInfoTest, Extension) {
+ EXPECT_EQ("txt", Call("foo/bar.txt", "extension"));
+ EXPECT_EQ("", Call("bar.", "extension"));
+ EXPECT_EQ("txt", Call("/.txt", "extension"));
+ EXPECT_EQ("", Call("f.oo/", "extension"));
+ EXPECT_EQ("", Call("//", "extension"));
+ EXPECT_EQ("", Call("/", "extension"));
+}
+
+TEST_F(GetPathInfoTest, Dir) {
+ EXPECT_EQ("foo", Call("foo/bar.txt", "dir"));
+ EXPECT_EQ(".", Call("bar.txt", "dir"));
+ EXPECT_EQ("foo/bar", Call("foo/bar/baz", "dir"));
+ EXPECT_EQ("//foo", Call("//foo/", "dir"));
+ EXPECT_EQ("//.", Call("//", "dir"));
+ EXPECT_EQ("/foo", Call("/foo/", "dir"));
+ EXPECT_EQ("/.", Call("/", "dir"));
+}
+
+// Note "current dir" is "//src/foo"
+TEST_F(GetPathInfoTest, AbsPath) {
+ EXPECT_EQ("//src/foo/foo/bar.txt", Call("foo/bar.txt", "abspath"));
+ EXPECT_EQ("//src/foo/bar.txt", Call("bar.txt", "abspath"));
+ EXPECT_EQ("//src/foo/bar/", Call("bar/", "abspath"));
+ EXPECT_EQ("//foo", Call("//foo", "abspath"));
+ EXPECT_EQ("//foo/", Call("//foo/", "abspath"));
+ EXPECT_EQ("//", Call("//", "abspath"));
+ EXPECT_EQ("/foo", Call("/foo", "abspath"));
+ EXPECT_EQ("/foo/", Call("/foo/", "abspath"));
+ EXPECT_EQ("/", Call("/", "abspath"));
+}
+
+// Note build dir is "//out/Debug/".
+TEST_F(GetPathInfoTest, OutDir) {
+ EXPECT_EQ("//out/Debug/obj/src/foo/foo", Call("foo/bar.txt", "out_dir"));
+ EXPECT_EQ("//out/Debug/obj/src/foo/bar", Call("bar/", "out_dir"));
+ EXPECT_EQ("//out/Debug/obj/src/foo", Call(".", "out_dir"));
+ EXPECT_EQ("//out/Debug/obj/src/foo", Call("bar", "out_dir"));
+ EXPECT_EQ("//out/Debug/obj/foo", Call("//foo/bar.txt", "out_dir"));
+ // System paths go into the ABS_PATH obj directory.
+ EXPECT_EQ("//out/Debug/obj/ABS_PATH/foo", Call("/foo/bar.txt", "out_dir"));
+#if defined(OS_WIN)
+ EXPECT_EQ("//out/Debug/obj/ABS_PATH/C/foo",
+ Call("/C:/foo/bar.txt", "out_dir"));
+#endif
+}
+
+// Note build dir is "//out/Debug/".
+TEST_F(GetPathInfoTest, GenDir) {
+ EXPECT_EQ("//out/Debug/gen/src/foo/foo", Call("foo/bar.txt", "gen_dir"));
+ EXPECT_EQ("//out/Debug/gen/src/foo/bar", Call("bar/", "gen_dir"));
+ EXPECT_EQ("//out/Debug/gen/src/foo", Call(".", "gen_dir"));
+ EXPECT_EQ("//out/Debug/gen/src/foo", Call("bar", "gen_dir"));
+ EXPECT_EQ("//out/Debug/gen/foo", Call("//foo/bar.txt", "gen_dir"));
+ // System paths go into the ABS_PATH gen directory
+ EXPECT_EQ("//out/Debug/gen/ABS_PATH/foo", Call("/foo/bar.txt", "gen_dir"));
+#if defined(OS_WIN)
+ EXPECT_EQ("//out/Debug/gen/ABS_PATH/C/foo",
+ Call("/C:/foo/bar.txt", "gen_dir"));
+#endif
+}
diff --git a/chromium/tools/gn/function_get_target_outputs.cc b/chromium/tools/gn/function_get_target_outputs.cc
new file mode 100644
index 00000000000..6e3cf6563ca
--- /dev/null
+++ b/chromium/tools/gn/function_get_target_outputs.cc
@@ -0,0 +1,140 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/build_settings.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/substitution_writer.h"
+#include "tools/gn/target.h"
+#include "tools/gn/value.h"
+
+namespace functions {
+
+const char kGetTargetOutputs[] = "get_target_outputs";
+const char kGetTargetOutputs_HelpShort[] =
+ "get_target_outputs: [file list] Get the list of outputs from a target.";
+const char kGetTargetOutputs_Help[] =
+ "get_target_outputs: [file list] Get the list of outputs from a target.\n"
+ "\n"
+ " get_target_outputs(target_label)\n"
+ "\n"
+ " Returns a list of output files for the named target. The named target\n"
+ " must have been previously defined in the current file before this\n"
+ " function is called (it can't reference targets in other files because\n"
+ " there isn't a defined execution order, and it obviously can't\n"
+ " reference targets that are defined after the function call).\n"
+ "\n"
+ " Only copy and action targets are supported. The outputs from binary\n"
+ " targets will depend on the toolchain definition which won't\n"
+ " necessarily have been loaded by the time a given line of code has run,\n"
+ " and source sets and groups have no useful output file.\n"
+ "\n"
+ "Return value\n"
+ "\n"
+ " The names in the resulting list will be absolute file paths (normally\n"
+ " like \"//out/Debug/bar.exe\", depending on the build directory).\n"
+ "\n"
+ " action targets: this will just return the files specified in the\n"
+ " \"outputs\" variable of the target.\n"
+ "\n"
+ " action_foreach targets: this will return the result of applying\n"
+ " the output template to the sources (see \"gn help source_expansion\").\n"
+ " This will be the same result (though with guaranteed absolute file\n"
+ " paths), as process_file_template will return for those inputs\n"
+ " (see \"gn help process_file_template\").\n"
+ "\n"
+ " binary targets (executables, libraries): this will return a list\n"
+ " of the resulting binary file(s). The \"main output\" (the actual\n"
+ " binary or library) will always be the 0th element in the result.\n"
+ " Depending on the platform and output type, there may be other output\n"
+ " files as well (like import libraries) which will follow.\n"
+ "\n"
+ " source sets and groups: this will return a list containing the path of\n"
+ " the \"stamp\" file that Ninja will produce once all outputs are\n"
+ " generated. This probably isn't very useful.\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " # Say this action generates a bunch of C source files.\n"
+ " action_foreach(\"my_action\") {\n"
+ " sources = [ ... ]\n"
+ " outputs = [ ... ]\n"
+ " }\n"
+ "\n"
+ " # Compile the resulting source files into a source set.\n"
+ " source_set(\"my_lib\") {\n"
+ " sources = get_target_outputs(\":my_action\")\n"
+ " }\n";
+
+Value RunGetTargetOutputs(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err) {
+ if (args.size() != 1) {
+ *err = Err(function, "Expected one argument.");
+ return Value();
+ }
+
+ // Resolve the requested label.
+ Label label = Label::Resolve(scope->GetSourceDir(),
+ ToolchainLabelForScope(scope), args[0], err);
+ if (label.is_null())
+ return Value();
+
+ // Find the referenced target. The targets previously encountered in this
+ // scope will have been stashed in the item collector (they'll be dispatched
+ // when this file is done running) so we can look through them.
+ const Target* target = nullptr;
+ Scope::ItemVector* collector = scope->GetItemCollector();
+ if (!collector) {
+ *err = Err(function, "No targets defined in this context.");
+ return Value();
+ }
+ for (const auto& item : *collector) {
+ if (item->label() != label)
+ continue;
+
+ const Target* as_target = item->AsTarget();
+ if (!as_target) {
+ *err = Err(function, "Label does not refer to a target.",
+ label.GetUserVisibleName(false) +
+ "\nrefers to a " + item->GetItemTypeName());
+ return Value();
+ }
+ target = as_target;
+ break;
+ }
+
+ if (!target) {
+ *err = Err(function, "Target not found in this context.",
+ label.GetUserVisibleName(false) +
+ "\nwas not found. get_target_outputs() can only be used for targets\n"
+ "previously defined in the current file.");
+ return Value();
+ }
+
+ // Compute the output list.
+ std::vector<SourceFile> files;
+ if (target->output_type() == Target::ACTION ||
+ target->output_type() == Target::COPY_FILES ||
+ target->output_type() == Target::ACTION_FOREACH) {
+ target->action_values().GetOutputsAsSourceFiles(target, &files);
+ } else {
+ // Other types of targets are not supported.
+ *err = Err(args[0], "Target is not an action, action_foreach, or copy.",
+ "Only these target types are supported by get_target_outputs.");
+ return Value();
+ }
+
+ // Convert to Values.
+ Value ret(function, Value::LIST);
+ ret.list_value().reserve(files.size());
+ for (const auto& file : files)
+ ret.list_value().push_back(Value(function, file.value()));
+
+ return ret;
+}
+
+} // namespace functions
diff --git a/chromium/tools/gn/function_get_target_outputs_unittest.cc b/chromium/tools/gn/function_get_target_outputs_unittest.cc
new file mode 100644
index 00000000000..c947709a2ed
--- /dev/null
+++ b/chromium/tools/gn/function_get_target_outputs_unittest.cc
@@ -0,0 +1,104 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/target.h"
+#include "tools/gn/test_with_scope.h"
+
+namespace {
+
+class GetTargetOutputsTest : public testing::Test {
+ public:
+ GetTargetOutputsTest() {
+ setup_.scope()->set_item_collector(&items_);
+ }
+
+ Value GetTargetOutputs(const std::string& name, Err* err) {
+ FunctionCallNode function;
+ std::vector<Value> args;
+ args.push_back(Value(nullptr, name));
+ return functions::RunGetTargetOutputs(setup_.scope(), &function, args, err);
+ }
+
+ // Shortcut to get a label with the current toolchain.
+ Label GetLabel(const std::string& dir, const std::string& name) {
+ return Label(SourceDir(dir), name, setup_.toolchain()->label().dir(),
+ setup_.toolchain()->label().name());
+ }
+
+ // Asserts that the given list contains a single string with the given value.
+ void AssertSingleStringEquals(const Value& list,
+ const std::string& expected) {
+ ASSERT_TRUE(list.type() == Value::LIST);
+ ASSERT_EQ(1u, list.list_value().size());
+ ASSERT_TRUE(list.list_value()[0].type() == Value::STRING);
+ ASSERT_EQ(expected, list.list_value()[0].string_value());
+ }
+
+ void AssertTwoStringsEqual(const Value& list,
+ const std::string& expected1,
+ const std::string& expected2) {
+ ASSERT_TRUE(list.type() == Value::LIST);
+ ASSERT_EQ(2u, list.list_value().size());
+ ASSERT_TRUE(list.list_value()[0].type() == Value::STRING);
+ ASSERT_EQ(expected1, list.list_value()[0].string_value());
+ ASSERT_TRUE(list.list_value()[1].type() == Value::STRING);
+ ASSERT_EQ(expected2, list.list_value()[1].string_value());
+ }
+
+ protected:
+ TestWithScope setup_;
+
+ Scope::ItemVector items_;
+};
+
+} // namespace
+
+TEST_F(GetTargetOutputsTest, Copy) {
+ Target* action = new Target(setup_.settings(), GetLabel("//foo/", "bar"));
+ action->set_output_type(Target::COPY_FILES);
+ action->sources().push_back(SourceFile("//file.txt"));
+ action->action_values().outputs() =
+ SubstitutionList::MakeForTest("//out/Debug/{{source_file_part}}.one");
+
+ items_.push_back(action);
+
+ Err err;
+ Value result = GetTargetOutputs("//foo:bar", &err);
+ ASSERT_FALSE(err.has_error());
+ AssertSingleStringEquals(result, "//out/Debug/file.txt.one");
+}
+
+TEST_F(GetTargetOutputsTest, Action) {
+ Target* action = new Target(setup_.settings(), GetLabel("//foo/", "bar"));
+ action->set_output_type(Target::ACTION);
+ action->action_values().outputs() = SubstitutionList::MakeForTest(
+ "//output1.txt",
+ "//output2.txt");
+
+ items_.push_back(action);
+
+ Err err;
+ Value result = GetTargetOutputs("//foo:bar", &err);
+ ASSERT_FALSE(err.has_error());
+ AssertTwoStringsEqual(result, "//output1.txt", "//output2.txt");
+}
+
+TEST_F(GetTargetOutputsTest, ActionForeach) {
+ Target* action = new Target(setup_.settings(), GetLabel("//foo/", "bar"));
+ action->set_output_type(Target::ACTION_FOREACH);
+ action->sources().push_back(SourceFile("//file.txt"));
+ action->action_values().outputs() = SubstitutionList::MakeForTest(
+ "//out/Debug/{{source_file_part}}.one",
+ "//out/Debug/{{source_file_part}}.two");
+
+ items_.push_back(action);
+
+ Err err;
+ Value result = GetTargetOutputs("//foo:bar", &err);
+ ASSERT_FALSE(err.has_error());
+ AssertTwoStringsEqual(result, "//out/Debug/file.txt.one",
+ "//out/Debug/file.txt.two");
+}
diff --git a/chromium/tools/gn/function_process_file_template.cc b/chromium/tools/gn/function_process_file_template.cc
new file mode 100644
index 00000000000..b7828a6b8aa
--- /dev/null
+++ b/chromium/tools/gn/function_process_file_template.cc
@@ -0,0 +1,107 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/functions.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/substitution_list.h"
+#include "tools/gn/substitution_writer.h"
+#include "tools/gn/target.h"
+#include "tools/gn/value_extractors.h"
+
+namespace functions {
+
+const char kProcessFileTemplate[] = "process_file_template";
+const char kProcessFileTemplate_HelpShort[] =
+ "process_file_template: Do template expansion over a list of files.";
+const char kProcessFileTemplate_Help[] =
+ "process_file_template: Do template expansion over a list of files.\n"
+ "\n"
+ " process_file_template(source_list, template)\n"
+ "\n"
+ " process_file_template applies a template list to a source file list,\n"
+ " returning the result of applying each template to each source. This is\n"
+ " typically used for computing output file names from input files.\n"
+ "\n"
+ " In most cases, get_target_outputs() will give the same result with\n"
+ " shorter, more maintainable code. This function should only be used\n"
+ " when that function can't be used (like there's no target or the target\n"
+ " is defined in another build file).\n"
+ "\n"
+ "Arguments:\n"
+ "\n"
+ " The source_list is a list of file names.\n"
+ "\n"
+ " The template can be a string or a list. If it is a list, multiple\n"
+ " output strings are generated for each input.\n"
+ "\n"
+ " The template should contain source expansions to which each name in\n"
+ " the source list is applied. See \"gn help source_expansion\".\n"
+ "\n"
+ "Example:\n"
+ "\n"
+ " sources = [\n"
+ " \"foo.idl\",\n"
+ " \"bar.idl\",\n"
+ " ]\n"
+ " myoutputs = process_file_template(\n"
+ " sources,\n"
+ " [ \"$target_gen_dir/{{source_name_part}}.cc\",\n"
+ " \"$target_gen_dir/{{source_name_part}}.h\" ])\n"
+ "\n"
+ " The result in this case will be:\n"
+ " [ \"//out/Debug/foo.cc\"\n"
+ " \"//out/Debug/foo.h\"\n"
+ " \"//out/Debug/bar.cc\"\n"
+ " \"//out/Debug/bar.h\" ]\n";
+
+Value RunProcessFileTemplate(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err) {
+ if (args.size() != 2) {
+ *err = Err(function->function(), "Expected two arguments");
+ return Value();
+ }
+
+ // Source list.
+ Target::FileList input_files;
+ if (!ExtractListOfRelativeFiles(scope->settings()->build_settings(), args[0],
+ scope->GetSourceDir(), &input_files, err))
+ return Value();
+
+ std::vector<std::string> result_files;
+ SubstitutionList subst;
+
+ // Template.
+ const Value& template_arg = args[1];
+ if (template_arg.type() == Value::STRING) {
+ // Convert the string to a SubstitutionList with one pattern in it to
+ // simplify the code below.
+ std::vector<std::string> list;
+ list.push_back(template_arg.string_value());
+ if (!subst.Parse(list, template_arg.origin(), err))
+ return Value();
+ } else if (template_arg.type() == Value::LIST) {
+ if (!subst.Parse(template_arg, err))
+ return Value();
+ } else {
+ *err = Err(template_arg, "Not a string or a list.");
+ return Value();
+ }
+
+ SubstitutionWriter::ApplyListToSourcesAsString(
+ scope->settings(), subst, input_files, &result_files);
+
+ // Convert the list of strings to the return Value.
+ Value ret(function, Value::LIST);
+ ret.list_value().reserve(result_files.size());
+ for (const auto& file : result_files)
+ ret.list_value().push_back(Value(function, file));
+
+ return ret;
+}
+
+} // namespace functions
diff --git a/chromium/tools/gn/function_process_file_template_unittest.cc b/chromium/tools/gn/function_process_file_template_unittest.cc
new file mode 100644
index 00000000000..a455222a549
--- /dev/null
+++ b/chromium/tools/gn/function_process_file_template_unittest.cc
@@ -0,0 +1,64 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/test_with_scope.h"
+
+TEST(FunctionProcessFileTemplates, SingleString) {
+ TestWithScope setup;
+
+ std::vector<Value> args;
+
+ Value sources(nullptr, Value::LIST);
+ sources.list_value().push_back(Value(nullptr, "//src/foo.txt"));
+ args.push_back(sources);
+
+ Value expansion(nullptr, "1234{{source_name_part}}5678");
+ args.push_back(expansion);
+
+ Err err;
+ Value result =
+ functions::RunProcessFileTemplate(setup.scope(), nullptr, args, &err);
+ EXPECT_FALSE(err.has_error());
+
+ ASSERT_TRUE(result.type() == Value::LIST);
+ ASSERT_EQ(1u, result.list_value().size());
+ ASSERT_TRUE(result.list_value()[0].type() == Value::STRING);
+ ASSERT_EQ("1234foo5678", result.list_value()[0].string_value());
+}
+
+TEST(FunctionProcessFileTemplates, MultipleStrings) {
+ TestWithScope setup;
+
+ std::vector<Value> args;
+
+ Value sources(nullptr, Value::LIST);
+ sources.list_value().push_back(Value(nullptr, "//src/one.txt"));
+ sources.list_value().push_back(Value(nullptr, "//src/two.txt"));
+ args.push_back(sources);
+
+ Value expansions(nullptr, Value::LIST);
+ expansions.list_value().push_back(
+ Value(nullptr, "1234{{source_name_part}}5678"));
+ expansions.list_value().push_back(
+ Value(nullptr, "ABCD{{source_file_part}}EFGH"));
+ args.push_back(expansions);
+
+ Err err;
+ Value result =
+ functions::RunProcessFileTemplate(setup.scope(), nullptr, args, &err);
+ EXPECT_FALSE(err.has_error());
+
+ ASSERT_TRUE(result.type() == Value::LIST);
+ ASSERT_EQ(4u, result.list_value().size());
+ ASSERT_TRUE(result.list_value()[0].type() == Value::STRING);
+ ASSERT_TRUE(result.list_value()[1].type() == Value::STRING);
+ ASSERT_TRUE(result.list_value()[2].type() == Value::STRING);
+ ASSERT_TRUE(result.list_value()[3].type() == Value::STRING);
+ ASSERT_EQ("1234one5678", result.list_value()[0].string_value());
+ ASSERT_EQ("ABCDone.txtEFGH", result.list_value()[1].string_value());
+ ASSERT_EQ("1234two5678", result.list_value()[2].string_value());
+ ASSERT_EQ("ABCDtwo.txtEFGH", result.list_value()[3].string_value());
+}
diff --git a/chromium/tools/gn/function_read_file.cc b/chromium/tools/gn/function_read_file.cc
new file mode 100644
index 00000000000..11585d89615
--- /dev/null
+++ b/chromium/tools/gn/function_read_file.cc
@@ -0,0 +1,78 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/files/file_util.h"
+#include "tools/gn/err.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/input_conversion.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/scheduler.h"
+
+// TODO(brettw) consider removing this. I originally wrote it for making the
+// WebKit bindings but misundersood what was required, and didn't need to
+// use this. This seems to have a high potential for misuse.
+
+namespace functions {
+
+const char kReadFile[] = "read_file";
+const char kReadFile_HelpShort[] =
+ "read_file: Read a file into a variable.";
+const char kReadFile_Help[] =
+ "read_file: Read a file into a variable.\n"
+ "\n"
+ " read_file(filename, input_conversion)\n"
+ "\n"
+ " Whitespace will be trimmed from the end of the file. Throws an error\n"
+ " if the file can not be opened.\n"
+ "\n"
+ "Arguments:\n"
+ "\n"
+ " filename\n"
+ " Filename to read, relative to the build file.\n"
+ "\n"
+ " input_conversion\n"
+ " Controls how the file is read and parsed.\n"
+ " See \"gn help input_conversion\".\n"
+ "\n"
+ "Example\n"
+ " lines = read_file(\"foo.txt\", \"list lines\")\n";
+
+Value RunReadFile(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err) {
+ if (args.size() != 2) {
+ *err = Err(function->function(), "Wrong number of arguments to read_file",
+ "I expected two arguments.");
+ return Value();
+ }
+ if (!args[0].VerifyTypeIs(Value::STRING, err))
+ return Value();
+
+ // Compute the file name.
+ const SourceDir& cur_dir = scope->GetSourceDir();
+ SourceFile source_file = cur_dir.ResolveRelativeFile(args[0], err,
+ scope->settings()->build_settings()->root_path_utf8());
+ if (err->has_error())
+ return Value();
+ base::FilePath file_path =
+ scope->settings()->build_settings()->GetFullPath(source_file);
+
+ // Ensure that everything is recomputed if the read file changes.
+ g_scheduler->AddGenDependency(file_path);
+
+ // Read contents.
+ std::string file_contents;
+ if (!base::ReadFileToString(file_path, &file_contents)) {
+ *err = Err(args[0], "Could not read file.",
+ "I resolved this to \"" + FilePathToUTF8(file_path) + "\".");
+ return Value();
+ }
+
+ return ConvertInputToValue(scope->settings(), file_contents, function,
+ args[1], err);
+}
+
+} // namespace functions
diff --git a/chromium/tools/gn/function_rebase_path.cc b/chromium/tools/gn/function_rebase_path.cc
new file mode 100644
index 00000000000..ef5ef40f984
--- /dev/null
+++ b/chromium/tools/gn/function_rebase_path.cc
@@ -0,0 +1,289 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include "tools/gn/build_settings.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/source_dir.h"
+#include "tools/gn/source_file.h"
+#include "tools/gn/value.h"
+
+namespace functions {
+
+namespace {
+
+// We want the output to match the input in terms of ending in a slash or not.
+// Through all the transformations, these can get added or removed in various
+// cases.
+void MakeSlashEndingMatchInput(const std::string& input, std::string* output) {
+ if (EndsWithSlash(input)) {
+ if (!EndsWithSlash(*output)) // Preserve same slash type as input.
+ output->push_back(input[input.size() - 1]);
+ } else {
+ if (EndsWithSlash(*output))
+ output->resize(output->size() - 1);
+ }
+}
+
+// Returns true if the given value looks like a directory, otherwise we'll
+// assume it's a file.
+bool ValueLooksLikeDir(const std::string& value) {
+ if (value.empty())
+ return true;
+ size_t value_size = value.size();
+
+ // Count the number of dots at the end of the string.
+ size_t num_dots = 0;
+ while (num_dots < value_size && value[value_size - num_dots - 1] == '.')
+ num_dots++;
+
+ if (num_dots == value.size())
+ return true; // String is all dots.
+
+ if (IsSlash(value[value_size - num_dots - 1]))
+ return true; // String is a [back]slash followed by 0 or more dots.
+
+ // Anything else.
+ return false;
+}
+
+Value ConvertOnePath(const Scope* scope,
+ const FunctionCallNode* function,
+ const Value& value,
+ const SourceDir& from_dir,
+ const SourceDir& to_dir,
+ bool convert_to_system_absolute,
+ Err* err) {
+ Value result; // Ensure return value optimization.
+
+ if (!value.VerifyTypeIs(Value::STRING, err))
+ return result;
+ const std::string& string_value = value.string_value();
+
+ bool looks_like_dir = ValueLooksLikeDir(string_value);
+
+ // System-absolute output special case.
+ if (convert_to_system_absolute) {
+ base::FilePath system_path;
+ if (looks_like_dir) {
+ system_path = scope->settings()->build_settings()->GetFullPath(
+ from_dir.ResolveRelativeDir(value, err,
+ scope->settings()->build_settings()->root_path_utf8()));
+ } else {
+ system_path = scope->settings()->build_settings()->GetFullPath(
+ from_dir.ResolveRelativeFile(value, err,
+ scope->settings()->build_settings()->root_path_utf8()));
+ }
+ if (err->has_error())
+ return Value();
+
+ result = Value(function, FilePathToUTF8(system_path));
+ if (looks_like_dir)
+ MakeSlashEndingMatchInput(string_value, &result.string_value());
+ return result;
+ }
+
+ result = Value(function, Value::STRING);
+ if (looks_like_dir) {
+ result.string_value() = RebasePath(
+ from_dir.ResolveRelativeDir(value, err,
+ scope->settings()->build_settings()->root_path_utf8()).value(),
+ to_dir,
+ scope->settings()->build_settings()->root_path_utf8());
+ MakeSlashEndingMatchInput(string_value, &result.string_value());
+ } else {
+ result.string_value() = RebasePath(
+ from_dir.ResolveRelativeFile(value, err,
+ scope->settings()->build_settings()->root_path_utf8()).value(),
+ to_dir,
+ scope->settings()->build_settings()->root_path_utf8());
+ if (err->has_error())
+ return Value();
+ }
+
+ return result;
+}
+
+} // namespace
+
+const char kRebasePath[] = "rebase_path";
+const char kRebasePath_HelpShort[] =
+ "rebase_path: Rebase a file or directory to another location.";
+const char kRebasePath_Help[] =
+ "rebase_path: Rebase a file or directory to another location.\n"
+ "\n"
+ " converted = rebase_path(input,\n"
+ " new_base = \"\",\n"
+ " current_base = \".\")\n"
+ "\n"
+ " Takes a string argument representing a file name, or a list of such\n"
+ " strings and converts it/them to be relative to a different base\n"
+ " directory.\n"
+ "\n"
+ " When invoking the compiler or scripts, GN will automatically convert\n"
+ " sources and include directories to be relative to the build directory.\n"
+ " However, if you're passing files directly in the \"args\" array or\n"
+ " doing other manual manipulations where GN doesn't know something is\n"
+ " a file name, you will need to convert paths to be relative to what\n"
+ " your tool is expecting.\n"
+ "\n"
+ " The common case is to use this to convert paths relative to the\n"
+ " current directory to be relative to the build directory (which will\n"
+ " be the current directory when executing scripts).\n"
+ "\n"
+ " If you want to convert a file path to be source-absolute (that is,\n"
+ " beginning with a double slash like \"//foo/bar\"), you should use\n"
+ " the get_path_info() function. This function won't work because it will\n"
+ " always make relative paths, and it needs to support making paths\n"
+ " relative to the source root, so can't also generate source-absolute\n"
+ " paths without more special-cases.\n"
+ "\n"
+ "Arguments\n"
+ "\n"
+ " input\n"
+ " A string or list of strings representing file or directory names\n"
+ " These can be relative paths (\"foo/bar.txt\"), system absolute\n"
+ " paths (\"/foo/bar.txt\"), or source absolute paths\n"
+ " (\"//foo/bar.txt\").\n"
+ "\n"
+ " new_base\n"
+ " The directory to convert the paths to be relative to. This can be\n"
+ " an absolute path or a relative path (which will be treated\n"
+ " as being relative to the current BUILD-file's directory).\n"
+ "\n"
+ " As a special case, if new_base is the empty string (the default),\n"
+ " all paths will be converted to system-absolute native style paths\n"
+ " with system path separators. This is useful for invoking external\n"
+ " programs.\n"
+ "\n"
+ " current_base\n"
+ " Directory representing the base for relative paths in the input.\n"
+ " If this is not an absolute path, it will be treated as being\n"
+ " relative to the current build file. Use \".\" (the default) to\n"
+ " convert paths from the current BUILD-file's directory.\n"
+ "\n"
+ "Return value\n"
+ "\n"
+ " The return value will be the same type as the input value (either a\n"
+ " string or a list of strings). All relative and source-absolute file\n"
+ " names will be converted to be relative to the requested output\n"
+ " System-absolute paths will be unchanged.\n"
+ "\n"
+ " Whether an output path will end in a slash will match whether the\n"
+ " corresponding input path ends in a slash. It will return \".\" or\n"
+ " \"./\" (depending on whether the input ends in a slash) to avoid\n"
+ " returning empty strings. This means if you want a root path\n"
+ " (\"//\" or \"/\") not ending in a slash, you can add a dot (\"//.\").\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " # Convert a file in the current directory to be relative to the build\n"
+ " # directory (the current dir when executing compilers and scripts).\n"
+ " foo = rebase_path(\"myfile.txt\", root_build_dir)\n"
+ " # might produce \"../../project/myfile.txt\".\n"
+ "\n"
+ " # Convert a file to be system absolute:\n"
+ " foo = rebase_path(\"myfile.txt\")\n"
+ " # Might produce \"D:\\source\\project\\myfile.txt\" on Windows or\n"
+ " # \"/home/you/source/project/myfile.txt\" on Linux.\n"
+ "\n"
+ " # Typical usage for converting to the build directory for a script.\n"
+ " action(\"myscript\") {\n"
+ " # Don't convert sources, GN will automatically convert these to be\n"
+ " # relative to the build directory when it constructs the command\n"
+ " # line for your script.\n"
+ " sources = [ \"foo.txt\", \"bar.txt\" ]\n"
+ "\n"
+ " # Extra file args passed manually need to be explicitly converted\n"
+ " # to be relative to the build directory:\n"
+ " args = [\n"
+ " \"--data\",\n"
+ " rebase_path(\"//mything/data/input.dat\", root_build_dir),\n"
+ " \"--rel\",\n"
+ " rebase_path(\"relative_path.txt\", root_build_dir)\n"
+ " ] + rebase_path(sources, root_build_dir)\n"
+ " }\n";
+
+Value RunRebasePath(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err) {
+ Value result;
+
+ // Argument indices.
+ static const size_t kArgIndexInputs = 0;
+ static const size_t kArgIndexDest = 1;
+ static const size_t kArgIndexFrom = 2;
+
+ // Inputs.
+ if (args.size() < 1 || args.size() > 3) {
+ *err = Err(function->function(), "Wrong # of arguments for rebase_path.");
+ return result;
+ }
+ const Value& inputs = args[kArgIndexInputs];
+
+ // To path.
+ bool convert_to_system_absolute = true;
+ SourceDir to_dir;
+ const SourceDir& current_dir = scope->GetSourceDir();
+ if (args.size() > kArgIndexDest) {
+ if (!args[kArgIndexDest].VerifyTypeIs(Value::STRING, err))
+ return result;
+ if (!args[kArgIndexDest].string_value().empty()) {
+ to_dir = current_dir.ResolveRelativeDir(
+ args[kArgIndexDest], err,
+ scope->settings()->build_settings()->root_path_utf8());
+ if (err->has_error())
+ return Value();
+ convert_to_system_absolute = false;
+ }
+ }
+
+ // From path.
+ SourceDir from_dir;
+ if (args.size() > kArgIndexFrom) {
+ if (!args[kArgIndexFrom].VerifyTypeIs(Value::STRING, err))
+ return result;
+ from_dir = current_dir.ResolveRelativeDir(
+ args[kArgIndexFrom], err,
+ scope->settings()->build_settings()->root_path_utf8());
+ if (err->has_error())
+ return Value();
+ } else {
+ // Default to current directory if unspecified.
+ from_dir = current_dir;
+ }
+
+ // Path conversion.
+ if (inputs.type() == Value::STRING) {
+ return ConvertOnePath(scope, function, inputs,
+ from_dir, to_dir, convert_to_system_absolute, err);
+
+ } else if (inputs.type() == Value::LIST) {
+ result = Value(function, Value::LIST);
+ result.list_value().reserve(inputs.list_value().size());
+
+ for (const auto& input : inputs.list_value()) {
+ result.list_value().push_back(
+ ConvertOnePath(scope, function, input,
+ from_dir, to_dir, convert_to_system_absolute, err));
+ if (err->has_error()) {
+ result = Value();
+ return result;
+ }
+ }
+ return result;
+ }
+
+ *err = Err(function->function(),
+ "rebase_path requires a list or a string.");
+ return result;
+}
+
+} // namespace functions
diff --git a/chromium/tools/gn/function_rebase_path_unittest.cc b/chromium/tools/gn/function_rebase_path_unittest.cc
new file mode 100644
index 00000000000..456d4fb8a96
--- /dev/null
+++ b/chromium/tools/gn/function_rebase_path_unittest.cc
@@ -0,0 +1,186 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "build/build_config.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/test_with_scope.h"
+
+namespace {
+
+std::string RebaseOne(Scope* scope,
+ const char* input,
+ const char* to_dir,
+ const char* from_dir) {
+ std::vector<Value> args;
+ args.push_back(Value(nullptr, input));
+ args.push_back(Value(nullptr, to_dir));
+ args.push_back(Value(nullptr, from_dir));
+
+ Err err;
+ FunctionCallNode function;
+ Value result = functions::RunRebasePath(scope, &function, args, &err);
+ bool is_string = result.type() == Value::STRING;
+ EXPECT_TRUE(is_string);
+
+ return result.string_value();
+}
+
+} // namespace
+
+TEST(RebasePath, Strings) {
+ TestWithScope setup;
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+ Scope* scope = setup.scope();
+ scope->set_source_dir(SourceDir("//tools/gn/"));
+
+ // Build-file relative paths.
+ EXPECT_EQ("../../tools/gn", RebaseOne(scope, ".", "//out/Debug", "."));
+ EXPECT_EQ("../../tools/gn/", RebaseOne(scope, "./", "//out/Debug", "."));
+ EXPECT_EQ("../../tools/gn/foo", RebaseOne(scope, "foo", "//out/Debug", "."));
+ EXPECT_EQ("../..", RebaseOne(scope, "../..", "//out/Debug", "."));
+ EXPECT_EQ("../../", RebaseOne(scope, "../../", "//out/Debug", "."));
+
+ // Without a source root defined, we cannot move out of the source tree.
+ EXPECT_EQ("../..", RebaseOne(scope, "../../..", "//out/Debug", "."));
+
+ // Source-absolute input paths.
+ EXPECT_EQ("./", RebaseOne(scope, "//", "//", "//"));
+ EXPECT_EQ("foo", RebaseOne(scope, "//foo", "//", "//"));
+ EXPECT_EQ("foo/", RebaseOne(scope, "//foo/", "//", "//"));
+ EXPECT_EQ("../../foo/bar", RebaseOne(scope, "//foo/bar", "//out/Debug", "."));
+ EXPECT_EQ("./", RebaseOne(scope, "//foo/", "//foo/", "//"));
+ // This one is technically correct but could be simplified to "." if
+ // necessary.
+ EXPECT_EQ("../foo", RebaseOne(scope, "//foo", "//foo", "//"));
+
+ // Test slash conversion.
+ EXPECT_EQ("foo/bar", RebaseOne(scope, "foo/bar", ".", "."));
+ EXPECT_EQ("foo/bar", RebaseOne(scope, "foo\\bar", ".", "."));
+
+ // Test system path output.
+#if defined(OS_WIN)
+ setup.build_settings()->SetRootPath(base::FilePath(L"C:/path/to/src"));
+ EXPECT_EQ("C:/path/to/src", RebaseOne(scope, ".", "", "//"));
+ EXPECT_EQ("C:/path/to/src/", RebaseOne(scope, "//", "", "//"));
+ EXPECT_EQ("C:/path/to/src/foo", RebaseOne(scope, "foo", "", "//"));
+ EXPECT_EQ("C:/path/to/src/foo/", RebaseOne(scope, "foo/", "", "//"));
+ EXPECT_EQ("C:/path/to/src/tools/gn/foo", RebaseOne(scope, "foo", "", "."));
+ EXPECT_EQ("C:/path/to/other/tools",
+ RebaseOne(scope, "//../other/tools", "", "//"));
+ EXPECT_EQ("C:/path/to/src/foo/bar",
+ RebaseOne(scope, "//../src/foo/bar", "", "//"));
+ EXPECT_EQ("C:/path/to", RebaseOne(scope, "//..", "", "//"));
+ EXPECT_EQ("C:/path", RebaseOne(scope, "../../../..", "", "."));
+ EXPECT_EQ("C:/path/to/external/dir/",
+ RebaseOne(scope, "//../external/dir/", "", "//"));
+
+#else
+ setup.build_settings()->SetRootPath(base::FilePath("/path/to/src"));
+ EXPECT_EQ("/path/to/src", RebaseOne(scope, ".", "", "//"));
+ EXPECT_EQ("/path/to/src/", RebaseOne(scope, "//", "", "//"));
+ EXPECT_EQ("/path/to/src/foo", RebaseOne(scope, "foo", "", "//"));
+ EXPECT_EQ("/path/to/src/foo/", RebaseOne(scope, "foo/", "", "//"));
+ EXPECT_EQ("/path/to/src/tools/gn/foo", RebaseOne(scope, "foo", "", "."));
+ EXPECT_EQ("/path/to/other/tools",
+ RebaseOne(scope, "//../other/tools", "", "//"));
+ EXPECT_EQ("/path/to/src/foo/bar",
+ RebaseOne(scope, "//../src/foo/bar", "", "//"));
+ EXPECT_EQ("/path/to", RebaseOne(scope, "//..", "", "//"));
+ EXPECT_EQ("/path", RebaseOne(scope, "../../../..", "", "."));
+ EXPECT_EQ("/path/to/external/dir/",
+ RebaseOne(scope, "//../external/dir/", "", "//"));
+#endif
+}
+
+TEST(RebasePath, StringsSystemPaths) {
+ TestWithScope setup;
+ Scope* scope = setup.scope();
+
+#if defined(OS_WIN)
+ setup.build_settings()->SetBuildDir(SourceDir("C:/ssd/out/Debug"));
+ setup.build_settings()->SetRootPath(base::FilePath(L"C:/hdd/src"));
+
+ // Test system absolute to-dir.
+ EXPECT_EQ("../../ssd/out/Debug",
+ RebaseOne(scope, ".", "//", "C:/ssd/out/Debug"));
+ EXPECT_EQ("../../ssd/out/Debug/",
+ RebaseOne(scope, "./", "//", "C:/ssd/out/Debug"));
+ EXPECT_EQ("../../ssd/out/Debug/foo",
+ RebaseOne(scope, "foo", "//", "C:/ssd/out/Debug"));
+ EXPECT_EQ("../../ssd/out/Debug/foo/",
+ RebaseOne(scope, "foo/", "//", "C:/ssd/out/Debug"));
+
+ // Test system absolute from-dir.
+ EXPECT_EQ("../../../hdd/src",
+ RebaseOne(scope, ".", "C:/ssd/out/Debug", "//"));
+ EXPECT_EQ("../../../hdd/src/",
+ RebaseOne(scope, "./", "C:/ssd/out/Debug", "//"));
+ EXPECT_EQ("../../../hdd/src/foo",
+ RebaseOne(scope, "foo", "C:/ssd/out/Debug", "//"));
+ EXPECT_EQ("../../../hdd/src/foo/",
+ RebaseOne(scope, "foo/", "C:/ssd/out/Debug", "//"));
+#else
+ setup.build_settings()->SetBuildDir(SourceDir("/ssd/out/Debug"));
+ setup.build_settings()->SetRootPath(base::FilePath("/hdd/src"));
+
+ // Test system absolute to-dir.
+ EXPECT_EQ("../../ssd/out/Debug",
+ RebaseOne(scope, ".", "//", "/ssd/out/Debug"));
+ EXPECT_EQ("../../ssd/out/Debug/",
+ RebaseOne(scope, "./", "//", "/ssd/out/Debug"));
+ EXPECT_EQ("../../ssd/out/Debug/foo",
+ RebaseOne(scope, "foo", "//", "/ssd/out/Debug"));
+ EXPECT_EQ("../../ssd/out/Debug/foo/",
+ RebaseOne(scope, "foo/", "//", "/ssd/out/Debug"));
+
+ // Test system absolute from-dir.
+ EXPECT_EQ("../../../hdd/src",
+ RebaseOne(scope, ".", "/ssd/out/Debug", "//"));
+ EXPECT_EQ("../../../hdd/src/",
+ RebaseOne(scope, "./", "/ssd/out/Debug", "//"));
+ EXPECT_EQ("../../../hdd/src/foo",
+ RebaseOne(scope, "foo", "/ssd/out/Debug", "//"));
+ EXPECT_EQ("../../../hdd/src/foo/",
+ RebaseOne(scope, "foo/", "/ssd/out/Debug", "//"));
+#endif
+}
+
+// Test list input.
+TEST(RebasePath, List) {
+ TestWithScope setup;
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+ setup.scope()->set_source_dir(SourceDir("//tools/gn/"));
+
+ std::vector<Value> args;
+ args.push_back(Value(nullptr, Value::LIST));
+ args[0].list_value().push_back(Value(nullptr, "foo.txt"));
+ args[0].list_value().push_back(Value(nullptr, "bar.txt"));
+ args.push_back(Value(nullptr, "//out/Debug/"));
+ args.push_back(Value(nullptr, "."));
+
+ Err err;
+ FunctionCallNode function;
+ Value ret = functions::RunRebasePath(setup.scope(), &function, args, &err);
+ EXPECT_FALSE(err.has_error());
+
+ ASSERT_EQ(Value::LIST, ret.type());
+ ASSERT_EQ(2u, ret.list_value().size());
+
+ EXPECT_EQ("../../tools/gn/foo.txt", ret.list_value()[0].string_value());
+ EXPECT_EQ("../../tools/gn/bar.txt", ret.list_value()[1].string_value());
+}
+
+TEST(RebasePath, Errors) {
+ TestWithScope setup;
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+
+ // No arg input should issue an error.
+ Err err;
+ std::vector<Value> args;
+ FunctionCallNode function;
+ Value ret = functions::RunRebasePath(setup.scope(), &function, args, &err);
+ EXPECT_TRUE(err.has_error());
+}
diff --git a/chromium/tools/gn/function_set_default_toolchain.cc b/chromium/tools/gn/function_set_default_toolchain.cc
new file mode 100644
index 00000000000..cc5929e6686
--- /dev/null
+++ b/chromium/tools/gn/function_set_default_toolchain.cc
@@ -0,0 +1,78 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/build_settings.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/loader.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/settings.h"
+
+namespace functions {
+
+const char kSetDefaultToolchain[] = "set_default_toolchain";
+const char kSetDefaultToolchain_HelpShort[] =
+ "set_default_toolchain: Sets the default toolchain name.";
+const char kSetDefaultToolchain_Help[] =
+ "set_default_toolchain: Sets the default toolchain name.\n"
+ "\n"
+ " set_default_toolchain(toolchain_label)\n"
+ "\n"
+ " The given label should identify a toolchain definition (see\n"
+ " \"help toolchain\"). This toolchain will be used for all targets\n"
+ " unless otherwise specified.\n"
+ "\n"
+ " This function is only valid to call during the processing of the build\n"
+ " configuration file. Since the build configuration file is processed\n"
+ " separately for each toolchain, this function will be a no-op when\n"
+ " called under any non-default toolchains.\n"
+ "\n"
+ " For example, the default toolchain should be appropriate for the\n"
+ " current environment. If the current environment is 32-bit and \n"
+ " somebody references a target with a 64-bit toolchain, we wouldn't\n"
+ " want processing of the build config file for the 64-bit toolchain to\n"
+ " reset the default toolchain to 64-bit, we want to keep it 32-bits.\n"
+ "\n"
+ "Argument:\n"
+ "\n"
+ " toolchain_label\n"
+ " Toolchain name.\n"
+ "\n"
+ "Example:\n"
+ "\n"
+ " set_default_toolchain(\"//build/config/win:vs32\")";
+
+Value RunSetDefaultToolchain(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err) {
+ if (!scope->IsProcessingBuildConfig()) {
+ *err = Err(function->function(), "Must be called from build config.",
+ "set_default_toolchain can only be called from the build configuration "
+ "file.");
+ return Value();
+ }
+
+ // When the loader is expecting the default toolchain to be set, it will set
+ // this key on the scope to point to the destination.
+ Label* default_toolchain_dest = static_cast<Label*>(
+ scope->GetProperty(Loader::kDefaultToolchainKey, nullptr));
+ if (!default_toolchain_dest)
+ return Value();
+
+ const SourceDir& current_dir = scope->GetSourceDir();
+ const Label& default_toolchain = ToolchainLabelForScope(scope);
+
+ if (!EnsureSingleStringArg(function, args, err))
+ return Value();
+ Label toolchain_label(
+ Label::Resolve(current_dir, default_toolchain, args[0], err));
+ if (toolchain_label.is_null())
+ return Value();
+
+ *default_toolchain_dest = toolchain_label;
+ return Value();
+}
+
+} // namespace functions
diff --git a/chromium/tools/gn/function_set_defaults.cc b/chromium/tools/gn/function_set_defaults.cc
new file mode 100644
index 00000000000..4efb92964d3
--- /dev/null
+++ b/chromium/tools/gn/function_set_defaults.cc
@@ -0,0 +1,92 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/err.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+
+namespace functions {
+
+const char kSetDefaults[] = "set_defaults";
+const char kSetDefaults_HelpShort[] =
+ "set_defaults: Set default values for a target type.";
+const char kSetDefaults_Help[] =
+ "set_defaults: Set default values for a target type.\n"
+ "\n"
+ " set_defaults(<target_type_name>) { <values...> }\n"
+ "\n"
+ " Sets the default values for a given target type. Whenever\n"
+ " target_type_name is seen in the future, the values specified in\n"
+ " set_default's block will be copied into the current scope.\n"
+ "\n"
+ " When the target type is used, the variable copying is very strict.\n"
+ " If a variable with that name is already in scope, the build will fail\n"
+ " with an error.\n"
+ "\n"
+ " set_defaults can be used for built-in target types (\"executable\",\n"
+ " \"shared_library\", etc.) and custom ones defined via the \"template\"\n"
+ " command.\n"
+ "\n"
+ "Example:\n"
+ " set_defaults(\"static_library\") {\n"
+ " configs = [ \"//tools/mything:settings\" ]\n"
+ " }\n"
+ "\n"
+ " static_library(\"mylib\")\n"
+ " # The configs will be auto-populated as above. You can remove it if\n"
+ " # you don't want the default for a particular default:\n"
+ " configs -= \"//tools/mything:settings\"\n"
+ " }\n";
+
+Value RunSetDefaults(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) {
+ if (!EnsureSingleStringArg(function, args, err))
+ return Value();
+ const std::string& target_type(args[0].string_value());
+
+ // Ensure there aren't defaults already set.
+ //
+ // It might be nice to allow multiple calls to mutate the defaults. The
+ // main case for this is where some local portions of the code want
+ // additional defaults they specify in an imported file.
+ //
+ // Currently, we don't allow imports to clobber anything, so this wouldn't
+ // work. Additionally, allowing this would be undesirable since we don't
+ // want multiple imports to each try to set defaults, since it might look
+ // like the defaults are modified by each one in sequence, while in fact
+ // imports would always clobber previous values and it would be confusing.
+ //
+ // If we wanted this, the solution would be to allow imports to overwrite
+ // target defaults set up by the default build config only. That way there
+ // are no ordering issues, but this would be more work.
+ if (scope->GetTargetDefaults(target_type)) {
+ *err = Err(function->function(),
+ "This target type defaults were already set.");
+ return Value();
+ }
+
+ if (!block) {
+ FillNeedsBlockError(function, err);
+ return Value();
+ }
+
+ // Run the block for the rule invocation.
+ Scope block_scope(scope);
+ block->Execute(&block_scope, err);
+ if (err->has_error())
+ return Value();
+
+ // Now copy the values set on the scope we made into the free-floating one
+ // (with no containing scope) used to hold the target defaults.
+ Scope* dest = scope->MakeTargetDefaults(target_type);
+ block_scope.NonRecursiveMergeTo(dest, Scope::MergeOptions(), function,
+ "<SHOULD NOT FAIL>", err);
+ return Value();
+}
+
+} // namespace functions
diff --git a/chromium/tools/gn/function_template.cc b/chromium/tools/gn/function_template.cc
new file mode 100644
index 00000000000..17dda0662d9
--- /dev/null
+++ b/chromium/tools/gn/function_template.cc
@@ -0,0 +1,198 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/functions.h"
+
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/template.h"
+#include "tools/gn/value.h"
+
+namespace functions {
+
+const char kTemplate[] = "template";
+const char kTemplate_HelpShort[] =
+ "template: Define a template rule.";
+const char kTemplate_Help[] =
+ "template: Define a template rule.\n"
+ "\n"
+ " A template defines a custom name that acts like a function. It\n"
+ " provides a way to add to the built-in target types.\n"
+ "\n"
+ " The template() function is used to declare a template. To invoke the\n"
+ " template, just use the name of the template like any other target\n"
+ " type.\n"
+ "\n"
+ " Often you will want to declare your template in a special file that\n"
+ " other files will import (see \"gn help import\") so your template\n"
+ " rule can be shared across build files.\n"
+ "\n"
+ "Variables and templates:\n"
+ "\n"
+ " When you call template() it creates a closure around all variables\n"
+ " currently in scope with the code in the template block. When the\n"
+ " template is invoked, the closure will be executed.\n"
+ "\n"
+ " When the template is invoked, the code in the caller is executed and\n"
+ " passed to the template code as an implicit \"invoker\" variable. The\n"
+ " template uses this to read state out of the invoking code.\n"
+ "\n"
+ " One thing explicitly excluded from the closure is the \"current\n"
+ " directory\" against which relative file names are resolved. The\n"
+ " current directory will be that of the invoking code, since typically\n"
+ " that code specifies the file names. This means all files internal\n"
+ " to the template should use absolute names.\n"
+ "\n"
+ " A template will typically forward some or all variables from the\n"
+ " invoking scope to a target that it defines. Often, such variables\n"
+ " might be optional. Use the pattern:\n"
+ "\n"
+ " if (defined(invoker.deps)) {\n"
+ " deps = invoker.deps\n"
+ " }\n"
+ "\n"
+ " The function forward_variables_from() provides a shortcut to forward\n"
+ " one or more or possibly all variables in this manner:\n"
+ "\n"
+ " forward_variables_from(invoker, [\"deps\", \"public_deps\"])\n"
+ "\n"
+ "Target naming:\n"
+ "\n"
+ " Your template should almost always define a built-in target with the\n"
+ " name the template invoker specified. For example, if you have an IDL\n"
+ " template and somebody does:\n"
+ " idl(\"foo\") {...\n"
+ " you will normally want this to expand to something defining a\n"
+ " source_set or static_library named \"foo\" (among other things you may\n"
+ " need). This way, when another target specifies a dependency on\n"
+ " \"foo\", the static_library or source_set will be linked.\n"
+ "\n"
+ " It is also important that any other targets your template expands to\n"
+ " have globally unique names, or you will get collisions.\n"
+ "\n"
+ " Access the invoking name in your template via the implicit\n"
+ " \"target_name\" variable. This should also be the basis for how other\n"
+ " targets that a template expands to ensure uniqueness.\n"
+ "\n"
+ " A typical example would be a template that defines an action to\n"
+ " generate some source files, and a source_set to compile that source.\n"
+ " Your template would name the source_set \"target_name\" because\n"
+ " that's what you want external targets to depend on to link your code.\n"
+ " And you would name the action something like \"${target_name}_action\"\n"
+ " to make it unique. The source set would have a dependency on the\n"
+ " action to make it run.\n"
+ "\n"
+ "Example of defining a template:\n"
+ "\n"
+ " template(\"my_idl\") {\n"
+ " # Be nice and help callers debug problems by checking that the\n"
+ " # variables the template requires are defined. This gives a nice\n"
+ " # message rather than giving the user an error about an\n"
+ " # undefined variable in the file defining the template\n"
+ " #\n"
+ " # You can also use defined() to give default values to variables\n"
+ " # unspecified by the invoker.\n"
+ " assert(defined(invoker.sources),\n"
+ " \"Need sources in $target_name listing the idl files.\")\n"
+ "\n"
+ " # Name of the intermediate target that does the code gen. This must\n"
+ " # incorporate the target name so it's unique across template\n"
+ " # instantiations.\n"
+ " code_gen_target_name = target_name + \"_code_gen\"\n"
+ "\n"
+ " # Intermediate target to convert IDL to C source. Note that the name\n"
+ " # is based on the name the invoker of the template specified. This\n"
+ " # way, each time the template is invoked we get a unique\n"
+ " # intermediate action name (since all target names are in the global\n"
+ " # scope).\n"
+ " action_foreach(code_gen_target_name) {\n"
+ " # Access the scope defined by the invoker via the implicit\n"
+ " # \"invoker\" variable.\n"
+ " sources = invoker.sources\n"
+ "\n"
+ " # Note that we need an absolute path for our script file name.\n"
+ " # The current directory when executing this code will be that of\n"
+ " # the invoker (this is why we can use the \"sources\" directly\n"
+ " # above without having to rebase all of the paths). But if we need\n"
+ " # to reference a script relative to the template file, we'll need\n"
+ " # to use an absolute path instead.\n"
+ " script = \"//tools/idl/idl_code_generator.py\"\n"
+ "\n"
+ " # Tell GN how to expand output names given the sources.\n"
+ " # See \"gn help source_expansion\" for more.\n"
+ " outputs = [ \"$target_gen_dir/{{source_name_part}}.cc\",\n"
+ " \"$target_gen_dir/{{source_name_part}}.h\" ]\n"
+ " }\n"
+ "\n"
+ " # Name the source set the same as the template invocation so\n"
+ " # instancing this template produces something that other targets\n"
+ " # can link to in their deps.\n"
+ " source_set(target_name) {\n"
+ " # Generates the list of sources, we get these from the\n"
+ " # action_foreach above.\n"
+ " sources = get_target_outputs(\":$code_gen_target_name\")\n"
+ "\n"
+ " # This target depends on the files produced by the above code gen\n"
+ " # target.\n"
+ " deps = [ \":$code_gen_target_name\" ]\n"
+ " }\n"
+ " }\n"
+ "\n"
+ "Example of invoking the resulting template:\n"
+ "\n"
+ " # This calls the template code above, defining target_name to be\n"
+ " # \"foo_idl_files\" and \"invoker\" to be the set of stuff defined in\n"
+ " # the curly brackets.\n"
+ " my_idl(\"foo_idl_files\") {\n"
+ " # Goes into the template as \"invoker.sources\".\n"
+ " sources = [ \"foo.idl\", \"bar.idl\" ]\n"
+ " }\n"
+ "\n"
+ " # Here is a target that depends on our template.\n"
+ " executable(\"my_exe\") {\n"
+ " # Depend on the name we gave the template call above. Internally,\n"
+ " # this will produce a dependency from executable to the source_set\n"
+ " # inside the template (since it has this name), which will in turn\n"
+ " # depend on the code gen action.\n"
+ " deps = [ \":foo_idl_files\" ]\n"
+ " }\n";
+
+Value RunTemplate(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) {
+ // Of course you can have configs and targets in a template. But here, we're
+ // not actually executing the block, only declaring it. Marking the template
+ // declaration as non-nestable means that you can't put it inside a target,
+ // for example.
+ NonNestableBlock non_nestable(scope, function, "template");
+ if (!non_nestable.Enter(err))
+ return Value();
+
+ // TODO(brettw) determine if the function is built-in and throw an error if
+ // it is.
+ if (args.size() != 1) {
+ *err = Err(function->function(),
+ "Need exactly one string arg to template.");
+ return Value();
+ }
+ if (!args[0].VerifyTypeIs(Value::STRING, err))
+ return Value();
+ std::string template_name = args[0].string_value();
+
+ const Template* existing_template = scope->GetTemplate(template_name);
+ if (existing_template) {
+ *err = Err(function, "Duplicate template definition.",
+ "A template with this name was already defined.");
+ err->AppendSubErr(Err(existing_template->GetDefinitionRange(),
+ "Previous definition."));
+ return Value();
+ }
+
+ scope->AddTemplate(template_name, new Template(scope, function));
+ return Value();
+}
+
+} // namespace functions
diff --git a/chromium/tools/gn/function_toolchain.cc b/chromium/tools/gn/function_toolchain.cc
new file mode 100644
index 00000000000..f3344ffdfe8
--- /dev/null
+++ b/chromium/tools/gn/function_toolchain.cc
@@ -0,0 +1,1022 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <algorithm>
+#include <limits>
+#include <utility>
+
+#include "tools/gn/err.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/tool.h"
+#include "tools/gn/toolchain.h"
+#include "tools/gn/value_extractors.h"
+#include "tools/gn/variables.h"
+
+namespace functions {
+
+namespace {
+
+// This is just a unique value to take the address of to use as the key for
+// the toolchain property on a scope.
+const int kToolchainPropertyKey = 0;
+
+bool ReadBool(Scope* scope,
+ const char* var,
+ Tool* tool,
+ void (Tool::*set)(bool),
+ Err* err) {
+ const Value* v = scope->GetValue(var, true);
+ if (!v)
+ return true; // Not present is fine.
+ if (!v->VerifyTypeIs(Value::BOOLEAN, err))
+ return false;
+
+ (tool->*set)(v->boolean_value());
+ return true;
+}
+
+// Reads the given string from the scope (if present) and puts the result into
+// dest. If the value is not a string, sets the error and returns false.
+bool ReadString(Scope* scope,
+ const char* var,
+ Tool* tool,
+ void (Tool::*set)(const std::string&),
+ Err* err) {
+ const Value* v = scope->GetValue(var, true);
+ if (!v)
+ return true; // Not present is fine.
+ if (!v->VerifyTypeIs(Value::STRING, err))
+ return false;
+
+ (tool->*set)(v->string_value());
+ return true;
+}
+
+// Calls the given validate function on each type in the list. On failure,
+// sets the error, blames the value, and returns false.
+bool ValidateSubstitutionList(const std::vector<SubstitutionType>& list,
+ bool (*validate)(SubstitutionType),
+ const Value* origin,
+ Err* err) {
+ for (const auto& cur_type : list) {
+ if (!validate(cur_type)) {
+ *err = Err(*origin, "Pattern not valid here.",
+ "You used the pattern " + std::string(kSubstitutionNames[cur_type]) +
+ " which is not valid\nfor this variable.");
+ return false;
+ }
+ }
+ return true;
+}
+
+bool ReadPattern(Scope* scope,
+ const char* name,
+ bool (*validate)(SubstitutionType),
+ Tool* tool,
+ void (Tool::*set)(const SubstitutionPattern&),
+ Err* err) {
+ const Value* value = scope->GetValue(name, true);
+ if (!value)
+ return true; // Not present is fine.
+ if (!value->VerifyTypeIs(Value::STRING, err))
+ return false;
+
+ SubstitutionPattern pattern;
+ if (!pattern.Parse(*value, err))
+ return false;
+ if (!ValidateSubstitutionList(pattern.required_types(), validate, value, err))
+ return false;
+
+ (tool->*set)(pattern);
+ return true;
+}
+
+bool ReadOutputExtension(Scope* scope, Tool* tool, Err* err) {
+ const Value* value = scope->GetValue("default_output_extension", true);
+ if (!value)
+ return true; // Not present is fine.
+ if (!value->VerifyTypeIs(Value::STRING, err))
+ return false;
+
+ if (value->string_value().empty())
+ return true; // Accept empty string.
+
+ if (value->string_value()[0] != '.') {
+ *err = Err(*value, "default_output_extension must begin with a '.'");
+ return false;
+ }
+
+ tool->set_default_output_extension(value->string_value());
+ return true;
+}
+
+bool ReadPrecompiledHeaderType(Scope* scope, Tool* tool, Err* err) {
+ const Value* value = scope->GetValue("precompiled_header_type", true);
+ if (!value)
+ return true; // Not present is fine.
+ if (!value->VerifyTypeIs(Value::STRING, err))
+ return false;
+
+ if (value->string_value().empty())
+ return true; // Accept empty string, do nothing (default is "no PCH").
+
+ if (value->string_value() == "gcc") {
+ tool->set_precompiled_header_type(Tool::PCH_GCC);
+ return true;
+ } else if (value->string_value() == "msvc") {
+ tool->set_precompiled_header_type(Tool::PCH_MSVC);
+ return true;
+ }
+ *err = Err(*value, "Invalid precompiled_header_type",
+ "Must either be empty, \"gcc\", or \"msvc\".");
+ return false;
+}
+
+bool ReadDepsFormat(Scope* scope, Tool* tool, Err* err) {
+ const Value* value = scope->GetValue("depsformat", true);
+ if (!value)
+ return true; // Not present is fine.
+ if (!value->VerifyTypeIs(Value::STRING, err))
+ return false;
+
+ if (value->string_value() == "gcc") {
+ tool->set_depsformat(Tool::DEPS_GCC);
+ } else if (value->string_value() == "msvc") {
+ tool->set_depsformat(Tool::DEPS_MSVC);
+ } else {
+ *err = Err(*value, "Deps format must be \"gcc\" or \"msvc\".");
+ return false;
+ }
+ return true;
+}
+
+bool ReadOutputs(Scope* scope,
+ const FunctionCallNode* tool_function,
+ bool (*validate)(SubstitutionType),
+ Tool* tool,
+ Err* err) {
+ const Value* value = scope->GetValue("outputs", true);
+ if (!value) {
+ *err = Err(tool_function, "\"outputs\" must be specified for this tool.");
+ return false;
+ }
+
+ SubstitutionList list;
+ if (!list.Parse(*value, err))
+ return false;
+
+ // Validate the right kinds of patterns are used.
+ if (!ValidateSubstitutionList(list.required_types(), validate, value, err))
+ return false;
+
+ // There should always be at least one output.
+ if (list.list().empty()) {
+ *err = Err(*value, "Outputs list is empty.", "I need some outputs.");
+ return false;
+ }
+
+ tool->set_outputs(list);
+ return true;
+}
+
+bool IsCompilerTool(Toolchain::ToolType type) {
+ return type == Toolchain::TYPE_CC ||
+ type == Toolchain::TYPE_CXX ||
+ type == Toolchain::TYPE_OBJC ||
+ type == Toolchain::TYPE_OBJCXX ||
+ type == Toolchain::TYPE_RC ||
+ type == Toolchain::TYPE_ASM;
+}
+
+bool IsLinkerTool(Toolchain::ToolType type) {
+ return type == Toolchain::TYPE_ALINK ||
+ type == Toolchain::TYPE_SOLINK ||
+ type == Toolchain::TYPE_SOLINK_MODULE ||
+ type == Toolchain::TYPE_LINK;
+}
+
+bool IsPatternInOutputList(const SubstitutionList& output_list,
+ const SubstitutionPattern& pattern) {
+ for (const auto& cur : output_list.list()) {
+ if (pattern.ranges().size() == cur.ranges().size() &&
+ std::equal(pattern.ranges().begin(), pattern.ranges().end(),
+ cur.ranges().begin()))
+ return true;
+ }
+ return false;
+}
+
+} // namespace
+
+// toolchain -------------------------------------------------------------------
+
+const char kToolchain[] = "toolchain";
+const char kToolchain_HelpShort[] =
+ "toolchain: Defines a toolchain.";
+const char kToolchain_Help[] =
+ "toolchain: Defines a toolchain.\n"
+ "\n"
+ " A toolchain is a set of commands and build flags used to compile the\n"
+ " source code. You can have more than one toolchain in use at once in\n"
+ " a build.\n"
+ "\n"
+ "Functions and variables\n"
+ "\n"
+ " tool()\n"
+ " The tool() function call specifies the commands commands to run for\n"
+ " a given step. See \"gn help tool\".\n"
+ "\n"
+ " toolchain_args()\n"
+ " List of arguments to pass to the toolchain when invoking this\n"
+ " toolchain. This applies only to non-default toolchains. See\n"
+ " \"gn help toolchain_args\" for more.\n"
+ "\n"
+ " deps\n"
+ " Dependencies of this toolchain. These dependencies will be resolved\n"
+ " before any target in the toolchain is compiled. To avoid circular\n"
+ " dependencies these must be targets defined in another toolchain.\n"
+ "\n"
+ " This is expressed as a list of targets, and generally these targets\n"
+ " will always specify a toolchain:\n"
+ " deps = [ \"//foo/bar:baz(//build/toolchain:bootstrap)\" ]\n"
+ "\n"
+ " This concept is somewhat inefficient to express in Ninja (it\n"
+ " requires a lot of duplicate of rules) so should only be used when\n"
+ " absolutely necessary.\n"
+ "\n"
+ " concurrent_links\n"
+ " In integer expressing the number of links that Ninja will perform in\n"
+ " parallel. GN will create a pool for shared library and executable\n"
+ " link steps with this many processes. Since linking is memory- and\n"
+ " I/O-intensive, projects with many large targets may want to limit\n"
+ " the number of parallel steps to avoid overloading the computer.\n"
+ " Since creating static libraries is generally not as intensive\n"
+ " there is no limit to \"alink\" steps.\n"
+ "\n"
+ " Defaults to 0 which Ninja interprets as \"no limit\".\n"
+ "\n"
+ " The value used will be the one from the default toolchain of the\n"
+ " current build.\n"
+ "\n"
+ "Invoking targets in toolchains:\n"
+ "\n"
+ " By default, when a target depends on another, there is an implicit\n"
+ " toolchain label that is inherited, so the dependee has the same one\n"
+ " as the dependent.\n"
+ "\n"
+ " You can override this and refer to any other toolchain by explicitly\n"
+ " labeling the toolchain to use. For example:\n"
+ " data_deps = [ \"//plugins:mine(//toolchains:plugin_toolchain)\" ]\n"
+ " The string \"//build/toolchains:plugin_toolchain\" is a label that\n"
+ " identifies the toolchain declaration for compiling the sources.\n"
+ "\n"
+ " To load a file in an alternate toolchain, GN does the following:\n"
+ "\n"
+ " 1. Loads the file with the toolchain definition in it (as determined\n"
+ " by the toolchain label).\n"
+ " 2. Re-runs the master build configuration file, applying the\n"
+ " arguments specified by the toolchain_args section of the toolchain\n"
+ " definition (see \"gn help toolchain_args\").\n"
+ " 3. Loads the destination build file in the context of the\n"
+ " configuration file in the previous step.\n"
+ "\n"
+ "Example:\n"
+ " toolchain(\"plugin_toolchain\") {\n"
+ " concurrent_links = 8\n"
+ "\n"
+ " tool(\"cc\") {\n"
+ " command = \"gcc {{source}}\"\n"
+ " ...\n"
+ " }\n"
+ "\n"
+ " toolchain_args() {\n"
+ " is_plugin = true\n"
+ " is_32bit = true\n"
+ " is_64bit = false\n"
+ " }\n"
+ " }\n";
+
+// Implements the toolchain() built-in function. Executes |block| in a nested
+// scope, gathers the tool() definitions made there into a new Toolchain
+// object, and appends that object to the enclosing scope's item collector.
+// Always returns an empty Value; on failure *err is set and the function
+// returns early.
+Value RunToolchain(Scope* scope,
+                   const FunctionCallNode* function,
+                   const std::vector<Value>& args,
+                   BlockNode* block,
+                   Err* err) {
+  // toolchain() may not appear nested inside another non-nestable block.
+  NonNestableBlock non_nestable(scope, function, "toolchain");
+  if (!non_nestable.Enter(err))
+    return Value();
+
+  if (!EnsureNotProcessingImport(function, scope, err) ||
+      !EnsureNotProcessingBuildConfig(function, scope, err))
+    return Value();
+
+  // Note that we don't want to use MakeLabelForScope since that will include
+  // the toolchain name in the label, and toolchain labels don't themselves
+  // have toolchain names.
+  // NOTE(review): args[0] is read without an argument-count/type check here
+  // (unlike RunTool, which calls EnsureSingleStringArg) -- presumably the
+  // caller validates this; verify.
+  const SourceDir& input_dir = scope->GetSourceDir();
+  Label label(input_dir, args[0].string_value());
+  if (g_scheduler->verbose_logging())
+    g_scheduler->Log("Defining toolchain", label.GetUserVisibleName(false));
+
+  // This object will actually be copied into the one owned by the toolchain
+  // manager, but that has to be done in the lock.
+  std::unique_ptr<Toolchain> toolchain(new Toolchain(scope->settings(), label));
+  toolchain->set_defined_from(function);
+  toolchain->visibility().SetPublic();
+
+  // Expose the toolchain to tool()/toolchain_args() calls inside the block
+  // via a scope property, and clear the property again once the block has
+  // executed so the pointer cannot be observed afterwards.
+  Scope block_scope(scope);
+  block_scope.SetProperty(&kToolchainPropertyKey, toolchain.get());
+  block->Execute(&block_scope, err);
+  block_scope.SetProperty(&kToolchainPropertyKey, nullptr);
+  if (err->has_error())
+    return Value();
+
+  // Read deps (if any).
+  const Value* deps_value = block_scope.GetValue(variables::kDeps, true);
+  if (deps_value) {
+    ExtractListOfLabels(
+        *deps_value, block_scope.GetSourceDir(),
+        ToolchainLabelForScope(&block_scope), &toolchain->deps(), err);
+    if (err->has_error())
+      return Value();
+  }
+
+  // Read concurrent_links (if any). It must be a non-negative integer that
+  // fits in an int; per the help text, 0 means "no limit".
+  const Value* concurrent_links_value =
+      block_scope.GetValue("concurrent_links", true);
+  if (concurrent_links_value) {
+    if (!concurrent_links_value->VerifyTypeIs(Value::INTEGER, err))
+      return Value();
+    if (concurrent_links_value->int_value() < 0 ||
+        concurrent_links_value->int_value() > std::numeric_limits<int>::max()) {
+      *err = Err(*concurrent_links_value, "Value out of range.");
+      return Value();
+    }
+    toolchain->set_concurrent_links(
+        static_cast<int>(concurrent_links_value->int_value()));
+  }
+
+  // Any variable set in the block but never consumed is reported as an error
+  // (it is most likely a typo).
+  if (!block_scope.CheckForUnusedVars(err))
+    return Value();
+
+  // Save this toolchain. The item collector takes ownership of the released
+  // raw pointer.
+  toolchain->ToolchainSetupComplete();
+  Scope::ItemVector* collector = scope->GetItemCollector();
+  if (!collector) {
+    *err = Err(function, "Can't define a toolchain in this context.");
+    return Value();
+  }
+  collector->push_back(toolchain.release());
+  return Value();
+}
+
+// tool ------------------------------------------------------------------------
+
+const char kTool[] = "tool";
+const char kTool_HelpShort[] =
+ "tool: Specify arguments to a toolchain tool.";
+const char kTool_Help[] =
+ "tool: Specify arguments to a toolchain tool.\n"
+ "\n"
+ "Usage:\n"
+ "\n"
+ " tool(<tool type>) {\n"
+ " <tool variables...>\n"
+ " }\n"
+ "\n"
+ "Tool types\n"
+ "\n"
+ " Compiler tools:\n"
+ " \"cc\": C compiler\n"
+ " \"cxx\": C++ compiler\n"
+ " \"objc\": Objective C compiler\n"
+ " \"objcxx\": Objective C++ compiler\n"
+ " \"rc\": Resource compiler (Windows .rc files)\n"
+ " \"asm\": Assembler\n"
+ "\n"
+ " Linker tools:\n"
+ " \"alink\": Linker for static libraries (archives)\n"
+ " \"solink\": Linker for shared libraries\n"
+ " \"link\": Linker for executables\n"
+ "\n"
+ " Other tools:\n"
+ " \"stamp\": Tool for creating stamp files\n"
+ " \"copy\": Tool to copy files.\n"
+ "\n"
+ " Platform specific tools:\n"
+ " \"copy_bundle_data\": [iOS, OS X] Tool to copy files in a bundle.\n"
+ " \"compile_xcassets\": [iOS, OS X] Tool to compile asset catalogs.\n"
+ "\n"
+ "Tool variables\n"
+ "\n"
+ " command [string with substitutions]\n"
+ " Valid for: all tools (required)\n"
+ "\n"
+ " The command to run.\n"
+ "\n"
+ " default_output_extension [string]\n"
+ " Valid for: linker tools\n"
+ "\n"
+ " Extension for the main output of a linkable tool. It includes\n"
+ " the leading dot. This will be the default value for the\n"
+ " {{output_extension}} expansion (discussed below) but will be\n"
+    "      overridden by the \"output extension\" variable in a target,\n"
+ " if one is specified. Empty string means no extension.\n"
+ "\n"
+ " GN doesn't actually do anything with this extension other than\n"
+ " pass it along, potentially with target-specific overrides. One\n"
+ " would typically use the {{output_extension}} value in the\n"
+ " \"outputs\" to read this value.\n"
+ "\n"
+ " Example: default_output_extension = \".exe\"\n"
+ "\n"
+ " depfile [string]\n"
+ " Valid for: compiler tools (optional)\n"
+ "\n"
+ " If the tool can write \".d\" files, this specifies the name of\n"
+ " the resulting file. These files are used to list header file\n"
+ " dependencies (or other implicit input dependencies) that are\n"
+ " discovered at build time. See also \"depsformat\".\n"
+ "\n"
+ " Example: depfile = \"{{output}}.d\"\n"
+ "\n"
+ " depsformat [string]\n"
+ " Valid for: compiler tools (when depfile is specified)\n"
+ "\n"
+ " Format for the deps outputs. This is either \"gcc\" or \"msvc\".\n"
+ " See the ninja documentation for \"deps\" for more information.\n"
+ "\n"
+ " Example: depsformat = \"gcc\"\n"
+ "\n"
+ " description [string with substitutions, optional]\n"
+ " Valid for: all tools\n"
+ "\n"
+ " What to print when the command is run.\n"
+ "\n"
+ " Example: description = \"Compiling {{source}}\"\n"
+ "\n"
+ " lib_switch [string, optional, link tools only]\n"
+ " lib_dir_switch [string, optional, link tools only]\n"
+ " Valid for: Linker tools except \"alink\"\n"
+ "\n"
+ " These strings will be prepended to the libraries and library\n"
+ " search directories, respectively, because linkers differ on how\n"
+    "      to specify them. If you specified:\n"
+ " lib_switch = \"-l\"\n"
+ " lib_dir_switch = \"-L\"\n"
+ " then the \"{{libs}}\" expansion for [ \"freetype\", \"expat\"]\n"
+ " would be \"-lfreetype -lexpat\".\n"
+ "\n"
+ " outputs [list of strings with substitutions]\n"
+ " Valid for: Linker and compiler tools (required)\n"
+ "\n"
+ " An array of names for the output files the tool produces. These\n"
+ " are relative to the build output directory. There must always be\n"
+ " at least one output file. There can be more than one output (a\n"
+ " linker might produce a library and an import library, for\n"
+ " example).\n"
+ "\n"
+ " This array just declares to GN what files the tool will\n"
+ " produce. It is your responsibility to specify the tool command\n"
+ " that actually produces these files.\n"
+ "\n"
+ " If you specify more than one output for shared library links,\n"
+ " you should consider setting link_output, depend_output, and\n"
+ " runtime_link_output. Otherwise, the first entry in the\n"
+ " outputs list should always be the main output which will be\n"
+ " linked to.\n"
+ "\n"
+ " Example for a compiler tool that produces .obj files:\n"
+ " outputs = [\n"
+ " \"{{source_out_dir}}/{{source_name_part}}.obj\"\n"
+ " ]\n"
+ "\n"
+ " Example for a linker tool that produces a .dll and a .lib. The\n"
+ " use of {{output_extension}} rather than hardcoding \".dll\"\n"
+ " allows the extension of the library to be overridden on a\n"
+ " target-by-target basis, but in this example, it always\n"
+ " produces a \".lib\" import library:\n"
+ " outputs = [\n"
+ " \"{{root_out_dir}}/{{target_output_name}}"
+ "{{output_extension}}\",\n"
+ " \"{{root_out_dir}}/{{target_output_name}}.lib\",\n"
+ " ]\n"
+ "\n"
+ " link_output [string with substitutions]\n"
+ " depend_output [string with substitutions]\n"
+ " runtime_link_output [string with substitutions]\n"
+ " Valid for: \"solink\" only (optional)\n"
+ "\n"
+ " These three files specify which of the outputs from the solink\n"
+ " tool should be used for linking and dependency tracking. These\n"
+ " should match entries in the \"outputs\". If unspecified, the\n"
+ " first item in the \"outputs\" array will be used for all. See\n"
+ " \"Separate linking and dependencies for shared libraries\"\n"
+ " below for more. If link_output is set but runtime_link_output\n"
+ " is not set, runtime_link_output defaults to link_output.\n"
+ "\n"
+ " On Windows, where the tools produce a .dll shared library and\n"
+ " a .lib import library, you will want the first two to be the\n"
+ " import library and the third one to be the .dll file.\n"
+ " On Linux, if you're not doing the separate linking/dependency\n"
+ " optimization, all of these should be the .so output.\n"
+ "\n"
+ " output_prefix [string]\n"
+ " Valid for: Linker tools (optional)\n"
+ "\n"
+ " Prefix to use for the output name. Defaults to empty. This\n"
+ " prefix will be prepended to the name of the target (or the\n"
+ " output_name if one is manually specified for it) if the prefix\n"
+ " is not already there. The result will show up in the\n"
+ " {{output_name}} substitution pattern.\n"
+ "\n"
+ " Individual targets can opt-out of the output prefix by setting:\n"
+ " output_prefix_override = true\n"
+ " (see \"gn help output_prefix_override\").\n"
+ "\n"
+ " This is typically used to prepend \"lib\" to libraries on\n"
+ " Posix systems:\n"
+ " output_prefix = \"lib\"\n"
+ "\n"
+ " precompiled_header_type [string]\n"
+ " Valid for: \"cc\", \"cxx\", \"objc\", \"objcxx\"\n"
+ "\n"
+ " Type of precompiled headers. If undefined or the empty string,\n"
+ " precompiled headers will not be used for this tool. Otherwise\n"
+ " use \"gcc\" or \"msvc\".\n"
+ "\n"
+ " For precompiled headers to be used for a given target, the\n"
+ " target (or a config applied to it) must also specify a\n"
+ " \"precompiled_header\" and, for \"msvc\"-style headers, a\n"
+ " \"precompiled_source\" value. If the type is \"gcc\", then both\n"
+ " \"precompiled_header\" and \"precompiled_source\" must resolve\n"
+ " to the same file, despite the different formats required for each."
+ "\n"
+ " See \"gn help precompiled_header\" for more.\n"
+ "\n"
+ " restat [boolean]\n"
+ " Valid for: all tools (optional, defaults to false)\n"
+ "\n"
+ " Requests that Ninja check the file timestamp after this tool has\n"
+ " run to determine if anything changed. Set this if your tool has\n"
+ " the ability to skip writing output if the output file has not\n"
+ " changed.\n"
+ "\n"
+ " Normally, Ninja will assume that when a tool runs the output\n"
+    "      will be new and downstream dependents must be rebuilt. When this is\n"
+    "      set to true, Ninja can skip rebuilding downstream dependents for\n"
+ " input changes that don't actually affect the output.\n"
+ "\n"
+ " Example:\n"
+ " restat = true\n"
+ "\n"
+ " rspfile [string with substitutions]\n"
+ " Valid for: all tools (optional)\n"
+ "\n"
+ " Name of the response file. If empty, no response file will be\n"
+ " used. See \"rspfile_content\".\n"
+ "\n"
+ " rspfile_content [string with substitutions]\n"
+ " Valid for: all tools (required when \"rspfile\" is specified)\n"
+ "\n"
+ " The contents to be written to the response file. This may\n"
+ " include all or part of the command to send to the tool which\n"
+ " allows you to get around OS command-line length limits.\n"
+ "\n"
+ " This example adds the inputs and libraries to a response file,\n"
+ " but passes the linker flags directly on the command line:\n"
+ " tool(\"link\") {\n"
+ " command = \"link -o {{output}} {{ldflags}} @{{output}}.rsp\"\n"
+ " rspfile = \"{{output}}.rsp\"\n"
+ " rspfile_content = \"{{inputs}} {{solibs}} {{libs}}\"\n"
+ " }\n"
+ "\n"
+ "Expansions for tool variables\n"
+ "\n"
+ " All paths are relative to the root build directory, which is the\n"
+ " current directory for running all tools. These expansions are\n"
+ " available to all tools:\n"
+ "\n"
+ " {{label}}\n"
+ " The label of the current target. This is typically used in the\n"
+ " \"description\" field for link tools. The toolchain will be\n"
+ " omitted from the label for targets in the default toolchain, and\n"
+ " will be included for targets in other toolchains.\n"
+ "\n"
+ " {{label_name}}\n"
+ " The short name of the label of the target. This is the part\n"
+ " after the colon. For \"//foo/bar:baz\" this will be \"baz\".\n"
+ " Unlike {{target_output_name}}, this is not affected by the\n"
+ " \"output_prefix\" in the tool or the \"output_name\" set\n"
+ " on the target.\n"
+ "\n"
+ " {{output}}\n"
+ " The relative path and name of the output(s) of the current\n"
+ " build step. If there is more than one output, this will expand\n"
+ " to a list of all of them.\n"
+ " Example: \"out/base/my_file.o\"\n"
+ "\n"
+ " {{target_gen_dir}}\n"
+ " {{target_out_dir}}\n"
+ " The directory of the generated file and output directories,\n"
+ " respectively, for the current target. There is no trailing\n"
+ " slash.\n"
+ " Example: \"out/base/test\"\n"
+ "\n"
+ " {{target_output_name}}\n"
+ " The short name of the current target with no path information,\n"
+ " or the value of the \"output_name\" variable if one is specified\n"
+ " in the target. This will include the \"output_prefix\" if any.\n"
+ " See also {{label_name}}.\n"
+ " Example: \"libfoo\" for the target named \"foo\" and an\n"
+ " output prefix for the linker tool of \"lib\".\n"
+ "\n"
+ " Compiler tools have the notion of a single input and a single output,\n"
+ " along with a set of compiler-specific flags. The following expansions\n"
+ " are available:\n"
+ "\n"
+ " {{asmflags}}\n"
+ " {{cflags}}\n"
+ " {{cflags_c}}\n"
+ " {{cflags_cc}}\n"
+ " {{cflags_objc}}\n"
+ " {{cflags_objcc}}\n"
+ " {{defines}}\n"
+ " {{include_dirs}}\n"
+    "      Strings that correspond to the processed flags/defines/include\n"
+ " directories specified for the target.\n"
+ " Example: \"--enable-foo --enable-bar\"\n"
+ "\n"
+ " Defines will be prefixed by \"-D\" and include directories will\n"
+ " be prefixed by \"-I\" (these work with Posix tools as well as\n"
+ " Microsoft ones).\n"
+ "\n"
+ " {{source}}\n"
+ " The relative path and name of the current input file.\n"
+ " Example: \"../../base/my_file.cc\"\n"
+ "\n"
+ " {{source_file_part}}\n"
+ " The file part of the source including the extension (with no\n"
+ " directory information).\n"
+ " Example: \"foo.cc\"\n"
+ "\n"
+ " {{source_name_part}}\n"
+ " The filename part of the source file with no directory or\n"
+ " extension.\n"
+ " Example: \"foo\"\n"
+ "\n"
+ " {{source_gen_dir}}\n"
+ " {{source_out_dir}}\n"
+ " The directory in the generated file and output directories,\n"
+ " respectively, for the current input file. If the source file\n"
+    "      is in the same directory as the target is declared in, they\n"
+ " will be the same as the \"target\" versions above.\n"
+ " Example: \"gen/base/test\"\n"
+ "\n"
+    "  Linker tools have multiple inputs and (potentially) multiple outputs.\n"
+ " The following expansions are available:\n"
+ "\n"
+ " {{inputs}}\n"
+ " {{inputs_newline}}\n"
+ " Expands to the inputs to the link step. This will be a list of\n"
+ " object files and static libraries.\n"
+ " Example: \"obj/foo.o obj/bar.o obj/somelibrary.a\"\n"
+ "\n"
+ " The \"_newline\" version will separate the input files with\n"
+ " newlines instead of spaces. This is useful in response files:\n"
+ " some linkers can take a \"-filelist\" flag which expects newline\n"
+ " separated files, and some Microsoft tools have a fixed-sized\n"
+ " buffer for parsing each line of a response file.\n"
+ "\n"
+ " {{ldflags}}\n"
+ " Expands to the processed set of ldflags and library search paths\n"
+ " specified for the target.\n"
+ " Example: \"-m64 -fPIC -pthread -L/usr/local/mylib\"\n"
+ "\n"
+ " {{libs}}\n"
+ " Expands to the list of system libraries to link to. Each will\n"
+ " be prefixed by the \"lib_prefix\".\n"
+ "\n"
+ " As a special case to support Mac, libraries with names ending in\n"
+ " \".framework\" will be added to the {{libs}} with \"-framework\"\n"
+    "      preceding it, and the lib prefix will be ignored.\n"
+ "\n"
+ " Example: \"-lfoo -lbar\"\n"
+ "\n"
+ " {{output_extension}}\n"
+ " The value of the \"output_extension\" variable in the target,\n"
+ " or the value of the \"default_output_extension\" value in the\n"
+ " tool if the target does not specify an output extension.\n"
+ " Example: \".so\"\n"
+ "\n"
+ " {{solibs}}\n"
+    "      Extra libraries from shared library dependencies not specified\n"
+ " in the {{inputs}}. This is the list of link_output files from\n"
+ " shared libraries (if the solink tool specifies a \"link_output\"\n"
+ " variable separate from the \"depend_output\").\n"
+ "\n"
+ " These should generally be treated the same as libs by your tool.\n"
+ " Example: \"libfoo.so libbar.so\"\n"
+ "\n"
+ " The copy tool allows the common compiler/linker substitutions, plus\n"
+ " {{source}} which is the source of the copy. The stamp tool allows\n"
+ " only the common tool substitutions.\n"
+ "\n"
+    "  The copy_bundle_data and compile_xcassets tools only allow the common\n"
+ " tool substitutions. Both tools are required to create iOS/OS X bundles\n"
+ " and need only be defined on those platforms.\n"
+ "\n"
+ " The copy_bundle_data tool will be called with one source and needs to\n"
+ " copy (optionally optimizing the data representation) to its output. It\n"
+ " may be called with a directory as input and it needs to be recursively\n"
+ " copied.\n"
+ "\n"
+ " The compile_xcassets tool will be called with one or more source (each\n"
+ " an asset catalog) that needs to be compiled to a single output.\n"
+ "\n"
+ "Separate linking and dependencies for shared libraries\n"
+ "\n"
+ " Shared libraries are special in that not all changes to them require\n"
+ " that dependent targets be re-linked. If the shared library is changed\n"
+ " but no imports or exports are different, dependent code needn't be\n"
+ " relinked, which can speed up the build.\n"
+ "\n"
+ " If your link step can output a list of exports from a shared library\n"
+ " and writes the file only if the new one is different, the timestamp of\n"
+ " this file can be used for triggering re-links, while the actual shared\n"
+ " library would be used for linking.\n"
+ "\n"
+ " You will need to specify\n"
+ " restat = true\n"
+ " in the linker tool to make this work, so Ninja will detect if the\n"
+ " timestamp of the dependency file has changed after linking (otherwise\n"
+ " it will always assume that running a command updates the output):\n"
+ "\n"
+ " tool(\"solink\") {\n"
+ " command = \"...\"\n"
+ " outputs = [\n"
+ " \"{{root_out_dir}}/{{target_output_name}}{{output_extension}}\",\n"
+ " \"{{root_out_dir}}/{{target_output_name}}"
+ "{{output_extension}}.TOC\",\n"
+ " ]\n"
+ " link_output =\n"
+ " \"{{root_out_dir}}/{{target_output_name}}{{output_extension}}\"\n"
+ " depend_output =\n"
+ " \"{{root_out_dir}}/{{target_output_name}}"
+ "{{output_extension}}.TOC\"\n"
+ " restat = true\n"
+ " }\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " toolchain(\"my_toolchain\") {\n"
+ " # Put these at the top to apply to all tools below.\n"
+ " lib_prefix = \"-l\"\n"
+ " lib_dir_prefix = \"-L\"\n"
+ "\n"
+ " tool(\"cc\") {\n"
+ " command = \"gcc {{source}} -o {{output}}\"\n"
+ " outputs = [ \"{{source_out_dir}}/{{source_name_part}}.o\" ]\n"
+ " description = \"GCC {{source}}\"\n"
+ " }\n"
+ " tool(\"cxx\") {\n"
+ " command = \"g++ {{source}} -o {{output}}\"\n"
+ " outputs = [ \"{{source_out_dir}}/{{source_name_part}}.o\" ]\n"
+ " description = \"G++ {{source}}\"\n"
+ " }\n"
+ " }\n";
+
+// Implements the tool() built-in function. Must be invoked inside a
+// toolchain() block: the enclosing Toolchain is located via the scope
+// property set by RunToolchain. Reads the tool variables from |block|,
+// validates substitution patterns and the output-related constraints, and
+// registers the resulting Tool on the toolchain. Always returns an empty
+// Value; on failure *err is set.
+Value RunTool(Scope* scope,
+              const FunctionCallNode* function,
+              const std::vector<Value>& args,
+              BlockNode* block,
+              Err* err) {
+  // Find the toolchain definition we're executing inside of. The toolchain
+  // function will set a property pointing to it that we'll pick up.
+  Toolchain* toolchain = reinterpret_cast<Toolchain*>(
+      scope->GetProperty(&kToolchainPropertyKey, nullptr));
+  if (!toolchain) {
+    *err = Err(function->function(), "tool() called outside of toolchain().",
+               "The tool() function can only be used inside a toolchain() "
+               "definition.");
+    return Value();
+  }
+
+  // The single argument names the tool, e.g. tool("cc").
+  if (!EnsureSingleStringArg(function, args, err))
+    return Value();
+  const std::string& tool_name = args[0].string_value();
+  Toolchain::ToolType tool_type = Toolchain::ToolNameToType(tool_name);
+  if (tool_type == Toolchain::TYPE_NONE) {
+    *err = Err(args[0], "Unknown tool type");
+    return Value();
+  }
+
+  // Run the tool block.
+  Scope block_scope(scope);
+  block->Execute(&block_scope, err);
+  if (err->has_error())
+    return Value();
+
+  // Figure out which validator to use for the substitution pattern for this
+  // tool type. There are different validators for the "outputs" than for the
+  // rest of the strings.
+  bool (*subst_validator)(SubstitutionType) = nullptr;
+  bool (*subst_output_validator)(SubstitutionType) = nullptr;
+  if (IsCompilerTool(tool_type)) {
+    subst_validator = &IsValidCompilerSubstitution;
+    subst_output_validator = &IsValidCompilerOutputsSubstitution;
+  } else if (IsLinkerTool(tool_type)) {
+    subst_validator = &IsValidLinkerSubstitution;
+    subst_output_validator = &IsValidLinkerOutputsSubstitution;
+  } else if (tool_type == Toolchain::TYPE_COPY ||
+             tool_type == Toolchain::TYPE_COPY_BUNDLE_DATA) {
+    subst_validator = &IsValidCopySubstitution;
+    subst_output_validator = &IsValidCopySubstitution;
+  } else if (tool_type == Toolchain::TYPE_COMPILE_XCASSETS) {
+    subst_validator = &IsValidCompileXCassetsSubstitution;
+    subst_output_validator = &IsValidCompileXCassetsSubstitution;
+  } else {
+    subst_validator = &IsValidToolSubstitution;
+    subst_output_validator = &IsValidToolSubstitution;
+  }
+
+  std::unique_ptr<Tool> tool(new Tool);
+
+  // Read every variable the tool block may define. Each reader returns false
+  // and sets *err on failure, short-circuiting the chain.
+  if (!ReadPattern(&block_scope, "command", subst_validator, tool.get(),
+                   &Tool::set_command, err) ||
+      !ReadOutputExtension(&block_scope, tool.get(), err) ||
+      !ReadPattern(&block_scope, "depfile", subst_validator, tool.get(),
+                   &Tool::set_depfile, err) ||
+      !ReadDepsFormat(&block_scope, tool.get(), err) ||
+      !ReadPattern(&block_scope, "description", subst_validator, tool.get(),
+                   &Tool::set_description, err) ||
+      !ReadString(&block_scope, "lib_switch", tool.get(), &Tool::set_lib_switch,
+                  err) ||
+      !ReadString(&block_scope, "lib_dir_switch", tool.get(),
+                  &Tool::set_lib_dir_switch, err) ||
+      !ReadPattern(&block_scope, "link_output", subst_validator, tool.get(),
+                   &Tool::set_link_output, err) ||
+      !ReadPattern(&block_scope, "depend_output", subst_validator, tool.get(),
+                   &Tool::set_depend_output, err) ||
+      !ReadPattern(&block_scope, "runtime_link_output", subst_validator,
+                   tool.get(), &Tool::set_runtime_link_output, err) ||
+      !ReadString(&block_scope, "output_prefix", tool.get(),
+                  &Tool::set_output_prefix, err) ||
+      !ReadPrecompiledHeaderType(&block_scope, tool.get(), err) ||
+      !ReadBool(&block_scope, "restat", tool.get(), &Tool::set_restat, err) ||
+      !ReadPattern(&block_scope, "rspfile", subst_validator, tool.get(),
+                   &Tool::set_rspfile, err) ||
+      !ReadPattern(&block_scope, "rspfile_content", subst_validator, tool.get(),
+                   &Tool::set_rspfile_content, err)) {
+    return Value();
+  }
+
+  if (tool_type != Toolchain::TYPE_COPY &&
+      tool_type != Toolchain::TYPE_STAMP &&
+      tool_type != Toolchain::TYPE_COPY_BUNDLE_DATA &&
+      tool_type != Toolchain::TYPE_COMPILE_XCASSETS) {
+    // All tools should have outputs, except the copy, stamp, copy_bundle_data
+    // and compile_xcassets tools that generate their outputs internally.
+    if (!ReadOutputs(&block_scope, function, subst_output_validator,
+                     tool.get(), err))
+      return Value();
+  }
+
+  // Validate that the link_output, depend_output, and runtime_link_output
+  // refer to items in the outputs and aren't defined for irrelevant tool
+  // types.
+  if (!tool->link_output().empty()) {
+    if (tool_type != Toolchain::TYPE_SOLINK &&
+        tool_type != Toolchain::TYPE_SOLINK_MODULE) {
+      *err = Err(function, "This tool specifies a link_output.",
+                 "This is only valid for solink and solink_module tools.");
+      return Value();
+    }
+    if (!IsPatternInOutputList(tool->outputs(), tool->link_output())) {
+      *err = Err(function, "This tool's link_output is bad.",
+                 "It must match one of the outputs.");
+      return Value();
+    }
+  }
+  if (!tool->depend_output().empty()) {
+    if (tool_type != Toolchain::TYPE_SOLINK &&
+        tool_type != Toolchain::TYPE_SOLINK_MODULE) {
+      *err = Err(function, "This tool specifies a depend_output.",
+                 "This is only valid for solink and solink_module tools.");
+      return Value();
+    }
+    if (!IsPatternInOutputList(tool->outputs(), tool->depend_output())) {
+      *err = Err(function, "This tool's depend_output is bad.",
+                 "It must match one of the outputs.");
+      return Value();
+    }
+  }
+  // link_output and depend_output must be specified together (or not at all).
+  if ((!tool->link_output().empty() && tool->depend_output().empty()) ||
+      (tool->link_output().empty() && !tool->depend_output().empty())) {
+    *err = Err(function, "Both link_output and depend_output should either "
+        "be specified or they should both be empty.");
+    return Value();
+  }
+  if (!tool->runtime_link_output().empty()) {
+    if (tool_type != Toolchain::TYPE_SOLINK &&
+        tool_type != Toolchain::TYPE_SOLINK_MODULE) {
+      *err = Err(function, "This tool specifies a runtime_link_output.",
+                 "This is only valid for solink and solink_module tools.");
+      return Value();
+    }
+    if (!IsPatternInOutputList(tool->outputs(), tool->runtime_link_output())) {
+      *err = Err(function, "This tool's runtime_link_output is bad.",
+                 "It must match one of the outputs.");
+      return Value();
+    }
+  }
+
+  // Make sure there weren't any vars set in this tool that were unused.
+  if (!block_scope.CheckForUnusedVars(err))
+    return Value();
+
+  // Hand the completed tool over to the toolchain (which takes ownership).
+  toolchain->SetTool(tool_type, std::move(tool));
+  return Value();
+}
+
+// toolchain_args --------------------------------------------------------------
+
+extern const char kToolchainArgs[] = "toolchain_args";
+extern const char kToolchainArgs_HelpShort[] =
+ "toolchain_args: Set build arguments for toolchain build setup.";
+extern const char kToolchainArgs_Help[] =
+ "toolchain_args: Set build arguments for toolchain build setup.\n"
+ "\n"
+ " Used inside a toolchain definition to pass arguments to an alternate\n"
+ " toolchain's invocation of the build.\n"
+ "\n"
+ " When you specify a target using an alternate toolchain, the master\n"
+ " build configuration file is re-interpreted in the context of that\n"
+ " toolchain (see \"gn help toolchain\"). The toolchain_args function\n"
+ " allows you to control the arguments passed into this alternate\n"
+ " invocation of the build.\n"
+ "\n"
+ " Any default system arguments or arguments passed in on the command-\n"
+ " line will also be passed to the alternate invocation unless explicitly\n"
+ " overridden by toolchain_args.\n"
+ "\n"
+ " The toolchain_args will be ignored when the toolchain being defined\n"
+ " is the default. In this case, it's expected you want the default\n"
+ " argument values.\n"
+ "\n"
+ " See also \"gn help buildargs\" for an overview of these arguments.\n"
+ "\n"
+ "Example:\n"
+ " toolchain(\"my_weird_toolchain\") {\n"
+ " ...\n"
+ " toolchain_args() {\n"
+ " # Override the system values for a generic Posix system.\n"
+ " is_win = false\n"
+ " is_posix = true\n"
+ "\n"
+ " # Pass this new value for specific setup for my toolchain.\n"
+ " is_my_weird_system = true\n"
+ " }\n"
+ " }\n";
+
+// Implements the toolchain_args() built-in function. Must be invoked inside
+// a toolchain() definition (located via the same scope property as tool()).
+// Captures the variables assigned in |block| as the build arguments to apply
+// when the build is re-invoked for this toolchain. Always returns an empty
+// Value; on failure *err is set.
+Value RunToolchainArgs(Scope* scope,
+                       const FunctionCallNode* function,
+                       const std::vector<Value>& args,
+                       BlockNode* block,
+                       Err* err) {
+  // Find the toolchain definition we're executing inside of. The toolchain
+  // function will set a property pointing to it that we'll pick up.
+  Toolchain* toolchain = reinterpret_cast<Toolchain*>(
+      scope->GetProperty(&kToolchainPropertyKey, nullptr));
+  if (!toolchain) {
+    *err = Err(function->function(),
+               "toolchain_args() called outside of toolchain().",
+               "The toolchain_args() function can only be used inside a "
+               "toolchain() definition.");
+    return Value();
+  }
+
+  // toolchain_args() takes no arguments; everything comes from the block.
+  if (!args.empty()) {
+    *err = Err(function->function(), "This function takes no arguments.");
+    return Value();
+  }
+
+  // This function makes a new scope with various variable sets on it, which
+  // we then save on the toolchain to use when re-invoking the build.
+  Scope block_scope(scope);
+  block->Execute(&block_scope, err);
+  if (err->has_error())
+    return Value();
+
+  Scope::KeyValueMap values;
+  block_scope.GetCurrentScopeValues(&values);
+  // Replaces any args previously stored on the toolchain.
+  toolchain->args() = values;
+
+  return Value();
+}
+
+} // namespace functions
diff --git a/chromium/tools/gn/function_write_file.cc b/chromium/tools/gn/function_write_file.cc
new file mode 100644
index 00000000000..feb1004c3f2
--- /dev/null
+++ b/chromium/tools/gn/function_write_file.cc
@@ -0,0 +1,103 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <iostream>
+#include <sstream>
+
+#include "base/files/file_util.h"
+#include "base/strings/string_split.h"
+#include "base/strings/string_util.h"
+#include "base/strings/utf_string_conversions.h"
+#include "build/build_config.h"
+#include "tools/gn/err.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scheduler.h"
+
+namespace functions {
+
+const char kWriteFile[] = "write_file";
+const char kWriteFile_HelpShort[] =
+ "write_file: Write a file to disk.";
+const char kWriteFile_Help[] =
+ "write_file: Write a file to disk.\n"
+ "\n"
+ " write_file(filename, data)\n"
+ "\n"
+ " If data is a list, the list will be written one-item-per-line with no\n"
+ " quoting or brackets.\n"
+ "\n"
+ " If the file exists and the contents are identical to that being\n"
+ " written, the file will not be updated. This will prevent unnecessary\n"
+ " rebuilds of targets that depend on this file.\n"
+ "\n"
+    "  One use for write_file is to write a list of inputs to a script\n"
+ " that might be too long for the command line. However, it is\n"
+    "  preferable to use response files for this purpose. See\n"
+ " \"gn help response_file_contents\".\n"
+ "\n"
+ " TODO(brettw) we probably need an optional third argument to control\n"
+ " list formatting.\n"
+ "\n"
+ "Arguments\n"
+ "\n"
+ " filename\n"
+ " Filename to write. This must be within the output directory.\n"
+ "\n"
+ " data:\n"
+ " The list or string to write.\n";
+
+// Implements write_file(filename, data). Writes |data| to |filename| (which
+// must resolve to a path inside the build output directory). A list is
+// written one item per line; anything else is written via its string form.
+// The file is left untouched when the contents are already identical, so
+// timestamp-based rebuilds are not triggered needlessly. Always returns an
+// empty Value; on failure *err is set.
+Value RunWriteFile(Scope* scope,
+                   const FunctionCallNode* function,
+                   const std::vector<Value>& args,
+                   Err* err) {
+  if (args.size() != 2) {
+    *err = Err(function->function(), "Wrong number of arguments to write_file",
+               "I expected two arguments.");
+    return Value();
+  }
+
+  // Compute the file name and make sure it's in the output dir.
+  const SourceDir& cur_dir = scope->GetSourceDir();
+  SourceFile source_file = cur_dir.ResolveRelativeFile(args[0], err,
+      scope->settings()->build_settings()->root_path_utf8());
+  if (err->has_error())
+    return Value();
+  if (!EnsureStringIsInOutputDir(
+          scope->settings()->build_settings()->build_dir(),
+          source_file.value(), args[0].origin(), err))
+    return Value();
+  g_scheduler->AddWrittenFile(source_file);  // Track that we wrote this file.
+
+  // Track how to recreate this file, since we write it at gen time.
+  // Note this is a hack since the correct output is not a dependency proper,
+  // but an addition of this file to the output of the gn rule that writes it.
+  // This dependency will, however, cause the gen step to be re-run and the
+  // build restarted if the file is missing.
+  g_scheduler->AddGenDependency(
+      scope->settings()->build_settings()->GetFullPath(source_file));
+
+  // Compute output.
+  std::ostringstream contents;
+  if (args[1].type() == Value::LIST) {
+    // Lists are written one item per line with no quoting or brackets.
+    const std::vector<Value>& list = args[1].list_value();
+    for (const auto& cur : list)
+      contents << cur.ToString(false) << std::endl;
+  } else {
+    contents << args[1].ToString(false);
+  }
+
+  base::FilePath file_path =
+      scope->settings()->build_settings()->GetFullPath(source_file);
+
+  // Make sure we're not replacing the same contents.
+  // On failure, re-wrap the error so it points at the write_file() call site.
+  if (!WriteFileIfChanged(file_path, contents.str(), err))
+    *err = Err(function->function(), err->message(), err->help_text());
+
+  return Value();
+}
+
+} // namespace functions
diff --git a/chromium/tools/gn/function_write_file_unittest.cc b/chromium/tools/gn/function_write_file_unittest.cc
new file mode 100644
index 00000000000..50a0f43ec0b
--- /dev/null
+++ b/chromium/tools/gn/function_write_file_unittest.cc
@@ -0,0 +1,90 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdint.h>
+
+#include "base/files/file.h"
+#include "base/files/file_util.h"
+#include "base/files/scoped_temp_dir.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/test_with_scope.h"
+
+namespace {
+
+// Invokes the write_file() built-in with |filename| and |data| as its two
+// arguments. Returns true on success, false if write_file signaled an error.
+bool CallWriteFile(Scope* scope,
+                   const std::string& filename,
+                   const Value& data) {
+  Err err;
+
+  // write_file(filename, data) takes exactly two arguments.
+  std::vector<Value> args;
+  args.push_back(Value(nullptr, filename));
+  args.push_back(data);
+
+  FunctionCallNode function_call;
+  Value result = functions::RunWriteFile(scope, &function_call, args, &err);
+  EXPECT_EQ(Value::NONE, result.type());  // Should always return none.
+
+  return !err.has_error();
+}
+
+}  // namespace
+
+TEST(WriteFile, WithData) {
+  // NOTE(review): write_file() records written files via g_scheduler, so a
+  // Scheduler instance must exist for the duration of the test — confirm.
+  Scheduler scheduler;
+  TestWithScope setup;
+
+  // Make a real directory for writing the files.
+  base::ScopedTempDir temp_dir;
+  ASSERT_TRUE(temp_dir.CreateUniqueTempDir());
+  setup.build_settings()->SetRootPath(temp_dir.path());
+  setup.build_settings()->SetBuildDir(SourceDir("//out/"));
+
+  Value some_string(nullptr, "some string contents");
+
+  // Should refuse to write files outside of the output dir.
+  EXPECT_FALSE(CallWriteFile(setup.scope(), "//in_root.txt", some_string));
+  EXPECT_FALSE(CallWriteFile(setup.scope(), "//other_dir/foo.txt",
+                             some_string));
+
+  // Should be able to write to a new dir inside the out dir.
+  EXPECT_TRUE(CallWriteFile(setup.scope(), "//out/foo.txt", some_string));
+  base::FilePath foo_name = temp_dir.path().Append(FILE_PATH_LITERAL("out"))
+      .Append(FILE_PATH_LITERAL("foo.txt"));
+  std::string result_contents;
+  EXPECT_TRUE(base::ReadFileToString(foo_name, &result_contents));
+  EXPECT_EQ(some_string.string_value(), result_contents);
+
+  // Update the contents with a list of a string and a number. Lists are
+  // written one element per line.
+  Value some_list(nullptr, Value::LIST);
+  some_list.list_value().push_back(Value(nullptr, "line 1"));
+  some_list.list_value().push_back(Value(nullptr, static_cast<int64_t>(2)));
+  EXPECT_TRUE(CallWriteFile(setup.scope(), "//out/foo.txt", some_list));
+  EXPECT_TRUE(base::ReadFileToString(foo_name, &result_contents));
+  EXPECT_EQ("line 1\n2\n", result_contents);
+
+  // Test that the file is not rewritten if the contents are not changed.
+  // Start by setting the modified time to something old to avoid clock
+  // resolution issues.
+  base::Time old_time = base::Time::Now() - base::TimeDelta::FromDays(1);
+  base::File foo_file(foo_name,
+                      base::File::FLAG_OPEN |
+                      base::File::FLAG_READ | base::File::FLAG_WRITE);
+  ASSERT_TRUE(foo_file.IsValid());
+  foo_file.SetTimes(old_time, old_time);
+
+  // Read the current time to avoid timer resolution issues when comparing
+  // below.
+  base::File::Info original_info;
+  foo_file.GetInfo(&original_info);
+
+  // Write identical contents again; this should be a no-op on disk.
+  EXPECT_TRUE(CallWriteFile(setup.scope(), "//out/foo.txt", some_list));
+
+  // Verify that the last modified time is the same as before.
+  base::File::Info new_info;
+  foo_file.GetInfo(&new_info);
+  EXPECT_EQ(original_info.last_modified, new_info.last_modified);
+}
diff --git a/chromium/tools/gn/functions.cc b/chromium/tools/gn/functions.cc
new file mode 100644
index 00000000000..f6d405d0a56
--- /dev/null
+++ b/chromium/tools/gn/functions.cc
@@ -0,0 +1,930 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/functions.h"
+
+#include <stddef.h>
+#include <iostream>
+#include <utility>
+
+#include "base/environment.h"
+#include "base/strings/string_util.h"
+#include "tools/gn/config.h"
+#include "tools/gn/config_values_generator.h"
+#include "tools/gn/err.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/template.h"
+#include "tools/gn/token.h"
+#include "tools/gn/value.h"
+#include "tools/gn/value_extractors.h"
+#include "tools/gn/variables.h"
+
+namespace {
+
+// Some functions take a {} following them, and some don't. For the ones that
+// don't, this is used to verify that the given block node is null and will
+// set the error accordingly if it's not. Returns true if the block is null.
+bool VerifyNoBlockForFunctionCall(const FunctionCallNode* function,
+                                  const BlockNode* block,
+                                  Err* err) {
+  if (!block)
+    return true;
+
+  *err = Err(block, "Unexpected '{'.",
+      "This function call doesn't take a {} block following it, and you\n"
+      "can't have a {} block that's not connected to something like an if\n"
+      "statement or a target declaration.");
+  // Also highlight the function name so the user sees both locations.
+  err->AppendRange(function->function().range());
+  return false;
+}
+
+}  // namespace
+
+// Verifies that this call is not happening while processing an import file
+// (imports should only define defaults, variables, and rules). Returns true
+// on success; sets |err| and returns false otherwise.
+bool EnsureNotProcessingImport(const ParseNode* node,
+                               const Scope* scope,
+                               Err* err) {
+  if (scope->IsProcessingImport()) {
+    *err = Err(node, "Not valid from an import.",
+               "Imports are for defining defaults, variables, and rules. The\n"
+               "appropriate place for this kind of thing is really in a normal\n"
+               "BUILD file.");
+    return false;
+  }
+  return true;
+}
+
+// Verifies that this call is not happening while processing the build config
+// script. Returns true on success; sets |err| and returns false otherwise.
+bool EnsureNotProcessingBuildConfig(const ParseNode* node,
+                                    const Scope* scope,
+                                    Err* err) {
+  if (scope->IsProcessingBuildConfig()) {
+    *err = Err(node, "Not valid from the build config.",
+               "You can't do this kind of thing from the build config script, "
+               "silly!\nPut it in a regular BUILD file.");
+    return false;
+  }
+  return true;
+}
+
+// Prepares |block_scope| for executing a target definition block: merges the
+// target defaults registered for |target_type| (if any), validates that
+// |args| is the single string target name, and defines/marks-used the
+// "target_name" variable. Returns false and sets |err| on any failure.
+bool FillTargetBlockScope(const Scope* scope,
+                          const FunctionCallNode* function,
+                          const std::string& target_type,
+                          const BlockNode* block,
+                          const std::vector<Value>& args,
+                          Scope* block_scope,
+                          Err* err) {
+  if (!block) {
+    FillNeedsBlockError(function, err);
+    return false;
+  }
+
+  // Copy the target defaults, if any, into the scope we're going to execute
+  // the block in.
+  const Scope* default_scope = scope->GetTargetDefaults(target_type);
+  if (default_scope) {
+    Scope::MergeOptions merge_options;
+    // Private ("_"-prefixed) variables in the defaults are not propagated.
+    merge_options.skip_private_vars = true;
+    if (!default_scope->NonRecursiveMergeTo(block_scope, merge_options,
+                                            function, "target defaults", err))
+      return false;
+  }
+
+  // The name is the single argument to the target function.
+  if (!EnsureSingleStringArg(function, args, err))
+    return false;
+
+  // Set the target name variable to the current target, and mark it used
+  // because we don't want to issue an error if the script ignores it.
+  const base::StringPiece target_name("target_name");
+  block_scope->SetValue(target_name, Value(function, args[0].string_value()),
+                        function);
+  block_scope->MarkUsed(target_name);
+  return true;
+}
+
+// Writes the standard "this function requires a block" message into |err|,
+// pointing at the function name token.
+void FillNeedsBlockError(const FunctionCallNode* function, Err* err) {
+  *err = Err(function->function(), "This function call requires a block.",
+             "The block's \"{\" must be on the same line as the function "
+             "call's \")\".");
+}
+
+// Validates that |args| holds exactly one Value of type STRING. Returns true
+// on success; sets |err| and returns false otherwise.
+bool EnsureSingleStringArg(const FunctionCallNode* function,
+                           const std::vector<Value>& args,
+                           Err* err) {
+  if (args.size() != 1) {
+    *err = Err(function->function(), "Incorrect arguments.",
+               "This function requires a single string argument.");
+    return false;
+  }
+  return args[0].VerifyTypeIs(Value::STRING, err);
+}
+
+// Returns the label of the toolchain in effect for the given scope's
+// settings.
+const Label& ToolchainLabelForScope(const Scope* scope) {
+  return scope->settings()->toolchain_label();
+}
+
+// Constructs the label for an item named |name| declared in |scope|, using
+// the scope's source directory and current toolchain.
+// NOTE(review): the |function| parameter is unused in this implementation.
+Label MakeLabelForScope(const Scope* scope,
+                        const FunctionCallNode* function,
+                        const std::string& name) {
+  const Label& toolchain_label = ToolchainLabelForScope(scope);
+  return Label(scope->GetSourceDir(), name, toolchain_label.dir(),
+               toolchain_label.name());
+}
+
+// static
+// Only the address of kKey is used (as a unique Scope property key), so the
+// value itself is irrelevant.
+const int NonNestableBlock::kKey = 0;
+
+NonNestableBlock::NonNestableBlock(
+    Scope* scope,
+    const FunctionCallNode* function,
+    const char* type_description)
+    : scope_(scope),
+      function_(function),
+      type_description_(type_description),
+      key_added_(false) {
+}
+
+NonNestableBlock::~NonNestableBlock() {
+  // Clear the marker so later (sibling, non-nested) blocks may be entered.
+  if (key_added_)
+    scope_->SetProperty(&kKey, nullptr);
+}
+
+// Returns true if this block may be entered (no enclosing non-nestable block
+// is active on the scope chain); otherwise sets |err| describing the nesting.
+bool NonNestableBlock::Enter(Err* err) {
+  void* scope_value = scope_->GetProperty(&kKey, nullptr);
+  if (scope_value) {
+    // Existing block.
+    const NonNestableBlock* existing =
+        reinterpret_cast<const NonNestableBlock*>(scope_value);
+    *err = Err(function_, "Can't nest these things.",
+               std::string("You are trying to nest a ") + type_description_ +
+               " inside a " + existing->type_description_ + ".");
+    err->AppendSubErr(Err(existing->function_, "The enclosing block."));
+    return false;
+  }
+
+  scope_->SetProperty(&kKey, this);
+  key_added_ = true;
+  return true;
+}
+
+namespace functions {
+
+// assert ----------------------------------------------------------------------
+
+const char kAssert[] = "assert";
+const char kAssert_HelpShort[] =
+    "assert: Assert an expression is true at generation time.";
+const char kAssert_Help[] =
+    "assert: Assert an expression is true at generation time.\n"
+    "\n"
+    "  assert(<condition> [, <error string>])\n"
+    "\n"
+    "  If the condition is false, the build will fail with an error. If the\n"
+    "  optional second argument is provided, that string will be printed\n"
+    "  with the error message.\n"
+    "\n"
+    "Examples:\n"
+    "  assert(is_win)\n"
+    "  assert(defined(sources), \"Sources must be defined\")\n";
+
+// Implements the assert() built-in. Sets |err| when the condition argument is
+// not a bool or is false; the optional second argument is a custom message
+// appended to the failure. Always returns an empty (NONE) Value.
+Value RunAssert(Scope* scope,
+                const FunctionCallNode* function,
+                const std::vector<Value>& args,
+                Err* err) {
+  if (args.size() != 1 && args.size() != 2) {
+    // Fixed grammar of the user-facing message ("argument" -> "arguments",
+    // "somethig" -> "something").
+    *err = Err(function->function(), "Wrong number of arguments.",
+               "assert() takes one or two arguments, "
+               "were you expecting something else?");
+  } else if (args[0].type() != Value::BOOLEAN) {
+    *err = Err(function->function(), "Assertion value not a bool.");
+  } else if (!args[0].boolean_value()) {
+    if (args.size() == 2) {
+      // Optional string message.
+      if (args[1].type() != Value::STRING) {
+        *err = Err(function->function(), "Assertion failed.",
+                   "<<<ERROR MESSAGE IS NOT A STRING>>>");
+      } else {
+        *err = Err(function->function(), "Assertion failed.",
+                   args[1].string_value());
+      }
+    } else {
+      *err = Err(function->function(), "Assertion failed.");
+    }
+
+    if (args[0].origin()) {
+      // If you do "assert(foo)" we'd ideally like to show you where foo was
+      // set, and in this case the origin of the args will tell us that.
+      // However, if you do "assert(foo && bar)" the source of the value will
+      // be the assert line, which isn't so helpful.
+      //
+      // So we try to see if the args are from the same line or not. This will
+      // break if you do "assert(\nfoo && bar)" and we may show the second line
+      // as the source, oh well. The way around this is to check to see if the
+      // origin node is inside our function call block.
+      Location origin_location = args[0].origin()->GetRange().begin();
+      if (origin_location.file() != function->function().location().file() ||
+          origin_location.line_number() !=
+              function->function().location().line_number()) {
+        err->AppendSubErr(Err(args[0].origin()->GetRange(), "",
+                              "This is where it was set."));
+      }
+    }
+  }
+  return Value();
+}
+
+// config ----------------------------------------------------------------------
+
+// Name and user-visible help text for the config() built-in; the
+// implementation is RunConfig() below.
+const char kConfig[] = "config";
+const char kConfig_HelpShort[] =
+    "config: Defines a configuration object.";
+const char kConfig_Help[] =
+    "config: Defines a configuration object.\n"
+    "\n"
+    "  Configuration objects can be applied to targets and specify sets of\n"
+    "  compiler flags, includes, defines, etc. They provide a way to\n"
+    "  conveniently group sets of this configuration information.\n"
+    "\n"
+    "  A config is referenced by its label just like a target.\n"
+    "\n"
+    "  The values in a config are additive only. If you want to remove a flag\n"
+    "  you need to remove the corresponding config that sets it. The final\n"
+    "  set of flags, defines, etc. for a target is generated in this order:\n"
+    "\n"
+    // Fixed unbalanced parenthesis: "using a config." -> "using a config)."
+    "   1. The values specified directly on the target (rather than using a\n"
+    "      config).\n"
+    "   2. The configs specified in the target's \"configs\" list, in order.\n"
+    "   3. Public_configs from a breadth-first traversal of the dependency\n"
+    "      tree in the order that the targets appear in \"deps\".\n"
+    "   4. All dependent configs from a breadth-first traversal of the\n"
+    "      dependency tree in the order that the targets appear in \"deps\".\n"
+    "\n"
+    "Variables valid in a config definition\n"
+    "\n"
+    CONFIG_VALUES_VARS_HELP
+    "  Nested configs: configs\n"
+    "\n"
+    "Variables on a target used to apply configs\n"
+    "\n"
+    "  all_dependent_configs, configs, public_configs\n"
+    "\n"
+    "Example\n"
+    "\n"
+    "  config(\"myconfig\") {\n"
+    "    includes = [ \"include/common\" ]\n"
+    "    defines = [ \"ENABLE_DOOM_MELON\" ]\n"
+    "  }\n"
+    "\n"
+    "  executable(\"mything\") {\n"
+    "    configs = [ \":myconfig\" ]\n"
+    "  }\n";
+
+// Implements config(). The dispatcher pre-executes the {} block into |scope|
+// (this is an ExecutedBlockFunction); here we create the Config item, fill
+// its values from that scope, and hand it to the scope's item collector.
+Value RunConfig(const FunctionCallNode* function,
+                const std::vector<Value>& args,
+                Scope* scope,
+                Err* err) {
+  // Configs may not be declared inside other non-nestable blocks.
+  NonNestableBlock non_nestable(scope, function, "config");
+  if (!non_nestable.Enter(err))
+    return Value();
+
+  if (!EnsureSingleStringArg(function, args, err) ||
+      !EnsureNotProcessingImport(function, scope, err))
+    return Value();
+
+  Label label(MakeLabelForScope(scope, function, args[0].string_value()));
+
+  if (g_scheduler->verbose_logging())
+    g_scheduler->Log("Defining config", label.GetUserVisibleName(true));
+
+  // Create the new config.
+  std::unique_ptr<Config> config(new Config(scope->settings(), label));
+  config->set_defined_from(function);
+  if (!Visibility::FillItemVisibility(config.get(), scope, err))
+    return Value();
+
+  // Fill the flags and such.
+  const SourceDir& input_dir = scope->GetSourceDir();
+  ConfigValuesGenerator gen(&config->own_values(), scope, input_dir, err);
+  gen.Run();
+  if (err->has_error())
+    return Value();
+
+  // Read sub-configs.
+  const Value* configs_value = scope->GetValue(variables::kConfigs, true);
+  if (configs_value) {
+    ExtractListOfUniqueLabels(*configs_value, scope->GetSourceDir(),
+                              ToolchainLabelForScope(scope),
+                              &config->configs(), err);
+  }
+  if (err->has_error())
+    return Value();
+
+  // Save the generated item.
+  Scope::ItemVector* collector = scope->GetItemCollector();
+  if (!collector) {
+    *err = Err(function, "Can't define a config in this context.");
+    return Value();
+  }
+  collector->push_back(config.release());
+
+  return Value();
+}
+
+// declare_args ----------------------------------------------------------------
+
+const char kDeclareArgs[] = "declare_args";
+const char kDeclareArgs_HelpShort[] =
+    "declare_args: Declare build arguments.";
+const char kDeclareArgs_Help[] =
+    "declare_args: Declare build arguments.\n"
+    "\n"
+    "  Introduces the given arguments into the current scope. If they are\n"
+    "  not specified on the command line or in a toolchain's arguments,\n"
+    "  the default values given in the declare_args block will be used.\n"
+    "  However, these defaults will not override command-line values.\n"
+    "\n"
+    "  See also \"gn help buildargs\" for an overview.\n"
+    "\n"
+    "  The precise behavior of declare args is:\n"
+    "\n"
+    "   1. The declare_arg block executes. Any variables in the enclosing\n"
+    "      scope are available for reading.\n"
+    "\n"
+    "   2. At the end of executing the block, any variables set within that\n"
+    "      scope are saved globally as build arguments, with their current\n"
+    "      values being saved as the \"default value\" for that argument.\n"
+    "\n"
+    "   3. User-defined overrides are applied. Anything set in \"gn args\"\n"
+    "      now overrides any default values. The resulting set of variables\n"
+    "      is promoted to be readable from the following code in the file.\n"
+    "\n"
+    "  This has some ramifications that may not be obvious:\n"
+    "\n"
+    "    - You should not perform difficult work inside a declare_args block\n"
+    "      since this only sets a default value that may be discarded. In\n"
+    "      particular, don't use the result of exec_script() to set the\n"
+    "      default value. If you want to have a script-defined default, set\n"
+    "      some default \"undefined\" value like [], \"\", or -1, and after\n"
+    "      the declare_args block, call exec_script if the value is unset by\n"
+    "      the user.\n"
+    "\n"
+    "    - Any code inside of the declare_args block will see the default\n"
+    "      values of previous variables defined in the block rather than\n"
+    "      the user-overridden value. This can be surprising because you will\n"
+    "      be used to seeing the overridden value. If you need to make the\n"
+    "      default value of one arg dependent on the possibly-overridden\n"
+    "      value of another, write two separate declare_args blocks:\n"
+    "\n"
+    "        declare_args() {\n"
+    "          enable_foo = true\n"
+    "        }\n"
+    "        declare_args() {\n"
+    "          # Bar defaults to same user-overridden state as foo.\n"
+    "          enable_bar = enable_foo\n"
+    "        }\n"
+    "\n"
+    "Example\n"
+    "\n"
+    "  declare_args() {\n"
+    "    enable_teleporter = true\n"
+    "    enable_doom_melon = false\n"
+    "  }\n"
+    "\n"
+    "  If you want to override the (default disabled) Doom Melon:\n"
+    "    gn --args=\"enable_doom_melon=true enable_teleporter=false\"\n"
+    "  This also sets the teleporter, but it's already defaulted to on so\n"
+    "  it will have no effect.\n";
+
+// Implements declare_args(): executes the block in a child scope, then
+// registers everything it set as build arguments (with user overrides
+// applied by Args::DeclareArgs).
+Value RunDeclareArgs(Scope* scope,
+                     const FunctionCallNode* function,
+                     const std::vector<Value>& args,
+                     BlockNode* block,
+                     Err* err) {
+  NonNestableBlock non_nestable(scope, function, "declare_args");
+  if (!non_nestable.Enter(err))
+    return Value();
+
+  Scope block_scope(scope);
+  block->Execute(&block_scope, err);
+  if (err->has_error())
+    return Value();
+
+  // Pass the values from our scope into the Args object for adding to the
+  // scope with the proper values (taking into account the defaults given in
+  // the block_scope, and arguments passed into the build).
+  Scope::KeyValueMap values;
+  block_scope.GetCurrentScopeValues(&values);
+  scope->settings()->build_settings()->build_args().DeclareArgs(
+      values, scope, err);
+  return Value();
+}
+
+// defined ---------------------------------------------------------------------
+
+const char kDefined[] = "defined";
+const char kDefined_HelpShort[] =
+    "defined: Returns whether an identifier is defined.";
+const char kDefined_Help[] =
+    "defined: Returns whether an identifier is defined.\n"
+    "\n"
+    "  Returns true if the given argument is defined. This is most useful in\n"
+    "  templates to assert that the caller set things up properly.\n"
+    "\n"
+    "  You can pass an identifier:\n"
+    "    defined(foo)\n"
+    "  which will return true or false depending on whether foo is defined in\n"
+    "  the current scope.\n"
+    "\n"
+    "  You can also check a named scope:\n"
+    "    defined(foo.bar)\n"
+    "  which will return true or false depending on whether bar is defined in\n"
+    "  the named scope foo. It will throw an error if foo is not defined or\n"
+    "  is not a scope.\n"
+    "\n"
+    "Example:\n"
+    "\n"
+    "  template(\"mytemplate\") {\n"
+    "    # To help users call this template properly...\n"
+    "    assert(defined(invoker.sources), \"Sources must be defined\")\n"
+    "\n"
+    "    # If we want to accept an optional \"values\" argument, we don't\n"
+    "    # want to dereference something that may not be defined.\n"
+    "    if (defined(invoker.values)) {\n"
+    "      values = invoker.values\n"
+    "    } else {\n"
+    "      values = \"some default value\"\n"
+    "    }\n"
+    "  }\n";
+
+// Implements defined(). This is a self-evaluating-args function: it receives
+// the raw (unevaluated) argument list so it can inspect the identifier or
+// accessor node itself rather than its value.
+Value RunDefined(Scope* scope,
+                 const FunctionCallNode* function,
+                 const ListNode* args_list,
+                 Err* err) {
+  const std::vector<const ParseNode*>& args_vector = args_list->contents();
+  if (args_vector.size() != 1) {
+    *err = Err(function, "Wrong number of arguments to defined().",
+               "Expecting exactly one.");
+    return Value();
+  }
+
+  const IdentifierNode* identifier = args_vector[0]->AsIdentifier();
+  if (identifier) {
+    // Passed an identifier "defined(foo)".
+    if (scope->GetValue(identifier->value().value()))
+      return Value(function, true);
+    return Value(function, false);
+  }
+
+  const AccessorNode* accessor = args_vector[0]->AsAccessor();
+  if (accessor) {
+    // Passed an accessor "defined(foo.bar)".
+    if (accessor->member()) {
+      // The base of the accessor must be a scope if it's defined.
+      const Value* base = scope->GetValue(accessor->base().value());
+      if (!base) {
+        *err = Err(accessor, "Undefined identifier");
+        return Value();
+      }
+      if (!base->VerifyTypeIs(Value::SCOPE, err))
+        return Value();
+
+      // Check the member inside the scope to see if it's defined.
+      if (base->scope_value()->GetValue(accessor->member()->value().value()))
+        return Value(function, true);
+      return Value(function, false);
+    }
+  }
+
+  // Argument is invalid.
+  *err = Err(function, "Bad thing passed to defined().",
+             "It should be of the form defined(foo) or defined(foo.bar).");
+  return Value();
+}
+
+// getenv ----------------------------------------------------------------------
+
+const char kGetEnv[] = "getenv";
+const char kGetEnv_HelpShort[] =
+    "getenv: Get an environment variable.";
+const char kGetEnv_Help[] =
+    "getenv: Get an environment variable.\n"
+    "\n"
+    "  value = getenv(env_var_name)\n"
+    "\n"
+    // Fixed typo in user-visible help: "enironment" -> "environment".
+    "  Returns the value of the given environment variable. If the value is\n"
+    "  not found, it will try to look up the variable with the \"opposite\"\n"
+    "  case (based on the case of the first letter of the variable), but\n"
+    "  is otherwise case-sensitive.\n"
+    "\n"
+    "  If the environment variable is not found, the empty string will be\n"
+    "  returned. Note: it might be nice to extend this if we had the concept\n"
+    "  of \"none\" in the language to indicate lookup failure.\n"
+    "\n"
+    "Example:\n"
+    "\n"
+    "  home_dir = getenv(\"HOME\")\n";
+
+// Implements getenv(): returns the named environment variable's value, or
+// the empty string when it is unset. The case-fallback behavior described in
+// the help is provided by base::Environment::GetVar.
+Value RunGetEnv(Scope* scope,
+                const FunctionCallNode* function,
+                const std::vector<Value>& args,
+                Err* err) {
+  if (!EnsureSingleStringArg(function, args, err))
+    return Value();
+
+  std::unique_ptr<base::Environment> env(base::Environment::Create());
+
+  std::string result;
+  if (!env->GetVar(args[0].string_value().c_str(), &result))
+    return Value(function, "");  // Not found, return empty string.
+  return Value(function, result);
+}
+
+// import ----------------------------------------------------------------------
+
+const char kImport[] = "import";
+const char kImport_HelpShort[] =
+    "import: Import a file into the current scope.";
+const char kImport_Help[] =
+    "import: Import a file into the current scope.\n"
+    "\n"
+    "  The import command loads the rules and variables resulting from\n"
+    "  executing the given file into the current scope.\n"
+    "\n"
+    "  By convention, imported files are named with a .gni extension.\n"
+    "\n"
+    "  An import is different than a C++ \"include\". The imported file is\n"
+    "  executed in a standalone environment from the caller of the import\n"
+    "  command. The results of this execution are cached for other files that\n"
+    "  import the same .gni file.\n"
+    "\n"
+    "  Note that you can not import a BUILD.gn file that's otherwise used\n"
+    "  in the build. Files must either be imported or implicitly loaded as\n"
+    "  a result of deps rules, but not both.\n"
+    "\n"
+    "  The imported file's scope will be merged with the scope at the point\n"
+    "  import was called. If there is a conflict (both the current scope and\n"
+    "  the imported file define some variable or rule with the same name but\n"
+    "  different value), a runtime error will be thrown. Therefore, it's good\n"
+    "  practice to minimize the stuff that an imported file defines.\n"
+    "\n"
+    "  Variables and templates beginning with an underscore '_' are\n"
+    "  considered private and will not be imported. Imported files can use\n"
+    "  such variables for internal computation without affecting other files.\n"
+    "\n"
+    "Examples:\n"
+    "\n"
+    "  import(\"//build/rules/idl_compilation_rule.gni\")\n"
+    "\n"
+    "  # Looks in the current directory.\n"
+    "  import(\"my_vars.gni\")\n";
+
+// Implements import(): resolves the file name relative to the current source
+// dir and delegates to the ImportManager, which executes and caches the file.
+Value RunImport(Scope* scope,
+                const FunctionCallNode* function,
+                const std::vector<Value>& args,
+                Err* err) {
+  if (!EnsureSingleStringArg(function, args, err))
+    return Value();
+
+  const SourceDir& input_dir = scope->GetSourceDir();
+  SourceFile import_file =
+      input_dir.ResolveRelativeFile(args[0], err,
+          scope->settings()->build_settings()->root_path_utf8());
+  if (!err->has_error()) {
+    scope->settings()->import_manager().DoImport(import_file, function,
+                                                 scope, err);
+  }
+  return Value();
+}
+
+// set_sources_assignment_filter -----------------------------------------------
+
+const char kSetSourcesAssignmentFilter[] = "set_sources_assignment_filter";
+const char kSetSourcesAssignmentFilter_HelpShort[] =
+    "set_sources_assignment_filter: Set a pattern to filter source files.";
+const char kSetSourcesAssignmentFilter_Help[] =
+    "set_sources_assignment_filter: Set a pattern to filter source files.\n"
+    "\n"
+    "  The sources assignment filter is a list of patterns that remove files\n"
+    "  from the list implicitly whenever the \"sources\" variable is\n"
+    "  assigned to. This is intended to be used to globally filter out files\n"
+    "  with platform-specific naming schemes when they don't apply, for\n"
+    "  example, you may want to filter out all \"*_win.cc\" files on non-\n"
+    "  Windows platforms.\n"
+    "\n"
+    "  Typically this will be called once in the master build config script\n"
+    "  to set up the filter for the current platform. Subsequent calls will\n"
+    "  overwrite the previous values.\n"
+    "\n"
+    "  If you want to bypass the filter and add a file even if it might\n"
+    "  be filtered out, call set_sources_assignment_filter([]) to clear the\n"
+    "  list of filters. This will apply until the current scope exits\n"
+    "\n"
+    "How to use patterns\n"
+    "\n"
+    "  File patterns are VERY limited regular expressions. They must match\n"
+    "  the entire input string to be counted as a match. In regular\n"
+    "  expression parlance, there is an implicit \"^...$\" surrounding your\n"
+    "  input. If you want to match a substring, you need to use wildcards at\n"
+    "  the beginning and end.\n"
+    "\n"
+    "  There are only two special tokens understood by the pattern matcher.\n"
+    "  Everything else is a literal.\n"
+    "\n"
+    "   * Matches zero or more of any character. It does not depend on the\n"
+    "     preceding character (in regular expression parlance it is\n"
+    "     equivalent to \".*\").\n"
+    "\n"
+    "   \\b Matches a path boundary. This will match the beginning or end of\n"
+    "      a string, or a slash.\n"
+    "\n"
+    "Pattern examples\n"
+    "\n"
+    "  \"*asdf*\"\n"
+    "      Matches a string containing \"asdf\" anywhere.\n"
+    "\n"
+    "  \"asdf\"\n"
+    "      Matches only the exact string \"asdf\".\n"
+    "\n"
+    "  \"*.cc\"\n"
+    "      Matches strings ending in the literal \".cc\".\n"
+    "\n"
+    "  \"\\bwin/*\"\n"
+    "      Matches \"win/foo\" and \"foo/win/bar.cc\" but not \"iwin/foo\".\n"
+    "\n"
+    "Sources assignment example\n"
+    "\n"
+    "  # Filter out all _win files.\n"
+    "  set_sources_assignment_filter([ \"*_win.cc\", \"*_win.h\" ])\n"
+    "  sources = [ \"a.cc\", \"b_win.cc\" ]\n"
+    "  print(sources)\n"
+    "  # Will print [ \"a.cc\" ]. b_win one was filtered out.\n";
+
+// Implements set_sources_assignment_filter(): parses the single list argument
+// into a PatternList and installs it on the current scope, replacing any
+// previously installed filter.
+Value RunSetSourcesAssignmentFilter(Scope* scope,
+                                    const FunctionCallNode* function,
+                                    const std::vector<Value>& args,
+                                    Err* err) {
+  if (args.size() != 1) {
+    *err = Err(function, "set_sources_assignment_filter takes one argument.");
+  } else {
+    std::unique_ptr<PatternList> f(new PatternList);
+    f->SetFromValue(args[0], err);
+    if (!err->has_error())
+      scope->set_sources_assignment_filter(std::move(f));
+  }
+  return Value();
+}
+
+// print -----------------------------------------------------------------------
+
+const char kPrint[] = "print";
+const char kPrint_HelpShort[] =
+    "print: Prints to the console.";
+const char kPrint_Help[] =
+    "print: Prints to the console.\n"
+    "\n"
+    "  Prints all arguments to the console separated by spaces. A newline is\n"
+    "  automatically appended to the end.\n"
+    "\n"
+    "  This function is intended for debugging. Note that build files are run\n"
+    "  in parallel so you may get interleaved prints. A buildfile may also\n"
+    "  be executed more than once in parallel in the context of different\n"
+    "  toolchains so the prints from one file may be duplicated or\n"
+    "  interleaved with itself.\n"
+    "\n"
+    "Examples:\n"
+    "  print(\"Hello world\")\n"
+    "\n"
+    "  print(sources, deps)\n";
+
+// Implements print(): stringifies the arguments, joins them with spaces,
+// appends a newline, and routes the result either to the build settings'
+// print callback (when set, e.g. by tests) or to stdout.
+Value RunPrint(Scope* scope,
+               const FunctionCallNode* function,
+               const std::vector<Value>& args,
+               Err* err) {
+  std::string output;
+  for (size_t i = 0; i < args.size(); i++) {
+    if (i != 0)
+      output.push_back(' ');
+    output.append(args[i].ToString(false));
+  }
+  output.push_back('\n');
+
+  const BuildSettings::PrintCallback& cb =
+      scope->settings()->build_settings()->print_callback();
+  if (cb.is_null())
+    printf("%s", output.c_str());
+  else
+    cb.Run(output);
+
+  return Value();
+}
+
+// -----------------------------------------------------------------------------
+
+// Default constructor: no runner and no help set. Exists so FunctionInfo can
+// be stored in the FunctionInfoMap.
+FunctionInfo::FunctionInfo()
+    : self_evaluating_args_runner(nullptr),
+      generic_block_runner(nullptr),
+      executed_block_runner(nullptr),
+      no_block_runner(nullptr),
+      help_short(nullptr),
+      help(nullptr),
+      is_target(false) {
+}
+
+// Constructor for functions that evaluate their own argument list (e.g.
+// defined(), foreach()). All other runner pointers stay null.
+FunctionInfo::FunctionInfo(SelfEvaluatingArgsFunction seaf,
+                           const char* in_help_short,
+                           const char* in_help,
+                           bool in_is_target)
+    : self_evaluating_args_runner(seaf),
+      generic_block_runner(nullptr),
+      executed_block_runner(nullptr),
+      no_block_runner(nullptr),
+      help_short(in_help_short),
+      help(in_help),
+      is_target(in_is_target) {
+}
+
+// Constructor for functions that receive and execute the {} block
+// themselves (e.g. declare_args(), target functions).
+FunctionInfo::FunctionInfo(GenericBlockFunction gbf,
+                           const char* in_help_short,
+                           const char* in_help,
+                           bool in_is_target)
+    : self_evaluating_args_runner(nullptr),
+      generic_block_runner(gbf),
+      executed_block_runner(nullptr),
+      no_block_runner(nullptr),
+      help_short(in_help_short),
+      help(in_help),
+      is_target(in_is_target) {
+}
+
+// Constructor for functions whose block is pre-executed by RunFunction into
+// a scope that is then passed in (e.g. config()).
+FunctionInfo::FunctionInfo(ExecutedBlockFunction ebf,
+                           const char* in_help_short,
+                           const char* in_help,
+                           bool in_is_target)
+    : self_evaluating_args_runner(nullptr),
+      generic_block_runner(nullptr),
+      executed_block_runner(ebf),
+      no_block_runner(nullptr),
+      help_short(in_help_short),
+      help(in_help),
+      is_target(in_is_target) {
+}
+
+// Constructor for block-less functions (e.g. print(), import()).
+FunctionInfo::FunctionInfo(NoBlockFunction nbf,
+                           const char* in_help_short,
+                           const char* in_help,
+                           bool in_is_target)
+    : self_evaluating_args_runner(nullptr),
+      generic_block_runner(nullptr),
+      executed_block_runner(nullptr),
+      no_block_runner(nbf),
+      help_short(in_help_short),
+      help(in_help),
+      is_target(in_is_target) {
+}
+
+// Setup the function map via a static initializer. We use this because it
+// avoids race conditions without having to do some global setup function or
+// locking-heavy singleton checks at runtime. In practice, we always need this
+// before we can do anything interesting, so it's OK to wait for the
+// initializer.
+struct FunctionInfoInitializer {
+  FunctionInfoMap map;
+
+  FunctionInfoInitializer() {
+    // Registers the Run<command> function together with its help strings.
+    // The second argument marks functions that declare targets.
+    #define INSERT_FUNCTION(command, is_target) \
+        map[k##command] = FunctionInfo(&Run##command, \
+                                       k##command##_HelpShort, \
+                                       k##command##_Help, \
+                                       is_target);
+
+    INSERT_FUNCTION(Action, true)
+    INSERT_FUNCTION(ActionForEach, true)
+    INSERT_FUNCTION(BundleData, true)
+    INSERT_FUNCTION(CreateBundle, true)
+    INSERT_FUNCTION(Copy, true)
+    INSERT_FUNCTION(Executable, true)
+    INSERT_FUNCTION(Group, true)
+    INSERT_FUNCTION(LoadableModule, true)
+    INSERT_FUNCTION(SharedLibrary, true)
+    INSERT_FUNCTION(SourceSet, true)
+    INSERT_FUNCTION(StaticLibrary, true)
+    INSERT_FUNCTION(Target, true)
+
+    INSERT_FUNCTION(Assert, false)
+    INSERT_FUNCTION(Config, false)
+    INSERT_FUNCTION(DeclareArgs, false)
+    INSERT_FUNCTION(Defined, false)
+    INSERT_FUNCTION(ExecScript, false)
+    INSERT_FUNCTION(ForEach, false)
+    INSERT_FUNCTION(ForwardVariablesFrom, false)
+    INSERT_FUNCTION(GetEnv, false)
+    INSERT_FUNCTION(GetLabelInfo, false)
+    INSERT_FUNCTION(GetPathInfo, false)
+    INSERT_FUNCTION(GetTargetOutputs, false)
+    INSERT_FUNCTION(Import, false)
+    INSERT_FUNCTION(Print, false)
+    INSERT_FUNCTION(ProcessFileTemplate, false)
+    INSERT_FUNCTION(ReadFile, false)
+    INSERT_FUNCTION(RebasePath, false)
+    INSERT_FUNCTION(SetDefaults, false)
+    INSERT_FUNCTION(SetDefaultToolchain, false)
+    INSERT_FUNCTION(SetSourcesAssignmentFilter, false)
+    INSERT_FUNCTION(Template, false)
+    INSERT_FUNCTION(Tool, false)
+    INSERT_FUNCTION(Toolchain, false)
+    INSERT_FUNCTION(ToolchainArgs, false)
+    INSERT_FUNCTION(WriteFile, false)
+
+    #undef INSERT_FUNCTION
+  }
+};
+// The single static instance holding the built-in function table.
+const FunctionInfoInitializer function_info;
+
+// Returns the map of all built-in function names to their FunctionInfo.
+const FunctionInfoMap& GetFunctions() {
+  return function_info.map;
+}
+
+// Dispatches a function call to the matching built-in (by runner kind) or,
+// failing that, to a user-defined template. |block| may be null when the call
+// has no {} following it. Sets |err| and returns an empty Value on failure.
+Value RunFunction(Scope* scope,
+                  const FunctionCallNode* function,
+                  const ListNode* args_list,
+                  BlockNode* block,
+                  Err* err) {
+  const Token& name = function->function();
+
+  const FunctionInfoMap& function_map = GetFunctions();
+  FunctionInfoMap::const_iterator found_function =
+      function_map.find(name.value());
+  if (found_function == function_map.end()) {
+    // No built-in function matching this, check for a template.
+    const Template* templ =
+        scope->GetTemplate(function->function().value().as_string());
+    if (templ) {
+      Value args = args_list->Execute(scope, err);
+      if (err->has_error())
+        return Value();
+      return templ->Invoke(scope, function, args.list_value(), block, err);
+    }
+
+    *err = Err(name, "Unknown function.");
+    return Value();
+  }
+
+  if (found_function->second.self_evaluating_args_runner) {
+    // Self evaluating args functions are special weird built-ins like foreach.
+    // Rather than force them all to check that they have a block or no block
+    // and risk bugs for new additions, check a whitelist here.
+    if (found_function->second.self_evaluating_args_runner != &RunForEach) {
+      if (!VerifyNoBlockForFunctionCall(function, block, err))
+        return Value();
+    }
+    return found_function->second.self_evaluating_args_runner(
+        scope, function, args_list, err);
+  }
+
+  // All other function types take a pre-executed set of args.
+  Value args = args_list->Execute(scope, err);
+  if (err->has_error())
+    return Value();
+
+  if (found_function->second.generic_block_runner) {
+    if (!block) {
+      FillNeedsBlockError(function, err);
+      return Value();
+    }
+    return found_function->second.generic_block_runner(
+        scope, function, args.list_value(), block, err);
+  }
+
+  if (found_function->second.executed_block_runner) {
+    if (!block) {
+      FillNeedsBlockError(function, err);
+      return Value();
+    }
+
+    // Pre-execute the block into its own scope, then hand that scope to the
+    // function (see ExecutedBlockFunction in functions.h).
+    Scope block_scope(scope);
+    block->Execute(&block_scope, err);
+    if (err->has_error())
+      return Value();
+
+    Value result = found_function->second.executed_block_runner(
+        function, args.list_value(), &block_scope, err);
+    if (err->has_error())
+      return Value();
+
+    // Report variables the block set but nothing consumed.
+    if (!block_scope.CheckForUnusedVars(err))
+      return Value();
+    return result;
+  }
+
+  // Otherwise it's a no-block function.
+  if (!VerifyNoBlockForFunctionCall(function, block, err))
+    return Value();
+  return found_function->second.no_block_runner(scope, function,
+                                                args.list_value(), err);
+}
+
+} // namespace functions
diff --git a/chromium/tools/gn/functions.h b/chromium/tools/gn/functions.h
new file mode 100644
index 00000000000..3ce945e39fe
--- /dev/null
+++ b/chromium/tools/gn/functions.h
@@ -0,0 +1,488 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_FUNCTIONS_H_
+#define TOOLS_GN_FUNCTIONS_H_
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "base/strings/string_piece.h"
+
+class Err;
+class BlockNode;
+class FunctionCallNode;
+class Label;
+class ListNode;
+class ParseNode;
+class Scope;
+class Token;
+class Value;
+
+// -----------------------------------------------------------------------------
+
+namespace functions {
+
+// This type of function invocation has no block and evaluates its arguments
+// itself rather than taking a pre-executed list. This allows us to implement
+// certain built-in functions.
+typedef Value (*SelfEvaluatingArgsFunction)(Scope* scope,
+ const FunctionCallNode* function,
+ const ListNode* args_list,
+ Err* err);
+
+// This type of function invocation takes a block node that it will execute.
+typedef Value (*GenericBlockFunction)(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+// This type of function takes a block, but does not need to control execution
+// of it. The dispatch function will pre-execute the block and pass the
+// resulting block_scope to the function.
+typedef Value(*ExecutedBlockFunction)(const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Scope* block_scope,
+ Err* err);
+
+// This type of function does not take a block. It just has arguments.
+typedef Value (*NoBlockFunction)(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+extern const char kAction[];
+extern const char kAction_HelpShort[];
+extern const char kAction_Help[];
+Value RunAction(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kActionForEach[];
+extern const char kActionForEach_HelpShort[];
+extern const char kActionForEach_Help[];
+Value RunActionForEach(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kAssert[];
+extern const char kAssert_HelpShort[];
+extern const char kAssert_Help[];
+Value RunAssert(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+extern const char kBundleData[];
+extern const char kBundleData_HelpShort[];
+extern const char kBundleData_Help[];
+Value RunBundleData(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kCreateBundle[];
+extern const char kCreateBundle_HelpShort[];
+extern const char kCreateBundle_Help[];
+Value RunCreateBundle(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kConfig[];
+extern const char kConfig_HelpShort[];
+extern const char kConfig_Help[];
+Value RunConfig(const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Scope* block_scope,
+ Err* err);
+
+extern const char kCopy[];
+extern const char kCopy_HelpShort[];
+extern const char kCopy_Help[];
+Value RunCopy(const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Scope* block_scope,
+ Err* err);
+
+extern const char kDeclareArgs[];
+extern const char kDeclareArgs_HelpShort[];
+extern const char kDeclareArgs_Help[];
+Value RunDeclareArgs(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kDefined[];
+extern const char kDefined_HelpShort[];
+extern const char kDefined_Help[];
+Value RunDefined(Scope* scope,
+ const FunctionCallNode* function,
+ const ListNode* args_list,
+ Err* err);
+
+extern const char kExecScript[];
+extern const char kExecScript_HelpShort[];
+extern const char kExecScript_Help[];
+Value RunExecScript(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+extern const char kExecutable[];
+extern const char kExecutable_HelpShort[];
+extern const char kExecutable_Help[];
+Value RunExecutable(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kForEach[];
+extern const char kForEach_HelpShort[];
+extern const char kForEach_Help[];
+Value RunForEach(Scope* scope,
+ const FunctionCallNode* function,
+ const ListNode* args_list,
+ Err* err);
+
+extern const char kForwardVariablesFrom[];
+extern const char kForwardVariablesFrom_HelpShort[];
+extern const char kForwardVariablesFrom_Help[];
+Value RunForwardVariablesFrom(Scope* scope,
+ const FunctionCallNode* function,
+ const ListNode* args_list,
+ Err* err);
+
+extern const char kGetEnv[];
+extern const char kGetEnv_HelpShort[];
+extern const char kGetEnv_Help[];
+Value RunGetEnv(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+extern const char kGetLabelInfo[];
+extern const char kGetLabelInfo_HelpShort[];
+extern const char kGetLabelInfo_Help[];
+Value RunGetLabelInfo(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+extern const char kGetPathInfo[];
+extern const char kGetPathInfo_HelpShort[];
+extern const char kGetPathInfo_Help[];
+Value RunGetPathInfo(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+extern const char kGetTargetOutputs[];
+extern const char kGetTargetOutputs_HelpShort[];
+extern const char kGetTargetOutputs_Help[];
+Value RunGetTargetOutputs(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+extern const char kGroup[];
+extern const char kGroup_HelpShort[];
+extern const char kGroup_Help[];
+Value RunGroup(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kImport[];
+extern const char kImport_HelpShort[];
+extern const char kImport_Help[];
+Value RunImport(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+extern const char kLoadableModule[];
+extern const char kLoadableModule_HelpShort[];
+extern const char kLoadableModule_Help[];
+Value RunLoadableModule(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kPrint[];
+extern const char kPrint_HelpShort[];
+extern const char kPrint_Help[];
+Value RunPrint(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+extern const char kProcessFileTemplate[];
+extern const char kProcessFileTemplate_HelpShort[];
+extern const char kProcessFileTemplate_Help[];
+Value RunProcessFileTemplate(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+extern const char kReadFile[];
+extern const char kReadFile_HelpShort[];
+extern const char kReadFile_Help[];
+Value RunReadFile(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+extern const char kRebasePath[];
+extern const char kRebasePath_HelpShort[];
+extern const char kRebasePath_Help[];
+Value RunRebasePath(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+extern const char kSetDefaults[];
+extern const char kSetDefaults_HelpShort[];
+extern const char kSetDefaults_Help[];
+Value RunSetDefaults(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kSetDefaultToolchain[];
+extern const char kSetDefaultToolchain_HelpShort[];
+extern const char kSetDefaultToolchain_Help[];
+Value RunSetDefaultToolchain(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+extern const char kSetSourcesAssignmentFilter[];
+extern const char kSetSourcesAssignmentFilter_HelpShort[];
+extern const char kSetSourcesAssignmentFilter_Help[];
+Value RunSetSourcesAssignmentFilter(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+extern const char kSharedLibrary[];
+extern const char kSharedLibrary_HelpShort[];
+extern const char kSharedLibrary_Help[];
+Value RunSharedLibrary(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kSourceSet[];
+extern const char kSourceSet_HelpShort[];
+extern const char kSourceSet_Help[];
+Value RunSourceSet(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kStaticLibrary[];
+extern const char kStaticLibrary_HelpShort[];
+extern const char kStaticLibrary_Help[];
+Value RunStaticLibrary(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kTarget[];
+extern const char kTarget_HelpShort[];
+extern const char kTarget_Help[];
+Value RunTarget(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kTemplate[];
+extern const char kTemplate_HelpShort[];
+extern const char kTemplate_Help[];
+Value RunTemplate(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kTool[];
+extern const char kTool_HelpShort[];
+extern const char kTool_Help[];
+Value RunTool(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kToolchain[];
+extern const char kToolchain_HelpShort[];
+extern const char kToolchain_Help[];
+Value RunToolchain(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kToolchainArgs[];
+extern const char kToolchainArgs_HelpShort[];
+extern const char kToolchainArgs_Help[];
+Value RunToolchainArgs(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err);
+
+extern const char kWriteFile[];
+extern const char kWriteFile_HelpShort[];
+extern const char kWriteFile_Help[];
+Value RunWriteFile(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+// -----------------------------------------------------------------------------
+
+// One function record. Only one of the given runner types will be non-null
+// which indicates the type of function it is.
+struct FunctionInfo {
+ FunctionInfo();
+ FunctionInfo(SelfEvaluatingArgsFunction seaf,
+ const char* in_help_short,
+ const char* in_help,
+ bool in_is_target);
+ FunctionInfo(GenericBlockFunction gbf,
+ const char* in_help_short,
+ const char* in_help,
+ bool in_is_target);
+ FunctionInfo(ExecutedBlockFunction ebf,
+ const char* in_help_short,
+ const char* in_help,
+ bool in_is_target);
+ FunctionInfo(NoBlockFunction nbf,
+ const char* in_help_short,
+ const char* in_help,
+ bool in_is_target);
+
+ SelfEvaluatingArgsFunction self_evaluating_args_runner;
+ GenericBlockFunction generic_block_runner;
+ ExecutedBlockFunction executed_block_runner;
+ NoBlockFunction no_block_runner;
+
+ const char* help_short;
+ const char* help;
+
+ bool is_target;
+};
+
+typedef std::map<base::StringPiece, FunctionInfo> FunctionInfoMap;
+
+// Returns the mapping of all built-in functions.
+const FunctionInfoMap& GetFunctions();
+
+// Runs the given function.
+Value RunFunction(Scope* scope,
+ const FunctionCallNode* function,
+ const ListNode* args_list,
+ BlockNode* block, // Optional.
+ Err* err);
+
+} // namespace functions
+
+// Helper functions -----------------------------------------------------------
+
+// Verifies that the current scope is not processing an import. If it is, it
+// will set the error, blame the given parse node for it, and return false.
+bool EnsureNotProcessingImport(const ParseNode* node,
+ const Scope* scope,
+ Err* err);
+
+// Like EnsureNotProcessingImport but checks for running the build config.
+bool EnsureNotProcessingBuildConfig(const ParseNode* node,
+ const Scope* scope,
+ Err* err);
+
+// Sets up the |block_scope| for executing a target (or something like it).
+// The |scope| is the containing scope. It should have been already set as the
+// parent for the |block_scope| when the |block_scope| was created.
+//
+// This will set up the target defaults and set the |target_name| variable in
+// the block scope to the current target name, which is assumed to be the first
+// argument to the function.
+//
+// On success, returns true. On failure, sets the error and returns false.
+bool FillTargetBlockScope(const Scope* scope,
+ const FunctionCallNode* function,
+ const std::string& target_type,
+ const BlockNode* block,
+ const std::vector<Value>& args,
+ Scope* block_scope,
+ Err* err);
+
+// Sets the given error to a message explaining that the function call requires
+// a block.
+void FillNeedsBlockError(const FunctionCallNode* function, Err* err);
+
+// Validates that the given function call has one string argument. This is
+// the most common function signature, so it saves space to have this helper.
+// Returns false and sets the error on failure.
+bool EnsureSingleStringArg(const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Err* err);
+
+// Returns the name of the toolchain for the given scope.
+const Label& ToolchainLabelForScope(const Scope* scope);
+
+// Generates a label for the given scope, using the current directory and
+// toolchain, and the given name.
+Label MakeLabelForScope(const Scope* scope,
+ const FunctionCallNode* function,
+ const std::string& name);
+
+// Some types of blocks can't be nested inside other ones. For such cases,
+// instantiate this object upon entering the block and Enter() will fail if
+// there is already another non-nestable block on the stack.
+class NonNestableBlock {
+ public:
+ // type_description is a string that will be used in error messages
+ // describing the type of the block, for example, "template" or "config".
+ NonNestableBlock(Scope* scope,
+ const FunctionCallNode* function,
+ const char* type_description);
+ ~NonNestableBlock();
+
+ bool Enter(Err* err);
+
+ private:
+ // Used as a void* key for the Scope to track our property. The actual value
+ // is never used.
+ static const int kKey;
+
+ Scope* scope_;
+ const FunctionCallNode* function_;
+ const char* type_description_;
+
+ // Set to true when the key is added to the scope so we don't try to
+  // delete nonexistent keys which will cause assertions.
+ bool key_added_;
+};
+
+#endif // TOOLS_GN_FUNCTIONS_H_
diff --git a/chromium/tools/gn/functions_target.cc b/chromium/tools/gn/functions_target.cc
new file mode 100644
index 00000000000..7ec5a50dd26
--- /dev/null
+++ b/chromium/tools/gn/functions_target.cc
@@ -0,0 +1,734 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/functions.h"
+
+#include "tools/gn/config_values_generator.h"
+#include "tools/gn/err.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/target_generator.h"
+#include "tools/gn/template.h"
+#include "tools/gn/value.h"
+#include "tools/gn/variables.h"
+
+#define DEPENDENT_CONFIG_VARS \
+ " Dependent configs: all_dependent_configs, public_configs\n"
+#define DEPS_VARS \
+ " Deps: data_deps, deps, public_deps\n"
+#define GENERAL_TARGET_VARS \
+ " General: check_includes, configs, data, inputs, output_name,\n" \
+ " output_extension, public, sources, testonly, visibility\n"
+
+namespace functions {
+
+namespace {
+
+Value ExecuteGenericTarget(const char* target_type,
+ Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) {
+ NonNestableBlock non_nestable(scope, function, "target");
+ if (!non_nestable.Enter(err))
+ return Value();
+
+ if (!EnsureNotProcessingImport(function, scope, err) ||
+ !EnsureNotProcessingBuildConfig(function, scope, err))
+ return Value();
+ Scope block_scope(scope);
+ if (!FillTargetBlockScope(scope, function, target_type, block,
+ args, &block_scope, err))
+ return Value();
+
+ block->Execute(&block_scope, err);
+ if (err->has_error())
+ return Value();
+
+ TargetGenerator::GenerateTarget(&block_scope, function, args,
+ target_type, err);
+ if (err->has_error())
+ return Value();
+
+ block_scope.CheckForUnusedVars(err);
+ return Value();
+}
+
+} // namespace
+
+// action ----------------------------------------------------------------------
+
+// Common help paragraph on script runtime execution directories.
+#define SCRIPT_EXECUTION_CONTEXT \
+ " The script will be executed with the given arguments with the current\n"\
+ " directory being that of the root build directory. If you pass files\n"\
+ " to your script, see \"gn help rebase_path\" for how to convert\n" \
+ " file names to be relative to the build directory (file names in the\n" \
+ " sources, outputs, and inputs will be all treated as relative to the\n" \
+ " current build file and converted as needed automatically).\n"
+
+// Common help paragraph on script output directories.
+#define SCRIPT_EXECUTION_OUTPUTS \
+ " All output files must be inside the output directory of the build.\n" \
+ " You would generally use |$target_out_dir| or |$target_gen_dir| to\n" \
+ " reference the output or generated intermediate file directories,\n" \
+ " respectively.\n"
+
+#define ACTION_DEPS \
+ " The \"deps\" and \"public_deps\" for an action will always be\n" \
+ " completed before any part of the action is run so it can depend on\n" \
+ " the output of previous steps. The \"data_deps\" will be built if the\n" \
+ " action is built, but may not have completed before all steps of the\n" \
+ " action are started. This can give additional parallelism in the build\n"\
+ " for runtime-only dependencies.\n"
+
+const char kAction[] = "action";
+const char kAction_HelpShort[] =
+ "action: Declare a target that runs a script a single time.";
+const char kAction_Help[] =
+ "action: Declare a target that runs a script a single time.\n"
+ "\n"
+ " This target type allows you to run a script a single time to produce\n"
+    " one or more output files. If you want to run a script once for each of a\n"
+ " set of input files, see \"gn help action_foreach\".\n"
+ "\n"
+ "Inputs\n"
+ "\n"
+ " In an action the \"sources\" and \"inputs\" are treated the same:\n"
+ " they're both input dependencies on script execution with no special\n"
+ " handling. If you want to pass the sources to your script, you must do\n"
+ " so explicitly by including them in the \"args\". Note also that this\n"
+ " means there is no special handling of paths since GN doesn't know\n"
+ " which of the args are paths and not. You will want to use\n"
+ " rebase_path() to convert paths to be relative to the root_build_dir.\n"
+ "\n"
+ " You can dynamically write input dependencies (for incremental rebuilds\n"
+ " if an input file changes) by writing a depfile when the script is run\n"
+ " (see \"gn help depfile\"). This is more flexible than \"inputs\".\n"
+ "\n"
+ " If the command line length is very long, you can use response files\n"
+ " to pass args to your script. See \"gn help response_file_contents\".\n"
+ "\n"
+ " It is recommended you put inputs to your script in the \"sources\"\n"
+ " variable, and stuff like other Python files required to run your\n"
+ " script in the \"inputs\" variable.\n"
+ "\n"
+ ACTION_DEPS
+ "\n"
+ "Outputs\n"
+ "\n"
+ " You should specify files created by your script by specifying them in\n"
+ " the \"outputs\".\n"
+ "\n"
+ SCRIPT_EXECUTION_CONTEXT
+ "\n"
+ "File name handling\n"
+ "\n"
+ SCRIPT_EXECUTION_OUTPUTS
+ "\n"
+ "Variables\n"
+ "\n"
+ " args, console, data, data_deps, depfile, deps, inputs, outputs*,\n"
+ " response_file_contents, script*, sources\n"
+ " * = required\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " action(\"run_this_guy_once\") {\n"
+ " script = \"doprocessing.py\"\n"
+ " sources = [ \"my_configuration.txt\" ]\n"
+ " outputs = [ \"$target_gen_dir/insightful_output.txt\" ]\n"
+ "\n"
+ " # Our script imports this Python file so we want to rebuild if it\n"
+ " # changes.\n"
+ " inputs = [ \"helper_library.py\" ]\n"
+ "\n"
+ " # Note that we have to manually pass the sources to our script if\n"
+ " # the script needs them as inputs.\n"
+ " args = [ \"--out\", rebase_path(target_gen_dir, root_build_dir) ] +\n"
+ " rebase_path(sources, root_build_dir)\n"
+ " }\n";
+
+Value RunAction(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) {
+ return ExecuteGenericTarget(functions::kAction, scope, function, args,
+ block, err);
+}
+
+// action_foreach --------------------------------------------------------------
+
+const char kActionForEach[] = "action_foreach";
+const char kActionForEach_HelpShort[] =
+ "action_foreach: Declare a target that runs a script over a set of files.";
+const char kActionForEach_Help[] =
+ "action_foreach: Declare a target that runs a script over a set of files.\n"
+ "\n"
+ " This target type allows you to run a script once-per-file over a set\n"
+ " of sources. If you want to run a script once that takes many files as\n"
+ " input, see \"gn help action\".\n"
+ "\n"
+ "Inputs\n"
+ "\n"
+ " The script will be run once per file in the \"sources\" variable. The\n"
+ " \"outputs\" variable should specify one or more files with a source\n"
+ " expansion pattern in it (see \"gn help source_expansion\"). The output\n"
+ " file(s) for each script invocation should be unique. Normally you\n"
+ " use \"{{source_name_part}}\" in each output file.\n"
+ "\n"
+ " If your script takes additional data as input, such as a shared\n"
+ " configuration file or a Python module it uses, those files should be\n"
+ " listed in the \"inputs\" variable. These files are treated as\n"
+ " dependencies of each script invocation.\n"
+ "\n"
+ " If the command line length is very long, you can use response files\n"
+ " to pass args to your script. See \"gn help response_file_contents\".\n"
+ "\n"
+ " You can dynamically write input dependencies (for incremental rebuilds\n"
+ " if an input file changes) by writing a depfile when the script is run\n"
+ " (see \"gn help depfile\"). This is more flexible than \"inputs\".\n"
+ "\n"
+ ACTION_DEPS
+ "\n"
+ "Outputs\n"
+ "\n"
+ SCRIPT_EXECUTION_CONTEXT
+ "\n"
+ "File name handling\n"
+ "\n"
+ SCRIPT_EXECUTION_OUTPUTS
+ "\n"
+ "Variables\n"
+ "\n"
+ " args, console, data, data_deps, depfile, deps, inputs, outputs*,\n"
+ " response_file_contents, script*, sources*\n"
+ " * = required\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " # Runs the script over each IDL file. The IDL script will generate\n"
+ " # both a .cc and a .h file for each input.\n"
+ " action_foreach(\"my_idl\") {\n"
+ " script = \"idl_processor.py\"\n"
+ " sources = [ \"foo.idl\", \"bar.idl\" ]\n"
+ "\n"
+    "  # Our script reads this file each time, so we need to list it as a\n"
+ " # dependency so we can rebuild if it changes.\n"
+ " inputs = [ \"my_configuration.txt\" ]\n"
+ "\n"
+ " # Transformation from source file name to output file names.\n"
+ " outputs = [ \"$target_gen_dir/{{source_name_part}}.h\",\n"
+ " \"$target_gen_dir/{{source_name_part}}.cc\" ]\n"
+ "\n"
+ " # Note that since \"args\" is opaque to GN, if you specify paths\n"
+ " # here, you will need to convert it to be relative to the build\n"
+ " # directory using \"rebase_path()\".\n"
+ " args = [\n"
+ " \"{{source}}\",\n"
+ " \"-o\",\n"
+ " rebase_path(relative_target_gen_dir, root_build_dir) +\n"
+ " \"/{{source_name_part}}.h\" ]\n"
+ " }\n"
+ "\n";
+Value RunActionForEach(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) {
+ return ExecuteGenericTarget(functions::kActionForEach, scope, function, args,
+ block, err);
+}
+
+// bundle_data -----------------------------------------------------------------
+
+const char kBundleData[] = "bundle_data";
+const char kBundleData_HelpShort[] =
+ "bundle_data: [iOS/OS X] Declare a target without output.";
+const char kBundleData_Help[] =
+ "bundle_data: [iOS/OS X] Declare a target without output.\n"
+ "\n"
+ " This target type allows to declare data that is required at runtime.\n"
+ " It is used to inform \"create_bundle\" targets of the files to copy\n"
+ " into generated bundle, see \"gn help create_bundle\" for help.\n"
+ "\n"
+ " The target must define a list of files as \"sources\" and a single\n"
+ " \"outputs\". If there are multiple files, source expansions must be\n"
+ " used to express the output. The output must reference a file inside\n"
+ " of {{bundle_root_dir}}.\n"
+ "\n"
+ " This target can be used on all platforms though it is designed only to\n"
+ " generate iOS/OS X bundle. In cross-platform projects, it is advised to\n"
+ " put it behind iOS/Mac conditionals.\n"
+ "\n"
+ " See \"gn help create_bundle\" for more information.\n"
+ "\n"
+ "Variables\n"
+ "\n"
+ " sources*, outputs*, deps, data_deps, public_deps, visibility\n"
+ " * = required\n"
+ "\n"
+ "Examples\n"
+ "\n"
+ " bundle_data(\"icudata\") {\n"
+ " sources = [ \"sources/data/in/icudtl.dat\" ]\n"
+ " outputs = [ \"{{bundle_resources_dir}}/{{source_file_part}}\" ]\n"
+ " }\n"
+ "\n"
+    "  bundle_data(\"base_unittests_bundle_data\") {\n"
+ " sources = [ \"test/data\" ]\n"
+ " outputs = [\n"
+ " \"{{bundle_resources_dir}}/{{source_root_relative_dir}}/\" +\n"
+ " \"{{source_file_part}}\"\n"
+ " ]\n"
+ " }\n"
+ "\n"
+ " bundle_data(\"material_typography_bundle_data\") {\n"
+ " sources = [\n"
+ " \"src/MaterialTypography.bundle/Roboto-Bold.ttf\",\n"
+ " \"src/MaterialTypography.bundle/Roboto-Italic.ttf\",\n"
+ " \"src/MaterialTypography.bundle/Roboto-Regular.ttf\",\n"
+ " \"src/MaterialTypography.bundle/Roboto-Thin.ttf\",\n"
+ " ]\n"
+ " outputs = [\n"
+ " \"{{bundle_resources_dir}}/MaterialTypography.bundle/\"\n"
+ " \"{{source_file_part}}\"\n"
+ " ]\n"
+ " }\n";
+
+Value RunBundleData(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) {
+ return ExecuteGenericTarget(functions::kBundleData, scope, function, args,
+ block, err);
+}
+
+// create_bundle ---------------------------------------------------------------
+
+const char kCreateBundle[] = "create_bundle";
+const char kCreateBundle_HelpShort[] =
+ "create_bundle: [iOS/OS X] Build an OS X / iOS bundle.";
+const char kCreateBundle_Help[] =
+ "create_bundle: [iOS/OS X] Build an OS X / iOS bundle.\n"
+ "\n"
+ " This target generates an iOS/OS X bundle (which is a directory with a\n"
+    " well-known structure). This target does not define any sources, instead\n"
+    " they are computed from all \"bundle_data\" targets this one depends on\n"
+ " transitively (the recursion stops at \"create_bundle\" targets).\n"
+ "\n"
+ " The \"bundle_*_dir\" properties must be defined. They will be used for\n"
+ " the expansion of {{bundle_*_dir}} rules in \"bundle_data\" outputs.\n"
+ "\n"
+ " This target can be used on all platforms though it is designed only to\n"
+ " generate iOS/OS X bundle. In cross-platform projects, it is advised to\n"
+ " put it behind iOS/Mac conditionals.\n"
+ "\n"
+ "Variables\n"
+ "\n"
+ " bundle_root_dir*, bundle_resources_dir*, bundle_executable_dir*,\n"
+ " bundle_plugins_dir*, deps, data_deps, public_deps, visibility\n"
+ " * = required\n"
+ "\n"
+ "Example\n"
+ "\n"
+    "  # Defines a template to create an application. On most platforms, this\n"
+ " # is just an alias for an \"executable\" target, but on iOS/OS X, it\n"
+ " # builds an application bundle.\n"
+ " template(\"app\") {\n"
+ " if (!is_ios && !is_mac) {\n"
+ " executable(target_name) {\n"
+ " forward_variables_from(invoker, \"*\")\n"
+ " }\n"
+ " } else {\n"
+ " app_name = target_name\n"
+ " gen_path = target_gen_dir\n"
+ "\n"
+ " action(\"${app_name}_generate_info_plist\") {\n"
+ " script = [ \"//build/ios/ios_gen_plist.py\" ]\n"
+ " sources = [ \"templates/Info.plist\" ]\n"
+ " outputs = [ \"$gen_path/Info.plist\" ]\n"
+ " args = rebase_path(sources, root_build_dir) +\n"
+ " rebase_path(outputs, root_build_dir)\n"
+ " }\n"
+ "\n"
+ " bundle_data(\"${app_name}_bundle_info_plist\") {\n"
+ " deps = [ \":${app_name}_generate_info_plist\" ]\n"
+ " sources = [ \"$gen_path/Info.plist\" ]\n"
+ " outputs = [ \"{{bundle_root_dir}}/Info.plist\" ]\n"
+ " }\n"
+ "\n"
+ " executable(\"${app_name}_generate_executable\") {\n"
+ " forward_variables_from(invoker, \"*\", [\n"
+ " \"output_name\",\n"
+ " \"visibility\",\n"
+ " ])\n"
+ " output_name =\n"
+ " rebase_path(\"$gen_path/$app_name\", root_build_dir)\n"
+ " }\n"
+ "\n"
+ " bundle_data(\"${app_name}_bundle_executable\") {\n"
+ " deps = [ \":${app_name}_generate_executable\" ]\n"
+ " sources = [ \"$gen_path/$app_name\" ]\n"
+ " outputs = [ \"{{bundle_executable_dir}}/$app_name\" ]\n"
+ " }\n"
+ "\n"
+ " create_bundle(\"${app_name}.app\") {\n"
+ " deps = [\n"
+ " \":${app_name}_bundle_executable\",\n"
+ " \":${app_name}_bundle_info_plist\",\n"
+ " ]\n"
+ " if (is_ios) {\n"
+ " bundle_root_dir = \"${root_build_dir}/$target_name\"\n"
+ " bundle_resources_dir = bundle_root_dir\n"
+ " bundle_executable_dir = bundle_root_dir\n"
+ " bundle_plugins_dir = bundle_root_dir + \"/Plugins\"\n"
+ " } else {\n"
+    "        bundle_root_dir = \"${root_build_dir}/$target_name/Contents\"\n"
+ " bundle_resources_dir = bundle_root_dir + \"/Resources\"\n"
+ " bundle_executable_dir = bundle_root_dir + \"/MacOS\"\n"
+ " bundle_plugins_dir = bundle_root_dir + \"/Plugins\"\n"
+ " }\n"
+ " }\n"
+ "\n"
+ " group(target_name) {\n"
+ " forward_variables_from(invoker, [\"visibility\"])\n"
+ " deps = [ \":${app_name}.app\" ]\n"
+ " }\n"
+ " }\n"
+ " }\n";
+
+Value RunCreateBundle(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) {
+ return ExecuteGenericTarget(functions::kCreateBundle, scope, function, args,
+ block, err);
+}
+
+// copy ------------------------------------------------------------------------
+
+const char kCopy[] = "copy";
+const char kCopy_HelpShort[] =
+ "copy: Declare a target that copies files.";
+const char kCopy_Help[] =
+ "copy: Declare a target that copies files.\n"
+ "\n"
+ "File name handling\n"
+ "\n"
+ " All output files must be inside the output directory of the build.\n"
+ " You would generally use |$target_out_dir| or |$target_gen_dir| to\n"
+ " reference the output or generated intermediate file directories,\n"
+ " respectively.\n"
+ "\n"
+ " Both \"sources\" and \"outputs\" must be specified. Sources can "
+ "include\n"
+ " as many files as you want, but there can only be one item in the\n"
+ " outputs list (plural is used for the name for consistency with\n"
+ " other target types).\n"
+ "\n"
+ " If there is more than one source file, your output name should specify\n"
+ " a mapping from each source file to an output file name using source\n"
+ " expansion (see \"gn help source_expansion\"). The placeholders will\n"
+ " look like \"{{source_name_part}}\", for example.\n"
+ "\n"
+ "Examples\n"
+ "\n"
+ " # Write a rule that copies a checked-in DLL to the output directory.\n"
+ " copy(\"mydll\") {\n"
+ " sources = [ \"mydll.dll\" ]\n"
+ " outputs = [ \"$target_out_dir/mydll.dll\" ]\n"
+ " }\n"
+ "\n"
+ " # Write a rule to copy several files to the target generated files\n"
+ " # directory.\n"
+ " copy(\"myfiles\") {\n"
+ " sources = [ \"data1.dat\", \"data2.dat\", \"data3.dat\" ]\n"
+ "\n"
+ " # Use source expansion to generate output files with the\n"
+ " # corresponding file names in the gen dir. This will just copy each\n"
+ " # file.\n"
+ " outputs = [ \"$target_gen_dir/{{source_file_part}}\" ]\n"
+ " }\n";
+
+Value RunCopy(const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ Scope* scope,
+ Err* err) {
+ if (!EnsureNotProcessingImport(function, scope, err) ||
+ !EnsureNotProcessingBuildConfig(function, scope, err))
+ return Value();
+ TargetGenerator::GenerateTarget(scope, function, args, functions::kCopy, err);
+ return Value();
+}
+
+// executable ------------------------------------------------------------------
+
+const char kExecutable[] = "executable";
+const char kExecutable_HelpShort[] =
+ "executable: Declare an executable target.";
+const char kExecutable_Help[] =
+ "executable: Declare an executable target.\n"
+ "\n"
+ "Variables\n"
+ "\n"
+ CONFIG_VALUES_VARS_HELP
+ DEPS_VARS
+ DEPENDENT_CONFIG_VARS
+ GENERAL_TARGET_VARS;
+
+Value RunExecutable(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) {
+ return ExecuteGenericTarget(functions::kExecutable, scope, function, args,
+ block, err);
+}
+
+// group -----------------------------------------------------------------------
+
+const char kGroup[] = "group";
+const char kGroup_HelpShort[] =
+ "group: Declare a named group of targets.";
+const char kGroup_Help[] =
+ "group: Declare a named group of targets.\n"
+ "\n"
+ " This target type allows you to create meta-targets that just collect a\n"
+ " set of dependencies into one named target. Groups can additionally\n"
+ " specify configs that apply to their dependents.\n"
+ "\n"
+ " Depending on a group is exactly like depending directly on that\n"
+ " group's deps. \n"
+ "\n"
+ "Variables\n"
+ "\n"
+ DEPS_VARS
+ DEPENDENT_CONFIG_VARS
+ "\n"
+ "Example\n"
+ "\n"
+ " group(\"all\") {\n"
+ " deps = [\n"
+ " \"//project:runner\",\n"
+ " \"//project:unit_tests\",\n"
+ " ]\n"
+ " }\n";
+
+Value RunGroup(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) {
+ return ExecuteGenericTarget(functions::kGroup, scope, function, args,
+ block, err);
+}
+
+// loadable_module -------------------------------------------------------------
+
+const char kLoadableModule[] = "loadable_module";
+const char kLoadableModule_HelpShort[] =
+ "loadable_module: Declare a loadable module target.";
+const char kLoadableModule_Help[] =
+ "loadable_module: Declare a loadable module target.\n"
+ "\n"
+ " This target type allows you to create an object file that is (and can\n"
+ " only be) loaded and unloaded at runtime.\n"
+ "\n"
+ " A loadable module will be specified on the linker line for targets\n"
+ " listing the loadable module in its \"deps\". If you don't want this\n"
+ " (if you don't need to dynamically load the library at runtime), then\n"
+ " you should use a \"shared_library\" target type instead.\n"
+ "\n"
+ "Variables\n"
+ "\n"
+ CONFIG_VALUES_VARS_HELP
+ DEPS_VARS
+ DEPENDENT_CONFIG_VARS
+ GENERAL_TARGET_VARS;
+
+Value RunLoadableModule(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) {
+ return ExecuteGenericTarget(functions::kLoadableModule, scope, function, args,
+ block, err);
+}
+
+// shared_library --------------------------------------------------------------
+
+const char kSharedLibrary[] = "shared_library";
+const char kSharedLibrary_HelpShort[] =
+ "shared_library: Declare a shared library target.";
+const char kSharedLibrary_Help[] =
+ "shared_library: Declare a shared library target.\n"
+ "\n"
+ " A shared library will be specified on the linker line for targets\n"
+ " listing the shared library in its \"deps\". If you don't want this\n"
+ " (say you dynamically load the library at runtime), then you should\n"
+ " depend on the shared library via \"data_deps\" or, on Darwin\n"
+ " platforms, use a \"loadable_module\" target type instead.\n"
+ "\n"
+ "Variables\n"
+ "\n"
+ CONFIG_VALUES_VARS_HELP
+ DEPS_VARS
+ DEPENDENT_CONFIG_VARS
+ GENERAL_TARGET_VARS;
+
+Value RunSharedLibrary(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) {
+ return ExecuteGenericTarget(functions::kSharedLibrary, scope, function, args,
+ block, err);
+}
+
+// source_set ------------------------------------------------------------------
+
+extern const char kSourceSet[] = "source_set";
+extern const char kSourceSet_HelpShort[] =
+ "source_set: Declare a source set target.";
+extern const char kSourceSet_Help[] =
+ "source_set: Declare a source set target.\n"
+ "\n"
+ " A source set is a collection of sources that get compiled, but are not\n"
+ " linked to produce any kind of library. Instead, the resulting object\n"
+ " files are implicitly added to the linker line of all targets that\n"
+ " depend on the source set.\n"
+ "\n"
+ " In most cases, a source set will behave like a static library, except\n"
+ " no actual library file will be produced. This will make the build go\n"
+ " a little faster by skipping creation of a large static library, while\n"
+ " maintaining the organizational benefits of focused build targets.\n"
+ "\n"
+ " The main difference between a source set and a static library is\n"
+ " around handling of exported symbols. Most linkers assume declaring\n"
+ " a function exported means exported from the static library. The linker\n"
+ " can then do dead code elimination to delete code not reachable from\n"
+ " exported functions.\n"
+ "\n"
+ " A source set will not do this code elimination since there is no link\n"
+    " step. This allows you to link many source sets into a shared library\n"
+ " and have the \"exported symbol\" notation indicate \"export from the\n"
+ " final shared library and not from the intermediate targets.\" There is\n"
+ " no way to express this concept when linking multiple static libraries\n"
+ " into a shared library.\n"
+ "\n"
+ "Variables\n"
+ "\n"
+ CONFIG_VALUES_VARS_HELP
+ DEPS_VARS
+ DEPENDENT_CONFIG_VARS
+ GENERAL_TARGET_VARS;
+
+Value RunSourceSet(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) {
+ return ExecuteGenericTarget(functions::kSourceSet, scope, function, args,
+ block, err);
+}
+
+// static_library --------------------------------------------------------------
+
+const char kStaticLibrary[] = "static_library";
+const char kStaticLibrary_HelpShort[] =
+ "static_library: Declare a static library target.";
+const char kStaticLibrary_Help[] =
+ "static_library: Declare a static library target.\n"
+ "\n"
+ " Make a \".a\" / \".lib\" file.\n"
+ "\n"
+ " If you only need the static library for intermediate results in the\n"
+ " build, you should consider a source_set instead since it will skip\n"
+ " the (potentially slow) step of creating the intermediate library file.\n"
+ "\n"
+ "Variables\n"
+ "\n"
+ CONFIG_VALUES_VARS_HELP
+ DEPS_VARS
+ DEPENDENT_CONFIG_VARS
+ GENERAL_TARGET_VARS;
+
+Value RunStaticLibrary(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) {
+ return ExecuteGenericTarget(functions::kStaticLibrary, scope, function, args,
+ block, err);
+}
+
+// target ---------------------------------------------------------------------
+
+const char kTarget[] = "target";
+const char kTarget_HelpShort[] =
+    "target: Declare a target with the given programmatic type.";
+const char kTarget_Help[] =
+    "target: Declare a target with the given programmatic type.\n"
+ "\n"
+ " target(target_type_string, target_name_string) { ... }\n"
+ "\n"
+ " The target() function is a way to invoke a built-in target or template\n"
+ " with a type determined at runtime. This is useful for cases where the\n"
+ " type of a target might not be known statically.\n"
+ "\n"
+ " Only templates and built-in target functions are supported for the\n"
+ " target_type_string parameter. Arbitrary functions, configs, and\n"
+ " toolchains are not supported.\n"
+ "\n"
+ " The call:\n"
+ " target(\"source_set\", \"doom_melon\") {\n"
+ " Is equivalent to:\n"
+ " source_set(\"doom_melon\") {\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " if (foo_build_as_shared) {\n"
+ " my_type = \"shared_library\"\n"
+ " } else {\n"
+ " my_type = \"source_set\"\n"
+ " }\n"
+ "\n"
+ " target(my_type, \"foo\") {\n"
+ " ...\n"
+ " }\n";
+Value RunTarget(Scope* scope,
+ const FunctionCallNode* function,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) {
+ if (args.size() != 2) {
+ *err = Err(function, "Expected two arguments.", "Try \"gn help target\".");
+ return Value();
+ }
+
+ // The first argument must be a string (the target type). Don't type-check
+ // the second argument since the target-specific function will do that.
+ if (!args[0].VerifyTypeIs(Value::STRING, err))
+ return Value();
+ const std::string& target_type = args[0].string_value();
+
+ // The rest of the args are passed to the function.
+ std::vector<Value> sub_args(args.begin() + 1, args.end());
+
+ // Run a template if it is one.
+ const Template* templ = scope->GetTemplate(target_type);
+ if (templ)
+ return templ->Invoke(scope, function, sub_args, block, err);
+
+ // Otherwise, assume the target is a built-in target type.
+ return ExecuteGenericTarget(target_type.c_str(), scope, function, sub_args,
+ block, err);
+}
+
+} // namespace functions
diff --git a/chromium/tools/gn/functions_target_unittest.cc b/chromium/tools/gn/functions_target_unittest.cc
new file mode 100644
index 00000000000..a7a0de9ac2c
--- /dev/null
+++ b/chromium/tools/gn/functions_target_unittest.cc
@@ -0,0 +1,38 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/test_with_scope.h"
+
+
+// Checks that we find unused identifiers in targets.
+TEST(FunctionsTarget, CheckUnused) {
+ Scheduler scheduler;
+ TestWithScope setup;
+
+ // The target generator needs a place to put the targets or it will fail.
+ Scope::ItemVector item_collector;
+ setup.scope()->set_item_collector(&item_collector);
+
+ // Test a good one first.
+ TestParseInput good_input(
+ "source_set(\"foo\") {\n"
+ "}\n");
+ ASSERT_FALSE(good_input.has_error());
+ Err err;
+ good_input.parsed()->Execute(setup.scope(), &err);
+ ASSERT_FALSE(err.has_error()) << err.message();
+
+ // Test a source set with an unused variable.
+ TestParseInput source_set_input(
+ "source_set(\"foo\") {\n"
+ " unused = 5\n"
+ "}\n");
+ ASSERT_FALSE(source_set_input.has_error());
+ err = Err();
+ source_set_input.parsed()->Execute(setup.scope(), &err);
+ ASSERT_TRUE(err.has_error());
+}
diff --git a/chromium/tools/gn/functions_unittest.cc b/chromium/tools/gn/functions_unittest.cc
new file mode 100644
index 00000000000..22670710704
--- /dev/null
+++ b/chromium/tools/gn/functions_unittest.cc
@@ -0,0 +1,92 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/functions.h"
+
+#include <utility>
+
+#include "base/memory/ptr_util.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/test_with_scope.h"
+#include "tools/gn/value.h"
+
+TEST(Functions, Defined) {
+ TestWithScope setup;
+
+ FunctionCallNode function_call;
+ Err err;
+
+ // Test an undefined identifier.
+ Token undefined_token(Location(), Token::IDENTIFIER, "undef");
+ ListNode args_list_identifier_undefined;
+ args_list_identifier_undefined.append_item(
+ std::unique_ptr<ParseNode>(new IdentifierNode(undefined_token)));
+ Value result = functions::RunDefined(setup.scope(), &function_call,
+ &args_list_identifier_undefined, &err);
+ ASSERT_EQ(Value::BOOLEAN, result.type());
+ EXPECT_FALSE(result.boolean_value());
+
+ // Define a value that's itself a scope value.
+ const char kDef[] = "def"; // Defined variable name.
+ setup.scope()->SetValue(
+ kDef, Value(nullptr, std::unique_ptr<Scope>(new Scope(setup.scope()))),
+ nullptr);
+
+ // Test the defined identifier.
+ Token defined_token(Location(), Token::IDENTIFIER, kDef);
+ ListNode args_list_identifier_defined;
+ args_list_identifier_defined.append_item(
+ std::unique_ptr<ParseNode>(new IdentifierNode(defined_token)));
+ result = functions::RunDefined(setup.scope(), &function_call,
+ &args_list_identifier_defined, &err);
+ ASSERT_EQ(Value::BOOLEAN, result.type());
+ EXPECT_TRUE(result.boolean_value());
+
+ // Should also work by passing an accessor node so you can do
+ // "defined(def.foo)" to see if foo is defined on the def scope.
+ std::unique_ptr<AccessorNode> undef_accessor(new AccessorNode);
+ undef_accessor->set_base(defined_token);
+ undef_accessor->set_member(
+ base::WrapUnique(new IdentifierNode(undefined_token)));
+ ListNode args_list_accessor_defined;
+ args_list_accessor_defined.append_item(std::move(undef_accessor));
+ result = functions::RunDefined(setup.scope(), &function_call,
+ &args_list_accessor_defined, &err);
+ ASSERT_EQ(Value::BOOLEAN, result.type());
+ EXPECT_FALSE(result.boolean_value());
+}
+
+// Tests that an error is thrown when a {} is supplied to a function that
+// doesn't take one.
+TEST(Functions, FunctionsWithBlock) {
+ TestWithScope setup;
+ Err err;
+
+ // No scope to print() is OK.
+ TestParseInput print_no_scope("print(6)");
+ EXPECT_FALSE(print_no_scope.has_error());
+ Value result = print_no_scope.parsed()->Execute(setup.scope(), &err);
+ EXPECT_FALSE(err.has_error());
+
+ // Passing a scope should pass parsing (it doesn't know about what kind of
+ // function it is) and then throw an error during execution.
+ TestParseInput print_with_scope("print(foo) {}");
+ EXPECT_FALSE(print_with_scope.has_error());
+ result = print_with_scope.parsed()->Execute(setup.scope(), &err);
+ EXPECT_TRUE(err.has_error());
+ err = Err();
+
+ // defined() is a special function so test it separately.
+ TestParseInput defined_no_scope("defined(foo)");
+ EXPECT_FALSE(defined_no_scope.has_error());
+ result = defined_no_scope.parsed()->Execute(setup.scope(), &err);
+ EXPECT_FALSE(err.has_error());
+
+ // A block to defined should fail.
+ TestParseInput defined_with_scope("defined(foo) {}");
+ EXPECT_FALSE(defined_with_scope.has_error());
+ result = defined_with_scope.parsed()->Execute(setup.scope(), &err);
+ EXPECT_TRUE(err.has_error());
+}
diff --git a/chromium/tools/gn/gn.gyp b/chromium/tools/gn/gn.gyp
index ade8d3aa914..79f83a719c8 100644
--- a/chromium/tools/gn/gn.gyp
+++ b/chromium/tools/gn/gn.gyp
@@ -24,6 +24,12 @@
'builder.h',
'builder_record.cc',
'builder_record.h',
+ 'bundle_data.cc',
+ 'bundle_data.h',
+ 'bundle_data_target_generator.cc',
+ 'bundle_data_target_generator.h',
+ 'bundle_file_rule.cc',
+ 'bundle_file_rule.h',
'c_include_iterator.cc',
'c_include_iterator.h',
'command_args.cc',
@@ -48,8 +54,12 @@
'config_values_generator.h',
'copy_target_generator.cc',
'copy_target_generator.h',
+ 'create_bundle_target_generator.cc',
+ 'create_bundle_target_generator.h',
'deps_iterator.cc',
'deps_iterator.h',
+ 'eclipse_writer.cc',
+ 'eclipse_writer.h',
'err.cc',
'err.h',
'escape.cc',
@@ -108,8 +118,12 @@
'ninja_binary_target_writer.h',
'ninja_build_writer.cc',
'ninja_build_writer.h',
+ 'ninja_bundle_data_target_writer.cc',
+ 'ninja_bundle_data_target_writer.h',
'ninja_copy_target_writer.cc',
'ninja_copy_target_writer.h',
+ 'ninja_create_bundle_target_writer.cc',
+ 'ninja_create_bundle_target_writer.h',
'ninja_group_target_writer.cc',
'ninja_group_target_writer.h',
'ninja_target_writer.cc',
@@ -189,6 +203,12 @@
'variables.h',
'visibility.cc',
'visibility.h',
+ 'visual_studio_utils.cc',
+ 'visual_studio_utils.h',
+ 'visual_studio_writer.cc',
+ 'visual_studio_writer.h',
+ 'xml_element_writer.cc',
+ 'xml_element_writer.h',
],
},
{
@@ -235,6 +255,7 @@
'ninja_action_target_writer_unittest.cc',
'ninja_binary_target_writer_unittest.cc',
'ninja_copy_target_writer_unittest.cc',
+ 'ninja_create_bundle_target_writer_unittest.cc',
'ninja_group_target_writer_unittest.cc',
'ninja_target_writer_unittest.cc',
'ninja_toolchain_writer_unittest.cc',
@@ -259,6 +280,9 @@
'unique_vector_unittest.cc',
'value_unittest.cc',
'visibility_unittest.cc',
+ 'visual_studio_utils_unittest.cc',
+ 'visual_studio_writer_unittest.cc',
+ 'xml_element_writer_unittest.cc',
],
'dependencies': [
'gn_lib',
diff --git a/chromium/tools/gn/gn_main.cc b/chromium/tools/gn/gn_main.cc
new file mode 100644
index 00000000000..135ed561a9c
--- /dev/null
+++ b/chromium/tools/gn/gn_main.cc
@@ -0,0 +1,83 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/at_exit.h"
+#include "base/command_line.h"
+#include "base/strings/utf_string_conversions.h"
+#include "build/build_config.h"
+#include "tools/gn/commands.h"
+#include "tools/gn/err.h"
+#include "tools/gn/location.h"
+#include "tools/gn/standard_out.h"
+#include "tools/gn/switches.h"
+
+// Only the GN-generated build makes this header for now.
+// TODO(brettw) consider adding this if we need it in GYP.
+#if defined(GN_BUILD)
+#include "tools/gn/last_commit_position.h"
+#else
+#define LAST_COMMIT_POSITION "UNKNOWN"
+#endif
+
+namespace {
+
+std::vector<std::string> GetArgs(const base::CommandLine& cmdline) {
+ base::CommandLine::StringVector in_args = cmdline.GetArgs();
+#if defined(OS_WIN)
+ std::vector<std::string> out_args;
+ for (const auto& arg : in_args)
+ out_args.push_back(base::WideToUTF8(arg));
+ return out_args;
+#else
+ return in_args;
+#endif
+}
+
+} // namespace
+
+int main(int argc, char** argv) {
+ base::AtExitManager at_exit;
+#if defined(OS_WIN)
+ base::CommandLine::set_slash_is_not_a_switch();
+#endif
+ base::CommandLine::Init(argc, argv);
+
+ const base::CommandLine& cmdline = *base::CommandLine::ForCurrentProcess();
+ std::vector<std::string> args = GetArgs(cmdline);
+
+ std::string command;
+ if (cmdline.HasSwitch("help") || cmdline.HasSwitch("h")) {
+ // Make "-h" and "--help" default to help command.
+ command = commands::kHelp;
+ } else if (cmdline.HasSwitch(switches::kVersion)) {
+ // Make "--version" print the version and exit.
+ OutputString(std::string(LAST_COMMIT_POSITION) + "\n");
+ exit(0);
+ } else if (args.empty()) {
+ // No command, print error and exit.
+ Err(Location(), "No command specified.",
+ "Most commonly you want \"gn gen <out_dir>\" to make a build dir.\n"
+ "Or try \"gn help\" for more commands.").PrintToStdout();
+ return 1;
+ } else {
+ command = args[0];
+ args.erase(args.begin());
+ }
+
+ const commands::CommandInfoMap& command_map = commands::GetCommands();
+ commands::CommandInfoMap::const_iterator found_command =
+ command_map.find(command);
+
+ int retval;
+ if (found_command != command_map.end()) {
+ retval = found_command->second.runner(args);
+ } else {
+ Err(Location(),
+ "Command \"" + command + "\" unknown.").PrintToStdout();
+ commands::RunHelp(std::vector<std::string>());
+ retval = 1;
+ }
+
+ exit(retval); // Don't free memory, it can be really slow!
+}
diff --git a/chromium/tools/gn/group_target_generator.cc b/chromium/tools/gn/group_target_generator.cc
new file mode 100644
index 00000000000..2c427f4cc49
--- /dev/null
+++ b/chromium/tools/gn/group_target_generator.cc
@@ -0,0 +1,25 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/group_target_generator.h"
+
+#include "tools/gn/target.h"
+#include "tools/gn/variables.h"
+
+GroupTargetGenerator::GroupTargetGenerator(
+ Target* target,
+ Scope* scope,
+ const FunctionCallNode* function_call,
+ Err* err)
+ : TargetGenerator(target, scope, function_call, err) {
+}
+
+GroupTargetGenerator::~GroupTargetGenerator() {
+}
+
+void GroupTargetGenerator::DoRun() {
+ target_->set_output_type(Target::GROUP);
+ // Groups only have the default types filled in by the target generator
+ // base class.
+}
diff --git a/chromium/tools/gn/group_target_generator.h b/chromium/tools/gn/group_target_generator.h
new file mode 100644
index 00000000000..502f40b8db4
--- /dev/null
+++ b/chromium/tools/gn/group_target_generator.h
@@ -0,0 +1,27 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_GROUP_TARGET_GENERATOR_H_
+#define TOOLS_GN_GROUP_TARGET_GENERATOR_H_
+
+#include "base/macros.h"
+#include "tools/gn/target_generator.h"
+
+// Populates a Target with the values for a group rule.
+class GroupTargetGenerator : public TargetGenerator {
+ public:
+ GroupTargetGenerator(Target* target,
+ Scope* scope,
+ const FunctionCallNode* function_call,
+ Err* err);
+ ~GroupTargetGenerator() override;
+
+ protected:
+ void DoRun() override;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(GroupTargetGenerator);
+};
+
+#endif // TOOLS_GN_GROUP_TARGET_GENERATOR_H_
diff --git a/chromium/tools/gn/group_target_generator_unittest.cc b/chromium/tools/gn/group_target_generator_unittest.cc
new file mode 100644
index 00000000000..3b0c8244368
--- /dev/null
+++ b/chromium/tools/gn/group_target_generator_unittest.cc
@@ -0,0 +1,46 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/group_target_generator.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/test_with_scope.h"
+
+namespace {
+
+// Returns true on success, false if write_file signaled an error.
+bool ParseWriteRuntimeDeps(Scope* scope, const std::string& value) {
+ TestParseInput input(
+ "group(\"foo\") { write_runtime_deps = " + value + "}");
+ if (input.has_error())
+ return false;
+
+ Err err;
+ input.parsed()->Execute(scope, &err);
+ return !err.has_error();
+}
+
+} // namespace
+
+
+// Tests that actions can't have output substitutions.
+TEST(GroupTargetGenerator, WriteRuntimeDeps) {
+ Scheduler scheduler;
+ TestWithScope setup;
+ Scope::ItemVector items_;
+ setup.scope()->set_item_collector(&items_);
+
+ // Should refuse to write files outside of the output dir.
+ EXPECT_FALSE(ParseWriteRuntimeDeps(setup.scope(), "\"//foo.txt\""));
+ EXPECT_EQ(0U, scheduler.GetWriteRuntimeDepsTargets().size());
+
+ // Should fail for garbage inputs.
+ EXPECT_FALSE(ParseWriteRuntimeDeps(setup.scope(), "0"));
+ EXPECT_EQ(0U, scheduler.GetWriteRuntimeDepsTargets().size());
+
+ // Should be able to write inside the out dir.
+ EXPECT_TRUE(ParseWriteRuntimeDeps(setup.scope(), "\"//out/Debug/foo.txt\""));
+ EXPECT_EQ(1U, scheduler.GetWriteRuntimeDepsTargets().size());
+}
+
diff --git a/chromium/tools/gn/header_checker.cc b/chromium/tools/gn/header_checker.cc
new file mode 100644
index 00000000000..1002081a837
--- /dev/null
+++ b/chromium/tools/gn/header_checker.cc
@@ -0,0 +1,583 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/header_checker.h"
+
+#include <algorithm>
+
+#include "base/bind.h"
+#include "base/files/file_util.h"
+#include "base/message_loop/message_loop.h"
+#include "base/strings/string_util.h"
+#include "base/threading/sequenced_worker_pool.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/builder.h"
+#include "tools/gn/c_include_iterator.h"
+#include "tools/gn/config.h"
+#include "tools/gn/err.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/source_file_type.h"
+#include "tools/gn/target.h"
+#include "tools/gn/trace.h"
+
+namespace {
+
+struct PublicGeneratedPair {
+ PublicGeneratedPair() : is_public(false), is_generated(false) {}
+ bool is_public;
+ bool is_generated;
+};
+
+// If the given file is in the "gen" folder, trims this so it treats the gen
+// directory as the source root:
+// //out/Debug/gen/foo/bar.h -> //foo/bar.h
+// If the file isn't in the generated root, returns the input unchanged.
+SourceFile RemoveRootGenDirFromFile(const Target* target,
+ const SourceFile& file) {
+ const SourceDir& gen = target->settings()->toolchain_gen_dir();
+ if (!gen.is_null() && base::StartsWith(file.value(), gen.value(),
+ base::CompareCase::SENSITIVE))
+ return SourceFile("//" + file.value().substr(gen.value().size()));
+ return file;
+}
+
+// This class makes InputFiles on the stack as it reads files to check. When
+// we throw an error, the Err indicates a location which has a pointer to
+// an InputFile that must persist as long as the Err does.
+//
+// To make this work, this function creates a clone of the InputFile managed
+// by the InputFileManager so the error can refer to something that
+// persists. This means that the current file contents will live as long as
+// the program, but this is OK since we're erroring out anyway.
+LocationRange CreatePersistentRange(const InputFile& input_file,
+ const LocationRange& range) {
+ InputFile* clone_input_file;
+ std::vector<Token>* tokens; // Don't care about this.
+ std::unique_ptr<ParseNode>* parse_root; // Don't care about this.
+
+ g_scheduler->input_file_manager()->AddDynamicInput(
+ input_file.name(), &clone_input_file, &tokens, &parse_root);
+ clone_input_file->SetContents(input_file.contents());
+
+ return LocationRange(Location(clone_input_file,
+ range.begin().line_number(),
+ range.begin().column_number(),
+ -1 /* TODO(scottmg) */),
+ Location(clone_input_file,
+ range.end().line_number(),
+ range.end().column_number(),
+ -1 /* TODO(scottmg) */));
+}
+
+// Given a reverse dependency chain where the target chain[0]'s includes are
+// being used by chain[end] and not all deps are public, returns the string
+// describing the error.
+std::string GetDependencyChainPublicError(
+ const HeaderChecker::Chain& chain) {
+ std::string ret = "The target:\n " +
+ chain[chain.size() - 1].target->label().GetUserVisibleName(false) +
+ "\nis including a file from the target:\n " +
+ chain[0].target->label().GetUserVisibleName(false) +
+ "\n";
+
+ // Invalid chains should always be 0 (no chain) or more than two
+ // (intermediate private dependencies). 1 and 2 are impossible because a
+ // target can always include headers from itself and its direct dependents.
+ DCHECK(chain.size() != 1 && chain.size() != 2);
+ if (chain.empty()) {
+ ret += "There is no dependency chain between these targets.";
+ } else {
+ // Indirect dependency chain, print the chain.
+ ret += "\nIt's usually best to depend directly on the destination target.\n"
+ "In some cases, the destination target is considered a subcomponent\n"
+ "of an intermediate target. In this case, the intermediate target\n"
+ "should depend publicly on the destination to forward the ability\n"
+ "to include headers.\n"
+ "\n"
+ "Dependency chain (there may also be others):\n";
+
+ for (int i = static_cast<int>(chain.size()) - 1; i >= 0; i--) {
+ ret.append(" " + chain[i].target->label().GetUserVisibleName(false));
+ if (i != 0) {
+ // Identify private dependencies so the user can see where in the
+ // dependency chain things went bad. Don't list this for the first link
+ // in the chain since direct dependencies are OK, and listing that as
+ // "private" may make people feel like they need to fix it.
+ if (i == static_cast<int>(chain.size()) - 1 || chain[i - 1].is_public)
+ ret.append(" -->");
+ else
+ ret.append(" --[private]-->");
+ }
+ ret.append("\n");
+ }
+ }
+ return ret;
+}
+
+// Returns true if the two targets have the same label not counting the
+// toolchain.
+bool TargetLabelsMatchExceptToolchain(const Target* a, const Target* b) {
+ return a->label().dir() == b->label().dir() &&
+ a->label().name() == b->label().name();
+}
+
+} // namespace
+
+HeaderChecker::HeaderChecker(const BuildSettings* build_settings,
+ const std::vector<const Target*>& targets)
+ : main_loop_(base::MessageLoop::current()),
+ build_settings_(build_settings) {
+ for (const auto& target : targets)
+ AddTargetToFileMap(target, &file_map_);
+}
+
+HeaderChecker::~HeaderChecker() {
+}
+
+bool HeaderChecker::Run(const std::vector<const Target*>& to_check,
+ bool force_check,
+ std::vector<Err>* errors) {
+ FileMap files_to_check;
+ for (const auto& check : to_check)
+ AddTargetToFileMap(check, &files_to_check);
+ RunCheckOverFiles(files_to_check, force_check);
+
+ if (errors_.empty())
+ return true;
+ *errors = errors_;
+ return false;
+}
+
+void HeaderChecker::RunCheckOverFiles(const FileMap& files, bool force_check) {
+ if (files.empty())
+ return;
+
+ scoped_refptr<base::SequencedWorkerPool> pool(
+ new base::SequencedWorkerPool(16, "HeaderChecker"));
+ for (const auto& file : files) {
+ // Only check C-like source files (RC files also have includes).
+ SourceFileType type = GetSourceFileType(file.first);
+ if (type != SOURCE_CPP && type != SOURCE_H && type != SOURCE_C &&
+ type != SOURCE_M && type != SOURCE_MM && type != SOURCE_RC)
+ continue;
+
+ // If any target marks it as generated, don't check it. We have to check
+ // file_map_, which includes all known files; files only includes those
+ // being checked.
+ bool is_generated = false;
+ for (const auto& vect_i : file_map_[file.first])
+ is_generated |= vect_i.is_generated;
+ if (is_generated)
+ continue;
+
+ for (const auto& vect_i : file.second) {
+ if (vect_i.target->check_includes()) {
+ pool->PostWorkerTaskWithShutdownBehavior(
+ FROM_HERE,
+ base::Bind(&HeaderChecker::DoWork, this, vect_i.target, file.first),
+ base::SequencedWorkerPool::BLOCK_SHUTDOWN);
+ }
+ }
+ }
+
+ // After this call we're single-threaded again.
+ pool->Shutdown();
+}
+
+void HeaderChecker::DoWork(const Target* target, const SourceFile& file) {
+ Err err;
+ if (!CheckFile(target, file, &err)) {
+ base::AutoLock lock(lock_);
+ errors_.push_back(err);
+ }
+}
+
+// static
+void HeaderChecker::AddTargetToFileMap(const Target* target, FileMap* dest) {
+ // Files in the sources have this public bit by default.
+ bool default_public = target->all_headers_public();
+
+ std::map<SourceFile, PublicGeneratedPair> files_to_public;
+
+ // First collect the normal files, they get the default visibility. Always
+ // trim the root gen dir if it exists. This will only exist on outputs of an
+ // action, but those are often then wired into the sources of a compiled
+ // target to actually compile generated code. If you depend on the compiled
+ // target, it should be enough to be able to include the header.
+ for (const auto& source : target->sources()) {
+ SourceFile file = RemoveRootGenDirFromFile(target, source);
+ files_to_public[file].is_public = default_public;
+ }
+
+ // Add in the public files, forcing them to public. This may overwrite some
+ // entries, and it may add new ones.
+ if (default_public) // List only used when default is not public.
+ DCHECK(target->public_headers().empty());
+ for (const auto& source : target->public_headers()) {
+ SourceFile file = RemoveRootGenDirFromFile(target, source);
+ files_to_public[file].is_public = true;
+ }
+
+ // Add in outputs from actions. These are treated as public (since if other
+ // targets can't use them, then there wouldn't be any point in outputting).
+ std::vector<SourceFile> outputs;
+ target->action_values().GetOutputsAsSourceFiles(target, &outputs);
+ for (const auto& output : outputs) {
+ // For generated files in the "gen" directory, add the filename to the
+ // map assuming "gen" is the source root. This means that when files include
+ // the generated header relative to there (the recommended practice), we'll
+ // find the file.
+ SourceFile output_file = RemoveRootGenDirFromFile(target, output);
+ PublicGeneratedPair* pair = &files_to_public[output_file];
+ pair->is_public = true;
+ pair->is_generated = true;
+ }
+
+ // Add the merged list to the master list of all files.
+ for (const auto& cur : files_to_public) {
+ (*dest)[cur.first].push_back(TargetInfo(
+ target, cur.second.is_public, cur.second.is_generated));
+ }
+}
+
+bool HeaderChecker::IsFileInOuputDir(const SourceFile& file) const {
+ const std::string& build_dir = build_settings_->build_dir().value();
+ return file.value().compare(0, build_dir.size(), build_dir) == 0;
+}
+
+// This currently assumes all include paths are relative to the source root
+// which is generally the case for Chromium.
+//
+// A future enhancement would be to search the include path for the target
+// containing the source file containing this include and find the file to
+// handle the cases where people do weird things with the paths.
+SourceFile HeaderChecker::SourceFileForInclude(
+ const base::StringPiece& input) const {
+ std::string str("//");
+ input.AppendToString(&str);
+ return SourceFile(str);
+}
+
+bool HeaderChecker::CheckFile(const Target* from_target,
+ const SourceFile& file,
+ Err* err) const {
+ ScopedTrace trace(TraceItem::TRACE_CHECK_HEADER, file.value());
+
+ // Sometimes you have generated source files included as sources in another
+ // target. These won't exist at checking time. Since we require all generated
+ // files to be somewhere in the output tree, we can just check the name to
+ // see if they should be skipped.
+ if (IsFileInOuputDir(file))
+ return true;
+
+ base::FilePath path = build_settings_->GetFullPath(file);
+ std::string contents;
+ if (!base::ReadFileToString(path, &contents)) {
+ *err = Err(from_target->defined_from(), "Source file not found.",
+ "The target:\n " + from_target->label().GetUserVisibleName(false) +
+ "\nhas a source file:\n " + file.value() +
+ "\nwhich was not found.");
+ return false;
+ }
+
+ InputFile input_file(file);
+ input_file.SetContents(contents);
+
+ CIncludeIterator iter(&input_file);
+ base::StringPiece current_include;
+ LocationRange range;
+ while (iter.GetNextIncludeString(&current_include, &range)) {
+ SourceFile include = SourceFileForInclude(current_include);
+ if (!CheckInclude(from_target, input_file, include, range, err))
+ return false;
+ }
+
+ return true;
+}
+
+// If the file exists:
+// - The header must be in the public section of a target, or it must
+// be in the sources with no public list (everything is implicitly public).
+// - The dependency path to the included target must follow only public_deps.
+// - If there are multiple targets with the header in it, only one need be
+// valid for the check to pass.
+bool HeaderChecker::CheckInclude(const Target* from_target,
+ const InputFile& source_file,
+ const SourceFile& include_file,
+ const LocationRange& range,
+ Err* err) const {
+ // Assume if the file isn't declared in our sources that we don't need to
+ // check it. It would be nice if we could give an error if this happens, but
+ // our include finder is too primitive and returns all includes, even if
+ // they're in a #if not executed in the current build. In that case, it's
+ // not unusual for the buildfiles to not specify that header at all.
+ FileMap::const_iterator found = file_map_.find(include_file);
+ if (found == file_map_.end())
+ return true;
+
+ const TargetVector& targets = found->second;
+ Chain chain; // Prevent reallocating in the loop.
+
+ // If the file is unknown in the current toolchain (rather than being private
+ // or in a target not visible to the current target), ignore it. This is a
+ // bit of a hack to account for the fact that the include finder doesn't
+ // understand the preprocessor.
+ //
+ // When not cross-compiling, if a platform specific header is conditionally
+ // included in the build, and preprocessor conditions around #includes of
+ // that match the build conditions, everything will be OK because the file
+ // won't be known to GN even though the #include finder identified the file.
+ //
+ // Cross-compiling breaks this. When compiling Android on Linux, for example,
+ // we might see both Linux and Android definitions of a target and know
+ // about the union of all headers in the build. Since the #include finder
+ // ignores preprocessor, we will find the Linux headers in the Android
+ // build and note that a dependency from the Android target to the Linux
+ // one is missing (these might even be the same target in different
+ // toolchains!).
+ bool present_in_current_toolchain = false;
+ for (const auto& target : targets) {
+ if (from_target->label().ToolchainsEqual(target.target->label())) {
+ present_in_current_toolchain = true;
+ break;
+ }
+ }
+ if (!present_in_current_toolchain)
+ return true;
+
+ // For all targets containing this file, we require that at least one be
+ // a direct or public dependency of the current target, and that the header
+ // is public within the target.
+ //
+ // If there is more than one target containing this header, we may encounter
+ // some error cases before finding a good one. This error stores the previous
+ // one encountered, which we may or may not throw away.
+ Err last_error;
+
+ bool found_dependency = false;
+ for (const auto& target : targets) {
+ // We always allow source files in a target to include headers also in that
+ // target.
+ const Target* to_target = target.target;
+ if (to_target == from_target)
+ return true;
+
+ bool is_permitted_chain = false;
+ if (IsDependencyOf(to_target, from_target, &chain, &is_permitted_chain)) {
+ DCHECK(chain.size() >= 2);
+ DCHECK(chain[0].target == to_target);
+ DCHECK(chain[chain.size() - 1].target == from_target);
+
+ found_dependency = true;
+
+ if (target.is_public && is_permitted_chain) {
+ // This one is OK, we're done.
+ last_error = Err();
+ break;
+ }
+
+ // Diagnose the error.
+ if (!target.is_public) {
+ // Danger: must call CreatePersistentRange to put in Err.
+ last_error = Err(CreatePersistentRange(source_file, range),
+ "Including a private header.",
+ "This file is private to the target " +
+ target.target->label().GetUserVisibleName(false));
+ } else if (!is_permitted_chain) {
+ last_error = Err(
+ CreatePersistentRange(source_file, range),
+ "Can't include this header from here.",
+ GetDependencyChainPublicError(chain));
+ } else {
+ NOTREACHED();
+ }
+ } else if (
+ to_target->allow_circular_includes_from().find(from_target->label()) !=
+ to_target->allow_circular_includes_from().end()) {
+ // Not a dependency, but this include is whitelisted from the destination.
+ found_dependency = true;
+ last_error = Err();
+ break;
+ }
+ }
+
+ if (!found_dependency) {
+ DCHECK(!last_error.has_error());
+ *err = MakeUnreachableError(source_file, range, from_target, targets);
+ return false;
+ }
+ if (last_error.has_error()) {
+ // Found at least one dependency chain above, but it had an error.
+ *err = last_error;
+ return false;
+ }
+
+ // One thing we didn't check for is targets that expose their dependents
+ // headers in their own public headers.
+ //
+ // Say we have A -> B -> C. If C has public_configs, everybody getting headers
+ // from C should get the configs also or things could be out-of-sync. Above,
+ // we check for A including C's headers directly, but A could also include a
+ // header from B that in turn includes a header from C.
+ //
+ // There are two ways to solve this:
+ // - If a public header in B includes C, force B to publicly depend on C.
+ // This is possible to check, but might be super annoying because most
+ // targets (especially large leaf-node targets) don't declare
+ // public/private headers and you'll get lots of false positives.
+ //
+ // - Save the includes found in each file and actually compute the graph of
+ // includes to detect when A implicitly includes C's header. This will not
+ // have the annoying false positive problem, but is complex to write.
+
+ return true;
+}
+
+bool HeaderChecker::IsDependencyOf(const Target* search_for,
+ const Target* search_from,
+ Chain* chain,
+ bool* is_permitted) const {
+ if (search_for == search_from) {
+ // A target is always visible from itself.
+ *is_permitted = true;
+ return false;
+ }
+
+ // Find the shortest public dependency chain.
+ if (IsDependencyOf(search_for, search_from, true, chain)) {
+ *is_permitted = true;
+ return true;
+ }
+
+ // If not, try to find any dependency chain at all.
+ if (IsDependencyOf(search_for, search_from, false, chain)) {
+ *is_permitted = false;
+ return true;
+ }
+
+ *is_permitted = false;
+ return false;
+}
+
+bool HeaderChecker::IsDependencyOf(const Target* search_for,
+ const Target* search_from,
+ bool require_permitted,
+ Chain* chain) const {
+ // This method conducts a breadth-first search through the dependency graph
+ // to find a shortest chain from search_from to search_for.
+ //
+ // work_queue maintains a queue of targets which need to be considered as
+ // part of this chain, in the order they were first traversed.
+ //
+ // Each time a new transitive dependency of search_from is discovered for
+ // the first time, it is added to work_queue and a "breadcrumb" is added,
+ // indicating which target it was reached from when first discovered.
+ //
+ // Once this search finds search_for, the breadcrumbs are used to reconstruct
+ // a shortest dependency chain (in reverse order) from search_from to
+ // search_for.
+
+ std::map<const Target*, ChainLink> breadcrumbs;
+ std::queue<ChainLink> work_queue;
+ work_queue.push(ChainLink(search_from, true));
+
+ bool first_time = true;
+ while (!work_queue.empty()) {
+ ChainLink cur_link = work_queue.front();
+ const Target* target = cur_link.target;
+ work_queue.pop();
+
+ if (target == search_for) {
+ // Found it! Reconstruct the chain.
+ chain->clear();
+ while (target != search_from) {
+ chain->push_back(cur_link);
+ cur_link = breadcrumbs[target];
+ target = cur_link.target;
+ }
+ chain->push_back(ChainLink(search_from, true));
+ return true;
+ }
+
+ // Always consider public dependencies as possibilities.
+ for (const auto& dep : target->public_deps()) {
+ if (breadcrumbs.insert(std::make_pair(dep.ptr, cur_link)).second)
+ work_queue.push(ChainLink(dep.ptr, true));
+ }
+
+ if (first_time || !require_permitted) {
+ // Consider all dependencies since all target paths are allowed, so add
+ // in private ones. Also do this the first time through the loop, since
+ // a target can include headers from its direct deps regardless of
+ // public/private-ness.
+ first_time = false;
+ for (const auto& dep : target->private_deps()) {
+ if (breadcrumbs.insert(std::make_pair(dep.ptr, cur_link)).second)
+ work_queue.push(ChainLink(dep.ptr, false));
+ }
+ }
+ }
+
+ return false;
+}
+
+Err HeaderChecker::MakeUnreachableError(
+ const InputFile& source_file,
+ const LocationRange& range,
+ const Target* from_target,
+ const TargetVector& targets) {
+ // Normally the toolchains will all match, but when cross-compiling, we can
+ // get targets with more than one toolchain in the list of possibilities.
+ std::vector<const Target*> targets_with_matching_toolchains;
+ std::vector<const Target*> targets_with_other_toolchains;
+ for (const TargetInfo& candidate : targets) {
+ if (candidate.target->toolchain() == from_target->toolchain())
+ targets_with_matching_toolchains.push_back(candidate.target);
+ else
+ targets_with_other_toolchains.push_back(candidate.target);
+ }
+
+ // It's common when cross-compiling to have a target with the same file in
+ // more than one toolchain. We could output all of them, but this is
+ // generally confusing to people (most end-users won't understand toolchains
+ // well).
+ //
+ // So delete any candidates in other toolchains that also appear in the same
+ // toolchain as the from_target.
+ for (int other_index = 0;
+ other_index < static_cast<int>(targets_with_other_toolchains.size());
+ other_index++) {
+ for (const Target* cur_matching : targets_with_matching_toolchains) {
+ if (TargetLabelsMatchExceptToolchain(
+ cur_matching, targets_with_other_toolchains[other_index])) {
+ // Found a duplicate, erase it.
+ targets_with_other_toolchains.erase(
+ targets_with_other_toolchains.begin() + other_index);
+ other_index--;
+ break;
+ }
+ }
+ }
+
+ // Only display toolchains on labels if they don't all match.
+ bool include_toolchain = !targets_with_other_toolchains.empty();
+
+ std::string msg = "It is not in any dependency of\n " +
+ from_target->label().GetUserVisibleName(include_toolchain);
+ msg += "\nThe include file is in the target(s):\n";
+ for (const auto& target : targets_with_matching_toolchains)
+ msg += " " + target->label().GetUserVisibleName(include_toolchain) + "\n";
+ for (const auto& target : targets_with_other_toolchains)
+ msg += " " + target->label().GetUserVisibleName(include_toolchain) + "\n";
+ if (targets_with_other_toolchains.size() +
+ targets_with_matching_toolchains.size() > 1)
+ msg += "at least one of ";
+ msg += "which should somehow be reachable.";
+
+ // Danger: must call CreatePersistentRange to put in Err.
+ return Err(CreatePersistentRange(source_file, range),
+ "Include not allowed.", msg);
+}
+
diff --git a/chromium/tools/gn/header_checker.h b/chromium/tools/gn/header_checker.h
new file mode 100644
index 00000000000..8f9212f1f0c
--- /dev/null
+++ b/chromium/tools/gn/header_checker.h
@@ -0,0 +1,183 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_HEADER_CHECKER_H_
+#define TOOLS_GN_HEADER_CHECKER_H_
+
+#include <map>
+#include <set>
+#include <vector>
+
+#include "base/gtest_prod_util.h"
+#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "base/run_loop.h"
+#include "base/strings/string_piece.h"
+#include "base/synchronization/lock.h"
+#include "tools/gn/err.h"
+
+class BuildSettings;
+class InputFile;
+class Label;
+class LocationRange;
+class SourceFile;
+class Target;
+
+namespace base {
+class MessageLoop;
+}
+
+class HeaderChecker : public base::RefCountedThreadSafe<HeaderChecker> {
+ public:
+ // Represents a dependency chain.
+ struct ChainLink {
+ ChainLink() : target(nullptr), is_public(false) {}
+ ChainLink(const Target* t, bool p) : target(t), is_public(p) {}
+
+ const Target* target;
+
+ // True when the dependency on this target is public.
+ bool is_public;
+
+ // Used for testing.
+ bool operator==(const ChainLink& other) const {
+ return target == other.target && is_public == other.is_public;
+ }
+ };
+ typedef std::vector<ChainLink> Chain;
+
+ HeaderChecker(const BuildSettings* build_settings,
+ const std::vector<const Target*>& targets);
+
+ // Runs the check. The targets in to_check will be checked.
+ //
+ // This assumes that the current thread already has a message loop. On
+ // error, fills the given vector with the errors and returns false. Returns
+ // true on success.
+ //
+ // force_check, if true, will override targets opting out of header checking
+ // with "check_includes = false" and will check them anyway.
+ bool Run(const std::vector<const Target*>& to_check,
+ bool force_check,
+ std::vector<Err>* errors);
+
+ private:
+ friend class base::RefCountedThreadSafe<HeaderChecker>;
+ FRIEND_TEST_ALL_PREFIXES(HeaderCheckerTest, IsDependencyOf);
+ FRIEND_TEST_ALL_PREFIXES(HeaderCheckerTest, CheckInclude);
+ FRIEND_TEST_ALL_PREFIXES(HeaderCheckerTest, PublicFirst);
+ FRIEND_TEST_ALL_PREFIXES(HeaderCheckerTest, CheckIncludeAllowCircular);
+ ~HeaderChecker();
+
+ struct TargetInfo {
+ TargetInfo() : target(nullptr), is_public(false), is_generated(false) {}
+ TargetInfo(const Target* t, bool is_pub, bool is_gen)
+ : target(t),
+ is_public(is_pub),
+ is_generated(is_gen) {
+ }
+
+ const Target* target;
+
+ // True if the file is public in the given target.
+ bool is_public;
+
+ // True if this file is generated and won't actually exist on disk.
+ bool is_generated;
+ };
+
+ typedef std::vector<TargetInfo> TargetVector;
+ typedef std::map<SourceFile, TargetVector> FileMap;
+
+  // Backend for Run() that takes the list of files to check. The errors_ list
+  // will be populated on failure.
+ void RunCheckOverFiles(const FileMap& flies, bool force_check);
+
+ void DoWork(const Target* target, const SourceFile& file);
+
+ // Adds the sources and public files from the given target to the given map.
+ static void AddTargetToFileMap(const Target* target, FileMap* dest);
+
+ // Returns true if the given file is in the output directory.
+ bool IsFileInOuputDir(const SourceFile& file) const;
+
+ // Resolves the contents of an include to a SourceFile.
+ SourceFile SourceFileForInclude(const base::StringPiece& input) const;
+
+ // from_target is the target the file was defined from. It will be used in
+ // error messages.
+ bool CheckFile(const Target* from_target,
+ const SourceFile& file,
+ Err* err) const;
+
+ // Checks that the given file in the given target can include the given
+ // include file. If disallowed, returns false and sets the error. The
+ // range indicates the location of the include in the file for error
+ // reporting.
+ bool CheckInclude(const Target* from_target,
+ const InputFile& source_file,
+ const SourceFile& include_file,
+ const LocationRange& range,
+ Err* err) const;
+
+ // Returns true if the given search_for target is a dependency of
+ // search_from.
+ //
+ // If found, the vector given in "chain" will be filled with the reverse
+ // dependency chain from the dest target (chain[0] = search_for) to the src
+ // target (chain[chain.size() - 1] = search_from).
+ //
+ // Chains with permitted dependencies will be considered first. If a
+ // permitted match is found, *is_permitted will be set to true. A chain with
+ // indirect, non-public dependencies will only be considered if there are no
+ // public or direct chains. In this case, *is_permitted will be false.
+ //
+ // A permitted dependency is a sequence of public dependencies. The first
+ // one may be private, since a direct dependency always allows headers to be
+ // included.
+ bool IsDependencyOf(const Target* search_for,
+ const Target* search_from,
+ Chain* chain,
+ bool* is_permitted) const;
+
+  // For internal use by the previous overload of IsDependencyOf. If
+  // require_permitted is true, only public dependency chains are searched.
+ bool IsDependencyOf(const Target* search_for,
+ const Target* search_from,
+ bool require_permitted,
+ Chain* chain) const;
+
+ // Makes a very descriptive error message for when an include is disallowed
+ // from a given from_target, with a missing dependency to one of the given
+ // targets.
+ static Err MakeUnreachableError(const InputFile& source_file,
+ const LocationRange& range,
+ const Target* from_target,
+ const TargetVector& targets);
+
+ // Non-locked variables ------------------------------------------------------
+ //
+ // These are initialized during construction (which happens on one thread)
+ // and are not modified after, so any thread can read these without locking.
+
+ base::MessageLoop* main_loop_;
+ base::RunLoop main_thread_runner_;
+
+ const BuildSettings* build_settings_;
+
+ // Maps source files to targets it appears in (usually just one target).
+ FileMap file_map_;
+
+ // Locked variables ----------------------------------------------------------
+ //
+ // These are mutable during runtime and require locking.
+
+ base::Lock lock_;
+
+ std::vector<Err> errors_;
+
+ DISALLOW_COPY_AND_ASSIGN(HeaderChecker);
+};
+
+#endif // TOOLS_GN_HEADER_CHECKER_H_
diff --git a/chromium/tools/gn/header_checker_unittest.cc b/chromium/tools/gn/header_checker_unittest.cc
new file mode 100644
index 00000000000..19d5e4054fd
--- /dev/null
+++ b/chromium/tools/gn/header_checker_unittest.cc
@@ -0,0 +1,290 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <vector>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/config.h"
+#include "tools/gn/header_checker.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/target.h"
+#include "tools/gn/test_with_scope.h"
+
+namespace {
+
+class HeaderCheckerTest : public testing::Test {
+ public:
+ HeaderCheckerTest()
+ : a_(setup_.settings(), Label(SourceDir("//a/"), "a")),
+ b_(setup_.settings(), Label(SourceDir("//b/"), "b")),
+ c_(setup_.settings(), Label(SourceDir("//c/"), "c")),
+ d_(setup_.settings(), Label(SourceDir("//d/"), "d")) {
+ a_.set_output_type(Target::SOURCE_SET);
+ b_.set_output_type(Target::SOURCE_SET);
+ c_.set_output_type(Target::SOURCE_SET);
+ d_.set_output_type(Target::SOURCE_SET);
+
+ Err err;
+ a_.SetToolchain(setup_.toolchain(), &err);
+ b_.SetToolchain(setup_.toolchain(), &err);
+ c_.SetToolchain(setup_.toolchain(), &err);
+ d_.SetToolchain(setup_.toolchain(), &err);
+
+ a_.public_deps().push_back(LabelTargetPair(&b_));
+ b_.public_deps().push_back(LabelTargetPair(&c_));
+
+ // Start with all public visibility.
+ a_.visibility().SetPublic();
+ b_.visibility().SetPublic();
+ c_.visibility().SetPublic();
+ d_.visibility().SetPublic();
+
+ d_.OnResolved(&err);
+ c_.OnResolved(&err);
+ b_.OnResolved(&err);
+ a_.OnResolved(&err);
+
+ targets_.push_back(&a_);
+ targets_.push_back(&b_);
+ targets_.push_back(&c_);
+ targets_.push_back(&d_);
+ }
+
+ protected:
+ Scheduler scheduler_;
+
+ TestWithScope setup_;
+
+ // Some headers that are automatically set up with a public dependency chain.
+ // a -> b -> c. D is unconnected.
+ Target a_;
+ Target b_;
+ Target c_;
+ Target d_;
+
+ std::vector<const Target*> targets_;
+};
+
+} // namespace
+
+TEST_F(HeaderCheckerTest, IsDependencyOf) {
+ scoped_refptr<HeaderChecker> checker(
+ new HeaderChecker(setup_.build_settings(), targets_));
+
+ // Add a target P ("private") that privately depends on C, and hook up the
+ // chain so that A -> P -> C. A will depend on C via two different paths.
+ Err err;
+ Target p(setup_.settings(), Label(SourceDir("//p/"), "p"));
+ p.set_output_type(Target::SOURCE_SET);
+ p.SetToolchain(setup_.toolchain(), &err);
+ EXPECT_FALSE(err.has_error());
+ p.private_deps().push_back(LabelTargetPair(&c_));
+ p.visibility().SetPublic();
+ p.OnResolved(&err);
+
+ a_.public_deps().push_back(LabelTargetPair(&p));
+
+ // A does not depend on itself.
+ bool is_permitted = false;
+ HeaderChecker::Chain chain;
+ EXPECT_FALSE(checker->IsDependencyOf(&a_, &a_, &chain, &is_permitted));
+
+ // A depends publicly on B.
+ chain.clear();
+ is_permitted = false;
+ EXPECT_TRUE(checker->IsDependencyOf(&b_, &a_, &chain, &is_permitted));
+ ASSERT_EQ(2u, chain.size());
+ EXPECT_EQ(HeaderChecker::ChainLink(&b_, true), chain[0]);
+ EXPECT_EQ(HeaderChecker::ChainLink(&a_, true), chain[1]);
+ EXPECT_TRUE(is_permitted);
+
+ // A indirectly depends on C. The "public" dependency path through B should
+ // be identified.
+ chain.clear();
+ is_permitted = false;
+ EXPECT_TRUE(checker->IsDependencyOf(&c_, &a_, &chain, &is_permitted));
+ ASSERT_EQ(3u, chain.size());
+ EXPECT_EQ(HeaderChecker::ChainLink(&c_, true), chain[0]);
+ EXPECT_EQ(HeaderChecker::ChainLink(&b_, true), chain[1]);
+ EXPECT_EQ(HeaderChecker::ChainLink(&a_, true), chain[2]);
+ EXPECT_TRUE(is_permitted);
+
+ // C does not depend on A.
+ chain.clear();
+ is_permitted = false;
+ EXPECT_FALSE(checker->IsDependencyOf(&a_, &c_, &chain, &is_permitted));
+ EXPECT_TRUE(chain.empty());
+ EXPECT_FALSE(is_permitted);
+
+ // Remove the B -> C public dependency, leaving P's private dep on C the only
+ // path from A to C. This should now be found.
+ chain.clear();
+ EXPECT_EQ(&c_, b_.public_deps()[0].ptr); // Validate it's the right one.
+ b_.public_deps().erase(b_.public_deps().begin());
+ EXPECT_TRUE(checker->IsDependencyOf(&c_, &a_, &chain, &is_permitted));
+ EXPECT_EQ(3u, chain.size());
+ EXPECT_EQ(HeaderChecker::ChainLink(&c_, false), chain[0]);
+ EXPECT_EQ(HeaderChecker::ChainLink(&p, true), chain[1]);
+ EXPECT_EQ(HeaderChecker::ChainLink(&a_, true), chain[2]);
+ EXPECT_FALSE(is_permitted);
+
+ // P privately depends on C. That dependency should be OK since it's only
+ // one hop.
+ chain.clear();
+ is_permitted = false;
+ EXPECT_TRUE(checker->IsDependencyOf(&c_, &p, &chain, &is_permitted));
+ ASSERT_EQ(2u, chain.size());
+ EXPECT_EQ(HeaderChecker::ChainLink(&c_, false), chain[0]);
+ EXPECT_EQ(HeaderChecker::ChainLink(&p, true), chain[1]);
+ EXPECT_TRUE(is_permitted);
+}
+
+TEST_F(HeaderCheckerTest, CheckInclude) {
+ InputFile input_file(SourceFile("//some_file.cc"));
+ input_file.SetContents(std::string());
+ LocationRange range; // Dummy value.
+
+  // Add a disconnected target d with a header to check that you have to
+  // depend on a target listing a header.
+ SourceFile d_header("//d_header.h");
+ d_.sources().push_back(SourceFile(d_header));
+
+ // Add a header on B and say everything in B is public.
+ SourceFile b_public("//b_public.h");
+ b_.sources().push_back(b_public);
+ c_.set_all_headers_public(true);
+
+ // Add a public and private header on C.
+ SourceFile c_public("//c_public.h");
+ SourceFile c_private("//c_private.h");
+ c_.sources().push_back(c_private);
+ c_.public_headers().push_back(c_public);
+ c_.set_all_headers_public(false);
+
+ // Create another toolchain.
+ Settings other_settings(setup_.build_settings(), "other/");
+ Toolchain other_toolchain(&other_settings,
+ Label(SourceDir("//toolchain/"), "other"));
+ TestWithScope::SetupToolchain(&other_toolchain);
+ other_settings.set_toolchain_label(other_toolchain.label());
+ other_settings.set_default_toolchain_label(setup_.toolchain()->label());
+
+ // Add a target in the other toolchain with a header in it that is not
+ // connected to any targets in the main toolchain.
+ Target otc(&other_settings, Label(SourceDir("//p/"), "otc",
+ other_toolchain.label().dir(), other_toolchain.label().name()));
+ otc.set_output_type(Target::SOURCE_SET);
+ Err err;
+ EXPECT_TRUE(otc.SetToolchain(&other_toolchain, &err));
+ otc.visibility().SetPublic();
+ targets_.push_back(&otc);
+
+ SourceFile otc_header("//otc_header.h");
+ otc.sources().push_back(otc_header);
+ EXPECT_TRUE(otc.OnResolved(&err));
+
+ scoped_refptr<HeaderChecker> checker(
+ new HeaderChecker(setup_.build_settings(), targets_));
+
+ // A file in target A can't include a header from D because A has no
+ // dependency on D.
+ EXPECT_FALSE(checker->CheckInclude(&a_, input_file, d_header, range, &err));
+ EXPECT_TRUE(err.has_error());
+
+ // A can include the public header in B.
+ err = Err();
+ EXPECT_TRUE(checker->CheckInclude(&a_, input_file, b_public, range, &err));
+ EXPECT_FALSE(err.has_error());
+
+ // Check A depending on the public and private headers in C.
+ err = Err();
+ EXPECT_TRUE(checker->CheckInclude(&a_, input_file, c_public, range, &err));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_FALSE(checker->CheckInclude(&a_, input_file, c_private, range, &err));
+ EXPECT_TRUE(err.has_error());
+
+ // A can depend on a random file unknown to the build.
+ err = Err();
+ EXPECT_TRUE(checker->CheckInclude(&a_, input_file, SourceFile("//random.h"),
+ range, &err));
+ EXPECT_FALSE(err.has_error());
+
+ // A can depend on a file present only in another toolchain even with no
+ // dependency path.
+ err = Err();
+ EXPECT_TRUE(checker->CheckInclude(&a_, input_file, otc_header, range, &err));
+ EXPECT_FALSE(err.has_error());
+}
+
+// A public chain of dependencies should always be identified first, even if
+// it is longer than a private one.
+TEST_F(HeaderCheckerTest, PublicFirst) {
+ // Now make a A -> Z -> D private dependency chain (one shorter than the
+ // public one to get to D).
+ Target z(setup_.settings(), Label(SourceDir("//a/"), "a"));
+ z.set_output_type(Target::SOURCE_SET);
+ Err err;
+ EXPECT_TRUE(z.SetToolchain(setup_.toolchain(), &err));
+ z.private_deps().push_back(LabelTargetPair(&d_));
+ EXPECT_TRUE(z.OnResolved(&err));
+ targets_.push_back(&z);
+
+ a_.private_deps().push_back(LabelTargetPair(&z));
+
+ // Check that D can be found from A, but since it's private, it will be
+ // marked as not permitted.
+ bool is_permitted = false;
+ HeaderChecker::Chain chain;
+ scoped_refptr<HeaderChecker> checker(
+ new HeaderChecker(setup_.build_settings(), targets_));
+ EXPECT_TRUE(checker->IsDependencyOf(&d_, &a_, &chain, &is_permitted));
+
+ EXPECT_FALSE(is_permitted);
+ ASSERT_EQ(3u, chain.size());
+ EXPECT_EQ(HeaderChecker::ChainLink(&d_, false), chain[0]);
+ EXPECT_EQ(HeaderChecker::ChainLink(&z, false), chain[1]);
+ EXPECT_EQ(HeaderChecker::ChainLink(&a_, true), chain[2]);
+
+ // Hook up D to the existing public A -> B -> C chain to make a long one, and
+ // search for D again.
+ c_.public_deps().push_back(LabelTargetPair(&d_));
+ checker = new HeaderChecker(setup_.build_settings(), targets_);
+ chain.clear();
+ EXPECT_TRUE(checker->IsDependencyOf(&d_, &a_, &chain, &is_permitted));
+
+ // This should have found the long public one.
+ EXPECT_TRUE(is_permitted);
+ ASSERT_EQ(4u, chain.size());
+ EXPECT_EQ(HeaderChecker::ChainLink(&d_, true), chain[0]);
+ EXPECT_EQ(HeaderChecker::ChainLink(&c_, true), chain[1]);
+ EXPECT_EQ(HeaderChecker::ChainLink(&b_, true), chain[2]);
+ EXPECT_EQ(HeaderChecker::ChainLink(&a_, true), chain[3]);
+}
+
+// Checks that the allow_circular_includes_from list works.
+TEST_F(HeaderCheckerTest, CheckIncludeAllowCircular) {
+ InputFile input_file(SourceFile("//some_file.cc"));
+ input_file.SetContents(std::string());
+ LocationRange range; // Dummy value.
+
+ // Add an include file to A.
+ SourceFile a_public("//a_public.h");
+ a_.sources().push_back(a_public);
+
+ scoped_refptr<HeaderChecker> checker(
+ new HeaderChecker(setup_.build_settings(), targets_));
+
+ // A depends on B. So B normally can't include headers from A.
+ Err err;
+ EXPECT_FALSE(checker->CheckInclude(&b_, input_file, a_public, range, &err));
+ EXPECT_TRUE(err.has_error());
+
+ // Add an allow_circular_includes_from on A that lists B.
+ a_.allow_circular_includes_from().insert(b_.label());
+
+ // Now the include from B to A should be allowed.
+ err = Err();
+ EXPECT_TRUE(checker->CheckInclude(&b_, input_file, a_public, range, &err));
+ EXPECT_FALSE(err.has_error());
+}
diff --git a/chromium/tools/gn/import_manager.cc b/chromium/tools/gn/import_manager.cc
new file mode 100644
index 00000000000..83cc09020cd
--- /dev/null
+++ b/chromium/tools/gn/import_manager.cc
@@ -0,0 +1,93 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/import_manager.h"
+
+#include <memory>
+
+#include "base/stl_util.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/scope_per_file_provider.h"
+
+namespace {
+
+// Returns a newly-allocated scope on success, null on failure.
+Scope* UncachedImport(const Settings* settings,
+ const SourceFile& file,
+ const ParseNode* node_for_err,
+ Err* err) {
+ const ParseNode* node = g_scheduler->input_file_manager()->SyncLoadFile(
+ node_for_err->GetRange(), settings->build_settings(), file, err);
+ if (!node)
+ return nullptr;
+
+ std::unique_ptr<Scope> scope(new Scope(settings->base_config()));
+ scope->set_source_dir(file.GetDir());
+
+ // Don't allow ScopePerFileProvider to provide target-related variables.
+ // These will be relative to the imported file, which is probably not what
+ // people mean when they use these.
+ ScopePerFileProvider per_file_provider(scope.get(), false);
+
+ scope->SetProcessingImport();
+ node->Execute(scope.get(), err);
+ if (err->has_error())
+ return nullptr;
+ scope->ClearProcessingImport();
+
+ return scope.release();
+}
+
+}  // namespace
+
+ImportManager::ImportManager() {
+}
+
+ImportManager::~ImportManager() {
+ STLDeleteContainerPairSecondPointers(imports_.begin(), imports_.end());
+}
+
+bool ImportManager::DoImport(const SourceFile& file,
+ const ParseNode* node_for_err,
+ Scope* scope,
+ Err* err) {
+ // See if we have a cached import, but be careful to actually do the scope
+ // copying outside of the lock.
+ const Scope* imported_scope = nullptr;
+ {
+ base::AutoLock lock(lock_);
+ ImportMap::const_iterator found = imports_.find(file);
+ if (found != imports_.end())
+ imported_scope = found->second;
+ }
+
+ if (!imported_scope) {
+ // Do a new import of the file.
+ imported_scope = UncachedImport(scope->settings(), file,
+ node_for_err, err);
+ if (!imported_scope)
+ return false;
+
+ // We loaded the file outside the lock. This means that there could be a
+ // race and the file was already loaded on a background thread. Recover
+ // from this and use the existing one if that happens.
+ {
+ base::AutoLock lock(lock_);
+ ImportMap::const_iterator found = imports_.find(file);
+ if (found != imports_.end()) {
+ delete imported_scope;
+ imported_scope = found->second;
+ } else {
+ imports_[file] = imported_scope;
+ }
+ }
+ }
+
+ Scope::MergeOptions options;
+ options.skip_private_vars = true;
+ options.mark_dest_used = true; // Don't require all imported values be used.
+ return imported_scope->NonRecursiveMergeTo(scope, options, node_for_err,
+ "import", err);
+}
diff --git a/chromium/tools/gn/import_manager.h b/chromium/tools/gn/import_manager.h
new file mode 100644
index 00000000000..70aeee727c5
--- /dev/null
+++ b/chromium/tools/gn/import_manager.h
@@ -0,0 +1,42 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_IMPORT_MANAGER_H_
+#define TOOLS_GN_IMPORT_MANAGER_H_
+
+#include <map>
+
+#include "base/macros.h"
+#include "base/synchronization/lock.h"
+
+class Err;
+class ParseNode;
+class Scope;
+class SourceFile;
+
+// Provides a cache of the results of importing scopes so the results can
+// be re-used rather than running the imported files multiple times.
+class ImportManager {
+ public:
+ ImportManager();
+ ~ImportManager();
+
+ // Does an import of the given file into the given scope. On error, sets the
+ // error and returns false.
+ bool DoImport(const SourceFile& file,
+ const ParseNode* node_for_err,
+ Scope* scope,
+ Err* err);
+
+ private:
+ base::Lock lock_;
+
+ // Owning pointers to the scopes.
+ typedef std::map<SourceFile, const Scope*> ImportMap;
+ ImportMap imports_;
+
+ DISALLOW_COPY_AND_ASSIGN(ImportManager);
+};
+
+#endif // TOOLS_GN_IMPORT_MANAGER_H_
diff --git a/chromium/tools/gn/inherited_libraries.cc b/chromium/tools/gn/inherited_libraries.cc
new file mode 100644
index 00000000000..06ac9ae4480
--- /dev/null
+++ b/chromium/tools/gn/inherited_libraries.cc
@@ -0,0 +1,76 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/inherited_libraries.h"
+
+#include "tools/gn/target.h"
+
+InheritedLibraries::InheritedLibraries() {
+}
+
+InheritedLibraries::~InheritedLibraries() {
+}
+
+std::vector<const Target*> InheritedLibraries::GetOrdered() const {
+ std::vector<const Target*> result;
+ result.resize(map_.size());
+
+ // The indices in the map should be from 0 to the number of items in the
+ // map, so insert directly into the result (with some sanity checks).
+ for (const auto& pair : map_) {
+ size_t index = pair.second.index;
+ DCHECK(index < result.size());
+ DCHECK(!result[index]);
+ result[index] = pair.first;
+ }
+
+ return result;
+}
+
+std::vector<std::pair<const Target*, bool>>
+InheritedLibraries::GetOrderedAndPublicFlag() const {
+ std::vector<std::pair<const Target*, bool>> result;
+ result.resize(map_.size());
+
+ for (const auto& pair : map_) {
+ size_t index = pair.second.index;
+ DCHECK(index < result.size());
+ DCHECK(!result[index].first);
+ result[index] = std::make_pair(pair.first, pair.second.is_public);
+ }
+
+ return result;
+}
+
+void InheritedLibraries::Append(const Target* target, bool is_public) {
+ // Try to insert a new node.
+ auto insert_result = map_.insert(
+ std::make_pair(target, Node(map_.size(), is_public)));
+
+ if (!insert_result.second) {
+ // Element already present, insert failed and insert_result indicates the
+ // old one. The old one may need to have its public flag updated.
+ if (is_public) {
+ Node& existing_node = insert_result.first->second;
+ existing_node.is_public = true;
+ }
+ }
+}
+
+void InheritedLibraries::AppendInherited(const InheritedLibraries& other,
+ bool is_public) {
+ // Append all items in order, mark them public only if they're already public
+ // and we're adding them publicly.
+ for (const auto& cur : other.GetOrderedAndPublicFlag())
+ Append(cur.first, is_public && cur.second);
+}
+
+void InheritedLibraries::AppendPublicSharedLibraries(
+ const InheritedLibraries& other,
+ bool is_public) {
+ for (const auto& cur : other.GetOrderedAndPublicFlag()) {
+ if (cur.first->output_type() == Target::SHARED_LIBRARY && cur.second)
+ Append(cur.first, is_public);
+ }
+}
diff --git a/chromium/tools/gn/inherited_libraries.h b/chromium/tools/gn/inherited_libraries.h
new file mode 100644
index 00000000000..e8568b26766
--- /dev/null
+++ b/chromium/tools/gn/inherited_libraries.h
@@ -0,0 +1,71 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_INHERITED_LIBRARIES_H_
+#define TOOLS_GN_INHERITED_LIBRARIES_H_
+
+#include <stddef.h>
+
+#include <map>
+#include <utility>
+#include <vector>
+
+#include "base/macros.h"
+
+class Target;
+
+// Represents an ordered uniquified set of all shared/static libraries for
+// a given target. These are pushed up the dependency tree.
+//
+// Maintaining the order is important so GN links all libraries in the same
+// order specified in the build files.
+//
+// Since this list is uniquified, appending to the list will not actually
+// append a new item if the target already exists. However, the existing one
+// may have its is_public flag updated. "Public" always wins, so is_public will
+// be true if any dependency with that name has been set to public.
+class InheritedLibraries {
+ public:
+ InheritedLibraries();
+ ~InheritedLibraries();
+
+ // Returns the list of dependencies in order, optionally with the flag
+ // indicating whether the dependency is public.
+ std::vector<const Target*> GetOrdered() const;
+ std::vector<std::pair<const Target*, bool>> GetOrderedAndPublicFlag() const;
+
+ // Adds a single dependency to the end of the list. See note on adding above.
+ void Append(const Target* target, bool is_public);
+
+ // Appends all items from the "other" list to the current one. The is_public
+ // parameter indicates how the current target depends on the items in
+ // "other". If is_public is true, the existing public flags of the appended
+ // items will be preserved (propagating the public-ness up the dependency
+ // chain). If is_public is false, all deps will be added as private since
+ // the current target isn't forwarding them.
+ void AppendInherited(const InheritedLibraries& other, bool is_public);
+
+ // Like AppendInherited but only appends the items in "other" that are of
+ // type SHARED_LIBRARY and only when they're marked public. This is used
+ // to push shared libraries up the dependency chain, following only public
+ // deps, to dependent targets that need to use them.
+ void AppendPublicSharedLibraries(const InheritedLibraries& other,
+ bool is_public);
+
+ private:
+ struct Node {
+ Node() : index(static_cast<size_t>(-1)), is_public(false) {}
+ Node(size_t i, bool p) : index(i), is_public(p) {}
+
+ size_t index;
+ bool is_public;
+ };
+
+ typedef std::map<const Target*, Node> LibraryMap;
+ LibraryMap map_;
+
+ DISALLOW_COPY_AND_ASSIGN(InheritedLibraries);
+};
+
+#endif // TOOLS_GN_INHERITED_LIBRARIES_H_
diff --git a/chromium/tools/gn/inherited_libraries_unittest.cc b/chromium/tools/gn/inherited_libraries_unittest.cc
new file mode 100644
index 00000000000..51152095352
--- /dev/null
+++ b/chromium/tools/gn/inherited_libraries_unittest.cc
@@ -0,0 +1,135 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/inherited_libraries.h"
+#include "tools/gn/target.h"
+#include "tools/gn/test_with_scope.h"
+
+namespace {
+
+// In these tests, Pair can't be used conveniently because the
+// "const" won't be inferred and the types won't match. This helper makes the
+// right type of pair with the const Target.
+std::pair<const Target*, bool> Pair(const Target* t, bool b) {
+ return std::pair<const Target*, bool>(t, b);
+}
+
+} // namespace
+
+TEST(InheritedLibraries, Unique) {
+ TestWithScope setup;
+
+ Target a(setup.settings(), Label(SourceDir("//foo/"), "a"));
+ Target b(setup.settings(), Label(SourceDir("//foo/"), "b"));
+
+ // Setup, add the two targets as private.
+ InheritedLibraries libs;
+ libs.Append(&a, false);
+ libs.Append(&b, false);
+ auto result = libs.GetOrderedAndPublicFlag();
+ ASSERT_EQ(2u, result.size());
+ EXPECT_EQ(Pair(&a, false), result[0]);
+ EXPECT_EQ(Pair(&b, false), result[1]);
+
+ // Add again as private, this should be a NOP.
+ libs.Append(&a, false);
+ libs.Append(&b, false);
+ result = libs.GetOrderedAndPublicFlag();
+ ASSERT_EQ(2u, result.size());
+ EXPECT_EQ(Pair(&a, false), result[0]);
+ EXPECT_EQ(Pair(&b, false), result[1]);
+
+ // Add as public, this should make both public.
+ libs.Append(&a, true);
+ libs.Append(&b, true);
+ result = libs.GetOrderedAndPublicFlag();
+ ASSERT_EQ(2u, result.size());
+ EXPECT_EQ(Pair(&a, true), result[0]);
+ EXPECT_EQ(Pair(&b, true), result[1]);
+
+ // Add again private, they should stay public.
+ libs.Append(&a, false);
+ libs.Append(&b, false);
+ result = libs.GetOrderedAndPublicFlag();
+ ASSERT_EQ(2u, result.size());
+ EXPECT_EQ(Pair(&a, true), result[0]);
+ EXPECT_EQ(Pair(&b, true), result[1]);
+}
+
+TEST(InheritedLibraries, AppendInherited) {
+ TestWithScope setup;
+
+ Target a(setup.settings(), Label(SourceDir("//foo/"), "a"));
+ Target b(setup.settings(), Label(SourceDir("//foo/"), "b"));
+ Target w(setup.settings(), Label(SourceDir("//foo/"), "w"));
+ Target x(setup.settings(), Label(SourceDir("//foo/"), "x"));
+ Target y(setup.settings(), Label(SourceDir("//foo/"), "y"));
+ Target z(setup.settings(), Label(SourceDir("//foo/"), "z"));
+
+ InheritedLibraries libs;
+ libs.Append(&a, false);
+ libs.Append(&b, false);
+
+ // Appending these things with private inheritance should make them private,
+ // no matter how they're listed in the appended class.
+ InheritedLibraries append_private;
+ append_private.Append(&a, true);
+ append_private.Append(&b, false);
+ append_private.Append(&w, true);
+ append_private.Append(&x, false);
+ libs.AppendInherited(append_private, false);
+
+ auto result = libs.GetOrderedAndPublicFlag();
+ ASSERT_EQ(4u, result.size());
+ EXPECT_EQ(Pair(&a, false), result[0]);
+ EXPECT_EQ(Pair(&b, false), result[1]);
+ EXPECT_EQ(Pair(&w, false), result[2]);
+ EXPECT_EQ(Pair(&x, false), result[3]);
+
+ // Appending these things with public inheritance should convert them.
+ InheritedLibraries append_public;
+ append_public.Append(&a, true);
+ append_public.Append(&b, false);
+ append_public.Append(&y, true);
+ append_public.Append(&z, false);
+ libs.AppendInherited(append_public, true);
+
+ result = libs.GetOrderedAndPublicFlag();
+ ASSERT_EQ(6u, result.size());
+ EXPECT_EQ(Pair(&a, true), result[0]); // Converted to public.
+ EXPECT_EQ(Pair(&b, false), result[1]);
+ EXPECT_EQ(Pair(&w, false), result[2]);
+ EXPECT_EQ(Pair(&x, false), result[3]);
+ EXPECT_EQ(Pair(&y, true), result[4]); // Appended as public.
+ EXPECT_EQ(Pair(&z, false), result[5]);
+}
+
+TEST(InheritedLibraries, AppendPublicSharedLibraries) {
+ TestWithScope setup;
+ InheritedLibraries append;
+
+ // Two source sets.
+ Target set_pub(setup.settings(), Label(SourceDir("//foo/"), "set_pub"));
+ set_pub.set_output_type(Target::SOURCE_SET);
+ append.Append(&set_pub, true);
+ Target set_priv(setup.settings(), Label(SourceDir("//foo/"), "set_priv"));
+ set_priv.set_output_type(Target::SOURCE_SET);
+ append.Append(&set_priv, false);
+
+ // Two shared libraries.
+ Target sh_pub(setup.settings(), Label(SourceDir("//foo/"), "sh_pub"));
+ sh_pub.set_output_type(Target::SHARED_LIBRARY);
+ append.Append(&sh_pub, true);
+ Target sh_priv(setup.settings(), Label(SourceDir("//foo/"), "sh_priv"));
+ sh_priv.set_output_type(Target::SHARED_LIBRARY);
+ append.Append(&sh_priv, false);
+
+ InheritedLibraries libs;
+ libs.AppendPublicSharedLibraries(append, true);
+
+ auto result = libs.GetOrderedAndPublicFlag();
+ ASSERT_EQ(1u, result.size());
+ EXPECT_EQ(Pair(&sh_pub, true), result[0]);
+}
diff --git a/chromium/tools/gn/input_conversion.cc b/chromium/tools/gn/input_conversion.cc
new file mode 100644
index 00000000000..043737a72ac
--- /dev/null
+++ b/chromium/tools/gn/input_conversion.cc
@@ -0,0 +1,213 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/input_conversion.h"
+
+#include <utility>
+
+#include "base/macros.h"
+#include "base/strings/string_split.h"
+#include "base/strings/string_util.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/err.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/label.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/parser.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/tokenizer.h"
+#include "tools/gn/value.h"
+
+namespace {
+
+enum ValueOrScope {
+ PARSE_VALUE, // Treat the input as an expression.
+ PARSE_SCOPE, // Treat the input as code and return the resulting scope.
+};
+
+// Sets the origin of the value and any nested values with the given node.
+Value ParseValueOrScope(const Settings* settings,
+ const std::string& input,
+ ValueOrScope what,
+ const ParseNode* origin,
+ Err* err) {
+ // The memory for these will be kept around by the input file manager
+ // so the origin parse nodes for the values will be preserved.
+ InputFile* input_file;
+ std::vector<Token>* tokens;
+ std::unique_ptr<ParseNode>* parse_root_ptr;
+ g_scheduler->input_file_manager()->AddDynamicInput(
+ SourceFile(), &input_file, &tokens, &parse_root_ptr);
+
+ input_file->SetContents(input);
+ if (origin) {
+ // This description will be the blame for any error messages caused by
+ // script parsing or if a value is blamed. It will say
+ // "Error at <...>:line:char" so here we try to make a string for <...>
+ // that reads well in this context.
+ input_file->set_friendly_name(
+ "dynamically parsed input that " +
+ origin->GetRange().begin().Describe(true) +
+ " loaded ");
+ } else {
+ input_file->set_friendly_name("dynamic input");
+ }
+
+ *tokens = Tokenizer::Tokenize(input_file, err);
+ if (err->has_error())
+ return Value();
+
+ // Parse the file according to what we're looking for.
+ if (what == PARSE_VALUE)
+ *parse_root_ptr = Parser::ParseValue(*tokens, err);
+ else
+ *parse_root_ptr = Parser::Parse(*tokens, err); // Will return a Block.
+ if (err->has_error())
+ return Value();
+ ParseNode* parse_root = parse_root_ptr->get(); // For nicer syntax below.
+
+ // It's valid for the result to be a null pointer, this just means that the
+ // script returned nothing.
+ if (!parse_root)
+ return Value();
+
+ std::unique_ptr<Scope> scope(new Scope(settings));
+ Value result = parse_root->Execute(scope.get(), err);
+ if (err->has_error())
+ return Value();
+
+ // When we want the result as a scope, the result is actually the scope
+ // we made, rather than the result of running the block (which will be empty).
+ if (what == PARSE_SCOPE) {
+ DCHECK(result.type() == Value::NONE);
+ result = Value(origin, std::move(scope));
+ }
+ return result;
+}
+
+Value ParseList(const std::string& input, const ParseNode* origin, Err* err) {
+ Value ret(origin, Value::LIST);
+ std::vector<std::string> as_lines = base::SplitString(
+ input, "\n", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
+
+ // Trim one empty line from the end since the last line might end in a
+ // newline. If the user wants more trimming, they'll specify "trim" in the
+ // input conversion options.
+ if (!as_lines.empty() && as_lines[as_lines.size() - 1].empty())
+ as_lines.resize(as_lines.size() - 1);
+
+ ret.list_value().reserve(as_lines.size());
+ for (const auto& line : as_lines)
+ ret.list_value().push_back(Value(origin, line));
+ return ret;
+}
+
+// Backend for ConvertInputToValue, this takes the extracted string for the
+// input conversion so we can recursively call ourselves to handle the optional
+// "trim" prefix. This original value is also kept for the purposes of throwing
+// errors.
+Value DoConvertInputToValue(const Settings* settings,
+ const std::string& input,
+ const ParseNode* origin,
+ const Value& original_input_conversion,
+ const std::string& input_conversion,
+ Err* err) {
+ if (input_conversion.empty())
+ return Value(); // Empty string means discard the result.
+
+ const char kTrimPrefix[] = "trim ";
+ if (base::StartsWith(input_conversion, kTrimPrefix,
+ base::CompareCase::SENSITIVE)) {
+ std::string trimmed;
+ base::TrimWhitespaceASCII(input, base::TRIM_ALL, &trimmed);
+
+ // Remove "trim" prefix from the input conversion and re-run.
+ return DoConvertInputToValue(
+ settings, trimmed, origin, original_input_conversion,
+ input_conversion.substr(arraysize(kTrimPrefix) - 1), err);
+ }
+
+ if (input_conversion == "value")
+ return ParseValueOrScope(settings, input, PARSE_VALUE, origin, err);
+ if (input_conversion == "string")
+ return Value(origin, input);
+ if (input_conversion == "list lines")
+ return ParseList(input, origin, err);
+ if (input_conversion == "scope")
+ return ParseValueOrScope(settings, input, PARSE_SCOPE, origin, err);
+
+ *err = Err(original_input_conversion, "Not a valid input_conversion.",
+ "Have you considered a career in retail?");
+ return Value();
+}
+
+} // namespace
+
+extern const char kInputConversion_Help[] =
+ "input_conversion: Specifies how to transform input to a variable.\n"
+ "\n"
+ " input_conversion is an argument to read_file and exec_script that\n"
+ " specifies how the result of the read operation should be converted\n"
+ " into a variable.\n"
+ "\n"
+ " \"\" (the default)\n"
+ " Discard the result and return None.\n"
+ "\n"
+ " \"list lines\"\n"
+ " Return the file contents as a list, with a string for each line.\n"
+ " The newlines will not be present in the result. The last line may\n"
+ " or may not end in a newline.\n"
+ "\n"
+ " After splitting, each individual line will be trimmed of\n"
+ " whitespace on both ends.\n"
+ "\n"
+ " \"scope\"\n"
+ " Execute the block as GN code and return a scope with the\n"
+ " resulting values in it. If the input was:\n"
+ " a = [ \"hello.cc\", \"world.cc\" ]\n"
+ " b = 26\n"
+ " and you read the result into a variable named \"val\", then you\n"
+ " could access contents the \".\" operator on \"val\":\n"
+ " sources = val.a\n"
+ " some_count = val.b\n"
+ "\n"
+ " \"string\"\n"
+ " Return the file contents into a single string.\n"
+ "\n"
+ " \"value\"\n"
+ " Parse the input as if it was a literal rvalue in a buildfile.\n"
+ " Examples of typical program output using this mode:\n"
+ " [ \"foo\", \"bar\" ] (result will be a list)\n"
+ " or\n"
+ " \"foo bar\" (result will be a string)\n"
+ " or\n"
+ " 5 (result will be an integer)\n"
+ "\n"
+ " Note that if the input is empty, the result will be a null value\n"
+ " which will produce an error if assigned to a variable.\n"
+ "\n"
+ " \"trim ...\"\n"
+ " Prefixing any of the other transformations with the word \"trim\"\n"
+ " will result in whitespace being trimmed from the beginning and end\n"
+ " of the result before processing.\n"
+ "\n"
+ " Examples: \"trim string\" or \"trim list lines\"\n"
+ "\n"
+ " Note that \"trim value\" is useless because the value parser skips\n"
+ " whitespace anyway.\n";
+
+Value ConvertInputToValue(const Settings* settings,
+ const std::string& input,
+ const ParseNode* origin,
+ const Value& input_conversion_value,
+ Err* err) {
+ if (input_conversion_value.type() == Value::NONE)
+ return Value(); // Allow null inputs to mean discard the result.
+ if (!input_conversion_value.VerifyTypeIs(Value::STRING, err))
+ return Value();
+ return DoConvertInputToValue(settings, input, origin, input_conversion_value,
+ input_conversion_value.string_value(), err);
+}
diff --git a/chromium/tools/gn/input_conversion.h b/chromium/tools/gn/input_conversion.h
new file mode 100644
index 00000000000..e6d5f6eab32
--- /dev/null
+++ b/chromium/tools/gn/input_conversion.h
@@ -0,0 +1,30 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_INPUT_CONVERSION_H_
+#define TOOLS_GN_INPUT_CONVERSION_H_
+
+#include <string>
+
+class Err;
+class ParseNode;
+class Settings;
+class Value;
+
+extern const char kInputConversion_Help[];
+
+// Converts the given input string (is read from a file or output from a
+// script) to a Value. Conversions as specified in the input_conversion string
+// will be performed. The given origin will be used for constructing the
+// resulting Value.
+//
+// If the conversion string is invalid, the error will be set and an empty
+// value will be returned.
+Value ConvertInputToValue(const Settings* settings,
+ const std::string& input,
+ const ParseNode* origin,
+ const Value& input_conversion_value,
+ Err* err);
+
+#endif // TOOLS_GN_INPUT_CONVERSION_H_
diff --git a/chromium/tools/gn/input_conversion_unittest.cc b/chromium/tools/gn/input_conversion_unittest.cc
new file mode 100644
index 00000000000..2c4afeacc6b
--- /dev/null
+++ b/chromium/tools/gn/input_conversion_unittest.cc
@@ -0,0 +1,182 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/err.h"
+#include "tools/gn/input_conversion.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/test_with_scope.h"
+#include "tools/gn/value.h"
+
+namespace {
+
+// InputConversion needs a global scheduler object.
+class InputConversionTest : public testing::Test {
+ public:
+ InputConversionTest() {}
+
+ const Settings* settings() { return setup_.settings(); }
+
+ private:
+ TestWithScope setup_;
+
+ Scheduler scheduler_;
+};
+
+} // namespace
+
+TEST_F(InputConversionTest, String) {
+ Err err;
+ std::string input("\nfoo bar \n");
+ Value result = ConvertInputToValue(settings(), input, nullptr,
+ Value(nullptr, "string"), &err);
+ EXPECT_FALSE(err.has_error());
+ EXPECT_EQ(Value::STRING, result.type());
+ EXPECT_EQ(input, result.string_value());
+
+ // Test with trimming.
+ result = ConvertInputToValue(settings(), input, nullptr,
+ Value(nullptr, "trim string"), &err);
+ EXPECT_FALSE(err.has_error());
+ EXPECT_EQ(Value::STRING, result.type());
+ EXPECT_EQ("foo bar", result.string_value());
+}
+
+TEST_F(InputConversionTest, ListLines) {
+ Err err;
+ std::string input("\nfoo\nbar \n\n");
+ Value result = ConvertInputToValue(settings(), input, nullptr,
+ Value(nullptr, "list lines"), &err);
+ EXPECT_FALSE(err.has_error());
+ EXPECT_EQ(Value::LIST, result.type());
+ ASSERT_EQ(4u, result.list_value().size());
+ EXPECT_EQ("", result.list_value()[0].string_value());
+ EXPECT_EQ("foo", result.list_value()[1].string_value());
+ EXPECT_EQ("bar", result.list_value()[2].string_value());
+ EXPECT_EQ("", result.list_value()[3].string_value());
+
+ // Test with trimming.
+ result = ConvertInputToValue(settings(), input, nullptr,
+ Value(nullptr, "trim list lines"), &err);
+ EXPECT_FALSE(err.has_error());
+ EXPECT_EQ(Value::LIST, result.type());
+ ASSERT_EQ(2u, result.list_value().size());
+ EXPECT_EQ("foo", result.list_value()[0].string_value());
+ EXPECT_EQ("bar", result.list_value()[1].string_value());
+}
+
+TEST_F(InputConversionTest, ValueString) {
+ Err err;
+ std::string input("\"str\"");
+ Value result = ConvertInputToValue(settings(), input, nullptr,
+ Value(nullptr, "value"), &err);
+ EXPECT_FALSE(err.has_error());
+ EXPECT_EQ(Value::STRING, result.type());
+ EXPECT_EQ("str", result.string_value());
+}
+
+TEST_F(InputConversionTest, ValueInt) {
+ Err err;
+ std::string input("\n\n 6 \n ");
+ Value result = ConvertInputToValue(settings(), input, nullptr,
+ Value(nullptr, "value"), &err);
+ EXPECT_FALSE(err.has_error());
+ EXPECT_EQ(Value::INTEGER, result.type());
+ EXPECT_EQ(6, result.int_value());
+}
+
+TEST_F(InputConversionTest, ValueList) {
+ Err err;
+ std::string input("\n [ \"a\", 5]");
+ Value result = ConvertInputToValue(settings(), input, nullptr,
+ Value(nullptr, "value"), &err);
+ EXPECT_FALSE(err.has_error());
+ ASSERT_EQ(Value::LIST, result.type());
+ ASSERT_EQ(2u, result.list_value().size());
+ EXPECT_EQ("a", result.list_value()[0].string_value());
+ EXPECT_EQ(5, result.list_value()[1].int_value());
+}
+
+TEST_F(InputConversionTest, ValueDict) {
+ Err err;
+ std::string input("\n a = 5 b = \"foo\" c = a + 2");
+ Value result = ConvertInputToValue(settings(), input, nullptr,
+ Value(nullptr, "scope"), &err);
+ EXPECT_FALSE(err.has_error());
+ ASSERT_EQ(Value::SCOPE, result.type());
+
+ const Value* a_value = result.scope_value()->GetValue("a");
+ ASSERT_TRUE(a_value);
+ EXPECT_EQ(5, a_value->int_value());
+
+ const Value* b_value = result.scope_value()->GetValue("b");
+ ASSERT_TRUE(b_value);
+ EXPECT_EQ("foo", b_value->string_value());
+
+ const Value* c_value = result.scope_value()->GetValue("c");
+ ASSERT_TRUE(c_value);
+ EXPECT_EQ(7, c_value->int_value());
+
+ // Tests that when we get Values out of the input conversion, the resulting
+ // values have an origin set to something corresponding to the input.
+ const ParseNode* a_origin = a_value->origin();
+ ASSERT_TRUE(a_origin);
+ LocationRange a_range = a_origin->GetRange();
+ EXPECT_EQ(2, a_range.begin().line_number());
+ EXPECT_EQ(6, a_range.begin().column_number());
+
+ const InputFile* a_file = a_range.begin().file();
+ EXPECT_EQ(input, a_file->contents());
+}
+
+TEST_F(InputConversionTest, ValueEmpty) {
+ Err err;
+ Value result = ConvertInputToValue(settings(), "", nullptr,
+ Value(nullptr, "value"), &err);
+ EXPECT_FALSE(err.has_error());
+ EXPECT_EQ(Value::NONE, result.type());
+}
+
+TEST_F(InputConversionTest, ValueError) {
+ static const char* const kTests[] = {
+ "\n [ \"a\", 5\nfoo bar",
+
+ // Blocks not allowed.
+ "{ foo = 5 }",
+
+ // Function calls not allowed.
+ "print(5)",
+
+ // Trailing junk not allowed.
+ "233105-1",
+
+ // Non-literals hidden in arrays are not allowed.
+ "[233105 - 1]",
+ "[rebase_path(\"//\")]",
+ };
+
+ for (auto test : kTests) {
+ Err err;
+ std::string input(test);
+ Value result = ConvertInputToValue(settings(), input, nullptr,
+ Value(nullptr, "value"), &err);
+ EXPECT_TRUE(err.has_error()) << test;
+ }
+}
+
+// Passing none or the empty string for input conversion should ignore the
+// result.
+TEST_F(InputConversionTest, Ignore) {
+ Err err;
+ Value result = ConvertInputToValue(settings(), "foo", nullptr, Value(), &err);
+ EXPECT_FALSE(err.has_error());
+ EXPECT_EQ(Value::NONE, result.type());
+
+ result =
+ ConvertInputToValue(settings(), "foo", nullptr, Value(nullptr, ""), &err);
+ EXPECT_FALSE(err.has_error());
+ EXPECT_EQ(Value::NONE, result.type());
+}
diff --git a/chromium/tools/gn/input_file.cc b/chromium/tools/gn/input_file.cc
new file mode 100644
index 00000000000..74e419d4656
--- /dev/null
+++ b/chromium/tools/gn/input_file.cc
@@ -0,0 +1,31 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/input_file.h"
+
+#include "base/files/file_util.h"
+
+InputFile::InputFile(const SourceFile& name)
+ : name_(name),
+ dir_(name_.GetDir()),
+ contents_loaded_(false) {
+}
+
+InputFile::~InputFile() {
+}
+
+void InputFile::SetContents(const std::string& c) {
+ contents_loaded_ = true;
+ contents_ = c;
+}
+
+bool InputFile::Load(const base::FilePath& system_path) {
+ if (base::ReadFileToString(system_path, &contents_)) {
+ contents_loaded_ = true;
+ physical_name_ = system_path;
+ return true;
+ }
+ return false;
+}
+
diff --git a/chromium/tools/gn/input_file.h b/chromium/tools/gn/input_file.h
new file mode 100644
index 00000000000..49e5f855741
--- /dev/null
+++ b/chromium/tools/gn/input_file.h
@@ -0,0 +1,65 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_INPUT_FILE_H_
+#define TOOLS_GN_INPUT_FILE_H_
+
+#include <string>
+
+#include "base/files/file_path.h"
+#include "base/logging.h"
+#include "base/macros.h"
+#include "tools/gn/source_dir.h"
+#include "tools/gn/source_file.h"
+
+class InputFile {
+ public:
+ explicit InputFile(const SourceFile& name);
+
+ ~InputFile();
+
+ // The virtual name passed into the constructor. This does not take into
+ // account whether the file was loaded from the secondary source tree (see
+ // BuildSettings secondary_source_path).
+ const SourceFile& name() const { return name_; }
+
+ // The directory is just a cached version of name()->GetDir() but we get this
+ // a lot so computing it once up front saves a bunch of work.
+ const SourceDir& dir() const { return dir_; }
+
+ // The physical name tells the actual name on disk, if there is one.
+ const base::FilePath& physical_name() const { return physical_name_; }
+
+ // The friendly name can be set to override the name() in cases where there
+ // is no name (like SetContents is used instead) or if the name doesn't
+ // make sense. This will be displayed in error messages.
+ const std::string& friendly_name() const { return friendly_name_; }
+ void set_friendly_name(const std::string& f) { friendly_name_ = f; }
+
+ const std::string& contents() const {
+ DCHECK(contents_loaded_);
+ return contents_;
+ }
+
+ // For testing and in cases where this input doesn't actually refer to
+ // "a file".
+ void SetContents(const std::string& c);
+
+ // Loads the given file synchronously, returning true on success.
+ bool Load(const base::FilePath& system_path);
+
+ private:
+ SourceFile name_;
+ SourceDir dir_;
+
+ base::FilePath physical_name_;
+ std::string friendly_name_;
+
+ bool contents_loaded_;
+ std::string contents_;
+
+ DISALLOW_COPY_AND_ASSIGN(InputFile);
+};
+
+#endif // TOOLS_GN_INPUT_FILE_H_
diff --git a/chromium/tools/gn/input_file_manager.cc b/chromium/tools/gn/input_file_manager.cc
new file mode 100644
index 00000000000..a3e64606b0d
--- /dev/null
+++ b/chromium/tools/gn/input_file_manager.cc
@@ -0,0 +1,323 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/input_file_manager.h"
+
+#include <utility>
+
+#include "base/bind.h"
+#include "base/stl_util.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/parser.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/scope_per_file_provider.h"
+#include "tools/gn/tokenizer.h"
+#include "tools/gn/trace.h"
+
+namespace {
+
+void InvokeFileLoadCallback(const InputFileManager::FileLoadCallback& cb,
+ const ParseNode* node) {
+ cb.Run(node);
+}
+
+bool DoLoadFile(const LocationRange& origin,
+ const BuildSettings* build_settings,
+ const SourceFile& name,
+ InputFile* file,
+ std::vector<Token>* tokens,
+ std::unique_ptr<ParseNode>* root,
+ Err* err) {
+ // Do all of this stuff outside the lock. We should not give out file
+ // pointers until the read is complete.
+ if (g_scheduler->verbose_logging()) {
+ std::string logmsg = name.value();
+ if (origin.begin().file())
+ logmsg += " (referenced from " + origin.begin().Describe(false) + ")";
+ g_scheduler->Log("Loading", logmsg);
+ }
+
+ // Read.
+ base::FilePath primary_path = build_settings->GetFullPath(name);
+ ScopedTrace load_trace(TraceItem::TRACE_FILE_LOAD, name.value());
+ if (!file->Load(primary_path)) {
+ if (!build_settings->secondary_source_path().empty()) {
+ // Fall back to secondary source tree.
+ base::FilePath secondary_path =
+ build_settings->GetFullPathSecondary(name);
+ if (!file->Load(secondary_path)) {
+ *err = Err(origin, "Can't load input file.",
+ "Unable to load:\n " +
+ FilePathToUTF8(primary_path) + "\n"
+ "I also checked in the secondary tree for:\n " +
+ FilePathToUTF8(secondary_path));
+ return false;
+ }
+ } else {
+ *err = Err(origin,
+ "Unable to load \"" + FilePathToUTF8(primary_path) + "\".");
+ return false;
+ }
+ }
+ load_trace.Done();
+
+ ScopedTrace exec_trace(TraceItem::TRACE_FILE_PARSE, name.value());
+
+ // Tokenize.
+ *tokens = Tokenizer::Tokenize(file, err);
+ if (err->has_error())
+ return false;
+
+ // Parse.
+ *root = Parser::Parse(*tokens, err);
+ if (err->has_error())
+ return false;
+
+ exec_trace.Done();
+ return true;
+}
+
+} // namespace
+
+InputFileManager::InputFileData::InputFileData(const SourceFile& file_name)
+ : file(file_name),
+ loaded(false),
+ sync_invocation(false) {
+}
+
+InputFileManager::InputFileData::~InputFileData() {
+}
+
+InputFileManager::InputFileManager() {
+}
+
+InputFileManager::~InputFileManager() {
+ // Should be single-threaded by now.
+ STLDeleteContainerPairSecondPointers(input_files_.begin(),
+ input_files_.end());
+ STLDeleteContainerPointers(dynamic_inputs_.begin(), dynamic_inputs_.end());
+}
+
+bool InputFileManager::AsyncLoadFile(const LocationRange& origin,
+ const BuildSettings* build_settings,
+ const SourceFile& file_name,
+ const FileLoadCallback& callback,
+ Err* err) {
+ // Try not to schedule callbacks while holding the lock. All cases that don't
+ // want to schedule should return early. Otherwise, this will be scheduled
+ // after we leave the lock.
+ base::Closure schedule_this;
+ {
+ base::AutoLock lock(lock_);
+
+ InputFileMap::const_iterator found = input_files_.find(file_name);
+ if (found == input_files_.end()) {
+ // New file, schedule load.
+ InputFileData* data = new InputFileData(file_name);
+ data->scheduled_callbacks.push_back(callback);
+ input_files_[file_name] = data;
+
+ schedule_this = base::Bind(&InputFileManager::BackgroundLoadFile,
+ this,
+ origin,
+ build_settings,
+ file_name,
+ &data->file);
+ } else {
+ InputFileData* data = found->second;
+
+ // Prevent mixing async and sync loads. See SyncLoadFile for discussion.
+ if (data->sync_invocation) {
+ g_scheduler->FailWithError(Err(
+ origin, "Load type mismatch.",
+ "The file \"" + file_name.value() + "\" was previously loaded\n"
+ "synchronously (via an import) and now you're trying to load it "
+ "asynchronously\n(via a deps rule). This is a class 2 misdemeanor: "
+ "a single input file must\nbe loaded the same way each time to "
+ "avoid blowing my tiny, tiny mind."));
+ return false;
+ }
+
+ if (data->loaded) {
+ // Can just directly issue the callback on the background thread.
+ schedule_this = base::Bind(&InvokeFileLoadCallback, callback,
+ data->parsed_root.get());
+ } else {
+ // Load is pending on this file, schedule the invoke.
+ data->scheduled_callbacks.push_back(callback);
+ return true;
+ }
+ }
+ }
+ g_scheduler->pool()->PostWorkerTaskWithShutdownBehavior(
+ FROM_HERE, schedule_this,
+ base::SequencedWorkerPool::BLOCK_SHUTDOWN);
+ return true;
+}
+
+const ParseNode* InputFileManager::SyncLoadFile(
+ const LocationRange& origin,
+ const BuildSettings* build_settings,
+ const SourceFile& file_name,
+ Err* err) {
+ base::AutoLock lock(lock_);
+
+ InputFileData* data = nullptr;
+ InputFileMap::iterator found = input_files_.find(file_name);
+ if (found == input_files_.end()) {
+ // Haven't seen this file yet, start loading right now.
+ data = new InputFileData(file_name);
+ data->sync_invocation = true;
+ input_files_[file_name] = data;
+
+ base::AutoUnlock unlock(lock_);
+ if (!LoadFile(origin, build_settings, file_name, &data->file, err))
+ return nullptr;
+ } else {
+ // This file has either been loaded or is pending loading.
+ data = found->second;
+
+ if (!data->sync_invocation) {
+ // Don't allow mixing of sync and async loads. If an async load is
+ // scheduled and then a bunch of threads need to load it synchronously
+ // and block on it loading, it could deadlock or at least cause a lot
+ // of wasted CPU while those threads wait for the load to complete (which
+ // may be far back in the input queue).
+ //
+ // We could work around this by promoting the load to a sync load. This
+ // requires a bunch of extra code to either check flags and likely do
+ // extra locking (bad) or to just do both types of load on the file and
+ // deal with the race condition.
+ //
+ // I have no practical way to test this, and generally we should have
+ // all include files processed synchronously and all build files
+ // processed asynchronously, so it doesn't happen in practice.
+ *err = Err(
+ origin, "Load type mismatch.",
+ "The file \"" + file_name.value() + "\" was previously loaded\n"
+ "asynchronously (via a deps rule) and now you're trying to load it "
+ "synchronously.\nThis is a class 2 misdemeanor: a single input file "
+ "must be loaded the same way\neach time to avoid blowing my tiny, "
+ "tiny mind.");
+ return nullptr;
+ }
+
+ if (!data->loaded) {
+ // Wait for the already-pending sync load to complete.
+ if (!data->completion_event)
+ data->completion_event.reset(new base::WaitableEvent(false, false));
+ {
+ base::AutoUnlock unlock(lock_);
+ data->completion_event->Wait();
+ }
+ // If there were multiple waiters on the same event, we now need to wake
+ // up the next one.
+ data->completion_event->Signal();
+ }
+ }
+
+ // The other load could have failed. It is possible that this thread's error
+ // will be reported to the scheduler before the other thread's (and the first
+ // error reported "wins"). Forward the parse error from the other load for
+ // this thread so that the error message is useful.
+ if (!data->parsed_root)
+ *err = data->parse_error;
+ return data->parsed_root.get();
+}
+
+void InputFileManager::AddDynamicInput(
+ const SourceFile& name,
+ InputFile** file,
+ std::vector<Token>** tokens,
+ std::unique_ptr<ParseNode>** parse_root) {
+ InputFileData* data = new InputFileData(name);
+ {
+ base::AutoLock lock(lock_);
+ dynamic_inputs_.push_back(data);
+ }
+ *file = &data->file;
+ *tokens = &data->tokens;
+ *parse_root = &data->parsed_root;
+}
+
+int InputFileManager::GetInputFileCount() const {
+ base::AutoLock lock(lock_);
+ return static_cast<int>(input_files_.size());
+}
+
+void InputFileManager::GetAllPhysicalInputFileNames(
+ std::vector<base::FilePath>* result) const {
+ base::AutoLock lock(lock_);
+ result->reserve(input_files_.size());
+ for (const auto& file : input_files_) {
+ if (!file.second->file.physical_name().empty())
+ result->push_back(file.second->file.physical_name());
+ }
+}
+
+void InputFileManager::BackgroundLoadFile(const LocationRange& origin,
+ const BuildSettings* build_settings,
+ const SourceFile& name,
+ InputFile* file) {
+ Err err;
+ if (!LoadFile(origin, build_settings, name, file, &err))
+ g_scheduler->FailWithError(err);
+}
+
+bool InputFileManager::LoadFile(const LocationRange& origin,
+ const BuildSettings* build_settings,
+ const SourceFile& name,
+ InputFile* file,
+ Err* err) {
+ std::vector<Token> tokens;
+ std::unique_ptr<ParseNode> root;
+ bool success = DoLoadFile(origin, build_settings, name, file,
+ &tokens, &root, err);
+ // Can't return early. We have to ensure that the completion event is
+  // signaled in all cases because another thread could be blocked on this one.
+
+ // Save this pointer for running the callbacks below, which happens after the
+ // scoped ptr ownership is taken away inside the lock.
+ ParseNode* unowned_root = root.get();
+
+ std::vector<FileLoadCallback> callbacks;
+ {
+ base::AutoLock lock(lock_);
+ DCHECK(input_files_.find(name) != input_files_.end());
+
+ InputFileData* data = input_files_[name];
+ data->loaded = true;
+ if (success) {
+ data->tokens.swap(tokens);
+ data->parsed_root = std::move(root);
+ } else {
+ data->parse_error = *err;
+ }
+
+ // Unblock waiters on this event.
+ //
+ // It's somewhat bad to signal this inside the lock. When it's used, it's
+ // lazily created inside the lock. So we need to do the check and signal
+ // inside the lock to avoid race conditions on the lazy creation of the
+ // lock.
+ //
+ // We could avoid this by creating the lock every time, but the lock is
+ // very seldom used and will generally be NULL, so my current theory is that
+ // several signals of a completion event inside a lock is better than
+ // creating about 1000 extra locks (one for each file).
+ if (data->completion_event)
+ data->completion_event->Signal();
+
+ callbacks.swap(data->scheduled_callbacks);
+ }
+
+ // Run pending invocations. Theoretically we could schedule each of these
+ // separately to get some parallelism. But normally there will only be one
+ // item in the list, so that's extra overhead and complexity for no gain.
+ if (success) {
+ for (const auto& cb : callbacks)
+ cb.Run(unowned_root);
+ }
+ return success;
+}
diff --git a/chromium/tools/gn/input_file_manager.h b/chromium/tools/gn/input_file_manager.h
new file mode 100644
index 00000000000..ba055e219c5
--- /dev/null
+++ b/chromium/tools/gn/input_file_manager.h
@@ -0,0 +1,156 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_INPUT_FILE_MANAGER_H_
+#define TOOLS_GN_INPUT_FILE_MANAGER_H_
+
+#include <set>
+#include <utility>
+#include <vector>
+
+#include "base/callback.h"
+#include "base/containers/hash_tables.h"
+#include "base/files/file_path.h"
+#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "base/synchronization/lock.h"
+#include "base/synchronization/waitable_event.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/settings.h"
+
+class Err;
+class LocationRange;
+class ParseNode;
+class Token;
+
+// Manages loading and parsing files from disk. This doesn't actually have
+// any context for executing the results, so potentially multiple configs
+// could use the same input file (saving parsing).
+//
+// This class is threadsafe.
+//
+// InputFile objects must never be deleted while the program is running since
+// various state points into them.
+class InputFileManager : public base::RefCountedThreadSafe<InputFileManager> {
+ public:
+  // Callback issued when a file is loaded. On success, the parse node will
+ // refer to the root block of the file. On failure, this will be NULL.
+ typedef base::Callback<void(const ParseNode*)> FileLoadCallback;
+
+ InputFileManager();
+
+ // Loads the given file and executes the callback on the worker pool.
+ //
+ // There are two types of errors. For errors known synchronously, the error
+ // will be set, it will return false, and no work will be scheduled.
+ //
+ // For parse errors and such that happen in the future, the error will be
+ // logged to the scheduler and the callback will be invoked with a null
+ // ParseNode pointer. The given |origin| will be blamed for the invocation.
+ bool AsyncLoadFile(const LocationRange& origin,
+ const BuildSettings* build_settings,
+ const SourceFile& file_name,
+ const FileLoadCallback& callback,
+ Err* err);
+
+ // Loads and parses the given file synchronously, returning the root block
+ // corresponding to the parsed result. On error, return NULL and the given
+ // Err is set.
+ const ParseNode* SyncLoadFile(const LocationRange& origin,
+ const BuildSettings* build_settings,
+ const SourceFile& file_name,
+ Err* err);
+
+ // Creates an entry to manage the memory associated with keeping a parsed
+ // set of code in memory.
+ //
+ // The values pointed to by the parameters will be filled with pointers to
+ // the file, tokens, and parse node that this class created. The calling
+ // code is responsible for populating these values and maintaining
+ // threadsafety. This class' only job is to hold onto the memory and delete
+ // it when the program exits.
+ //
+ // This solves the problem that sometimes we need to execute something
+ // dynamic and save the result, but the values all have references to the
+ // nodes and file that created it. Either we need to reset the origin of
+ // the values and lose context for error reporting, or somehow keep the
+ // associated parse nodes, tokens, and file data in memory. This function
+ // allows the latter.
+ void AddDynamicInput(const SourceFile& name,
+ InputFile** file,
+ std::vector<Token>** tokens,
+ std::unique_ptr<ParseNode>** parse_root);
+
+ // Does not count dynamic input.
+ int GetInputFileCount() const;
+
+ // Fills the vector with all input files.
+ void GetAllPhysicalInputFileNames(std::vector<base::FilePath>* result) const;
+
+ private:
+ friend class base::RefCountedThreadSafe<InputFileManager>;
+
+ struct InputFileData {
+ explicit InputFileData(const SourceFile& file_name);
+ ~InputFileData();
+
+ // Don't touch this outside the lock until it's marked loaded.
+ InputFile file;
+
+ bool loaded;
+
+ bool sync_invocation;
+
+ // Lists all invocations that need to be executed when the file completes
+ // loading.
+ std::vector<FileLoadCallback> scheduled_callbacks;
+
+ // Event to signal when the load is complete (or fails). This is lazily
+ // created only when a thread is synchronously waiting for this load (which
+ // only happens for imports).
+ std::unique_ptr<base::WaitableEvent> completion_event;
+
+ std::vector<Token> tokens;
+
+ // Null before the file is loaded or if loading failed.
+ std::unique_ptr<ParseNode> parsed_root;
+ Err parse_error;
+ };
+
+ virtual ~InputFileManager();
+
+ void BackgroundLoadFile(const LocationRange& origin,
+ const BuildSettings* build_settings,
+ const SourceFile& name,
+ InputFile* file);
+
+ // Loads the given file. On error, sets the Err and return false.
+ bool LoadFile(const LocationRange& origin,
+ const BuildSettings* build_settings,
+ const SourceFile& name,
+ InputFile* file,
+ Err* err);
+
+ mutable base::Lock lock_;
+
+ // Maps repo-relative filenames to the corresponding owned pointer.
+ typedef base::hash_map<SourceFile, InputFileData*> InputFileMap;
+ InputFileMap input_files_;
+
+ // Tracks all dynamic inputs. The data are holders for memory management
+ // purposes and should not be read or modified by this class. The values
+ // will be vended out to the code creating the dynamic input, who is in
+ // charge of the threadsafety requirements.
+ //
+ // See AddDynamicInput().
+ //
+ // Owning pointers.
+ std::vector<InputFileData*> dynamic_inputs_;
+
+ DISALLOW_COPY_AND_ASSIGN(InputFileManager);
+};
+
+#endif // TOOLS_GN_INPUT_FILE_MANAGER_H_
diff --git a/chromium/tools/gn/item.cc b/chromium/tools/gn/item.cc
new file mode 100644
index 00000000000..b0efd64ac01
--- /dev/null
+++ b/chromium/tools/gn/item.cc
@@ -0,0 +1,49 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/item.h"
+
+#include "base/logging.h"
+#include "tools/gn/settings.h"
+
+Item::Item(const Settings* settings, const Label& label)
+ : settings_(settings), label_(label), defined_from_(nullptr) {
+}
+
+Item::~Item() {
+}
+
+Config* Item::AsConfig() {
+ return nullptr;
+}
+const Config* Item::AsConfig() const {
+ return nullptr;
+}
+Target* Item::AsTarget() {
+ return nullptr;
+}
+const Target* Item::AsTarget() const {
+ return nullptr;
+}
+Toolchain* Item::AsToolchain() {
+ return nullptr;
+}
+const Toolchain* Item::AsToolchain() const {
+ return nullptr;
+}
+
+std::string Item::GetItemTypeName() const {
+ if (AsConfig())
+ return "config";
+ if (AsTarget())
+ return "target";
+ if (AsToolchain())
+ return "toolchain";
+ NOTREACHED();
+ return "this thing that I have no idea what it is";
+}
+
+bool Item::OnResolved(Err* err) {
+ return true;
+}
diff --git a/chromium/tools/gn/item.h b/chromium/tools/gn/item.h
new file mode 100644
index 00000000000..069c3edc558
--- /dev/null
+++ b/chromium/tools/gn/item.h
@@ -0,0 +1,63 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_ITEM_H_
+#define TOOLS_GN_ITEM_H_
+
+#include <string>
+
+#include "tools/gn/label.h"
+#include "tools/gn/visibility.h"
+
+class Config;
+class ParseNode;
+class Settings;
+class Target;
+class Toolchain;
+
+// A named item (target, config, etc.) that participates in the dependency
+// graph.
+class Item {
+ public:
+ Item(const Settings* settings, const Label& label);
+ virtual ~Item();
+
+ const Settings* settings() const { return settings_; }
+
+ // This is guaranteed to never change after construction so this can be
+ // accessed from any thread with no locking once the item is constructed.
+ const Label& label() const { return label_; }
+
+ const ParseNode* defined_from() const { return defined_from_; }
+ void set_defined_from(const ParseNode* df) { defined_from_ = df; }
+
+ Visibility& visibility() { return visibility_; }
+ const Visibility& visibility() const { return visibility_; }
+
+ // Manual RTTI.
+ virtual Config* AsConfig();
+ virtual const Config* AsConfig() const;
+ virtual Target* AsTarget();
+ virtual const Target* AsTarget() const;
+ virtual Toolchain* AsToolchain();
+ virtual const Toolchain* AsToolchain() const;
+
+ // Returns a name like "target" or "config" for the type of item this is, to
+ // be used in logging and error messages.
+ std::string GetItemTypeName() const;
+
+ // Called when this item is resolved, meaning it and all of its dependents
+ // have no unresolved deps. Returns true on success. Sets the error and
+ // returns false on failure.
+ virtual bool OnResolved(Err* err);
+
+ private:
+ const Settings* settings_;
+ Label label_;
+ const ParseNode* defined_from_;
+
+ Visibility visibility_;
+};
+
+#endif // TOOLS_GN_ITEM_H_
diff --git a/chromium/tools/gn/label.cc b/chromium/tools/gn/label.cc
new file mode 100644
index 00000000000..4545525c2ee
--- /dev/null
+++ b/chromium/tools/gn/label.cc
@@ -0,0 +1,279 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/label.h"
+
+#include "base/logging.h"
+#include "base/strings/string_util.h"
+#include "build/build_config.h"
+#include "tools/gn/err.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/value.h"
+
+namespace {
+
+// We print user visible label names with no trailing slash after the
+// directory name.
+std::string DirWithNoTrailingSlash(const SourceDir& dir) {
+ // Be careful not to trim if the input is just "/" or "//".
+ if (dir.value().size() > 2)
+ return dir.value().substr(0, dir.value().size() - 1);
+ return dir.value();
+}
+
+// Given the separated-out input (everything before the colon) in the dep rule,
+// computes the final build rule. Sets err on failure. On success,
+// |*used_implicit| will be set to whether the implicit current directory was
+// used. The value is used only for generating error messages.
+bool ComputeBuildLocationFromDep(const Value& input_value,
+ const SourceDir& current_dir,
+ const base::StringPiece& input,
+ SourceDir* result,
+ Err* err) {
+ // No rule, use the current location.
+ if (input.empty()) {
+ *result = current_dir;
+ return true;
+ }
+
+ *result = current_dir.ResolveRelativeDir(input_value, input, err);
+ return true;
+}
+
+// Given the separated-out target name (after the colon) computes the final
+// name, using the implicit name from the previously-generated
+// computed_location if necessary. The input_value is used only for generating
+// error messages.
+bool ComputeTargetNameFromDep(const Value& input_value,
+ const SourceDir& computed_location,
+ const base::StringPiece& input,
+ std::string* result,
+ Err* err) {
+ if (!input.empty()) {
+ // Easy case: input is specified, just use it.
+ result->assign(input.data(), input.size());
+ return true;
+ }
+
+ const std::string& loc = computed_location.value();
+
+ // Use implicit name. The path will be "//", "//base/", "//base/i18n/", etc.
+ if (loc.size() <= 2) {
+ *err = Err(input_value, "This dependency name is empty");
+ return false;
+ }
+
+ size_t next_to_last_slash = loc.rfind('/', loc.size() - 2);
+ DCHECK(next_to_last_slash != std::string::npos);
+ result->assign(&loc[next_to_last_slash + 1],
+ loc.size() - next_to_last_slash - 2);
+ return true;
+}
+
+// The original value is used only for error reporting, use the |input| as the
+// input to this function (which may be a substring of the original value when
+// we're parsing toolchains).
+//
+// If the output toolchain vars are NULL, then we'll report an error if we
+// find a toolchain specified (this is used when recursively parsing toolchain
+// labels which themselves can't have toolchain specs).
+//
+// We assume that the output variables are initialized to empty so we don't
+// write them unless we need them to contain something.
+//
+// Returns true on success. On failure, the out* variables might be written to
+// but shouldn't be used.
+bool Resolve(const SourceDir& current_dir,
+ const Label& current_toolchain,
+ const Value& original_value,
+ const base::StringPiece& input,
+ SourceDir* out_dir,
+ std::string* out_name,
+ SourceDir* out_toolchain_dir,
+ std::string* out_toolchain_name,
+ Err* err) {
+ // To workaround the problem that StringPiece operator[] doesn't return a ref.
+ const char* input_str = input.data();
+ size_t offset = 0;
+#if defined(OS_WIN)
+ if (IsPathAbsolute(input)) {
+ size_t drive_letter_pos = input[0] == '/' ? 1 : 0;
+ if (input.size() > drive_letter_pos + 2 &&
+ input[drive_letter_pos + 1] == ':' &&
+ IsSlash(input[drive_letter_pos + 2]) &&
+ base::IsAsciiAlpha(input[drive_letter_pos])) {
+ // Skip over the drive letter colon.
+ offset = drive_letter_pos + 2;
+ }
+ }
+#endif
+ size_t path_separator = input.find_first_of(":(", offset);
+ base::StringPiece location_piece;
+ base::StringPiece name_piece;
+ base::StringPiece toolchain_piece;
+ if (path_separator == std::string::npos) {
+ location_piece = input;
+ // Leave name & toolchain piece null.
+ } else {
+ location_piece = base::StringPiece(&input_str[0], path_separator);
+
+ size_t toolchain_separator = input.find('(', path_separator);
+ if (toolchain_separator == std::string::npos) {
+ name_piece = base::StringPiece(&input_str[path_separator + 1],
+ input.size() - path_separator - 1);
+ // Leave location piece null.
+ } else if (!out_toolchain_dir) {
+      // Toolchain specified but not allowed in this context.
+ *err = Err(original_value, "Toolchain has a toolchain.",
+ "Your toolchain definition (inside the parens) seems to itself "
+ "have a\ntoolchain. Don't do this.");
+ return false;
+ } else {
+ // Name piece is everything between the two separators. Note that the
+      // separators may be the same (e.g. "//foo(bar)" which means empty name).
+ if (toolchain_separator > path_separator) {
+ name_piece = base::StringPiece(
+ &input_str[path_separator + 1],
+ toolchain_separator - path_separator - 1);
+ }
+
+ // Toolchain name should end in a ) and this should be the end of the
+ // string.
+ if (input[input.size() - 1] != ')') {
+ *err = Err(original_value, "Bad toolchain name.",
+ "Toolchain name must end in a \")\" at the end of the label.");
+ return false;
+ }
+
+ // Subtract off the two parens to just get the toolchain name.
+ toolchain_piece = base::StringPiece(
+ &input_str[toolchain_separator + 1],
+ input.size() - toolchain_separator - 2);
+ }
+ }
+
+ // Everything before the separator is the filename.
+ // We allow three cases:
+ // Absolute: "//foo:bar" -> /foo:bar
+ // Target in current file: ":foo" -> <currentdir>:foo
+ // Path with implicit name: "/foo" -> /foo:foo
+ if (location_piece.empty() && name_piece.empty()) {
+ // Can't use both implicit filename and name (":").
+ *err = Err(original_value, "This doesn't specify a dependency.");
+ return false;
+ }
+
+ if (!ComputeBuildLocationFromDep(original_value, current_dir, location_piece,
+ out_dir, err))
+ return false;
+
+ if (!ComputeTargetNameFromDep(original_value, *out_dir, name_piece,
+ out_name, err))
+ return false;
+
+ // Last, do the toolchains.
+ if (out_toolchain_dir) {
+ // Handle empty toolchain strings. We don't allow normal labels to be
+ // empty so we can't allow the recursive call of this function to do this
+ // check.
+ if (toolchain_piece.empty()) {
+ *out_toolchain_dir = current_toolchain.dir();
+ *out_toolchain_name = current_toolchain.name();
+ return true;
+ } else {
+ return Resolve(current_dir, current_toolchain, original_value,
+ toolchain_piece, out_toolchain_dir, out_toolchain_name,
+ nullptr, nullptr, err);
+ }
+ }
+ return true;
+}
+
+} // namespace
+
+Label::Label() {
+}
+
+Label::Label(const SourceDir& dir,
+ const base::StringPiece& name,
+ const SourceDir& toolchain_dir,
+ const base::StringPiece& toolchain_name)
+ : dir_(dir),
+ toolchain_dir_(toolchain_dir) {
+ name_.assign(name.data(), name.size());
+ toolchain_name_.assign(toolchain_name.data(), toolchain_name.size());
+}
+
+Label::Label(const SourceDir& dir, const base::StringPiece& name)
+ : dir_(dir) {
+ name_.assign(name.data(), name.size());
+}
+
+Label::Label(const Label& other) = default;
+
+Label::~Label() {
+}
+
+// static
+Label Label::Resolve(const SourceDir& current_dir,
+ const Label& current_toolchain,
+ const Value& input,
+ Err* err) {
+ Label ret;
+ if (input.type() != Value::STRING) {
+ *err = Err(input, "Dependency is not a string.");
+ return ret;
+ }
+ const std::string& input_string = input.string_value();
+ if (input_string.empty()) {
+ *err = Err(input, "Dependency string is empty.");
+ return ret;
+ }
+
+ if (!::Resolve(current_dir, current_toolchain, input, input_string,
+ &ret.dir_, &ret.name_,
+ &ret.toolchain_dir_, &ret.toolchain_name_,
+ err))
+ return Label();
+ return ret;
+}
+
+Label Label::GetToolchainLabel() const {
+ return Label(toolchain_dir_, toolchain_name_);
+}
+
+Label Label::GetWithNoToolchain() const {
+ return Label(dir_, name_);
+}
+
+std::string Label::GetUserVisibleName(bool include_toolchain) const {
+ std::string ret;
+ ret.reserve(dir_.value().size() + name_.size() + 1);
+
+ if (dir_.is_null())
+ return ret;
+
+ ret = DirWithNoTrailingSlash(dir_);
+ ret.push_back(':');
+ ret.append(name_);
+
+ if (include_toolchain) {
+ ret.push_back('(');
+ if (!toolchain_dir_.is_null() && !toolchain_name_.empty()) {
+ ret.append(DirWithNoTrailingSlash(toolchain_dir_));
+ ret.push_back(':');
+ ret.append(toolchain_name_);
+ }
+ ret.push_back(')');
+ }
+ return ret;
+}
+
+std::string Label::GetUserVisibleName(const Label& default_toolchain) const {
+ bool include_toolchain =
+ default_toolchain.dir() != toolchain_dir_ ||
+ default_toolchain.name() != toolchain_name_;
+ return GetUserVisibleName(include_toolchain);
+}
diff --git a/chromium/tools/gn/label.h b/chromium/tools/gn/label.h
new file mode 100644
index 00000000000..cbeb1774d44
--- /dev/null
+++ b/chromium/tools/gn/label.h
@@ -0,0 +1,126 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_LABEL_H_
+#define TOOLS_GN_LABEL_H_
+
+#include <stddef.h>
+
+#include "base/containers/hash_tables.h"
+#include "tools/gn/source_dir.h"
+
+class Err;
+class Value;
+
+// A label represents the name of a target or some other named thing in
+// the source path. The label is always absolute and always includes a name
+// part, so it starts with a slash, and has one colon.
+class Label {
+ public:
+ Label();
+
+ // Makes a label given an already-separated out path and name.
+ // See also Resolve().
+ Label(const SourceDir& dir,
+ const base::StringPiece& name,
+ const SourceDir& toolchain_dir,
+ const base::StringPiece& toolchain_name);
+
+ // Makes a label with an empty toolchain.
+ Label(const SourceDir& dir, const base::StringPiece& name);
+ Label(const Label& other);
+ ~Label();
+
+  // Resolves a string from a build file that may be relative to the
+ // current directory into a fully qualified label. On failure returns an
+ // is_null() label and sets the error.
+ static Label Resolve(const SourceDir& current_dir,
+ const Label& current_toolchain,
+ const Value& input,
+ Err* err);
+
+ bool is_null() const { return dir_.is_null(); }
+
+ const SourceDir& dir() const { return dir_; }
+ const std::string& name() const { return name_; }
+
+ const SourceDir& toolchain_dir() const { return toolchain_dir_; }
+ const std::string& toolchain_name() const { return toolchain_name_; }
+
+ // Returns the current label's toolchain as its own Label.
+ Label GetToolchainLabel() const;
+
+ // Returns a copy of this label but with an empty toolchain.
+ Label GetWithNoToolchain() const;
+
+ // Formats this label in a way that we can present to the user or expose to
+ // other parts of the system. SourceDirs end in slashes, but the user
+ // expects names like "//chrome/renderer:renderer_config" when printed. The
+ // toolchain is optionally included.
+ std::string GetUserVisibleName(bool include_toolchain) const;
+
+ // Like the above version, but automatically includes the toolchain if it's
+ // not the default one. Normally the user only cares about the toolchain for
+ // non-default ones, so this can make certain output more clear.
+ std::string GetUserVisibleName(const Label& default_toolchain) const;
+
+ bool operator==(const Label& other) const {
+ return name_ == other.name_ && dir_ == other.dir_ &&
+ toolchain_dir_ == other.toolchain_dir_ &&
+ toolchain_name_ == other.toolchain_name_;
+ }
+ bool operator!=(const Label& other) const {
+ return !operator==(other);
+ }
+ bool operator<(const Label& other) const {
+ if (int c = dir_.value().compare(other.dir_.value()))
+ return c < 0;
+ if (int c = name_.compare(other.name_))
+ return c < 0;
+ if (int c = toolchain_dir_.value().compare(other.toolchain_dir_.value()))
+ return c < 0;
+ return toolchain_name_ < other.toolchain_name_;
+ }
+
+ void swap(Label& other) {
+ dir_.swap(other.dir_);
+ name_.swap(other.name_);
+ toolchain_dir_.swap(other.toolchain_dir_);
+ toolchain_name_.swap(other.toolchain_name_);
+ }
+
+ // Returns true if the toolchain dir/name of this object matches some
+ // other object.
+ bool ToolchainsEqual(const Label& other) const {
+ return toolchain_dir_ == other.toolchain_dir_ &&
+ toolchain_name_ == other.toolchain_name_;
+ }
+
+ private:
+ SourceDir dir_;
+ std::string name_;
+
+ SourceDir toolchain_dir_;
+ std::string toolchain_name_;
+};
+
+namespace BASE_HASH_NAMESPACE {
+
+template<> struct hash<Label> {
+ std::size_t operator()(const Label& v) const {
+ hash<std::string> stringhash;
+ return ((stringhash(v.dir().value()) * 131 +
+ stringhash(v.name())) * 131 +
+ stringhash(v.toolchain_dir().value())) * 131 +
+ stringhash(v.toolchain_name());
+ }
+};
+
+} // namespace BASE_HASH_NAMESPACE
+
+inline void swap(Label& lhs, Label& rhs) {
+ lhs.swap(rhs);
+}
+
+#endif // TOOLS_GN_LABEL_H_
diff --git a/chromium/tools/gn/label_pattern.cc b/chromium/tools/gn/label_pattern.cc
new file mode 100644
index 00000000000..a330b3b6664
--- /dev/null
+++ b/chromium/tools/gn/label_pattern.cc
@@ -0,0 +1,267 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/label_pattern.h"
+
+#include <stddef.h>
+
+#include "base/strings/string_util.h"
+#include "build/build_config.h"
+#include "tools/gn/err.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/value.h"
+
+const char kLabelPattern_Help[] =
+ "Label patterns\n"
+ "\n"
+ " A label pattern is a way of expressing one or more labels in a portion\n"
+ " of the source tree. They are not general regular expressions.\n"
+ "\n"
+ " They can take the following forms only:\n"
+ "\n"
+ " - Explicit (no wildcard):\n"
+ " \"//foo/bar:baz\"\n"
+ " \":baz\"\n"
+ "\n"
+ " - Wildcard target names:\n"
+ " \"//foo/bar:*\" (all targets in the //foo/bar/BUILD.gn file)\n"
+ " \":*\" (all targets in the current build file)\n"
+ "\n"
+ " - Wildcard directory names (\"*\" is only supported at the end)\n"
+ " \"*\" (all targets)\n"
+ " \"//foo/bar/*\" (all targets in any subdir of //foo/bar)\n"
+ " \"./*\" (all targets in the current build file or sub dirs)\n"
+ "\n"
+ " Any of the above forms can additionally take an explicit toolchain.\n"
+ " In this case, the toolchain must be fully qualified (no wildcards\n"
+ " are supported in the toolchain name).\n"
+ "\n"
+ " \"//foo:bar(//build/toochain:mac)\"\n"
+ " An explicit target in an explicit toolchain.\n"
+ "\n"
+ " \":*(//build/toolchain/linux:32bit)\"\n"
+ " All targets in the current build file using the 32-bit Linux\n"
+ " toolchain.\n"
+ "\n"
+ " \"//foo/*(//build/toolchain:win)\"\n"
+ " All targets in //foo and any subdirectory using the Windows\n"
+ " toolchain.\n";
+
+LabelPattern::LabelPattern() : type_(MATCH) {
+}
+
+LabelPattern::LabelPattern(Type type,
+ const SourceDir& dir,
+ const base::StringPiece& name,
+ const Label& toolchain_label)
+ : toolchain_(toolchain_label),
+ type_(type),
+ dir_(dir) {
+ name.CopyToString(&name_);
+}
+
+LabelPattern::LabelPattern(const LabelPattern& other) = default;
+
+LabelPattern::~LabelPattern() {
+}
+
+// static
+LabelPattern LabelPattern::GetPattern(const SourceDir& current_dir,
+ const Value& value,
+ Err* err) {
+ if (!value.VerifyTypeIs(Value::STRING, err))
+ return LabelPattern();
+
+ base::StringPiece str(value.string_value());
+ if (str.empty()) {
+ *err = Err(value, "Label pattern must not be empty.");
+ return LabelPattern();
+ }
+
+ // If there's no wildcard, this is specifying an exact label, use the
+ // label resolution code to get all the implicit name stuff.
+ size_t star = str.find('*');
+ if (star == std::string::npos) {
+ Label label = Label::Resolve(current_dir, Label(), value, err);
+ if (err->has_error())
+ return LabelPattern();
+
+ // Toolchain.
+ Label toolchain_label;
+ if (!label.toolchain_dir().is_null() || !label.toolchain_name().empty())
+ toolchain_label = label.GetToolchainLabel();
+
+ return LabelPattern(MATCH, label.dir(), label.name(), toolchain_label);
+ }
+
+ // Wildcard case, need to split apart the label to see what it specifies.
+ Label toolchain_label;
+ size_t open_paren = str.find('(');
+ if (open_paren != std::string::npos) {
+ // Has a toolchain definition, extract inside the parens.
+ size_t close_paren = str.find(')', open_paren);
+ if (close_paren == std::string::npos) {
+ *err = Err(value, "No close paren when looking for toolchain name.");
+ return LabelPattern();
+ }
+
+ std::string toolchain_string =
+ str.substr(open_paren + 1, close_paren - open_paren - 1).as_string();
+ if (toolchain_string.find('*') != std::string::npos) {
+ *err = Err(value, "Can't have a wildcard in the toolchain.");
+ return LabelPattern();
+ }
+
+ // Parse the inside of the parens as a label for a toolchain.
+ Value value_for_toolchain(value.origin(), toolchain_string);
+ toolchain_label =
+ Label::Resolve(current_dir, Label(), value_for_toolchain, err);
+ if (err->has_error())
+ return LabelPattern();
+
+ // Trim off the toolchain for the processing below.
+ str = str.substr(0, open_paren);
+ }
+
+ // Extract path and name.
+ base::StringPiece path;
+ base::StringPiece name;
+ size_t offset = 0;
+#if defined(OS_WIN)
+ if (IsPathAbsolute(str)) {
+ size_t drive_letter_pos = str[0] == '/' ? 1 : 0;
+ if (str.size() > drive_letter_pos + 2 && str[drive_letter_pos + 1] == ':' &&
+ IsSlash(str[drive_letter_pos + 2]) &&
+ base::IsAsciiAlpha(str[drive_letter_pos])) {
+ // Skip over the drive letter colon.
+ offset = drive_letter_pos + 2;
+ }
+ }
+#endif
+ size_t colon = str.find(':', offset);
+ if (colon == std::string::npos) {
+ path = base::StringPiece(str);
+ } else {
+ path = str.substr(0, colon);
+ name = str.substr(colon + 1);
+ }
+
+ // The path can have these forms:
+ // 1. <empty> (use current dir)
+ // 2. <non wildcard stuff> (send through directory resolution)
+ // 3. <non wildcard stuff>* (send stuff through dir resolution, note star)
+ // 4. * (matches anything)
+ SourceDir dir;
+ bool has_path_star = false;
+ if (path.empty()) {
+ // Looks like ":foo".
+ dir = current_dir;
+ } else if (path[path.size() - 1] == '*') {
+ // Case 3 or 4 above.
+ has_path_star = true;
+
+ // Adjust path to contain everything but the star.
+ path = path.substr(0, path.size() - 1);
+
+ if (!path.empty() && path[path.size() - 1] != '/') {
+ // The input was "foo*" which is invalid.
+ *err = Err(value, "'*' must match full directories in a label pattern.",
+ "You did \"foo*\" but this thing doesn't do general pattern\n"
+ "matching. Instead, you have to add a slash: \"foo/*\" to match\n"
+ "all targets in a directory hierarchy.");
+ return LabelPattern();
+ }
+ }
+
+ // Resolve the part of the path that's not the wildcard.
+ if (!path.empty()) {
+ // The non-wildcard stuff better not have a wildcard.
+ if (path.find('*') != base::StringPiece::npos) {
+ *err = Err(value, "Label patterns only support wildcard suffixes.",
+ "The pattern contained a '*' that wasn't at the end.");
+ return LabelPattern();
+ }
+
+ // Resolve the non-wildcard stuff.
+ dir = current_dir.ResolveRelativeDir(value, path, err);
+ if (err->has_error())
+ return LabelPattern();
+ }
+
+ // Resolve the name. At this point, we're doing wildcard matches so the
+ // name should either be empty ("foo/*") or a wildcard ("foo:*");
+ if (colon != std::string::npos && name != "*") {
+ *err = Err(value, "Invalid label pattern.",
+ "You seem to be using the wildcard more generally that is supported.\n"
+ "Did you mean \"foo:*\" to match everything in the file, or\n"
+ "\"./*\" to recursively match everything in the currend subtree.");
+ return LabelPattern();
+ }
+
+ Type type;
+ if (has_path_star) {
+ // We know there's a wildcard, so if the name is empty it looks like
+ // "foo/*".
+ type = RECURSIVE_DIRECTORY;
+ } else {
+ // Everything else should be of the form "foo:*".
+ type = DIRECTORY;
+ }
+
+ // When we're doing wildcard matching, the name is always empty.
+ return LabelPattern(type, dir, base::StringPiece(), toolchain_label);
+}
+
+bool LabelPattern::HasWildcard(const std::string& str) {
+ // Just look for a star. In the future, we may want to handle escaping or
+ // other types of patterns.
+ return str.find('*') != std::string::npos;
+}
+
+bool LabelPattern::Matches(const Label& label) const {
+ if (!toolchain_.is_null()) {
+ // Toolchain must match exactly.
+ if (toolchain_.dir() != label.toolchain_dir() ||
+ toolchain_.name() != label.toolchain_name())
+ return false;
+ }
+
+ switch (type_) {
+ case MATCH:
+ return label.name() == name_ && label.dir() == dir_;
+ case DIRECTORY:
+ // The directories must match exactly.
+ return label.dir() == dir_;
+ case RECURSIVE_DIRECTORY:
+ // Our directory must be a prefix of the input label for recursive.
+ return label.dir().value().compare(0, dir_.value().size(), dir_.value())
+ == 0;
+ default:
+ NOTREACHED();
+ return false;
+ }
+}
+
+std::string LabelPattern::Describe() const {
+ std::string result;
+
+ switch (type()) {
+ case MATCH:
+ result = DirectoryWithNoLastSlash(dir()) + ":" + name();
+ break;
+ case DIRECTORY:
+ result = DirectoryWithNoLastSlash(dir()) + ":*";
+ break;
+ case RECURSIVE_DIRECTORY:
+ result = dir().value() + "*";
+ break;
+ }
+
+ if (!toolchain_.is_null()) {
+ result.push_back('(');
+ result.append(toolchain_.GetUserVisibleName(false));
+ result.push_back(')');
+ }
+ return result;
+}
diff --git a/chromium/tools/gn/label_pattern.h b/chromium/tools/gn/label_pattern.h
new file mode 100644
index 00000000000..7d0768c834b
--- /dev/null
+++ b/chromium/tools/gn/label_pattern.h
@@ -0,0 +1,76 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_LABEL_PATTERN_H_
+#define TOOLS_GN_LABEL_PATTERN_H_
+
+#include "base/strings/string_piece.h"
+#include "tools/gn/label.h"
+#include "tools/gn/source_dir.h"
+
+class Err;
+class Value;
+
+extern const char kLabelPattern_Help[];
+
+// A label pattern is a simple pattern that matches labels. It is used for
+// specifying visibility and other times when multiple targets need to be
+// referenced.
+class LabelPattern {
+ public:
+ enum Type {
+ MATCH = 1, // Exact match for a given target.
+ DIRECTORY, // Only targets in the file in the given directory.
+ RECURSIVE_DIRECTORY // The given directory and any subdir.
+ // (also indicates "public" when dir is empty).
+ };
+
+ LabelPattern();
+ LabelPattern(Type type,
+ const SourceDir& dir,
+ const base::StringPiece& name,
+ const Label& toolchain_label);
+ LabelPattern(const LabelPattern& other);
+ ~LabelPattern();
+
+ // Converts the given input string to a pattern. This does special stuff
+ // to treat the pattern as a label. Sets the error on failure.
+ static LabelPattern GetPattern(const SourceDir& current_dir,
+ const Value& value,
+ Err* err);
+
+ // Returns true if the given input string might match more than one thing.
+ static bool HasWildcard(const std::string& str);
+
+ // Returns true if this pattern matches the given label.
+ bool Matches(const Label& label) const;
+
+ // Returns a string representation of this pattern.
+ std::string Describe() const;
+
+ Type type() const { return type_; }
+
+ const SourceDir& dir() const { return dir_; }
+ const std::string& name() const { return name_; }
+
+ const Label& toolchain() const { return toolchain_; }
+ void set_toolchain(const Label& tc) { toolchain_ = tc; }
+
+ private:
+ // If nonempty, specifies the toolchain to use. If empty, this will match
+ // all toolchains. This is independent of the match type.
+ Label toolchain_;
+
+ Type type_;
+
+ // Used when type_ is DIRECTORY or RECURSIVE_DIRECTORY. This specifies the
+ // directory to which the pattern applies.
+ SourceDir dir_;
+
+ // Empty name means match everything. Otherwise the name must match
+ // exactly.
+ std::string name_;
+};
+
+#endif // TOOLS_GN_LABEL_PATTERN_H_
diff --git a/chromium/tools/gn/label_pattern_unittest.cc b/chromium/tools/gn/label_pattern_unittest.cc
new file mode 100644
index 00000000000..2154af1b88a
--- /dev/null
+++ b/chromium/tools/gn/label_pattern_unittest.cc
@@ -0,0 +1,86 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include "base/macros.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/err.h"
+#include "tools/gn/label_pattern.h"
+#include "tools/gn/value.h"
+
+namespace {
+
+struct PatternCase {
+ const char* input;
+ bool success;
+
+ LabelPattern::Type type;
+ const char* dir;
+ const char* name;
+ const char* toolchain;
+};
+
+} // namespace
+
+TEST(LabelPattern, PatternParse) {
+ SourceDir current_dir("//foo/");
+ PatternCase cases[] = {
+ // Missing stuff.
+ {"", false, LabelPattern::MATCH, "", "", ""},
+ {":", false, LabelPattern::MATCH, "", "", ""},
+ // Normal things.
+ {":bar", true, LabelPattern::MATCH, "//foo/", "bar", ""},
+ {"//la:bar", true, LabelPattern::MATCH, "//la/", "bar", ""},
+ {"*", true, LabelPattern::RECURSIVE_DIRECTORY, "", "", ""},
+ {":*", true, LabelPattern::DIRECTORY, "//foo/", "", ""},
+ {"la:*", true, LabelPattern::DIRECTORY, "//foo/la/", "", ""},
+ {"la/*:*", true, LabelPattern::RECURSIVE_DIRECTORY, "//foo/la/", "", ""},
+ {"//la:*", true, LabelPattern::DIRECTORY, "//la/", "", ""},
+ {"./*", true, LabelPattern::RECURSIVE_DIRECTORY, "//foo/", "", ""},
+ {"foo/*", true, LabelPattern::RECURSIVE_DIRECTORY, "//foo/foo/", "", ""},
+ {"//l/*", true, LabelPattern::RECURSIVE_DIRECTORY, "//l/", "", ""},
+ // Toolchains.
+ {"//foo()", true, LabelPattern::MATCH, "//foo/", "foo", ""},
+ {"//foo(//bar)", true, LabelPattern::MATCH, "//foo/", "foo", "//bar:bar"},
+ {"//foo:*(//bar)", true, LabelPattern::DIRECTORY, "//foo/", "",
+ "//bar:bar"},
+ {"//foo/*(//bar)", true, LabelPattern::RECURSIVE_DIRECTORY, "//foo/", "",
+ "//bar:bar"},
+ // Wildcards in invalid places.
+ {"*foo*:bar", false, LabelPattern::MATCH, "", "", ""},
+ {"foo*:*bar", false, LabelPattern::MATCH, "", "", ""},
+ {"*foo:bar", false, LabelPattern::MATCH, "", "", ""},
+ {"foo:bar*", false, LabelPattern::MATCH, "", "", ""},
+ {"*:*", true, LabelPattern::RECURSIVE_DIRECTORY, "", "", ""},
+ // Invalid toolchain stuff.
+ {"//foo(//foo/bar:*)", false, LabelPattern::MATCH, "", "", ""},
+ {"//foo/*(*)", false, LabelPattern::MATCH, "", "", ""},
+ {"//foo(//bar", false, LabelPattern::MATCH, "", "", ""},
+ // Absolute paths.
+ {"/la/*", true, LabelPattern::RECURSIVE_DIRECTORY, "/la/", "", ""},
+ {"/la:bar", true, LabelPattern::MATCH, "/la/", "bar", ""},
+#if defined(OS_WIN)
+ {"/C:/la/*", true, LabelPattern::RECURSIVE_DIRECTORY, "/C:/la/", "", ""},
+ {"C:/la/*", true, LabelPattern::RECURSIVE_DIRECTORY, "/C:/la/", "", ""},
+ {"/C:/la:bar", true, LabelPattern::MATCH, "/C:/la/", "bar", ""},
+ {"C:/la:bar", true, LabelPattern::MATCH, "/C:/la/", "bar", ""},
+ {"C:foo", true, LabelPattern::MATCH, "//foo/C/", "foo", ""},
+#endif
+ };
+
+ for (size_t i = 0; i < arraysize(cases); i++) {
+ const PatternCase& cur = cases[i];
+ Err err;
+ LabelPattern result =
+ LabelPattern::GetPattern(current_dir, Value(nullptr, cur.input), &err);
+
+ EXPECT_EQ(cur.success, !err.has_error()) << i << " " << cur.input;
+ EXPECT_EQ(cur.type, result.type()) << i << " " << cur.input;
+ EXPECT_EQ(cur.dir, result.dir().value()) << i << " " << cur.input;
+ EXPECT_EQ(cur.name, result.name()) << i << " " << cur.input;
+ EXPECT_EQ(cur.toolchain, result.toolchain().GetUserVisibleName(false))
+ << i << " " << cur.input;
+ }
+}
diff --git a/chromium/tools/gn/label_ptr.h b/chromium/tools/gn/label_ptr.h
new file mode 100644
index 00000000000..c0b2d636f17
--- /dev/null
+++ b/chromium/tools/gn/label_ptr.h
@@ -0,0 +1,117 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_LABEL_PTR_H_
+#define TOOLS_GN_LABEL_PTR_H_
+
+#include <stddef.h>
+
+#include <functional>
+
+#include "tools/gn/label.h"
+
+class Config;
+class ParseNode;
+class Target;
+
+// Structure that holds a labeled "thing". This is used for various places
+// where we need to store lists of targets or configs. We sometimes populate
+// the pointers on another thread from where we compute the labels, so this
+// structure lets us save them separately. This also allows us to store the
+// location of the thing that added this dependency.
+template<typename T>
+struct LabelPtrPair {
+ typedef T DestType;
+
+ LabelPtrPair() : label(), ptr(nullptr), origin(nullptr) {}
+
+ explicit LabelPtrPair(const Label& l)
+ : label(l), ptr(nullptr), origin(nullptr) {}
+
+ // This constructor is typically used in unit tests, it extracts the label
+ // automatically from a given pointer.
+ explicit LabelPtrPair(const T* p)
+ : label(p->label()), ptr(p), origin(nullptr) {}
+
+ ~LabelPtrPair() {}
+
+ Label label;
+ const T* ptr; // May be NULL.
+
+ // The origin of this dependency. This will be null for internally generated
+ // dependencies. This happens when a group is automatically expanded and that
+ // group's members are added to the target that depends on that group.
+ const ParseNode* origin;
+};
+
+typedef LabelPtrPair<Config> LabelConfigPair;
+typedef LabelPtrPair<Target> LabelTargetPair;
+
+typedef std::vector<LabelConfigPair> LabelConfigVector;
+typedef std::vector<LabelTargetPair> LabelTargetVector;
+
+// Comparison and search functions ---------------------------------------------
+
+// To do a brute-force search by label:
+// std::find_if(vect.begin(), vect.end(), LabelPtrLabelEquals<Config>(label));
+template<typename T>
+struct LabelPtrLabelEquals {
+ explicit LabelPtrLabelEquals(const Label& l) : label(l) {}
+
+ bool operator()(const LabelPtrPair<T>& arg) const {
+ return arg.label == label;
+ }
+
+ const Label& label;
+};
+
+// To do a brute-force search by object pointer:
+// std::find_if(vect.begin(), vect.end(), LabelPtrPtrEquals<Config>(config));
+template<typename T>
+struct LabelPtrPtrEquals {
+ explicit LabelPtrPtrEquals(const T* p) : ptr(p) {}
+
+ bool operator()(const LabelPtrPair<T>& arg) const {
+ return arg.ptr == ptr;
+ }
+
+ const T* ptr;
+};
+
+// To sort by label:
+// std::sort(vect.begin(), vect.end(), LabelPtrLabelLess<Config>());
+template<typename T>
+struct LabelPtrLabelLess {
+ bool operator()(const LabelPtrPair<T>& a, const LabelPtrPair<T>& b) const {
+ return a.label < b.label;
+ }
+};
+
+// Default comparison operators -----------------------------------------------
+//
+// The default hash and comparison operators operate on the label, which should
+// always be valid, whereas the pointer is sometimes null.
+
+template<typename T> inline bool operator==(const LabelPtrPair<T>& a,
+ const LabelPtrPair<T>& b) {
+ return a.label == b.label;
+}
+
+template<typename T> inline bool operator<(const LabelPtrPair<T>& a,
+ const LabelPtrPair<T>& b) {
+ return a.label < b.label;
+}
+
+namespace BASE_HASH_NAMESPACE {
+
+template<typename T> struct hash< LabelPtrPair<T> > {
+ std::size_t operator()(const LabelPtrPair<T>& v) const {
+ BASE_HASH_NAMESPACE::hash<Label> h;
+ return h(v.label);
+ }
+};
+
+} // namespace BASE_HASH_NAMESPACE
+
+#endif // TOOLS_GN_LABEL_PTR_H_
diff --git a/chromium/tools/gn/label_unittest.cc b/chromium/tools/gn/label_unittest.cc
new file mode 100644
index 00000000000..986aa9b50d6
--- /dev/null
+++ b/chromium/tools/gn/label_unittest.cc
@@ -0,0 +1,95 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include "base/macros.h"
+#include "build/build_config.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/err.h"
+#include "tools/gn/label.h"
+#include "tools/gn/value.h"
+
+namespace {
+
+struct ParseDepStringCase {
+ const char* cur_dir;
+ const char* str;
+ bool success;
+ const char* expected_dir;
+ const char* expected_name;
+ const char* expected_toolchain_dir;
+ const char* expected_toolchain_name;
+};
+
+} // namespace
+
+TEST(Label, Resolve) {
+ ParseDepStringCase cases[] = {
+ {"//chrome/", "", false, "", "", "", ""},
+ {"//chrome/", "/", false, "", "", "", ""},
+ {"//chrome/", ":", false, "", "", "", ""},
+ {"//chrome/", "/:", false, "", "", "", ""},
+ {"//chrome/", "blah", true, "//chrome/blah/", "blah", "//t/", "d"},
+ {"//chrome/", "blah:bar", true, "//chrome/blah/", "bar", "//t/", "d"},
+ // Absolute paths.
+ {"//chrome/", "/chrome:bar", true, "/chrome/", "bar", "//t/", "d"},
+ {"//chrome/", "/chrome/:bar", true, "/chrome/", "bar", "//t/", "d"},
+#if defined(OS_WIN)
+ {"//chrome/", "/C:/chrome:bar", true, "/C:/chrome/", "bar", "//t/", "d"},
+ {"//chrome/", "/C:/chrome/:bar", true, "/C:/chrome/", "bar", "//t/", "d"},
+ {"//chrome/", "C:/chrome:bar", true, "/C:/chrome/", "bar", "//t/", "d"},
+#endif
+ // Refers to root dir.
+ {"//chrome/", "//:bar", true, "//", "bar", "//t/", "d"},
+ // Implicit directory
+ {"//chrome/", ":bar", true, "//chrome/", "bar", "//t/", "d"},
+ {"//chrome/renderer/", ":bar", true, "//chrome/renderer/", "bar", "//t/",
+ "d"},
+ // Implicit names.
+ {"//chrome/", "//base", true, "//base/", "base", "//t/", "d"},
+ {"//chrome/", "//base/i18n", true, "//base/i18n/", "i18n", "//t/", "d"},
+ {"//chrome/", "//base/i18n:foo", true, "//base/i18n/", "foo", "//t/", "d"},
+ {"//chrome/", "//", false, "", "", "", ""},
+ // Toolchain parsing.
+ {"//chrome/", "//chrome:bar(//t:n)", true, "//chrome/", "bar", "//t/", "n"},
+ {"//chrome/", "//chrome:bar(//t)", true, "//chrome/", "bar", "//t/", "t"},
+ {"//chrome/", "//chrome:bar(//t:)", true, "//chrome/", "bar", "//t/", "t"},
+ {"//chrome/", "//chrome:bar()", true, "//chrome/", "bar", "//t/", "d"},
+ {"//chrome/", "//chrome:bar(foo)", true, "//chrome/", "bar",
+ "//chrome/foo/", "foo"},
+ {"//chrome/", "//chrome:bar(:foo)", true, "//chrome/", "bar", "//chrome/",
+ "foo"},
+ // TODO(brettw) it might be nice to make this an error:
+ //{"//chrome/", "//chrome:bar())", false, "", "", "", "" },
+ {"//chrome/", "//chrome:bar(//t:bar(tc))", false, "", "", "", ""},
+ {"//chrome/", "//chrome:bar(()", false, "", "", "", ""},
+ {"//chrome/", "(t:b)", false, "", "", "", ""},
+ {"//chrome/", ":bar(//t/b)", true, "//chrome/", "bar", "//t/b/", "b"},
+ {"//chrome/", ":bar(/t/b)", true, "//chrome/", "bar", "/t/b/", "b"},
+ {"//chrome/", ":bar(t/b)", true, "//chrome/", "bar", "//chrome/t/b/", "b"},
+ };
+
+ Label default_toolchain(SourceDir("//t/"), "d");
+
+ for (size_t i = 0; i < arraysize(cases); i++) {
+ const ParseDepStringCase& cur = cases[i];
+
+ std::string location, name;
+ Err err;
+ Value v(nullptr, Value::STRING);
+ v.string_value() = cur.str;
+ Label result =
+ Label::Resolve(SourceDir(cur.cur_dir), default_toolchain, v, &err);
+ EXPECT_EQ(cur.success, !err.has_error()) << i << " " << cur.str;
+ if (!err.has_error() && cur.success) {
+ EXPECT_EQ(cur.expected_dir, result.dir().value()) << i << " " << cur.str;
+ EXPECT_EQ(cur.expected_name, result.name()) << i << " " << cur.str;
+ EXPECT_EQ(cur.expected_toolchain_dir, result.toolchain_dir().value())
+ << i << " " << cur.str;
+ EXPECT_EQ(cur.expected_toolchain_name, result.toolchain_name())
+ << i << " " << cur.str;
+ }
+ }
+}
diff --git a/chromium/tools/gn/last_commit_position.py b/chromium/tools/gn/last_commit_position.py
new file mode 100644
index 00000000000..b710ed0b808
--- /dev/null
+++ b/chromium/tools/gn/last_commit_position.py
@@ -0,0 +1,101 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes the most recent "Cr-Commit-Position" value on the master branch
+to a C header file.
+
+Usage: last_commit_position.py <dir> <outfile> <headerguard>
+
+ <dir>
+ Some directory inside the repo to check. This will be used as the current
+ directory when running git. It's best to pass the repo toplevel directory.
+
+ <outfile>
+ C header file to write.
+
+ <headerguard>
+ String to use as the header guard for the written file.
+"""
+
+import os
+import re
+import subprocess
+import sys
+
+def RunGitCommand(directory, command):
+ """
+ Launches git subcommand.
+
+ Errors are swallowed.
+
+ Returns:
+ A process object or None.
+ """
+ command = ['git'] + command
+ # Force shell usage under cygwin. This is a workaround for
+ # mysterious loss of cwd while invoking cygwin's git.
+ # We can't just pass shell=True to Popen, as under win32 this will
+ # cause CMD to be used, while we explicitly want a cygwin shell.
+ if sys.platform == 'cygwin':
+ command = ['sh', '-c', ' '.join(command)]
+ try:
+ proc = subprocess.Popen(command,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ cwd=directory,
+ shell=(sys.platform=='win32'))
+ return proc
+ except OSError:
+ return None
+
+
+def FetchCommitPosition(directory):
+ regex = re.compile(r'\s*Cr-Commit-Position: refs/heads/master@\{#(\d+)\}\s*')
+
+ # Search this far backward in the git log. The commit position should be
+ # close to the top. We allow some slop for long commit messages, and maybe
+ # there were some local commits after the last "official" one. Having this
+ # max prevents us from searching all history in the case of an error.
+ max_lines = 2048
+
+ proc = RunGitCommand(directory, ['log'])
+ for i in range(max_lines):
+ line = proc.stdout.readline()
+ if not line:
+ return None
+
+ match = regex.match(line)
+ if match:
+ return match.group(1)
+
+ return None
+
+
+def WriteHeader(header_file, header_guard, value):
+ with open(header_file, 'w') as f:
+ f.write('''/* Generated by last_commit_position.py. */
+
+#ifndef %(guard)s
+#define %(guard)s
+
+#define LAST_COMMIT_POSITION "%(value)s"
+
+#endif
+''' % {'guard': header_guard, 'value': value})
+
+
+if len(sys.argv) != 4:
+ print "Wrong number of arguments"
+ sys.exit(1)
+
+git_directory = sys.argv[1]
+output_file = sys.argv[2]
+header_guard = sys.argv[3]
+
+value = FetchCommitPosition(git_directory)
+if not value:
+ print "Could not get last commit position."
+ sys.exit(1)
+
+WriteHeader(output_file, header_guard, value)
diff --git a/chromium/tools/gn/lib_file.cc b/chromium/tools/gn/lib_file.cc
new file mode 100644
index 00000000000..86eacb29c12
--- /dev/null
+++ b/chromium/tools/gn/lib_file.cc
@@ -0,0 +1,30 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/lib_file.h"
+
+#include "base/logging.h"
+
+LibFile::LibFile() {}
+
+LibFile::LibFile(const SourceFile& source_file) : source_file_(source_file) {}
+
+LibFile::LibFile(const base::StringPiece& lib_name)
+ : name_(lib_name.data(), lib_name.size()) {
+ DCHECK(!lib_name.empty());
+}
+
+void LibFile::Swap(LibFile* other) {
+ name_.swap(other->name_);
+ source_file_.swap(other->source_file_);
+}
+
+const std::string& LibFile::value() const {
+ return is_source_file() ? source_file_.value() : name_;
+}
+
+const SourceFile& LibFile::source_file() const {
+ DCHECK(is_source_file());
+ return source_file_;
+}
diff --git a/chromium/tools/gn/lib_file.h b/chromium/tools/gn/lib_file.h
new file mode 100644
index 00000000000..675a48c4ce3
--- /dev/null
+++ b/chromium/tools/gn/lib_file.h
@@ -0,0 +1,58 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_LIB_FILE_H_
+#define TOOLS_GN_LIB_FILE_H_
+
+#include <stddef.h>
+
+#include <algorithm>
+#include <string>
+
+#include "base/strings/string_piece.h"
+#include "tools/gn/source_file.h"
+
+// Represents an entry in "libs" list. Can be either a path (a SourceFile) or
+// a library name (a string).
+class LibFile {
+ public:
+ LibFile();
+ explicit LibFile(const base::StringPiece& lib_name);
+ explicit LibFile(const SourceFile& source_file);
+
+ void Swap(LibFile* other);
+ bool is_source_file() const { return name_.empty(); }
+
+ // Returns name, or source_file().value() (whichever is set).
+ const std::string& value() const;
+ const SourceFile& source_file() const;
+
+ bool operator==(const LibFile& other) const {
+ return value() == other.value();
+ }
+ bool operator!=(const LibFile& other) const { return !operator==(other); }
+ bool operator<(const LibFile& other) const { return value() < other.value(); }
+
+ private:
+ std::string name_;
+ SourceFile source_file_;
+};
+
+namespace BASE_HASH_NAMESPACE {
+
+template <>
+struct hash<LibFile> {
+ std::size_t operator()(const LibFile& v) const {
+ hash<std::string> h;
+ return h(v.value());
+ }
+};
+
+} // namespace BASE_HASH_NAMESPACE
+
+inline void swap(LibFile& lhs, LibFile& rhs) {
+ lhs.Swap(&rhs);
+}
+
+#endif // TOOLS_GN_LIB_FILE_H_
diff --git a/chromium/tools/gn/loader.cc b/chromium/tools/gn/loader.cc
new file mode 100644
index 00000000000..9061be82a0b
--- /dev/null
+++ b/chromium/tools/gn/loader.cc
@@ -0,0 +1,420 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/loader.h"
+
+#include "base/bind.h"
+#include "base/memory/ptr_util.h"
+#include "base/message_loop/message_loop.h"
+#include "base/stl_util.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/err.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/input_file_manager.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/scope_per_file_provider.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/source_dir.h"
+#include "tools/gn/source_file.h"
+#include "tools/gn/trace.h"
+
+namespace {
+
+struct SourceFileAndOrigin {
+ SourceFileAndOrigin(const SourceFile& f, const LocationRange& o)
+ : file(f),
+ origin(o) {
+ }
+
+ SourceFile file;
+ LocationRange origin;
+};
+
+} // namespace
+
+// Identifies one time a file is loaded in a given toolchain so we don't load
+// it more than once.
+struct LoaderImpl::LoadID {
+ LoadID() {}
+ LoadID(const SourceFile& f, const Label& tc_name)
+ : file(f),
+ toolchain_name(tc_name) {
+ }
+
+ bool operator<(const LoadID& other) const {
+ if (file.value() == other.file.value())
+ return toolchain_name < other.toolchain_name;
+ return file < other.file;
+ }
+
+ SourceFile file;
+ Label toolchain_name;
+};
+
+// Our tracking information for a toolchain.
+struct LoaderImpl::ToolchainRecord {
+ // The default toolchain label can be empty for the first time the default
+ // toolchain is loaded, since we don't know it yet. This will be fixed up
+ // later. It should be valid in all other cases.
+ ToolchainRecord(const BuildSettings* build_settings,
+ const Label& toolchain_label,
+ const Label& default_toolchain_label)
+ : settings(build_settings,
+ GetOutputSubdirName(toolchain_label,
+ toolchain_label == default_toolchain_label)),
+ is_toolchain_loaded(false),
+ is_config_loaded(false) {
+ settings.set_default_toolchain_label(default_toolchain_label);
+ settings.set_toolchain_label(toolchain_label);
+ }
+
+ Settings settings;
+
+ bool is_toolchain_loaded;
+ bool is_config_loaded;
+
+ std::vector<SourceFileAndOrigin> waiting_on_me;
+};
+
+// -----------------------------------------------------------------------------
+
+const void* const Loader::kDefaultToolchainKey = &kDefaultToolchainKey;
+
+Loader::Loader() {
+}
+
+Loader::~Loader() {
+}
+
+void Loader::Load(const Label& label, const LocationRange& origin) {
+ Load(BuildFileForLabel(label), origin, label.GetToolchainLabel());
+}
+
+// static
+SourceFile Loader::BuildFileForLabel(const Label& label) {
+ return SourceFile(label.dir().value() + "BUILD.gn");
+}
+
+// -----------------------------------------------------------------------------
+
+LoaderImpl::LoaderImpl(const BuildSettings* build_settings)
+ : main_loop_(base::MessageLoop::current()),
+ pending_loads_(0),
+ build_settings_(build_settings) {
+}
+
+LoaderImpl::~LoaderImpl() {
+ STLDeleteContainerPairSecondPointers(toolchain_records_.begin(),
+ toolchain_records_.end());
+}
+
+void LoaderImpl::Load(const SourceFile& file,
+ const LocationRange& origin,
+ const Label& in_toolchain_name) {
+ const Label& toolchain_name = in_toolchain_name.is_null()
+ ? default_toolchain_label_ : in_toolchain_name;
+ LoadID load_id(file, toolchain_name);
+ if (!invocations_.insert(load_id).second)
+    return;  // Already in set, so this file was already loaded or scheduled.
+
+ if (toolchain_records_.empty()) {
+    // Nothing loaded, need to load the default build config. The initial load
+ // should not specify a toolchain.
+ DCHECK(toolchain_name.is_null());
+
+ ToolchainRecord* record =
+ new ToolchainRecord(build_settings_, Label(), Label());
+ toolchain_records_[Label()] = record;
+
+    // The default build config is not dependent on the toolchain definition,
+ // since we need to load the build config before we know what the default
+ // toolchain name is.
+ record->is_toolchain_loaded = true;
+
+ record->waiting_on_me.push_back(SourceFileAndOrigin(file, origin));
+ ScheduleLoadBuildConfig(&record->settings, Scope::KeyValueMap());
+ return;
+ }
+
+ ToolchainRecord* record;
+ if (toolchain_name.is_null())
+ record = toolchain_records_[default_toolchain_label_];
+ else
+ record = toolchain_records_[toolchain_name];
+
+ if (!record) {
+ DCHECK(!default_toolchain_label_.is_null());
+
+ // No reference to this toolchain found yet, make one.
+ record = new ToolchainRecord(build_settings_, toolchain_name,
+ default_toolchain_label_);
+ toolchain_records_[toolchain_name] = record;
+
+ // Schedule a load of the toolchain using the default one.
+ Load(BuildFileForLabel(toolchain_name), origin, default_toolchain_label_);
+ }
+
+ if (record->is_config_loaded)
+ ScheduleLoadFile(&record->settings, origin, file);
+ else
+ record->waiting_on_me.push_back(SourceFileAndOrigin(file, origin));
+}
+
+void LoaderImpl::ToolchainLoaded(const Toolchain* toolchain) {
+ ToolchainRecord* record = toolchain_records_[toolchain->label()];
+ if (!record) {
+ DCHECK(!default_toolchain_label_.is_null());
+ record = new ToolchainRecord(build_settings_, toolchain->label(),
+ default_toolchain_label_);
+ toolchain_records_[toolchain->label()] = record;
+ }
+ record->is_toolchain_loaded = true;
+
+ // The default build config is loaded first, then its toolchain. Secondary
+ // ones are loaded in the opposite order so we can pass toolchain parameters
+ // to the build config. So we may or may not have a config at this point.
+ if (!record->is_config_loaded) {
+ ScheduleLoadBuildConfig(&record->settings, toolchain->args());
+ } else {
+ // There should be nobody waiting on this if the build config is already
+ // loaded.
+ DCHECK(record->waiting_on_me.empty());
+ }
+}
+
+Label LoaderImpl::GetDefaultToolchain() const {
+ return default_toolchain_label_;
+}
+
+const Settings* LoaderImpl::GetToolchainSettings(const Label& label) const {
+ ToolchainRecordMap::const_iterator found_toolchain;
+ if (label.is_null()) {
+ if (default_toolchain_label_.is_null())
+ return nullptr;
+ found_toolchain = toolchain_records_.find(default_toolchain_label_);
+ } else {
+ found_toolchain = toolchain_records_.find(label);
+ }
+
+ if (found_toolchain == toolchain_records_.end())
+ return nullptr;
+ return &found_toolchain->second->settings;
+}
+
+void LoaderImpl::ScheduleLoadFile(const Settings* settings,
+ const LocationRange& origin,
+ const SourceFile& file) {
+ Err err;
+ pending_loads_++;
+ if (!AsyncLoadFile(origin, settings->build_settings(), file,
+ base::Bind(&LoaderImpl::BackgroundLoadFile, this,
+ settings, file),
+ &err)) {
+ g_scheduler->FailWithError(err);
+ DecrementPendingLoads();
+ }
+}
+
+void LoaderImpl::ScheduleLoadBuildConfig(
+ Settings* settings,
+ const Scope::KeyValueMap& toolchain_overrides) {
+ Err err;
+ pending_loads_++;
+ if (!AsyncLoadFile(LocationRange(), settings->build_settings(),
+ settings->build_settings()->build_config_file(),
+ base::Bind(&LoaderImpl::BackgroundLoadBuildConfig,
+ this, settings, toolchain_overrides),
+ &err)) {
+ g_scheduler->FailWithError(err);
+ DecrementPendingLoads();
+ }
+}
+
+void LoaderImpl::BackgroundLoadFile(const Settings* settings,
+ const SourceFile& file_name,
+ const ParseNode* root) {
+ if (!root) {
+ main_loop_->PostTask(FROM_HERE,
+ base::Bind(&LoaderImpl::DecrementPendingLoads, this));
+ return;
+ }
+
+ if (g_scheduler->verbose_logging()) {
+ g_scheduler->Log("Running", file_name.value() + " with toolchain " +
+ settings->toolchain_label().GetUserVisibleName(false));
+ }
+
+ Scope our_scope(settings->base_config());
+ ScopePerFileProvider per_file_provider(&our_scope, true);
+ our_scope.set_source_dir(file_name.GetDir());
+
+ // Targets, etc. generated as part of running this file will end up here.
+ Scope::ItemVector collected_items;
+ our_scope.set_item_collector(&collected_items);
+
+ ScopedTrace trace(TraceItem::TRACE_FILE_EXECUTE, file_name.value());
+ trace.SetToolchain(settings->toolchain_label());
+
+ Err err;
+ root->Execute(&our_scope, &err);
+ if (err.has_error())
+ g_scheduler->FailWithError(err);
+
+ if (!our_scope.CheckForUnusedVars(&err))
+ g_scheduler->FailWithError(err);
+
+ // Pass all of the items that were defined off to the builder.
+ for (auto& item : collected_items) {
+ settings->build_settings()->ItemDefined(base::WrapUnique(item));
+ item = nullptr;
+ }
+
+ trace.Done();
+
+ main_loop_->PostTask(FROM_HERE, base::Bind(&LoaderImpl::DidLoadFile, this));
+}
+
+void LoaderImpl::BackgroundLoadBuildConfig(
+ Settings* settings,
+ const Scope::KeyValueMap& toolchain_overrides,
+ const ParseNode* root) {
+ if (!root) {
+ main_loop_->PostTask(FROM_HERE,
+ base::Bind(&LoaderImpl::DecrementPendingLoads, this));
+ return;
+ }
+
+ Scope* base_config = settings->base_config();
+ base_config->set_source_dir(SourceDir("//"));
+
+ settings->build_settings()->build_args().SetupRootScope(
+ base_config, toolchain_overrides);
+
+ base_config->SetProcessingBuildConfig();
+
+ // See kDefaultToolchainKey in the header.
+ Label default_toolchain_label;
+ if (settings->is_default())
+ base_config->SetProperty(kDefaultToolchainKey, &default_toolchain_label);
+
+ ScopedTrace trace(TraceItem::TRACE_FILE_EXECUTE,
+ settings->build_settings()->build_config_file().value());
+ trace.SetToolchain(settings->toolchain_label());
+
+ Err err;
+ root->Execute(base_config, &err);
+
+ // Clear all private variables left in the scope. We want the root build
+ // config to be like a .gni file in that variables beginning with an
+ // underscore aren't exported.
+ base_config->RemovePrivateIdentifiers();
+
+ trace.Done();
+
+ if (err.has_error())
+ g_scheduler->FailWithError(err);
+
+ base_config->ClearProcessingBuildConfig();
+ if (settings->is_default()) {
+ // The default toolchain must have been set in the default build config
+ // file.
+ if (default_toolchain_label.is_null()) {
+ g_scheduler->FailWithError(Err(Location(),
+ "The default build config file did not call set_default_toolchain()",
+ "If you don't call this, I can't figure out what toolchain to use\n"
+ "for all of this code."));
+ } else {
+ DCHECK(settings->toolchain_label().is_null());
+ settings->set_toolchain_label(default_toolchain_label);
+ }
+ }
+
+ main_loop_->PostTask(FROM_HERE,
+ base::Bind(&LoaderImpl::DidLoadBuildConfig, this,
+ settings->toolchain_label()));
+}
+
+void LoaderImpl::DidLoadFile() {
+ DecrementPendingLoads();
+}
+
+void LoaderImpl::DidLoadBuildConfig(const Label& label) {
+ // Do not return early, we must call DecrementPendingLoads() at the bottom.
+
+ ToolchainRecordMap::iterator found_toolchain = toolchain_records_.find(label);
+ ToolchainRecord* record = nullptr;
+ if (found_toolchain == toolchain_records_.end()) {
+ // When loading the default build config, we'll insert it into the record
+ // map with an empty label since we don't yet know what to call it.
+ //
+ // In this case, we should have exactly one entry in the map with an empty
+ // label. We now need to fix up the naming so it refers to the "real" one.
+ CHECK_EQ(1U, toolchain_records_.size());
+ ToolchainRecordMap::iterator empty_label = toolchain_records_.find(Label());
+ CHECK(empty_label != toolchain_records_.end());
+
+ // Fix up the toolchain record.
+ record = empty_label->second;
+ toolchain_records_[label] = record;
+ toolchain_records_.erase(empty_label);
+
+ // Save the default toolchain label.
+ default_toolchain_label_ = label;
+ DCHECK(record->settings.default_toolchain_label().is_null());
+ record->settings.set_default_toolchain_label(label);
+
+ // The settings object should have the toolchain label already set.
+ DCHECK(!record->settings.toolchain_label().is_null());
+
+ // Update any stored invocations that refer to the empty toolchain label.
+ // This will normally only be one, for the root build file, so brute-force
+ // is OK.
+ LoadIDSet old_loads;
+ invocations_.swap(old_loads);
+ for (const auto& load : old_loads) {
+ if (load.toolchain_name.is_null()) {
+ // Fix up toolchain label
+ invocations_.insert(LoadID(load.file, label));
+ } else {
+ // Can keep the old one.
+ invocations_.insert(load);
+ }
+ }
+ } else {
+ record = found_toolchain->second;
+ }
+
+ DCHECK(!record->is_config_loaded);
+ DCHECK(record->is_toolchain_loaded);
+ record->is_config_loaded = true;
+
+ // Schedule all waiting file loads.
+ for (const auto& waiting : record->waiting_on_me)
+ ScheduleLoadFile(&record->settings, waiting.origin, waiting.file);
+ record->waiting_on_me.clear();
+
+ DecrementPendingLoads();
+}
+
+void LoaderImpl::DecrementPendingLoads() {
+ DCHECK_GT(pending_loads_, 0);
+ pending_loads_--;
+ if (pending_loads_ == 0 && !complete_callback_.is_null())
+ complete_callback_.Run();
+}
+
+bool LoaderImpl::AsyncLoadFile(
+ const LocationRange& origin,
+ const BuildSettings* build_settings,
+ const SourceFile& file_name,
+ const base::Callback<void(const ParseNode*)>& callback,
+ Err* err) {
+ if (async_load_file_.is_null()) {
+ return g_scheduler->input_file_manager()->AsyncLoadFile(
+ origin, build_settings, file_name, callback, err);
+ }
+ return async_load_file_.Run(
+ origin, build_settings, file_name, callback, err);
+}
diff --git a/chromium/tools/gn/loader.h b/chromium/tools/gn/loader.h
new file mode 100644
index 00000000000..3d61fe4b4e8
--- /dev/null
+++ b/chromium/tools/gn/loader.h
@@ -0,0 +1,181 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_LOADER_H_
+#define TOOLS_GN_LOADER_H_
+
+#include <map>
+#include <set>
+
+#include "base/callback.h"
+#include "base/memory/ref_counted.h"
+#include "tools/gn/label.h"
+#include "tools/gn/scope.h"
+
+namespace base {
+class MessageLoop;
+}
+
+class BuildSettings;
+class LocationRange;
+class Settings;
+class SourceFile;
+class Toolchain;
+
+// The loader manages execution of the different build files. It receives
+// requests (normally from the Builder) when new references are found, and also
+// manages loading the build config files.
+//
+// This loader class is abstract so it can be mocked out for testing the
+// Builder.
+class Loader : public base::RefCountedThreadSafe<Loader> {
+ public:
+ Loader();
+
+  // Loads the given file in the context of the given toolchain. The initial
+ // call to this (the one that actually starts the generation) should have an
+ // empty toolchain name, which will trigger the load of the default build
+ // config.
+ virtual void Load(const SourceFile& file,
+ const LocationRange& origin,
+ const Label& toolchain_name) = 0;
+
+ // Notification that the given toolchain has loaded. This will unblock files
+ // waiting on this definition.
+ virtual void ToolchainLoaded(const Toolchain* toolchain) = 0;
+
+ // Returns the label of the default toolchain.
+ virtual Label GetDefaultToolchain() const = 0;
+
+ // Returns information about the toolchain with the given label. Will return
+ // false if we haven't processed this toolchain yet.
+ virtual const Settings* GetToolchainSettings(const Label& label) const = 0;
+
+ // Helper function that extracts the file and toolchain name from the given
+ // label, and calls Load().
+ void Load(const Label& label, const LocationRange& origin);
+
+ // Returns the build file that the given label references.
+ static SourceFile BuildFileForLabel(const Label& label);
+
+ // When processing the default build config, we want to capture the argument
+  // of set_default_toolchain. The implementation of that function uses this
+ // constant as a property key to get the Label* out of the scope where the
+ // label should be stored.
+ static const void* const kDefaultToolchainKey;
+
+ protected:
+ friend class base::RefCountedThreadSafe<Loader>;
+ virtual ~Loader();
+};
+
+class LoaderImpl : public Loader {
+ public:
+ // Callback to emulate InputFileManager::AsyncLoadFile.
+ typedef base::Callback<bool(const LocationRange&,
+ const BuildSettings*,
+ const SourceFile&,
+ const base::Callback<void(const ParseNode*)>&,
+ Err*)> AsyncLoadFileCallback;
+
+ explicit LoaderImpl(const BuildSettings* build_settings);
+
+ // Loader implementation.
+ void Load(const SourceFile& file,
+ const LocationRange& origin,
+ const Label& toolchain_name) override;
+ void ToolchainLoaded(const Toolchain* toolchain) override;
+ Label GetDefaultToolchain() const override;
+ const Settings* GetToolchainSettings(const Label& label) const override;
+
+ // Sets the message loop corresponding to the main thread. By default this
+ // class will use the thread active during construction, but there is not
+ // a message loop active during construction all the time.
+ void set_main_loop(base::MessageLoop* loop) { main_loop_ = loop; }
+
+ // The complete callback is called whenever there are no more pending loads.
+ // Called on the main thread only. This may be called more than once if the
+ // queue is drained, but then more stuff gets added.
+ void set_complete_callback(const base::Closure& cb) {
+ complete_callback_ = cb;
+ }
+
+ // This callback is used when the loader finds it wants to load a file.
+ void set_async_load_file(const AsyncLoadFileCallback& cb) {
+ async_load_file_ = cb;
+ }
+
+ const Label& default_toolchain_label() const {
+ return default_toolchain_label_;
+ }
+
+ private:
+ struct LoadID;
+ struct ToolchainRecord;
+
+ ~LoaderImpl() override;
+
+ // Schedules the input file manager to load the given file.
+ void ScheduleLoadFile(const Settings* settings,
+ const LocationRange& origin,
+ const SourceFile& file);
+ void ScheduleLoadBuildConfig(
+ Settings* settings,
+ const Scope::KeyValueMap& toolchain_overrides);
+
+ // Runs the given file on the background thread. These are called by the
+ // input file manager.
+ void BackgroundLoadFile(const Settings* settings,
+ const SourceFile& file_name,
+ const ParseNode* root);
+ void BackgroundLoadBuildConfig(
+ Settings* settings,
+ const Scope::KeyValueMap& toolchain_overrides,
+ const ParseNode* root);
+
+  // Posted to the main thread when any file other than a build config
+ // file has completed running.
+ void DidLoadFile();
+
+ // Posted to the main thread when any build config file has completed
+ // running. The label should be the name of the toolchain.
+ //
+  // If there is no default toolchain loaded yet, we'll assume that the first
+  // call to this refers to the default toolchain, and this function will
+ // set the default toolchain name to the given label.
+ void DidLoadBuildConfig(const Label& label);
+
+ // Decrements the pending_loads_ variable and issues the complete callback if
+ // necessary.
+ void DecrementPendingLoads();
+
+ // Forwards to the appropriate location to load the file.
+ bool AsyncLoadFile(const LocationRange& origin,
+ const BuildSettings* build_settings,
+ const SourceFile& file_name,
+ const base::Callback<void(const ParseNode*)>& callback,
+ Err* err);
+
+ base::MessageLoop* main_loop_;
+
+ int pending_loads_;
+ base::Closure complete_callback_;
+
+ // When non-null, use this callback instead of the InputFileManager for
+ // mocking purposes.
+ AsyncLoadFileCallback async_load_file_;
+
+ typedef std::set<LoadID> LoadIDSet;
+ LoadIDSet invocations_;
+
+ const BuildSettings* build_settings_;
+ Label default_toolchain_label_;
+
+ // Records for the build config file loads.
+ // Owning pointers.
+ typedef std::map<Label, ToolchainRecord*> ToolchainRecordMap;
+ ToolchainRecordMap toolchain_records_;
+};
+
+#endif // TOOLS_GN_LOADER_H_
diff --git a/chromium/tools/gn/loader_unittest.cc b/chromium/tools/gn/loader_unittest.cc
new file mode 100644
index 00000000000..47d0f80d5f9
--- /dev/null
+++ b/chromium/tools/gn/loader_unittest.cc
@@ -0,0 +1,186 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <map>
+#include <utility>
+#include <vector>
+
+#include "base/bind.h"
+#include "base/message_loop/message_loop.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/err.h"
+#include "tools/gn/loader.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/parser.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/tokenizer.h"
+
+namespace {
+
+class MockInputFileManager {
+ public:
+ typedef base::Callback<void(const ParseNode*)> Callback;
+
+ MockInputFileManager() {
+ }
+
+ LoaderImpl::AsyncLoadFileCallback GetCallback();
+
+ // Sets a given response for a given source file.
+ void AddCannedResponse(const SourceFile& source_file,
+ const std::string& source);
+
+ // Returns true if there is/are pending load(s) matching the given file(s).
+ bool HasOnePending(const SourceFile& f) const;
+ bool HasTwoPending(const SourceFile& f1, const SourceFile& f2) const;
+
+ void IssueAllPending();
+
+ private:
+ struct CannedResult {
+ std::unique_ptr<InputFile> input_file;
+ std::vector<Token> tokens;
+ std::unique_ptr<ParseNode> root;
+ };
+
+ bool AsyncLoadFile(const LocationRange& origin,
+ const BuildSettings* build_settings,
+ const SourceFile& file_name,
+ const Callback& callback,
+ Err* err) {
+ pending_.push_back(std::make_pair(file_name, callback));
+ return true;
+ }
+
+ typedef std::map<SourceFile, std::unique_ptr<CannedResult>> CannedResponseMap;
+ CannedResponseMap canned_responses_;
+
+ std::vector< std::pair<SourceFile, Callback> > pending_;
+};
+
+LoaderImpl::AsyncLoadFileCallback MockInputFileManager::GetCallback() {
+ return base::Bind(&MockInputFileManager::AsyncLoadFile,
+ base::Unretained(this));
+}
+
+// Sets a given response for a given source file.
+void MockInputFileManager::AddCannedResponse(const SourceFile& source_file,
+ const std::string& source) {
+ std::unique_ptr<CannedResult> canned(new CannedResult);
+ canned->input_file.reset(new InputFile(source_file));
+ canned->input_file->SetContents(source);
+
+ // Tokenize.
+ Err err;
+ canned->tokens = Tokenizer::Tokenize(canned->input_file.get(), &err);
+ EXPECT_FALSE(err.has_error());
+
+ // Parse.
+ canned->root = Parser::Parse(canned->tokens, &err);
+ EXPECT_FALSE(err.has_error());
+
+ canned_responses_[source_file] = std::move(canned);
+}
+
+bool MockInputFileManager::HasOnePending(const SourceFile& f) const {
+ return pending_.size() == 1u && pending_[0].first == f;
+}
+
+bool MockInputFileManager::HasTwoPending(const SourceFile& f1,
+ const SourceFile& f2) const {
+ if (pending_.size() != 2u)
+ return false;
+ return pending_[0].first == f1 && pending_[1].first == f2;
+}
+
+void MockInputFileManager::IssueAllPending() {
+ BlockNode block; // Default response.
+
+ for (const auto& cur : pending_) {
+ CannedResponseMap::const_iterator found = canned_responses_.find(cur.first);
+ if (found == canned_responses_.end())
+ cur.second.Run(&block);
+ else
+ cur.second.Run(found->second->root.get());
+ }
+ pending_.clear();
+}
+
+// LoaderTest ------------------------------------------------------------------
+
+class LoaderTest : public testing::Test {
+ public:
+ LoaderTest() {
+ build_settings_.SetBuildDir(SourceDir("//out/Debug/"));
+ }
+
+ protected:
+ Scheduler scheduler_;
+ BuildSettings build_settings_;
+ MockInputFileManager mock_ifm_;
+};
+
+} // namespace
+
+// -----------------------------------------------------------------------------
+
+TEST_F(LoaderTest, Foo) {
+ SourceFile build_config("//build/config/BUILDCONFIG.gn");
+ build_settings_.set_build_config_file(build_config);
+
+ scoped_refptr<LoaderImpl> loader(new LoaderImpl(&build_settings_));
+
+ // The default toolchain needs to be set by the build config file.
+ mock_ifm_.AddCannedResponse(build_config,
+ "set_default_toolchain(\"//tc:tc\")");
+
+ loader->set_async_load_file(mock_ifm_.GetCallback());
+
+ // Request the root build file be loaded. This should kick off the default
+ // build config loading.
+ SourceFile root_build("//BUILD.gn");
+ loader->Load(root_build, LocationRange(), Label());
+ EXPECT_TRUE(mock_ifm_.HasOnePending(build_config));
+
+ // Completing the build config load should kick off the root build file load.
+ mock_ifm_.IssueAllPending();
+ scheduler_.main_loop()->RunUntilIdle();
+ EXPECT_TRUE(mock_ifm_.HasOnePending(root_build));
+
+ // Load the root build file.
+ mock_ifm_.IssueAllPending();
+ scheduler_.main_loop()->RunUntilIdle();
+
+ // Schedule some other file to load in another toolchain.
+ Label second_tc(SourceDir("//tc2/"), "tc2");
+ SourceFile second_file("//foo/BUILD.gn");
+ loader->Load(second_file, LocationRange(), second_tc);
+ EXPECT_TRUE(mock_ifm_.HasOnePending(SourceFile("//tc2/BUILD.gn")));
+
+ // Running the toolchain file should schedule the build config file to load
+ // for that toolchain.
+ mock_ifm_.IssueAllPending();
+ scheduler_.main_loop()->RunUntilIdle();
+
+ // We have to tell it we have a toolchain definition now (normally the
+ // builder would do this).
+ const Settings* default_settings = loader->GetToolchainSettings(Label());
+ Toolchain second_tc_object(default_settings, second_tc);
+ loader->ToolchainLoaded(&second_tc_object);
+ EXPECT_TRUE(mock_ifm_.HasOnePending(build_config));
+
+ // Scheduling a second file to load in that toolchain should not make it
+ // pending yet (it's waiting for the build config).
+ SourceFile third_file("//bar/BUILD.gn");
+ loader->Load(third_file, LocationRange(), second_tc);
+ EXPECT_TRUE(mock_ifm_.HasOnePending(build_config));
+
+ // Running the build config file should make our third file pending.
+ mock_ifm_.IssueAllPending();
+ scheduler_.main_loop()->RunUntilIdle();
+ EXPECT_TRUE(mock_ifm_.HasTwoPending(second_file, third_file));
+
+ EXPECT_FALSE(scheduler_.is_failed());
+}
diff --git a/chromium/tools/gn/location.cc b/chromium/tools/gn/location.cc
new file mode 100644
index 00000000000..49ca3ffe717
--- /dev/null
+++ b/chromium/tools/gn/location.cc
@@ -0,0 +1,78 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/location.h"
+
+#include <tuple>
+
+#include "base/logging.h"
+#include "base/strings/string_number_conversions.h"
+#include "tools/gn/input_file.h"
+
+Location::Location()
+ : file_(nullptr),
+ line_number_(-1),
+ column_number_(-1) {
+}
+
+Location::Location(const InputFile* file,
+ int line_number,
+ int column_number,
+ int byte)
+ : file_(file),
+ line_number_(line_number),
+ column_number_(column_number),
+ byte_(byte) {
+}
+
+bool Location::operator==(const Location& other) const {
+ return other.file_ == file_ &&
+ other.line_number_ == line_number_ &&
+ other.column_number_ == column_number_;
+}
+
+bool Location::operator!=(const Location& other) const {
+ return !operator==(other);
+}
+
+bool Location::operator<(const Location& other) const {
+ DCHECK(file_ == other.file_);
+ return std::tie(line_number_, column_number_) <
+ std::tie(other.line_number_, other.column_number_);
+}
+
+std::string Location::Describe(bool include_column_number) const {
+ if (!file_)
+ return std::string();
+
+ std::string ret;
+ if (file_->friendly_name().empty())
+ ret = file_->name().value();
+ else
+ ret = file_->friendly_name();
+
+ ret += ":";
+ ret += base::IntToString(line_number_);
+ if (include_column_number) {
+ ret += ":";
+ ret += base::IntToString(column_number_);
+ }
+ return ret;
+}
+
+LocationRange::LocationRange() {
+}
+
+LocationRange::LocationRange(const Location& begin, const Location& end)
+ : begin_(begin),
+ end_(end) {
+ DCHECK(begin_.file() == end_.file());
+}
+
+LocationRange LocationRange::Union(const LocationRange& other) const {
+ DCHECK(begin_.file() == other.begin_.file());
+ return LocationRange(
+ begin_ < other.begin_ ? begin_ : other.begin_,
+ end_ < other.end_ ? other.end_ : end_);
+}
diff --git a/chromium/tools/gn/location.h b/chromium/tools/gn/location.h
new file mode 100644
index 00000000000..44d1a6fe68f
--- /dev/null
+++ b/chromium/tools/gn/location.h
@@ -0,0 +1,56 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_LOCATION_H_
+#define TOOLS_GN_LOCATION_H_
+
+#include <string>
+
+class InputFile;
+
+// Represents a place in a source file. Used for error reporting.
+class Location {
+ public:
+ Location();
+ Location(const InputFile* file, int line_number, int column_number, int byte);
+
+ const InputFile* file() const { return file_; }
+ int line_number() const { return line_number_; }
+ int column_number() const { return column_number_; }
+ int byte() const { return byte_; }
+
+ bool operator==(const Location& other) const;
+ bool operator!=(const Location& other) const;
+ bool operator<(const Location& other) const;
+
+ // Returns a string with the file, line, and (optionally) the character
+ // offset for this location. If this location is null, returns an empty
+ // string.
+ std::string Describe(bool include_column_number) const;
+
+ private:
+ const InputFile* file_; // Null when unset.
+ int line_number_; // -1 when unset. 1-based.
+ int column_number_; // -1 when unset. 1-based.
+ int byte_; // Index into the buffer, 0-based.
+};
+
+// Represents a range in a source file. Used for error reporting.
+// The end is exclusive i.e. [begin, end)
+class LocationRange {
+ public:
+ LocationRange();
+ LocationRange(const Location& begin, const Location& end);
+
+ const Location& begin() const { return begin_; }
+ const Location& end() const { return end_; }
+
+ LocationRange Union(const LocationRange& other) const;
+
+ private:
+ Location begin_;
+ Location end_;
+};
+
+#endif // TOOLS_GN_LOCATION_H_
diff --git a/chromium/tools/gn/misc/emacs/gn-mode.el b/chromium/tools/gn/misc/emacs/gn-mode.el
new file mode 100644
index 00000000000..66822907c67
--- /dev/null
+++ b/chromium/tools/gn/misc/emacs/gn-mode.el
@@ -0,0 +1,155 @@
+;;; gn-mode.el - A major mode for editing gn files.
+
+;; Copyright 2015 The Chromium Authors. All rights reserved.
+;; Use of this source code is governed by a BSD-style license that can be
+;; found in the LICENSE file.
+
+;; Author: Elliot Glaysher <erg@chromium.org>
+;; Created: April 03, 2015
+;; Keywords: tools, gn, ninja, chromium
+
+;; This file is not part of GNU Emacs.
+
+;;; Commentary:
+
+;; A major mode for editing GN files. GN stands for Generate Ninja. GN is the
+;; meta build system used in Chromium. For more information on GN, see the GN
+;; manual: <https://chromium.googlesource.com/chromium/src/+/master/tools/gn/README.md>
+
+;;; To Do:
+
+;; - We syntax highlight builtin actions, but don't highlight instantiations of
+;; templates. Should we?
+
+
+
+(eval-when-compile (require 'cl)) ;For the `case' macro.
+(require 'smie)
+
+(defgroup gn nil
+ "Major mode for editing Generate Ninja files."
+ :prefix "gn-"
+ :group 'languages)
+
+(defcustom gn-indent-basic 2
+ "The number of spaces to indent a new scope."
+ :group 'gn
+ :type 'integer)
+
+(defgroup gn-faces nil
+ "Faces used in Generate Ninja mode."
+ :group 'gn
+ :group 'faces)
+
+(defface gn-embedded-variable
+ '((t :inherit font-lock-variable-name-face))
+ "Font lock face used to highlight variable names in strings."
+ :group 'gn-faces)
+
+(defface gn-embedded-variable-boundary
+ '((t :bold t
+ :inherit gn-embedded-variable))
+ "Font lock face used to highlight the '$' that starts a
+variable name or the '{{' and '}}' which surround it."
+ :group 'gn-faces)
+
+(defvar gn-font-lock-target-declaration-keywords
+ '("action" "action_foreach" "copy" "executable" "group" "loadable_module"
+ "shared_library" "source_set" "static_library" "if" "else"))
+
+(defvar gn-font-lock-buildfile-fun-keywords
+ '("assert" "config" "declare_args" "defined" "exec_script" "foreach"
+ "get_label_info" "get_path_info" "get_target_outputs" "getenv" "import"
+ "print" "process_file_template" "read_file" "rebase_path"
+ "set_default_toolchain" "set_defaults" "set_sources_assignment_filter"
+ "template" "tool" "toolchain" "toolchain_args" "write_file"))
+
+(defvar gn-font-lock-predefined-var-keywords
+ '("current_cpu" "current_os" "current_toolchain" "default_toolchain"
+ "host_cpu" "host_os" "python_path" "root_build_dir" "root_gen_dir"
+ "root_out_dir" "target_cpu" "target_gen_dir" "target_os" "target_out_dir"))
+
+(defvar gn-font-lock-var-keywords
+ '("all_dependent_configs" "allow_circular_includes_from" "args" "asmflags"
+ "cflags" "cflags_c" "cflags_cc" "cflags_objc" "cflags_objcc"
+ "check_includes" "complete_static_lib" "configs" "data" "data_deps"
+ "defines" "depfile" "deps" "include_dirs" "inputs" "ldflags" "lib_dirs"
+ "libs" "output_extension" "output_name" "outputs" "public" "public_configs"
+ "public_deps" "script" "sources" "testonly" "visibility"))
+
+(defconst gn-font-lock-keywords
+ `((,(regexp-opt gn-font-lock-target-declaration-keywords 'words) .
+ font-lock-keyword-face)
+ (,(regexp-opt gn-font-lock-buildfile-fun-keywords 'words) .
+ font-lock-function-name-face)
+ (,(regexp-opt gn-font-lock-predefined-var-keywords 'words) .
+ font-lock-constant-face)
+ (,(regexp-opt gn-font-lock-var-keywords 'words) .
+ font-lock-variable-name-face)
+ ;; $variables_like_this
+ ("\\(\\$\\)\\([a-zA-Z0-9_]+\\)"
+ (1 'gn-embedded-variable-boundary t)
+ (2 'gn-embedded-variable t))
+ ;; ${variables_like_this}
+ ("\\(\\${\\)\\([^\n }]+\\)\\(}\\)"
+ (1 'gn-embedded-variable-boundary t)
+ (2 'gn-embedded-variable t)
+ (3 'gn-embedded-variable-boundary t))
+ ;; {{placeholders}} (see substitute_type.h)
+ ("\\({{\\)\\([^\n }]+\\)\\(}}\\)"
+ (1 'gn-embedded-variable-boundary t)
+ (2 'gn-embedded-variable t)
+ (3 'gn-embedded-variable-boundary t))))
+
+(defun gn-smie-rules (kind token)
+ "These are slightly modified indentation rules from the SMIE
+ Indentation Example info page. This changes the :before rule
+ and adds a :list-intro to handle our x = [ ] syntax."
+ (pcase (cons kind token)
+ (`(:elem . basic) gn-indent-basic)
+ (`(,_ . ",") (smie-rule-separator kind))
+ (`(:list-intro . "") gn-indent-basic)
+ (`(:before . ,(or `"[" `"(" `"{"))
+ (if (smie-rule-hanging-p) (smie-rule-parent)))
+ (`(:before . "if")
+ (and (not (smie-rule-bolp)) (smie-rule-prev-p "else")
+ (smie-rule-parent)))))
+
+(defun gn-fill-paragraph (&optional justify)
+ "We only fill inside of comments in GN mode."
+ (interactive "P")
+ (or (fill-comment-paragraph justify)
+ ;; Never return nil; `fill-paragraph' will perform its default behavior
+ ;; if we do.
+ t))
+
+;;;###autoload
+(define-derived-mode gn-mode prog-mode "GN"
+ "Major mode for editing gn (Generate Ninja)."
+ :group 'gn
+
+ (setq-local comment-use-syntax t)
+ (setq-local comment-start "#")
+ (setq-local comment-end "")
+ (setq-local indent-tabs-mode nil)
+
+ (setq-local fill-paragraph-function 'gn-fill-paragraph)
+
+ (setq-local font-lock-defaults '(gn-font-lock-keywords))
+
+ ;; For every 'rule("name") {', adds "name" to the imenu for quick navigation.
+ (setq-local imenu-generic-expression
+ '((nil "^\s*[a-zA-Z0-9_]+(\"\\([a-zA-Z0-9_]+\\)\")\s*{" 1)))
+
+ (smie-setup nil #'gn-smie-rules)
+ (setq-local smie-indent-basic gn-indent-basic)
+
+  ;; python style comment: “# …”
+ (modify-syntax-entry ?# "< b" gn-mode-syntax-table)
+ (modify-syntax-entry ?\n "> b" gn-mode-syntax-table)
+ (modify-syntax-entry ?_ "w" gn-mode-syntax-table))
+
+;;;###autoload
+(add-to-list 'auto-mode-alist '("\\.gni?\\'" . gn-mode))
+
+(provide 'gn-mode)
diff --git a/chromium/tools/gn/misc/tm/GN.tmLanguage b/chromium/tools/gn/misc/tm/GN.tmLanguage
new file mode 100644
index 00000000000..6a80a36e5e0
--- /dev/null
+++ b/chromium/tools/gn/misc/tm/GN.tmLanguage
@@ -0,0 +1,102 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>fileTypes</key>
+ <array>
+ <string>gn</string>
+ <string>gni</string>
+ </array>
+ <key>name</key>
+ <string>GN</string>
+ <key>patterns</key>
+ <array>
+ <dict>
+ <key>comment</key>
+ <string>keywords</string>
+ <key>match</key>
+ <string>\b(?:if)\b</string>
+ <key>name</key>
+ <string>keyword.control.gn</string>
+ </dict>
+ <dict>
+ <key>comment</key>
+ <string>constants</string>
+ <key>match</key>
+ <string>\b(?:true|false)\b</string>
+ <key>name</key>
+ <string>constant.language.gn</string>
+ </dict>
+ <dict>
+ <key>comment</key>
+ <string>numbers</string>
+ <key>match</key>
+ <string>\b\d+\.?(?:\d+)?\b</string>
+ <key>name</key>
+ <string>constant.numeric.gn</string>
+ </dict>
+ <dict>
+ <key>comment</key>
+ <string>double quoted string</string>
+ <key>match</key>
+ <string>\"[^\"]*\"</string>
+ <key>name</key>
+ <string>string.quoted.double.gn</string>
+ </dict>
+ <dict>
+ <key>comment</key>
+ <string>comment</string>
+ <key>begin</key>
+ <string>#</string>
+ <key>end</key>
+ <string>$</string>
+ <key>name</key>
+ <string>comment.gn</string>
+ </dict>
+ <dict>
+ <key>comment</key>
+ <string>operators</string>
+ <key>match</key>
+ <string>(?:=|==|\+=|-=|\+|-)</string>
+ <key>name</key>
+ <string>keyword.operator.gn</string>
+ </dict>
+ <dict>
+ <key>comment</key>
+ <string>targets</string>
+ <key>match</key>
+ <string>\b(?:action|action_foreach|copy|executable|group|loadable_module|shared_library|source_set|static_library)\b</string>
+ <key>name</key>
+ <string>entity.name.tag.gn</string>
+ </dict>
+ <dict>
+ <key>comment</key>
+ <string>functions</string>
+ <key>match</key>
+ <string>\b(?:assert|config|declare_args|defined|exec_script|foreach|get_label_info|get_path_info|get_target_outputs|getenv|import|print|process_file_template|read_file|rebase_path|set_default_toolchain|set_defaults|set_sources_assignment_filter|template|tool|toolchain|toolchain_args|write_file)\b</string>
+ <key>name</key>
+ <string>entity.name.function.gn</string>
+ </dict>
+ <dict>
+ <key>comment</key>
+ <string>predefined variables</string>
+ <key>match</key>
+ <string>\b(?:current_cpu|current_os|current_toolchain|default_toolchain|host_cpu|host_os|python_path|root_build_dir|root_gen_dir|root_out_dir|target_cpu|target_gen_dir|target_os|target_out_dir)\b</string>
+ <key>name</key>
+ <string>variable.parameter.gn</string>
+ </dict>
+ <dict>
+ <key>comment</key>
+ <string>target variables</string>
+ <key>match</key>
+ <string>\b(?:all_dependent_configs|allow_circular_includes_from|args|asmflags|cflags|cflags_c|cflags_cc|cflags_objc|cflags_objcc|check_includes|complete_static_lib|configs|data|data_deps|defines|depfile|deps|include_dirs|inputs|ldflags|lib_dirs|libs|output_extension|output_name|outputs|public|public_configs|public_deps|script|sources|testonly|visibility)\b</string>
+ <key>name</key>
+ <string>entity.other.attribute-name.gn</string>
+ </dict>
+ </array>
+ <key>scopeName</key>
+ <string>source.gn</string>
+ <key>uuid</key>
+ <string>DE419F8C-EC46-4824-87F3-732BD08694DC</string>
+</dict>
+</plist>
diff --git a/chromium/tools/gn/misc/tm/GN.tmPreferences b/chromium/tools/gn/misc/tm/GN.tmPreferences
new file mode 100644
index 00000000000..2706d51344a
--- /dev/null
+++ b/chromium/tools/gn/misc/tm/GN.tmPreferences
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+ <key>name</key>
+ <string>Comments</string>
+ <key>scope</key>
+ <string>source.gn</string>
+ <key>settings</key>
+ <dict>
+ <key>shellVariables</key>
+ <array>
+ <dict>
+ <key>name</key>
+ <string>TM_COMMENT_START</string>
+ <key>value</key>
+ <string># </string>
+ </dict>
+ </array>
+ </dict>
+</dict>
+</plist> \ No newline at end of file
diff --git a/chromium/tools/gn/misc/vim/README.chromium b/chromium/tools/gn/misc/vim/README.chromium
new file mode 100644
index 00000000000..07834967e85
--- /dev/null
+++ b/chromium/tools/gn/misc/vim/README.chromium
@@ -0,0 +1,5 @@
+You can use this by adding
+
+ set runtimepath^=.../tools/gn/misc/vim
+
+to your .vimrc.
diff --git a/chromium/tools/gn/misc/vim/ftdetect/gnfiletype.vim b/chromium/tools/gn/misc/vim/ftdetect/gnfiletype.vim
new file mode 100644
index 00000000000..20448c15b11
--- /dev/null
+++ b/chromium/tools/gn/misc/vim/ftdetect/gnfiletype.vim
@@ -0,0 +1,27 @@
+" Copyright 2014 The Chromium Authors. All rights reserved.
+" Use of this source code is governed by a BSD-style license that can be
+" found in the LICENSE file.
+
+" We take care to preserve the user's fileencodings and fileformats,
+" because those settings are global (not buffer local), yet we want
+" to override them for loading GN files, which should be UTF-8.
+let s:current_fileformats = ''
+let s:current_fileencodings = ''
+
+" define fileencodings to open as utf-8 encoding even if it's ascii.
+function! s:gnfiletype_pre()
+ let s:current_fileformats = &g:fileformats
+ let s:current_fileencodings = &g:fileencodings
+ set fileencodings=utf-8 fileformats=unix
+ setlocal filetype=gn
+endfunction
+
+" restore fileencodings as others
+function! s:gnfiletype_post()
+ let &g:fileformats = s:current_fileformats
+ let &g:fileencodings = s:current_fileencodings
+endfunction
+
+au BufNewFile *.gn,*.gni setlocal filetype=gn fileencoding=utf-8 fileformat=unix
+au BufRead *.gn,*.gni call s:gnfiletype_pre()
+au BufReadPost *.gn,*.gni call s:gnfiletype_post()
diff --git a/chromium/tools/gn/misc/vim/syntax/gn.vim b/chromium/tools/gn/misc/vim/syntax/gn.vim
new file mode 100644
index 00000000000..55f18524c93
--- /dev/null
+++ b/chromium/tools/gn/misc/vim/syntax/gn.vim
@@ -0,0 +1,82 @@
+" Copyright 2014 The Chromium Authors. All rights reserved.
+" Use of this source code is governed by a BSD-style license that can be
+" found in the LICENSE file.
+"
+" gn.vim: Vim syntax file for GN.
+"
+" Quit when a (custom) syntax file was already loaded
+"if exists("b:current_syntax")
+ "finish
+"endif
+
+syn case match
+
+" Keywords within functions
+syn keyword gnConditional if else
+hi def link gnConditional Conditional
+
+" Predefined variables
+syn keyword gnPredefVar current_cpu current_os current_toolchain
+syn keyword gnPredefVar default_toolchain host_cpu host_os
+syn keyword gnPredefVar root_build_dir root_gen_dir root_out_dir
+syn keyword gnPredefVar target_cpu target_gen_dir target_out_dir
+syn keyword gnPredefVar target_os
+syn keyword gnPredefVar true false
+hi def link gnPredefVar Constant
+
+" Target declarations
+syn keyword gnTarget action action_foreach copy executable group
+syn keyword gnTarget shared_library source_set static_library
+syn keyword gnTarget loadable_module
+hi def link gnTarget Type
+
+" Buildfile functions
+syn keyword gnFunctions assert config declare_args defined exec_script
+syn keyword gnFunctions foreach get_label_info get_path_info
+syn keyword gnFunctions get_target_outputs getenv import print
+syn keyword gnFunctions process_file_template read_file rebase_path
+syn keyword gnFunctions set_default_toolchain set_defaults
+syn keyword gnFunctions set_sources_assignment_filter template tool
+syn keyword gnFunctions toolchain toolchain_args write_file
+hi def link gnFunctions Macro
+
+" Variables
+syn keyword gnVariable all_dependent_configs allow_circular_includes_from
+syn keyword gnVariable args asmflags cflags cflags_c cflags_cc cflags_objc
+syn keyword gnVariable cflags_objcc check_includes complete_static_lib
+syn keyword gnVariable configs data data_deps defines depfile deps
+syn keyword gnVariable include_dirs inputs ldflags lib_dirs libs
+syn keyword gnVariable output_extension output_name outputs public
+syn keyword gnVariable public_configs public_deps script sources testonly
+syn keyword gnVariable visibility
+hi def link gnVariable Keyword
+
+" Strings
+syn region gnString start=+L\="+ skip=+\\\\\|\\"+ end=+"+ contains=@Spell
+hi def link gnString String
+
+" Comments
+syn keyword gnTodo contained TODO FIXME XXX BUG NOTE
+syn cluster gnCommentGroup contains=gnTodo
+syn region gnComment start="#" end="$" contains=@gnCommentGroup,@Spell
+
+hi def link gnComment Comment
+hi def link gnTodo Todo
+
+" Operators; I think this is a bit too colourful.
+"syn match gnOperator /=/
+"syn match gnOperator /!=/
+"syn match gnOperator />=/
+"syn match gnOperator /<=/
+"syn match gnOperator /==/
+"syn match gnOperator /+=/
+"syn match gnOperator /-=/
+"syn match gnOperator /\s>\s/
+"syn match gnOperator /\s<\s/
+"syn match gnOperator /\s+\s/
+"syn match gnOperator /\s-\s/
+"hi def link gnOperator Operator
+
+syn sync minlines=500
+
+let b:current_syntax = "gn"
diff --git a/chromium/tools/gn/ninja_action_target_writer.cc b/chromium/tools/gn/ninja_action_target_writer.cc
new file mode 100644
index 00000000000..aed8ffbc864
--- /dev/null
+++ b/chromium/tools/gn/ninja_action_target_writer.cc
@@ -0,0 +1,221 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/ninja_action_target_writer.h"
+
+#include <stddef.h>
+
+#include "base/strings/string_util.h"
+#include "tools/gn/deps_iterator.h"
+#include "tools/gn/err.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/string_utils.h"
+#include "tools/gn/substitution_writer.h"
+#include "tools/gn/target.h"
+
+NinjaActionTargetWriter::NinjaActionTargetWriter(const Target* target,
+ std::ostream& out)
+ : NinjaTargetWriter(target, out),
+ path_output_no_escaping_(
+ target->settings()->build_settings()->build_dir(),
+ target->settings()->build_settings()->root_path_utf8(),
+ ESCAPE_NONE) {
+}
+
+NinjaActionTargetWriter::~NinjaActionTargetWriter() {
+}
+
+void NinjaActionTargetWriter::Run() {
+ std::string custom_rule_name = WriteRuleDefinition();
+
+ // Collect our deps to pass as "extra hard dependencies" for input deps. This
+ // will force all of the action's dependencies to be completed before the
+ // action is run. Usually, if an action has a dependency, it will be
+ // operating on the result of that previous step, so we need to be sure to
+ // serialize these.
+ std::vector<const Target*> extra_hard_deps;
+ for (const auto& pair : target_->GetDeps(Target::DEPS_LINKED))
+ extra_hard_deps.push_back(pair.ptr);
+
+ // For ACTIONs this is a bit inefficient since it creates an input dep
+ // stamp file even though we're only going to use it once. It would save a
+ // build step to skip this and write the order-only deps directly on the
+ // build rule. This should probably be handled by WriteInputDepsStampAndGetDep
+ // automatically if we supply a count of sources (so it can optimize based on
+ // how many times things would be duplicated).
+ OutputFile input_dep = WriteInputDepsStampAndGetDep(extra_hard_deps);
+ out_ << std::endl;
+
+ // Collects all output files for writing below.
+ std::vector<OutputFile> output_files;
+
+ if (target_->output_type() == Target::ACTION_FOREACH) {
+ // Write separate build lines for each input source file.
+ WriteSourceRules(custom_rule_name, input_dep, &output_files);
+ } else {
+ DCHECK(target_->output_type() == Target::ACTION);
+
+ // Write a rule that invokes the script once with the outputs as outputs,
+ // and the data as inputs. It does not depend on the sources.
+ out_ << "build";
+ SubstitutionWriter::GetListAsOutputFiles(
+ settings_, target_->action_values().outputs(), &output_files);
+ path_output_.WriteFiles(out_, output_files);
+
+ out_ << ": " << custom_rule_name;
+ if (!input_dep.value().empty()) {
+ // As in WriteSourceRules, we want to force this target to rebuild any
+ // time any of its dependencies change.
+ out_ << " | ";
+ path_output_.WriteFile(out_, input_dep);
+ }
+ out_ << std::endl;
+ if (target_->action_values().has_depfile()) {
+ out_ << " depfile = ";
+ WriteDepfile(SourceFile());
+ out_ << std::endl;
+ }
+ if (target_->action_values().is_console()) {
+ out_ << " pool = console";
+ out_ << std::endl;
+ }
+ }
+ out_ << std::endl;
+
+ // Write the stamp, which also depends on all data deps. These are needed at
+ // runtime and should be compiled when the action is, but don't need to be
+ // done before we run the action.
+ std::vector<OutputFile> data_outs;
+ for (const auto& dep : target_->data_deps())
+ data_outs.push_back(dep.ptr->dependency_output_file());
+ WriteStampForTarget(output_files, data_outs);
+}
+
+std::string NinjaActionTargetWriter::WriteRuleDefinition() {
+ // Make a unique name for this rule.
+ //
+ // Use a unique name for the response file when there are multiple build
+ // steps so that they don't stomp on each other. When there are no sources,
+ // there will be only one invocation so we can use a simple name.
+ std::string target_label = target_->label().GetUserVisibleName(true);
+ std::string custom_rule_name(target_label);
+ base::ReplaceChars(custom_rule_name, ":/()", "_", &custom_rule_name);
+ custom_rule_name.append("_rule");
+
+ const SubstitutionList& args = target_->action_values().args();
+ EscapeOptions args_escape_options;
+ args_escape_options.mode = ESCAPE_NINJA_COMMAND;
+
+ out_ << "rule " << custom_rule_name << std::endl;
+
+ if (target_->action_values().uses_rsp_file()) {
+ // Needs a response file. The unique_name part is for action_foreach so
+ // each invocation of the rule gets a different response file. This isn't
+ // strictly necessary for regular one-shot actions, but it's easier to
+ // just always define unique_name.
+ std::string rspfile = custom_rule_name;
+ if (!target_->sources().empty())
+ rspfile += ".$unique_name";
+ rspfile += ".rsp";
+ out_ << " rspfile = " << rspfile << std::endl;
+
+ // Response file contents.
+ out_ << " rspfile_content =";
+ for (const auto& arg :
+ target_->action_values().rsp_file_contents().list()) {
+ out_ << " ";
+ SubstitutionWriter::WriteWithNinjaVariables(
+ arg, args_escape_options, out_);
+ }
+ out_ << std::endl;
+ }
+
+ out_ << " command = ";
+ path_output_.WriteFile(out_, settings_->build_settings()->python_path());
+ out_ << " ";
+ path_output_.WriteFile(out_, target_->action_values().script());
+ for (const auto& arg : args.list()) {
+ out_ << " ";
+ SubstitutionWriter::WriteWithNinjaVariables(
+ arg, args_escape_options, out_);
+ }
+ out_ << std::endl;
+ out_ << " description = ACTION " << target_label << std::endl;
+ out_ << " restat = 1" << std::endl;
+
+ return custom_rule_name;
+}
+
+void NinjaActionTargetWriter::WriteSourceRules(
+ const std::string& custom_rule_name,
+ const OutputFile& input_dep,
+ std::vector<OutputFile>* output_files) {
+ EscapeOptions args_escape_options;
+ args_escape_options.mode = ESCAPE_NINJA_COMMAND;
+ // We're writing the substitution values, these should not be quoted since
+ // they will get pasted into the real command line.
+ args_escape_options.inhibit_quoting = true;
+
+ const Target::FileList& sources = target_->sources();
+ for (size_t i = 0; i < sources.size(); i++) {
+ out_ << "build";
+ WriteOutputFilesForBuildLine(sources[i], output_files);
+
+ out_ << ": " << custom_rule_name << " ";
+ path_output_.WriteFile(out_, sources[i]);
+ if (!input_dep.value().empty()) {
+ // Using "|" for the dependencies forces all implicit dependencies to be
+ // fully up-to-date before running the action, and will re-run this
+ // action if any input dependencies change. This is important because
+ // this action may consume the outputs of previous steps.
+ out_ << " | ";
+ path_output_.WriteFile(out_, input_dep);
+ }
+ out_ << std::endl;
+
+ // Response files require a unique name be defined.
+ if (target_->action_values().uses_rsp_file())
+ out_ << " unique_name = " << i << std::endl;
+
+ // The required types is the union of the args and response file. This
+ // might theoretically duplicate a definition if the same substitution is
+  // used in both the args and the response file. However, this should be
+ // very unusual (normally the substitutions will go in one place or the
+ // other) and the redundant assignment won't bother Ninja.
+ SubstitutionWriter::WriteNinjaVariablesForSource(
+ settings_, sources[i],
+ target_->action_values().args().required_types(),
+ args_escape_options, out_);
+ SubstitutionWriter::WriteNinjaVariablesForSource(
+ settings_, sources[i],
+ target_->action_values().rsp_file_contents().required_types(),
+ args_escape_options, out_);
+
+ if (target_->action_values().has_depfile()) {
+ out_ << " depfile = ";
+ WriteDepfile(sources[i]);
+ out_ << std::endl;
+ }
+ }
+}
+
+void NinjaActionTargetWriter::WriteOutputFilesForBuildLine(
+ const SourceFile& source,
+ std::vector<OutputFile>* output_files) {
+ size_t first_output_index = output_files->size();
+
+ SubstitutionWriter::ApplyListToSourceAsOutputFile(
+ settings_, target_->action_values().outputs(), source, output_files);
+
+ for (size_t i = first_output_index; i < output_files->size(); i++) {
+ out_ << " ";
+ path_output_.WriteFile(out_, (*output_files)[i]);
+ }
+}
+
+void NinjaActionTargetWriter::WriteDepfile(const SourceFile& source) {
+ path_output_.WriteFile(out_,
+ SubstitutionWriter::ApplyPatternToSourceAsOutputFile(
+ settings_, target_->action_values().depfile(), source));
+}
diff --git a/chromium/tools/gn/ninja_action_target_writer.h b/chromium/tools/gn/ninja_action_target_writer.h
new file mode 100644
index 00000000000..71c12397622
--- /dev/null
+++ b/chromium/tools/gn/ninja_action_target_writer.h
@@ -0,0 +1,64 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_NINJA_ACTION_TARGET_WRITER_H_
+#define TOOLS_GN_NINJA_ACTION_TARGET_WRITER_H_
+
+#include <vector>
+
+#include "base/gtest_prod_util.h"
+#include "base/macros.h"
+#include "tools/gn/ninja_target_writer.h"
+
+class OutputFile;
+
+// Writes a .ninja file for an action target type.
+class NinjaActionTargetWriter : public NinjaTargetWriter {
+ public:
+ NinjaActionTargetWriter(const Target* target, std::ostream& out);
+ ~NinjaActionTargetWriter() override;
+
+ void Run() override;
+
+ private:
+ FRIEND_TEST_ALL_PREFIXES(NinjaActionTargetWriter,
+ WriteOutputFilesForBuildLine);
+ FRIEND_TEST_ALL_PREFIXES(NinjaActionTargetWriter,
+ WriteOutputFilesForBuildLineWithDepfile);
+ FRIEND_TEST_ALL_PREFIXES(NinjaActionTargetWriter,
+ WriteArgsSubstitutions);
+
+ // Writes the Ninja rule for invoking the script.
+ //
+ // Returns the name of the custom rule generated. This will be based on the
+ // target name, and will include the string "$unique_name" if there are
+ // multiple inputs.
+ std::string WriteRuleDefinition();
+
+ // Writes the rules for compiling each source, writing all output files
+ // to the given vector.
+ //
+  // input_dep is a file expressing the dependencies common to all build steps.
+ // It will be a stamp file if there is more than one.
+ void WriteSourceRules(const std::string& custom_rule_name,
+ const OutputFile& input_dep,
+ std::vector<OutputFile>* output_files);
+
+ // Writes the output files generated by the output template for the given
+ // source file. This will start with a space and will not include a newline.
+ // Appends the output files to the given vector.
+ void WriteOutputFilesForBuildLine(const SourceFile& source,
+ std::vector<OutputFile>* output_files);
+
+ void WriteDepfile(const SourceFile& source);
+
+ // Path output writer that doesn't do any escaping or quoting. It does,
+ // however, convert slashes. Used for
+ // computing intermediate strings.
+ PathOutput path_output_no_escaping_;
+
+ DISALLOW_COPY_AND_ASSIGN(NinjaActionTargetWriter);
+};
+
+#endif // TOOLS_GN_NINJA_ACTION_TARGET_WRITER_H_
diff --git a/chromium/tools/gn/ninja_action_target_writer_unittest.cc b/chromium/tools/gn/ninja_action_target_writer_unittest.cc
new file mode 100644
index 00000000000..21a7ea558c1
--- /dev/null
+++ b/chromium/tools/gn/ninja_action_target_writer_unittest.cc
@@ -0,0 +1,364 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <algorithm>
+#include <sstream>
+
+#include "build/build_config.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/ninja_action_target_writer.h"
+#include "tools/gn/substitution_list.h"
+#include "tools/gn/target.h"
+#include "tools/gn/test_with_scope.h"
+
+TEST(NinjaActionTargetWriter, WriteOutputFilesForBuildLine) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+ target.set_output_type(Target::ACTION_FOREACH);
+ target.action_values().outputs() = SubstitutionList::MakeForTest(
+ "//out/Debug/gen/a b{{source_name_part}}.h",
+ "//out/Debug/gen/{{source_name_part}}.cc");
+
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaActionTargetWriter writer(&target, out);
+
+ SourceFile source("//foo/bar.in");
+ std::vector<OutputFile> output_files;
+ writer.WriteOutputFilesForBuildLine(source, &output_files);
+
+ EXPECT_EQ(" gen/a$ bbar.h gen/bar.cc", out.str());
+}
+
+// Tests an action with no sources.
+TEST(NinjaActionTargetWriter, ActionNoSources) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+ target.set_output_type(Target::ACTION);
+
+ target.action_values().set_script(SourceFile("//foo/script.py"));
+ target.inputs().push_back(SourceFile("//foo/included.txt"));
+
+ target.action_values().outputs() =
+ SubstitutionList::MakeForTest("//out/Debug/foo.out");
+
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ setup.build_settings()->set_python_path(base::FilePath(FILE_PATH_LITERAL(
+ "/usr/bin/python")));
+
+ std::ostringstream out;
+ NinjaActionTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected[] =
+ "rule __foo_bar___rule\n"
+ " command = /usr/bin/python ../../foo/script.py\n"
+ " description = ACTION //foo:bar()\n"
+ " restat = 1\n"
+ "build obj/foo/bar.inputdeps.stamp: stamp ../../foo/script.py "
+ "../../foo/included.txt\n"
+ "\n"
+ "build foo.out: __foo_bar___rule | obj/foo/bar.inputdeps.stamp\n"
+ "\n"
+ "build obj/foo/bar.stamp: stamp foo.out\n";
+ EXPECT_EQ(expected, out.str());
+}
+
+
+// Tests an action with no sources and console = true
+TEST(NinjaActionTargetWriter, ActionNoSourcesConsole) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+ target.set_output_type(Target::ACTION);
+
+ target.action_values().set_script(SourceFile("//foo/script.py"));
+ target.inputs().push_back(SourceFile("//foo/included.txt"));
+
+ target.action_values().outputs() =
+ SubstitutionList::MakeForTest("//out/Debug/foo.out");
+ target.action_values().set_console(true);
+
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ setup.build_settings()->set_python_path(base::FilePath(FILE_PATH_LITERAL(
+ "/usr/bin/python")));
+
+ std::ostringstream out;
+ NinjaActionTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected[] =
+ "rule __foo_bar___rule\n"
+ " command = /usr/bin/python ../../foo/script.py\n"
+ " description = ACTION //foo:bar()\n"
+ " restat = 1\n"
+ "build obj/foo/bar.inputdeps.stamp: stamp ../../foo/script.py "
+ "../../foo/included.txt\n"
+ "\n"
+ "build foo.out: __foo_bar___rule | obj/foo/bar.inputdeps.stamp\n"
+ " pool = console\n"
+ "\n"
+ "build obj/foo/bar.stamp: stamp foo.out\n";
+ EXPECT_EQ(expected, out.str());
+}
+
+// Makes sure that we write sources as input dependencies for actions with
+// both sources and inputs (ACTION_FOREACH treats the sources differently).
+TEST(NinjaActionTargetWriter, ActionWithSources) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+ target.set_output_type(Target::ACTION);
+
+ target.action_values().set_script(SourceFile("//foo/script.py"));
+
+ target.sources().push_back(SourceFile("//foo/source.txt"));
+ target.inputs().push_back(SourceFile("//foo/included.txt"));
+
+ target.action_values().outputs() =
+ SubstitutionList::MakeForTest("//out/Debug/foo.out");
+
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ setup.build_settings()->set_python_path(base::FilePath(FILE_PATH_LITERAL(
+ "/usr/bin/python")));
+
+ std::ostringstream out;
+ NinjaActionTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected_linux[] =
+ "rule __foo_bar___rule\n"
+ " command = /usr/bin/python ../../foo/script.py\n"
+ " description = ACTION //foo:bar()\n"
+ " restat = 1\n"
+ "build obj/foo/bar.inputdeps.stamp: stamp ../../foo/script.py "
+ "../../foo/included.txt ../../foo/source.txt\n"
+ "\n"
+ "build foo.out: __foo_bar___rule | obj/foo/bar.inputdeps.stamp\n"
+ "\n"
+ "build obj/foo/bar.stamp: stamp foo.out\n";
+ EXPECT_EQ(expected_linux, out.str());
+}
+
+// Covers ACTION_FOREACH output: one build line per source file, an
+// "inputdeps" stamp collecting the script/inputs/private deps as an
+// order-only dep, and the data_dep appearing only after "||" on the final
+// target stamp.
+TEST(NinjaActionTargetWriter, ForEach) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+
+ // Some dependencies that the action can depend on. Use actions for these
+ // so they have a nice platform-independent stamp file that can appear in the
+ // output (rather than having to worry about how the current platform names
+ // binaries).
+ Target dep(setup.settings(), Label(SourceDir("//foo/"), "dep"));
+ dep.set_output_type(Target::ACTION);
+ dep.visibility().SetPublic();
+ dep.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(dep.OnResolved(&err));
+
+ Target datadep(setup.settings(), Label(SourceDir("//foo/"), "datadep"));
+ datadep.set_output_type(Target::ACTION);
+ datadep.visibility().SetPublic();
+ datadep.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(datadep.OnResolved(&err));
+
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+ target.set_output_type(Target::ACTION_FOREACH);
+ target.private_deps().push_back(LabelTargetPair(&dep));
+ target.data_deps().push_back(LabelTargetPair(&datadep));
+
+ target.sources().push_back(SourceFile("//foo/input1.txt"));
+ target.sources().push_back(SourceFile("//foo/input2.txt"));
+
+ target.action_values().set_script(SourceFile("//foo/script.py"));
+
+ // Args exercise both a plain flag, the {{source}} substitution, and a
+ // value containing a space (to check platform-specific escaping below).
+ target.action_values().args() = SubstitutionList::MakeForTest(
+ "-i",
+ "{{source}}",
+ "--out=foo bar{{source_name_part}}.o");
+ target.action_values().outputs() = SubstitutionList::MakeForTest(
+ "//out/Debug/{{source_name_part}}.out");
+
+ target.inputs().push_back(SourceFile("//foo/included.txt"));
+
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ setup.build_settings()->set_python_path(base::FilePath(FILE_PATH_LITERAL(
+ "/usr/bin/python")));
+
+ std::ostringstream out;
+ NinjaActionTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected_linux[] =
+ "rule __foo_bar___rule\n"
+ " command = /usr/bin/python ../../foo/script.py -i ${in} "
+ // Escaping is different between Windows and Posix.
+#if defined(OS_WIN)
+ "\"--out=foo$ bar${source_name_part}.o\"\n"
+#else
+ "--out=foo\\$ bar${source_name_part}.o\n"
+#endif
+ " description = ACTION //foo:bar()\n"
+ " restat = 1\n"
+ "build obj/foo/bar.inputdeps.stamp: stamp ../../foo/script.py "
+ "../../foo/included.txt obj/foo/dep.stamp\n"
+ "\n"
+ "build input1.out: __foo_bar___rule ../../foo/input1.txt | "
+ "obj/foo/bar.inputdeps.stamp\n"
+ " source_name_part = input1\n"
+ "build input2.out: __foo_bar___rule ../../foo/input2.txt | "
+ "obj/foo/bar.inputdeps.stamp\n"
+ " source_name_part = input2\n"
+ "\n"
+ "build obj/foo/bar.stamp: "
+ "stamp input1.out input2.out || obj/foo/datadep.stamp\n";
+
+ std::string out_str = out.str();
+#if defined(OS_WIN)
+ // Normalize Windows path separators so the Posix expectation matches.
+ std::replace(out_str.begin(), out_str.end(), '\\', '/');
+#endif
+ EXPECT_EQ(expected_linux, out_str);
+}
+
+// Same ACTION_FOREACH shape as the ForEach test above, but with a
+// per-source depfile pattern; expects a "depfile = gen/<name>.d" variable
+// under each per-source build line.
+// NOTE(review): the depfile/args/outputs/inputs here are set after
+// OnResolved() — presumably intentional for this writer-only test; confirm
+// against how the other tests in this file order these calls.
+TEST(NinjaActionTargetWriter, ForEachWithDepfile) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+ target.set_output_type(Target::ACTION_FOREACH);
+
+ target.sources().push_back(SourceFile("//foo/input1.txt"));
+ target.sources().push_back(SourceFile("//foo/input2.txt"));
+
+ target.action_values().set_script(SourceFile("//foo/script.py"));
+
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ SubstitutionPattern depfile;
+ ASSERT_TRUE(
+ depfile.Parse("//out/Debug/gen/{{source_name_part}}.d", nullptr, &err));
+ target.action_values().set_depfile(depfile);
+
+ target.action_values().args() = SubstitutionList::MakeForTest(
+ "-i",
+ "{{source}}",
+ "--out=foo bar{{source_name_part}}.o");
+ target.action_values().outputs() = SubstitutionList::MakeForTest(
+ "//out/Debug/{{source_name_part}}.out");
+
+ target.inputs().push_back(SourceFile("//foo/included.txt"));
+
+ setup.build_settings()->set_python_path(base::FilePath(FILE_PATH_LITERAL(
+ "/usr/bin/python")));
+
+ std::ostringstream out;
+ NinjaActionTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected_linux[] =
+ "rule __foo_bar___rule\n"
+ " command = /usr/bin/python ../../foo/script.py -i ${in} "
+#if defined(OS_WIN)
+ "\"--out=foo$ bar${source_name_part}.o\"\n"
+#else
+ "--out=foo\\$ bar${source_name_part}.o\n"
+#endif
+ " description = ACTION //foo:bar()\n"
+ " restat = 1\n"
+ "build obj/foo/bar.inputdeps.stamp: stamp ../../foo/script.py "
+ "../../foo/included.txt\n"
+ "\n"
+ "build input1.out: __foo_bar___rule ../../foo/input1.txt"
+ " | obj/foo/bar.inputdeps.stamp\n"
+ " source_name_part = input1\n"
+ " depfile = gen/input1.d\n"
+ "build input2.out: __foo_bar___rule ../../foo/input2.txt"
+ " | obj/foo/bar.inputdeps.stamp\n"
+ " source_name_part = input2\n"
+ " depfile = gen/input2.d\n"
+ "\n"
+ "build obj/foo/bar.stamp: stamp input1.out input2.out\n";
+ EXPECT_EQ(expected_linux, out.str());
+}
+
+// ACTION_FOREACH with a response file: the rule gains rspfile /
+// rspfile_content lines keyed by ${unique_name}, and each per-source build
+// line sets unique_name plus the substitutions used by the args and the
+// rspfile contents.
+TEST(NinjaActionTargetWriter, ForEachWithResponseFile) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+ target.set_output_type(Target::ACTION_FOREACH);
+
+ target.sources().push_back(SourceFile("//foo/input1.txt"));
+ target.action_values().set_script(SourceFile("//foo/script.py"));
+
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ // Make sure we get interesting substitutions for both the args and the
+ // response file contents.
+ target.action_values().args() = SubstitutionList::MakeForTest(
+ "{{source}}",
+ "{{source_file_part}}",
+ "{{response_file_name}}");
+ target.action_values().rsp_file_contents() = SubstitutionList::MakeForTest(
+ "-j",
+ "{{source_name_part}}");
+ target.action_values().outputs() = SubstitutionList::MakeForTest(
+ "//out/Debug/{{source_name_part}}.out");
+
+ setup.build_settings()->set_python_path(base::FilePath(FILE_PATH_LITERAL(
+ "/usr/bin/python")));
+
+ std::ostringstream out;
+ NinjaActionTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected_linux[] =
+ "rule __foo_bar___rule\n"
+ // This name is autogenerated from the target rule name.
+ " rspfile = __foo_bar___rule.$unique_name.rsp\n"
+ // These come from rsp_file_contents above.
+ " rspfile_content = -j ${source_name_part}\n"
+ // These come from the args.
+ " command = /usr/bin/python ../../foo/script.py ${in} "
+ "${source_file_part} ${rspfile}\n"
+ " description = ACTION //foo:bar()\n"
+ " restat = 1\n"
+ "\n"
+ "build input1.out: __foo_bar___rule ../../foo/input1.txt"
+ " | ../../foo/script.py\n"
+ // Necessary for the rspfile defined in the rule.
+ " unique_name = 0\n"
+ // Substitution for the args.
+ " source_file_part = input1.txt\n"
+ // Substitution for the rspfile contents.
+ " source_name_part = input1\n"
+ "\n"
+ "build obj/foo/bar.stamp: stamp input1.out\n";
+ EXPECT_EQ(expected_linux, out.str());
+}
diff --git a/chromium/tools/gn/ninja_binary_target_writer.cc b/chromium/tools/gn/ninja_binary_target_writer.cc
new file mode 100644
index 00000000000..eaae098ef71
--- /dev/null
+++ b/chromium/tools/gn/ninja_binary_target_writer.cc
@@ -0,0 +1,1007 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/ninja_binary_target_writer.h"
+
+#include <stddef.h>
+#include <string.h>
+
+#include <cstring>
+#include <set>
+#include <sstream>
+
+#include "base/containers/hash_tables.h"
+#include "base/strings/string_util.h"
+#include "tools/gn/config_values_extractors.h"
+#include "tools/gn/deps_iterator.h"
+#include "tools/gn/err.h"
+#include "tools/gn/escape.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/ninja_utils.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/source_file_type.h"
+#include "tools/gn/string_utils.h"
+#include "tools/gn/substitution_writer.h"
+#include "tools/gn/target.h"
+
+// Represents a set of tool types. Must be first since it is also shared by
+// some helper functions in the anonymous namespace below.
+// Implemented as a plain flag-per-type bool array indexed by SourceFileType.
+class NinjaBinaryTargetWriter::SourceFileTypeSet {
+ public:
+ // All types start unset.
+ SourceFileTypeSet() {
+ memset(flags_, 0, sizeof(bool) * static_cast<int>(SOURCE_NUMTYPES));
+ }
+
+ // Marks |type| as present in the set.
+ void Set(SourceFileType type) {
+ flags_[static_cast<int>(type)] = true;
+ }
+ // Returns true if |type| was previously Set().
+ bool Get(SourceFileType type) const {
+ return flags_[static_cast<int>(type)];
+ }
+
+ private:
+ bool flags_[static_cast<int>(SOURCE_NUMTYPES)];
+};
+
+namespace {
+
+// Returns the proper escape options for writing compiler and linker flags.
+// Uses Ninja-command escaping but with quoting inhibited (see below).
+EscapeOptions GetFlagOptions() {
+ EscapeOptions opts;
+ opts.mode = ESCAPE_NINJA_COMMAND;
+
+ // Some flag strings are actually multiple flags that expect to be just
+ // added to the command line. We assume that quoting is done by the
+ // buildfiles if it wants such things quoted.
+ opts.inhibit_quoting = true;
+
+ return opts;
+}
+
+// Functor that writes one preprocessor define as " -D<escaped value>",
+// used with RecursiveTargetConfigToStream for the "defines" variable.
+struct DefineWriter {
+ DefineWriter() {
+ options.mode = ESCAPE_NINJA_COMMAND;
+ }
+
+ // Appends " -D" followed by |s| escaped for a Ninja command line.
+ void operator()(const std::string& s, std::ostream& out) const {
+ out << " -D";
+ EscapeStringToStream(out, s, options);
+ }
+
+ EscapeOptions options;
+};
+
+// Functor that writes one include directory as " -I<dir>", used with
+// RecursiveTargetConfigToStream for the "include_dirs" variable.
+struct IncludeWriter {
+ explicit IncludeWriter(PathOutput& path_output) : path_output_(path_output) {
+ }
+ ~IncludeWriter() {
+ }
+
+ void operator()(const SourceDir& d, std::ostream& out) const {
+ std::ostringstream path_out;
+ path_output_.WriteDir(path_out, d, PathOutput::DIR_NO_LAST_SLASH);
+ const std::string& path = path_out.str();
+ // If the path writer quoted the path, move the -I inside the opening
+ // quote so the whole "-I<path>" is one quoted token.
+ if (path[0] == '"')
+ out << " \"-I" << path.substr(1);
+ else
+ out << " -I" << path;
+ }
+
+ PathOutput& path_output_;
+};
+
+// Returns the language-specific suffix for precompiled header files.
+// Only the four compile tool types are valid; anything else is a caller bug
+// (NOTREACHED) and yields "".
+const char* GetPCHLangSuffixForToolType(Toolchain::ToolType type) {
+ switch (type) {
+ case Toolchain::TYPE_CC:
+ return "c";
+ case Toolchain::TYPE_CXX:
+ return "cc";
+ case Toolchain::TYPE_OBJC:
+ return "m";
+ case Toolchain::TYPE_OBJCXX:
+ return "mm";
+ default:
+ NOTREACHED() << "Not a valid PCH tool type: " << type;
+ return "";
+ }
+}
+
+// Returns the extension (".<lang>.o") for the object file produced by an
+// MSVC-style PCH compile for |tool_type|.
+std::string GetWindowsPCHObjectExtension(Toolchain::ToolType tool_type) {
+ const char* lang_suffix = GetPCHLangSuffixForToolType(tool_type);
+ std::string result = ".";
+ // For MSVC, annotate the obj files with the language type. For example:
+ // obj/foo/target_name.precompile.o ->
+ // obj/foo/target_name.precompile.cc.o
+ result += lang_suffix;
+ result += ".o";
+ return result;
+}
+
+// Returns the extension ("h-<lang>.gch" appended after a ".") for the GCC
+// precompiled header output for |tool_type|.
+std::string GetGCCPCHOutputExtension(Toolchain::ToolType tool_type) {
+ const char* lang_suffix = GetPCHLangSuffixForToolType(tool_type);
+ std::string result = ".";
+ // For GCC, the output name must have a .gch suffix and be annotated with
+ // the language type. For example:
+ // obj/foo/target_name.header.h ->
+ // obj/foo/target_name.header.h-cc.gch
+ // In order for the compiler to pick it up, the output name (minus the .gch
+ // suffix) MUST match whatever is passed to the -include flag.
+ result += "h-";
+ result += lang_suffix;
+ result += ".gch";
+ return result;
+}
+
+// Returns the language-specific lang recognized by gcc's -x flag for
+// precompiled header files. Invalid tool types hit NOTREACHED and yield "".
+const char* GetPCHLangForToolType(Toolchain::ToolType type) {
+ switch (type) {
+ case Toolchain::TYPE_CC:
+ return "c-header";
+ case Toolchain::TYPE_CXX:
+ return "c++-header";
+ case Toolchain::TYPE_OBJC:
+ return "objective-c-header";
+ case Toolchain::TYPE_OBJCXX:
+ return "objective-c++-header";
+ default:
+ NOTREACHED() << "Not a valid PCH tool type: " << type;
+ return "";
+ }
+}
+
+// Fills |outputs| with the object or gch file for the precompiled header of the
+// given type (flag type and tool type must match).
+// Leaves |outputs| empty when the tool is missing, the tool produces no
+// outputs for the precompiled source, or the output has no extension.
+void GetPCHOutputFiles(const Target* target,
+ Toolchain::ToolType tool_type,
+ std::vector<OutputFile>* outputs) {
+ outputs->clear();
+
+ // Compute the tool. This must use the tool type passed in rather than the
+ // detected file type of the precompiled source file since the same
+ // precompiled source file will be used for separate C/C++ compiles.
+ const Tool* tool = target->toolchain()->GetTool(tool_type);
+ if (!tool)
+ return;
+ SubstitutionWriter::ApplyListToCompilerAsOutputFile(
+ target, target->config_values().precompiled_source(),
+ tool->outputs(), outputs);
+
+ if (outputs->empty())
+ return;
+ if (outputs->size() > 1)
+ outputs->resize(1); // Only link the first output from the compiler tool.
+
+ // Rewrite the extension in place: everything from the '.' before the
+ // found extension onward is replaced by the PCH-specific extension.
+ std::string& output_value = (*outputs)[0].value();
+ size_t extension_offset = FindExtensionOffset(output_value);
+ if (extension_offset == std::string::npos) {
+ // No extension found.
+ return;
+ }
+ DCHECK(extension_offset >= 1);
+ DCHECK(output_value[extension_offset - 1] == '.');
+
+ std::string output_extension;
+ Tool::PrecompiledHeaderType header_type = tool->precompiled_header_type();
+ switch (header_type) {
+ case Tool::PCH_MSVC:
+ output_extension = GetWindowsPCHObjectExtension(tool_type);
+ break;
+ case Tool::PCH_GCC:
+ output_extension = GetGCCPCHOutputExtension(tool_type);
+ break;
+ case Tool::PCH_NONE:
+ NOTREACHED() << "No outputs for no PCH type.";
+ break;
+ }
+ output_value.replace(extension_offset - 1,
+ std::string::npos,
+ output_extension);
+}
+
+// Appends the object files generated by the given source set to the given
+// output vector.
+// Mirrors the per-source computation in NinjaBinaryTargetWriter::Run(); the
+// two are cross-checked by a DCHECK in Run() in debug builds.
+void AddSourceSetObjectFiles(const Target* source_set,
+ UniqueVector<OutputFile>* obj_files) {
+ std::vector<OutputFile> tool_outputs; // Prevent allocation in loop.
+ NinjaBinaryTargetWriter::SourceFileTypeSet used_types;
+
+ // Compute object files for all sources. Only link the first output from
+ // the tool if there are more than one.
+ for (const auto& source : source_set->sources()) {
+ Toolchain::ToolType tool_type = Toolchain::TYPE_NONE;
+ if (source_set->GetOutputFilesForSource(source, &tool_type, &tool_outputs))
+ obj_files->push_back(tool_outputs[0]);
+
+ used_types.Set(GetSourceFileType(source));
+ }
+
+ // Add MSVC precompiled header object files. GCC .gch files are not object
+ // files so they are omitted.
+ if (source_set->config_values().has_precompiled_headers()) {
+ if (used_types.Get(SOURCE_C)) {
+ const Tool* tool = source_set->toolchain()->GetTool(Toolchain::TYPE_CC);
+ if (tool && tool->precompiled_header_type() == Tool::PCH_MSVC) {
+ GetPCHOutputFiles(source_set, Toolchain::TYPE_CC, &tool_outputs);
+ obj_files->Append(tool_outputs.begin(), tool_outputs.end());
+ }
+ }
+ if (used_types.Get(SOURCE_CPP)) {
+ const Tool* tool = source_set->toolchain()->GetTool(Toolchain::TYPE_CXX);
+ if (tool && tool->precompiled_header_type() == Tool::PCH_MSVC) {
+ GetPCHOutputFiles(source_set, Toolchain::TYPE_CXX, &tool_outputs);
+ obj_files->Append(tool_outputs.begin(), tool_outputs.end());
+ }
+ }
+ if (used_types.Get(SOURCE_M)) {
+ const Tool* tool = source_set->toolchain()->GetTool(Toolchain::TYPE_OBJC);
+ if (tool && tool->precompiled_header_type() == Tool::PCH_MSVC) {
+ GetPCHOutputFiles(source_set, Toolchain::TYPE_OBJC, &tool_outputs);
+ obj_files->Append(tool_outputs.begin(), tool_outputs.end());
+ }
+ }
+ if (used_types.Get(SOURCE_MM)) {
+ const Tool* tool = source_set->toolchain()->GetTool(
+ Toolchain::TYPE_OBJCXX);
+ if (tool && tool->precompiled_header_type() == Tool::PCH_MSVC) {
+ GetPCHOutputFiles(source_set, Toolchain::TYPE_OBJCXX, &tool_outputs);
+ obj_files->Append(tool_outputs.begin(), tool_outputs.end());
+ }
+ }
+ }
+}
+
+} // namespace
+
+// Caches the final-output tool for |target| and the toolchain's rule prefix
+// so later writes don't have to recompute them.
+NinjaBinaryTargetWriter::NinjaBinaryTargetWriter(const Target* target,
+ std::ostream& out)
+ : NinjaTargetWriter(target, out),
+ tool_(target->toolchain()->GetToolForTargetFinalOutput(target)),
+ rule_prefix_(GetNinjaRulePrefixForToolchain(settings_)) {
+}
+
+// Out-of-line destructor; no cleanup beyond the base class.
+NinjaBinaryTargetWriter::~NinjaBinaryTargetWriter() {
+}
+
+// Entry point: emits compiler variables, PCH build lines, one build line per
+// source, and then either a source-set stamp or the link step, depending on
+// the target's output type.
+void NinjaBinaryTargetWriter::Run() {
+ // Figure out what source types are needed.
+ SourceFileTypeSet used_types;
+ for (const auto& source : target_->sources())
+ used_types.Set(GetSourceFileType(source));
+
+ WriteCompilerVars(used_types);
+
+ // The input dependencies will be an order-only dependency. This will cause
+ // Ninja to make sure the inputs are up-to-date before compiling this source,
+ // but changes in the inputs deps won't cause the file to be recompiled.
+ //
+ // This is important to prevent changes in unrelated actions that are
+ // upstream of this target from causing everything to be recompiled.
+ //
+ // Why can we get away with this rather than using implicit deps ("|", which
+ // will force rebuilds when the inputs change)? For source code, the
+ // computed dependencies of all headers will be computed by the compiler,
+ // which will cause source rebuilds if any "real" upstream dependencies
+ // change.
+ //
+ // If a .cc file is generated by an input dependency, Ninja will see the
+ // input to the build rule doesn't exist, and that it is an output from a
+ // previous step, and build the previous step first. This is a "real"
+ // dependency and doesn't need | or || to express.
+ //
+ // The only case where this rule matters is for the first build where no .d
+ // files exist, and Ninja doesn't know what that source file depends on. In
+ // this case it's sufficient to ensure that the upstream dependencies are
+ // built first. This is exactly what Ninja's order-only dependencies
+ // expresses.
+ OutputFile order_only_dep =
+ WriteInputDepsStampAndGetDep(std::vector<const Target*>());
+
+ // For GCC builds, the .gch files are not object files, but still need to be
+ // added as explicit dependencies below. The .gch output files are placed in
+ // |pch_other_files|. This is to prevent linking against them.
+ std::vector<OutputFile> pch_obj_files;
+ std::vector<OutputFile> pch_other_files;
+ WritePCHCommands(used_types, order_only_dep,
+ &pch_obj_files, &pch_other_files);
+ // At most one of the two vectors is populated (MSVC fills obj files, GCC
+ // fills other files), so pick whichever is non-empty.
+ std::vector<OutputFile>* pch_files = !pch_obj_files.empty() ?
+ &pch_obj_files : &pch_other_files;
+
+ // Treat all pch output files as explicit dependencies of all
+ // compiles that support them. Some notes:
+ //
+ // - On Windows, the .pch file is the input to the compile, not the
+ // precompiled header's corresponding object file that we're using here.
+ // But Ninja's depslog doesn't support multiple outputs from the
+ // precompiled header compile step (it outputs both the .pch file and a
+ // corresponding .obj file). So we consistently list the .obj file and the
+ // .pch file we really need comes along with it.
+ //
+ // - GCC .gch files are not object files, therefore they are not added to the
+ // object file list.
+ std::vector<OutputFile> obj_files;
+ std::vector<SourceFile> other_files;
+ WriteSources(*pch_files, order_only_dep, &obj_files, &other_files);
+
+ // Link all MSVC pch object files. The vector will be empty on GCC toolchains.
+ obj_files.insert(obj_files.end(), pch_obj_files.begin(), pch_obj_files.end());
+ if (!CheckForDuplicateObjectFiles(obj_files))
+ return;
+
+ if (target_->output_type() == Target::SOURCE_SET) {
+ WriteSourceSetStamp(obj_files);
+#ifndef NDEBUG
+ // Verify that the function that separately computes a source set's object
+ // files match the object files just computed.
+ UniqueVector<OutputFile> computed_obj;
+ AddSourceSetObjectFiles(target_, &computed_obj);
+ DCHECK_EQ(obj_files.size(), computed_obj.size());
+ for (const auto& obj : obj_files)
+ DCHECK_NE(static_cast<size_t>(-1), computed_obj.IndexOf(obj));
+#endif
+ } else {
+ WriteLinkerStuff(obj_files, other_files);
+ }
+}
+
+// Writes the per-target Ninja variables (defines, include_dirs, and the
+// various flags variables) — but only those the toolchain's substitution
+// bits say are actually used, and only flags for source types present in
+// |used_types|.
+void NinjaBinaryTargetWriter::WriteCompilerVars(
+ const SourceFileTypeSet& used_types) {
+ const SubstitutionBits& subst = target_->toolchain()->substitution_bits();
+
+ // Defines.
+ if (subst.used[SUBSTITUTION_DEFINES]) {
+ out_ << kSubstitutionNinjaNames[SUBSTITUTION_DEFINES] << " =";
+ RecursiveTargetConfigToStream<std::string>(
+ target_, &ConfigValues::defines, DefineWriter(), out_);
+ out_ << std::endl;
+ }
+
+ // Include directories.
+ if (subst.used[SUBSTITUTION_INCLUDE_DIRS]) {
+ out_ << kSubstitutionNinjaNames[SUBSTITUTION_INCLUDE_DIRS] << " =";
+ PathOutput include_path_output(
+ path_output_.current_dir(),
+ settings_->build_settings()->root_path_utf8(),
+ ESCAPE_NINJA_COMMAND);
+ RecursiveTargetConfigToStream<SourceDir>(
+ target_, &ConfigValues::include_dirs,
+ IncludeWriter(include_path_output), out_);
+ out_ << std::endl;
+ }
+
+ bool has_precompiled_headers =
+ target_->config_values().has_precompiled_headers();
+
+ // One flags variable per language family; PCH handling is only relevant
+ // for the language-specific cflags (cflags_c/cc/objc/objcc).
+ EscapeOptions opts = GetFlagOptions();
+ if (used_types.Get(SOURCE_S) || used_types.Get(SOURCE_ASM)) {
+ WriteOneFlag(SUBSTITUTION_ASMFLAGS, false, Toolchain::TYPE_NONE,
+ &ConfigValues::asmflags, opts);
+ }
+ if (used_types.Get(SOURCE_C) || used_types.Get(SOURCE_CPP) ||
+ used_types.Get(SOURCE_M) || used_types.Get(SOURCE_MM)) {
+ WriteOneFlag(SUBSTITUTION_CFLAGS, false, Toolchain::TYPE_NONE,
+ &ConfigValues::cflags, opts);
+ }
+ if (used_types.Get(SOURCE_C)) {
+ WriteOneFlag(SUBSTITUTION_CFLAGS_C, has_precompiled_headers,
+ Toolchain::TYPE_CC, &ConfigValues::cflags_c, opts);
+ }
+ if (used_types.Get(SOURCE_CPP)) {
+ WriteOneFlag(SUBSTITUTION_CFLAGS_CC, has_precompiled_headers,
+ Toolchain::TYPE_CXX, &ConfigValues::cflags_cc, opts);
+ }
+ if (used_types.Get(SOURCE_M)) {
+ WriteOneFlag(SUBSTITUTION_CFLAGS_OBJC, has_precompiled_headers,
+ Toolchain::TYPE_OBJC, &ConfigValues::cflags_objc, opts);
+ }
+ if (used_types.Get(SOURCE_MM)) {
+ WriteOneFlag(SUBSTITUTION_CFLAGS_OBJCC, has_precompiled_headers,
+ Toolchain::TYPE_OBJCXX, &ConfigValues::cflags_objcc, opts);
+ }
+
+ WriteSharedVars(subst);
+}
+
+// Writes one "<flags var> = ..." line for |subst_enum|, pulling values via
+// |getter| from the target's recursive config. When precompiled headers are
+// in play, also appends the PCH-specific flags (/Fp + /Yu for MSVC,
+// -include for GCC). No-op if the toolchain never uses this substitution.
+void NinjaBinaryTargetWriter::WriteOneFlag(
+ SubstitutionType subst_enum,
+ bool has_precompiled_headers,
+ Toolchain::ToolType tool_type,
+ const std::vector<std::string>& (ConfigValues::* getter)() const,
+ EscapeOptions flag_escape_options) {
+ if (!target_->toolchain()->substitution_bits().used[subst_enum])
+ return;
+
+ out_ << kSubstitutionNinjaNames[subst_enum] << " =";
+
+ if (has_precompiled_headers) {
+ const Tool* tool = target_->toolchain()->GetTool(tool_type);
+ if (tool && tool->precompiled_header_type() == Tool::PCH_MSVC) {
+ // Name the .pch file.
+ out_ << " /Fp";
+ path_output_.WriteFile(out_, GetWindowsPCHFile(tool_type));
+
+ // Enables precompiled headers and names the .h file. It's a string
+ // rather than a file name (so no need to rebase or use path_output_).
+ out_ << " /Yu" << target_->config_values().precompiled_header();
+ RecursiveTargetConfigStringsToStream(target_, getter,
+ flag_escape_options, out_);
+ } else if (tool && tool->precompiled_header_type() == Tool::PCH_GCC) {
+ // The targets to build the .gch files should omit the -include flag
+ // below. To accomplish this, each substitution flag is overwritten in the
+ // target rule and these values are repeated. The -include flag is omitted
+ // in place of the required -x <header lang> flag for .gch targets.
+ RecursiveTargetConfigStringsToStream(target_, getter,
+ flag_escape_options, out_);
+
+ // Compute the gch file (it will be language-specific).
+ std::vector<OutputFile> outputs;
+ GetPCHOutputFiles(target_, tool_type, &outputs);
+ if (!outputs.empty()) {
+ // Trim the .gch suffix for the -include flag.
+ // e.g. for gch file foo/bar/target.precompiled.h.gch:
+ // -include foo/bar/target.precompiled.h
+ std::string pch_file = outputs[0].value();
+ pch_file.erase(pch_file.length() - 4);
+ out_ << " -include " << pch_file;
+ }
+ }
+ } else {
+ // No PCH: just the plain recursive flag values.
+ RecursiveTargetConfigStringsToStream(target_, getter,
+ flag_escape_options, out_);
+ }
+ out_ << std::endl;
+}
+
+// Emits one PCH build line per language actually used by the target's
+// sources. MSVC-style PCH outputs land in |object_files|; GCC .gch outputs
+// land in |other_files|. No-op when the target has no precompiled headers.
+// NOTE(review): the C/C++ branches fire for any type != PCH_NONE while the
+// ObjC/ObjC++ branches require exactly PCH_GCC — looks intentional (MSVC has
+// no ObjC) but worth confirming against upstream GN.
+void NinjaBinaryTargetWriter::WritePCHCommands(
+ const SourceFileTypeSet& used_types,
+ const OutputFile& order_only_dep,
+ std::vector<OutputFile>* object_files,
+ std::vector<OutputFile>* other_files) {
+ if (!target_->config_values().has_precompiled_headers())
+ return;
+
+ const Tool* tool_c = target_->toolchain()->GetTool(Toolchain::TYPE_CC);
+ if (tool_c &&
+ tool_c->precompiled_header_type() != Tool::PCH_NONE &&
+ used_types.Get(SOURCE_C)) {
+ WritePCHCommand(SUBSTITUTION_CFLAGS_C,
+ Toolchain::TYPE_CC,
+ tool_c->precompiled_header_type(),
+ order_only_dep, object_files, other_files);
+ }
+ const Tool* tool_cxx = target_->toolchain()->GetTool(Toolchain::TYPE_CXX);
+ if (tool_cxx &&
+ tool_cxx->precompiled_header_type() != Tool::PCH_NONE &&
+ used_types.Get(SOURCE_CPP)) {
+ WritePCHCommand(SUBSTITUTION_CFLAGS_CC,
+ Toolchain::TYPE_CXX,
+ tool_cxx->precompiled_header_type(),
+ order_only_dep, object_files, other_files);
+ }
+
+ const Tool* tool_objc = target_->toolchain()->GetTool(Toolchain::TYPE_OBJC);
+ if (tool_objc &&
+ tool_objc->precompiled_header_type() == Tool::PCH_GCC &&
+ used_types.Get(SOURCE_M)) {
+ WritePCHCommand(SUBSTITUTION_CFLAGS_OBJC,
+ Toolchain::TYPE_OBJC,
+ tool_objc->precompiled_header_type(),
+ order_only_dep, object_files, other_files);
+ }
+
+ const Tool* tool_objcxx =
+ target_->toolchain()->GetTool(Toolchain::TYPE_OBJCXX);
+ if (tool_objcxx &&
+ tool_objcxx->precompiled_header_type() == Tool::PCH_GCC &&
+ used_types.Get(SOURCE_MM)) {
+ WritePCHCommand(SUBSTITUTION_CFLAGS_OBJCC,
+ Toolchain::TYPE_OBJCXX,
+ tool_objcxx->precompiled_header_type(),
+ order_only_dep, object_files, other_files);
+ }
+}
+
+// Dispatches to the MSVC or GCC PCH writer based on |header_type|; MSVC
+// outputs go to |object_files|, GCC outputs to |other_files|. PCH_NONE is a
+// caller error (NOTREACHED).
+void NinjaBinaryTargetWriter::WritePCHCommand(
+ SubstitutionType flag_type,
+ Toolchain::ToolType tool_type,
+ Tool::PrecompiledHeaderType header_type,
+ const OutputFile& order_only_dep,
+ std::vector<OutputFile>* object_files,
+ std::vector<OutputFile>* other_files) {
+ switch (header_type) {
+ case Tool::PCH_MSVC:
+ WriteWindowsPCHCommand(flag_type, tool_type, order_only_dep,
+ object_files);
+ break;
+ case Tool::PCH_GCC:
+ WriteGCCPCHCommand(flag_type, tool_type, order_only_dep,
+ other_files);
+ break;
+ case Tool::PCH_NONE:
+ NOTREACHED() << "Cannot write a PCH command with no PCH header type";
+ break;
+ }
+}
+
+// Emits the build line that compiles the target's precompiled source into a
+// .gch file, appending the outputs to |gch_files|. The rule-level flags
+// variable is overridden per-build-line to drop the -include flag and add
+// the -x <header lang> flag instead.
+void NinjaBinaryTargetWriter::WriteGCCPCHCommand(
+ SubstitutionType flag_type,
+ Toolchain::ToolType tool_type,
+ const OutputFile& order_only_dep,
+ std::vector<OutputFile>* gch_files) {
+ // Compute the pch output file (it will be language-specific).
+ std::vector<OutputFile> outputs;
+ GetPCHOutputFiles(target_, tool_type, &outputs);
+ if (outputs.empty())
+ return;
+
+ gch_files->insert(gch_files->end(), outputs.begin(), outputs.end());
+
+ // Build line to compile the file.
+ WriteCompilerBuildLine(target_->config_values().precompiled_source(),
+ std::vector<OutputFile>(), order_only_dep, tool_type,
+ outputs);
+
+ // This build line needs a custom language-specific flags value. Rule-specific
+ // variables are just indented underneath the rule line.
+ out_ << " " << kSubstitutionNinjaNames[flag_type] << " =";
+
+ // Each substitution flag is overwritten in the target rule to replace the
+ // implicitly generated -include flag with the -x <header lang> flag required
+ // for .gch targets.
+ EscapeOptions opts = GetFlagOptions();
+ if (tool_type == Toolchain::TYPE_CC) {
+ RecursiveTargetConfigStringsToStream(target_,
+ &ConfigValues::cflags_c, opts, out_);
+ } else if (tool_type == Toolchain::TYPE_CXX) {
+ RecursiveTargetConfigStringsToStream(target_,
+ &ConfigValues::cflags_cc, opts, out_);
+ } else if (tool_type == Toolchain::TYPE_OBJC) {
+ RecursiveTargetConfigStringsToStream(target_,
+ &ConfigValues::cflags_objc, opts, out_);
+ } else if (tool_type == Toolchain::TYPE_OBJCXX) {
+ RecursiveTargetConfigStringsToStream(target_,
+ &ConfigValues::cflags_objcc, opts, out_);
+ }
+
+ // Append the command to specify the language of the .gch file.
+ out_ << " -x " << GetPCHLangForToolType(tool_type);
+
+ // Write two blank lines to help separate the PCH build lines from the
+ // regular source build lines.
+ out_ << std::endl << std::endl;
+}
+
+// Emits the build line that compiles the target's precompiled source with
+// MSVC-style PCH, appending the resulting object files to |object_files|.
+// The per-build-line flags variable extends the rule-level one (via
+// ${<flags var>}) with /Yc to create (rather than use) the .pch file.
+void NinjaBinaryTargetWriter::WriteWindowsPCHCommand(
+ SubstitutionType flag_type,
+ Toolchain::ToolType tool_type,
+ const OutputFile& order_only_dep,
+ std::vector<OutputFile>* object_files) {
+ // Compute the pch output file (it will be language-specific).
+ std::vector<OutputFile> outputs;
+ GetPCHOutputFiles(target_, tool_type, &outputs);
+ if (outputs.empty())
+ return;
+
+ object_files->insert(object_files->end(), outputs.begin(), outputs.end());
+
+ // Build line to compile the file.
+ WriteCompilerBuildLine(target_->config_values().precompiled_source(),
+ std::vector<OutputFile>(), order_only_dep, tool_type,
+ outputs);
+
+ // This build line needs a custom language-specific flags value. Rule-specific
+ // variables are just indented underneath the rule line.
+ out_ << " " << kSubstitutionNinjaNames[flag_type] << " =";
+
+ // Append the command to generate the .pch file.
+ // This adds the value to the existing flag instead of overwriting it.
+ out_ << " ${" << kSubstitutionNinjaNames[flag_type] << "}";
+ out_ << " /Yc" << target_->config_values().precompiled_header();
+
+ // Write two blank lines to help separate the PCH build lines from the
+ // regular source build lines.
+ out_ << std::endl << std::endl;
+}
+
+// Writes one compile build line per source. Sources with no tool output are
+// skipped (except .def files, which are passed back via |other_files| for
+// the linker). Only PCH deps matching the source's tool type are attached as
+// implicit deps; the first tool output per source goes into |object_files|.
+void NinjaBinaryTargetWriter::WriteSources(
+ const std::vector<OutputFile>& pch_deps,
+ const OutputFile& order_only_dep,
+ std::vector<OutputFile>* object_files,
+ std::vector<SourceFile>* other_files) {
+ object_files->reserve(object_files->size() + target_->sources().size());
+
+ std::vector<OutputFile> tool_outputs; // Prevent reallocation in loop.
+ std::vector<OutputFile> deps;
+ for (const auto& source : target_->sources()) {
+ // Clear the vector but maintain the max capacity to prevent reallocations.
+ deps.resize(0);
+ Toolchain::ToolType tool_type = Toolchain::TYPE_NONE;
+ if (!target_->GetOutputFilesForSource(source, &tool_type, &tool_outputs)) {
+ if (GetSourceFileType(source) == SOURCE_DEF)
+ other_files->push_back(source);
+ continue; // No output for this source.
+ }
+
+ if (tool_type != Toolchain::TYPE_NONE) {
+ // Only include PCH deps that correspond to the tool type, for instance,
+ // do not specify target_name.precompile.cc.o (a CXX PCH file) as a dep
+ // for the output of a C tool type.
+ //
+ // This makes the assumption that pch_deps only contains pch output files
+ // with the naming scheme specified in GetWindowsPCHObjectExtension or
+ // GetGCCPCHOutputExtension.
+ const Tool* tool = target_->toolchain()->GetTool(tool_type);
+ if (tool->precompiled_header_type() != Tool::PCH_NONE) {
+ for (const auto& dep : pch_deps) {
+ const std::string& output_value = dep.value();
+ std::string output_extension;
+ if (tool->precompiled_header_type() == Tool::PCH_MSVC) {
+ output_extension = GetWindowsPCHObjectExtension(tool_type);
+ } else if (tool->precompiled_header_type() == Tool::PCH_GCC) {
+ output_extension = GetGCCPCHOutputExtension(tool_type);
+ }
+ // Match by suffix: keep only the PCH output for this language.
+ if (output_value.compare(output_value.size() -
+ output_extension.size(), output_extension.size(),
+ output_extension) == 0) {
+ deps.push_back(dep);
+ }
+ }
+ }
+ WriteCompilerBuildLine(source, deps, order_only_dep, tool_type,
+ tool_outputs);
+ }
+
+ // It's theoretically possible for a compiler to produce more than one
+ // output, but we'll only link to the first output.
+ object_files->push_back(tool_outputs[0]);
+ }
+ out_ << std::endl;
+}
+
+// Writes a single Ninja build line:
+// build <outputs>: <rule prefix><tool name> <source> [| <extra_deps>]
+// [|| <order_only_dep>]
+// The implicit ("|") and order-only ("||") sections are omitted when empty.
+void NinjaBinaryTargetWriter::WriteCompilerBuildLine(
+ const SourceFile& source,
+ const std::vector<OutputFile>& extra_deps,
+ const OutputFile& order_only_dep,
+ Toolchain::ToolType tool_type,
+ const std::vector<OutputFile>& outputs) {
+ out_ << "build";
+ path_output_.WriteFiles(out_, outputs);
+
+ out_ << ": " << rule_prefix_ << Toolchain::ToolTypeToName(tool_type);
+ out_ << " ";
+ path_output_.WriteFile(out_, source);
+
+ if (!extra_deps.empty()) {
+ out_ << " |";
+ for (const OutputFile& dep : extra_deps) {
+ out_ << " ";
+ path_output_.WriteFile(out_, dep);
+ }
+ }
+
+ if (!order_only_dep.value().empty()) {
+ out_ << " || ";
+ path_output_.WriteFile(out_, order_only_dep);
+ }
+ out_ << std::endl;
+}
+
+void NinjaBinaryTargetWriter::WriteLinkerStuff(
+ const std::vector<OutputFile>& object_files,
+ const std::vector<SourceFile>& other_files) {
+ std::vector<OutputFile> output_files;
+ SubstitutionWriter::ApplyListToLinkerAsOutputFile(
+ target_, tool_, tool_->outputs(), &output_files);
+
+ out_ << "build";
+ path_output_.WriteFiles(out_, output_files);
+
+ out_ << ": " << rule_prefix_
+ << Toolchain::ToolTypeToName(
+ target_->toolchain()->GetToolTypeForTargetFinalOutput(target_));
+
+ UniqueVector<OutputFile> extra_object_files;
+ UniqueVector<const Target*> linkable_deps;
+ UniqueVector<const Target*> non_linkable_deps;
+ GetDeps(&extra_object_files, &linkable_deps, &non_linkable_deps);
+
+ // Object files.
+ path_output_.WriteFiles(out_, object_files);
+ path_output_.WriteFiles(out_, extra_object_files);
+
+ // Dependencies.
+ std::vector<OutputFile> implicit_deps;
+ std::vector<OutputFile> solibs;
+ for (const Target* cur : linkable_deps) {
+ // All linkable deps should have a link output file.
+ DCHECK(!cur->link_output_file().value().empty())
+ << "No link output file for "
+ << target_->label().GetUserVisibleName(false);
+
+ if (cur->dependency_output_file().value() !=
+ cur->link_output_file().value()) {
+ // This is a shared library with separate link and deps files. Save for
+ // later.
+ implicit_deps.push_back(cur->dependency_output_file());
+ solibs.push_back(cur->link_output_file());
+ } else {
+ // Normal case, just link to this target.
+ out_ << " ";
+ path_output_.WriteFile(out_, cur->link_output_file());
+ }
+ }
+
+ const SourceFile* optional_def_file = nullptr;
+ if (!other_files.empty()) {
+ for (const SourceFile& src_file : other_files) {
+ if (GetSourceFileType(src_file) == SOURCE_DEF) {
+ optional_def_file = &src_file;
+ implicit_deps.push_back(
+ OutputFile(settings_->build_settings(), src_file));
+ break; // Only one def file is allowed.
+ }
+ }
+ }
+
+ // Libraries specified by paths.
+ const OrderedSet<LibFile>& libs = target_->all_libs();
+ for (size_t i = 0; i < libs.size(); i++) {
+ if (libs[i].is_source_file()) {
+ implicit_deps.push_back(
+ OutputFile(settings_->build_settings(), libs[i].source_file()));
+ }
+ }
+
+ // Append implicit dependencies collected above.
+ if (!implicit_deps.empty()) {
+ out_ << " |";
+ path_output_.WriteFiles(out_, implicit_deps);
+ }
+
+ // Append data dependencies as order-only dependencies.
+ //
+ // This will include data dependencies and input dependencies (like when
+ // this target depends on an action). Having the data dependencies in this
+ // list ensures that the data is available at runtime when the user builds
+ // this target.
+ //
+ // The action dependencies are not strictly necessary in this case. They
+ // should also have been collected via the input deps stamp that each source
+ // file has for an order-only dependency, and since this target depends on
+ // the sources, there is already an implicit order-only dependency. However,
+ // it's extra work to separate these out and there's no disadvantage to
+ // listing them again.
+ WriteOrderOnlyDependencies(non_linkable_deps);
+
+ // End of the link "build" line.
+ out_ << std::endl;
+
+ // The remaining things go in the inner scope of the link line.
+ if (target_->output_type() == Target::EXECUTABLE ||
+ target_->output_type() == Target::SHARED_LIBRARY ||
+ target_->output_type() == Target::LOADABLE_MODULE) {
+ WriteLinkerFlags(optional_def_file);
+ WriteLibs();
+ }
+ WriteOutputExtension();
+ WriteSolibs(solibs);
+}
+
+void NinjaBinaryTargetWriter::WriteLinkerFlags(
+ const SourceFile* optional_def_file) {
+ out_ << " ldflags =";
+
+ // First the ldflags from the target and its config.
+ EscapeOptions flag_options = GetFlagOptions();
+ RecursiveTargetConfigStringsToStream(target_, &ConfigValues::ldflags,
+ flag_options, out_);
+
+ // Followed by library search paths that have been recursively pushed
+ // through the dependency tree.
+ const OrderedSet<SourceDir> all_lib_dirs = target_->all_lib_dirs();
+ if (!all_lib_dirs.empty()) {
+ // Since we're passing these on the command line to the linker and not
+ // to Ninja, we need to do shell escaping.
+ PathOutput lib_path_output(path_output_.current_dir(),
+ settings_->build_settings()->root_path_utf8(),
+ ESCAPE_NINJA_COMMAND);
+ for (size_t i = 0; i < all_lib_dirs.size(); i++) {
+ out_ << " " << tool_->lib_dir_switch();
+ lib_path_output.WriteDir(out_, all_lib_dirs[i],
+ PathOutput::DIR_NO_LAST_SLASH);
+ }
+ }
+
+ if (optional_def_file) {
+ out_ << " /DEF:";
+ path_output_.WriteFile(out_, *optional_def_file);
+ }
+
+ out_ << std::endl;
+}
+
+void NinjaBinaryTargetWriter::WriteLibs() {
+ out_ << " libs =";
+
+ // Libraries that have been recursively pushed through the dependency tree.
+ EscapeOptions lib_escape_opts;
+ lib_escape_opts.mode = ESCAPE_NINJA_COMMAND;
+ const OrderedSet<LibFile> all_libs = target_->all_libs();
+ const std::string framework_ending(".framework");
+ for (size_t i = 0; i < all_libs.size(); i++) {
+ const LibFile& lib_file = all_libs[i];
+ const std::string& lib_value = lib_file.value();
+ if (lib_file.is_source_file()) {
+ out_ << " ";
+ path_output_.WriteFile(out_, lib_file.source_file());
+ } else if (base::EndsWith(lib_value, framework_ending,
+ base::CompareCase::INSENSITIVE_ASCII)) {
+ // Special-case libraries ending in ".framework" to support Mac: Add the
+ // -framework switch and don't add the extension to the output.
+ out_ << " -framework ";
+ EscapeStringToStream(
+ out_, lib_value.substr(0, lib_value.size() - framework_ending.size()),
+ lib_escape_opts);
+ } else {
+ out_ << " " << tool_->lib_switch();
+ EscapeStringToStream(out_, lib_value, lib_escape_opts);
+ }
+ }
+ out_ << std::endl;
+}
+
+void NinjaBinaryTargetWriter::WriteOutputExtension() {
+ out_ << " output_extension = "
+ << SubstitutionWriter::GetLinkerSubstitution(
+ target_, tool_, SUBSTITUTION_OUTPUT_EXTENSION);
+ out_ << std::endl;
+}
+
+void NinjaBinaryTargetWriter::WriteSolibs(
+ const std::vector<OutputFile>& solibs) {
+ if (solibs.empty())
+ return;
+
+ out_ << " solibs =";
+ path_output_.WriteFiles(out_, solibs);
+ out_ << std::endl;
+}
+
+void NinjaBinaryTargetWriter::WriteSourceSetStamp(
+ const std::vector<OutputFile>& object_files) {
+ // The stamp rule for source sets is generally not used, since targets that
+ // depend on this will reference the object files directly. However, writing
+ // this rule allows the user to type the name of the target and get a build
+ // which can be convenient for development.
+ UniqueVector<OutputFile> extra_object_files;
+ UniqueVector<const Target*> linkable_deps;
+ UniqueVector<const Target*> non_linkable_deps;
+ GetDeps(&extra_object_files, &linkable_deps, &non_linkable_deps);
+
+ // The classifier should never put extra object files in a source set:
+ // any source sets that we depend on should appear in our non-linkable
+ // deps instead.
+ DCHECK(extra_object_files.empty());
+
+ std::vector<OutputFile> order_only_deps;
+ for (const auto& dep : non_linkable_deps)
+ order_only_deps.push_back(dep->dependency_output_file());
+
+ WriteStampForTarget(object_files, order_only_deps);
+}
+
+void NinjaBinaryTargetWriter::GetDeps(
+ UniqueVector<OutputFile>* extra_object_files,
+ UniqueVector<const Target*>* linkable_deps,
+ UniqueVector<const Target*>* non_linkable_deps) const {
+ // Normal public/private deps.
+ for (const auto& pair : target_->GetDeps(Target::DEPS_LINKED)) {
+ ClassifyDependency(pair.ptr, extra_object_files,
+ linkable_deps, non_linkable_deps);
+ }
+
+ // Inherited libraries.
+ for (const auto& inherited_target :
+ target_->inherited_libraries().GetOrdered()) {
+ ClassifyDependency(inherited_target, extra_object_files,
+ linkable_deps, non_linkable_deps);
+ }
+
+ // Data deps.
+ for (const auto& data_dep_pair : target_->data_deps())
+ non_linkable_deps->push_back(data_dep_pair.ptr);
+}
+
+void NinjaBinaryTargetWriter::ClassifyDependency(
+ const Target* dep,
+ UniqueVector<OutputFile>* extra_object_files,
+ UniqueVector<const Target*>* linkable_deps,
+ UniqueVector<const Target*>* non_linkable_deps) const {
+ // Only the following types of outputs have libraries linked into them:
+ // EXECUTABLE
+ // SHARED_LIBRARY
+ // _complete_ STATIC_LIBRARY
+ //
+ // Child deps of intermediate static libraries get pushed up the
+ // dependency tree until one of these is reached, and source sets
+ // don't link at all.
+ bool can_link_libs = target_->IsFinal();
+
+ if (dep->output_type() == Target::SOURCE_SET) {
+ // Source sets have their object files linked into final targets
+ // (shared libraries, executables, loadable modules, and complete static
+ // libraries). Intermediate static libraries and other source sets
+ // just forward the dependency, otherwise the files in the source
+ // set can easily get linked more than once which will cause
+ // multiple definition errors.
+ if (can_link_libs)
+ AddSourceSetObjectFiles(dep, extra_object_files);
+
+ // Add the source set itself as a non-linkable dependency on the current
+ // target. This will make sure that anything the source set's stamp file
+ // depends on (like data deps) are also built before the current target
+ // can be complete. Otherwise, these will be skipped since this target
+ // will depend only on the source set's object files.
+ non_linkable_deps->push_back(dep);
+ } else if (can_link_libs && dep->IsLinkable()) {
+ linkable_deps->push_back(dep);
+ } else {
+ non_linkable_deps->push_back(dep);
+ }
+}
+
+void NinjaBinaryTargetWriter::WriteOrderOnlyDependencies(
+ const UniqueVector<const Target*>& non_linkable_deps) {
+ if (!non_linkable_deps.empty()) {
+ out_ << " ||";
+
+ // Non-linkable targets.
+ for (const auto& non_linkable_dep : non_linkable_deps) {
+ out_ << " ";
+ path_output_.WriteFile(out_, non_linkable_dep->dependency_output_file());
+ }
+ }
+}
+
+OutputFile NinjaBinaryTargetWriter::GetWindowsPCHFile(
+ Toolchain::ToolType tool_type) const {
+ // Use "obj/{dir}/{target_name}_{lang}.pch" which ends up
+ // looking like "obj/chrome/browser/browser_cc.pch"
+ OutputFile ret = GetTargetOutputDirAsOutputFile(target_);
+ ret.value().append(target_->label().name());
+ ret.value().push_back('_');
+ ret.value().append(GetPCHLangSuffixForToolType(tool_type));
+ ret.value().append(".pch");
+
+ return ret;
+}
+
+bool NinjaBinaryTargetWriter::CheckForDuplicateObjectFiles(
+ const std::vector<OutputFile>& files) const {
+ base::hash_set<std::string> set;
+ for (const auto& file : files) {
+ if (!set.insert(file.value()).second) {
+ Err err(
+ target_->defined_from(),
+ "Duplicate object file",
+ "The target " + target_->label().GetUserVisibleName(false) +
+ "\ngenerates two object files with the same name:\n " +
+ file.value() + "\n"
+ "\n"
+ "It could be you accidentally have a file listed twice in the\n"
+ "sources. Or, depending on how your toolchain maps sources to\n"
+ "object files, two source files with the same name in different\n"
+ "directories could map to the same object file.\n"
+ "\n"
+ "In the latter case, either rename one of the files or move one of\n"
+ "the sources to a separate source_set to avoid them both being in\n"
+ "the same target.");
+ g_scheduler->FailWithError(err);
+ return false;
+ }
+ }
+ return true;
+}
diff --git a/chromium/tools/gn/ninja_binary_target_writer.h b/chromium/tools/gn/ninja_binary_target_writer.h
new file mode 100644
index 00000000000..db2b15d12af
--- /dev/null
+++ b/chromium/tools/gn/ninja_binary_target_writer.h
@@ -0,0 +1,148 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_NINJA_BINARY_TARGET_WRITER_H_
+#define TOOLS_GN_NINJA_BINARY_TARGET_WRITER_H_
+
+#include "base/macros.h"
+#include "tools/gn/config_values.h"
+#include "tools/gn/ninja_target_writer.h"
+#include "tools/gn/toolchain.h"
+#include "tools/gn/unique_vector.h"
+
+struct EscapeOptions;
+class SourceFileTypeSet;
+
+// Writes a .ninja file for a binary target type (an executable, a shared
+// library, or a static library).
+class NinjaBinaryTargetWriter : public NinjaTargetWriter {
+ public:
+ class SourceFileTypeSet;
+
+ NinjaBinaryTargetWriter(const Target* target, std::ostream& out);
+ ~NinjaBinaryTargetWriter() override;
+
+ void Run() override;
+
+ private:
+ typedef std::set<OutputFile> OutputFileSet;
+
+ // Writes all flags for the compiler: includes, defines, cflags, etc.
+ void WriteCompilerVars(const SourceFileTypeSet& used_types);
+
+ // has_precompiled_headers is set when this substitution matches a tool type
+ // that supports precompiled headers, and this target supports precompiled
+ // headers. It doesn't indicate if the tool has precompiled headers (this
+ // will be looked up by this function).
+ //
+ // The tool_type indicates the corresponding tool for flags that are
+ // tool-specific (e.g. "cflags_c"). For non-tool-specific flags (e.g.
+ // "defines") tool_type should be TYPE_NONE.
+ void WriteOneFlag(
+ SubstitutionType subst_enum,
+ bool has_precompiled_headers,
+ Toolchain::ToolType tool_type,
+ const std::vector<std::string>& (ConfigValues::* getter)() const,
+ EscapeOptions flag_escape_options);
+
+ // Writes build lines required for precompiled headers. Any generated
+ // object files will be appended to the |object_files|. Any generated
+ // non-object files (for instance, .gch files from a GCC toolchain, are
+ // appended to |other_files|).
+ //
+ // input_dep is the stamp file collecting the dependencies required before
+ // compiling this target. It will be empty if there are no input deps.
+ void WritePCHCommands(const SourceFileTypeSet& used_types,
+ const OutputFile& input_dep,
+ std::vector<OutputFile>* object_files,
+ std::vector<OutputFile>* other_files);
+
+ // Writes a .pch compile build line for a language type.
+ void WritePCHCommand(SubstitutionType flag_type,
+ Toolchain::ToolType tool_type,
+ Tool::PrecompiledHeaderType header_type,
+ const OutputFile& input_dep,
+ std::vector<OutputFile>* object_files,
+ std::vector<OutputFile>* other_files);
+
+ void WriteGCCPCHCommand(SubstitutionType flag_type,
+ Toolchain::ToolType tool_type,
+ const OutputFile& order_only_dep,
+ std::vector<OutputFile>* gch_files);
+
+ void WriteWindowsPCHCommand(SubstitutionType flag_type,
+ Toolchain::ToolType tool_type,
+ const OutputFile& order_only_dep,
+ std::vector<OutputFile>* object_files);
+
+ // pch_deps are additional dependencies to run before the rule. They are
+ // expected to abide by the naming conventions specified by GetPCHOutputFiles.
+ //
+ // order_only_dep is the name of the stamp file that covers the dependencies
+ // that must be run before doing any compiles.
+ //
+ // The files produced by the compiler will be added to two output vectors.
+ void WriteSources(const std::vector<OutputFile>& pch_deps,
+ const OutputFile& order_only_dep,
+ std::vector<OutputFile>* object_files,
+ std::vector<SourceFile>* other_files);
+
+ // Writes a build line.
+ void WriteCompilerBuildLine(const SourceFile& source,
+ const std::vector<OutputFile>& extra_deps,
+ const OutputFile& order_only_dep,
+ Toolchain::ToolType tool_type,
+ const std::vector<OutputFile>& outputs);
+
+ void WriteLinkerStuff(const std::vector<OutputFile>& object_files,
+ const std::vector<SourceFile>& other_files);
+ void WriteLinkerFlags(const SourceFile* optional_def_file);
+ void WriteLibs();
+ void WriteOutputExtension();
+ void WriteSolibs(const std::vector<OutputFile>& solibs);
+
+ // Writes the stamp line for a source set. These are not linked.
+ void WriteSourceSetStamp(const std::vector<OutputFile>& object_files);
+
+ // Gets all target dependencies and classifies them, as well as accumulates
+ // object files from source sets we need to link.
+ void GetDeps(UniqueVector<OutputFile>* extra_object_files,
+ UniqueVector<const Target*>* linkable_deps,
+ UniqueVector<const Target*>* non_linkable_deps) const;
+
+ // Classifies the dependency as linkable or nonlinkable with the current
+ // target, adding it to the appropriate vector. If the dependency is a source
+ // set we should link in, the source set's object files will be appended to
+ // |extra_object_files|.
+ void ClassifyDependency(const Target* dep,
+ UniqueVector<OutputFile>* extra_object_files,
+ UniqueVector<const Target*>* linkable_deps,
+ UniqueVector<const Target*>* non_linkable_deps) const;
+
+ // Writes the implicit dependencies for the link or stamp line. This is
+ // the "||" and everything following it on the ninja line.
+ //
+ // The order-only dependencies are the non-linkable deps passed in as an
+ // argument, plus the data file depdencies in the target.
+ void WriteOrderOnlyDependencies(
+ const UniqueVector<const Target*>& non_linkable_deps);
+
+ // Returns the computed name of the Windows .pch file for the given
+ // tool type. The tool must support precompiled headers.
+ OutputFile GetWindowsPCHFile(Toolchain::ToolType tool_type) const;
+
+ // Checks for duplicates in the given list of output files. If any duplicates
+ // are found, throws an error and return false.
+ bool CheckForDuplicateObjectFiles(const std::vector<OutputFile>& files) const;
+
+ const Tool* tool_;
+
+ // Cached version of the prefix used for rule types for this toolchain.
+ std::string rule_prefix_;
+
+ DISALLOW_COPY_AND_ASSIGN(NinjaBinaryTargetWriter);
+};
+
+#endif // TOOLS_GN_NINJA_BINARY_TARGET_WRITER_H_
+
diff --git a/chromium/tools/gn/ninja_binary_target_writer_unittest.cc b/chromium/tools/gn/ninja_binary_target_writer_unittest.cc
new file mode 100644
index 00000000000..1e02446dfe3
--- /dev/null
+++ b/chromium/tools/gn/ninja_binary_target_writer_unittest.cc
@@ -0,0 +1,736 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/ninja_binary_target_writer.h"
+
+#include <sstream>
+#include <utility>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/target.h"
+#include "tools/gn/test_with_scope.h"
+
+TEST(NinjaBinaryTargetWriter, SourceSet) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+ target.set_output_type(Target::SOURCE_SET);
+ target.visibility().SetPublic();
+ target.sources().push_back(SourceFile("//foo/input1.cc"));
+ target.sources().push_back(SourceFile("//foo/input2.cc"));
+ // Also test object files, which should be just passed through to the
+ // dependents to link.
+ target.sources().push_back(SourceFile("//foo/input3.o"));
+ target.sources().push_back(SourceFile("//foo/input4.obj"));
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ // Source set itself.
+ {
+ std::ostringstream out;
+ NinjaBinaryTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "cflags =\n"
+ "cflags_cc =\n"
+ "root_out_dir = .\n"
+ "target_out_dir = obj/foo\n"
+ "target_output_name = bar\n"
+ "\n"
+ "build obj/foo/bar.input1.o: cxx ../../foo/input1.cc\n"
+ "build obj/foo/bar.input2.o: cxx ../../foo/input2.cc\n"
+ "\n"
+ "build obj/foo/bar.stamp: stamp obj/foo/bar.input1.o "
+ "obj/foo/bar.input2.o ../../foo/input3.o ../../foo/input4.obj\n";
+ std::string out_str = out.str();
+ EXPECT_EQ(expected, out_str);
+ }
+
+ // A shared library that depends on the source set.
+ Target shlib_target(setup.settings(), Label(SourceDir("//foo/"), "shlib"));
+ shlib_target.set_output_type(Target::SHARED_LIBRARY);
+ shlib_target.public_deps().push_back(LabelTargetPair(&target));
+ shlib_target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(shlib_target.OnResolved(&err));
+
+ {
+ std::ostringstream out;
+ NinjaBinaryTargetWriter writer(&shlib_target, out);
+ writer.Run();
+
+ const char expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "root_out_dir = .\n"
+ "target_out_dir = obj/foo\n"
+ "target_output_name = libshlib\n"
+ "\n"
+ "\n"
+ // Ordering of the obj files here should come out in the order
+ // specified, with the target's first, followed by the source set's, in
+ // order.
+ "build ./libshlib.so: solink obj/foo/bar.input1.o "
+ "obj/foo/bar.input2.o ../../foo/input3.o ../../foo/input4.obj "
+ "|| obj/foo/bar.stamp\n"
+ " ldflags =\n"
+ " libs =\n"
+ " output_extension = .so\n";
+ std::string out_str = out.str();
+ EXPECT_EQ(expected, out_str);
+ }
+
+ // A static library that depends on the source set (should not link it).
+ Target stlib_target(setup.settings(), Label(SourceDir("//foo/"), "stlib"));
+ stlib_target.set_output_type(Target::STATIC_LIBRARY);
+ stlib_target.public_deps().push_back(LabelTargetPair(&target));
+ stlib_target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(stlib_target.OnResolved(&err));
+
+ {
+ std::ostringstream out;
+ NinjaBinaryTargetWriter writer(&stlib_target, out);
+ writer.Run();
+
+ const char expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "root_out_dir = .\n"
+ "target_out_dir = obj/foo\n"
+ "target_output_name = libstlib\n"
+ "\n"
+ "\n"
+ // There are no sources so there are no params to alink. (In practice
+ // this will probably fail in the archive tool.)
+ "build obj/foo/libstlib.a: alink || obj/foo/bar.stamp\n"
+ " output_extension = \n";
+ std::string out_str = out.str();
+ EXPECT_EQ(expected, out_str);
+ }
+
+ // Make the static library 'complete', which means it should be linked.
+ stlib_target.set_complete_static_lib(true);
+ {
+ std::ostringstream out;
+ NinjaBinaryTargetWriter writer(&stlib_target, out);
+ writer.Run();
+
+ const char expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "root_out_dir = .\n"
+ "target_out_dir = obj/foo\n"
+ "target_output_name = libstlib\n"
+ "\n"
+ "\n"
+ // Ordering of the obj files here should come out in the order
+ // specified, with the target's first, followed by the source set's, in
+ // order.
+ "build obj/foo/libstlib.a: alink obj/foo/bar.input1.o "
+ "obj/foo/bar.input2.o ../../foo/input3.o ../../foo/input4.obj "
+ "|| obj/foo/bar.stamp\n"
+ " output_extension = \n";
+ std::string out_str = out.str();
+ EXPECT_EQ(expected, out_str);
+ }
+}
+
+// This tests that output extension overrides apply, and input dependencies
+// are applied.
+TEST(NinjaBinaryTargetWriter, ProductExtensionAndInputDeps) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+
+ // An action for our library to depend on.
+ Target action(setup.settings(), Label(SourceDir("//foo/"), "action"));
+ action.set_output_type(Target::ACTION_FOREACH);
+ action.visibility().SetPublic();
+ action.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(action.OnResolved(&err));
+
+ // A shared library w/ the product_extension set to a custom value.
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "shlib"));
+ target.set_output_type(Target::SHARED_LIBRARY);
+ target.set_output_extension(std::string("so.6"));
+ target.sources().push_back(SourceFile("//foo/input1.cc"));
+ target.sources().push_back(SourceFile("//foo/input2.cc"));
+ target.public_deps().push_back(LabelTargetPair(&action));
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaBinaryTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "cflags =\n"
+ "cflags_cc =\n"
+ "root_out_dir = .\n"
+ "target_out_dir = obj/foo\n"
+ "target_output_name = libshlib\n"
+ "\n"
+ "build obj/foo/libshlib.input1.o: cxx ../../foo/input1.cc"
+ " || obj/foo/action.stamp\n"
+ "build obj/foo/libshlib.input2.o: cxx ../../foo/input2.cc"
+ " || obj/foo/action.stamp\n"
+ "\n"
+ "build ./libshlib.so.6: solink obj/foo/libshlib.input1.o "
+ // The order-only dependency here is stricly unnecessary since the
+ // sources list this as an order-only dep. See discussion in the code
+ // that writes this.
+ "obj/foo/libshlib.input2.o || obj/foo/action.stamp\n"
+ " ldflags =\n"
+ " libs =\n"
+ " output_extension = .so.6\n";
+
+ std::string out_str = out.str();
+ EXPECT_EQ(expected, out_str);
+}
+
+// Tests libs are applied.
+TEST(NinjaBinaryTargetWriter, LibsAndLibDirs) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+
+ // A shared library w/ libs and lib_dirs.
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "shlib"));
+ target.set_output_type(Target::SHARED_LIBRARY);
+ target.config_values().libs().push_back(LibFile(SourceFile("//foo/lib1.a")));
+ target.config_values().libs().push_back(LibFile("foo"));
+ target.config_values().lib_dirs().push_back(SourceDir("//foo/bar/"));
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaBinaryTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "root_out_dir = .\n"
+ "target_out_dir = obj/foo\n"
+ "target_output_name = libshlib\n"
+ "\n"
+ "\n"
+ "build ./libshlib.so: solink | ../../foo/lib1.a\n"
+ " ldflags = -L../../foo/bar\n"
+ " libs = ../../foo/lib1.a -lfoo\n"
+ " output_extension = .so\n";
+
+ std::string out_str = out.str();
+ EXPECT_EQ(expected, out_str);
+}
+
+TEST(NinjaBinaryTargetWriter, EmptyProductExtension) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+
+ // This test is the same as ProductExtension, except that we call
+ // set_output_extension("") and ensure that we get an empty one and override
+ // the output prefix so that the name matches the target exactly.
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "shlib"));
+ target.set_output_type(Target::SHARED_LIBRARY);
+ target.set_output_prefix_override(true);
+ target.set_output_extension(std::string());
+ target.sources().push_back(SourceFile("//foo/input1.cc"));
+ target.sources().push_back(SourceFile("//foo/input2.cc"));
+
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaBinaryTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "cflags =\n"
+ "cflags_cc =\n"
+ "root_out_dir = .\n"
+ "target_out_dir = obj/foo\n"
+ "target_output_name = shlib\n"
+ "\n"
+ "build obj/foo/shlib.input1.o: cxx ../../foo/input1.cc\n"
+ "build obj/foo/shlib.input2.o: cxx ../../foo/input2.cc\n"
+ "\n"
+ "build ./shlib: solink obj/foo/shlib.input1.o "
+ "obj/foo/shlib.input2.o\n"
+ " ldflags =\n"
+ " libs =\n"
+ " output_extension = \n";
+
+ std::string out_str = out.str();
+ EXPECT_EQ(expected, out_str);
+}
+
+TEST(NinjaBinaryTargetWriter, SourceSetDataDeps) {
+ TestWithScope setup;
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+
+ Err err;
+
+ // This target is a data (runtime) dependency of the intermediate target.
+ Target data(setup.settings(), Label(SourceDir("//foo/"), "data_target"));
+ data.set_output_type(Target::EXECUTABLE);
+ data.visibility().SetPublic();
+ data.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(data.OnResolved(&err));
+
+ // Intermediate source set target.
+ Target inter(setup.settings(), Label(SourceDir("//foo/"), "inter"));
+ inter.set_output_type(Target::SOURCE_SET);
+ inter.visibility().SetPublic();
+ inter.data_deps().push_back(LabelTargetPair(&data));
+ inter.SetToolchain(setup.toolchain());
+ inter.sources().push_back(SourceFile("//foo/inter.cc"));
+ ASSERT_TRUE(inter.OnResolved(&err)) << err.message();
+
+ // Write out the intermediate target.
+ std::ostringstream inter_out;
+ NinjaBinaryTargetWriter inter_writer(&inter, inter_out);
+ inter_writer.Run();
+
+ // The intermediate source set will be a stamp file that depends on the
+ // object files, and will have an order-only dependency on its data dep and
+ // data file.
+ const char inter_expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "cflags =\n"
+ "cflags_cc =\n"
+ "root_out_dir = .\n"
+ "target_out_dir = obj/foo\n"
+ "target_output_name = inter\n"
+ "\n"
+ "build obj/foo/inter.inter.o: cxx ../../foo/inter.cc\n"
+ "\n"
+ "build obj/foo/inter.stamp: stamp obj/foo/inter.inter.o || "
+ "./data_target\n";
+ EXPECT_EQ(inter_expected, inter_out.str());
+
+ // Final target.
+ Target exe(setup.settings(), Label(SourceDir("//foo/"), "exe"));
+ exe.set_output_type(Target::EXECUTABLE);
+ exe.public_deps().push_back(LabelTargetPair(&inter));
+ exe.SetToolchain(setup.toolchain());
+ exe.sources().push_back(SourceFile("//foo/final.cc"));
+ ASSERT_TRUE(exe.OnResolved(&err));
+
+ std::ostringstream final_out;
+ NinjaBinaryTargetWriter final_writer(&exe, final_out);
+ final_writer.Run();
+
+ // The final output depends on both object files (one from the final target,
+ // one from the source set) and has an order-only dependency on the source
+ // set's stamp file and the final target's data file. The source set stamp
+ // dependency will create an implicit order-only dependency on the data
+ // target.
+ const char final_expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "cflags =\n"
+ "cflags_cc =\n"
+ "root_out_dir = .\n"
+ "target_out_dir = obj/foo\n"
+ "target_output_name = exe\n"
+ "\n"
+ "build obj/foo/exe.final.o: cxx ../../foo/final.cc\n"
+ "\n"
+ "build ./exe: link obj/foo/exe.final.o obj/foo/inter.inter.o || "
+ "obj/foo/inter.stamp\n"
+ " ldflags =\n"
+ " libs =\n"
+ " output_extension = \n";
+ EXPECT_EQ(final_expected, final_out.str());
+}
+
+TEST(NinjaBinaryTargetWriter, SharedLibraryModuleDefinitionFile) {
+ TestWithScope setup;
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+
+ Target shared_lib(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+ shared_lib.set_output_type(Target::SHARED_LIBRARY);
+ shared_lib.SetToolchain(setup.toolchain());
+ shared_lib.sources().push_back(SourceFile("//foo/sources.cc"));
+ shared_lib.sources().push_back(SourceFile("//foo/bar.def"));
+
+ Err err;
+ ASSERT_TRUE(shared_lib.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaBinaryTargetWriter writer(&shared_lib, out);
+ writer.Run();
+
+ const char expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "cflags =\n"
+ "cflags_cc =\n"
+ "root_out_dir = .\n"
+ "target_out_dir = obj/foo\n"
+ "target_output_name = libbar\n"
+ "\n"
+ "build obj/foo/libbar.sources.o: cxx ../../foo/sources.cc\n"
+ "\n"
+ "build ./libbar.so: solink obj/foo/libbar.sources.o | ../../foo/bar.def\n"
+ " ldflags = /DEF:../../foo/bar.def\n"
+ " libs =\n"
+ " output_extension = .so\n";
+ EXPECT_EQ(expected, out.str());
+}
+
+TEST(NinjaBinaryTargetWriter, LoadableModule) {
+ TestWithScope setup;
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+
+ Target loadable_module(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+ loadable_module.set_output_type(Target::LOADABLE_MODULE);
+ loadable_module.visibility().SetPublic();
+ loadable_module.SetToolchain(setup.toolchain());
+ loadable_module.sources().push_back(SourceFile("//foo/sources.cc"));
+
+ Err err;
+ ASSERT_TRUE(loadable_module.OnResolved(&err)) << err.message();
+
+ std::ostringstream out;
+ NinjaBinaryTargetWriter writer(&loadable_module, out);
+ writer.Run();
+
+ const char loadable_expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "cflags =\n"
+ "cflags_cc =\n"
+ "root_out_dir = .\n"
+ "target_out_dir = obj/foo\n"
+ "target_output_name = libbar\n"
+ "\n"
+ "build obj/foo/libbar.sources.o: cxx ../../foo/sources.cc\n"
+ "\n"
+ "build ./libbar.so: solink_module obj/foo/libbar.sources.o\n"
+ " ldflags =\n"
+ " libs =\n"
+ " output_extension = .so\n";
+ EXPECT_EQ(loadable_expected, out.str());
+
+ // Final target.
+ Target exe(setup.settings(), Label(SourceDir("//foo/"), "exe"));
+ exe.set_output_type(Target::EXECUTABLE);
+ exe.public_deps().push_back(LabelTargetPair(&loadable_module));
+ exe.SetToolchain(setup.toolchain());
+ exe.sources().push_back(SourceFile("//foo/final.cc"));
+ ASSERT_TRUE(exe.OnResolved(&err)) << err.message();
+
+ std::ostringstream final_out;
+ NinjaBinaryTargetWriter final_writer(&exe, final_out);
+ final_writer.Run();
+
+ // The final output depends on the loadable module so should have an
+ // order-only dependency on the loadable modules's output file.
+ const char final_expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "cflags =\n"
+ "cflags_cc =\n"
+ "root_out_dir = .\n"
+ "target_out_dir = obj/foo\n"
+ "target_output_name = exe\n"
+ "\n"
+ "build obj/foo/exe.final.o: cxx ../../foo/final.cc\n"
+ "\n"
+ "build ./exe: link obj/foo/exe.final.o || ./libbar.so\n"
+ " ldflags =\n"
+ " libs =\n"
+ " output_extension = \n";
+ EXPECT_EQ(final_expected, final_out.str());
+}
+
+TEST(NinjaBinaryTargetWriter, WinPrecompiledHeaders) {
+ Err err;
+
+ // This setup's toolchain does not have precompiled headers defined.
+ TestWithScope setup;
+
+ // A precompiled header toolchain.
+ Settings pch_settings(setup.build_settings(), "withpch/");
+ Toolchain pch_toolchain(&pch_settings,
+ Label(SourceDir("//toolchain/"), "withpch"));
+ pch_settings.set_toolchain_label(pch_toolchain.label());
+ pch_settings.set_default_toolchain_label(setup.toolchain()->label());
+
+ // Declare a C++ compiler that supports PCH.
+ std::unique_ptr<Tool> cxx_tool(new Tool);
+ TestWithScope::SetCommandForTool(
+ "c++ {{source}} {{cflags}} {{cflags_cc}} {{defines}} {{include_dirs}} "
+ "-o {{output}}",
+ cxx_tool.get());
+ cxx_tool->set_outputs(SubstitutionList::MakeForTest(
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o"));
+ cxx_tool->set_precompiled_header_type(Tool::PCH_MSVC);
+ pch_toolchain.SetTool(Toolchain::TYPE_CXX, std::move(cxx_tool));
+
+ // Add a C compiler as well.
+ std::unique_ptr<Tool> cc_tool(new Tool);
+ TestWithScope::SetCommandForTool(
+ "cc {{source}} {{cflags}} {{cflags_c}} {{defines}} {{include_dirs}} "
+ "-o {{output}}",
+ cc_tool.get());
+ cc_tool->set_outputs(SubstitutionList::MakeForTest(
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o"));
+ cc_tool->set_precompiled_header_type(Tool::PCH_MSVC);
+ pch_toolchain.SetTool(Toolchain::TYPE_CC, std::move(cc_tool));
+ pch_toolchain.ToolchainSetupComplete();
+
+ // This target doesn't specify precompiled headers.
+ {
+ Target no_pch_target(&pch_settings,
+ Label(SourceDir("//foo/"), "no_pch_target"));
+ no_pch_target.set_output_type(Target::SOURCE_SET);
+ no_pch_target.visibility().SetPublic();
+ no_pch_target.sources().push_back(SourceFile("//foo/input1.cc"));
+ no_pch_target.sources().push_back(SourceFile("//foo/input2.c"));
+ no_pch_target.config_values().cflags_c().push_back("-std=c99");
+ no_pch_target.SetToolchain(&pch_toolchain);
+ ASSERT_TRUE(no_pch_target.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaBinaryTargetWriter writer(&no_pch_target, out);
+ writer.Run();
+
+ const char no_pch_expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "cflags =\n"
+ "cflags_c = -std=c99\n"
+ "cflags_cc =\n"
+ "target_output_name = no_pch_target\n"
+ "\n"
+ "build withpch/obj/foo/no_pch_target.input1.o: "
+ "withpch_cxx ../../foo/input1.cc\n"
+ "build withpch/obj/foo/no_pch_target.input2.o: "
+ "withpch_cc ../../foo/input2.c\n"
+ "\n"
+ "build withpch/obj/foo/no_pch_target.stamp: "
+ "withpch_stamp withpch/obj/foo/no_pch_target.input1.o "
+ "withpch/obj/foo/no_pch_target.input2.o\n";
+ EXPECT_EQ(no_pch_expected, out.str());
+ }
+
+ // This target specifies PCH.
+ {
+ Target pch_target(&pch_settings,
+ Label(SourceDir("//foo/"), "pch_target"));
+ pch_target.config_values().set_precompiled_header("build/precompile.h");
+ pch_target.config_values().set_precompiled_source(
+ SourceFile("//build/precompile.cc"));
+ pch_target.set_output_type(Target::SOURCE_SET);
+ pch_target.visibility().SetPublic();
+ pch_target.sources().push_back(SourceFile("//foo/input1.cc"));
+ pch_target.sources().push_back(SourceFile("//foo/input2.c"));
+ pch_target.SetToolchain(&pch_toolchain);
+ ASSERT_TRUE(pch_target.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaBinaryTargetWriter writer(&pch_target, out);
+ writer.Run();
+
+ const char pch_win_expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "cflags =\n"
+ // It should output language-specific pch files.
+ "cflags_c = /Fpwithpch/obj/foo/pch_target_c.pch "
+ "/Yubuild/precompile.h\n"
+ "cflags_cc = /Fpwithpch/obj/foo/pch_target_cc.pch "
+ "/Yubuild/precompile.h\n"
+ "target_output_name = pch_target\n"
+ "\n"
+ // Compile the precompiled source files with /Yc.
+ "build withpch/obj/build/pch_target.precompile.c.o: "
+ "withpch_cc ../../build/precompile.cc\n"
+ " cflags_c = ${cflags_c} /Ycbuild/precompile.h\n"
+ "\n"
+ "build withpch/obj/build/pch_target.precompile.cc.o: "
+ "withpch_cxx ../../build/precompile.cc\n"
+ " cflags_cc = ${cflags_cc} /Ycbuild/precompile.h\n"
+ "\n"
+ "build withpch/obj/foo/pch_target.input1.o: "
+ "withpch_cxx ../../foo/input1.cc | "
+ // Explicit dependency on the PCH build step.
+ "withpch/obj/build/pch_target.precompile.cc.o\n"
+ "build withpch/obj/foo/pch_target.input2.o: "
+ "withpch_cc ../../foo/input2.c | "
+ // Explicit dependency on the PCH build step.
+ "withpch/obj/build/pch_target.precompile.c.o\n"
+ "\n"
+ "build withpch/obj/foo/pch_target.stamp: withpch_stamp "
+ "withpch/obj/foo/pch_target.input1.o "
+ "withpch/obj/foo/pch_target.input2.o "
+ // The precompiled object files were added to the outputs.
+ "withpch/obj/build/pch_target.precompile.c.o "
+ "withpch/obj/build/pch_target.precompile.cc.o\n";
+ EXPECT_EQ(pch_win_expected, out.str());
+ }
+}
+
+TEST(NinjaBinaryTargetWriter, GCCPrecompiledHeaders) {
+ Err err;
+
+ // This setup's toolchain does not have precompiled headers defined.
+ TestWithScope setup;
+
+ // A precompiled header toolchain.
+ Settings pch_settings(setup.build_settings(), "withpch/");
+ Toolchain pch_toolchain(&pch_settings,
+ Label(SourceDir("//toolchain/"), "withpch"));
+ pch_settings.set_toolchain_label(pch_toolchain.label());
+ pch_settings.set_default_toolchain_label(setup.toolchain()->label());
+
+ // Declare a C++ compiler that supports PCH.
+ std::unique_ptr<Tool> cxx_tool(new Tool);
+ TestWithScope::SetCommandForTool(
+ "c++ {{source}} {{cflags}} {{cflags_cc}} {{defines}} {{include_dirs}} "
+ "-o {{output}}",
+ cxx_tool.get());
+ cxx_tool->set_outputs(SubstitutionList::MakeForTest(
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o"));
+ cxx_tool->set_precompiled_header_type(Tool::PCH_GCC);
+ pch_toolchain.SetTool(Toolchain::TYPE_CXX, std::move(cxx_tool));
+ pch_toolchain.ToolchainSetupComplete();
+
+ // Add a C compiler as well.
+ std::unique_ptr<Tool> cc_tool(new Tool);
+ TestWithScope::SetCommandForTool(
+ "cc {{source}} {{cflags}} {{cflags_c}} {{defines}} {{include_dirs}} "
+ "-o {{output}}",
+ cc_tool.get());
+ cc_tool->set_outputs(SubstitutionList::MakeForTest(
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o"));
+ cc_tool->set_precompiled_header_type(Tool::PCH_GCC);
+ pch_toolchain.SetTool(Toolchain::TYPE_CC, std::move(cc_tool));
+ pch_toolchain.ToolchainSetupComplete();
+
+ // This target doesn't specify precompiled headers.
+ {
+ Target no_pch_target(&pch_settings,
+ Label(SourceDir("//foo/"), "no_pch_target"));
+ no_pch_target.set_output_type(Target::SOURCE_SET);
+ no_pch_target.visibility().SetPublic();
+ no_pch_target.sources().push_back(SourceFile("//foo/input1.cc"));
+ no_pch_target.sources().push_back(SourceFile("//foo/input2.c"));
+ no_pch_target.config_values().cflags_c().push_back("-std=c99");
+ no_pch_target.SetToolchain(&pch_toolchain);
+ ASSERT_TRUE(no_pch_target.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaBinaryTargetWriter writer(&no_pch_target, out);
+ writer.Run();
+
+ const char no_pch_expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "cflags =\n"
+ "cflags_c = -std=c99\n"
+ "cflags_cc =\n"
+ "target_output_name = no_pch_target\n"
+ "\n"
+ "build withpch/obj/foo/no_pch_target.input1.o: "
+ "withpch_cxx ../../foo/input1.cc\n"
+ "build withpch/obj/foo/no_pch_target.input2.o: "
+ "withpch_cc ../../foo/input2.c\n"
+ "\n"
+ "build withpch/obj/foo/no_pch_target.stamp: "
+ "withpch_stamp withpch/obj/foo/no_pch_target.input1.o "
+ "withpch/obj/foo/no_pch_target.input2.o\n";
+ EXPECT_EQ(no_pch_expected, out.str());
+ }
+
+ // This target specifies PCH.
+ {
+ Target pch_target(&pch_settings,
+ Label(SourceDir("//foo/"), "pch_target"));
+ pch_target.config_values().set_precompiled_header("build/precompile.h");
+ pch_target.config_values().set_precompiled_source(
+ SourceFile("//build/precompile.h"));
+ pch_target.config_values().cflags_c().push_back("-std=c99");
+ pch_target.set_output_type(Target::SOURCE_SET);
+ pch_target.visibility().SetPublic();
+ pch_target.sources().push_back(SourceFile("//foo/input1.cc"));
+ pch_target.sources().push_back(SourceFile("//foo/input2.c"));
+ pch_target.SetToolchain(&pch_toolchain);
+ ASSERT_TRUE(pch_target.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaBinaryTargetWriter writer(&pch_target, out);
+ writer.Run();
+
+ const char pch_gcc_expected[] =
+ "defines =\n"
+ "include_dirs =\n"
+ "cflags =\n"
+ "cflags_c = -std=c99 "
+ "-include withpch/obj/build/pch_target.precompile.h-c\n"
+ "cflags_cc = -include withpch/obj/build/pch_target.precompile.h-cc\n"
+ "target_output_name = pch_target\n"
+ "\n"
+ // Compile the precompiled sources with -x <lang>.
+ "build withpch/obj/build/pch_target.precompile.h-c.gch: "
+ "withpch_cc ../../build/precompile.h\n"
+ " cflags_c = -std=c99 -x c-header\n"
+ "\n"
+ "build withpch/obj/build/pch_target.precompile.h-cc.gch: "
+ "withpch_cxx ../../build/precompile.h\n"
+ " cflags_cc = -x c++-header\n"
+ "\n"
+ "build withpch/obj/foo/pch_target.input1.o: "
+ "withpch_cxx ../../foo/input1.cc | "
+ // Explicit dependency on the PCH build step.
+ "withpch/obj/build/pch_target.precompile.h-cc.gch\n"
+ "build withpch/obj/foo/pch_target.input2.o: "
+ "withpch_cc ../../foo/input2.c | "
+ // Explicit dependency on the PCH build step.
+ "withpch/obj/build/pch_target.precompile.h-c.gch\n"
+ "\n"
+ "build withpch/obj/foo/pch_target.stamp: "
+ "withpch_stamp withpch/obj/foo/pch_target.input1.o "
+ "withpch/obj/foo/pch_target.input2.o\n";
+ EXPECT_EQ(pch_gcc_expected, out.str());
+ }
+}
+
+// Should throw an error with the scheduler if a duplicate object file exists.
+// This is dependent on the toolchain's object file mapping.
+TEST(NinjaBinaryTargetWriter, DupeObjFileError) {
+ Scheduler scheduler;
+
+ TestWithScope setup;
+ TestTarget target(setup, "//foo:bar", Target::EXECUTABLE);
+ target.sources().push_back(SourceFile("//a.cc"));
+ target.sources().push_back(SourceFile("//a.cc"));
+
+ EXPECT_FALSE(scheduler.is_failed());
+
+ std::ostringstream out;
+ NinjaBinaryTargetWriter writer(&target, out);
+ writer.Run();
+
+ // Should have issued an error.
+ EXPECT_TRUE(scheduler.is_failed());
+}
diff --git a/chromium/tools/gn/ninja_build_writer.cc b/chromium/tools/gn/ninja_build_writer.cc
new file mode 100644
index 00000000000..d28969084ee
--- /dev/null
+++ b/chromium/tools/gn/ninja_build_writer.cc
@@ -0,0 +1,406 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/ninja_build_writer.h"
+
+#include <stddef.h>
+
+#include <fstream>
+#include <map>
+
+#include "base/command_line.h"
+#include "base/files/file_util.h"
+#include "base/path_service.h"
+#include "base/process/process_handle.h"
+#include "base/strings/string_util.h"
+#include "base/strings/utf_string_conversions.h"
+#include "build/build_config.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/err.h"
+#include "tools/gn/escape.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/input_file_manager.h"
+#include "tools/gn/ninja_utils.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/switches.h"
+#include "tools/gn/target.h"
+#include "tools/gn/trace.h"
+
+#if defined(OS_WIN)
+#include <windows.h>
+#endif
+
+namespace {
+
+std::string GetSelfInvocationCommand(const BuildSettings* build_settings) {
+ base::FilePath executable;
+ PathService::Get(base::FILE_EXE, &executable);
+
+ base::CommandLine cmdline(executable.NormalizePathSeparatorsTo('/'));
+ cmdline.AppendArg("gen");
+ cmdline.AppendArg(build_settings->build_dir().value());
+ cmdline.AppendSwitchPath(std::string("--") + switches::kRoot,
+ build_settings->root_path());
+ // Successful automatic invocations shouldn't print output.
+ cmdline.AppendSwitch(std::string("-") + switches::kQuiet);
+
+ EscapeOptions escape_shell;
+ escape_shell.mode = ESCAPE_NINJA_COMMAND;
+#if defined(OS_WIN)
+ // The command line code quoting varies by platform. We have one string,
+ // possibly with spaces, that we want to quote. The Windows command line
+ // quotes again, so we don't want quoting. The Posix one doesn't.
+ escape_shell.inhibit_quoting = true;
+#endif
+
+ const base::CommandLine& our_cmdline =
+ *base::CommandLine::ForCurrentProcess();
+ const base::CommandLine::SwitchMap& switches = our_cmdline.GetSwitches();
+ for (base::CommandLine::SwitchMap::const_iterator i = switches.begin();
+ i != switches.end(); ++i) {
+ // Only write arguments we haven't already written. Always skip "args"
+ // since those will have been written to the file and will be used
+ // implicitly in the future. Keeping --args would mean changes to the file
+ // would be ignored.
+ if (i->first != switches::kQuiet &&
+ i->first != switches::kRoot &&
+ i->first != switches::kArgs) {
+ std::string escaped_value =
+ EscapeString(FilePathToUTF8(i->second), escape_shell, nullptr);
+ cmdline.AppendSwitchASCII(i->first, escaped_value);
+ }
+ }
+
+#if defined(OS_WIN)
+ return base::WideToUTF8(cmdline.GetCommandLineString());
+#else
+ return cmdline.GetCommandLineString();
+#endif
+}
+
+OutputFile GetTargetOutputFile(const Target* target) {
+ OutputFile result(target->dependency_output_file());
+
+ // The output files may have leading "./" so normalize those away.
+ NormalizePath(&result.value());
+ return result;
+}
+
+bool HasOutputIdenticalToLabel(const Target* target,
+ const std::string& short_name) {
+ if (target->output_type() != Target::ACTION &&
+ target->output_type() != Target::ACTION_FOREACH)
+ return false;
+
+ // Rather than convert all outputs to be relative to the build directory
+ // and then compare to the short name, convert the short name to look like a
+ // file in the output directory since this is only one conversion.
+ SourceFile short_name_as_source_file(
+ target->settings()->build_settings()->build_dir().value() + short_name);
+
+ std::vector<SourceFile> outputs_as_source;
+ target->action_values().GetOutputsAsSourceFiles(target, &outputs_as_source);
+ for (const SourceFile& output_as_source : outputs_as_source) {
+ if (output_as_source == short_name_as_source_file)
+ return true;
+ }
+ return false;
+}
+
+// Given an output that appears more than once, generates an error message
+// that describes the problem and which targets generate it.
+Err GetDuplicateOutputError(const std::vector<const Target*>& all_targets,
+ const OutputFile& bad_output) {
+ std::vector<const Target*> matches;
+ for (const Target* target : all_targets) {
+ for (const auto& output : target->computed_outputs()) {
+ if (output == bad_output) {
+ matches.push_back(target);
+ break;
+ }
+ }
+ }
+
+ // There should always be at least two targets generating this file for this
+ // function to be called in the first place.
+ DCHECK(matches.size() >= 2);
+ std::string matches_string;
+ for (const Target* target : matches)
+ matches_string += " " + target->label().GetUserVisibleName(false) + "\n";
+
+ Err result(matches[0]->defined_from(), "Duplicate output file.",
+ "Two or more targets generate the same output:\n " +
+ bad_output.value() + "\n\n"
+ "This is can often be fixed by changing one of the target names, or by \n"
+ "setting an output_name on one of them.\n"
+ "\nCollisions:\n" + matches_string);
+ for (size_t i = 1; i < matches.size(); i++)
+ result.AppendSubErr(Err(matches[i]->defined_from(), "Collision."));
+ return result;
+}
+
+} // namespace
+
+NinjaBuildWriter::NinjaBuildWriter(
+ const BuildSettings* build_settings,
+ const std::vector<const Settings*>& all_settings,
+ const Toolchain* default_toolchain,
+ const std::vector<const Target*>& default_toolchain_targets,
+ std::ostream& out,
+ std::ostream& dep_out)
+ : build_settings_(build_settings),
+ all_settings_(all_settings),
+ default_toolchain_(default_toolchain),
+ default_toolchain_targets_(default_toolchain_targets),
+ out_(out),
+ dep_out_(dep_out),
+ path_output_(build_settings->build_dir(),
+ build_settings->root_path_utf8(), ESCAPE_NINJA) {
+}
+
+NinjaBuildWriter::~NinjaBuildWriter() {
+}
+
+bool NinjaBuildWriter::Run(Err* err) {
+ WriteNinjaRules();
+ WriteLinkPool();
+ WriteSubninjas();
+ return WritePhonyAndAllRules(err);
+}
+
+// static
+bool NinjaBuildWriter::RunAndWriteFile(
+ const BuildSettings* build_settings,
+ const std::vector<const Settings*>& all_settings,
+ const Toolchain* default_toolchain,
+ const std::vector<const Target*>& default_toolchain_targets,
+ Err* err) {
+ ScopedTrace trace(TraceItem::TRACE_FILE_WRITE, "build.ninja");
+
+ base::FilePath ninja_file(build_settings->GetFullPath(
+ SourceFile(build_settings->build_dir().value() + "build.ninja")));
+ base::CreateDirectory(ninja_file.DirName());
+
+ std::ofstream file;
+ file.open(FilePathToUTF8(ninja_file).c_str(),
+ std::ios_base::out | std::ios_base::binary);
+ if (file.fail()) {
+ *err = Err(Location(), "Couldn't open build.ninja for writing");
+ return false;
+ }
+
+ std::ofstream depfile;
+ depfile.open((FilePathToUTF8(ninja_file) + ".d").c_str(),
+ std::ios_base::out | std::ios_base::binary);
+ if (depfile.fail()) {
+ *err = Err(Location(), "Couldn't open depfile for writing");
+ return false;
+ }
+
+ NinjaBuildWriter gen(build_settings, all_settings, default_toolchain,
+ default_toolchain_targets, file, depfile);
+ return gen.Run(err);
+}
+
+void NinjaBuildWriter::WriteNinjaRules() {
+ out_ << "rule gn\n";
+ out_ << " command = " << GetSelfInvocationCommand(build_settings_) << "\n";
+ out_ << " description = Regenerating ninja files\n\n";
+
+ // This rule will regenerate the ninja files when any input file has changed.
+ out_ << "build build.ninja: gn\n"
+ << " generator = 1\n"
+ << " depfile = build.ninja.d\n";
+
+ // Input build files. These go in the ".d" file. If we write them as
+ // dependencies in the .ninja file itself, ninja will expect the files to
+ // exist and will error if they don't. When files are listed in a depfile,
+ // missing files are ignored.
+ dep_out_ << "build.ninja:";
+ std::vector<base::FilePath> input_files;
+ g_scheduler->input_file_manager()->GetAllPhysicalInputFileNames(&input_files);
+
+ // Other files read by the build.
+ std::vector<base::FilePath> other_files = g_scheduler->GetGenDependencies();
+
+ // Sort the input files to order them deterministically.
+ // Additionally, remove duplicate filepaths that seem to creep in.
+ std::set<base::FilePath> fileset(input_files.begin(), input_files.end());
+ fileset.insert(other_files.begin(), other_files.end());
+
+ for (const auto& other_file : fileset)
+ dep_out_ << " " << FilePathToUTF8(other_file);
+
+ out_ << std::endl;
+}
+
+void NinjaBuildWriter::WriteLinkPool() {
+ out_ << "pool link_pool\n"
+ << " depth = " << default_toolchain_->concurrent_links() << std::endl
+ << std::endl;
+}
+
+void NinjaBuildWriter::WriteSubninjas() {
+ for (const auto& elem : all_settings_) {
+ out_ << "subninja ";
+ path_output_.WriteFile(out_, GetNinjaFileForToolchain(elem));
+ out_ << std::endl;
+ }
+ out_ << std::endl;
+}
+
+bool NinjaBuildWriter::WritePhonyAndAllRules(Err* err) {
+ std::string all_rules;
+
+ // Track rules as we generate them so we don't accidentally write a phony
+ // rule that collides with something else.
+ // GN internally generates an "all" target, so don't duplicate it.
+ std::set<std::string> written_rules;
+ written_rules.insert("all");
+
+ // Write phony rules for all uniquely-named targets in the default toolchain.
+ // Don't do other toolchains or we'll get naming conflicts, and if the name
+ // isn't unique, also skip it. The exception is for the toplevel targets
+ // which we also find.
+ std::map<std::string, int> small_name_count;
+ std::map<std::string, int> exe_count;
+ std::vector<const Target*> toplevel_targets;
+ base::hash_set<std::string> target_files;
+ for (const auto& target : default_toolchain_targets_) {
+ const Label& label = target->label();
+ small_name_count[label.name()]++;
+
+ // Look for targets with a name of the form
+ // dir = "//foo/", name = "foo"
+ // i.e. where the target name matches the top level directory. We will
+ // always write phony rules for these even if there is another target with
+ // the same short name.
+ const std::string& dir_string = label.dir().value();
+ if (dir_string.size() == label.name().size() + 3 && // Size matches.
+ dir_string[0] == '/' && dir_string[1] == '/' && // "//" at beginning.
+ dir_string[dir_string.size() - 1] == '/' && // "/" at end.
+ dir_string.compare(2, label.name().size(), label.name()) == 0)
+ toplevel_targets.push_back(target);
+
+ // Look for executables; later we will generate phony rules for them
+ // even if there are non-executable targets with the same name.
+ if (target->output_type() == Target::EXECUTABLE)
+ exe_count[label.name()]++;
+
+ // Add the files to the list of generated targets so we don't write phony
+ // rules that collide.
+ std::string target_file(target->dependency_output_file().value());
+ NormalizePath(&target_file);
+ written_rules.insert(target_file);
+ }
+
+ for (const auto& target : default_toolchain_targets_) {
+ const Label& label = target->label();
+ for (const auto& output : target->computed_outputs()) {
+ if (!target_files.insert(output.value()).second) {
+ *err = GetDuplicateOutputError(default_toolchain_targets_, output);
+ return false;
+ }
+ }
+
+ OutputFile target_file = GetTargetOutputFile(target);
+ // Write the long name "foo/bar:baz" for the target "//foo/bar:baz".
+ std::string long_name = label.GetUserVisibleName(false);
+ base::TrimString(long_name, "/", &long_name);
+ WritePhonyRule(target, target_file, long_name, &written_rules);
+
+ // Write the directory name with no target name if they match
+ // (e.g. "//foo/bar:bar" -> "foo/bar").
+ if (FindLastDirComponent(label.dir()) == label.name()) {
+ std::string medium_name = DirectoryWithNoLastSlash(label.dir());
+ base::TrimString(medium_name, "/", &medium_name);
+ // That may have generated a name the same as the short name of the
+ // target which we already wrote.
+ if (medium_name != label.name())
+ WritePhonyRule(target, target_file, medium_name, &written_rules);
+ }
+
+ // Write short names for targets which are either completely unique, or
+ // which are the only executable with that name in the default toolchain.
+ if (small_name_count[label.name()] == 1 ||
+ (target->output_type() == Target::EXECUTABLE &&
+ exe_count[label.name()] == 1)) {
+ // It's reasonable to generate output files in the root build directory
+ // with the same name as the target. Don't generate phony rules for
+ // these cases.
+ //
+ // All of this does not do the general checking of all target's outputs
+ // which may theoretically collide. But it's not very reasonable for
+ // a script target named "foo" to generate a file named "bar" with no
+ // extension in the root build directory while another target is named
+ // "bar". If this does occur, the user is likely to be confused when
+ // building "bar" that it builds foo anyway, so you probably just
+ // shouldn't do that.
+ //
+ // We should fix this however, and build up all generated script outputs
+ // and check everything against that. There are other edge cases that the
+ // current phony rule generator doesn't check. We may need to make a big
+ // set of every possible generated file in the build for this purpose.
+ if (!HasOutputIdenticalToLabel(target, label.name()))
+ WritePhonyRule(target, target_file, label.name(), &written_rules);
+ }
+
+ if (!all_rules.empty())
+ all_rules.append(" $\n ");
+ all_rules.append(target_file.value());
+ }
+
+ // Pick up phony rules for the toplevel targets with non-unique names (which
+ // would have been skipped in the above loop).
+ for (const auto& toplevel_target : toplevel_targets) {
+ if (small_name_count[toplevel_target->label().name()] > 1) {
+ WritePhonyRule(toplevel_target, toplevel_target->dependency_output_file(),
+ toplevel_target->label().name(), &written_rules);
+ }
+ }
+
+ // Figure out if the BUILD file wants to declare a custom "default"
+ // target (rather than building 'all' by default). By convention
+ // we use group("default") but it doesn't have to be a group.
+ bool default_target_exists = false;
+ for (const auto& target : default_toolchain_targets_) {
+ const Label& label = target->label();
+ if (label.dir().value() == "//" && label.name() == "default")
+ default_target_exists = true;
+ }
+
+ if (!all_rules.empty()) {
+ out_ << "\nbuild all: phony " << all_rules << std::endl;
+ }
+
+ if (default_target_exists) {
+ out_ << "default default" << std::endl;
+ } else if (!all_rules.empty()) {
+ out_ << "default all" << std::endl;
+ }
+
+ return true;
+}
+
+void NinjaBuildWriter::WritePhonyRule(const Target* target,
+ const OutputFile& target_file,
+ const std::string& phony_name,
+ std::set<std::string>* written_rules) {
+ if (target_file.value() == phony_name)
+ return; // No need for a phony rule.
+
+ if (written_rules->find(phony_name) != written_rules->end())
+ return; // Already exists.
+ written_rules->insert(phony_name);
+
+ EscapeOptions ninja_escape;
+ ninja_escape.mode = ESCAPE_NINJA;
+
+ // Escape for special chars Ninja will handle.
+ std::string escaped = EscapeString(phony_name, ninja_escape, nullptr);
+
+ out_ << "build " << escaped << ": phony ";
+ path_output_.WriteFile(out_, target_file);
+ out_ << std::endl;
+}
diff --git a/chromium/tools/gn/ninja_build_writer.h b/chromium/tools/gn/ninja_build_writer.h
new file mode 100644
index 00000000000..0d5b40eef70
--- /dev/null
+++ b/chromium/tools/gn/ninja_build_writer.h
@@ -0,0 +1,68 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_NINJA_BUILD_WRITER_H_
+#define TOOLS_GN_NINJA_BUILD_WRITER_H_
+
+#include <iosfwd>
+#include <set>
+#include <vector>
+
+#include "base/macros.h"
+#include "tools/gn/path_output.h"
+
+class BuildSettings;
+class Err;
+class Settings;
+class Target;
+class Toolchain;
+
+// Generates the toplevel "build.ninja" file. This references the individual
+// toolchain files and lists all input .gn files as dependencies of the
+// build itself.
+class NinjaBuildWriter {
+ public:
+ static bool RunAndWriteFile(
+ const BuildSettings* settings,
+ const std::vector<const Settings*>& all_settings,
+ const Toolchain* default_toolchain,
+ const std::vector<const Target*>& default_toolchain_targets,
+ Err* err);
+
+ NinjaBuildWriter(const BuildSettings* settings,
+ const std::vector<const Settings*>& all_settings,
+ const Toolchain* default_toolchain,
+ const std::vector<const Target*>& default_toolchain_targets,
+ std::ostream& out,
+ std::ostream& dep_out);
+ ~NinjaBuildWriter();
+
+ bool Run(Err* err);
+
+ private:
+ void WriteNinjaRules();
+ void WriteLinkPool();
+ void WriteSubninjas();
+ bool WritePhonyAndAllRules(Err* err);
+
+ // Writes a phony rule for the given target with the given name. Adds the new
+ // name to the given set. If the name is already in the set, does nothing.
+ void WritePhonyRule(const Target* target,
+ const OutputFile& target_file,
+ const std::string& phony_name,
+ std::set<std::string>* written_rules);
+
+ const BuildSettings* build_settings_;
+ std::vector<const Settings*> all_settings_;
+ const Toolchain* default_toolchain_;
+ std::vector<const Target*> default_toolchain_targets_;
+ std::ostream& out_;
+ std::ostream& dep_out_;
+ PathOutput path_output_;
+
+ DISALLOW_COPY_AND_ASSIGN(NinjaBuildWriter);
+};
+
+#endif // TOOLS_GN_NINJA_BUILD_WRITER_H_
+
diff --git a/chromium/tools/gn/ninja_build_writer_unittest.cc b/chromium/tools/gn/ninja_build_writer_unittest.cc
new file mode 100644
index 00000000000..f82dc835521
--- /dev/null
+++ b/chromium/tools/gn/ninja_build_writer_unittest.cc
@@ -0,0 +1,119 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <sstream>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/ninja_build_writer.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/target.h"
+#include "tools/gn/test_with_scope.h"
+
+TEST(NinjaBuildWriter, TwoTargets) {
+ Scheduler scheduler;
+ TestWithScope setup;
+ Err err;
+
+ Target target_foo(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+ target_foo.set_output_type(Target::ACTION);
+ target_foo.action_values().set_script(SourceFile("//foo/script.py"));
+ target_foo.action_values().outputs() = SubstitutionList::MakeForTest(
+ "//out/Debug/out1.out", "//out/Debug/out2.out");
+ target_foo.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target_foo.OnResolved(&err));
+
+ Target target_bar(setup.settings(), Label(SourceDir("//bar/"), "bar"));
+ target_bar.set_output_type(Target::ACTION);
+ target_bar.action_values().set_script(SourceFile("//bar/script.py"));
+ target_bar.action_values().outputs() = SubstitutionList::MakeForTest(
+ "//out/Debug/out3.out", "//out/Debug/out4.out");
+ target_bar.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target_bar.OnResolved(&err));
+
+ std::ostringstream ninja_out;
+ std::ostringstream depfile_out;
+ std::vector<const Settings*> all_settings = {setup.settings()};
+ std::vector<const Target*> targets = {&target_foo, &target_bar};
+ NinjaBuildWriter writer(setup.build_settings(), all_settings,
+ setup.toolchain(), targets, ninja_out, depfile_out);
+ ASSERT_TRUE(writer.Run(&err));
+
+ const char expected_rule_gn[] = "rule gn\n";
+ const char expected_build_ninja[] =
+ "build build.ninja: gn\n"
+ " generator = 1\n"
+ " depfile = build.ninja.d\n"
+ "\n";
+ const char expected_link_pool[] =
+ "pool link_pool\n"
+ " depth = 0\n"
+ "\n";
+ const char expected_toolchain[] =
+ "subninja toolchain.ninja\n"
+ "\n";
+ const char expected_targets[] =
+ "build foo$:bar: phony obj/foo/bar.stamp\n"
+ "build bar$:bar: phony obj/bar/bar.stamp\n"
+ "build bar: phony obj/bar/bar.stamp\n"
+ "\n";
+ const char expected_root_target[] =
+ "build all: phony obj/foo/bar.stamp $\n"
+ " obj/bar/bar.stamp\n"
+ "default all\n";
+ std::string out_str = ninja_out.str();
+#define EXPECT_SNIPPET(expected) \
+ EXPECT_NE(std::string::npos, out_str.find(expected)) << \
+ "Expected to find: " << expected << std::endl << \
+ "Within: " << out_str
+ EXPECT_SNIPPET(expected_rule_gn);
+ EXPECT_SNIPPET(expected_build_ninja);
+ EXPECT_SNIPPET(expected_link_pool);
+ EXPECT_SNIPPET(expected_toolchain);
+ EXPECT_SNIPPET(expected_targets);
+ EXPECT_SNIPPET(expected_root_target);
+#undef EXPECT_SNIPPET
+}
+
+TEST(NinjaBuildWriter, DuplicateOutputs) {
+ Scheduler scheduler;
+ TestWithScope setup;
+ Err err;
+
+ Target target_foo(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+ target_foo.set_output_type(Target::ACTION);
+ target_foo.action_values().set_script(SourceFile("//foo/script.py"));
+ target_foo.action_values().outputs() = SubstitutionList::MakeForTest(
+ "//out/Debug/out1.out", "//out/Debug/out2.out");
+ target_foo.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target_foo.OnResolved(&err));
+
+ Target target_bar(setup.settings(), Label(SourceDir("//bar/"), "bar"));
+ target_bar.set_output_type(Target::ACTION);
+ target_bar.action_values().set_script(SourceFile("//bar/script.py"));
+ target_bar.action_values().outputs() = SubstitutionList::MakeForTest(
+ "//out/Debug/out3.out", "//out/Debug/out2.out");
+ target_bar.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target_bar.OnResolved(&err));
+
+ std::ostringstream ninja_out;
+ std::ostringstream depfile_out;
+ std::vector<const Settings*> all_settings = { setup.settings() };
+ std::vector<const Target*> targets = { &target_foo, &target_bar };
+ NinjaBuildWriter writer(setup.build_settings(), all_settings,
+ setup.toolchain(), targets, ninja_out, depfile_out);
+ ASSERT_FALSE(writer.Run(&err));
+
+ const char expected_help_test[] =
+ "Two or more targets generate the same output:\n"
+ " out2.out\n"
+ "\n"
+ "This is can often be fixed by changing one of the target names, or by \n"
+ "setting an output_name on one of them.\n"
+ "\n"
+ "Collisions:\n"
+ " //foo:bar\n"
+ " //bar:bar\n";
+
+ EXPECT_EQ(expected_help_test, err.help_text());
+}
diff --git a/chromium/tools/gn/ninja_bundle_data_target_writer.cc b/chromium/tools/gn/ninja_bundle_data_target_writer.cc
new file mode 100644
index 00000000000..67b5a10e15b
--- /dev/null
+++ b/chromium/tools/gn/ninja_bundle_data_target_writer.cc
@@ -0,0 +1,19 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/ninja_bundle_data_target_writer.h"
+
+#include "tools/gn/output_file.h"
+
+NinjaBundleDataTargetWriter::NinjaBundleDataTargetWriter(const Target* target,
+ std::ostream& out)
+ : NinjaTargetWriter(target, out) {}
+
+NinjaBundleDataTargetWriter::~NinjaBundleDataTargetWriter() {}
+
+void NinjaBundleDataTargetWriter::Run() {
+ std::vector<OutputFile> files;
+ files.push_back(WriteInputDepsStampAndGetDep(std::vector<const Target*>()));
+ WriteStampForTarget(files, std::vector<OutputFile>());
+}
diff --git a/chromium/tools/gn/ninja_bundle_data_target_writer.h b/chromium/tools/gn/ninja_bundle_data_target_writer.h
new file mode 100644
index 00000000000..c097f67b591
--- /dev/null
+++ b/chromium/tools/gn/ninja_bundle_data_target_writer.h
@@ -0,0 +1,23 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_NINJA_BUNDLE_DATA_TARGET_WRITER_H_
+#define TOOLS_GN_NINJA_BUNDLE_DATA_TARGET_WRITER_H_
+
+#include "base/macros.h"
+#include "tools/gn/ninja_target_writer.h"
+
+// Writes a .ninja file for a bundle_data target type.
+class NinjaBundleDataTargetWriter : public NinjaTargetWriter {
+ public:
+ NinjaBundleDataTargetWriter(const Target* target, std::ostream& out);
+ ~NinjaBundleDataTargetWriter() override;
+
+ void Run() override;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(NinjaBundleDataTargetWriter);
+};
+
+#endif // TOOLS_GN_NINJA_BUNDLE_DATA_TARGET_WRITER_H_
diff --git a/chromium/tools/gn/ninja_copy_target_writer.cc b/chromium/tools/gn/ninja_copy_target_writer.cc
new file mode 100644
index 00000000000..b0313bec3f2
--- /dev/null
+++ b/chromium/tools/gn/ninja_copy_target_writer.cc
@@ -0,0 +1,119 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/ninja_copy_target_writer.h"
+
+#include "base/strings/string_util.h"
+#include "tools/gn/ninja_utils.h"
+#include "tools/gn/output_file.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/string_utils.h"
+#include "tools/gn/substitution_list.h"
+#include "tools/gn/substitution_writer.h"
+#include "tools/gn/target.h"
+#include "tools/gn/toolchain.h"
+
+NinjaCopyTargetWriter::NinjaCopyTargetWriter(const Target* target,
+ std::ostream& out)
+ : NinjaTargetWriter(target, out) {
+}
+
+NinjaCopyTargetWriter::~NinjaCopyTargetWriter() {
+}
+
+void NinjaCopyTargetWriter::Run() {
+ const Tool* copy_tool = target_->toolchain()->GetTool(Toolchain::TYPE_COPY);
+ if (!copy_tool) {
+ g_scheduler->FailWithError(Err(
+ nullptr, "Copy tool not defined",
+ "The toolchain " +
+ target_->toolchain()->label().GetUserVisibleName(false) +
+ "\n used by target " + target_->label().GetUserVisibleName(false) +
+ "\n doesn't define a \"copy\" tool."));
+ return;
+ }
+
+ const Tool* stamp_tool = target_->toolchain()->GetTool(Toolchain::TYPE_STAMP);
+ if (!stamp_tool) {
+ g_scheduler->FailWithError(Err(
+ nullptr, "Stamp tool not defined",
+ "The toolchain " +
+ target_->toolchain()->label().GetUserVisibleName(false) +
+ "\n used by target " + target_->label().GetUserVisibleName(false) +
+ "\n doesn't define a \"stamp\" tool."));
+ return;
+ }
+
+ // Figure out the substitutions used by the copy and stamp tools.
+ SubstitutionBits required_bits = copy_tool->substitution_bits();
+ required_bits.MergeFrom(stamp_tool->substitution_bits());
+
+ // General target-related substitutions needed by both tools.
+ WriteSharedVars(required_bits);
+
+ std::vector<OutputFile> output_files;
+ WriteCopyRules(&output_files);
+ out_ << std::endl;
+ WriteStampForTarget(output_files, std::vector<OutputFile>());
+}
+
+void NinjaCopyTargetWriter::WriteCopyRules(
+ std::vector<OutputFile>* output_files) {
+ CHECK(target_->action_values().outputs().list().size() == 1);
+ const SubstitutionList& output_subst_list =
+ target_->action_values().outputs();
+ CHECK_EQ(1u, output_subst_list.list().size())
+ << "Should have one entry exactly.";
+ const SubstitutionPattern& output_subst = output_subst_list.list()[0];
+
+ std::string tool_name =
+ GetNinjaRulePrefixForToolchain(settings_) +
+ Toolchain::ToolTypeToName(Toolchain::TYPE_COPY);
+
+ OutputFile input_dep =
+ WriteInputDepsStampAndGetDep(std::vector<const Target*>());
+
+ // Note that we don't write implicit deps for copy steps. "copy" only
+ // depends on the output files themselves, rather than having includes
+ // (the possibility of generated #includes is the main reason for implicit
+ // dependencies).
+ //
+ // It would seem that specifying implicit dependencies on the deps of the
+ // copy command would still be harmless. But Chrome implements copy tools
+ // as hard links (much faster) which don't change the timestamp. If the
+ // ninja rule looks like this:
+ // output: copy input | foo.stamp
+ // The copy will not make a new timestamp on the output file, but the
+ // foo.stamp file generated from a previous step will have a new timestamp.
+ // The copy rule will therefore look out-of-date to Ninja and the rule will
+ // get rebuilt.
+ //
+ // If this copy is copying a generated file, not listing the implicit
+ // dependency will be fine as long as the input to the copy is properly
+ // listed as the output from the step that generated it.
+ //
+ // Moreover, doing this assumes that the copy step is always a simple
+ // locally run command, so there is no need for a toolchain dependency.
+ //
+ // Note that there is the need in some cases for order-only dependencies
+ // where a command might need to make sure something else runs before it runs
+ // to avoid conflicts. Such cases should be avoided where possible, but
+ // sometimes that's not possible.
+ for (const auto& input_file : target_->sources()) {
+ OutputFile output_file =
+ SubstitutionWriter::ApplyPatternToSourceAsOutputFile(
+ target_->settings(), output_subst, input_file);
+ output_files->push_back(output_file);
+
+ out_ << "build ";
+ path_output_.WriteFile(out_, output_file);
+ out_ << ": " << tool_name << " ";
+ path_output_.WriteFile(out_, input_file);
+ if (!input_dep.value().empty()) {
+ out_ << " || ";
+ path_output_.WriteFile(out_, input_dep);
+ }
+ out_ << std::endl;
+ }
+}
diff --git a/chromium/tools/gn/ninja_copy_target_writer.h b/chromium/tools/gn/ninja_copy_target_writer.h
new file mode 100644
index 00000000000..9e1746ee9cc
--- /dev/null
+++ b/chromium/tools/gn/ninja_copy_target_writer.h
@@ -0,0 +1,29 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_NINJA_COPY_TARGET_WRITER_H_
+#define TOOLS_GN_NINJA_COPY_TARGET_WRITER_H_
+
+#include "base/macros.h"
+#include "tools/gn/ninja_target_writer.h"
+
+class Tool;
+
+// Writes a .ninja file for a copy target type.
+class NinjaCopyTargetWriter : public NinjaTargetWriter {
+ public:
+ NinjaCopyTargetWriter(const Target* target, std::ostream& out);
+ ~NinjaCopyTargetWriter() override;
+
+ void Run() override;
+
+ private:
+ // Writes the rules to copy the file(s), putting the computed output file
+ // name(s) into the given vector.
+ void WriteCopyRules(std::vector<OutputFile>* output_files);
+
+ DISALLOW_COPY_AND_ASSIGN(NinjaCopyTargetWriter);
+};
+
+#endif // TOOLS_GN_NINJA_COPY_TARGET_WRITER_H_
diff --git a/chromium/tools/gn/ninja_copy_target_writer_unittest.cc b/chromium/tools/gn/ninja_copy_target_writer_unittest.cc
new file mode 100644
index 00000000000..dd8c2113d62
--- /dev/null
+++ b/chromium/tools/gn/ninja_copy_target_writer_unittest.cc
@@ -0,0 +1,98 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <algorithm>
+#include <sstream>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/ninja_copy_target_writer.h"
+#include "tools/gn/target.h"
+#include "tools/gn/test_with_scope.h"
+
+// Tests multiple files with an output pattern and no toolchain dependency.
+TEST(NinjaCopyTargetWriter, Run) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+ target.set_output_type(Target::COPY_FILES);
+
+ target.sources().push_back(SourceFile("//foo/input1.txt"));
+ target.sources().push_back(SourceFile("//foo/input2.txt"));
+
+ target.action_values().outputs() =
+ SubstitutionList::MakeForTest("//out/Debug/{{source_name_part}}.out");
+
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaCopyTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected_linux[] =
+ "build input1.out: copy ../../foo/input1.txt\n"
+ "build input2.out: copy ../../foo/input2.txt\n"
+ "\n"
+ "build obj/foo/bar.stamp: stamp input1.out input2.out\n";
+ std::string out_str = out.str();
+ EXPECT_EQ(expected_linux, out_str);
+}
+
+// Tests a single file with no output pattern.
+TEST(NinjaCopyTargetWriter, ToolchainDeps) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+ target.set_output_type(Target::COPY_FILES);
+
+ target.sources().push_back(SourceFile("//foo/input1.txt"));
+
+ target.action_values().outputs() =
+ SubstitutionList::MakeForTest("//out/Debug/output.out");
+
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaCopyTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected_linux[] =
+ "build output.out: copy ../../foo/input1.txt\n"
+ "\n"
+ "build obj/foo/bar.stamp: stamp output.out\n";
+ std::string out_str = out.str();
+ EXPECT_EQ(expected_linux, out_str);
+}
+
+TEST(NinjaCopyTargetWriter, OrderOnlyDeps) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+ target.set_output_type(Target::COPY_FILES);
+ target.sources().push_back(SourceFile("//foo/input1.txt"));
+ target.action_values().outputs() =
+ SubstitutionList::MakeForTest("//out/Debug/{{source_name_part}}.out");
+ target.inputs().push_back(SourceFile("//foo/script.py"));
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaCopyTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected_linux[] =
+ "build input1.out: copy ../../foo/input1.txt || ../../foo/script.py\n"
+ "\n"
+ "build obj/foo/bar.stamp: stamp input1.out\n";
+ std::string out_str = out.str();
+ EXPECT_EQ(expected_linux, out_str);
+}
diff --git a/chromium/tools/gn/ninja_create_bundle_target_writer.cc b/chromium/tools/gn/ninja_create_bundle_target_writer.cc
new file mode 100644
index 00000000000..d85f2401ae5
--- /dev/null
+++ b/chromium/tools/gn/ninja_create_bundle_target_writer.cc
@@ -0,0 +1,119 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/ninja_create_bundle_target_writer.h"
+
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/ninja_utils.h"
+#include "tools/gn/output_file.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/substitution_writer.h"
+#include "tools/gn/target.h"
+#include "tools/gn/toolchain.h"
+
+namespace {
+
+void FailWithMissingToolError(Toolchain::ToolType tool, const Target* target) {
+ const std::string& tool_name = Toolchain::ToolTypeToName(tool);
+ g_scheduler->FailWithError(Err(
+ nullptr, tool_name + " tool not defined",
+ "The toolchain " +
+ target->toolchain()->label().GetUserVisibleName(false) + "\n"
+ "used by target " + target->label().GetUserVisibleName(false) + "\n"
+ "doesn't define a \"" + tool_name + "\" tool."));
+}
+
+} // namespace
+
+NinjaCreateBundleTargetWriter::NinjaCreateBundleTargetWriter(
+ const Target* target,
+ std::ostream& out)
+ : NinjaTargetWriter(target, out) {}
+
+NinjaCreateBundleTargetWriter::~NinjaCreateBundleTargetWriter() {}
+
+void NinjaCreateBundleTargetWriter::Run() {
+ if (!target_->toolchain()->GetTool(Toolchain::TYPE_COPY_BUNDLE_DATA)) {
+ FailWithMissingToolError(Toolchain::TYPE_COPY_BUNDLE_DATA, target_);
+ return;
+ }
+
+ if (!target_->toolchain()->GetTool(Toolchain::TYPE_COMPILE_XCASSETS)) {
+ FailWithMissingToolError(Toolchain::TYPE_COMPILE_XCASSETS, target_);
+ return;
+ }
+
+ if (!target_->toolchain()->GetTool(Toolchain::TYPE_STAMP)) {
+ FailWithMissingToolError(Toolchain::TYPE_STAMP, target_);
+ return;
+ }
+
+ std::vector<OutputFile> output_files;
+ OutputFile input_dep =
+ WriteInputDepsStampAndGetDep(std::vector<const Target*>());
+
+ for (const BundleFileRule& file_rule : target_->bundle_data().file_rules()) {
+ for (const SourceFile& source_file : file_rule.sources()) {
+ OutputFile output_file = file_rule.ApplyPatternToSourceAsOutputFile(
+ settings_, target_->bundle_data(), source_file);
+ output_files.push_back(output_file);
+
+ out_ << "build ";
+ path_output_.WriteFile(out_, output_file);
+ out_ << ": "
+ << GetNinjaRulePrefixForToolchain(settings_)
+ << Toolchain::ToolTypeToName(Toolchain::TYPE_COPY_BUNDLE_DATA)
+ << " ";
+ path_output_.WriteFile(out_, source_file);
+ if (!input_dep.value().empty()) {
+ out_ << " || ";
+ path_output_.WriteFile(out_, input_dep);
+ }
+ out_ << std::endl;
+ }
+ }
+
+ if (!target_->bundle_data().asset_catalog_sources().empty()) {
+ OutputFile output_file(
+ settings_->build_settings(),
+ target_->bundle_data().GetCompiledAssetCatalogPath());
+ output_files.push_back(output_file);
+
+ out_ << "build ";
+ path_output_.WriteFile(out_, output_file);
+ out_ << ": "
+ << GetNinjaRulePrefixForToolchain(settings_)
+ << Toolchain::ToolTypeToName(Toolchain::TYPE_COMPILE_XCASSETS);
+
+ std::set<SourceFile> asset_catalog_bundles;
+ for (const auto& source : target_->bundle_data().asset_catalog_sources()) {
+ SourceFile asset_catalog_bundle;
+ CHECK(IsSourceFileFromAssetCatalog(source, &asset_catalog_bundle));
+ if (asset_catalog_bundles.find(asset_catalog_bundle) !=
+ asset_catalog_bundles.end())
+ continue;
+ out_ << " ";
+ path_output_.WriteFile(out_, asset_catalog_bundle);
+ asset_catalog_bundles.insert(asset_catalog_bundle);
+ }
+
+ out_ << " |";
+ for (const auto& source : target_->bundle_data().asset_catalog_sources()) {
+ out_ << " ";
+ path_output_.WriteFile(out_, source);
+ }
+ if (!input_dep.value().empty()) {
+ out_ << " || ";
+ path_output_.WriteFile(out_, input_dep);
+ }
+ out_ << std::endl;
+ }
+
+ out_ << std::endl;
+
+ std::vector<OutputFile> order_only_deps;
+ for (const auto& pair : target_->data_deps())
+ order_only_deps.push_back(pair.ptr->dependency_output_file());
+ WriteStampForTarget(output_files, order_only_deps);
+}
diff --git a/chromium/tools/gn/ninja_create_bundle_target_writer.h b/chromium/tools/gn/ninja_create_bundle_target_writer.h
new file mode 100644
index 00000000000..42b900f6bc2
--- /dev/null
+++ b/chromium/tools/gn/ninja_create_bundle_target_writer.h
@@ -0,0 +1,23 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_NINJA_CREATE_BUNDLE_TARGET_WRITER_H_
+#define TOOLS_GN_NINJA_CREATE_BUNDLE_TARGET_WRITER_H_
+
+#include "base/macros.h"
+#include "tools/gn/ninja_target_writer.h"
+
+// Writes a .ninja file for a bundle_data target type.
+class NinjaCreateBundleTargetWriter : public NinjaTargetWriter {
+ public:
+ NinjaCreateBundleTargetWriter(const Target* target, std::ostream& out);
+ ~NinjaCreateBundleTargetWriter() override;
+
+ void Run() override;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(NinjaCreateBundleTargetWriter);
+};
+
+#endif // TOOLS_GN_NINJA_CREATE_BUNDLE_TARGET_WRITER_H_
diff --git a/chromium/tools/gn/ninja_create_bundle_target_writer_unittest.cc b/chromium/tools/gn/ninja_create_bundle_target_writer_unittest.cc
new file mode 100644
index 00000000000..88e6fbab34d
--- /dev/null
+++ b/chromium/tools/gn/ninja_create_bundle_target_writer_unittest.cc
@@ -0,0 +1,173 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/ninja_create_bundle_target_writer.h"
+
+#include <algorithm>
+#include <sstream>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/target.h"
+#include "tools/gn/test_with_scope.h"
+
+namespace {
+
+void SetupBundleDataDir(BundleData* bundle_data, const std::string& root_dir) {
+ bundle_data->root_dir().assign(root_dir + "/bar.bundle");
+ bundle_data->resources_dir().assign(bundle_data->root_dir() + "/Resources");
+ bundle_data->executable_dir().assign(bundle_data->root_dir() + "/Executable");
+ bundle_data->plugins_dir().assign(bundle_data->root_dir() + "/PlugIns");
+}
+
+} // namespace
+
+// Tests multiple files with an output pattern.
+TEST(NinjaCreateBundleTargetWriter, Run) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+ Target target(setup.settings(), Label(SourceDir("//baz/"), "bar"));
+ target.set_output_type(Target::CREATE_BUNDLE);
+
+ SetupBundleDataDir(&target.bundle_data(), "//out/Debug");
+
+ std::vector<SourceFile> sources;
+ sources.push_back(SourceFile("//foo/input1.txt"));
+ sources.push_back(SourceFile("//foo/input2.txt"));
+ target.bundle_data().file_rules().push_back(BundleFileRule(
+ sources, SubstitutionPattern::MakeForTest(
+ "{{bundle_resources_dir}}/{{source_file_part}}")));
+
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaCreateBundleTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected[] =
+ "build bar.bundle/Resources/input1.txt: copy_bundle_data "
+ "../../foo/input1.txt\n"
+ "build bar.bundle/Resources/input2.txt: copy_bundle_data "
+ "../../foo/input2.txt\n"
+ "\n"
+ "build obj/baz/bar.stamp: stamp "
+ "bar.bundle/Resources/input1.txt "
+ "bar.bundle/Resources/input2.txt\n";
+ std::string out_str = out.str();
+ EXPECT_EQ(expected, out_str);
+}
+
+// Tests multiple files from asset catalog.
+TEST(NinjaCreateBundleTargetWriter, AssetCatalog) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+ Target target(setup.settings(), Label(SourceDir("//baz/"), "bar"));
+ target.set_output_type(Target::CREATE_BUNDLE);
+
+ SetupBundleDataDir(&target.bundle_data(), "//out/Debug");
+
+ std::vector<SourceFile>& asset_catalog_sources =
+ target.bundle_data().asset_catalog_sources();
+ asset_catalog_sources.push_back(
+ SourceFile("//foo/Foo.xcassets/foo.imageset/Contents.json"));
+ asset_catalog_sources.push_back(
+ SourceFile("//foo/Foo.xcassets/foo.imageset/FooIcon-29.png"));
+ asset_catalog_sources.push_back(
+ SourceFile("//foo/Foo.xcassets/foo.imageset/FooIcon-29@2x.png"));
+ asset_catalog_sources.push_back(
+ SourceFile("//foo/Foo.xcassets/foo.imageset/FooIcon-29@3x.png"));
+
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaCreateBundleTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected[] =
+ "build bar.bundle/Resources/Assets.car: compile_xcassets "
+ "../../foo/Foo.xcassets | "
+ "../../foo/Foo.xcassets/foo.imageset/Contents.json "
+ "../../foo/Foo.xcassets/foo.imageset/FooIcon-29.png "
+ "../../foo/Foo.xcassets/foo.imageset/FooIcon-29@2x.png "
+ "../../foo/Foo.xcassets/foo.imageset/FooIcon-29@3x.png\n"
+ "\n"
+ "build obj/baz/bar.stamp: stamp bar.bundle/Resources/Assets.car\n";
+ std::string out_str = out.str();
+ EXPECT_EQ(expected, out_str);
+}
+
+// Tests complex target with multiple bundle_data sources, including
+// some asset catalog.
+TEST(NinjaCreateBundleTargetWriter, OrderOnlyDeps) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+ Target target(setup.settings(), Label(SourceDir("//baz/"), "bar"));
+ target.set_output_type(Target::CREATE_BUNDLE);
+
+ SetupBundleDataDir(&target.bundle_data(), "//out/Debug");
+
+ std::vector<SourceFile> sources1;
+ sources1.push_back(SourceFile("//foo/input1.txt"));
+ sources1.push_back(SourceFile("//foo/input2.txt"));
+ target.bundle_data().file_rules().push_back(BundleFileRule(
+ sources1, SubstitutionPattern::MakeForTest(
+ "{{bundle_resources_dir}}/{{source_file_part}}")));
+
+ std::vector<SourceFile> sources2;
+ sources2.push_back(SourceFile("//qux/Info.plist"));
+ target.bundle_data().file_rules().push_back(BundleFileRule(
+ sources2,
+ SubstitutionPattern::MakeForTest("{{bundle_root_dir}}/Info.plist")));
+
+ std::vector<SourceFile> empty_source;
+ target.bundle_data().file_rules().push_back(BundleFileRule(
+ empty_source, SubstitutionPattern::MakeForTest(
+ "{{bundle_plugins_dir}}/{{source_file_part}}")));
+
+ std::vector<SourceFile>& asset_catalog_sources =
+ target.bundle_data().asset_catalog_sources();
+ asset_catalog_sources.push_back(
+ SourceFile("//foo/Foo.xcassets/foo.imageset/Contents.json"));
+ asset_catalog_sources.push_back(
+ SourceFile("//foo/Foo.xcassets/foo.imageset/FooIcon-29.png"));
+ asset_catalog_sources.push_back(
+ SourceFile("//foo/Foo.xcassets/foo.imageset/FooIcon-29@2x.png"));
+ asset_catalog_sources.push_back(
+ SourceFile("//foo/Foo.xcassets/foo.imageset/FooIcon-29@3x.png"));
+
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaCreateBundleTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected[] =
+ "build bar.bundle/Resources/input1.txt: copy_bundle_data "
+ "../../foo/input1.txt\n"
+ "build bar.bundle/Resources/input2.txt: copy_bundle_data "
+ "../../foo/input2.txt\n"
+ "build bar.bundle/Info.plist: copy_bundle_data ../../qux/Info.plist\n"
+ "build bar.bundle/Resources/Assets.car: compile_xcassets "
+ "../../foo/Foo.xcassets | "
+ "../../foo/Foo.xcassets/foo.imageset/Contents.json "
+ "../../foo/Foo.xcassets/foo.imageset/FooIcon-29.png "
+ "../../foo/Foo.xcassets/foo.imageset/FooIcon-29@2x.png "
+ "../../foo/Foo.xcassets/foo.imageset/FooIcon-29@3x.png\n"
+ "\n"
+ "build obj/baz/bar.stamp: stamp "
+ "bar.bundle/Resources/input1.txt "
+ "bar.bundle/Resources/input2.txt "
+ "bar.bundle/Info.plist "
+ "bar.bundle/Resources/Assets.car\n";
+ std::string out_str = out.str();
+ EXPECT_EQ(expected, out_str);
+}
diff --git a/chromium/tools/gn/ninja_group_target_writer.cc b/chromium/tools/gn/ninja_group_target_writer.cc
new file mode 100644
index 00000000000..c298413e0db
--- /dev/null
+++ b/chromium/tools/gn/ninja_group_target_writer.cc
@@ -0,0 +1,34 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/ninja_group_target_writer.h"
+
+#include "base/strings/string_util.h"
+#include "tools/gn/deps_iterator.h"
+#include "tools/gn/output_file.h"
+#include "tools/gn/string_utils.h"
+#include "tools/gn/target.h"
+
+NinjaGroupTargetWriter::NinjaGroupTargetWriter(const Target* target,
+ std::ostream& out)
+ : NinjaTargetWriter(target, out) {
+}
+
+NinjaGroupTargetWriter::~NinjaGroupTargetWriter() {
+}
+
+void NinjaGroupTargetWriter::Run() {
+ // A group rule just generates a stamp file with dependencies on each of
+ // the deps and data_deps in the group.
+ std::vector<OutputFile> output_files;
+ for (const auto& pair : target_->GetDeps(Target::DEPS_LINKED))
+ output_files.push_back(pair.ptr->dependency_output_file());
+
+ std::vector<OutputFile> data_output_files;
+ const LabelTargetVector& data_deps = target_->data_deps();
+ for (const auto& pair : data_deps)
+ data_output_files.push_back(pair.ptr->dependency_output_file());
+
+ WriteStampForTarget(output_files, data_output_files);
+}
diff --git a/chromium/tools/gn/ninja_group_target_writer.h b/chromium/tools/gn/ninja_group_target_writer.h
new file mode 100644
index 00000000000..66e5f043155
--- /dev/null
+++ b/chromium/tools/gn/ninja_group_target_writer.h
@@ -0,0 +1,23 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_NINJA_GROUP_TARGET_WRITER_H_
+#define TOOLS_GN_NINJA_GROUP_TARGET_WRITER_H_
+
+#include "base/macros.h"
+#include "tools/gn/ninja_target_writer.h"
+
+// Writes a .ninja file for a group target type.
+class NinjaGroupTargetWriter : public NinjaTargetWriter {
+ public:
+ NinjaGroupTargetWriter(const Target* target, std::ostream& out);
+ ~NinjaGroupTargetWriter() override;
+
+ void Run() override;
+
+ private:
+ DISALLOW_COPY_AND_ASSIGN(NinjaGroupTargetWriter);
+};
+
+#endif // TOOLS_GN_NINJA_GROUP_TARGET_WRITER_H_
diff --git a/chromium/tools/gn/ninja_group_target_writer_unittest.cc b/chromium/tools/gn/ninja_group_target_writer_unittest.cc
new file mode 100644
index 00000000000..c9d0ab4553d
--- /dev/null
+++ b/chromium/tools/gn/ninja_group_target_writer_unittest.cc
@@ -0,0 +1,52 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/ninja_group_target_writer.h"
+#include "tools/gn/target.h"
+#include "tools/gn/test_with_scope.h"
+
+TEST(NinjaGroupTargetWriter, Run) {
+ TestWithScope setup;
+ Err err;
+
+ setup.build_settings()->SetBuildDir(SourceDir("//out/Debug/"));
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+
+ target.set_output_type(Target::GROUP);
+ target.visibility().SetPublic();
+
+ Target dep(setup.settings(), Label(SourceDir("//foo/"), "dep"));
+ dep.set_output_type(Target::ACTION);
+ dep.visibility().SetPublic();
+ dep.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(dep.OnResolved(&err));
+
+ Target dep2(setup.settings(), Label(SourceDir("//foo/"), "dep2"));
+ dep2.set_output_type(Target::ACTION);
+ dep2.visibility().SetPublic();
+ dep2.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(dep2.OnResolved(&err));
+
+ Target datadep(setup.settings(), Label(SourceDir("//foo/"), "datadep"));
+ datadep.set_output_type(Target::ACTION);
+ datadep.visibility().SetPublic();
+ datadep.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(datadep.OnResolved(&err));
+
+ target.public_deps().push_back(LabelTargetPair(&dep));
+ target.public_deps().push_back(LabelTargetPair(&dep2));
+ target.data_deps().push_back(LabelTargetPair(&datadep));
+
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ std::ostringstream out;
+ NinjaGroupTargetWriter writer(&target, out);
+ writer.Run();
+
+ const char expected[] =
+ "build obj/foo/bar.stamp: stamp obj/foo/dep.stamp obj/foo/dep2.stamp || obj/foo/datadep.stamp\n";
+ EXPECT_EQ(expected, out.str());
+}
diff --git a/chromium/tools/gn/ninja_target_writer.cc b/chromium/tools/gn/ninja_target_writer.cc
new file mode 100644
index 00000000000..486930bd2ff
--- /dev/null
+++ b/chromium/tools/gn/ninja_target_writer.cc
@@ -0,0 +1,289 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/ninja_target_writer.h"
+
+#include <sstream>
+
+#include "base/files/file_util.h"
+#include "base/strings/string_util.h"
+#include "tools/gn/err.h"
+#include "tools/gn/escape.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/ninja_action_target_writer.h"
+#include "tools/gn/ninja_binary_target_writer.h"
+#include "tools/gn/ninja_bundle_data_target_writer.h"
+#include "tools/gn/ninja_copy_target_writer.h"
+#include "tools/gn/ninja_create_bundle_target_writer.h"
+#include "tools/gn/ninja_group_target_writer.h"
+#include "tools/gn/ninja_utils.h"
+#include "tools/gn/output_file.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/string_utils.h"
+#include "tools/gn/substitution_writer.h"
+#include "tools/gn/target.h"
+#include "tools/gn/trace.h"
+
+// Caches the target's settings and configures the path writer to emit
+// Ninja-escaped paths relative to the build directory.
+NinjaTargetWriter::NinjaTargetWriter(const Target* target,
+ std::ostream& out)
+ : settings_(target->settings()),
+ target_(target),
+ out_(out),
+ path_output_(settings_->build_settings()->build_dir(),
+ settings_->build_settings()->root_path_utf8(),
+ ESCAPE_NINJA) {
+}
+
+NinjaTargetWriter::~NinjaTargetWriter() {
+}
+
+// static
+// Dispatches to the writer subclass matching the target's output type,
+// renders the target's .ninja file into memory, and writes it to disk only
+// if its contents changed (to avoid needlessly dirtying Ninja's mtimes).
+void NinjaTargetWriter::RunAndWriteFile(const Target* target) {
+ const Settings* settings = target->settings();
+
+ ScopedTrace trace(TraceItem::TRACE_FILE_WRITE,
+ target->label().GetUserVisibleName(false));
+ trace.SetToolchain(settings->toolchain_label());
+
+ base::FilePath ninja_file(settings->build_settings()->GetFullPath(
+ GetNinjaFileForTarget(target)));
+
+ if (g_scheduler->verbose_logging())
+ g_scheduler->Log("Writing", FilePathToUTF8(ninja_file));
+
+ base::CreateDirectory(ninja_file.DirName());
+
+ // It's ridiculously faster to write to a string and then write that to
+ // disk in one operation than to use an fstream here.
+ std::stringstream file;
+
+ // Call out to the correct sub-type of writer.
+ if (target->output_type() == Target::BUNDLE_DATA) {
+ NinjaBundleDataTargetWriter writer(target, file);
+ writer.Run();
+ } else if (target->output_type() == Target::CREATE_BUNDLE) {
+ NinjaCreateBundleTargetWriter writer(target, file);
+ writer.Run();
+ } else if (target->output_type() == Target::COPY_FILES) {
+ NinjaCopyTargetWriter writer(target, file);
+ writer.Run();
+ } else if (target->output_type() == Target::ACTION ||
+ target->output_type() == Target::ACTION_FOREACH) {
+ NinjaActionTargetWriter writer(target, file);
+ writer.Run();
+ } else if (target->output_type() == Target::GROUP) {
+ NinjaGroupTargetWriter writer(target, file);
+ writer.Run();
+ } else if (target->IsBinary()) {
+ NinjaBinaryTargetWriter writer(target, file);
+ writer.Run();
+ } else {
+ CHECK(0) << "Output type of target not handled.";
+ }
+
+ WriteFileIfChanged(ninja_file, file.str(), nullptr);
+}
+
+// Emits one "name = value" Ninja variable assignment for the given
+// substitution, escaping the value so Ninja parses it literally.
+void NinjaTargetWriter::WriteEscapedSubstitution(SubstitutionType type) {
+ EscapeOptions opts;
+ opts.mode = ESCAPE_NINJA;
+
+ out_ << kSubstitutionNinjaNames[type] << " = ";
+ EscapeStringToStream(out_,
+ SubstitutionWriter::GetTargetSubstitution(target_, type),
+ opts);
+ out_ << std::endl;
+}
+
+// Writes the Ninja variables shared by all tool types (label, gen/out dirs,
+// output name), but only those flagged as used in |bits| so unused variables
+// don't bloat the .ninja file.
+void NinjaTargetWriter::WriteSharedVars(const SubstitutionBits& bits) {
+ bool written_anything = false;
+
+ // Target label.
+ if (bits.used[SUBSTITUTION_LABEL]) {
+ WriteEscapedSubstitution(SUBSTITUTION_LABEL);
+ written_anything = true;
+ }
+
+ // Target label name.
+ if (bits.used[SUBSTITUTION_LABEL_NAME]) {
+ WriteEscapedSubstitution(SUBSTITUTION_LABEL_NAME);
+ written_anything = true;
+ }
+
+ // Root gen dir.
+ if (bits.used[SUBSTITUTION_ROOT_GEN_DIR]) {
+ WriteEscapedSubstitution(SUBSTITUTION_ROOT_GEN_DIR);
+ written_anything = true;
+ }
+
+ // Root out dir.
+ if (bits.used[SUBSTITUTION_ROOT_OUT_DIR]) {
+ WriteEscapedSubstitution(SUBSTITUTION_ROOT_OUT_DIR);
+ written_anything = true;
+ }
+
+ // Target gen dir.
+ if (bits.used[SUBSTITUTION_TARGET_GEN_DIR]) {
+ WriteEscapedSubstitution(SUBSTITUTION_TARGET_GEN_DIR);
+ written_anything = true;
+ }
+
+ // Target out dir.
+ if (bits.used[SUBSTITUTION_TARGET_OUT_DIR]) {
+ WriteEscapedSubstitution(SUBSTITUTION_TARGET_OUT_DIR);
+ written_anything = true;
+ }
+
+ // Target output name.
+ if (bits.used[SUBSTITUTION_TARGET_OUTPUT_NAME]) {
+ WriteEscapedSubstitution(SUBSTITUTION_TARGET_OUTPUT_NAME);
+ written_anything = true;
+ }
+
+ // If we wrote any vars, separate them from the rest of the file that follows
+ // with a blank line.
+ if (written_anything)
+ out_ << std::endl;
+}
+
+// Collects the target's implicit input dependencies (inputs, action scripts,
+// hard deps, toolchain deps, plus |extra_hard_deps|). When there is exactly
+// one, returns it directly; otherwise writes a ".inputdeps.stamp" rule to the
+// stream and returns the stamp. Returns an empty OutputFile when there are
+// no input dependencies at all.
+OutputFile NinjaTargetWriter::WriteInputDepsStampAndGetDep(
+ const std::vector<const Target*>& extra_hard_deps) const {
+ CHECK(target_->toolchain())
+ << "Toolchain not set on target "
+ << target_->label().GetUserVisibleName(true);
+
+ // ----------
+ // Collect all input files that are input deps of this target. Knowing the
+ // number before writing allows us to either skip writing the input deps
+ // stamp or optimize it. Use pointers to avoid copies here.
+ std::vector<const SourceFile*> input_deps_sources;
+ input_deps_sources.reserve(32);
+
+ // Actions get implicit dependencies on the script itself.
+ if (target_->output_type() == Target::ACTION ||
+ target_->output_type() == Target::ACTION_FOREACH)
+ input_deps_sources.push_back(&target_->action_values().script());
+
+ // Input files.
+ for (const auto& input : target_->inputs())
+ input_deps_sources.push_back(&input);
+
+ // For an action (where we run a script only once) the sources are the same
+ // as the inputs. For action_foreach, the sources will be operated on
+ // separately so don't handle them here.
+ if (target_->output_type() == Target::ACTION) {
+ for (const auto& source : target_->sources())
+ input_deps_sources.push_back(&source);
+ }
+
+ // ----------
+ // Collect all target input dependencies of this target as was done for the
+ // files above.
+ std::vector<const Target*> input_deps_targets;
+ input_deps_targets.reserve(32);
+
+ // Hard dependencies that are direct or indirect dependencies.
+ // These are large (up to 100s), hence why we check the (usually small)
+ // extra hard deps against this set below rather than the other way around.
+ const std::set<const Target*>& hard_deps(target_->recursive_hard_deps());
+ for (const Target* target : hard_deps)
+ input_deps_targets.push_back(target);
+
+ // Extra hard dependencies passed in. These are usually empty or small, and
+ // we don't want to duplicate the explicit hard deps of the target.
+ for (const Target* target : extra_hard_deps) {
+ if (!hard_deps.count(target))
+ input_deps_targets.push_back(target);
+ }
+
+ // Toolchain dependencies. These must be resolved before doing anything.
+ // This just writes all toolchain deps for simplicity. If we find that
+ // toolchains often have more than one dependency, we could consider writing
+ // a toolchain-specific stamp file and only include the stamp here.
+ // Note that these are usually empty/small.
+ const LabelTargetVector& toolchain_deps = target_->toolchain()->deps();
+ for (const auto& toolchain_dep : toolchain_deps) {
+ // This could theoretically duplicate dependencies already in the list,
+ // but it shouldn't happen in practice, is inconvenient to check for,
+ // and only results in harmless redundant dependencies listed.
+ input_deps_targets.push_back(toolchain_dep.ptr);
+ }
+
+ // ----------
+ // Write the outputs.
+
+ if (input_deps_sources.size() + input_deps_targets.size() == 0)
+ return OutputFile(); // No input dependencies.
+
+ // If we're only generating one input dependency, return it directly instead
+ // of writing a stamp file for it.
+ if (input_deps_sources.size() == 1 && input_deps_targets.size() == 0)
+ return OutputFile(settings_->build_settings(), *input_deps_sources[0]);
+ if (input_deps_sources.size() == 0 && input_deps_targets.size() == 1) {
+ const OutputFile& dep = input_deps_targets[0]->dependency_output_file();
+ DCHECK(!dep.value().empty());
+ return dep;
+ }
+
+ // Make a stamp file.
+ OutputFile input_stamp_file(
+ RebasePath(GetTargetOutputDir(target_).value(),
+ settings_->build_settings()->build_dir(),
+ settings_->build_settings()->root_path_utf8()));
+ input_stamp_file.value().append(target_->label().name());
+ input_stamp_file.value().append(".inputdeps.stamp");
+
+ out_ << "build ";
+ path_output_.WriteFile(out_, input_stamp_file);
+ out_ << ": "
+ << GetNinjaRulePrefixForToolchain(settings_)
+ << Toolchain::ToolTypeToName(Toolchain::TYPE_STAMP);
+
+ // File input deps.
+ for (const SourceFile* source : input_deps_sources) {
+ out_ << " ";
+ path_output_.WriteFile(out_, *source);
+ }
+
+ // Target input deps. Sort by label so the output is deterministic (otherwise
+ // some of the targets will have gone through std::sets which will have
+ // sorted them by pointer).
+ std::sort(
+ input_deps_targets.begin(), input_deps_targets.end(),
+ [](const Target* a, const Target* b) { return a->label() < b->label(); });
+ for (const auto& dep : input_deps_targets) {
+ DCHECK(!dep->dependency_output_file().value().empty());
+ out_ << " ";
+ path_output_.WriteFile(out_, dep->dependency_output_file());
+ }
+
+ out_ << "\n";
+ return input_stamp_file;
+}
+
+// Writes the target's final stamp rule covering |files|, with
+// |order_only_deps| after the "||" separator. The target's dependency
+// output file must already be a ".stamp" file.
+void NinjaTargetWriter::WriteStampForTarget(
+ const std::vector<OutputFile>& files,
+ const std::vector<OutputFile>& order_only_deps) {
+ const OutputFile& stamp_file = target_->dependency_output_file();
+
+ // First validate that the target's dependency is a stamp file. Otherwise,
+ // we shouldn't have gotten here!
+ CHECK(base::EndsWith(stamp_file.value(), ".stamp",
+ base::CompareCase::INSENSITIVE_ASCII))
+ << "Output should end in \".stamp\" for stamp file output. Instead got: "
+ << "\"" << stamp_file.value() << "\"";
+
+ out_ << "build ";
+ path_output_.WriteFile(out_, stamp_file);
+
+ out_ << ": "
+ << GetNinjaRulePrefixForToolchain(settings_)
+ << Toolchain::ToolTypeToName(Toolchain::TYPE_STAMP);
+ path_output_.WriteFiles(out_, files);
+
+ if (!order_only_deps.empty()) {
+ out_ << " ||";
+ path_output_.WriteFiles(out_, order_only_deps);
+ }
+ out_ << std::endl;
+}
diff --git a/chromium/tools/gn/ninja_target_writer.h b/chromium/tools/gn/ninja_target_writer.h
new file mode 100644
index 00000000000..0248b0788d7
--- /dev/null
+++ b/chromium/tools/gn/ninja_target_writer.h
@@ -0,0 +1,63 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_NINJA_TARGET_WRITER_H_
+#define TOOLS_GN_NINJA_TARGET_WRITER_H_
+
+#include <iosfwd>
+
+#include "base/macros.h"
+#include "tools/gn/path_output.h"
+#include "tools/gn/substitution_type.h"
+
+class FileTemplate;
+class OutputFile;
+class Settings;
+class Target;
+struct SubstitutionBits;
+
+// Generates one target's ".ninja" file. The toplevel "build.ninja" file is
+// generated by the NinjaBuildWriter.
+class NinjaTargetWriter {
+ public:
+ NinjaTargetWriter(const Target* target, std::ostream& out);
+ virtual ~NinjaTargetWriter();
+
+ static void RunAndWriteFile(const Target* target);
+
+ virtual void Run() = 0;
+
+ protected:
+ // Writes out the substitution values that are shared between the different
+ // types of tools (target gen dir, target label, etc.). Only the substitutions
+ // identified by the given bits will be written.
+ void WriteSharedVars(const SubstitutionBits& bits);
+
+ // Writes to the output stream a stamp rule for input dependencies, and
+ // returns the file to be appended to source rules that encodes the
+ // order-only dependencies for the current target. The returned OutputFile
+ // will be empty if there are no implicit dependencies and no extra target
+ // dependencies passed in.
+ OutputFile WriteInputDepsStampAndGetDep(
+ const std::vector<const Target*>& extra_hard_deps) const;
+
+ // Writes to the output file a final stamp rule for the target that stamps
+ // the given list of files. This function assumes the stamp is for the target
+ // as a whole so the stamp file is set as the target's dependency output.
+ void WriteStampForTarget(const std::vector<OutputFile>& deps,
+ const std::vector<OutputFile>& order_only_deps);
+
+ const Settings* settings_; // Non-owning.
+ const Target* target_; // Non-owning.
+ std::ostream& out_;
+ PathOutput path_output_;
+
+ private:
+ void WriteCopyRules();
+ void WriteEscapedSubstitution(SubstitutionType type);
+
+ DISALLOW_COPY_AND_ASSIGN(NinjaTargetWriter);
+};
+
+#endif // TOOLS_GN_NINJA_TARGET_WRITER_H_
diff --git a/chromium/tools/gn/ninja_target_writer_unittest.cc b/chromium/tools/gn/ninja_target_writer_unittest.cc
new file mode 100644
index 00000000000..ccb9c7a3ceb
--- /dev/null
+++ b/chromium/tools/gn/ninja_target_writer_unittest.cc
@@ -0,0 +1,142 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <sstream>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/ninja_target_writer.h"
+#include "tools/gn/target.h"
+#include "tools/gn/test_with_scope.h"
+
+namespace {
+
+// Concrete NinjaTargetWriter with a no-op Run(), used to exercise the
+// protected WriteInputDepsStampAndGetDep() helper directly.
+class TestingNinjaTargetWriter : public NinjaTargetWriter {
+ public:
+ // The |toolchain| parameter is accepted for test readability but is not
+ // used; the base class derives settings from |target|.
+ TestingNinjaTargetWriter(const Target* target,
+ const Toolchain* toolchain,
+ std::ostream& out)
+ : NinjaTargetWriter(target, out) {
+ }
+
+ void Run() override {}
+
+ // Make this public so the test can call it.
+ OutputFile WriteInputDepsStampAndGetDep(
+ const std::vector<const Target*>& extra_hard_deps) {
+ return NinjaTargetWriter::WriteInputDepsStampAndGetDep(extra_hard_deps);
+ }
+};
+
+} // namespace
+
+// Covers the three interesting cases of WriteInputDepsStampAndGetDep:
+// exactly one dep (returned directly, nothing written), multiple deps
+// (stamp rule written), and an action whose sources count as inputs.
+TEST(NinjaTargetWriter, WriteInputDepsStampAndGetDep) {
+ TestWithScope setup;
+ Err err;
+
+ // Make a base target that's a hard dep (action).
+ Target base_target(setup.settings(), Label(SourceDir("//foo/"), "base"));
+ base_target.set_output_type(Target::ACTION);
+ base_target.visibility().SetPublic();
+ base_target.SetToolchain(setup.toolchain());
+ base_target.action_values().set_script(SourceFile("//foo/script.py"));
+
+ // Dependent target that also includes a source prerequisite (should get
+ // included) and a source (should not be included).
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "target"));
+ target.set_output_type(Target::EXECUTABLE);
+ target.visibility().SetPublic();
+ target.SetToolchain(setup.toolchain());
+ target.inputs().push_back(SourceFile("//foo/input.txt"));
+ target.sources().push_back(SourceFile("//foo/source.txt"));
+ target.public_deps().push_back(LabelTargetPair(&base_target));
+
+ // Dependent action to test that action sources will be treated the same as
+ // inputs.
+ Target action(setup.settings(), Label(SourceDir("//foo/"), "action"));
+ action.set_output_type(Target::ACTION);
+ action.visibility().SetPublic();
+ action.SetToolchain(setup.toolchain());
+ action.action_values().set_script(SourceFile("//foo/script.py"));
+ action.sources().push_back(SourceFile("//foo/action_source.txt"));
+ action.public_deps().push_back(LabelTargetPair(&target));
+
+ ASSERT_TRUE(base_target.OnResolved(&err));
+ ASSERT_TRUE(target.OnResolved(&err));
+ ASSERT_TRUE(action.OnResolved(&err));
+
+ // Input deps for the base (should be only the script itself).
+ {
+ std::ostringstream stream;
+ TestingNinjaTargetWriter writer(&base_target, setup.toolchain(), stream);
+ OutputFile dep =
+ writer.WriteInputDepsStampAndGetDep(std::vector<const Target*>());
+
+ // Since there is only one dependency, it should just be returned and
+ // nothing written to the stream.
+ EXPECT_EQ("../../foo/script.py", dep.value());
+ EXPECT_EQ("", stream.str());
+ }
+
+ // Input deps for the target (should depend on the base).
+ {
+ std::ostringstream stream;
+ TestingNinjaTargetWriter writer(&target, setup.toolchain(), stream);
+ OutputFile dep =
+ writer.WriteInputDepsStampAndGetDep(std::vector<const Target*>());
+
+ // Since there is more than one dependency, a stamp file will be returned
+ // and the rule for the stamp file will be written to the stream.
+ EXPECT_EQ("obj/foo/target.inputdeps.stamp", dep.value());
+ EXPECT_EQ("build obj/foo/target.inputdeps.stamp: stamp "
+ "../../foo/input.txt obj/foo/base.stamp\n",
+ stream.str());
+ }
+
+ // Input deps for the action, which should depend on the base since it's a
+ // hard dep that is an (indirect) dependency, as well as on the action
+ // source.
+ {
+ std::ostringstream stream;
+ TestingNinjaTargetWriter writer(&action, setup.toolchain(), stream);
+ OutputFile dep =
+ writer.WriteInputDepsStampAndGetDep(std::vector<const Target*>());
+
+ EXPECT_EQ("obj/foo/action.inputdeps.stamp", dep.value());
+ EXPECT_EQ("build obj/foo/action.inputdeps.stamp: stamp ../../foo/script.py "
+ "../../foo/action_source.txt obj/foo/base.stamp\n",
+ stream.str());
+ }
+}
+
+// Tests WriteInputDepsStampAndGetDep when toolchain deps are present. The
+// single toolchain dep should be returned directly (no stamp rule written).
+TEST(NinjaTargetWriter, WriteInputDepsStampAndGetDepWithToolchainDeps) {
+ TestWithScope setup;
+ Err err;
+
+ // Toolchain dependency. Here we make a target in the same toolchain for
+ // simplicity, but in real life (using the Builder) this would be rejected
+ // because it would be a circular dependency (the target depends on its
+ // toolchain, and the toolchain depends on this target).
+ Target toolchain_dep_target(setup.settings(),
+ Label(SourceDir("//foo/"), "setup"));
+ toolchain_dep_target.set_output_type(Target::ACTION);
+ toolchain_dep_target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(toolchain_dep_target.OnResolved(&err));
+ setup.toolchain()->deps().push_back(LabelTargetPair(&toolchain_dep_target));
+
+ // Make a binary target.
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "target"));
+ target.set_output_type(Target::EXECUTABLE);
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ std::ostringstream stream;
+ TestingNinjaTargetWriter writer(&target, setup.toolchain(), stream);
+ OutputFile dep =
+ writer.WriteInputDepsStampAndGetDep(std::vector<const Target*>());
+
+ // There is exactly one input dependency (the toolchain dep's stamp), so it
+ // is returned directly and no stamp rule is written to the stream.
+ EXPECT_EQ("obj/foo/setup.stamp", dep.value());
+ EXPECT_EQ("", stream.str());
+}
diff --git a/chromium/tools/gn/ninja_toolchain_writer.cc b/chromium/tools/gn/ninja_toolchain_writer.cc
new file mode 100644
index 00000000000..e7459ebb3cf
--- /dev/null
+++ b/chromium/tools/gn/ninja_toolchain_writer.cc
@@ -0,0 +1,142 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/ninja_toolchain_writer.h"
+
+#include <fstream>
+
+#include "base/files/file_util.h"
+#include "base/strings/stringize_macros.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/ninja_utils.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/substitution_writer.h"
+#include "tools/gn/target.h"
+#include "tools/gn/toolchain.h"
+#include "tools/gn/trace.h"
+
+namespace {
+
+const char kIndent[] = " ";
+
+} // namespace
+
+// Holds the toolchain and the list of targets built with it; the path
+// writer emits Ninja-escaped paths relative to the build directory.
+NinjaToolchainWriter::NinjaToolchainWriter(
+ const Settings* settings,
+ const Toolchain* toolchain,
+ const std::vector<const Target*>& targets,
+ std::ostream& out)
+ : settings_(settings),
+ toolchain_(toolchain),
+ targets_(targets),
+ out_(out),
+ path_output_(settings_->build_settings()->build_dir(),
+ settings_->build_settings()->root_path_utf8(),
+ ESCAPE_NINJA) {
+}
+
+NinjaToolchainWriter::~NinjaToolchainWriter() {
+}
+
+// Emits the rule definitions first, then the subninja include for each
+// target's .ninja file.
+void NinjaToolchainWriter::Run() {
+ WriteRules();
+ WriteSubninjas();
+}
+
+// static
+// Opens the toolchain.ninja file for this toolchain and writes its rules and
+// subninja statements. Returns false if the file could not be opened.
+bool NinjaToolchainWriter::RunAndWriteFile(
+ const Settings* settings,
+ const Toolchain* toolchain,
+ const std::vector<const Target*>& targets) {
+ base::FilePath ninja_file(settings->build_settings()->GetFullPath(
+ GetNinjaFileForToolchain(settings)));
+ ScopedTrace trace(TraceItem::TRACE_FILE_WRITE, FilePathToUTF8(ninja_file));
+
+ base::CreateDirectory(ninja_file.DirName());
+
+ // Binary mode keeps line endings consistent across platforms.
+ std::ofstream file;
+ file.open(FilePathToUTF8(ninja_file).c_str(),
+ std::ios_base::out | std::ios_base::binary);
+ if (file.fail())
+ return false;
+
+ NinjaToolchainWriter gen(settings, toolchain, targets, file);
+ gen.Run();
+ return true;
+}
+
+// Writes a Ninja rule for every tool defined by this toolchain, each name
+// prefixed so rules from different toolchains don't collide.
+void NinjaToolchainWriter::WriteRules() {
+ std::string rule_prefix = GetNinjaRulePrefixForToolchain(settings_);
+
+ // Iterate every tool type; toolchains only define a subset, so skip nulls.
+ for (int i = Toolchain::TYPE_NONE + 1; i < Toolchain::TYPE_NUMTYPES; i++) {
+ Toolchain::ToolType tool_type = static_cast<Toolchain::ToolType>(i);
+ const Tool* tool = toolchain_->GetTool(tool_type);
+ if (tool)
+ WriteToolRule(tool_type, tool, rule_prefix);
+ }
+ out_ << std::endl;
+}
+
+// Writes one "rule" block: command, description, response-file settings,
+// deps format, link pool, and restat, as configured on |tool|.
+void NinjaToolchainWriter::WriteToolRule(const Toolchain::ToolType type,
+ const Tool* tool,
+ const std::string& rule_prefix) {
+ out_ << "rule " << rule_prefix << Toolchain::ToolTypeToName(type)
+ << std::endl;
+
+ // Rules explicitly include shell commands, so don't try to escape.
+ EscapeOptions options;
+ options.mode = ESCAPE_NINJA_PREFORMATTED_COMMAND;
+
+ CHECK(!tool->command().empty()) << "Command should not be empty";
+ WriteRulePattern("command", tool->command(), options);
+
+ WriteRulePattern("description", tool->description(), options);
+ WriteRulePattern("rspfile", tool->rspfile(), options);
+ WriteRulePattern("rspfile_content", tool->rspfile_content(), options);
+
+ if (tool->depsformat() == Tool::DEPS_GCC) {
+ // GCC-style deps require a depfile.
+ if (!tool->depfile().empty()) {
+ WriteRulePattern("depfile", tool->depfile(), options);
+ out_ << kIndent << "deps = gcc" << std::endl;
+ }
+ } else if (tool->depsformat() == Tool::DEPS_MSVC) {
+ // MSVC deps don't have a depfile.
+ out_ << kIndent << "deps = msvc" << std::endl;
+ }
+
+ // The link pool applies to linker tools. Don't count TYPE_ALINK since
+ // static libraries are not generally intensive to write.
+ if (type == Toolchain::TYPE_SOLINK ||
+ type == Toolchain::TYPE_SOLINK_MODULE ||
+ type == Toolchain::TYPE_LINK) {
+ out_ << kIndent << "pool = link_pool\n";
+ }
+
+ // restat lets Ninja skip downstream rebuilds when the output is unchanged.
+ if (tool->restat())
+ out_ << kIndent << "restat = 1" << std::endl;
+}
+
+// Writes one indented "name = pattern" rule variable; empty patterns are
+// skipped entirely so the rule stays minimal.
+void NinjaToolchainWriter::WriteRulePattern(const char* name,
+ const SubstitutionPattern& pattern,
+ const EscapeOptions& options) {
+ if (pattern.empty())
+ return;
+ out_ << kIndent << name << " = ";
+ SubstitutionWriter::WriteWithNinjaVariables(pattern, options, out_);
+ out_ << std::endl;
+}
+
+// Writes subninja commands for each generated target so the toolchain file
+// pulls in every per-target .ninja file.
+void NinjaToolchainWriter::WriteSubninjas() {
+ for (const auto& target : targets_) {
+ OutputFile ninja_file(target->settings()->build_settings(),
+ GetNinjaFileForTarget(target));
+ out_ << "subninja ";
+ path_output_.WriteFile(out_, ninja_file);
+ out_ << std::endl;
+ }
+ out_ << std::endl;
+}
diff --git a/chromium/tools/gn/ninja_toolchain_writer.h b/chromium/tools/gn/ninja_toolchain_writer.h
new file mode 100644
index 00000000000..a1e39c3c52e
--- /dev/null
+++ b/chromium/tools/gn/ninja_toolchain_writer.h
@@ -0,0 +1,61 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_NINJA_TOOLCHAIN_WRITER_H_
+#define TOOLS_GN_NINJA_TOOLCHAIN_WRITER_H_
+
+#include <iosfwd>
+#include <set>
+#include <string>
+#include <vector>
+
+#include "base/gtest_prod_util.h"
+#include "base/macros.h"
+#include "tools/gn/path_output.h"
+#include "tools/gn/toolchain.h"
+
+class BuildSettings;
+struct EscapeOptions;
+class Settings;
+class Target;
+class Tool;
+
+// Writes one toolchain.ninja file: the rule definitions for the toolchain's
+// tools followed by subninja includes of every target built with it.
+class NinjaToolchainWriter {
+ public:
+ // Takes the settings for the toolchain, as well as the list of all targets
+ // associated with the toolchain. Returns false if the output file could
+ // not be opened.
+ static bool RunAndWriteFile(const Settings* settings,
+ const Toolchain* toolchain,
+ const std::vector<const Target*>& targets);
+
+ private:
+ FRIEND_TEST_ALL_PREFIXES(NinjaToolchainWriter, WriteToolRule);
+
+ NinjaToolchainWriter(const Settings* settings,
+ const Toolchain* toolchain,
+ const std::vector<const Target*>& targets,
+ std::ostream& out);
+ ~NinjaToolchainWriter();
+
+ // Writes rules then subninjas into out_.
+ void Run();
+
+ void WriteRules();
+ void WriteToolRule(Toolchain::ToolType type,
+ const Tool* tool,
+ const std::string& rule_prefix);
+ void WriteRulePattern(const char* name,
+ const SubstitutionPattern& pattern,
+ const EscapeOptions& options);
+ void WriteSubninjas();
+
+ const Settings* settings_; // Non-owning.
+ const Toolchain* toolchain_; // Non-owning.
+ std::vector<const Target*> targets_;
+ std::ostream& out_;
+ PathOutput path_output_;
+
+ DISALLOW_COPY_AND_ASSIGN(NinjaToolchainWriter);
+};
+
+#endif // TOOLS_GN_NINJA_TOOLCHAIN_WRITER_H_
diff --git a/chromium/tools/gn/ninja_toolchain_writer_unittest.cc b/chromium/tools/gn/ninja_toolchain_writer_unittest.cc
new file mode 100644
index 00000000000..fc52f7da7be
--- /dev/null
+++ b/chromium/tools/gn/ninja_toolchain_writer_unittest.cc
@@ -0,0 +1,31 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <sstream>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/ninja_toolchain_writer.h"
+#include "tools/gn/test_with_scope.h"
+
+// Verifies that a single tool rule is emitted with the prefixed rule name
+// and the expanded command line.
+TEST(NinjaToolchainWriter, WriteToolRule) {
+ TestWithScope setup;
+
+ std::ostringstream stream;
+
+ NinjaToolchainWriter writer(setup.settings(), setup.toolchain(),
+ std::vector<const Target*>(), stream);
+ writer.WriteToolRule(Toolchain::TYPE_CC,
+ setup.toolchain()->GetTool(Toolchain::TYPE_CC),
+ std::string("prefix_"));
+
+ EXPECT_EQ(
+ "rule prefix_cc\n"
+ " command = cc ${in} ${cflags} ${cflags_c} ${defines} ${include_dirs} "
+ "-o ${out}\n",
+ stream.str());
+}
diff --git a/chromium/tools/gn/ninja_utils.cc b/chromium/tools/gn/ninja_utils.cc
new file mode 100644
index 00000000000..60d4d364384
--- /dev/null
+++ b/chromium/tools/gn/ninja_utils.cc
@@ -0,0 +1,27 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/ninja_utils.h"
+
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/target.h"
+
+// Returns the per-target .ninja file, named after the target's label inside
+// its output directory (e.g. "obj/foo/bar.ninja").
+SourceFile GetNinjaFileForTarget(const Target* target) {
+ return SourceFile(GetTargetOutputDir(target).value() +
+ target->label().name() + ".ninja");
+}
+
+// Returns the "toolchain.ninja" file inside the toolchain's output directory.
+SourceFile GetNinjaFileForToolchain(const Settings* settings) {
+ return SourceFile(GetToolchainOutputDir(settings).value() +
+ "toolchain.ninja");
+}
+
+std::string GetNinjaRulePrefixForToolchain(const Settings* settings) {
+ // Don't prefix the default toolchain so it looks prettier, prefix everything
+ // else.
+ if (settings->is_default())
+ return std::string(); // Default toolchain has no prefix.
+ return settings->toolchain_label().name() + "_";
+}
diff --git a/chromium/tools/gn/ninja_utils.h b/chromium/tools/gn/ninja_utils.h
new file mode 100644
index 00000000000..60ae6b27a66
--- /dev/null
+++ b/chromium/tools/gn/ninja_utils.h
@@ -0,0 +1,25 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_NINJA_UTILS_H_
+#define TOOLS_GN_NINJA_UTILS_H_
+
+#include <string>
+
+class Settings;
+class SourceFile;
+class Target;
+
+// Example: "base/base.ninja". The string version will not be escaped, and
+// will always have slashes for path separators.
+SourceFile GetNinjaFileForTarget(const Target* target);
+
+// Returns the name of the root .ninja file for the given toolchain.
+SourceFile GetNinjaFileForToolchain(const Settings* settings);
+
+// Returns the prefix applied to the Ninja rules in a given toolchain so they
+// don't collide with rules from other toolchains.
+std::string GetNinjaRulePrefixForToolchain(const Settings* settings);
+
+#endif // TOOLS_GN_NINJA_UTILS_H_
diff --git a/chromium/tools/gn/ninja_writer.cc b/chromium/tools/gn/ninja_writer.cc
new file mode 100644
index 00000000000..154f017a435
--- /dev/null
+++ b/chromium/tools/gn/ninja_writer.cc
@@ -0,0 +1,114 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/ninja_writer.h"
+
+#include "tools/gn/builder.h"
+#include "tools/gn/loader.h"
+#include "tools/gn/location.h"
+#include "tools/gn/ninja_build_writer.h"
+#include "tools/gn/ninja_toolchain_writer.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/target.h"
+
+// Stores non-owning pointers; all real work happens in the static
+// RunAndWrite* entry points.
+NinjaWriter::NinjaWriter(const BuildSettings* build_settings,
+ Builder* builder)
+ : build_settings_(build_settings),
+ builder_(builder) {
+}
+
+NinjaWriter::~NinjaWriter() {
+}
+
+// static
+// Writes all toolchain.ninja files and then the root build.ninja. Returns
+// false on the first failure.
+bool NinjaWriter::RunAndWriteFiles(const BuildSettings* build_settings,
+ Builder* builder,
+ Err* err) {
+ NinjaWriter writer(build_settings, builder);
+
+ std::vector<const Settings*> all_settings;
+ std::vector<const Target*> default_targets;
+ if (!writer.WriteToolchains(&all_settings, &default_targets, err))
+ return false;
+ return writer.WriteRootBuildfiles(all_settings, default_targets, err);
+}
+
+// static
+// Writes only the toolchain.ninja files (no root build.ninja); the settings
+// for each written toolchain are appended to |all_settings|.
+bool NinjaWriter::RunAndWriteToolchainFiles(
+ const BuildSettings* build_settings,
+ Builder* builder,
+ std::vector<const Settings*>* all_settings,
+ Err* err) {
+ NinjaWriter writer(build_settings, builder);
+ std::vector<const Target*> default_targets;
+ return writer.WriteToolchains(all_settings, &default_targets, err);
+}
+
+// Groups all generated targets by toolchain, writes one toolchain.ninja per
+// group, and reports the default-toolchain targets via |default_targets|.
+// NOTE(review): failures are printed directly to stdout rather than set on
+// |err|, so |err| is never populated here — confirm callers expect that.
+bool NinjaWriter::WriteToolchains(std::vector<const Settings*>* all_settings,
+ std::vector<const Target*>* default_targets,
+ Err* err) {
+ // Categorize all targets by toolchain.
+ typedef std::map<Label, std::vector<const Target*> > CategorizedMap;
+ CategorizedMap categorized;
+
+ std::vector<const BuilderRecord*> all_records = builder_->GetAllRecords();
+ for (const auto& all_record : all_records) {
+ if (all_record->type() == BuilderRecord::ITEM_TARGET &&
+ all_record->should_generate()) {
+ categorized[all_record->label().GetToolchainLabel()].push_back(
+ all_record->item()->AsTarget());
+ }
+ }
+ if (categorized.empty()) {
+ Err(Location(), "No targets.",
+ "I could not find any targets to write, so I'm doing nothing.")
+ .PrintToStdout();
+ return false;
+ }
+
+ for (auto& i : categorized) {
+ // Sort targets so that they are in a deterministic order.
+ std::sort(i.second.begin(), i.second.end(),
+ [](const Target* a, const Target* b) {
+ return a->label() < b->label();
+ });
+ }
+
+ Label default_label = builder_->loader()->GetDefaultToolchain();
+
+ // Write out the toolchain buildfiles, and also accumulate the set of
+ // all settings and find the list of targets in the default toolchain.
+ for (const auto& i : categorized) {
+ const Settings* settings =
+ builder_->loader()->GetToolchainSettings(i.first);
+ const Toolchain* toolchain = builder_->GetToolchain(i.first);
+
+ all_settings->push_back(settings);
+
+ if (!NinjaToolchainWriter::RunAndWriteFile(settings, toolchain, i.second)) {
+ Err(Location(),
+ "Couldn't open toolchain buildfile(s) for writing").PrintToStdout();
+ return false;
+ }
+ }
+
+ *default_targets = categorized[default_label];
+ return true;
+}
+
+// Writes the root build.ninja, using the default toolchain taken from the
+// first Settings object (all Settings share the same default toolchain).
+bool NinjaWriter::WriteRootBuildfiles(
+ const std::vector<const Settings*>& all_settings,
+ const std::vector<const Target*>& default_targets,
+ Err* err) {
+ // All Settings objects should have the same default toolchain, and there
+ // should always be at least one settings object in the build.
+ CHECK(!all_settings.empty());
+ const Toolchain* default_toolchain =
+ builder_->GetToolchain(all_settings[0]->default_toolchain_label());
+
+ // Write the root buildfile.
+ return NinjaBuildWriter::RunAndWriteFile(build_settings_, all_settings,
+ default_toolchain, default_targets,
+ err);
+}
diff --git a/chromium/tools/gn/ninja_writer.h b/chromium/tools/gn/ninja_writer.h
new file mode 100644
index 00000000000..fa4a433fbfd
--- /dev/null
+++ b/chromium/tools/gn/ninja_writer.h
@@ -0,0 +1,53 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_NINJA_WRITER_H_
+#define TOOLS_GN_NINJA_WRITER_H_
+
+#include <set>
+#include <string>
+#include <vector>
+
+#include "base/macros.h"
+
+class Builder;
+class BuildSettings;
+class Err;
+class Settings;
+class Target;
+
+class NinjaWriter {
+ public:
+ // On failure will populate |err| and will return false.
+ static bool RunAndWriteFiles(const BuildSettings* build_settings,
+ Builder* builder,
+ Err* err);
+
+ // Writes only the toolchain.ninja files, skipping the root buildfile. The
+ // settings for the files written will be added to the vector.
+ static bool RunAndWriteToolchainFiles(
+ const BuildSettings* build_settings,
+ Builder* builder,
+ std::vector<const Settings*>* all_settings,
+ Err* err);
+
+ private:
+ NinjaWriter(const BuildSettings* build_settings, Builder* builder);
+ ~NinjaWriter();
+
+ bool WriteToolchains(
+ std::vector<const Settings*>* all_settings,
+ std::vector<const Target*>* default_targets,
+ Err* err);
+ bool WriteRootBuildfiles(const std::vector<const Settings*>& all_settings,
+ const std::vector<const Target*>& default_targets,
+ Err* err);
+
+ const BuildSettings* build_settings_;
+ Builder* builder_;
+
+ DISALLOW_COPY_AND_ASSIGN(NinjaWriter);
+};
+
+#endif // TOOLS_GN_NINJA_WRITER_H_
diff --git a/chromium/tools/gn/operators.cc b/chromium/tools/gn/operators.cc
new file mode 100644
index 00000000000..6b3f7c716a9
--- /dev/null
+++ b/chromium/tools/gn/operators.cc
@@ -0,0 +1,587 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/operators.h"
+
+#include <stddef.h>
+
+#include "base/strings/string_number_conversions.h"
+#include "tools/gn/err.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/token.h"
+#include "tools/gn/value.h"
+
+namespace {
+
+const char kSourcesName[] = "sources";
+
+// Applies the sources assignment filter from the given scope to each element
+// of source (can be a list or a string), appending it to dest if it doesn't
+// match.
+void AppendFilteredSourcesToValue(const Scope* scope,
+ const Value& source,
+ Value* dest) {
+ const PatternList* filter = scope->GetSourcesAssignmentFilter();
+
+ if (source.type() == Value::STRING) {
+ if (!filter || filter->is_empty() ||
+ !filter->MatchesValue(source))
+ dest->list_value().push_back(source);
+ return;
+ }
+ if (source.type() != Value::LIST) {
+ // Any non-list and non-string being added to a list can just get appended,
+ // we're not going to filter it.
+ dest->list_value().push_back(source);
+ return;
+ }
+
+ if (!filter || filter->is_empty()) {
+ // No filter, append everything.
+ for (const auto& source_entry : source.list_value())
+ dest->list_value().push_back(source_entry);
+ return;
+ }
+
+ // Note: don't reserve() the dest vector here since that actually hurts
+ // the allocation pattern when the build script is doing multiple small
+ // additions.
+ for (const auto& source_entry : source.list_value()) {
+ if (!filter->MatchesValue(source_entry))
+ dest->list_value().push_back(source_entry);
+ }
+}
+
+Value GetValueOrFillError(const BinaryOpNode* op_node,
+ const ParseNode* node,
+ const char* name,
+ Scope* scope,
+ Err* err) {
+ Value value = node->Execute(scope, err);
+ if (err->has_error())
+ return Value();
+ if (value.type() == Value::NONE) {
+ *err = Err(op_node->op(),
+ "Operator requires a value.",
+ "This thing on the " + std::string(name) +
+ " does not evaluate to a value.");
+ err->AppendRange(node->GetRange());
+ return Value();
+ }
+ return value;
+}
+
+void RemoveMatchesFromList(const BinaryOpNode* op_node,
+ Value* list,
+ const Value& to_remove,
+ Err* err) {
+ std::vector<Value>& v = list->list_value();
+ switch (to_remove.type()) {
+ case Value::BOOLEAN:
+ case Value::INTEGER: // Filter out the individual int/string.
+ case Value::STRING: {
+ bool found_match = false;
+ for (size_t i = 0; i < v.size(); /* nothing */) {
+ if (v[i] == to_remove) {
+ found_match = true;
+ v.erase(v.begin() + i);
+ } else {
+ i++;
+ }
+ }
+ if (!found_match) {
+ *err = Err(to_remove.origin()->GetRange(), "Item not found",
+ "You were trying to remove " + to_remove.ToString(true) +
+ "\nfrom the list but it wasn't there.");
+ }
+ break;
+ }
+
+ case Value::LIST: // Filter out each individual thing.
+ for (const auto& elem : to_remove.list_value()) {
+ // TODO(brettw) if the nested item is a list, we may want to search
+      // for the literal list rather than remove the items in it.
+ RemoveMatchesFromList(op_node, list, elem, err);
+ if (err->has_error())
+ return;
+ }
+ break;
+
+ default:
+ break;
+ }
+}
+
+// Assignment -----------------------------------------------------------------
+
+// We return a null value from this rather than the result of doing the append.
+// See ValuePlusEquals for rationale.
+Value ExecuteEquals(Scope* scope,
+ const BinaryOpNode* op_node,
+ const Token& left,
+ const Value& right,
+ Err* err) {
+ const Value* old_value = scope->GetValue(left.value(), false);
+ if (old_value) {
+ // Throw an error when overwriting a nonempty list with another nonempty
+ // list item. This is to detect the case where you write
+ // defines = ["FOO"]
+ // and you overwrote inherited ones, when instead you mean to append:
+ // defines += ["FOO"]
+ if (old_value->type() == Value::LIST &&
+ !old_value->list_value().empty() &&
+ right.type() == Value::LIST &&
+ !right.list_value().empty()) {
+ *err = Err(op_node->left()->GetRange(), "Replacing nonempty list.",
+ std::string("This overwrites a previously-defined nonempty list ") +
+ "(length " +
+ base::IntToString(static_cast<int>(old_value->list_value().size()))
+ + ").");
+ err->AppendSubErr(Err(*old_value, "for previous definition",
+ "with another one (length " +
+ base::IntToString(static_cast<int>(right.list_value().size())) +
+ "). Did you mean " +
+ "\"+=\" to append instead? If you\nreally want to do this, do\n " +
+ left.value().as_string() + " = []\nbefore reassigning."));
+ return Value();
+ }
+ }
+ if (err->has_error())
+ return Value();
+
+ if (right.type() == Value::LIST && left.value() == kSourcesName) {
+ // Assigning to sources, filter the list. Here we do the filtering and
+ // copying in one step to save an extra list copy (the lists may be
+ // long).
+ Value* set_value = scope->SetValue(left.value(),
+ Value(op_node, Value::LIST), op_node);
+ set_value->list_value().reserve(right.list_value().size());
+ AppendFilteredSourcesToValue(scope, right, set_value);
+ } else {
+ // Normal value set, just copy it.
+ scope->SetValue(left.value(), right, op_node->right());
+ }
+ return Value();
+}
+
+// allow_type_conversion indicates if we're allowed to change the type of the
+// left value. This is set to true when doing +, and false when doing +=.
+//
+// Note that we return Value() from here, which is different than C++. This
+// means you can't do clever things like foo = [ bar += baz ] to simultaneously
+// append to and use a value. This is basically never needed in our build
+// scripts and is just as likely an error as the intended behavior, and it also
+// involves a copy of the value when it's returned. Many times we're appending
+// to large lists, and copying the value to discard it for the next statement
+// is very wasteful.
+void ValuePlusEquals(const Scope* scope,
+ const BinaryOpNode* op_node,
+ const Token& left_token,
+ Value* left,
+ const Value& right,
+ bool allow_type_conversion,
+ Err* err) {
+ switch (left->type()) {
+ // Left-hand-side int.
+ case Value::INTEGER:
+ switch (right.type()) {
+ case Value::INTEGER: // int + int -> addition.
+ left->int_value() += right.int_value();
+ return;
+
+ case Value::STRING: // int + string -> string concat.
+ if (allow_type_conversion) {
+ *left = Value(op_node,
+ base::Int64ToString(left->int_value()) + right.string_value());
+ return;
+ }
+ break;
+
+ default:
+ break;
+ }
+ break;
+
+ // Left-hand-side string.
+ case Value::STRING:
+ switch (right.type()) {
+ case Value::INTEGER: // string + int -> string concat.
+ left->string_value().append(base::Int64ToString(right.int_value()));
+ return;
+
+      case Value::STRING: // string + string -> string concat.
+ left->string_value().append(right.string_value());
+ return;
+
+ default:
+ break;
+ }
+ break;
+
+ // Left-hand-side list.
+ case Value::LIST:
+ switch (right.type()) {
+ case Value::LIST: // list + list -> list concat.
+ if (left_token.value() == kSourcesName) {
+ // Filter additions through the assignment filter.
+ AppendFilteredSourcesToValue(scope, right, left);
+ } else {
+ // Normal list concat.
+ for (const auto& value : right.list_value())
+ left->list_value().push_back(value);
+ }
+ return;
+
+ default:
+ *err = Err(op_node->op(), "Incompatible types to add.",
+ "To append a single item to a list do \"foo += [ bar ]\".");
+ return;
+ }
+
+ default:
+ break;
+ }
+
+ *err = Err(op_node->op(), "Incompatible types to add.",
+ std::string("I see a ") + Value::DescribeType(left->type()) + " and a " +
+ Value::DescribeType(right.type()) + ".");
+}
+
+Value ExecutePlusEquals(Scope* scope,
+ const BinaryOpNode* op_node,
+ const Token& left,
+ const Value& right,
+ Err* err) {
+ // We modify in-place rather than doing read-modify-write to avoid
+ // copying large lists.
+ Value* left_value =
+ scope->GetValueForcedToCurrentScope(left.value(), op_node);
+ if (!left_value) {
+ *err = Err(left, "Undefined variable for +=.",
+ "I don't have something with this name in scope now.");
+ return Value();
+ }
+ ValuePlusEquals(scope, op_node, left, left_value, right, false, err);
+ left_value->set_origin(op_node);
+ scope->MarkUnused(left.value());
+ return Value();
+}
+
+// We return a null value from this rather than the result of doing the append.
+// See ValuePlusEquals for rationale.
+void ValueMinusEquals(const BinaryOpNode* op_node,
+ Value* left,
+ const Value& right,
+ bool allow_type_conversion,
+ Err* err) {
+ switch (left->type()) {
+ // Left-hand-side int.
+ case Value::INTEGER:
+ switch (right.type()) {
+ case Value::INTEGER: // int - int -> subtraction.
+ left->int_value() -= right.int_value();
+ return;
+
+ default:
+ break;
+ }
+ break;
+
+ // Left-hand-side string.
+ case Value::STRING:
+ break; // All are errors.
+
+ // Left-hand-side list.
+ case Value::LIST:
+ if (right.type() != Value::LIST) {
+ *err = Err(op_node->op(), "Incompatible types to subtract.",
+ "To remove a single item from a list do \"foo -= [ bar ]\".");
+ } else {
+ RemoveMatchesFromList(op_node, left, right, err);
+ }
+ return;
+
+ default:
+ break;
+ }
+
+ *err = Err(op_node->op(), "Incompatible types to subtract.",
+ std::string("I see a ") + Value::DescribeType(left->type()) + " and a " +
+ Value::DescribeType(right.type()) + ".");
+}
+
+Value ExecuteMinusEquals(Scope* scope,
+ const BinaryOpNode* op_node,
+ const Token& left,
+ const Value& right,
+ Err* err) {
+ Value* left_value =
+ scope->GetValueForcedToCurrentScope(left.value(), op_node);
+ if (!left_value) {
+ *err = Err(left, "Undefined variable for -=.",
+ "I don't have something with this name in scope now.");
+ return Value();
+ }
+ ValueMinusEquals(op_node, left_value, right, false, err);
+ left_value->set_origin(op_node);
+ scope->MarkUnused(left.value());
+ return Value();
+}
+
+// Plus/Minus -----------------------------------------------------------------
+
+Value ExecutePlus(Scope* scope,
+ const BinaryOpNode* op_node,
+ const Value& left,
+ const Value& right,
+ Err* err) {
+ Value ret = left;
+ ValuePlusEquals(scope, op_node, Token(), &ret, right, true, err);
+ ret.set_origin(op_node);
+ return ret;
+}
+
+Value ExecuteMinus(Scope* scope,
+ const BinaryOpNode* op_node,
+ const Value& left,
+ const Value& right,
+ Err* err) {
+ Value ret = left;
+ ValueMinusEquals(op_node, &ret, right, true, err);
+ ret.set_origin(op_node);
+ return ret;
+}
+
+// Comparison -----------------------------------------------------------------
+
+Value ExecuteEqualsEquals(Scope* scope,
+ const BinaryOpNode* op_node,
+ const Value& left,
+ const Value& right,
+ Err* err) {
+ if (left == right)
+ return Value(op_node, true);
+ return Value(op_node, false);
+}
+
+Value ExecuteNotEquals(Scope* scope,
+ const BinaryOpNode* op_node,
+ const Value& left,
+ const Value& right,
+ Err* err) {
+ // Evaluate in terms of ==.
+ Value result = ExecuteEqualsEquals(scope, op_node, left, right, err);
+ result.boolean_value() = !result.boolean_value();
+ return result;
+}
+
+Value FillNeedsTwoIntegersError(const BinaryOpNode* op_node,
+ const Value& left,
+ const Value& right,
+ Err* err) {
+ *err = Err(op_node, "Comparison requires two integers.",
+ "This operator can only compare two integers.");
+ err->AppendRange(left.origin()->GetRange());
+ err->AppendRange(right.origin()->GetRange());
+ return Value();
+}
+
+Value ExecuteLessEquals(Scope* scope,
+ const BinaryOpNode* op_node,
+ const Value& left,
+ const Value& right,
+ Err* err) {
+ if (left.type() != Value::INTEGER || right.type() != Value::INTEGER)
+ return FillNeedsTwoIntegersError(op_node, left, right, err);
+ return Value(op_node, left.int_value() <= right.int_value());
+}
+
+Value ExecuteGreaterEquals(Scope* scope,
+ const BinaryOpNode* op_node,
+ const Value& left,
+ const Value& right,
+ Err* err) {
+ if (left.type() != Value::INTEGER || right.type() != Value::INTEGER)
+ return FillNeedsTwoIntegersError(op_node, left, right, err);
+ return Value(op_node, left.int_value() >= right.int_value());
+}
+
+Value ExecuteGreater(Scope* scope,
+ const BinaryOpNode* op_node,
+ const Value& left,
+ const Value& right,
+ Err* err) {
+ if (left.type() != Value::INTEGER || right.type() != Value::INTEGER)
+ return FillNeedsTwoIntegersError(op_node, left, right, err);
+ return Value(op_node, left.int_value() > right.int_value());
+}
+
+Value ExecuteLess(Scope* scope,
+ const BinaryOpNode* op_node,
+ const Value& left,
+ const Value& right,
+ Err* err) {
+ if (left.type() != Value::INTEGER || right.type() != Value::INTEGER)
+ return FillNeedsTwoIntegersError(op_node, left, right, err);
+ return Value(op_node, left.int_value() < right.int_value());
+}
+
+// Binary ----------------------------------------------------------------------
+
+Value ExecuteOr(Scope* scope,
+ const BinaryOpNode* op_node,
+ const ParseNode* left_node,
+ const ParseNode* right_node,
+ Err* err) {
+ Value left = GetValueOrFillError(op_node, left_node, "left", scope, err);
+ if (err->has_error())
+ return Value();
+ if (left.type() != Value::BOOLEAN) {
+ *err = Err(op_node->left(), "Left side of || operator is not a boolean.",
+ "Type is \"" + std::string(Value::DescribeType(left.type())) +
+ "\" instead.");
+ return Value();
+ }
+ if (left.boolean_value())
+ return Value(op_node, left.boolean_value());
+
+ Value right = GetValueOrFillError(op_node, right_node, "right", scope, err);
+ if (err->has_error())
+ return Value();
+ if (right.type() != Value::BOOLEAN) {
+ *err = Err(op_node->right(), "Right side of || operator is not a boolean.",
+ "Type is \"" + std::string(Value::DescribeType(right.type())) +
+ "\" instead.");
+ return Value();
+ }
+
+ return Value(op_node, left.boolean_value() || right.boolean_value());
+}
+
+Value ExecuteAnd(Scope* scope,
+ const BinaryOpNode* op_node,
+ const ParseNode* left_node,
+ const ParseNode* right_node,
+ Err* err) {
+ Value left = GetValueOrFillError(op_node, left_node, "left", scope, err);
+ if (err->has_error())
+ return Value();
+ if (left.type() != Value::BOOLEAN) {
+ *err = Err(op_node->left(), "Left side of && operator is not a boolean.",
+ "Type is \"" + std::string(Value::DescribeType(left.type())) +
+ "\" instead.");
+ return Value();
+ }
+ if (!left.boolean_value())
+ return Value(op_node, left.boolean_value());
+
+ Value right = GetValueOrFillError(op_node, right_node, "right", scope, err);
+ if (err->has_error())
+ return Value();
+ if (right.type() != Value::BOOLEAN) {
+ *err = Err(op_node->right(), "Right side of && operator is not a boolean.",
+ "Type is \"" + std::string(Value::DescribeType(right.type())) +
+ "\" instead.");
+ return Value();
+ }
+ return Value(op_node, left.boolean_value() && right.boolean_value());
+}
+
+} // namespace
+
+// ----------------------------------------------------------------------------
+
+Value ExecuteUnaryOperator(Scope* scope,
+ const UnaryOpNode* op_node,
+ const Value& expr,
+ Err* err) {
+ DCHECK(op_node->op().type() == Token::BANG);
+
+ if (expr.type() != Value::BOOLEAN) {
+ *err = Err(op_node, "Operand of ! operator is not a boolean.",
+ "Type is \"" + std::string(Value::DescribeType(expr.type())) +
+ "\" instead.");
+ return Value();
+ }
+ // TODO(scottmg): Why no unary minus?
+ return Value(op_node, !expr.boolean_value());
+}
+
+Value ExecuteBinaryOperator(Scope* scope,
+ const BinaryOpNode* op_node,
+ const ParseNode* left,
+ const ParseNode* right,
+ Err* err) {
+ const Token& op = op_node->op();
+
+ // First handle the ones that take an lvalue.
+ if (op.type() == Token::EQUAL ||
+ op.type() == Token::PLUS_EQUALS ||
+ op.type() == Token::MINUS_EQUALS) {
+ const IdentifierNode* left_id = left->AsIdentifier();
+ if (!left_id) {
+ *err = Err(op, "Operator requires a lvalue.",
+ "This thing on the left is not an identifier.");
+ err->AppendRange(left->GetRange());
+ return Value();
+ }
+ const Token& dest = left_id->value();
+
+ Value right_value = right->Execute(scope, err);
+ if (err->has_error())
+ return Value();
+ if (right_value.type() == Value::NONE) {
+ *err = Err(op, "Operator requires a rvalue.",
+ "This thing on the right does not evaluate to a value.");
+ err->AppendRange(right->GetRange());
+ return Value();
+ }
+
+ if (op.type() == Token::EQUAL)
+ return ExecuteEquals(scope, op_node, dest, right_value, err);
+ if (op.type() == Token::PLUS_EQUALS)
+ return ExecutePlusEquals(scope, op_node, dest, right_value, err);
+ if (op.type() == Token::MINUS_EQUALS)
+ return ExecuteMinusEquals(scope, op_node, dest, right_value, err);
+ NOTREACHED();
+ return Value();
+ }
+
+ // ||, &&. Passed the node instead of the value so that they can avoid
+ // evaluating the RHS on early-out.
+ if (op.type() == Token::BOOLEAN_OR)
+ return ExecuteOr(scope, op_node, left, right, err);
+ if (op.type() == Token::BOOLEAN_AND)
+ return ExecuteAnd(scope, op_node, left, right, err);
+
+ Value left_value = GetValueOrFillError(op_node, left, "left", scope, err);
+ if (err->has_error())
+ return Value();
+ Value right_value = GetValueOrFillError(op_node, right, "right", scope, err);
+ if (err->has_error())
+ return Value();
+
+ // +, -.
+ if (op.type() == Token::MINUS)
+ return ExecuteMinus(scope, op_node, left_value, right_value, err);
+ if (op.type() == Token::PLUS)
+ return ExecutePlus(scope, op_node, left_value, right_value, err);
+
+ // Comparisons.
+ if (op.type() == Token::EQUAL_EQUAL)
+ return ExecuteEqualsEquals(scope, op_node, left_value, right_value, err);
+ if (op.type() == Token::NOT_EQUAL)
+ return ExecuteNotEquals(scope, op_node, left_value, right_value, err);
+ if (op.type() == Token::GREATER_EQUAL)
+ return ExecuteGreaterEquals(scope, op_node, left_value, right_value, err);
+ if (op.type() == Token::LESS_EQUAL)
+ return ExecuteLessEquals(scope, op_node, left_value, right_value, err);
+ if (op.type() == Token::GREATER_THAN)
+ return ExecuteGreater(scope, op_node, left_value, right_value, err);
+ if (op.type() == Token::LESS_THAN)
+ return ExecuteLess(scope, op_node, left_value, right_value, err);
+
+ return Value();
+}
diff --git a/chromium/tools/gn/operators.h b/chromium/tools/gn/operators.h
new file mode 100644
index 00000000000..82ff68e7578
--- /dev/null
+++ b/chromium/tools/gn/operators.h
@@ -0,0 +1,25 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_OPERATORS_H_
+#define TOOLS_GN_OPERATORS_H_
+
+class BinaryOpNode;
+class Err;
+class ParseNode;
+class Scope;
+class UnaryOpNode;
+class Value;
+
+Value ExecuteUnaryOperator(Scope* scope,
+ const UnaryOpNode* op_node,
+ const Value& value,
+ Err* err);
+Value ExecuteBinaryOperator(Scope* scope,
+ const BinaryOpNode* op_node,
+ const ParseNode* left,
+ const ParseNode* right,
+ Err* err);
+
+#endif // TOOLS_GN_OPERATORS_H_
diff --git a/chromium/tools/gn/operators_unittest.cc b/chromium/tools/gn/operators_unittest.cc
new file mode 100644
index 00000000000..dd31b8261e2
--- /dev/null
+++ b/chromium/tools/gn/operators_unittest.cc
@@ -0,0 +1,211 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/operators.h"
+
+#include <stdint.h>
+#include <utility>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/pattern.h"
+#include "tools/gn/test_with_scope.h"
+
+namespace {
+
+bool IsValueIntegerEqualing(const Value& v, int64_t i) {
+ if (v.type() != Value::INTEGER)
+ return false;
+ return v.int_value() == i;
+}
+
+bool IsValueStringEqualing(const Value& v, const char* s) {
+ if (v.type() != Value::STRING)
+ return false;
+ return v.string_value() == s;
+}
+
+// Returns a list populated with a single literal Value corresponding to the
+// given token. The token must outlive the list (since the list will just
+// copy the reference).
+std::unique_ptr<ListNode> ListWithLiteral(const Token& token) {
+ std::unique_ptr<ListNode> list(new ListNode);
+ list->append_item(std::unique_ptr<ParseNode>(new LiteralNode(token)));
+ return list;
+}
+
+} // namespace
+
+TEST(Operators, SourcesAppend) {
+ Err err;
+ TestWithScope setup;
+
+ // Set up "sources" with an empty list.
+ const char sources[] = "sources";
+ setup.scope()->SetValue(sources, Value(nullptr, Value::LIST), nullptr);
+
+ // Set up the operator.
+ BinaryOpNode node;
+ const char token_value[] = "+=";
+ Token op(Location(), Token::PLUS_EQUALS, token_value);
+ node.set_op(op);
+
+ // Append to the sources variable.
+ Token identifier_token(Location(), Token::IDENTIFIER, sources);
+ node.set_left(
+ std::unique_ptr<ParseNode>(new IdentifierNode(identifier_token)));
+
+ // Set up the filter on the scope to remove everything ending with "rm"
+ std::unique_ptr<PatternList> pattern_list(new PatternList);
+ pattern_list->Append(Pattern("*rm"));
+ setup.scope()->set_sources_assignment_filter(std::move(pattern_list));
+
+ // Append an integer.
+ const char integer_value[] = "5";
+ Token integer(Location(), Token::INTEGER, integer_value);
+ node.set_right(ListWithLiteral(integer));
+ node.Execute(setup.scope(), &err);
+ EXPECT_FALSE(err.has_error());
+
+ // Append a string that doesn't match the pattern, it should get appended.
+ const char string_1_value[] = "\"good\"";
+ Token string_1(Location(), Token::STRING, string_1_value);
+ node.set_right(ListWithLiteral(string_1));
+ node.Execute(setup.scope(), &err);
+ EXPECT_FALSE(err.has_error());
+
+ // Append a string that does match the pattern, it should be a no-op.
+ const char string_2_value[] = "\"foo-rm\"";
+ Token string_2(Location(), Token::STRING, string_2_value);
+ node.set_right(ListWithLiteral(string_2));
+ node.Execute(setup.scope(), &err);
+ EXPECT_FALSE(err.has_error());
+
+ // Append a list with the two strings from above.
+ ListNode list;
+ list.append_item(std::unique_ptr<ParseNode>(new LiteralNode(string_1)));
+ list.append_item(std::unique_ptr<ParseNode>(new LiteralNode(string_2)));
+ ExecuteBinaryOperator(setup.scope(), &node, node.left(), &list, &err);
+ EXPECT_FALSE(err.has_error());
+
+ // The sources variable in the scope should now have: [ 5, "good", "good" ]
+ const Value* value = setup.scope()->GetValue(sources);
+ ASSERT_TRUE(value);
+ ASSERT_EQ(Value::LIST, value->type());
+ ASSERT_EQ(3u, value->list_value().size());
+ EXPECT_TRUE(IsValueIntegerEqualing(value->list_value()[0], 5));
+ EXPECT_TRUE(IsValueStringEqualing(value->list_value()[1], "good"));
+ EXPECT_TRUE(IsValueStringEqualing(value->list_value()[2], "good"));
+}
+
+// Note that the SourcesAppend test above tests the basic list + list features,
+// this test handles the other cases.
+TEST(Operators, ListAppend) {
+ Err err;
+ TestWithScope setup;
+
+ // Set up "foo" with an empty list.
+ const char foo[] = "foo";
+ setup.scope()->SetValue(foo, Value(nullptr, Value::LIST), nullptr);
+
+ // Set up the operator.
+ BinaryOpNode node;
+ const char token_value[] = "+=";
+ Token op(Location(), Token::PLUS_EQUALS, token_value);
+ node.set_op(op);
+
+ // Append to the foo variable.
+ Token identifier_token(Location(), Token::IDENTIFIER, foo);
+ node.set_left(
+ std::unique_ptr<ParseNode>(new IdentifierNode(identifier_token)));
+
+ // Append a list with a list, the result should be a nested list.
+ std::unique_ptr<ListNode> outer_list(new ListNode);
+ const char twelve_str[] = "12";
+ Token twelve(Location(), Token::INTEGER, twelve_str);
+ outer_list->append_item(ListWithLiteral(twelve));
+ node.set_right(std::move(outer_list));
+
+ Value ret = ExecuteBinaryOperator(setup.scope(), &node, node.left(),
+ node.right(), &err);
+ EXPECT_FALSE(err.has_error());
+
+ // Return from the operator should always be "none", it should update the
+ // value only.
+ EXPECT_EQ(Value::NONE, ret.type());
+
+ // The value should be updated with "[ [ 12 ] ]"
+ Value result = *setup.scope()->GetValue(foo);
+ ASSERT_EQ(Value::LIST, result.type());
+ ASSERT_EQ(1u, result.list_value().size());
+ ASSERT_EQ(Value::LIST, result.list_value()[0].type());
+ ASSERT_EQ(1u, result.list_value()[0].list_value().size());
+ ASSERT_EQ(Value::INTEGER, result.list_value()[0].list_value()[0].type());
+ ASSERT_EQ(12, result.list_value()[0].list_value()[0].int_value());
+
+ // Try to append an integer and a string directly (e.g. foo += "hi").
+ // This should fail.
+ const char str_str[] = "\"hi\"";
+ Token str(Location(), Token::STRING, str_str);
+ node.set_right(std::unique_ptr<ParseNode>(new LiteralNode(str)));
+ ExecuteBinaryOperator(setup.scope(), &node, node.left(), node.right(), &err);
+ EXPECT_TRUE(err.has_error());
+ err = Err();
+
+ node.set_right(std::unique_ptr<ParseNode>(new LiteralNode(twelve)));
+ ExecuteBinaryOperator(setup.scope(), &node, node.left(), node.right(), &err);
+ EXPECT_TRUE(err.has_error());
+}
+
+TEST(Operators, ShortCircuitAnd) {
+ Err err;
+ TestWithScope setup;
+
+ // Set up the operator.
+ BinaryOpNode node;
+ const char token_value[] = "&&";
+ Token op(Location(), Token::BOOLEAN_AND, token_value);
+ node.set_op(op);
+
+ // Set the left to false.
+ const char false_str[] = "false";
+ Token false_tok(Location(), Token::FALSE_TOKEN, false_str);
+ node.set_left(std::unique_ptr<ParseNode>(new LiteralNode(false_tok)));
+
+ // Set right as foo, but don't define a value for it.
+ const char foo[] = "foo";
+ Token identifier_token(Location(), Token::IDENTIFIER, foo);
+ node.set_right(
+ std::unique_ptr<ParseNode>(new IdentifierNode(identifier_token)));
+
+ Value ret = ExecuteBinaryOperator(setup.scope(), &node, node.left(),
+ node.right(), &err);
+ EXPECT_FALSE(err.has_error());
+}
+
+TEST(Operators, ShortCircuitOr) {
+ Err err;
+ TestWithScope setup;
+
+ // Set up the operator.
+ BinaryOpNode node;
+ const char token_value[] = "||";
+ Token op(Location(), Token::BOOLEAN_OR, token_value);
+ node.set_op(op);
+
+  // Set the left to true so the || short-circuits past the right side.
+ const char false_str[] = "true";
+ Token false_tok(Location(), Token::TRUE_TOKEN, false_str);
+ node.set_left(std::unique_ptr<ParseNode>(new LiteralNode(false_tok)));
+
+ // Set right as foo, but don't define a value for it.
+ const char foo[] = "foo";
+ Token identifier_token(Location(), Token::IDENTIFIER, foo);
+ node.set_right(
+ std::unique_ptr<ParseNode>(new IdentifierNode(identifier_token)));
+
+ Value ret = ExecuteBinaryOperator(setup.scope(), &node, node.left(),
+ node.right(), &err);
+ EXPECT_FALSE(err.has_error());
+}
diff --git a/chromium/tools/gn/ordered_set.h b/chromium/tools/gn/ordered_set.h
new file mode 100644
index 00000000000..c67439b74de
--- /dev/null
+++ b/chromium/tools/gn/ordered_set.h
@@ -0,0 +1,71 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_ORDERED_SET_H_
+#define TOOLS_GN_ORDERED_SET_H_
+
+#include <stddef.h>
+
+#include <set>
+
+// An ordered set of items. Only appending is supported. Iteration is designed
+// to be by index.
+template<typename T>
+class OrderedSet {
+ private:
+ typedef std::set<T> set_type;
+ typedef typename set_type::const_iterator set_iterator;
+ typedef std::vector<set_iterator> vector_type;
+
+ public:
+ static const size_t npos = static_cast<size_t>(-1);
+
+ OrderedSet() {}
+ ~OrderedSet() {}
+
+ const T& operator[](size_t index) const {
+ return *ordering_[index];
+ }
+ size_t size() const {
+ return ordering_.size();
+ }
+ bool empty() const {
+ return ordering_.empty();
+ }
+
+ bool has_item(const T& t) const {
+ return set_.find(t) != set_.end();
+ }
+
+ // Returns true if the item was inserted. False if it was already in the
+ // set.
+ bool push_back(const T& t) {
+ std::pair<set_iterator, bool> result = set_.insert(t);
+ if (result.second)
+ ordering_.push_back(result.first);
+ return true;
+ }
+
+ // Appends a range of items, skipping ones that already exist.
+ template<class InputIterator>
+ void append(const InputIterator& insert_begin,
+ const InputIterator& insert_end) {
+ for (InputIterator i = insert_begin; i != insert_end; ++i) {
+ const T& t = *i;
+ push_back(t);
+ }
+ }
+
+ // Appends all items from the given other set.
+ void append(const OrderedSet<T>& other) {
+ for (size_t i = 0; i < other.size(); i++)
+ push_back(other[i]);
+ }
+
+ private:
+ set_type set_;
+ vector_type ordering_;
+};
+
+#endif // TOOLS_GN_ORDERED_SET_H_
diff --git a/chromium/tools/gn/output_file.cc b/chromium/tools/gn/output_file.cc
new file mode 100644
index 00000000000..12845fbfaad
--- /dev/null
+++ b/chromium/tools/gn/output_file.cc
@@ -0,0 +1,51 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/output_file.h"
+
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/source_file.h"
+
+OutputFile::OutputFile() : value_() {
+}
+
+OutputFile::OutputFile(std::string&& v)
+ : value_(v) {
+}
+
+OutputFile::OutputFile(const std::string& v)
+ : value_(v) {
+}
+
+OutputFile::OutputFile(const BuildSettings* build_settings,
+ const SourceFile& source_file)
+ : value_(RebasePath(source_file.value(),
+ build_settings->build_dir(),
+ build_settings->root_path_utf8())) {
+}
+
+OutputFile::~OutputFile() {
+}
+
+SourceFile OutputFile::AsSourceFile(const BuildSettings* build_settings) const {
+ DCHECK(!value_.empty());
+ DCHECK(value_[value_.size() - 1] != '/');
+
+ std::string path = build_settings->build_dir().value();
+ path.append(value_);
+ NormalizePath(&path);
+ return SourceFile(path);
+}
+
+SourceDir OutputFile::AsSourceDir(const BuildSettings* build_settings) const {
+ if (!value_.empty()) {
+ // Empty means the root build dir. Otherwise, we expect it to end in a
+ // slash.
+ DCHECK(value_[value_.size() - 1] == '/');
+ }
+ std::string path = build_settings->build_dir().value();
+ path.append(value_);
+ NormalizePath(&path);
+ return SourceDir(path);
+}
diff --git a/chromium/tools/gn/output_file.h b/chromium/tools/gn/output_file.h
new file mode 100644
index 00000000000..a7882fd34f8
--- /dev/null
+++ b/chromium/tools/gn/output_file.h
@@ -0,0 +1,66 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_OUTPUT_FILE_H_
+#define TOOLS_GN_OUTPUT_FILE_H_
+
+#include <stddef.h>
+
+#include <string>
+
+#include "base/containers/hash_tables.h"
+#include "tools/gn/build_settings.h"
+
+class SourceFile;
+
+// A simple wrapper around a string that indicates the string is a path
+// relative to the output directory.
+class OutputFile {
+ public:
+ OutputFile();
+ explicit OutputFile(std::string&& v);
+ explicit OutputFile(const std::string& v);
+ OutputFile(const BuildSettings* build_settings,
+ const SourceFile& source_file);
+ ~OutputFile();
+
+ std::string& value() { return value_; }
+ const std::string& value() const { return value_; }
+
+ // Converts to a SourceFile by prepending the build directory to the file.
+ // The *Dir version requires that the current OutputFile ends in a slash, and
+ // the *File version is the opposite.
+ SourceFile AsSourceFile(const BuildSettings* build_settings) const;
+ SourceDir AsSourceDir(const BuildSettings* build_settings) const;
+
+ bool operator==(const OutputFile& other) const {
+ return value_ == other.value_;
+ }
+ bool operator!=(const OutputFile& other) const {
+ return value_ != other.value_;
+ }
+ bool operator<(const OutputFile& other) const {
+ return value_ < other.value_;
+ }
+
+ private:
+ std::string value_;
+};
+
+namespace BASE_HASH_NAMESPACE {
+
+template<> struct hash<OutputFile> {
+ std::size_t operator()(const OutputFile& v) const {
+ hash<std::string> h;
+ return h(v.value());
+ }
+};
+
+} // namespace BASE_HASH_NAMESPACE
+
+inline void swap(OutputFile& lhs, OutputFile& rhs) {
+ lhs.value().swap(rhs.value());
+}
+
+#endif // TOOLS_GN_OUTPUT_FILE_H_
diff --git a/chromium/tools/gn/parse_tree.cc b/chromium/tools/gn/parse_tree.cc
new file mode 100644
index 00000000000..167531c93fe
--- /dev/null
+++ b/chromium/tools/gn/parse_tree.cc
@@ -0,0 +1,840 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/parse_tree.h"
+
+#include <stdint.h>
+
+#include <string>
+#include <tuple>
+
+#include "base/stl_util.h"
+#include "base/strings/string_number_conversions.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/operators.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/string_utils.h"
+
+namespace {
+
+enum DepsCategory {
+ DEPS_CATEGORY_LOCAL,
+ DEPS_CATEGORY_RELATIVE,
+ DEPS_CATEGORY_ABSOLUTE,
+ DEPS_CATEGORY_OTHER,
+};
+
+DepsCategory GetDepsCategory(base::StringPiece deps) {
+ if (deps.length() < 2 || deps[0] != '"' || deps[deps.size() - 1] != '"')
+ return DEPS_CATEGORY_OTHER;
+
+ if (deps[1] == ':')
+ return DEPS_CATEGORY_LOCAL;
+
+ if (deps[1] == '/')
+ return DEPS_CATEGORY_ABSOLUTE;
+
+ return DEPS_CATEGORY_RELATIVE;
+}
+
+std::tuple<base::StringPiece, base::StringPiece> SplitAtFirst(
+ base::StringPiece str,
+ char c) {
+ if (!str.starts_with("\"") || !str.ends_with("\""))
+ return std::make_tuple(str, base::StringPiece());
+
+ str = str.substr(1, str.length() - 2);
+ size_t index_of_first = str.find(c);
+ return std::make_tuple(str.substr(0, index_of_first),
+ index_of_first != base::StringPiece::npos
+ ? str.substr(index_of_first + 1)
+ : base::StringPiece());
+}
+
+std::string IndentFor(int value) {
+ return std::string(value, ' ');
+}
+
+bool IsSortRangeSeparator(const ParseNode* node, const ParseNode* prev) {
+ // If it's a block comment, or has an attached comment with a blank line
+ // before it, then we break the range at this point.
+ return node->AsBlockComment() != nullptr ||
+ (prev && node->comments() && !node->comments()->before().empty() &&
+ (node->GetRange().begin().line_number() >
+ prev->GetRange().end().line_number() +
+ static_cast<int>(node->comments()->before().size() + 1)));
+}
+
+base::StringPiece GetStringRepresentation(const ParseNode* node) {
+ DCHECK(node->AsLiteral() || node->AsIdentifier() || node->AsAccessor());
+ if (node->AsLiteral())
+ return node->AsLiteral()->value().value();
+ else if (node->AsIdentifier())
+ return node->AsIdentifier()->value().value();
+ else if (node->AsAccessor())
+ return node->AsAccessor()->base().value();
+ return base::StringPiece();
+}
+
+} // namespace
+
+Comments::Comments() {
+}
+
+Comments::~Comments() {
+}
+
+void Comments::ReverseSuffix() {
+ for (int i = 0, j = static_cast<int>(suffix_.size() - 1); i < j; ++i, --j)
+ std::swap(suffix_[i], suffix_[j]);
+}
+
+ParseNode::ParseNode() {
+}
+
+ParseNode::~ParseNode() {
+}
+
+const AccessorNode* ParseNode::AsAccessor() const { return nullptr; }
+const BinaryOpNode* ParseNode::AsBinaryOp() const { return nullptr; }
+const BlockCommentNode* ParseNode::AsBlockComment() const { return nullptr; }
+const BlockNode* ParseNode::AsBlock() const { return nullptr; }
+const ConditionNode* ParseNode::AsConditionNode() const { return nullptr; }
+const EndNode* ParseNode::AsEnd() const { return nullptr; }
+const FunctionCallNode* ParseNode::AsFunctionCall() const { return nullptr; }
+const IdentifierNode* ParseNode::AsIdentifier() const { return nullptr; }
+const ListNode* ParseNode::AsList() const { return nullptr; }
+const LiteralNode* ParseNode::AsLiteral() const { return nullptr; }
+const UnaryOpNode* ParseNode::AsUnaryOp() const { return nullptr; }
+
+Comments* ParseNode::comments_mutable() {
+ if (!comments_)
+ comments_.reset(new Comments);
+ return comments_.get();
+}
+
+void ParseNode::PrintComments(std::ostream& out, int indent) const {
+ if (comments_) {
+ std::string ind = IndentFor(indent + 1);
+ for (const auto& token : comments_->before())
+ out << ind << "+BEFORE_COMMENT(\"" << token.value() << "\")\n";
+ for (const auto& token : comments_->suffix())
+ out << ind << "+SUFFIX_COMMENT(\"" << token.value() << "\")\n";
+ for (const auto& token : comments_->after())
+ out << ind << "+AFTER_COMMENT(\"" << token.value() << "\")\n";
+ }
+}
+
+// AccessorNode ---------------------------------------------------------------
+
+AccessorNode::AccessorNode() {
+}
+
+AccessorNode::~AccessorNode() {
+}
+
+const AccessorNode* AccessorNode::AsAccessor() const {
+ return this;
+}
+
+Value AccessorNode::Execute(Scope* scope, Err* err) const {
+ if (index_)
+ return ExecuteArrayAccess(scope, err);
+ else if (member_)
+ return ExecuteScopeAccess(scope, err);
+ NOTREACHED();
+ return Value();
+}
+
+LocationRange AccessorNode::GetRange() const {
+ if (index_)
+ return LocationRange(base_.location(), index_->GetRange().end());
+ else if (member_)
+ return LocationRange(base_.location(), member_->GetRange().end());
+ NOTREACHED();
+ return LocationRange();
+}
+
+Err AccessorNode::MakeErrorDescribing(const std::string& msg,
+ const std::string& help) const {
+ return Err(GetRange(), msg, help);
+}
+
+void AccessorNode::Print(std::ostream& out, int indent) const {
+ out << IndentFor(indent) << "ACCESSOR\n";
+ PrintComments(out, indent);
+ out << IndentFor(indent + 1) << base_.value() << "\n";
+ if (index_)
+ index_->Print(out, indent + 1);
+ else if (member_)
+ member_->Print(out, indent + 1);
+}
+
+Value AccessorNode::ExecuteArrayAccess(Scope* scope, Err* err) const {
+ Value index_value = index_->Execute(scope, err);
+ if (err->has_error())
+ return Value();
+ if (!index_value.VerifyTypeIs(Value::INTEGER, err))
+ return Value();
+
+ const Value* base_value = scope->GetValue(base_.value(), true);
+ if (!base_value) {
+ *err = MakeErrorDescribing("Undefined identifier.");
+ return Value();
+ }
+ if (!base_value->VerifyTypeIs(Value::LIST, err))
+ return Value();
+
+ int64_t index_int = index_value.int_value();
+ if (index_int < 0) {
+ *err = Err(index_->GetRange(), "Negative array subscript.",
+ "You gave me " + base::Int64ToString(index_int) + ".");
+ return Value();
+ }
+ size_t index_sizet = static_cast<size_t>(index_int);
+ if (index_sizet >= base_value->list_value().size()) {
+ *err =
+ Err(index_->GetRange(), "Array subscript out of range.",
+ "You gave me " + base::Int64ToString(index_int) +
+ " but I was expecting something from 0 to " +
+ base::Int64ToString(
+ static_cast<int64_t>(base_value->list_value().size()) - 1) +
+ ", inclusive.");
+ return Value();
+ }
+
+ // Doing this assumes that there's no way in the language to do anything
+ // between the time the reference is created and the time that the reference
+ // is used. If there is, this will crash! Currently, this is just used for
+ // array accesses where this "shouldn't" happen.
+ return base_value->list_value()[index_sizet];
+}
+
+Value AccessorNode::ExecuteScopeAccess(Scope* scope, Err* err) const {
+ // We jump through some hoops here since ideally a.b will count "b" as
+ // accessed in the given scope. The value "a" might be in some normal nested
+ // scope and we can modify it, but it might also be inherited from the
+ // readonly root scope and we can't do used variable tracking on it. (It's
+ // not legal to const cast it away since the root scope will be in readonly
+ // mode and being accessed from multiple threads without locking.) So this
+ // code handles both cases.
+ const Value* result = nullptr;
+
+ // Look up the value in the scope named by "base_".
+ Value* mutable_base_value = scope->GetMutableValue(base_.value(), true);
+ if (mutable_base_value) {
+ // Common case: base value is mutable so we can track variable accesses
+ // for unused value warnings.
+ if (!mutable_base_value->VerifyTypeIs(Value::SCOPE, err))
+ return Value();
+ result = mutable_base_value->scope_value()->GetValue(
+ member_->value().value(), true);
+ } else {
+ // Fall back to see if the value is on a read-only scope.
+ const Value* const_base_value = scope->GetValue(base_.value(), true);
+ if (const_base_value) {
+ // Read only value, don't try to mark the value access as a "used" one.
+ if (!const_base_value->VerifyTypeIs(Value::SCOPE, err))
+ return Value();
+ result =
+ const_base_value->scope_value()->GetValue(member_->value().value());
+ } else {
+ *err = Err(base_, "Undefined identifier.");
+ return Value();
+ }
+ }
+
+ if (!result) {
+ *err = Err(member_.get(), "No value named \"" +
+ member_->value().value() + "\" in scope \"" + base_.value() + "\"");
+ return Value();
+ }
+ return *result;
+}
+
+void AccessorNode::SetNewLocation(int line_number) {
+ Location old = base_.location();
+ base_.set_location(
+ Location(old.file(), line_number, old.column_number(), old.byte()));
+}
+
+// BinaryOpNode ---------------------------------------------------------------
+
+BinaryOpNode::BinaryOpNode() {
+}
+
+BinaryOpNode::~BinaryOpNode() {
+}
+
+const BinaryOpNode* BinaryOpNode::AsBinaryOp() const {
+ return this;
+}
+
+Value BinaryOpNode::Execute(Scope* scope, Err* err) const {
+ return ExecuteBinaryOperator(scope, this, left_.get(), right_.get(), err);
+}
+
+LocationRange BinaryOpNode::GetRange() const {
+ return left_->GetRange().Union(right_->GetRange());
+}
+
+Err BinaryOpNode::MakeErrorDescribing(const std::string& msg,
+ const std::string& help) const {
+ return Err(op_, msg, help);
+}
+
+void BinaryOpNode::Print(std::ostream& out, int indent) const {
+ out << IndentFor(indent) << "BINARY(" << op_.value() << ")\n";
+ PrintComments(out, indent);
+ left_->Print(out, indent + 1);
+ right_->Print(out, indent + 1);
+}
+
+// BlockNode ------------------------------------------------------------------
+
+BlockNode::BlockNode() {
+}
+
+BlockNode::~BlockNode() {
+ STLDeleteContainerPointers(statements_.begin(), statements_.end());
+}
+
+const BlockNode* BlockNode::AsBlock() const {
+ return this;
+}
+
+Value BlockNode::Execute(Scope* scope, Err* err) const {
+ for (size_t i = 0; i < statements_.size() && !err->has_error(); i++) {
+ // Check for trying to execute things with no side effects in a block.
+ const ParseNode* cur = statements_[i];
+ if (cur->AsList() || cur->AsLiteral() || cur->AsUnaryOp() ||
+ cur->AsIdentifier()) {
+ *err = cur->MakeErrorDescribing(
+ "This statement has no effect.",
+ "Either delete it or do something with the result.");
+ return Value();
+ }
+ cur->Execute(scope, err);
+ }
+ return Value();
+}
+
+LocationRange BlockNode::GetRange() const {
+ if (begin_token_.type() != Token::INVALID &&
+ end_->value().type() != Token::INVALID) {
+ return begin_token_.range().Union(end_->value().range());
+ } else if (!statements_.empty()) {
+ return statements_[0]->GetRange().Union(
+ statements_[statements_.size() - 1]->GetRange());
+ }
+ return LocationRange();
+}
+
+Err BlockNode::MakeErrorDescribing(const std::string& msg,
+ const std::string& help) const {
+ return Err(GetRange(), msg, help);
+}
+
+void BlockNode::Print(std::ostream& out, int indent) const {
+ out << IndentFor(indent) << "BLOCK\n";
+ PrintComments(out, indent);
+ for (const auto& statement : statements_)
+ statement->Print(out, indent + 1);
+ if (end_ && end_->comments())
+ end_->Print(out, indent + 1);
+}
+
+// ConditionNode --------------------------------------------------------------
+
+ConditionNode::ConditionNode() {
+}
+
+ConditionNode::~ConditionNode() {
+}
+
+const ConditionNode* ConditionNode::AsConditionNode() const {
+ return this;
+}
+
+Value ConditionNode::Execute(Scope* scope, Err* err) const {
+ Value condition_result = condition_->Execute(scope, err);
+ if (err->has_error())
+ return Value();
+ if (condition_result.type() != Value::BOOLEAN) {
+ *err = condition_->MakeErrorDescribing(
+ "Condition does not evaluate to a boolean value.",
+ std::string("This is a value of type \"") +
+ Value::DescribeType(condition_result.type()) +
+ "\" instead.");
+ err->AppendRange(if_token_.range());
+ return Value();
+ }
+
+ if (condition_result.boolean_value()) {
+ if_true_->Execute(scope, err);
+ } else if (if_false_) {
+ // The else block is optional.
+ if_false_->Execute(scope, err);
+ }
+
+ return Value();
+}
+
+LocationRange ConditionNode::GetRange() const {
+ if (if_false_)
+ return if_token_.range().Union(if_false_->GetRange());
+ return if_token_.range().Union(if_true_->GetRange());
+}
+
+Err ConditionNode::MakeErrorDescribing(const std::string& msg,
+ const std::string& help) const {
+ return Err(if_token_, msg, help);
+}
+
+void ConditionNode::Print(std::ostream& out, int indent) const {
+ out << IndentFor(indent) << "CONDITION\n";
+ PrintComments(out, indent);
+ condition_->Print(out, indent + 1);
+ if_true_->Print(out, indent + 1);
+ if (if_false_)
+ if_false_->Print(out, indent + 1);
+}
+
+// FunctionCallNode -----------------------------------------------------------
+
+FunctionCallNode::FunctionCallNode() {
+}
+
+FunctionCallNode::~FunctionCallNode() {
+}
+
+const FunctionCallNode* FunctionCallNode::AsFunctionCall() const {
+ return this;
+}
+
+Value FunctionCallNode::Execute(Scope* scope, Err* err) const {
+ return functions::RunFunction(scope, this, args_.get(), block_.get(), err);
+}
+
+LocationRange FunctionCallNode::GetRange() const {
+ if (function_.type() == Token::INVALID)
+ return LocationRange(); // This will be null in some tests.
+ if (block_)
+ return function_.range().Union(block_->GetRange());
+ return function_.range().Union(args_->GetRange());
+}
+
+Err FunctionCallNode::MakeErrorDescribing(const std::string& msg,
+ const std::string& help) const {
+ return Err(function_, msg, help);
+}
+
+void FunctionCallNode::Print(std::ostream& out, int indent) const {
+ out << IndentFor(indent) << "FUNCTION(" << function_.value() << ")\n";
+ PrintComments(out, indent);
+ args_->Print(out, indent + 1);
+ if (block_)
+ block_->Print(out, indent + 1);
+}
+
+// IdentifierNode --------------------------------------------------------------
+
+IdentifierNode::IdentifierNode() {
+}
+
+IdentifierNode::IdentifierNode(const Token& token) : value_(token) {
+}
+
+IdentifierNode::~IdentifierNode() {
+}
+
+const IdentifierNode* IdentifierNode::AsIdentifier() const {
+ return this;
+}
+
+Value IdentifierNode::Execute(Scope* scope, Err* err) const {
+ const Value* value = scope->GetValue(value_.value(), true);
+ Value result;
+ if (!value) {
+ *err = MakeErrorDescribing("Undefined identifier");
+ return result;
+ }
+
+ result = *value;
+ result.set_origin(this);
+ return result;
+}
+
+LocationRange IdentifierNode::GetRange() const {
+ return value_.range();
+}
+
+Err IdentifierNode::MakeErrorDescribing(const std::string& msg,
+ const std::string& help) const {
+ return Err(value_, msg, help);
+}
+
+void IdentifierNode::Print(std::ostream& out, int indent) const {
+ out << IndentFor(indent) << "IDENTIFIER(" << value_.value() << ")\n";
+ PrintComments(out, indent);
+}
+
+void IdentifierNode::SetNewLocation(int line_number) {
+ Location old = value_.location();
+ value_.set_location(
+ Location(old.file(), line_number, old.column_number(), old.byte()));
+}
+
+// ListNode -------------------------------------------------------------------
+
+ListNode::ListNode() : prefer_multiline_(false) {
+}
+
+ListNode::~ListNode() {
+ STLDeleteContainerPointers(contents_.begin(), contents_.end());
+}
+
+const ListNode* ListNode::AsList() const {
+ return this;
+}
+
+Value ListNode::Execute(Scope* scope, Err* err) const {
+ Value result_value(this, Value::LIST);
+ std::vector<Value>& results = result_value.list_value();
+ results.reserve(contents_.size());
+
+ for (const auto& cur : contents_) {
+ if (cur->AsBlockComment())
+ continue;
+ results.push_back(cur->Execute(scope, err));
+ if (err->has_error())
+ return Value();
+ if (results.back().type() == Value::NONE) {
+ *err = cur->MakeErrorDescribing(
+ "This does not evaluate to a value.",
+ "I can't do something with nothing.");
+ return Value();
+ }
+ }
+ return result_value;
+}
+
+LocationRange ListNode::GetRange() const {
+ return LocationRange(begin_token_.location(),
+ end_->value().location());
+}
+
+Err ListNode::MakeErrorDescribing(const std::string& msg,
+ const std::string& help) const {
+ return Err(begin_token_, msg, help);
+}
+
+void ListNode::Print(std::ostream& out, int indent) const {
+ out << IndentFor(indent) << "LIST" << (prefer_multiline_ ? " multiline" : "")
+ << "\n";
+ PrintComments(out, indent);
+ for (const auto& cur : contents_)
+ cur->Print(out, indent + 1);
+ if (end_ && end_->comments())
+ end_->Print(out, indent + 1);
+}
+
+template <typename Comparator>
+void ListNode::SortList(Comparator comparator) {
+ // Partitions first on BlockCommentNodes and sorts each partition separately.
+ for (auto sr : GetSortRanges()) {
+ bool skip = false;
+ for (size_t i = sr.begin; i != sr.end; ++i) {
+ // Bails out if any of the nodes are unsupported.
+ const ParseNode* node = contents_[i];
+ if (!node->AsLiteral() && !node->AsIdentifier() && !node->AsAccessor()) {
+ skip = true;
+ continue;
+ }
+ }
+ if (skip)
+ continue;
+ // Save the original line number so that we can re-assign ranges. We assume
+ // they're contiguous lines because GetSortRanges() does so above. We need
+ // to re-assign these line numbers primiarily because `gn format` uses them
+ // to determine whether two nodes were initially separated by a blank line
+ // or not.
+ int start_line = contents_[sr.begin]->GetRange().begin().line_number();
+ const ParseNode* original_first = contents_[sr.begin];
+ std::sort(contents_.begin() + sr.begin, contents_.begin() + sr.end,
+ comparator);
+ // If the beginning of the range had before comments, and the first node
+ // moved during the sort, then move its comments to the new head of the
+ // range.
+ if (original_first->comments() && contents_[sr.begin] != original_first) {
+ for (const auto& hc : original_first->comments()->before()) {
+ const_cast<ParseNode*>(contents_[sr.begin])
+ ->comments_mutable()
+ ->append_before(hc);
+ }
+ const_cast<ParseNode*>(original_first)
+ ->comments_mutable()
+ ->clear_before();
+ }
+ const ParseNode* prev = nullptr;
+ for (size_t i = sr.begin; i != sr.end; ++i) {
+ const ParseNode* node = contents_[i];
+ DCHECK(node->AsLiteral() || node->AsIdentifier() || node->AsAccessor());
+ int line_number =
+ prev ? prev->GetRange().end().line_number() + 1 : start_line;
+ if (node->AsLiteral()) {
+ const_cast<LiteralNode*>(node->AsLiteral())
+ ->SetNewLocation(line_number);
+ } else if (node->AsIdentifier()) {
+ const_cast<IdentifierNode*>(node->AsIdentifier())
+ ->SetNewLocation(line_number);
+ } else if (node->AsAccessor()) {
+ const_cast<AccessorNode*>(node->AsAccessor())
+ ->SetNewLocation(line_number);
+ }
+ prev = node;
+ }
+ }
+}
+
+void ListNode::SortAsStringsList() {
+ // Sorts alphabetically.
+ SortList([](const ParseNode* a, const ParseNode* b) {
+ base::StringPiece astr = GetStringRepresentation(a);
+ base::StringPiece bstr = GetStringRepresentation(b);
+ return astr < bstr;
+ });
+}
+
+void ListNode::SortAsDepsList() {
+ // Sorts first relative targets, then absolute, each group is sorted
+ // alphabetically.
+ SortList([](const ParseNode* a, const ParseNode* b) {
+ base::StringPiece astr = GetStringRepresentation(a);
+ base::StringPiece bstr = GetStringRepresentation(b);
+ return std::make_pair(GetDepsCategory(astr), SplitAtFirst(astr, ':')) <
+ std::make_pair(GetDepsCategory(bstr), SplitAtFirst(bstr, ':'));
+ });
+}
+
+// Breaks the ParseNodes of |contents| up by ranges that should be separately
+// sorted. In particular, we break at a block comment, or an item that has an
+// attached "before" comment and is separated by a blank line from the item
+// before it. The assumption is that both of these indicate a separate 'section'
+// of a sources block across which items should not be inter-sorted.
+std::vector<ListNode::SortRange> ListNode::GetSortRanges() const {
+ std::vector<SortRange> ranges;
+ const ParseNode* prev = nullptr;
+ size_t begin = 0;
+ for (size_t i = begin; i < contents_.size(); prev = contents_[i++]) {
+ if (IsSortRangeSeparator(contents_[i], prev)) {
+ if (i > begin) {
+ ranges.push_back(SortRange(begin, i));
+ // If |i| is an item with an attached comment, then we start the next
+ // range at that point, because we want to include it in the sort.
+ // Otherwise, it's a block comment which we skip over entirely because
+ // we don't want to move or include it in the sort. The two cases are:
+ //
+ // sources = [
+ // "a",
+ // "b",
+ //
+ // #
+ // # This is a block comment.
+ // #
+ //
+ // "c",
+ // "d",
+ // ]
+ //
+ // which contains 5 elements, and for which the ranges would be { [0,
+ // 2), [3, 5) } (notably excluding 2, the block comment), and:
+ //
+ // sources = [
+ // "a",
+ // "b",
+ //
+ // # This is a header comment.
+ // "c",
+ // "d",
+ // ]
+ //
+ // which contains 4 elements, index 2 containing an attached 'before'
+ // comments, and the ranges should be { [0, 2), [2, 4) }.
+ if (!contents_[i]->AsBlockComment())
+ begin = i;
+ else
+ begin = i + 1;
+ } else {
+ // If it was a one item range, just skip over it.
+ begin = i + 1;
+ }
+ }
+ }
+ if (begin != contents_.size())
+ ranges.push_back(SortRange(begin, contents_.size()));
+ return ranges;
+}
+
+// LiteralNode -----------------------------------------------------------------
+
+LiteralNode::LiteralNode() {
+}
+
+LiteralNode::LiteralNode(const Token& token) : value_(token) {
+}
+
+LiteralNode::~LiteralNode() {
+}
+
+const LiteralNode* LiteralNode::AsLiteral() const {
+ return this;
+}
+
+Value LiteralNode::Execute(Scope* scope, Err* err) const {
+ switch (value_.type()) {
+ case Token::TRUE_TOKEN:
+ return Value(this, true);
+ case Token::FALSE_TOKEN:
+ return Value(this, false);
+ case Token::INTEGER: {
+ base::StringPiece s = value_.value();
+ if ((s.starts_with("0") && s.size() > 1) || s.starts_with("-0")) {
+ if (s == "-0")
+ *err = MakeErrorDescribing("Negative zero doesn't make sense");
+ else
+ *err = MakeErrorDescribing("Leading zeros not allowed");
+ return Value();
+ }
+ int64_t result_int;
+ if (!base::StringToInt64(s, &result_int)) {
+ *err = MakeErrorDescribing("This does not look like an integer");
+ return Value();
+ }
+ return Value(this, result_int);
+ }
+ case Token::STRING: {
+ Value v(this, Value::STRING);
+ ExpandStringLiteral(scope, value_, &v, err);
+ return v;
+ }
+ default:
+ NOTREACHED();
+ return Value();
+ }
+}
+
+LocationRange LiteralNode::GetRange() const {
+ return value_.range();
+}
+
+Err LiteralNode::MakeErrorDescribing(const std::string& msg,
+ const std::string& help) const {
+ return Err(value_, msg, help);
+}
+
+void LiteralNode::Print(std::ostream& out, int indent) const {
+ out << IndentFor(indent) << "LITERAL(" << value_.value() << ")\n";
+ PrintComments(out, indent);
+}
+
+void LiteralNode::SetNewLocation(int line_number) {
+ Location old = value_.location();
+ value_.set_location(
+ Location(old.file(), line_number, old.column_number(), old.byte()));
+}
+
+// UnaryOpNode ----------------------------------------------------------------
+
+UnaryOpNode::UnaryOpNode() {
+}
+
+UnaryOpNode::~UnaryOpNode() {
+}
+
+const UnaryOpNode* UnaryOpNode::AsUnaryOp() const {
+ return this;
+}
+
+Value UnaryOpNode::Execute(Scope* scope, Err* err) const {
+ Value operand_value = operand_->Execute(scope, err);
+ if (err->has_error())
+ return Value();
+ return ExecuteUnaryOperator(scope, this, operand_value, err);
+}
+
+LocationRange UnaryOpNode::GetRange() const {
+ return op_.range().Union(operand_->GetRange());
+}
+
+Err UnaryOpNode::MakeErrorDescribing(const std::string& msg,
+ const std::string& help) const {
+ return Err(op_, msg, help);
+}
+
+void UnaryOpNode::Print(std::ostream& out, int indent) const {
+ out << IndentFor(indent) << "UNARY(" << op_.value() << ")\n";
+ PrintComments(out, indent);
+ operand_->Print(out, indent + 1);
+}
+
+// BlockCommentNode ------------------------------------------------------------
+
+BlockCommentNode::BlockCommentNode() {
+}
+
+BlockCommentNode::~BlockCommentNode() {
+}
+
+const BlockCommentNode* BlockCommentNode::AsBlockComment() const {
+ return this;
+}
+
+Value BlockCommentNode::Execute(Scope* scope, Err* err) const {
+ return Value();
+}
+
+LocationRange BlockCommentNode::GetRange() const {
+ return comment_.range();
+}
+
+Err BlockCommentNode::MakeErrorDescribing(const std::string& msg,
+ const std::string& help) const {
+ return Err(comment_, msg, help);
+}
+
+void BlockCommentNode::Print(std::ostream& out, int indent) const {
+ out << IndentFor(indent) << "BLOCK_COMMENT(" << comment_.value() << ")\n";
+ PrintComments(out, indent);
+}
+
+// EndNode ---------------------------------------------------------------------
+
+EndNode::EndNode(const Token& token) : value_(token) {
+}
+
+EndNode::~EndNode() {
+}
+
+const EndNode* EndNode::AsEnd() const {
+ return this;
+}
+
+Value EndNode::Execute(Scope* scope, Err* err) const {
+ return Value();
+}
+
+LocationRange EndNode::GetRange() const {
+ return value_.range();
+}
+
+Err EndNode::MakeErrorDescribing(const std::string& msg,
+ const std::string& help) const {
+ return Err(value_, msg, help);
+}
+
+void EndNode::Print(std::ostream& out, int indent) const {
+ out << IndentFor(indent) << "END(" << value_.value() << ")\n";
+ PrintComments(out, indent);
+}
diff --git a/chromium/tools/gn/parse_tree.h b/chromium/tools/gn/parse_tree.h
new file mode 100644
index 00000000000..28537992068
--- /dev/null
+++ b/chromium/tools/gn/parse_tree.h
@@ -0,0 +1,520 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_PARSE_TREE_H_
+#define TOOLS_GN_PARSE_TREE_H_
+
+#include <stddef.h>
+
+#include <memory>
+#include <utility>
+#include <vector>
+
+#include "base/macros.h"
+#include "tools/gn/err.h"
+#include "tools/gn/token.h"
+#include "tools/gn/value.h"
+
+class AccessorNode;
+class BinaryOpNode;
+class BlockCommentNode;
+class BlockNode;
+class ConditionNode;
+class EndNode;
+class FunctionCallNode;
+class IdentifierNode;
+class ListNode;
+class LiteralNode;
+class Scope;
+class UnaryOpNode;
+
+class Comments {
+ public:
+ Comments();
+ virtual ~Comments();
+
+ const std::vector<Token>& before() const { return before_; }
+ void append_before(Token c) { before_.push_back(c); }
+ void clear_before() { before_.clear(); }
+
+ const std::vector<Token>& suffix() const { return suffix_; }
+ void append_suffix(Token c) { suffix_.push_back(c); }
+ // Reverse the order of the suffix comments. When walking the tree in
+ // post-order we append suffix comments in reverse order, so this fixes them
+ // up.
+ void ReverseSuffix();
+
+ const std::vector<Token>& after() const { return after_; }
+ void append_after(Token c) { after_.push_back(c); }
+
+ private:
+ // Whole line comments before the expression.
+ std::vector<Token> before_;
+
+ // End-of-line comments after this expression.
+ std::vector<Token> suffix_;
+
+ // For top-level expressions only, after_ lists whole-line comments
+ // following the expression.
+ std::vector<Token> after_;
+
+ DISALLOW_COPY_AND_ASSIGN(Comments);
+};
+
+// ParseNode -------------------------------------------------------------------
+
+// A node in the AST.
+class ParseNode {
+ public:
+ ParseNode();
+ virtual ~ParseNode();
+
+ virtual const AccessorNode* AsAccessor() const;
+ virtual const BinaryOpNode* AsBinaryOp() const;
+ virtual const BlockCommentNode* AsBlockComment() const;
+ virtual const BlockNode* AsBlock() const;
+ virtual const ConditionNode* AsConditionNode() const;
+ virtual const EndNode* AsEnd() const;
+ virtual const FunctionCallNode* AsFunctionCall() const;
+ virtual const IdentifierNode* AsIdentifier() const;
+ virtual const ListNode* AsList() const;
+ virtual const LiteralNode* AsLiteral() const;
+ virtual const UnaryOpNode* AsUnaryOp() const;
+
+ virtual Value Execute(Scope* scope, Err* err) const = 0;
+
+ virtual LocationRange GetRange() const = 0;
+
+ // Returns an error with the given messages and the range set to something
+ // that indicates this node.
+ virtual Err MakeErrorDescribing(
+ const std::string& msg,
+ const std::string& help = std::string()) const = 0;
+
+ // Prints a representation of this node to the given string, indenting
+ // by the given number of spaces.
+ virtual void Print(std::ostream& out, int indent) const = 0;
+
+ const Comments* comments() const { return comments_.get(); }
+ Comments* comments_mutable();
+ void PrintComments(std::ostream& out, int indent) const;
+
+ private:
+ std::unique_ptr<Comments> comments_;
+
+ DISALLOW_COPY_AND_ASSIGN(ParseNode);
+};
+
+// AccessorNode ----------------------------------------------------------------
+
+// Access an array or scope element.
+//
+// Currently, such values are only read-only. In that you can do:
+// a = obj1.a
+// b = obj2[0]
+// But not
+// obj1.a = 5
+// obj2[0] = 6
+//
+// In the current design where the dot operator is used only for templates, we
+// explicitly don't want to allow you to do "invoker.foo = 5", so if we added
+// support for accessors to be lvalues, we would also need to add some concept
+// of a constant scope. Supporting this would also add a lot of complications
+// to the operator= implementation, since some accessors might return values
+// in the const root scope that shouldn't be modified. Without a strong
+// use-case for this, it seems simpler to just disallow it.
+//
+// Additionally, the left-hand-side of the accessor must currently be an
+// identifier. So you can't do things like:
+// function_call()[1]
+// a = b.c.d
+// These are easier to implement if we needed them but given the very limited
+// use cases for this, it hasn't seemed worth the bother.
+class AccessorNode : public ParseNode {
+ public:
+ AccessorNode();
+ ~AccessorNode() override;
+
+ const AccessorNode* AsAccessor() const override;
+ Value Execute(Scope* scope, Err* err) const override;
+ LocationRange GetRange() const override;
+ Err MakeErrorDescribing(
+ const std::string& msg,
+ const std::string& help = std::string()) const override;
+ void Print(std::ostream& out, int indent) const override;
+
+ // Base is the thing on the left of the [] or dot, currently always required
+ // to be an identifier token.
+ const Token& base() const { return base_; }
+ void set_base(const Token& b) { base_ = b; }
+
+ // Index is the expression inside the []. Will be null if member is set.
+ const ParseNode* index() const { return index_.get(); }
+ void set_index(std::unique_ptr<ParseNode> i) { index_ = std::move(i); }
+
+ // The member is the identifier on the right hand side of the dot. Will be
+ // null if the index is set.
+ const IdentifierNode* member() const { return member_.get(); }
+ void set_member(std::unique_ptr<IdentifierNode> i) { member_ = std::move(i); }
+
+ void SetNewLocation(int line_number);
+
+ private:
+ Value ExecuteArrayAccess(Scope* scope, Err* err) const;
+ Value ExecuteScopeAccess(Scope* scope, Err* err) const;
+
+ Token base_;
+
+ // Either index or member will be set according to what type of access this
+ // is.
+ std::unique_ptr<ParseNode> index_;
+ std::unique_ptr<IdentifierNode> member_;
+
+ DISALLOW_COPY_AND_ASSIGN(AccessorNode);
+};
+
+// BinaryOpNode ----------------------------------------------------------------
+
+class BinaryOpNode : public ParseNode {
+ public:
+ BinaryOpNode();
+ ~BinaryOpNode() override;
+
+ const BinaryOpNode* AsBinaryOp() const override;
+ Value Execute(Scope* scope, Err* err) const override;
+ LocationRange GetRange() const override;
+ Err MakeErrorDescribing(
+ const std::string& msg,
+ const std::string& help = std::string()) const override;
+ void Print(std::ostream& out, int indent) const override;
+
+ const Token& op() const { return op_; }
+ void set_op(const Token& t) { op_ = t; }
+
+ const ParseNode* left() const { return left_.get(); }
+ void set_left(std::unique_ptr<ParseNode> left) { left_ = std::move(left); }
+
+ const ParseNode* right() const { return right_.get(); }
+ void set_right(std::unique_ptr<ParseNode> right) {
+ right_ = std::move(right);
+ }
+
+ private:
+ std::unique_ptr<ParseNode> left_;
+ Token op_;
+ std::unique_ptr<ParseNode> right_;
+
+ DISALLOW_COPY_AND_ASSIGN(BinaryOpNode);
+};
+
+// BlockNode -------------------------------------------------------------------
+
+class BlockNode : public ParseNode {
+ public:
+ BlockNode();
+ ~BlockNode() override;
+
+ const BlockNode* AsBlock() const override;
+ Value Execute(Scope* scope, Err* err) const override;
+ LocationRange GetRange() const override;
+ Err MakeErrorDescribing(
+ const std::string& msg,
+ const std::string& help = std::string()) const override;
+ void Print(std::ostream& out, int indent) const override;
+
+ void set_begin_token(const Token& t) { begin_token_ = t; }
+ void set_end(std::unique_ptr<EndNode> e) { end_ = std::move(e); }
+ const EndNode* End() const { return end_.get(); }
+
+ const std::vector<ParseNode*>& statements() const { return statements_; }
+ void append_statement(std::unique_ptr<ParseNode> s) {
+ statements_.push_back(s.release());
+ }
+
+ private:
+ // Tokens corresponding to { and }, if any (may be NULL). The end is stored
+ // in a custom parse node so that it can have comments hung off of it.
+ Token begin_token_;
+ std::unique_ptr<EndNode> end_;
+
+ // Owning pointers, use unique_ptr when we can use C++11.
+ std::vector<ParseNode*> statements_;
+
+ DISALLOW_COPY_AND_ASSIGN(BlockNode);
+};
+
+// ConditionNode ---------------------------------------------------------------
+
+class ConditionNode : public ParseNode {
+ public:
+ ConditionNode();
+ ~ConditionNode() override;
+
+ const ConditionNode* AsConditionNode() const override;
+ Value Execute(Scope* scope, Err* err) const override;
+ LocationRange GetRange() const override;
+ Err MakeErrorDescribing(
+ const std::string& msg,
+ const std::string& help = std::string()) const override;
+ void Print(std::ostream& out, int indent) const override;
+
+ void set_if_token(const Token& token) { if_token_ = token; }
+
+ const ParseNode* condition() const { return condition_.get(); }
+ void set_condition(std::unique_ptr<ParseNode> c) {
+ condition_ = std::move(c);
+ }
+
+ const BlockNode* if_true() const { return if_true_.get(); }
+ void set_if_true(std::unique_ptr<BlockNode> t) { if_true_ = std::move(t); }
+
+ // This is either empty, a block (for the else clause), or another
+ // condition.
+ const ParseNode* if_false() const { return if_false_.get(); }
+ void set_if_false(std::unique_ptr<ParseNode> f) { if_false_ = std::move(f); }
+
+ private:
+ // Token corresponding to the "if" string.
+ Token if_token_;
+
+ std::unique_ptr<ParseNode> condition_; // Always non-null.
+ std::unique_ptr<BlockNode> if_true_; // Always non-null.
+ std::unique_ptr<ParseNode> if_false_; // May be null.
+
+ DISALLOW_COPY_AND_ASSIGN(ConditionNode);
+};
+
+// FunctionCallNode ------------------------------------------------------------
+
+class FunctionCallNode : public ParseNode {
+ public:
+ FunctionCallNode();
+ ~FunctionCallNode() override;
+
+ const FunctionCallNode* AsFunctionCall() const override;
+ Value Execute(Scope* scope, Err* err) const override;
+ LocationRange GetRange() const override;
+ Err MakeErrorDescribing(
+ const std::string& msg,
+ const std::string& help = std::string()) const override;
+ void Print(std::ostream& out, int indent) const override;
+
+ const Token& function() const { return function_; }
+ void set_function(Token t) { function_ = t; }
+
+ const ListNode* args() const { return args_.get(); }
+ void set_args(std::unique_ptr<ListNode> a) { args_ = std::move(a); }
+
+ const BlockNode* block() const { return block_.get(); }
+ void set_block(std::unique_ptr<BlockNode> b) { block_ = std::move(b); }
+
+ private:
+ Token function_;
+ std::unique_ptr<ListNode> args_;
+ std::unique_ptr<BlockNode> block_; // May be null.
+
+ DISALLOW_COPY_AND_ASSIGN(FunctionCallNode);
+};
+
+// IdentifierNode --------------------------------------------------------------
+
+class IdentifierNode : public ParseNode {
+ public:
+ IdentifierNode();
+ explicit IdentifierNode(const Token& token);
+ ~IdentifierNode() override;
+
+ const IdentifierNode* AsIdentifier() const override;
+ Value Execute(Scope* scope, Err* err) const override;
+ LocationRange GetRange() const override;
+ Err MakeErrorDescribing(
+ const std::string& msg,
+ const std::string& help = std::string()) const override;
+ void Print(std::ostream& out, int indent) const override;
+
+ const Token& value() const { return value_; }
+ void set_value(const Token& t) { value_ = t; }
+
+ void SetNewLocation(int line_number);
+
+ private:
+ Token value_;
+
+ DISALLOW_COPY_AND_ASSIGN(IdentifierNode);
+};
+
+// ListNode --------------------------------------------------------------------
+
+class ListNode : public ParseNode {
+ public:
+ ListNode();
+ ~ListNode() override;
+
+ const ListNode* AsList() const override;
+ Value Execute(Scope* scope, Err* err) const override;
+ LocationRange GetRange() const override;
+ Err MakeErrorDescribing(
+ const std::string& msg,
+ const std::string& help = std::string()) const override;
+ void Print(std::ostream& out, int indent) const override;
+
+ void set_begin_token(const Token& t) { begin_token_ = t; }
+ void set_end(std::unique_ptr<EndNode> e) { end_ = std::move(e); }
+ const EndNode* End() const { return end_.get(); }
+
+ void append_item(std::unique_ptr<ParseNode> s) {
+ contents_.push_back(s.release());
+ }
+ const std::vector<const ParseNode*>& contents() const { return contents_; }
+
+ void SortAsStringsList();
+ void SortAsDepsList();
+
+  // During formatting, do we want this list to always be multiline? This is
+ // used to make assignments to deps, sources, etc. always be multiline lists,
+ // rather than collapsed to a single line when they're one element.
+ bool prefer_multiline() const { return prefer_multiline_; }
+ void set_prefer_multiline(bool prefer_multiline) {
+ prefer_multiline_ = prefer_multiline;
+ }
+
+ struct SortRange {
+ size_t begin;
+ size_t end;
+ SortRange(size_t begin, size_t end) : begin(begin), end(end) {}
+ };
+ // Only public for testing.
+ std::vector<SortRange> GetSortRanges() const;
+
+ private:
+ template <typename Comparator>
+ void SortList(Comparator comparator);
+
+ // Tokens corresponding to the [ and ]. The end token is stored in inside an
+ // custom parse node so that it can have comments hung off of it.
+ Token begin_token_;
+ std::unique_ptr<EndNode> end_;
+ bool prefer_multiline_;
+
+ // Owning pointers, use unique_ptr when we can use C++11.
+ std::vector<const ParseNode*> contents_;
+
+ DISALLOW_COPY_AND_ASSIGN(ListNode);
+};
+
+// LiteralNode -----------------------------------------------------------------
+
+class LiteralNode : public ParseNode {
+ public:
+ LiteralNode();
+ explicit LiteralNode(const Token& token);
+ ~LiteralNode() override;
+
+ const LiteralNode* AsLiteral() const override;
+ Value Execute(Scope* scope, Err* err) const override;
+ LocationRange GetRange() const override;
+ Err MakeErrorDescribing(
+ const std::string& msg,
+ const std::string& help = std::string()) const override;
+ void Print(std::ostream& out, int indent) const override;
+
+ const Token& value() const { return value_; }
+ void set_value(const Token& t) { value_ = t; }
+
+ void SetNewLocation(int line_number);
+
+ private:
+ Token value_;
+
+ DISALLOW_COPY_AND_ASSIGN(LiteralNode);
+};
+
+// UnaryOpNode -----------------------------------------------------------------
+
+class UnaryOpNode : public ParseNode {
+ public:
+ UnaryOpNode();
+ ~UnaryOpNode() override;
+
+ const UnaryOpNode* AsUnaryOp() const override;
+ Value Execute(Scope* scope, Err* err) const override;
+ LocationRange GetRange() const override;
+ Err MakeErrorDescribing(
+ const std::string& msg,
+ const std::string& help = std::string()) const override;
+ void Print(std::ostream& out, int indent) const override;
+
+ const Token& op() const { return op_; }
+ void set_op(const Token& t) { op_ = t; }
+
+ const ParseNode* operand() const { return operand_.get(); }
+ void set_operand(std::unique_ptr<ParseNode> operand) {
+ operand_ = std::move(operand);
+ }
+
+ private:
+ Token op_;
+ std::unique_ptr<ParseNode> operand_;
+
+ DISALLOW_COPY_AND_ASSIGN(UnaryOpNode);
+};
+
+// BlockCommentNode ------------------------------------------------------------
+
+// This node type is only used for standalone comments (that is, those not
+// specifically attached to another syntax element). The most common of these
+// is a standard header block. This node contains only the last line of such
+// a comment block as the anchor, and other lines of the block comment are
+// hung off of it as Before comments, similar to other syntax elements.
+class BlockCommentNode : public ParseNode {
+ public:
+ BlockCommentNode();
+ ~BlockCommentNode() override;
+
+ const BlockCommentNode* AsBlockComment() const override;
+ Value Execute(Scope* scope, Err* err) const override;
+ LocationRange GetRange() const override;
+ Err MakeErrorDescribing(
+ const std::string& msg,
+ const std::string& help = std::string()) const override;
+ void Print(std::ostream& out, int indent) const override;
+
+ const Token& comment() const { return comment_; }
+ void set_comment(const Token& t) { comment_ = t; }
+
+ private:
+ Token comment_;
+
+ DISALLOW_COPY_AND_ASSIGN(BlockCommentNode);
+};
+
+// EndNode ---------------------------------------------------------------------
+
+// This node type is used as the end_ object for lists and blocks (rather than
+// just the end ']', '}', or ')' token). This is so that during formatting
+// traversal there is a node that appears at the end of the block to which
+// comments can be attached.
+class EndNode : public ParseNode {
+ public:
+ explicit EndNode(const Token& token);
+ ~EndNode() override;
+
+ const EndNode* AsEnd() const override;
+ Value Execute(Scope* scope, Err* err) const override;
+ LocationRange GetRange() const override;
+ Err MakeErrorDescribing(
+ const std::string& msg,
+ const std::string& help = std::string()) const override;
+ void Print(std::ostream& out, int indent) const override;
+
+ const Token& value() const { return value_; }
+ void set_value(const Token& t) { value_ = t; }
+
+ private:
+ Token value_;
+
+ DISALLOW_COPY_AND_ASSIGN(EndNode);
+};
+
+#endif // TOOLS_GN_PARSE_TREE_H_
diff --git a/chromium/tools/gn/parse_tree_unittest.cc b/chromium/tools/gn/parse_tree_unittest.cc
new file mode 100644
index 00000000000..f5edbc8f064
--- /dev/null
+++ b/chromium/tools/gn/parse_tree_unittest.cc
@@ -0,0 +1,254 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/parse_tree.h"
+
+#include <stdint.h>
+#include <utility>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/test_with_scope.h"
+
+TEST(ParseTree, Accessor) {
+ TestWithScope setup;
+
+ // Make a pretend parse node with proper tracking that we can blame for the
+ // given value.
+ InputFile input_file(SourceFile("//foo"));
+ Token base_token(Location(&input_file, 1, 1, 1), Token::IDENTIFIER, "a");
+ Token member_token(Location(&input_file, 1, 1, 1), Token::IDENTIFIER, "b");
+
+ AccessorNode accessor;
+ accessor.set_base(base_token);
+
+ std::unique_ptr<IdentifierNode> member_identifier(
+ new IdentifierNode(member_token));
+ accessor.set_member(std::move(member_identifier));
+
+ // The access should fail because a is not defined.
+ Err err;
+ Value result = accessor.Execute(setup.scope(), &err);
+ EXPECT_TRUE(err.has_error());
+ EXPECT_EQ(Value::NONE, result.type());
+
+ // Define a as a Scope. It should still fail because b isn't defined.
+ err = Err();
+ setup.scope()->SetValue(
+ "a", Value(nullptr, std::unique_ptr<Scope>(new Scope(setup.scope()))),
+ nullptr);
+ result = accessor.Execute(setup.scope(), &err);
+ EXPECT_TRUE(err.has_error());
+ EXPECT_EQ(Value::NONE, result.type());
+
+ // Define b, accessor should succeed now.
+ const int64_t kBValue = 42;
+ err = Err();
+ setup.scope()
+ ->GetMutableValue("a", false)
+ ->scope_value()
+ ->SetValue("b", Value(nullptr, kBValue), nullptr);
+ result = accessor.Execute(setup.scope(), &err);
+ EXPECT_FALSE(err.has_error());
+ ASSERT_EQ(Value::INTEGER, result.type());
+ EXPECT_EQ(kBValue, result.int_value());
+}
+
+TEST(ParseTree, BlockUnusedVars) {
+ TestWithScope setup;
+
+ // Printing both values should be OK.
+ //
+ // The crazy template definition here is a way to execute a block without
+ // defining a target. Templates require that both the target_name and the
+ // invoker be used, which is what the assertion statement inside the template
+ // does.
+ TestParseInput input_all_used(
+ "template(\"foo\") { assert(target_name != 0 && invoker != 0) }\n"
+ "foo(\"a\") {\n"
+ " a = 12\n"
+ " b = 13\n"
+ " print(\"$a $b\")\n"
+ "}");
+ EXPECT_FALSE(input_all_used.has_error());
+
+ Err err;
+ input_all_used.parsed()->Execute(setup.scope(), &err);
+ EXPECT_FALSE(err.has_error());
+
+ // Skipping one should throw an unused var error.
+ TestParseInput input_unused(
+ "foo(\"a\") {\n"
+ " a = 12\n"
+ " b = 13\n"
+ " print(\"$a\")\n"
+ "}");
+ EXPECT_FALSE(input_unused.has_error());
+
+ input_unused.parsed()->Execute(setup.scope(), &err);
+ EXPECT_TRUE(err.has_error());
+
+ // Also verify that the unused variable has the correct origin set. The
+ // origin will point to the value assigned to the variable (in this case, the
+  // "13" assigned to "b").
+ EXPECT_EQ(3, err.location().line_number());
+ EXPECT_EQ(7, err.location().column_number());
+}
+
+TEST(ParseTree, OriginForDereference) {
+ TestWithScope setup;
+ TestParseInput input(
+ "a = 6\n"
+ "get_target_outputs(a)");
+ EXPECT_FALSE(input.has_error());
+
+ Err err;
+ input.parsed()->Execute(setup.scope(), &err);
+ EXPECT_TRUE(err.has_error());
+
+ // The origin for the "not a string" error message should be where the value
+ // was dereferenced (the "a" on the second line).
+ EXPECT_EQ(2, err.location().line_number());
+ EXPECT_EQ(20, err.location().column_number());
+}
+
+TEST(ParseTree, SortRangeExtraction) {
+ TestWithScope setup;
+
+ // Ranges are [begin, end).
+
+ {
+ TestParseInput input(
+ "sources = [\n"
+ " \"a\",\n"
+ " \"b\",\n"
+ " \n"
+ " #\n"
+ " # Block\n"
+ " #\n"
+ " \n"
+ " \"c\","
+ " \"d\","
+ "]\n");
+ EXPECT_FALSE(input.has_error());
+ ASSERT_TRUE(input.parsed()->AsBlock());
+ ASSERT_TRUE(input.parsed()->AsBlock()->statements()[0]->AsBinaryOp());
+ const BinaryOpNode* binop =
+ input.parsed()->AsBlock()->statements()[0]->AsBinaryOp();
+ ASSERT_TRUE(binop->right()->AsList());
+ const ListNode* list = binop->right()->AsList();
+ EXPECT_EQ(5u, list->contents().size());
+ auto ranges = list->GetSortRanges();
+ ASSERT_EQ(2u, ranges.size());
+ EXPECT_EQ(0u, ranges[0].begin);
+ EXPECT_EQ(2u, ranges[0].end);
+ EXPECT_EQ(3u, ranges[1].begin);
+ EXPECT_EQ(5u, ranges[1].end);
+ }
+
+ {
+ TestParseInput input(
+ "sources = [\n"
+ " \"a\",\n"
+ " \"b\",\n"
+ " \n"
+ " # Attached comment.\n"
+ " \"c\","
+ " \"d\","
+ "]\n");
+ EXPECT_FALSE(input.has_error());
+ ASSERT_TRUE(input.parsed()->AsBlock());
+ ASSERT_TRUE(input.parsed()->AsBlock()->statements()[0]->AsBinaryOp());
+ const BinaryOpNode* binop =
+ input.parsed()->AsBlock()->statements()[0]->AsBinaryOp();
+ ASSERT_TRUE(binop->right()->AsList());
+ const ListNode* list = binop->right()->AsList();
+ EXPECT_EQ(4u, list->contents().size());
+ auto ranges = list->GetSortRanges();
+ ASSERT_EQ(2u, ranges.size());
+ EXPECT_EQ(0u, ranges[0].begin);
+ EXPECT_EQ(2u, ranges[0].end);
+ EXPECT_EQ(2u, ranges[1].begin);
+ EXPECT_EQ(4u, ranges[1].end);
+ }
+
+ {
+ TestParseInput input(
+ "sources = [\n"
+ " # At end of list.\n"
+ " \"zzzzzzzzzzz.cc\","
+ "]\n");
+ EXPECT_FALSE(input.has_error());
+ ASSERT_TRUE(input.parsed()->AsBlock());
+ ASSERT_TRUE(input.parsed()->AsBlock()->statements()[0]->AsBinaryOp());
+ const BinaryOpNode* binop =
+ input.parsed()->AsBlock()->statements()[0]->AsBinaryOp();
+ ASSERT_TRUE(binop->right()->AsList());
+ const ListNode* list = binop->right()->AsList();
+ EXPECT_EQ(1u, list->contents().size());
+ auto ranges = list->GetSortRanges();
+ ASSERT_EQ(1u, ranges.size());
+ EXPECT_EQ(0u, ranges[0].begin);
+ EXPECT_EQ(1u, ranges[0].end);
+ }
+
+ {
+ TestParseInput input(
+ "sources = [\n"
+ " # Block at start.\n"
+ " \n"
+ " \"z.cc\","
+ " \"y.cc\","
+ "]\n");
+ EXPECT_FALSE(input.has_error());
+ ASSERT_TRUE(input.parsed()->AsBlock());
+ ASSERT_TRUE(input.parsed()->AsBlock()->statements()[0]->AsBinaryOp());
+ const BinaryOpNode* binop =
+ input.parsed()->AsBlock()->statements()[0]->AsBinaryOp();
+ ASSERT_TRUE(binop->right()->AsList());
+ const ListNode* list = binop->right()->AsList();
+ EXPECT_EQ(3u, list->contents().size());
+ auto ranges = list->GetSortRanges();
+ ASSERT_EQ(1u, ranges.size());
+ EXPECT_EQ(1u, ranges[0].begin);
+ EXPECT_EQ(3u, ranges[0].end);
+ }
+}
+
+TEST(ParseTree, Integers) {
+ static const char* const kGood[] = {
+ "0",
+ "10",
+ "-54321",
+ "9223372036854775807", // INT64_MAX
+ "-9223372036854775808", // INT64_MIN
+ };
+ for (auto s : kGood) {
+ TestParseInput input(std::string("x = ") + s);
+ EXPECT_FALSE(input.has_error());
+
+ TestWithScope setup;
+ Err err;
+ input.parsed()->Execute(setup.scope(), &err);
+ EXPECT_FALSE(err.has_error());
+ }
+
+ static const char* const kBad[] = {
+ "-0",
+ "010",
+ "-010",
+ "9223372036854775808", // INT64_MAX + 1
+ "-9223372036854775809", // INT64_MIN - 1
+ };
+ for (auto s : kBad) {
+ TestParseInput input(std::string("x = ") + s);
+ EXPECT_FALSE(input.has_error());
+
+ TestWithScope setup;
+ Err err;
+ input.parsed()->Execute(setup.scope(), &err);
+ EXPECT_TRUE(err.has_error());
+ }
+}
diff --git a/chromium/tools/gn/parser.cc b/chromium/tools/gn/parser.cc
new file mode 100644
index 00000000000..33abfd0b2f0
--- /dev/null
+++ b/chromium/tools/gn/parser.cc
@@ -0,0 +1,770 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/parser.h"
+
+#include <utility>
+
+#include "base/logging.h"
+#include "base/memory/ptr_util.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/operators.h"
+#include "tools/gn/token.h"
+
+const char kGrammar_Help[] =
+ "GN build language grammar\n"
+ "\n"
+ "Tokens\n"
+ "\n"
+ " GN build files are read as sequences of tokens. While splitting the\n"
+ " file into tokens, the next token is the longest sequence of characters\n"
+ " that form a valid token.\n"
+ "\n"
+ "White space and comments\n"
+ "\n"
+ " White space is comprised of spaces (U+0020), horizontal tabs (U+0009),\n"
+ " carriage returns (U+000D), and newlines (U+000A).\n"
+ "\n"
+ " Comments start at the character \"#\" and stop at the next newline.\n"
+ "\n"
+ " White space and comments are ignored except that they may separate\n"
+ " tokens that would otherwise combine into a single token.\n"
+ "\n"
+ "Identifiers\n"
+ "\n"
+ " Identifiers name variables and functions.\n"
+ "\n"
+ " identifier = letter { letter | digit } .\n"
+ " letter = \"A\" ... \"Z\" | \"a\" ... \"z\" | \"_\" .\n"
+ " digit = \"0\" ... \"9\" .\n"
+ "\n"
+ "Keywords\n"
+ "\n"
+ " The following keywords are reserved and may not be used as\n"
+ " identifiers:\n"
+ "\n"
+ " else false if true\n"
+ "\n"
+ "Integer literals\n"
+ "\n"
+ " An integer literal represents a decimal integer value.\n"
+ "\n"
+ " integer = [ \"-\" ] digit { digit } .\n"
+ "\n"
+ " Leading zeros and negative zero are disallowed.\n"
+ "\n"
+ "String literals\n"
+ "\n"
+ " A string literal represents a string value consisting of the quoted\n"
+ " characters with possible escape sequences and variable expansions.\n"
+ "\n"
+ " string = `\"` { char | escape | expansion } `\"` .\n"
+ " escape = `\\` ( \"$\" | `\"` | char ) .\n"
+ " BracketExpansion = \"{\" ( identifier | ArrayAccess | ScopeAccess "
+ ") \"}\" .\n"
+ " Hex = \"0x\" [0-9A-Fa-f][0-9A-Fa-f]\n"
+ " expansion = \"$\" ( identifier | BracketExpansion | Hex ) .\n"
+ " char = /* any character except \"$\", `\"`, or newline "
+ "*/ .\n"
+ "\n"
+ " After a backslash, certain sequences represent special characters:\n"
+ "\n"
+ " \\\" U+0022 quotation mark\n"
+ " \\$ U+0024 dollar sign\n"
+ " \\\\ U+005C backslash\n"
+ "\n"
+ " All other backslashes represent themselves.\n"
+ "\n"
+ " To insert an arbitrary byte value, use $0xFF. For example, to\n"
+ " insert a newline character: \"Line one$0x0ALine two\".\n"
+ "\n"
+ "Punctuation\n"
+ "\n"
+ " The following character sequences represent punctuation:\n"
+ "\n"
+ " + += == != ( )\n"
+ " - -= < <= [ ]\n"
+ " ! = > >= { }\n"
+ " && || . ,\n"
+ "\n"
+ "Grammar\n"
+ "\n"
+ " The input tokens form a syntax tree following a context-free grammar:\n"
+ "\n"
+ " File = StatementList .\n"
+ "\n"
+ " Statement = Assignment | Call | Condition .\n"
+ " Assignment = identifier AssignOp Expr .\n"
+ " Call = identifier \"(\" [ ExprList ] \")\" [ Block ] .\n"
+ " Condition = \"if\" \"(\" Expr \")\" Block\n"
+ " [ \"else\" ( Condition | Block ) ] .\n"
+ " Block = \"{\" StatementList \"}\" .\n"
+ " StatementList = { Statement } .\n"
+ "\n"
+ " ArrayAccess = identifier \"[\" { identifier | integer } \"]\" .\n"
+ " ScopeAccess = identifier \".\" identifier .\n"
+ " Expr = UnaryExpr | Expr BinaryOp Expr .\n"
+ " UnaryExpr = PrimaryExpr | UnaryOp UnaryExpr .\n"
+ " PrimaryExpr = identifier | integer | string | Call\n"
+ " | ArrayAccess | ScopeAccess\n"
+ " | \"(\" Expr \")\"\n"
+ " | \"[\" [ ExprList [ \",\" ] ] \"]\" .\n"
+ " ExprList = Expr { \",\" Expr } .\n"
+ "\n"
+ " AssignOp = \"=\" | \"+=\" | \"-=\" .\n"
+ " UnaryOp = \"!\" .\n"
+ " BinaryOp = \"+\" | \"-\" // highest priority\n"
+ " | \"<\" | \"<=\" | \">\" | \">=\"\n"
+ " | \"==\" | \"!=\"\n"
+ " | \"&&\"\n"
+ " | \"||\" . // lowest priority\n"
+ "\n"
+ " All binary operators are left-associative.\n";
+
+enum Precedence {
+ PRECEDENCE_ASSIGNMENT = 1, // Lowest precedence.
+ PRECEDENCE_OR = 2,
+ PRECEDENCE_AND = 3,
+ PRECEDENCE_EQUALITY = 4,
+ PRECEDENCE_RELATION = 5,
+ PRECEDENCE_SUM = 6,
+ PRECEDENCE_PREFIX = 7,
+ PRECEDENCE_CALL = 8,
+ PRECEDENCE_DOT = 9, // Highest precedence.
+};
+
+// The top-level for blocks/ifs is recursive descent, the expression parser is
+// a Pratt parser. The basic idea there is to have the precedences (and
+// associativities) encoded relative to each other and only parse up until you
+// hit something of that precedence. There's a dispatch table in expressions_
+// at the top of parser.cc that describes how each token dispatches if it's
+// seen as either a prefix or infix operator, and if it's infix, what its
+// precedence is.
+//
+// Refs:
+// - http://javascript.crockford.com/tdop/tdop.html
+// - http://journal.stuffwithstuff.com/2011/03/19/pratt-parsers-expression-parsing-made-easy/
+
+// Indexed by Token::Type.
+ParserHelper Parser::expressions_[] = {
+ {nullptr, nullptr, -1}, // INVALID
+ {&Parser::Literal, nullptr, -1}, // INTEGER
+ {&Parser::Literal, nullptr, -1}, // STRING
+ {&Parser::Literal, nullptr, -1}, // TRUE_TOKEN
+ {&Parser::Literal, nullptr, -1}, // FALSE_TOKEN
+ {nullptr, &Parser::Assignment, PRECEDENCE_ASSIGNMENT}, // EQUAL
+ {nullptr, &Parser::BinaryOperator, PRECEDENCE_SUM}, // PLUS
+ {nullptr, &Parser::BinaryOperator, PRECEDENCE_SUM}, // MINUS
+ {nullptr, &Parser::Assignment, PRECEDENCE_ASSIGNMENT}, // PLUS_EQUALS
+ {nullptr, &Parser::Assignment, PRECEDENCE_ASSIGNMENT}, // MINUS_EQUALS
+ {nullptr, &Parser::BinaryOperator, PRECEDENCE_EQUALITY}, // EQUAL_EQUAL
+ {nullptr, &Parser::BinaryOperator, PRECEDENCE_EQUALITY}, // NOT_EQUAL
+ {nullptr, &Parser::BinaryOperator, PRECEDENCE_RELATION}, // LESS_EQUAL
+ {nullptr, &Parser::BinaryOperator, PRECEDENCE_RELATION}, // GREATER_EQUAL
+ {nullptr, &Parser::BinaryOperator, PRECEDENCE_RELATION}, // LESS_THAN
+ {nullptr, &Parser::BinaryOperator, PRECEDENCE_RELATION}, // GREATER_THAN
+ {nullptr, &Parser::BinaryOperator, PRECEDENCE_AND}, // BOOLEAN_AND
+ {nullptr, &Parser::BinaryOperator, PRECEDENCE_OR}, // BOOLEAN_OR
+ {&Parser::Not, nullptr, -1}, // BANG
+ {nullptr, &Parser::DotOperator, PRECEDENCE_DOT}, // DOT
+ {&Parser::Group, nullptr, -1}, // LEFT_PAREN
+ {nullptr, nullptr, -1}, // RIGHT_PAREN
+ {&Parser::List, &Parser::Subscript, PRECEDENCE_CALL}, // LEFT_BRACKET
+ {nullptr, nullptr, -1}, // RIGHT_BRACKET
+ {nullptr, nullptr, -1}, // LEFT_BRACE
+ {nullptr, nullptr, -1}, // RIGHT_BRACE
+ {nullptr, nullptr, -1}, // IF
+ {nullptr, nullptr, -1}, // ELSE
+ {&Parser::Name, &Parser::IdentifierOrCall, PRECEDENCE_CALL}, // IDENTIFIER
+ {nullptr, nullptr, -1}, // COMMA
+ {nullptr, nullptr, -1}, // UNCLASSIFIED_COMMENT
+ {nullptr, nullptr, -1}, // LINE_COMMENT
+ {nullptr, nullptr, -1}, // SUFFIX_COMMENT
+ {&Parser::BlockComment, nullptr, -1}, // BLOCK_COMMENT
+};
+
+Parser::Parser(const std::vector<Token>& tokens, Err* err)
+ : err_(err), cur_(0) {
+ for (const auto& token : tokens) {
+ switch (token.type()) {
+ case Token::LINE_COMMENT:
+ line_comment_tokens_.push_back(token);
+ break;
+ case Token::SUFFIX_COMMENT:
+ suffix_comment_tokens_.push_back(token);
+ break;
+ default:
+ // Note that BLOCK_COMMENTs (top-level standalone comments) are passed
+ // through the real parser.
+ tokens_.push_back(token);
+ break;
+ }
+ }
+}
+
+Parser::~Parser() {
+}
+
+// static
+// Parses a whole file's worth of tokens into a BlockNode tree. Returns null
+// and sets |err| on failure.
+std::unique_ptr<ParseNode> Parser::Parse(const std::vector<Token>& tokens,
+                                         Err* err) {
+  Parser p(tokens, err);
+  return p.ParseFile();
+}
+
+// static
+// Parses |tokens| as a single expression. Any tokens left over once the
+// expression is complete are reported as an error rather than ignored.
+std::unique_ptr<ParseNode> Parser::ParseExpression(
+    const std::vector<Token>& tokens,
+    Err* err) {
+  Parser p(tokens, err);
+  std::unique_ptr<ParseNode> expr = p.ParseExpression();
+  if (!p.at_end() && !err->has_error()) {
+    *err = Err(p.cur_token(), "Trailing garbage");
+    return nullptr;
+  }
+  return expr;
+}
+
+// static
+// Like ParseExpression(), but first rejects any token that cannot appear in
+// a pure literal value: only integers, strings, booleans, brackets, and
+// commas are allowed (no identifiers, operators, or calls).
+std::unique_ptr<ParseNode> Parser::ParseValue(const std::vector<Token>& tokens,
+                                              Err* err) {
+  for (const Token& token : tokens) {
+    switch (token.type()) {
+      case Token::INTEGER:
+      case Token::STRING:
+      case Token::TRUE_TOKEN:
+      case Token::FALSE_TOKEN:
+      case Token::LEFT_BRACKET:
+      case Token::RIGHT_BRACKET:
+      case Token::COMMA:
+        continue;
+      default:
+        *err = Err(token, "Invalid token in literal value");
+        return nullptr;
+    }
+  }
+
+  return ParseExpression(tokens, err);
+}
+
+// Returns true when |node| is one of the three assignment forms: =, += or -=.
+// Null input is allowed and yields false.
+bool Parser::IsAssignment(const ParseNode* node) const {
+  if (!node)
+    return false;
+  const auto* binary_op = node->AsBinaryOp();
+  if (!binary_op)
+    return false;
+  Token::Type op_type = binary_op->op().type();
+  return op_type == Token::EQUAL || op_type == Token::PLUS_EQUALS ||
+         op_type == Token::MINUS_EQUALS;
+}
+
+// Returns true for tokens that can begin a new statement, which terminates
+// the expression currently being parsed (identifiers, braces, if/else).
+bool Parser::IsStatementBreak(Token::Type token_type) const {
+  return token_type == Token::IDENTIFIER || token_type == Token::LEFT_BRACE ||
+         token_type == Token::RIGHT_BRACE || token_type == Token::IF ||
+         token_type == Token::ELSE;
+}
+
+// Peeks at the current token without consuming it; false at end of input.
+bool Parser::LookAhead(Token::Type type) {
+  return !at_end() && cur_token().type() == type;
+}
+
+// Consumes the current token only when it has the expected |type|; returns
+// whether a token was consumed.
+bool Parser::Match(Token::Type type) {
+  if (LookAhead(type)) {
+    Consume();
+    return true;
+  }
+  return false;
+}
+
+// Consumes the current token, requiring it to be of |type|; otherwise sets
+// an error using |error_message|. Convenience wrapper over the array form.
+Token Parser::Consume(Token::Type type, const char* error_message) {
+  Token::Type types[1] = { type };
+  return Consume(types, 1, error_message);
+}
+
+// Consumes the current token if it matches any of the |num_types| entries in
+// |types|; otherwise sets an error. Returns an INVALID token on any failure
+// so callers can keep a uniform control flow.
+Token Parser::Consume(Token::Type* types,
+                      size_t num_types,
+                      const char* error_message) {
+  if (has_error()) {
+    // Don't overwrite current error, but make progress through tokens so that
+    // a loop that's expecting a particular token will still terminate.
+    cur_++;
+    return Token(Location(), Token::INVALID, base::StringPiece());
+  }
+  if (at_end()) {
+    const char kEOFMsg[] = "I hit EOF instead.";
+    // Blame the last real token when there is one, so the error has a useful
+    // location; an empty stream has no location at all.
+    if (tokens_.empty())
+      *err_ = Err(Location(), error_message, kEOFMsg);
+    else
+      *err_ = Err(tokens_[tokens_.size() - 1], error_message, kEOFMsg);
+    return Token(Location(), Token::INVALID, base::StringPiece());
+  }
+
+  for (size_t i = 0; i < num_types; ++i) {
+    if (cur_token().type() == types[i])
+      return Consume();
+  }
+  *err_ = Err(cur_token(), error_message);
+  return Token(Location(), Token::INVALID, base::StringPiece());
+}
+
+// Unconditionally consumes and returns the current token. The caller must
+// ensure !at_end() first.
+Token Parser::Consume() {
+  return tokens_[cur_++];
+}
+
+// Parses an expression at the lowest precedence (accepts any expression).
+std::unique_ptr<ParseNode> Parser::ParseExpression() {
+  return ParseExpression(0);
+}
+
+// Core precedence-climbing (Pratt) loop: dispatch the first token to its
+// prefix handler, then repeatedly fold in infix operators whose precedence is
+// at least |precedence|. Tokens that can start a statement (IsStatementBreak)
+// terminate the expression, which is what lets GN omit statement separators.
+std::unique_ptr<ParseNode> Parser::ParseExpression(int precedence) {
+  if (at_end())
+    return std::unique_ptr<ParseNode>();
+
+  Token token = Consume();
+  PrefixFunc prefix = expressions_[token.type()].prefix;
+
+  if (prefix == nullptr) {
+    *err_ = Err(token,
+                std::string("Unexpected token '") + token.value().as_string() +
+                    std::string("'"));
+    return std::unique_ptr<ParseNode>();
+  }
+
+  std::unique_ptr<ParseNode> left = (this->*prefix)(token);
+  if (has_error())
+    return left;
+
+  while (!at_end() && !IsStatementBreak(cur_token().type()) &&
+         precedence <= expressions_[cur_token().type()].precedence) {
+    token = Consume();
+    InfixFunc infix = expressions_[token.type()].infix;
+    if (infix == nullptr) {
+      *err_ = Err(token,
+                  std::string("Unexpected token '") +
+                      token.value().as_string() + std::string("'"));
+      return std::unique_ptr<ParseNode>();
+    }
+    left = (this->*infix)(std::move(left), token);
+    if (has_error())
+      return std::unique_ptr<ParseNode>();
+  }
+
+  return left;
+}
+
+// Prefix handler for literals (integers, strings, booleans): a leaf node
+// wrapping the token directly.
+std::unique_ptr<ParseNode> Parser::Literal(Token token) {
+  std::unique_ptr<ParseNode> node(new LiteralNode(token));
+  return node;
+}
+
+// Prefix handler for identifiers: delegates to IdentifierOrCall() with no
+// left-hand side, which decides between a bare identifier and a call.
+std::unique_ptr<ParseNode> Parser::Name(Token token) {
+  return IdentifierOrCall(nullptr, token);
+}
+
+// Prefix handler for a standalone (block) comment at statement level; wraps
+// the comment token in a BlockCommentNode so it survives into the tree.
+std::unique_ptr<ParseNode> Parser::BlockComment(Token token) {
+  std::unique_ptr<BlockCommentNode> comment(new BlockCommentNode());
+  comment->set_comment(token);
+  return std::move(comment);
+}
+
+// Prefix handler for '(': parses a parenthesized subexpression and requires
+// a closing ')'. The grouping produces no node of its own.
+std::unique_ptr<ParseNode> Parser::Group(Token token) {
+  std::unique_ptr<ParseNode> expr = ParseExpression();
+  if (has_error())
+    return std::unique_ptr<ParseNode>();
+  Consume(Token::RIGHT_PAREN, "Expected ')'");
+  return expr;
+}
+
+// Prefix handler for '!': parses the operand just above prefix precedence
+// and wraps it in a UnaryOpNode.
+std::unique_ptr<ParseNode> Parser::Not(Token token) {
+  std::unique_ptr<ParseNode> expr = ParseExpression(PRECEDENCE_PREFIX + 1);
+  if (has_error())
+    return std::unique_ptr<ParseNode>();
+  if (!expr) {
+    // A null expression without an error means we hit end-of-input.
+    if (!has_error())
+      *err_ = Err(token, "Expected right-hand side for '!'.");
+    return std::unique_ptr<ParseNode>();
+  }
+  std::unique_ptr<UnaryOpNode> unary_op(new UnaryOpNode);
+  unary_op->set_op(token);
+  unary_op->set_operand(std::move(expr));
+  return std::move(unary_op);
+}
+
+// Prefix handler for '[': parses a [...] list literal. Trailing commas are
+// permitted here (unlike in function-call argument lists). The parameter is
+// the LEFT_BRACKET token; renamed from |node| to |token| to match the
+// declaration in parser.h and the other prefix handlers.
+std::unique_ptr<ParseNode> Parser::List(Token token) {
+  std::unique_ptr<ParseNode> list(ParseList(token, Token::RIGHT_BRACKET, true));
+  if (!has_error() && !at_end())
+    Consume(Token::RIGHT_BRACKET, "Expected ']'");
+  return list;
+}
+
+// Infix handler for all ordinary binary operators. The right-hand side is
+// parsed at (precedence + 1), which makes every binary operator
+// left-associative.
+std::unique_ptr<ParseNode> Parser::BinaryOperator(
+    std::unique_ptr<ParseNode> left,
+    Token token) {
+  std::unique_ptr<ParseNode> right =
+      ParseExpression(expressions_[token.type()].precedence + 1);
+  if (!right) {
+    if (!has_error()) {
+      *err_ = Err(token, "Expected right-hand side for '" +
+                             token.value().as_string() + "'");
+    }
+    return std::unique_ptr<ParseNode>();
+  }
+  std::unique_ptr<BinaryOpNode> binary_op(new BinaryOpNode);
+  binary_op->set_op(token);
+  binary_op->set_left(std::move(left));
+  binary_op->set_right(std::move(right));
+  return std::move(binary_op);
+}
+
+// Handles an identifier in either prefix position (via Name(), |left| null)
+// or infix position. Distinguishes a bare identifier from a function call
+// "name(args)" optionally followed by a "{ ... }" scope block.
+std::unique_ptr<ParseNode> Parser::IdentifierOrCall(
+    std::unique_ptr<ParseNode> left,
+    Token token) {
+  // Default to an empty argument list located at the identifier, replaced
+  // below if an explicit (args) list is present.
+  std::unique_ptr<ListNode> list(new ListNode);
+  list->set_begin_token(token);
+  list->set_end(base::WrapUnique(new EndNode(token)));
+  std::unique_ptr<BlockNode> block;
+  bool has_arg = false;
+  if (LookAhead(Token::LEFT_PAREN)) {
+    Token start_token = Consume();
+    // Parsing a function call.
+    has_arg = true;
+    if (Match(Token::RIGHT_PAREN)) {
+      // Nothing, just an empty call.
+    } else {
+      list = ParseList(start_token, Token::RIGHT_PAREN, false);
+      if (has_error())
+        return std::unique_ptr<ParseNode>();
+      Consume(Token::RIGHT_PAREN, "Expected ')' after call");
+    }
+    // Optionally with a scope.
+    if (LookAhead(Token::LEFT_BRACE)) {
+      block = ParseBlock();
+      if (has_error())
+        return std::unique_ptr<ParseNode>();
+    }
+  }
+
+  if (!left && !has_arg) {
+    // Not a function call, just a standalone identifier.
+    return std::unique_ptr<ParseNode>(new IdentifierNode(token));
+  }
+  std::unique_ptr<FunctionCallNode> func_call(new FunctionCallNode);
+  func_call->set_function(token);
+  func_call->set_args(std::move(list));
+  if (block)
+    func_call->set_block(std::move(block));
+  return std::move(func_call);
+}
+
+// Infix handler for =, += and -=. The left side must be a plain identifier;
+// the right side is parsed at assignment precedence. Assignments are
+// modeled as BinaryOpNodes.
+std::unique_ptr<ParseNode> Parser::Assignment(std::unique_ptr<ParseNode> left,
+                                              Token token) {
+  if (left->AsIdentifier() == nullptr) {
+    *err_ = Err(left.get(), "Left-hand side of assignment must be identifier.");
+    return std::unique_ptr<ParseNode>();
+  }
+  std::unique_ptr<ParseNode> value = ParseExpression(PRECEDENCE_ASSIGNMENT);
+  if (!value) {
+    if (!has_error())
+      *err_ = Err(token, "Expected right-hand side for assignment.");
+    return std::unique_ptr<ParseNode>();
+  }
+  std::unique_ptr<BinaryOpNode> assign(new BinaryOpNode);
+  assign->set_op(token);
+  assign->set_left(std::move(left));
+  assign->set_right(std::move(value));
+  return std::move(assign);
+}
+
+// Infix handler for '[' used as a subscript: "base[index]". The base must be
+// a plain identifier; the index may be any expression.
+std::unique_ptr<ParseNode> Parser::Subscript(std::unique_ptr<ParseNode> left,
+                                             Token token) {
+  // TODO: Maybe support more complex expressions like a[0][0]. This would
+  // require work on the evaluator too.
+  if (left->AsIdentifier() == nullptr) {
+    *err_ = Err(left.get(), "May only subscript identifiers.",
+        "The thing on the left hand side of the [] must be an identifier\n"
+        "and not an expression. If you need this, you'll have to assign the\n"
+        "value to a temporary before subscripting. Sorry.");
+    return std::unique_ptr<ParseNode>();
+  }
+  std::unique_ptr<ParseNode> value = ParseExpression();
+  Consume(Token::RIGHT_BRACKET, "Expecting ']' after subscript.");
+  std::unique_ptr<AccessorNode> accessor(new AccessorNode);
+  accessor->set_base(left->AsIdentifier()->value());
+  accessor->set_index(std::move(value));
+  return std::move(accessor);
+}
+
+// Infix handler for '.' member access: both sides must be plain identifiers.
+// Produces an AccessorNode with the right side stored as the member.
+std::unique_ptr<ParseNode> Parser::DotOperator(std::unique_ptr<ParseNode> left,
+                                               Token token) {
+  if (left->AsIdentifier() == nullptr) {
+    *err_ = Err(left.get(), "May only use \".\" for identifiers.",
+        "The thing on the left hand side of the dot must be an identifier\n"
+        "and not an expression. If you need this, you'll have to assign the\n"
+        "value to a temporary first. Sorry.");
+    return std::unique_ptr<ParseNode>();
+  }
+
+  // Parsing at dot precedence keeps the right side to a single identifier
+  // (e.g. in "a + b.c.d" the error lands on the second dot, not the '+').
+  std::unique_ptr<ParseNode> right = ParseExpression(PRECEDENCE_DOT);
+  if (!right || !right->AsIdentifier()) {
+    *err_ = Err(token, "Expected identifier for right-hand-side of \".\"",
+        "Good: a.cookies\nBad: a.42\nLooks good but still bad: a.cookies()");
+    return std::unique_ptr<ParseNode>();
+  }
+
+  std::unique_ptr<AccessorNode> accessor(new AccessorNode);
+  accessor->set_base(left->AsIdentifier()->value());
+  accessor->set_member(std::unique_ptr<IdentifierNode>(
+      static_cast<IdentifierNode*>(right.release())));
+  return std::move(accessor);
+}
+
+// Does not Consume the start or end token.
+// Parses comma-separated expressions until |stop_before| is the current
+// token. A trailing comma is accepted only when |allow_trailing_comma| is
+// set (list literals allow it; function-call argument lists do not).
+std::unique_ptr<ListNode> Parser::ParseList(Token start_token,
+                                            Token::Type stop_before,
+                                            bool allow_trailing_comma) {
+  std::unique_ptr<ListNode> list(new ListNode);
+  list->set_begin_token(start_token);
+  bool just_got_comma = false;
+  bool first_time = true;
+  while (!LookAhead(stop_before)) {
+    if (!first_time) {
+      if (!just_got_comma) {
+        // Require commas separate things in lists.
+        *err_ = Err(cur_token(), "Expected comma between items.");
+        return std::unique_ptr<ListNode>();
+      }
+    }
+    first_time = false;
+
+    // Why _OR? We're parsing things that are higher precedence than the ,
+    // that separates the items of the list. , should appear lower than
+    // boolean expressions (the lowest of which is OR), but above assignments.
+    list->append_item(ParseExpression(PRECEDENCE_OR));
+    if (has_error())
+      return std::unique_ptr<ListNode>();
+    if (at_end()) {
+      *err_ =
+          Err(tokens_[tokens_.size() - 1], "Unexpected end of file in list.");
+      return std::unique_ptr<ListNode>();
+    }
+    if (list->contents().back()->AsBlockComment()) {
+      // If there was a comment inside the list, we don't need a comma to the
+      // next item, so pretend we got one, if we're expecting one.
+      just_got_comma = allow_trailing_comma;
+    } else {
+      just_got_comma = Match(Token::COMMA);
+    }
+  }
+  if (just_got_comma && !allow_trailing_comma) {
+    *err_ = Err(cur_token(), "Trailing comma");
+    return std::unique_ptr<ListNode>();
+  }
+  list->set_end(base::WrapUnique(new EndNode(cur_token())));
+  return list;
+}
+
+// Parses a sequence of statements until end of input into one BlockNode,
+// then attaches the comment tokens collected in the constructor.
+std::unique_ptr<ParseNode> Parser::ParseFile() {
+  std::unique_ptr<BlockNode> file(new BlockNode);
+  for (;;) {
+    if (at_end())
+      break;
+    std::unique_ptr<ParseNode> statement = ParseStatement();
+    if (!statement)
+      break;
+    file->append_statement(std::move(statement));
+  }
+  // Leftover tokens without a set error means ParseStatement() stopped on
+  // something that cannot start a statement.
+  if (!at_end() && !has_error())
+    *err_ = Err(cur_token(), "Unexpected here, should be newline.");
+  if (has_error())
+    return std::unique_ptr<ParseNode>();
+
+  // TODO(scottmg): If this is measurably expensive, it could be done only
+  // when necessary (when reformatting, or during tests). Comments are
+  // separate from the parse tree at this point, so downstream code can remain
+  // ignorant of them.
+  AssignComments(file.get());
+
+  return std::move(file);
+}
+
+// A statement is an if-condition, a standalone block comment, or an
+// expression that is either a function call or an assignment; any other
+// expression at statement level is an error.
+std::unique_ptr<ParseNode> Parser::ParseStatement() {
+  if (LookAhead(Token::IF)) {
+    return ParseCondition();
+  } else if (LookAhead(Token::BLOCK_COMMENT)) {
+    return BlockComment(Consume());
+  } else {
+    // TODO(scottmg): Is this too strict? Just drop all the testing if we want
+    // to allow "pointless" expressions and return ParseExpression() directly.
+    std::unique_ptr<ParseNode> stmt = ParseExpression();
+    if (stmt) {
+      if (stmt->AsFunctionCall() || IsAssignment(stmt.get()))
+        return stmt;
+    }
+    if (!has_error()) {
+      Token token = at_end() ? tokens_[tokens_.size() - 1] : cur_token();
+      *err_ = Err(token, "Expecting assignment or function call.");
+    }
+    return std::unique_ptr<ParseNode>();
+  }
+}
+
+// Parses "{ statement... }". The brace tokens are recorded on the node so
+// the block's source range is known.
+std::unique_ptr<BlockNode> Parser::ParseBlock() {
+  Token begin_token =
+      Consume(Token::LEFT_BRACE, "Expected '{' to start a block.");
+  if (has_error())
+    return std::unique_ptr<BlockNode>();
+  std::unique_ptr<BlockNode> block(new BlockNode);
+  block->set_begin_token(begin_token);
+
+  for (;;) {
+    if (LookAhead(Token::RIGHT_BRACE)) {
+      block->set_end(base::WrapUnique(new EndNode(Consume())));
+      break;
+    }
+
+    std::unique_ptr<ParseNode> statement = ParseStatement();
+    if (!statement)
+      return std::unique_ptr<BlockNode>();
+    block->append_statement(std::move(statement));
+  }
+  return block;
+}
+
+// Parses "if (cond) { ... }" with an optional "else { ... }" or chained
+// "else if ...". Assignments are rejected inside the condition expression.
+std::unique_ptr<ParseNode> Parser::ParseCondition() {
+  std::unique_ptr<ConditionNode> condition(new ConditionNode);
+  condition->set_if_token(Consume(Token::IF, "Expected 'if'"));
+  Consume(Token::LEFT_PAREN, "Expected '(' after 'if'.");
+  condition->set_condition(ParseExpression());
+  if (IsAssignment(condition->condition()))
+    *err_ = Err(condition->condition(), "Assignment not allowed in 'if'.");
+  Consume(Token::RIGHT_PAREN, "Expected ')' after condition of 'if'.");
+  condition->set_if_true(ParseBlock());
+  if (Match(Token::ELSE)) {
+    if (LookAhead(Token::LEFT_BRACE)) {
+      condition->set_if_false(ParseBlock());
+    } else if (LookAhead(Token::IF)) {
+      // "else if" nests another ConditionNode as the false branch.
+      condition->set_if_false(ParseStatement());
+    } else {
+      *err_ = Err(cur_token(), "Expected '{' or 'if' after 'else'.");
+      return std::unique_ptr<ParseNode>();
+    }
+  }
+  if (has_error())
+    return std::unique_ptr<ParseNode>();
+  return std::move(condition);
+}
+
+// Appends every node under |root| to |pre| in pre-order and to |post| in
+// post-order. AssignComments() uses these orderings to find the syntax
+// immediately following (pre) or preceding (post) each comment.
+void Parser::TraverseOrder(const ParseNode* root,
+                           std::vector<const ParseNode*>* pre,
+                           std::vector<const ParseNode*>* post) {
+  if (root) {
+    pre->push_back(root);
+
+    if (const AccessorNode* accessor = root->AsAccessor()) {
+      TraverseOrder(accessor->index(), pre, post);
+      TraverseOrder(accessor->member(), pre, post);
+    } else if (const BinaryOpNode* binop = root->AsBinaryOp()) {
+      TraverseOrder(binop->left(), pre, post);
+      TraverseOrder(binop->right(), pre, post);
+    } else if (const BlockNode* block = root->AsBlock()) {
+      for (const auto& statement : block->statements())
+        TraverseOrder(statement, pre, post);
+      TraverseOrder(block->End(), pre, post);
+    } else if (const ConditionNode* condition = root->AsConditionNode()) {
+      TraverseOrder(condition->condition(), pre, post);
+      TraverseOrder(condition->if_true(), pre, post);
+      TraverseOrder(condition->if_false(), pre, post);
+    } else if (const FunctionCallNode* func_call = root->AsFunctionCall()) {
+      TraverseOrder(func_call->args(), pre, post);
+      TraverseOrder(func_call->block(), pre, post);
+    } else if (root->AsIdentifier()) {
+      // Nothing.
+    } else if (const ListNode* list = root->AsList()) {
+      for (const auto& node : list->contents())
+        TraverseOrder(node, pre, post);
+      TraverseOrder(list->End(), pre, post);
+    } else if (root->AsLiteral()) {
+      // Nothing.
+    } else if (const UnaryOpNode* unaryop = root->AsUnaryOp()) {
+      TraverseOrder(unaryop->operand(), pre, post);
+    } else if (root->AsBlockComment()) {
+      // Nothing.
+    } else if (root->AsEnd()) {
+      // Nothing.
+    } else {
+      CHECK(false) << "Unhandled case in TraverseOrder.";
+    }
+
+    post->push_back(root);
+  }
+}
+
+// Attaches the comment tokens captured by the constructor to nearby syntax
+// nodes: line comments go before the next node, suffix comments after the
+// previous node on the same line. Positions are compared by byte offset.
+void Parser::AssignComments(ParseNode* file) {
+  // Start by generating a pre- and post- order traversal of the tree so we
+  // can determine what's before and after comments.
+  std::vector<const ParseNode*> pre;
+  std::vector<const ParseNode*> post;
+  TraverseOrder(file, &pre, &post);
+
+  // Assign line comments to syntax immediately following.
+  int cur_comment = 0;
+  for (const auto& node : pre) {
+    const Location& start = node->GetRange().begin();
+    while (cur_comment < static_cast<int>(line_comment_tokens_.size())) {
+      if (start.byte() >= line_comment_tokens_[cur_comment].location().byte()) {
+        const_cast<ParseNode*>(node)->comments_mutable()->append_before(
+            line_comment_tokens_[cur_comment]);
+        ++cur_comment;
+      } else {
+        break;
+      }
+    }
+  }
+
+  // Remaining line comments go at end of file.
+  for (; cur_comment < static_cast<int>(line_comment_tokens_.size());
+       ++cur_comment)
+    file->comments_mutable()->append_after(line_comment_tokens_[cur_comment]);
+
+  // Assign suffix to syntax immediately before.
+  cur_comment = static_cast<int>(suffix_comment_tokens_.size() - 1);
+  for (std::vector<const ParseNode*>::const_reverse_iterator i = post.rbegin();
+       i != post.rend();
+       ++i) {
+    // Don't assign suffix comments to the function, list, or block, but instead
+    // to the last thing inside.
+    if ((*i)->AsFunctionCall() || (*i)->AsList() || (*i)->AsBlock())
+      continue;
+
+    const Location& start = (*i)->GetRange().begin();
+    const Location& end = (*i)->GetRange().end();
+
+    // Don't assign suffix comments to something that starts on an earlier
+    // line, so that in:
+    //
+    //    sources = [ "a",
+    //        "b" ]  # comment
+    //
+    // it's attached to "b", not sources = [ ... ].
+    if (start.line_number() != end.line_number())
+      continue;
+
+    while (cur_comment >= 0) {
+      if (end.byte() <= suffix_comment_tokens_[cur_comment].location().byte()) {
+        const_cast<ParseNode*>(*i)->comments_mutable()->append_suffix(
+            suffix_comment_tokens_[cur_comment]);
+        --cur_comment;
+      } else {
+        break;
+      }
+    }
+
+    // Suffix comments were assigned in reverse, so if there were multiple on
+    // the same node, they need to be reversed.
+    if ((*i)->comments() && !(*i)->comments()->suffix().empty())
+      const_cast<ParseNode*>(*i)->comments_mutable()->ReverseSuffix();
+  }
+}
diff --git a/chromium/tools/gn/parser.h b/chromium/tools/gn/parser.h
new file mode 100644
index 00000000000..de828a4ecdf
--- /dev/null
+++ b/chromium/tools/gn/parser.h
@@ -0,0 +1,139 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_PARSER_H_
+#define TOOLS_GN_PARSER_H_
+
+#include <stddef.h>
+
+#include <map>
+#include <memory>
+#include <vector>
+
+#include "base/gtest_prod_util.h"
+#include "base/macros.h"
+#include "tools/gn/err.h"
+#include "tools/gn/parse_tree.h"
+
+class Parser;
+typedef std::unique_ptr<ParseNode> (Parser::*PrefixFunc)(Token token);
+typedef std::unique_ptr<ParseNode> (
+ Parser::*InfixFunc)(std::unique_ptr<ParseNode> left, Token token);
+
+extern const char kGrammar_Help[];
+
+// One row of the expression-dispatch table, indexed by Token::Type: how the
+// token is handled in prefix position, how it is handled in infix position,
+// and the binding precedence of its infix form (-1 when it has none).
+struct ParserHelper {
+  PrefixFunc prefix;
+  InfixFunc infix;
+  int precedence;
+};
+
+// Parses a series of tokens. The resulting AST will refer to the tokens passed
+// to the input, so the tokens an the file data they refer to must outlive your
+// use of the ParseNode.
+class Parser {
+ public:
+  // Will return a null pointer and set the err on error.
+  static std::unique_ptr<ParseNode> Parse(const std::vector<Token>& tokens,
+                                          Err* err);
+
+  // Alternative to parsing that assumes the input is an expression.
+  static std::unique_ptr<ParseNode> ParseExpression(
+      const std::vector<Token>& tokens,
+      Err* err);
+
+  // Alternative to parsing that assumes the input is a literal value.
+  static std::unique_ptr<ParseNode> ParseValue(const std::vector<Token>& tokens,
+                                               Err* err);
+
+ private:
+  // Vector must be valid for lifetime of call.
+  Parser(const std::vector<Token>& tokens, Err* err);
+  ~Parser();
+
+  std::unique_ptr<ParseNode> ParseExpression();
+
+  // Parses an expression with the given precedence or higher.
+  std::unique_ptr<ParseNode> ParseExpression(int precedence);
+
+  // |PrefixFunc|s used in parsing expressions.
+  std::unique_ptr<ParseNode> Literal(Token token);
+  std::unique_ptr<ParseNode> Name(Token token);
+  std::unique_ptr<ParseNode> Group(Token token);
+  std::unique_ptr<ParseNode> Not(Token token);
+  std::unique_ptr<ParseNode> List(Token token);
+  std::unique_ptr<ParseNode> BlockComment(Token token);
+
+  // |InfixFunc|s used in parsing expressions.
+  std::unique_ptr<ParseNode> BinaryOperator(std::unique_ptr<ParseNode> left,
+                                            Token token);
+  std::unique_ptr<ParseNode> IdentifierOrCall(std::unique_ptr<ParseNode> left,
+                                              Token token);
+  std::unique_ptr<ParseNode> Assignment(std::unique_ptr<ParseNode> left,
+                                        Token token);
+  std::unique_ptr<ParseNode> Subscript(std::unique_ptr<ParseNode> left,
+                                       Token token);
+  std::unique_ptr<ParseNode> DotOperator(std::unique_ptr<ParseNode> left,
+                                         Token token);
+
+  // Helper to parse a comma separated list, optionally allowing trailing
+  // commas (allowed in [] lists, not in function calls).
+  std::unique_ptr<ListNode> ParseList(Token start_token,
+                                      Token::Type stop_before,
+                                      bool allow_trailing_comma);
+
+  std::unique_ptr<ParseNode> ParseFile();
+  std::unique_ptr<ParseNode> ParseStatement();
+  std::unique_ptr<BlockNode> ParseBlock();
+  std::unique_ptr<ParseNode> ParseCondition();
+
+  // Generates a pre- and post-order traversal of the tree.
+  void TraverseOrder(const ParseNode* root,
+                     std::vector<const ParseNode*>* pre,
+                     std::vector<const ParseNode*>* post);
+
+  // Attach comments to nearby syntax.
+  void AssignComments(ParseNode* file);
+
+  bool IsAssignment(const ParseNode* node) const;
+  bool IsStatementBreak(Token::Type token_type) const;
+
+  bool LookAhead(Token::Type type);
+  bool Match(Token::Type type);
+  Token Consume(Token::Type type, const char* error_message);
+  Token Consume(Token::Type* types,
+                size_t num_types,
+                const char* error_message);
+  Token Consume();
+
+  // The current (unconsumed) token; only valid when !at_end().
+  const Token& cur_token() const { return tokens_[cur_]; }
+
+  bool done() const { return at_end() || has_error(); }
+  bool at_end() const { return cur_ >= tokens_.size(); }
+  bool has_error() const { return err_->has_error(); }
+
+  // Non-comment tokens to parse, plus the comment tokens split out by the
+  // constructor for later reattachment in AssignComments().
+  std::vector<Token> tokens_;
+  std::vector<Token> line_comment_tokens_;
+  std::vector<Token> suffix_comment_tokens_;
+
+  // Prefix/infix dispatch table indexed by Token::Type.
+  static ParserHelper expressions_[Token::NUM_TYPES];
+
+  Err* err_;
+
+  // Current index into the tokens.
+  size_t cur_;
+
+  FRIEND_TEST_ALL_PREFIXES(Parser, BinaryOp);
+  FRIEND_TEST_ALL_PREFIXES(Parser, Block);
+  FRIEND_TEST_ALL_PREFIXES(Parser, Condition);
+  FRIEND_TEST_ALL_PREFIXES(Parser, Expression);
+  FRIEND_TEST_ALL_PREFIXES(Parser, FunctionCall);
+  FRIEND_TEST_ALL_PREFIXES(Parser, List);
+  FRIEND_TEST_ALL_PREFIXES(Parser, ParenExpression);
+  FRIEND_TEST_ALL_PREFIXES(Parser, UnaryOp);
+
+  DISALLOW_COPY_AND_ASSIGN(Parser);
+};
+
+#endif // TOOLS_GN_PARSER_H_
diff --git a/chromium/tools/gn/parser_unittest.cc b/chromium/tools/gn/parser_unittest.cc
new file mode 100644
index 00000000000..39703737a2a
--- /dev/null
+++ b/chromium/tools/gn/parser_unittest.cc
@@ -0,0 +1,711 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <iostream>
+#include <sstream>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/parser.h"
+#include "tools/gn/tokenizer.h"
+
+namespace {
+
+// Tokenizes |input|, returning false if tokenization fails.
+bool GetTokens(const InputFile* input, std::vector<Token>* result) {
+  result->clear();
+  Err err;
+  *result = Tokenizer::Tokenize(input, &err);
+  return !err.has_error();
+}
+
+// Parses |input| as a file and expects the printed parse tree to equal
+// |expected|.
+void DoParserPrintTest(const char* input, const char* expected) {
+  std::vector<Token> tokens;
+  InputFile input_file(SourceFile("/test"));
+  input_file.SetContents(input);
+  ASSERT_TRUE(GetTokens(&input_file, &tokens));
+
+  Err err;
+  std::unique_ptr<ParseNode> result = Parser::Parse(tokens, &err);
+  if (!result)
+    err.PrintToStdout();
+  ASSERT_TRUE(result);
+
+  std::ostringstream collector;
+  result->Print(collector, 0);
+
+  EXPECT_EQ(expected, collector.str());
+}
+
+// Parses |input| as a single expression and expects the printed parse tree
+// to equal |expected|.
+void DoExpressionPrintTest(const char* input, const char* expected) {
+  std::vector<Token> tokens;
+  InputFile input_file(SourceFile("/test"));
+  input_file.SetContents(input);
+  ASSERT_TRUE(GetTokens(&input_file, &tokens));
+
+  Err err;
+  std::unique_ptr<ParseNode> result = Parser::ParseExpression(tokens, &err);
+  // Print the error before asserting so a failure is diagnosable, matching
+  // DoParserPrintTest() above.
+  if (!result)
+    err.PrintToStdout();
+  ASSERT_TRUE(result);
+
+  std::ostringstream collector;
+  result->Print(collector, 0);
+
+  EXPECT_EQ(expected, collector.str());
+}
+
+// Expects the tokenizer or parser to identify an error at the given line and
+// character.
+void DoParserErrorTest(const char* input, int err_line, int err_char) {
+  InputFile input_file(SourceFile("/test"));
+  input_file.SetContents(input);
+
+  Err err;
+  std::vector<Token> tokens = Tokenizer::Tokenize(&input_file, &err);
+  if (!err.has_error()) {
+    std::unique_ptr<ParseNode> result = Parser::Parse(tokens, &err);
+    ASSERT_FALSE(result);
+    ASSERT_TRUE(err.has_error());
+  }
+
+  EXPECT_EQ(err_line, err.location().line_number());
+  EXPECT_EQ(err_char, err.location().column_number());
+}
+
+// Expects the tokenizer or parser to identify an error at the given line and
+// character.
+void DoExpressionErrorTest(const char* input, int err_line, int err_char) {
+  InputFile input_file(SourceFile("/test"));
+  input_file.SetContents(input);
+
+  Err err;
+  std::vector<Token> tokens = Tokenizer::Tokenize(&input_file, &err);
+  if (!err.has_error()) {
+    std::unique_ptr<ParseNode> result = Parser::ParseExpression(tokens, &err);
+    ASSERT_FALSE(result);
+    ASSERT_TRUE(err.has_error());
+  }
+
+  EXPECT_EQ(err_line, err.location().line_number());
+  EXPECT_EQ(err_char, err.location().column_number());
+}
+
+}  // namespace
+
+// Integer and string literals print as leaf LITERAL nodes.
+TEST(Parser, Literal) {
+  DoExpressionPrintTest("5", "LITERAL(5)\n");
+  DoExpressionPrintTest("\"stuff\"", "LITERAL(\"stuff\")\n");
+}
+
+// Binary + and - parse with left associativity.
+TEST(Parser, BinaryOp) {
+  // TODO(scottmg): The tokenizer is dumb, and treats "5-1" as two integers,
+  // not a binary operator between two positive integers.
+  DoExpressionPrintTest("5 - 1",
+                        "BINARY(-)\n"
+                        " LITERAL(5)\n"
+                        " LITERAL(1)\n");
+  DoExpressionPrintTest("5+1",
+                        "BINARY(+)\n"
+                        " LITERAL(5)\n"
+                        " LITERAL(1)\n");
+  DoExpressionPrintTest("5 - 1 - 2",
+                        "BINARY(-)\n"
+                        " BINARY(-)\n"
+                        "  LITERAL(5)\n"
+                        "  LITERAL(1)\n"
+                        " LITERAL(2)\n");
+}
+
+// Calls with zero and multiple arguments; a trailing comma or a missing
+// comma in the argument list is an error.
+TEST(Parser, FunctionCall) {
+  DoExpressionPrintTest("foo()",
+                        "FUNCTION(foo)\n"
+                        " LIST\n");
+  DoExpressionPrintTest("blah(1, 2)",
+                        "FUNCTION(blah)\n"
+                        " LIST\n"
+                        "  LITERAL(1)\n"
+                        "  LITERAL(2)\n");
+  DoExpressionErrorTest("foo(1, 2,)", 1, 10);
+  DoExpressionErrorTest("foo(1 2)", 1, 7);
+}
+
+// Parentheses group subexpressions without producing a node of their own;
+// an unterminated paren expression is an error.
+TEST(Parser, ParenExpression) {
+  const char* input = "(foo(1)) + (a + (b - c) + d)";
+  const char* expected =
+      "BINARY(+)\n"
+      " FUNCTION(foo)\n"
+      "  LIST\n"
+      "   LITERAL(1)\n"
+      " BINARY(+)\n"
+      "  BINARY(+)\n"
+      "   IDENTIFIER(a)\n"
+      "   BINARY(-)\n"
+      "    IDENTIFIER(b)\n"
+      "    IDENTIFIER(c)\n"
+      "  IDENTIFIER(d)\n";
+  DoExpressionPrintTest(input, expected);
+  DoExpressionErrorTest("(a +", 1, 4);
+}
+
+// Same-precedence operators chain left-to-right: 5 - 1 - 2 == (5 - 1) - 2.
+TEST(Parser, OrderOfOperationsLeftAssociative) {
+  const char* input = "5 - 1 - 2\n";
+  const char* expected =
+      "BINARY(-)\n"
+      " BINARY(-)\n"
+      "  LITERAL(5)\n"
+      "  LITERAL(1)\n"
+      " LITERAL(2)\n";
+  DoExpressionPrintTest(input, expected);
+}
+
+// == binds tighter than &&, so the equality is a child of the AND node.
+TEST(Parser, OrderOfOperationsEqualityBoolean) {
+  const char* input =
+      "if (a == \"b\" && is_stuff) {\n"
+      "  print(\"hai\")\n"
+      "}\n";
+  const char* expected =
+      "BLOCK\n"
+      " CONDITION\n"
+      "  BINARY(&&)\n"
+      "   BINARY(==)\n"
+      "    IDENTIFIER(a)\n"
+      "    LITERAL(\"b\")\n"
+      "   IDENTIFIER(is_stuff)\n"
+      "  BLOCK\n"
+      "   FUNCTION(print)\n"
+      "    LIST\n"
+      "     LITERAL(\"hai\")\n";
+  DoParserPrintTest(input, expected);
+}
+
+// '!' produces a UNARY node; a '!' with no operand is an error.
+TEST(Parser, UnaryOp) {
+  DoExpressionPrintTest("!foo",
+                        "UNARY(!)\n"
+                        " IDENTIFIER(foo)\n");
+
+  // No contents for unary operator.
+  DoExpressionErrorTest("a = !", 1, 5);
+}
+
+// List literals: empty, trailing comma allowed, nested expressions, and
+// newline-separated items; leading/doubled commas are errors.
+TEST(Parser, List) {
+  DoExpressionPrintTest("[]", "LIST\n");
+  DoExpressionPrintTest("[1,asd,]",
+                        "LIST\n"
+                        " LITERAL(1)\n"
+                        " IDENTIFIER(asd)\n");
+  DoExpressionPrintTest("[1, 2+3 - foo]",
+                        "LIST\n"
+                        " LITERAL(1)\n"
+                        " BINARY(-)\n"
+                        "  BINARY(+)\n"
+                        "   LITERAL(2)\n"
+                        "   LITERAL(3)\n"
+                        "  IDENTIFIER(foo)\n");
+  DoExpressionPrintTest("[1,\n2,\n 3,\n  4]",
+                        "LIST\n"
+                        " LITERAL(1)\n"
+                        " LITERAL(2)\n"
+                        " LITERAL(3)\n"
+                        " LITERAL(4)\n");
+
+  DoExpressionErrorTest("[a, 2+,]", 1, 7);
+  DoExpressionErrorTest("[,]", 1, 2);
+  DoExpressionErrorTest("[a,,]", 1, 4);
+}
+
+// Assignment is modeled as a BINARY(=) node; a missing right-hand side is
+// an error.
+TEST(Parser, Assignment) {
+  DoParserPrintTest("a=2",
+                    "BLOCK\n"
+                    " BINARY(=)\n"
+                    "  IDENTIFIER(a)\n"
+                    "  LITERAL(2)\n");
+
+  DoExpressionErrorTest("a = ", 1, 3);
+}
+
+// Subscript and dot accessors; nesting and assigning to accessors are
+// (currently) rejected.
+TEST(Parser, Accessor) {
+  // Accessor indexing.
+  DoParserPrintTest("a=b[c+2]",
+                    "BLOCK\n"
+                    " BINARY(=)\n"
+                    "  IDENTIFIER(a)\n"
+                    "  ACCESSOR\n"
+                    "   b\n"  // AccessorNode is a bit weird in that it holds
+                              // a Token, not a ParseNode for the base.
+                    "   BINARY(+)\n"
+                    "    IDENTIFIER(c)\n"
+                    "    LITERAL(2)\n");
+  DoParserErrorTest("a = b[1][0]", 1, 5);
+
+  // Member accessors.
+  DoParserPrintTest("a=b.c+2",
+                    "BLOCK\n"
+                    " BINARY(=)\n"
+                    "  IDENTIFIER(a)\n"
+                    "  BINARY(+)\n"
+                    "   ACCESSOR\n"
+                    "    b\n"
+                    "    IDENTIFIER(c)\n"
+                    "   LITERAL(2)\n");
+  DoParserErrorTest("a = b.c.d", 1, 6);  // Can't nest accessors (currently).
+  DoParserErrorTest("a.b = 5", 1, 1);  // Can't assign to accessors (currently).
+
+  // Error at the bad dot in the RHS, not the + operator (crbug.com/472038).
+  DoParserErrorTest("foo(a + b.c.d)", 1, 10);
+}
+
+// if / else if / else chains nest the false branch as another CONDITION.
+TEST(Parser, Condition) {
+  DoParserPrintTest("if(1) { a = 2 }",
+                    "BLOCK\n"
+                    " CONDITION\n"
+                    "  LITERAL(1)\n"
+                    "  BLOCK\n"
+                    "   BINARY(=)\n"
+                    "    IDENTIFIER(a)\n"
+                    "    LITERAL(2)\n");
+
+  DoParserPrintTest("if(1) { a = 2 } else if (0) { a = 3 } else { a = 4 }",
+                    "BLOCK\n"
+                    " CONDITION\n"
+                    "  LITERAL(1)\n"
+                    "  BLOCK\n"
+                    "   BINARY(=)\n"
+                    "    IDENTIFIER(a)\n"
+                    "    LITERAL(2)\n"
+                    "  CONDITION\n"
+                    "   LITERAL(0)\n"
+                    "   BLOCK\n"
+                    "    BINARY(=)\n"
+                    "     IDENTIFIER(a)\n"
+                    "     LITERAL(3)\n"
+                    "   BLOCK\n"
+                    "    BINARY(=)\n"
+                    "     IDENTIFIER(a)\n"
+                    "     LITERAL(4)\n");
+}
+
+// Only function calls and assignments are valid statements; bare
+// expressions at statement level are rejected.
+TEST(Parser, OnlyCallAndAssignInBody) {
+  DoParserErrorTest("[]", 1, 2);
+  DoParserErrorTest("3 + 4", 1, 5);
+  DoParserErrorTest("6 - 7", 1, 5);
+  DoParserErrorTest("if (1) { 5 } else { print(4) }", 1, 12);
+}
+
+// Assignment inside an 'if' condition is rejected (likely a typo for ==).
+TEST(Parser, NoAssignmentInCondition) {
+  DoParserErrorTest("if (a=2) {}", 1, 5);
+}
+
+// A realistic target definition: call with a string argument and a scope
+// block containing list assignments.
+TEST(Parser, CompleteFunction) {
+  const char* input =
+      "cc_test(\"foo\") {\n"
+      "  sources = [\n"
+      "    \"foo.cc\",\n"
+      "    \"foo.h\"\n"
+      "  ]\n"
+      "  dependencies = [\n"
+      "    \"base\"\n"
+      "  ]\n"
+      "}\n";
+  const char* expected =
+      "BLOCK\n"
+      " FUNCTION(cc_test)\n"
+      "  LIST\n"
+      "   LITERAL(\"foo\")\n"
+      "  BLOCK\n"
+      "   BINARY(=)\n"
+      "    IDENTIFIER(sources)\n"
+      "    LIST\n"
+      "     LITERAL(\"foo.cc\")\n"
+      "     LITERAL(\"foo.h\")\n"
+      "   BINARY(=)\n"
+      "    IDENTIFIER(dependencies)\n"
+      "    LIST\n"
+      "     LITERAL(\"base\")\n";
+  DoParserPrintTest(input, expected);
+}
+
+// A target with an if / else if / else chain inside its scope block,
+// exercising +=, -= and nested conditions.
+TEST(Parser, FunctionWithConditional) {
+  const char* input =
+      "cc_test(\"foo\") {\n"
+      "  sources = [\"foo.cc\"]\n"
+      "  if (OS == \"mac\") {\n"
+      "    sources += \"bar.cc\"\n"
+      "  } else if (OS == \"win\") {\n"
+      "    sources -= [\"asd.cc\", \"foo.cc\"]\n"
+      "  } else {\n"
+      "    dependencies += [\"bar.cc\"]\n"
+      "  }\n"
+      "}\n";
+  const char* expected =
+      "BLOCK\n"
+      " FUNCTION(cc_test)\n"
+      "  LIST\n"
+      "   LITERAL(\"foo\")\n"
+      "  BLOCK\n"
+      "   BINARY(=)\n"
+      "    IDENTIFIER(sources)\n"
+      "    LIST\n"
+      "     LITERAL(\"foo.cc\")\n"
+      "   CONDITION\n"
+      "    BINARY(==)\n"
+      "     IDENTIFIER(OS)\n"
+      "     LITERAL(\"mac\")\n"
+      "    BLOCK\n"
+      "     BINARY(+=)\n"
+      "      IDENTIFIER(sources)\n"
+      "      LITERAL(\"bar.cc\")\n"
+      "    CONDITION\n"
+      "     BINARY(==)\n"
+      "      IDENTIFIER(OS)\n"
+      "      LITERAL(\"win\")\n"
+      "     BLOCK\n"
+      "      BINARY(-=)\n"
+      "       IDENTIFIER(sources)\n"
+      "       LIST\n"
+      "        LITERAL(\"asd.cc\")\n"
+      "        LITERAL(\"foo.cc\")\n"
+      "     BLOCK\n"
+      "      BINARY(+=)\n"
+      "       IDENTIFIER(dependencies)\n"
+      "       LIST\n"
+      "        LITERAL(\"bar.cc\")\n";
+  DoParserPrintTest(input, expected);
+}
+
+// A '{' with no matching '}' reports an error at end of input.
+TEST(Parser, UnterminatedBlock) {
+  DoParserErrorTest("stuff() {", 1, 9);
+}
+
+// A number immediately followed by a letter is a tokenizer/parser error.
+TEST(Parser, BadlyTerminatedNumber) {
+  DoParserErrorTest("1234z", 1, 5);
+}
+
+TEST(Parser, NewlinesInUnusualPlaces) {
+ DoParserPrintTest(
+ "if\n"
+ "(\n"
+ "a\n"
+ ")\n"
+ "{\n"
+ "}\n",
+ "BLOCK\n"
+ " CONDITION\n"
+ " IDENTIFIER(a)\n"
+ " BLOCK\n");
+}
+
+TEST(Parser, NewlinesInUnusualPlaces2) {
+ DoParserPrintTest(
+ "a\n=\n2\n",
+ "BLOCK\n"
+ " BINARY(=)\n"
+ " IDENTIFIER(a)\n"
+ " LITERAL(2)\n");
+ DoParserPrintTest(
+ "x =\ny if\n(1\n) {}",
+ "BLOCK\n"
+ " BINARY(=)\n"
+ " IDENTIFIER(x)\n"
+ " IDENTIFIER(y)\n"
+ " CONDITION\n"
+ " LITERAL(1)\n"
+ " BLOCK\n");
+ DoParserPrintTest(
+ "x = 3\n+2",
+ "BLOCK\n"
+ " BINARY(=)\n"
+ " IDENTIFIER(x)\n"
+ " BINARY(+)\n"
+ " LITERAL(3)\n"
+ " LITERAL(2)\n"
+ );
+}
+
+TEST(Parser, NewlineBeforeSubscript) {
+ const char* input = "a = b[1]";
+ const char* input_with_newline = "a = b\n[1]";
+ const char* expected =
+ "BLOCK\n"
+ " BINARY(=)\n"
+ " IDENTIFIER(a)\n"
+ " ACCESSOR\n"
+ " b\n"
+ " LITERAL(1)\n";
+ DoParserPrintTest(
+ input,
+ expected);
+ DoParserPrintTest(
+ input_with_newline,
+ expected);
+}
+
+TEST(Parser, SequenceOfExpressions) {
+ DoParserPrintTest(
+ "a = 1 b = 2",
+ "BLOCK\n"
+ " BINARY(=)\n"
+ " IDENTIFIER(a)\n"
+ " LITERAL(1)\n"
+ " BINARY(=)\n"
+ " IDENTIFIER(b)\n"
+ " LITERAL(2)\n");
+}
+
+TEST(Parser, BlockAfterFunction) {
+ const char* input = "func(\"stuff\") {\n}";
+ // TODO(scottmg): Do we really want these to mean different things?
+ const char* input_with_newline = "func(\"stuff\")\n{\n}";
+ const char* expected =
+ "BLOCK\n"
+ " FUNCTION(func)\n"
+ " LIST\n"
+ " LITERAL(\"stuff\")\n"
+ " BLOCK\n";
+ DoParserPrintTest(input, expected);
+ DoParserPrintTest(input_with_newline, expected);
+}
+
+TEST(Parser, LongExpression) {
+ const char* input = "a = b + c && d || e";
+ const char* expected =
+ "BLOCK\n"
+ " BINARY(=)\n"
+ " IDENTIFIER(a)\n"
+ " BINARY(||)\n"
+ " BINARY(&&)\n"
+ " BINARY(+)\n"
+ " IDENTIFIER(b)\n"
+ " IDENTIFIER(c)\n"
+ " IDENTIFIER(d)\n"
+ " IDENTIFIER(e)\n";
+ DoParserPrintTest(input, expected);
+}
+
+TEST(Parser, CommentsStandalone) {
+ const char* input =
+ "# Toplevel comment.\n"
+ "\n"
+ "executable(\"wee\") {}\n";
+ const char* expected =
+ "BLOCK\n"
+ " BLOCK_COMMENT(# Toplevel comment.)\n"
+ " FUNCTION(executable)\n"
+ " LIST\n"
+ " LITERAL(\"wee\")\n"
+ " BLOCK\n";
+ DoParserPrintTest(input, expected);
+}
+
+TEST(Parser, CommentsStandaloneEof) {
+ const char* input =
+ "executable(\"wee\") {}\n"
+ "# EOF comment.\n";
+ const char* expected =
+ "BLOCK\n"
+ " +AFTER_COMMENT(\"# EOF comment.\")\n"
+ " FUNCTION(executable)\n"
+ " LIST\n"
+ " LITERAL(\"wee\")\n"
+ " BLOCK\n";
+ DoParserPrintTest(input, expected);
+}
+
+TEST(Parser, CommentsLineAttached) {
+ const char* input =
+ "executable(\"wee\") {\n"
+ " # Some sources.\n"
+ " sources = [\n"
+ " \"stuff.cc\",\n"
+ " \"things.cc\",\n"
+ " # This file is special or something.\n"
+ " \"another.cc\",\n"
+ " ]\n"
+ "}\n";
+ const char* expected =
+ "BLOCK\n"
+ " FUNCTION(executable)\n"
+ " LIST\n"
+ " LITERAL(\"wee\")\n"
+ " BLOCK\n"
+ " BINARY(=)\n"
+ " +BEFORE_COMMENT(\"# Some sources.\")\n"
+ " IDENTIFIER(sources)\n"
+ " LIST\n"
+ " LITERAL(\"stuff.cc\")\n"
+ " LITERAL(\"things.cc\")\n"
+ " LITERAL(\"another.cc\")\n"
+ " +BEFORE_COMMENT(\"# This file is special or something.\")\n";
+ DoParserPrintTest(input, expected);
+}
+
+TEST(Parser, CommentsSuffix) {
+ const char* input =
+ "executable(\"wee\") { # This is some stuff.\n"
+ "sources = [ \"a.cc\" # And another comment here.\n"
+ "] }";
+ const char* expected =
+ "BLOCK\n"
+ " FUNCTION(executable)\n"
+ " LIST\n"
+ " LITERAL(\"wee\")\n"
+ " END())\n"
+ " +SUFFIX_COMMENT(\"# This is some stuff.\")\n"
+ " BLOCK\n"
+ " BINARY(=)\n"
+ " IDENTIFIER(sources)\n"
+ " LIST\n"
+ " LITERAL(\"a.cc\")\n"
+ " +SUFFIX_COMMENT(\"# And another comment here.\")\n";
+ DoParserPrintTest(input, expected);
+}
+
+TEST(Parser, CommentsSuffixDifferentLine) {
+ const char* input =
+ "executable(\"wee\") {\n"
+ " sources = [ \"a\",\n"
+ " \"b\" ] # Comment\n"
+ "}\n";
+ const char* expected =
+ "BLOCK\n"
+ " FUNCTION(executable)\n"
+ " LIST\n"
+ " LITERAL(\"wee\")\n"
+ " BLOCK\n"
+ " BINARY(=)\n"
+ " IDENTIFIER(sources)\n"
+ " LIST\n"
+ " LITERAL(\"a\")\n"
+ " LITERAL(\"b\")\n"
+ " END(])\n"
+ " +SUFFIX_COMMENT(\"# Comment\")\n";
+ DoParserPrintTest(input, expected);
+}
+
+TEST(Parser, CommentsSuffixMultiple) {
+ const char* input =
+ "executable(\"wee\") {\n"
+ " sources = [\n"
+ " \"a\", # This is a comment,\n"
+ " # and some more,\n" // Note that this is aligned with above.
+ " # then the end.\n"
+ " ]\n"
+ "}\n";
+ const char* expected =
+ "BLOCK\n"
+ " FUNCTION(executable)\n"
+ " LIST\n"
+ " LITERAL(\"wee\")\n"
+ " BLOCK\n"
+ " BINARY(=)\n"
+ " IDENTIFIER(sources)\n"
+ " LIST\n"
+ " LITERAL(\"a\")\n"
+ " +SUFFIX_COMMENT(\"# This is a comment,\")\n"
+ " +SUFFIX_COMMENT(\"# and some more,\")\n"
+ " +SUFFIX_COMMENT(\"# then the end.\")\n";
+ DoParserPrintTest(input, expected);
+}
+
+TEST(Parser, CommentsConnectedInList) {
+ const char* input =
+ "defines = [\n"
+ "\n"
+ " # Connected comment.\n"
+ " \"WEE\",\n"
+ " \"BLORPY\",\n"
+ "]\n";
+ const char* expected =
+ "BLOCK\n"
+ " BINARY(=)\n"
+ " IDENTIFIER(defines)\n"
+ " LIST\n"
+ " LITERAL(\"WEE\")\n"
+ " +BEFORE_COMMENT(\"# Connected comment.\")\n"
+ " LITERAL(\"BLORPY\")\n";
+ DoParserPrintTest(input, expected);
+}
+
+TEST(Parser, CommentsAtEndOfBlock) {
+ const char* input =
+ "if (is_win) {\n"
+ " sources = [\"a.cc\"]\n"
+ " # Some comment at end.\n"
+ "}\n";
+ const char* expected =
+ "BLOCK\n"
+ " CONDITION\n"
+ " IDENTIFIER(is_win)\n"
+ " BLOCK\n"
+ " BINARY(=)\n"
+ " IDENTIFIER(sources)\n"
+ " LIST\n"
+ " LITERAL(\"a.cc\")\n"
+ " END(})\n"
+ " +BEFORE_COMMENT(\"# Some comment at end.\")\n";
+ DoParserPrintTest(input, expected);
+}
+
+// TODO(scottmg): I could be convinced this is incorrect. It's not clear to me
+// which thing this comment is intended to be attached to.
+TEST(Parser, CommentsEndOfBlockSingleLine) {
+ const char* input =
+ "defines = [ # EOL defines.\n"
+ "]\n";
+ const char* expected =
+ "BLOCK\n"
+ " BINARY(=)\n"
+ " IDENTIFIER(defines)\n"
+ " +SUFFIX_COMMENT(\"# EOL defines.\")\n"
+ " LIST\n";
+ DoParserPrintTest(input, expected);
+}
+
+TEST(Parser, HangingIf) {
+ DoParserErrorTest("if", 1, 1);
+}
+
+TEST(Parser, NegatingList) {
+ DoParserErrorTest("executable(\"wee\") { sources =- [ \"foo.cc\" ] }", 1, 30);
+}
+
+TEST(Parser, ConditionNoBracesIf) {
+ DoParserErrorTest(
+ "if (true)\n"
+ " foreach(foo, []) {}\n"
+ "else {\n"
+ " foreach(bar, []) {}\n"
+ "}\n",
+ 2, 3);
+}
+
+TEST(Parser, ConditionNoBracesElse) {
+ DoParserErrorTest(
+ "if (true) {\n"
+ " foreach(foo, []) {}\n"
+ "} else\n"
+ " foreach(bar, []) {}\n",
+ 4, 3);
+}
+
+TEST(Parser, ConditionNoBracesElseIf) {
+ DoParserErrorTest(
+ "if (true) {\n"
+ " foreach(foo, []) {}\n"
+ "} else if (true)\n"
+ " foreach(bar, []) {}\n",
+ 4, 3);
+}
+
+// Disallow standalone {} for introducing new scopes. These are ambiguous with
+// target declarations (e.g. is:
+// foo("bar") {}
+// a function with an associated block, or a standalone function with a
+// freestanding block.
+TEST(Parser, StandaloneBlock) {
+ DoParserErrorTest(
+ "if (true) {\n"
+ "}\n"
+ "{\n"
+ " assert(false)\n"
+ "}\n",
+ 3, 1);
+}
diff --git a/chromium/tools/gn/path_output.cc b/chromium/tools/gn/path_output.cc
new file mode 100644
index 00000000000..711341fcd19
--- /dev/null
+++ b/chromium/tools/gn/path_output.cc
@@ -0,0 +1,172 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/path_output.h"
+
+#include "base/strings/string_util.h"
+#include "build/build_config.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/output_file.h"
+#include "tools/gn/string_utils.h"
+
+PathOutput::PathOutput(const SourceDir& current_dir,
+ const base::StringPiece& source_root,
+ EscapingMode escaping)
+ : current_dir_(current_dir) {
+ inverse_current_dir_ = RebasePath("//", current_dir, source_root);
+ if (!EndsWithSlash(inverse_current_dir_))
+ inverse_current_dir_.push_back('/');
+ options_.mode = escaping;
+}
+
+PathOutput::~PathOutput() {
+}
+
+void PathOutput::WriteFile(std::ostream& out, const SourceFile& file) const {
+ WritePathStr(out, file.value());
+}
+
+void PathOutput::WriteDir(std::ostream& out,
+ const SourceDir& dir,
+ DirSlashEnding slash_ending) const {
+ if (dir.value() == "/") {
+ // Writing system root is always a slash (this will normally only come up
+ // on Posix systems).
+ if (slash_ending == DIR_NO_LAST_SLASH)
+ out << "/.";
+ else
+ out << "/";
+ } else if (dir.value() == "//") {
+ // Writing out the source root.
+ if (slash_ending == DIR_NO_LAST_SLASH) {
+ // The inverse_current_dir_ will contain a [back]slash at the end, so we
+ // can't just write it out.
+ if (inverse_current_dir_.empty()) {
+ out << ".";
+ } else {
+ out.write(inverse_current_dir_.c_str(),
+ inverse_current_dir_.size() - 1);
+ }
+ } else {
+ if (inverse_current_dir_.empty())
+ out << "./";
+ else
+ out << inverse_current_dir_;
+ }
+ } else if (dir == current_dir_) {
+ // Writing the same directory. This needs special handling here since
+ // we need to output something else other than the input.
+ if (slash_ending == DIR_INCLUDE_LAST_SLASH)
+ out << "./";
+ else
+ out << ".";
+ } else if (slash_ending == DIR_INCLUDE_LAST_SLASH) {
+ WritePathStr(out, dir.value());
+ } else {
+ // DIR_NO_LAST_SLASH mode, just trim the last char.
+ WritePathStr(out, base::StringPiece(dir.value().data(),
+ dir.value().size() - 1));
+ }
+}
+
+void PathOutput::WriteFile(std::ostream& out, const OutputFile& file) const {
+ // Here we assume that the path is already preprocessed.
+ EscapeStringToStream(out, file.value(), options_);
+}
+
+void PathOutput::WriteFiles(std::ostream& out,
+ const std::vector<OutputFile>& files) const {
+ for (const auto& file : files) {
+ out << " ";
+ WriteFile(out, file);
+ }
+}
+
+void PathOutput::WriteFiles(std::ostream& out,
+ const UniqueVector<OutputFile>& files) const {
+ for (const auto& file : files) {
+ out << " ";
+ WriteFile(out, file);
+ }
+}
+
+void PathOutput::WriteDir(std::ostream& out,
+ const OutputFile& file,
+ DirSlashEnding slash_ending) const {
+ DCHECK(file.value().empty() ||
+ file.value()[file.value().size() - 1] == '/');
+
+ switch (slash_ending) {
+ case DIR_INCLUDE_LAST_SLASH:
+ EscapeStringToStream(out, file.value(), options_);
+ break;
+ case DIR_NO_LAST_SLASH:
+ if (!file.value().empty() &&
+ file.value()[file.value().size() - 1] == '/') {
+ // Trim trailing slash.
+ EscapeStringToStream(
+ out,
+ base::StringPiece(file.value().data(), file.value().size() - 1),
+ options_);
+ } else {
+ // Doesn't end with a slash, write the whole thing.
+ EscapeStringToStream(out, file.value(), options_);
+ }
+ break;
+ }
+}
+
+void PathOutput::WriteFile(std::ostream& out,
+ const base::FilePath& file) const {
+ // Assume native file paths are always absolute.
+ EscapeStringToStream(out, FilePathToUTF8(file), options_);
+}
+
+void PathOutput::WriteSourceRelativeString(
+ std::ostream& out,
+ const base::StringPiece& str) const {
+ if (options_.mode == ESCAPE_NINJA_COMMAND) {
+ // Shell escaping needs an intermediate string since it may end up
+ // quoting the whole thing.
+ std::string intermediate;
+ intermediate.reserve(inverse_current_dir_.size() + str.size());
+ intermediate.assign(inverse_current_dir_.c_str(),
+ inverse_current_dir_.size());
+ intermediate.append(str.data(), str.size());
+
+ EscapeStringToStream(out,
+ base::StringPiece(intermediate.c_str(), intermediate.size()),
+ options_);
+ } else {
+ // Ninja (and none) escaping can avoid the intermediate string and
+ // reprocessing of the inverse_current_dir_.
+ out << inverse_current_dir_;
+ EscapeStringToStream(out, str, options_);
+ }
+}
+
+void PathOutput::WritePathStr(std::ostream& out,
+ const base::StringPiece& str) const {
+ DCHECK(str.size() > 0 && str[0] == '/');
+
+ if (str.substr(0, current_dir_.value().size()) ==
+ base::StringPiece(current_dir_.value())) {
+ // The current dir is a prefix of the output file, so we can strip the
+ // prefix and write out the result.
+ EscapeStringToStream(out, str.substr(current_dir_.value().size()),
+ options_);
+ } else if (str.size() >= 2 && str[1] == '/') {
+ WriteSourceRelativeString(out, str.substr(2));
+ } else {
+ // Input begins with one slash, don't write the current directory since
+ // it's system-absolute.
+#if defined(OS_WIN)
+ // On Windows, trim the leading slash, since the input for absolute
+ // paths will look like "/C:/foo/bar.txt".
+ EscapeStringToStream(out, str.substr(1), options_);
+#else
+ EscapeStringToStream(out, str, options_);
+#endif
+ }
+}
diff --git a/chromium/tools/gn/path_output.h b/chromium/tools/gn/path_output.h
new file mode 100644
index 00000000000..ca72948ca5b
--- /dev/null
+++ b/chromium/tools/gn/path_output.h
@@ -0,0 +1,91 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_PATH_OUTPUT_H_
+#define TOOLS_GN_PATH_OUTPUT_H_
+
+#include <iosfwd>
+#include <string>
+
+#include "base/macros.h"
+#include "base/strings/string_piece.h"
+#include "tools/gn/escape.h"
+#include "tools/gn/source_dir.h"
+#include "tools/gn/unique_vector.h"
+
+class OutputFile;
+class SourceFile;
+
+namespace base {
+class FilePath;
+}
+
+// Writes file names to streams assuming a certain input directory and
+// escaping rules. This gives us a central place for managing this state.
+class PathOutput {
+ public:
+ // Controls whether writing directory names include the trailing slash.
+ // Often we don't want the trailing slash when writing out to a command line,
+ // especially on Windows where it's a backslash and might be interpreted as
+ // escaping the thing following it.
+ enum DirSlashEnding {
+ DIR_INCLUDE_LAST_SLASH,
+ DIR_NO_LAST_SLASH,
+ };
+
+ PathOutput(const SourceDir& current_dir, const base::StringPiece& source_root,
+ EscapingMode escaping);
+ ~PathOutput();
+
+ // Read-only since inverse_current_dir_ is computed depending on this.
+ EscapingMode escaping_mode() const { return options_.mode; }
+
+ const SourceDir& current_dir() const { return current_dir_; }
+
+ // Getter/setters for flags inside the escape options.
+ bool inhibit_quoting() const { return options_.inhibit_quoting; }
+ void set_inhibit_quoting(bool iq) { options_.inhibit_quoting = iq; }
+ void set_escape_platform(EscapingPlatform p) { options_.platform = p; }
+
+ void WriteFile(std::ostream& out, const SourceFile& file) const;
+ void WriteFile(std::ostream& out, const OutputFile& file) const;
+ void WriteFile(std::ostream& out, const base::FilePath& file) const;
+
+ // Writes the given OutputFiles with spaces separating them. This will also
+ // write an initial space before the first item.
+ void WriteFiles(std::ostream& out,
+ const std::vector<OutputFile>& files) const;
+ void WriteFiles(std::ostream& out,
+ const UniqueVector<OutputFile>& files) const;
+
+ // This variant assumes the dir ends in a trailing slash or is empty.
+ void WriteDir(std::ostream& out,
+ const SourceDir& dir,
+ DirSlashEnding slash_ending) const;
+
+ void WriteDir(std::ostream& out,
+ const OutputFile& file,
+ DirSlashEnding slash_ending) const;
+
+ // Backend for WriteFile and WriteDir. This appends the given file or
+ // directory string to the file.
+ void WritePathStr(std::ostream& out, const base::StringPiece& str) const;
+
+ private:
+ // Takes the given string and writes it out, appending to the inverse
+ // current dir. This assumes leading slashes have been trimmed.
+ void WriteSourceRelativeString(std::ostream& out,
+ const base::StringPiece& str) const;
+
+ SourceDir current_dir_;
+
+ // Uses system slashes if convert_slashes_to_system_.
+ std::string inverse_current_dir_;
+
+ // Since the inverse_current_dir_ depends on some of these, we don't expose
+ // this directly to modification.
+ EscapeOptions options_;
+};
+
+#endif // TOOLS_GN_PATH_OUTPUT_H_
diff --git a/chromium/tools/gn/path_output_unittest.cc b/chromium/tools/gn/path_output_unittest.cc
new file mode 100644
index 00000000000..6a7da19b94e
--- /dev/null
+++ b/chromium/tools/gn/path_output_unittest.cc
@@ -0,0 +1,284 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <sstream>
+
+#include "base/files/file_path.h"
+#include "build/build_config.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/output_file.h"
+#include "tools/gn/path_output.h"
+#include "tools/gn/source_dir.h"
+#include "tools/gn/source_file.h"
+
+TEST(PathOutput, Basic) {
+ SourceDir build_dir("//out/Debug/");
+ base::StringPiece source_root("/source/root");
+ PathOutput writer(build_dir, source_root, ESCAPE_NONE);
+ {
+ // Normal source-root path.
+ std::ostringstream out;
+ writer.WriteFile(out, SourceFile("//foo/bar.cc"));
+ EXPECT_EQ("../../foo/bar.cc", out.str());
+ }
+ {
+ // File in the root dir.
+ std::ostringstream out;
+ writer.WriteFile(out, SourceFile("//foo.cc"));
+ EXPECT_EQ("../../foo.cc", out.str());
+ }
+ {
+ // Files in the output dir.
+ std::ostringstream out;
+ writer.WriteFile(out, SourceFile("//out/Debug/foo.cc"));
+ out << " ";
+ writer.WriteFile(out, SourceFile("//out/Debug/bar/baz.cc"));
+ EXPECT_EQ("foo.cc bar/baz.cc", out.str());
+ }
+#if defined(OS_WIN)
+ {
+ // System-absolute path.
+ std::ostringstream out;
+ writer.WriteFile(out, SourceFile("/C:/foo/bar.cc"));
+ EXPECT_EQ("C:/foo/bar.cc", out.str());
+ }
+#else
+ {
+ // System-absolute path.
+ std::ostringstream out;
+ writer.WriteFile(out, SourceFile("/foo/bar.cc"));
+ EXPECT_EQ("/foo/bar.cc", out.str());
+ }
+#endif
+}
+
+// Same as basic but the output dir is the root.
+TEST(PathOutput, BasicInRoot) {
+ SourceDir build_dir("//");
+ base::StringPiece source_root("/source/root");
+ PathOutput writer(build_dir, source_root, ESCAPE_NONE);
+ {
+ // Normal source-root path.
+ std::ostringstream out;
+ writer.WriteFile(out, SourceFile("//foo/bar.cc"));
+ EXPECT_EQ("foo/bar.cc", out.str());
+ }
+ {
+ // File in the root dir.
+ std::ostringstream out;
+ writer.WriteFile(out, SourceFile("//foo.cc"));
+ EXPECT_EQ("foo.cc", out.str());
+ }
+}
+
+TEST(PathOutput, NinjaEscaping) {
+ SourceDir build_dir("//out/Debug/");
+ base::StringPiece source_root("/source/root");
+ PathOutput writer(build_dir, source_root, ESCAPE_NINJA);
+ {
+ // Spaces and $ in filenames.
+ std::ostringstream out;
+ writer.WriteFile(out, SourceFile("//foo/foo bar$.cc"));
+ EXPECT_EQ("../../foo/foo$ bar$$.cc", out.str());
+ }
+ {
+    // No other weird stuff.
+ std::ostringstream out;
+ writer.WriteFile(out, SourceFile("//foo/\"foo\".cc"));
+ EXPECT_EQ("../../foo/\"foo\".cc", out.str());
+ }
+}
+
+TEST(PathOutput, NinjaForkEscaping) {
+ SourceDir build_dir("//out/Debug/");
+ base::StringPiece source_root("/source/root");
+ PathOutput writer(build_dir, source_root, ESCAPE_NINJA_COMMAND);
+
+ // Spaces in filenames should get quoted on Windows.
+ writer.set_escape_platform(ESCAPE_PLATFORM_WIN);
+ {
+ std::ostringstream out;
+ writer.WriteFile(out, SourceFile("//foo/foo bar.cc"));
+ EXPECT_EQ("\"../../foo/foo$ bar.cc\"", out.str());
+ }
+
+ // Spaces in filenames should get escaped on Posix.
+ writer.set_escape_platform(ESCAPE_PLATFORM_POSIX);
+ {
+ std::ostringstream out;
+ writer.WriteFile(out, SourceFile("//foo/foo bar.cc"));
+ EXPECT_EQ("../../foo/foo\\$ bar.cc", out.str());
+ }
+
+  // Quotes should get backslash-escaped on Windows and Posix.
+ writer.set_escape_platform(ESCAPE_PLATFORM_WIN);
+ {
+ std::ostringstream out;
+ writer.WriteFile(out, SourceFile("//foo/\"foobar\".cc"));
+ // Our Windows code currently quotes the whole thing in this case for
+ // code simplicity, even though it's strictly unnecessary. This might
+ // change in the future.
+ EXPECT_EQ("\"../../foo/\\\"foobar\\\".cc\"", out.str());
+ }
+ writer.set_escape_platform(ESCAPE_PLATFORM_POSIX);
+ {
+ std::ostringstream out;
+ writer.WriteFile(out, SourceFile("//foo/\"foobar\".cc"));
+ EXPECT_EQ("../../foo/\\\"foobar\\\".cc", out.str());
+ }
+
+ // Backslashes should get escaped on non-Windows and preserved on Windows.
+ writer.set_escape_platform(ESCAPE_PLATFORM_WIN);
+ {
+ std::ostringstream out;
+ writer.WriteFile(out, OutputFile("foo\\bar.cc"));
+ EXPECT_EQ("foo\\bar.cc", out.str());
+ }
+ writer.set_escape_platform(ESCAPE_PLATFORM_POSIX);
+ {
+ std::ostringstream out;
+ writer.WriteFile(out, OutputFile("foo\\bar.cc"));
+ EXPECT_EQ("foo\\\\bar.cc", out.str());
+ }
+}
+
+TEST(PathOutput, InhibitQuoting) {
+ SourceDir build_dir("//out/Debug/");
+ base::StringPiece source_root("/source/root");
+ PathOutput writer(build_dir, source_root, ESCAPE_NINJA_COMMAND);
+ writer.set_inhibit_quoting(true);
+
+ writer.set_escape_platform(ESCAPE_PLATFORM_WIN);
+ {
+ // We should get unescaped spaces in the output with no quotes.
+ std::ostringstream out;
+ writer.WriteFile(out, SourceFile("//foo/foo bar.cc"));
+ EXPECT_EQ("../../foo/foo$ bar.cc", out.str());
+ }
+
+ writer.set_escape_platform(ESCAPE_PLATFORM_POSIX);
+ {
+ // Escapes the space.
+ std::ostringstream out;
+ writer.WriteFile(out, SourceFile("//foo/foo bar.cc"));
+ EXPECT_EQ("../../foo/foo\\$ bar.cc", out.str());
+ }
+}
+
+TEST(PathOutput, WriteDir) {
+ {
+ SourceDir build_dir("//out/Debug/");
+ base::StringPiece source_root("/source/root");
+ PathOutput writer(build_dir, source_root, ESCAPE_NINJA);
+ {
+ std::ostringstream out;
+ writer.WriteDir(out, SourceDir("//foo/bar/"),
+ PathOutput::DIR_INCLUDE_LAST_SLASH);
+ EXPECT_EQ("../../foo/bar/", out.str());
+ }
+ {
+ std::ostringstream out;
+ writer.WriteDir(out, SourceDir("//foo/bar/"),
+ PathOutput::DIR_NO_LAST_SLASH);
+ EXPECT_EQ("../../foo/bar", out.str());
+ }
+
+ // Output source root dir.
+ {
+ std::ostringstream out;
+ writer.WriteDir(out, SourceDir("//"),
+ PathOutput::DIR_INCLUDE_LAST_SLASH);
+ EXPECT_EQ("../../", out.str());
+ }
+ {
+ std::ostringstream out;
+ writer.WriteDir(out, SourceDir("//"),
+ PathOutput::DIR_NO_LAST_SLASH);
+ EXPECT_EQ("../..", out.str());
+ }
+
+ // Output system root dir.
+ {
+ std::ostringstream out;
+ writer.WriteDir(out, SourceDir("/"),
+ PathOutput::DIR_INCLUDE_LAST_SLASH);
+ EXPECT_EQ("/", out.str());
+ }
+ {
+ std::ostringstream out;
+ writer.WriteDir(out, SourceDir("/"),
+ PathOutput::DIR_INCLUDE_LAST_SLASH);
+ EXPECT_EQ("/", out.str());
+ }
+ {
+ std::ostringstream out;
+ writer.WriteDir(out, SourceDir("/"),
+ PathOutput::DIR_NO_LAST_SLASH);
+ EXPECT_EQ("/.", out.str());
+ }
+
+ // Output inside current dir.
+ {
+ std::ostringstream out;
+ writer.WriteDir(out, SourceDir("//out/Debug/"),
+ PathOutput::DIR_INCLUDE_LAST_SLASH);
+ EXPECT_EQ("./", out.str());
+ }
+ {
+ std::ostringstream out;
+ writer.WriteDir(out, SourceDir("//out/Debug/"),
+ PathOutput::DIR_NO_LAST_SLASH);
+ EXPECT_EQ(".", out.str());
+ }
+ {
+ std::ostringstream out;
+ writer.WriteDir(out, SourceDir("//out/Debug/foo/"),
+ PathOutput::DIR_INCLUDE_LAST_SLASH);
+ EXPECT_EQ("foo/", out.str());
+ }
+ {
+ std::ostringstream out;
+ writer.WriteDir(out, SourceDir("//out/Debug/foo/"),
+ PathOutput::DIR_NO_LAST_SLASH);
+ EXPECT_EQ("foo", out.str());
+ }
+
+ // WriteDir using an OutputFile.
+ {
+ std::ostringstream out;
+ writer.WriteDir(out, OutputFile("foo/"),
+ PathOutput::DIR_INCLUDE_LAST_SLASH);
+ EXPECT_EQ("foo/", out.str());
+ }
+ {
+ std::ostringstream out;
+ writer.WriteDir(out, OutputFile("foo/"),
+ PathOutput::DIR_NO_LAST_SLASH);
+ EXPECT_EQ("foo", out.str());
+ }
+ {
+ std::ostringstream out;
+ writer.WriteDir(out, OutputFile(),
+ PathOutput::DIR_INCLUDE_LAST_SLASH);
+ EXPECT_EQ("", out.str());
+ }
+ }
+ {
+ // Empty build dir writer.
+ base::StringPiece source_root("/source/root");
+ PathOutput root_writer(SourceDir("//"), source_root, ESCAPE_NINJA);
+ {
+ std::ostringstream out;
+ root_writer.WriteDir(out, SourceDir("//"),
+ PathOutput::DIR_INCLUDE_LAST_SLASH);
+ EXPECT_EQ("./", out.str());
+ }
+ {
+ std::ostringstream out;
+ root_writer.WriteDir(out, SourceDir("//"),
+ PathOutput::DIR_NO_LAST_SLASH);
+ EXPECT_EQ(".", out.str());
+ }
+ }
+}
diff --git a/chromium/tools/gn/pattern.cc b/chromium/tools/gn/pattern.cc
new file mode 100644
index 00000000000..3de96a3f55d
--- /dev/null
+++ b/chromium/tools/gn/pattern.cc
@@ -0,0 +1,193 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/pattern.h"
+
+#include "tools/gn/value.h"
+
+namespace {
+
+void ParsePattern(const std::string& s, std::vector<Pattern::Subrange>* out) {
+ // Set when the last subrange is a literal so we can just append when we
+ // find another literal.
+ Pattern::Subrange* last_literal = nullptr;
+
+ for (size_t i = 0; i < s.size(); i++) {
+ if (s[i] == '*') {
+ // Don't allow two **.
+ if (out->size() == 0 ||
+ (*out)[out->size() - 1].type != Pattern::Subrange::ANYTHING)
+ out->push_back(Pattern::Subrange(Pattern::Subrange::ANYTHING));
+ last_literal = nullptr;
+ } else if (s[i] == '\\') {
+ if (i < s.size() - 1 && s[i + 1] == 'b') {
+ // "\b" means path boundary.
+ i++;
+ out->push_back(Pattern::Subrange(Pattern::Subrange::PATH_BOUNDARY));
+ last_literal = nullptr;
+ } else {
+ // Backslash + anything else means that literal char.
+ if (!last_literal) {
+ out->push_back(Pattern::Subrange(Pattern::Subrange::LITERAL));
+ last_literal = &(*out)[out->size() - 1];
+ }
+ if (i < s.size() - 1) {
+ i++;
+ last_literal->literal.push_back(s[i]);
+ } else {
+ // Single backslash at end, use literal backslash.
+ last_literal->literal.push_back('\\');
+ }
+ }
+ } else {
+ if (!last_literal) {
+ out->push_back(Pattern::Subrange(Pattern::Subrange::LITERAL));
+ last_literal = &(*out)[out->size() - 1];
+ }
+ last_literal->literal.push_back(s[i]);
+ }
+ }
+}
+
+} // namespace
+
+Pattern::Pattern(const std::string& s) {
+ ParsePattern(s, &subranges_);
+ is_suffix_ =
+ (subranges_.size() == 2 &&
+ subranges_[0].type == Subrange::ANYTHING &&
+ subranges_[1].type == Subrange::LITERAL);
+}
+
+Pattern::Pattern(const Pattern& other) = default;
+
+Pattern::~Pattern() {
+}
+
+bool Pattern::MatchesString(const std::string& s) const {
+ // Empty pattern matches only empty string.
+ if (subranges_.empty())
+ return s.empty();
+
+ if (is_suffix_) {
+ const std::string& suffix = subranges_[1].literal;
+ if (suffix.size() > s.size())
+ return false; // Too short.
+ return s.compare(s.size() - suffix.size(), suffix.size(), suffix) == 0;
+ }
+
+ return RecursiveMatch(s, 0, 0, true);
+}
+
+// We assume the number of ranges is small so recursive is always reasonable.
+// Could be optimized to only be recursive for *.
+bool Pattern::RecursiveMatch(const std::string& s,
+ size_t begin_char,
+ size_t subrange_index,
+ bool allow_implicit_path_boundary) const {
+ if (subrange_index >= subranges_.size()) {
+ // Hit the end of our subranges, the text should also be at the end for a
+ // match.
+ return begin_char == s.size();
+ }
+
+ const Subrange& sr = subranges_[subrange_index];
+ switch (sr.type) {
+ case Subrange::LITERAL: {
+ if (s.size() - begin_char < sr.literal.size())
+ return false; // Not enough room.
+ if (s.compare(begin_char, sr.literal.size(), sr.literal) != 0)
+ return false; // Literal doesn't match.
+
+ // Recursively check the next one.
+ return RecursiveMatch(s, begin_char + sr.literal.size(),
+ subrange_index + 1, true);
+ }
+
+ case Subrange::PATH_BOUNDARY: {
+ // When we can accept an implicit path boundary, we have to check both
+ // a match of the literal and the implicit one.
+ if (allow_implicit_path_boundary &&
+ (begin_char == 0 || begin_char == s.size())) {
+ // At implicit path boundary, see if the rest of the pattern matches.
+ if (RecursiveMatch(s, begin_char, subrange_index + 1, false))
+ return true;
+ }
+
+ // Check for a literal "/".
+ if (begin_char < s.size() && s[begin_char] == '/') {
+ // At explicit boundary, see if the rest of the pattern matches.
+ if (RecursiveMatch(s, begin_char + 1, subrange_index + 1, true))
+ return true;
+ }
+ return false;
+ }
+
+ case Subrange::ANYTHING: {
+ if (subrange_index == subranges_.size() - 1)
+ return true; // * at the end, consider it matching.
+
+ size_t min_next_size = sr.MinSize();
+
+ // We don't care about exactly what matched as long as there was a match,
+ // so we can do this front-to-back. If we needed the match, we would
+ // normally want "*" to be greedy so would work backwards.
+ for (size_t i = begin_char; i < s.size() - min_next_size; i++) {
+ // Note: this could probably be faster by detecting the type of the
+ // next match in advance and checking for a match in this loop rather
+ // than doing a full recursive call for each character.
+ if (RecursiveMatch(s, i, subrange_index + 1, true))
+ return true;
+ }
+ return false;
+ }
+
+ default:
+ NOTREACHED();
+ }
+
+ return false;
+}
+
+PatternList::PatternList() {
+}
+
+PatternList::PatternList(const PatternList& other) = default;
+
+PatternList::~PatternList() {
+}
+
+void PatternList::Append(const Pattern& pattern) {
+ patterns_.push_back(pattern);
+}
+
+void PatternList::SetFromValue(const Value& v, Err* err) {
+ patterns_.clear();
+
+ if (v.type() != Value::LIST) {
+ *err = Err(v.origin(), "This value must be a list.");
+ return;
+ }
+
+ const std::vector<Value>& list = v.list_value();
+ for (const auto& elem : list) {
+ if (!elem.VerifyTypeIs(Value::STRING, err))
+ return;
+ patterns_.push_back(Pattern(elem.string_value()));
+ }
+}
+
+bool PatternList::MatchesString(const std::string& s) const {
+ for (const auto& pattern : patterns_) {
+ if (pattern.MatchesString(s))
+ return true;
+ }
+ return false;
+}
+
+bool PatternList::MatchesValue(const Value& v) const {
+ if (v.type() == Value::STRING)
+ return MatchesString(v.string_value());
+ return false;
+}
diff --git a/chromium/tools/gn/pattern.h b/chromium/tools/gn/pattern.h
new file mode 100644
index 00000000000..f141f3eaeb1
--- /dev/null
+++ b/chromium/tools/gn/pattern.h
@@ -0,0 +1,92 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_PATTERN_H_
+#define TOOLS_GN_PATTERN_H_
+
+#include <stddef.h>
+
+#include <string>
+#include <vector>
+
+#include "tools/gn/value.h"
+
+class Pattern {
+ public:
+ struct Subrange {
+ enum Type {
+ LITERAL, // Matches exactly the contents of the string.
+ ANYTHING, // * (zero or more chars).
+ PATH_BOUNDARY // '/' or beginning of string.
+ };
+
+ explicit Subrange(Type t, const std::string& l = std::string())
+ : type(t),
+ literal(l) {
+ }
+
+ // Returns the minimum number of chars that this subrange requires.
+ size_t MinSize() const {
+ switch (type) {
+ case LITERAL:
+ return literal.size();
+ case ANYTHING:
+ return 0;
+ case PATH_BOUNDARY:
+ return 0; // Can match beginning or end of string, which is 0 len.
+ default:
+ return 0;
+ }
+ }
+
+ Type type;
+
+ // When type == LITERAL this is the text to match.
+ std::string literal;
+ };
+
+ explicit Pattern(const std::string& s);
+ Pattern(const Pattern& other);
+ ~Pattern();
+
+ // Returns true if the current pattern matches the given string.
+ bool MatchesString(const std::string& s) const;
+
+ private:
+ // allow_implicit_path_boundary determines if a path boundary should accept
+ // matches at the beginning or end of the string.
+ bool RecursiveMatch(const std::string& s,
+ size_t begin_char,
+ size_t subrange_index,
+ bool allow_implicit_path_boundary) const;
+
+ std::vector<Subrange> subranges_;
+
+ // Set to true when the subranges are "*foo" ("ANYTHING" followed by a
+ // literal). This covers most patterns so we optimize for this.
+ bool is_suffix_;
+};
+
+class PatternList {
+ public:
+ PatternList();
+ PatternList(const PatternList& other);
+ ~PatternList();
+
+ bool is_empty() const { return patterns_.empty(); }
+
+ void Append(const Pattern& pattern);
+
+  // Initializes the pattern list from a given list of pattern strings. Sets
+ // |*err| on failure.
+ void SetFromValue(const Value& v, Err* err);
+
+ bool MatchesString(const std::string& s) const;
+ bool MatchesValue(const Value& v) const;
+
+ private:
+ std::vector<Pattern> patterns_;
+};
+
+#endif // TOOLS_GN_PATTERN_H_
diff --git a/chromium/tools/gn/pattern_unittest.cc b/chromium/tools/gn/pattern_unittest.cc
new file mode 100644
index 00000000000..683fe5662c2
--- /dev/null
+++ b/chromium/tools/gn/pattern_unittest.cc
@@ -0,0 +1,64 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include "base/macros.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/pattern.h"
+
+namespace {
+
+struct Case {
+ const char* pattern;
+ const char* candidate;
+ bool expected_match;
+};
+
+} // namespace
+
+TEST(Pattern, Matches) {
+ Case pattern_cases[] = {
+ // Empty pattern matches only empty string.
+ { "", "", true },
+ { "", "foo", false },
+ // Exact matches.
+ { "foo", "foo", true },
+ { "foo", "bar", false },
+ // Path boundaries.
+ { "\\b", "", true },
+ { "\\b", "/", true },
+ { "\\b\\b", "/", true },
+ { "\\b\\b\\b", "", false },
+ { "\\b\\b\\b", "/", true },
+ { "\\b", "//", false },
+ { "\\bfoo\\b", "foo", true },
+ { "\\bfoo\\b", "/foo/", true },
+ { "\\b\\bfoo", "/foo", true },
+ // *
+ { "*", "", true },
+ { "*", "foo", true },
+ { "*foo", "foo", true },
+ { "*foo", "gagafoo", true },
+ { "*foo", "gagafoob", false },
+ { "foo*bar", "foobar", true },
+ { "foo*bar", "foo-bar", true },
+ { "foo*bar", "foolalalalabar", true },
+ { "foo*bar", "foolalalalabaz", false },
+ { "*a*b*c*d*", "abcd", true },
+ { "*a*b*c*d*", "1a2b3c4d5", true },
+ { "*a*b*c*d*", "1a2b3c45", false },
+ { "*\\bfoo\\b*", "foo", true },
+ { "*\\bfoo\\b*", "/foo/", true },
+ { "*\\bfoo\\b*", "foob", false },
+ { "*\\bfoo\\b*", "lala/foo/bar/baz", true },
+ };
+ for (size_t i = 0; i < arraysize(pattern_cases); i++) {
+ const Case& c = pattern_cases[i];
+ Pattern pattern(c.pattern);
+ bool result = pattern.MatchesString(c.candidate);
+ EXPECT_EQ(c.expected_match, result) << i << ": \"" << c.pattern
+ << "\", \"" << c.candidate << "\"";
+ }
+}
diff --git a/chromium/tools/gn/runtime_deps.cc b/chromium/tools/gn/runtime_deps.cc
new file mode 100644
index 00000000000..17b9c075227
--- /dev/null
+++ b/chromium/tools/gn/runtime_deps.cc
@@ -0,0 +1,294 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/runtime_deps.h"
+
+#include <map>
+#include <set>
+#include <sstream>
+
+#include "base/command_line.h"
+#include "base/files/file_util.h"
+#include "base/strings/string_split.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/builder.h"
+#include "tools/gn/deps_iterator.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/loader.h"
+#include "tools/gn/output_file.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/switches.h"
+#include "tools/gn/target.h"
+#include "tools/gn/trace.h"
+
+namespace {
+
+using RuntimeDepsVector = std::vector<std::pair<OutputFile, const Target*>>;
+
+// Adds the given file to the deps list if it hasn't already been listed in
+// the found_files list. Updates the list.
+void AddIfNew(const OutputFile& output_file,
+ const Target* source,
+ RuntimeDepsVector* deps,
+ std::set<OutputFile>* found_file) {
+ if (found_file->find(output_file) != found_file->end())
+ return; // Already there.
+ deps->push_back(std::make_pair(output_file, source));
+}
+
+// Automatically converts a string that looks like a source to an OutputFile.
+void AddIfNew(const std::string& str,
+ const Target* source,
+ RuntimeDepsVector* deps,
+ std::set<OutputFile>* found_file) {
+ OutputFile output_file(RebasePath(
+ str,
+ source->settings()->build_settings()->build_dir(),
+ source->settings()->build_settings()->root_path_utf8()));
+ AddIfNew(output_file, source, deps, found_file);
+}
+
+// Returns the output file that the runtime deps considers for the given
+// targets. This is weird only for shared libraries.
+const OutputFile& GetMainOutput(const Target* target) {
+ if (target->output_type() == Target::SHARED_LIBRARY)
+ return target->runtime_link_output_file();
+ return target->dependency_output_file();
+}
+
+// To avoid duplicate traversals of targets, or duplicating output files that
+// might be listed by more than one target, the set of targets and output files
+// that have been found so far is passed. The "value" of the seen_targets map
+// is a boolean indicating if the seen dep was a data dep (true = data_dep).
+// data deps add more stuff, so we will want to revisit a target if it's a
+// data dependency and we've previously only seen it as a regular dep.
+void RecursiveCollectRuntimeDeps(const Target* target,
+ bool is_target_data_dep,
+ RuntimeDepsVector* deps,
+ std::map<const Target*, bool>* seen_targets,
+ std::set<OutputFile>* found_files) {
+ const auto& found_seen_target = seen_targets->find(target);
+ if (found_seen_target != seen_targets->end()) {
+ // Already visited.
+ if (found_seen_target->second || !is_target_data_dep) {
+ // Already visited as a data dep, or the current dep is not a data
+ // dep so visiting again will be a no-op.
+ return;
+ }
+ // In the else case, the previously seen target was a regular dependency
+ // and we'll now process it as a data dependency.
+ }
+ (*seen_targets)[target] = is_target_data_dep;
+
+ // Add the main output file for executables, shared libraries, and
+ // loadable modules.
+ if (target->output_type() == Target::EXECUTABLE ||
+ target->output_type() == Target::LOADABLE_MODULE ||
+ target->output_type() == Target::SHARED_LIBRARY)
+ AddIfNew(GetMainOutput(target), target, deps, found_files);
+
+ // Add all data files.
+ for (const auto& file : target->data())
+ AddIfNew(file, target, deps, found_files);
+
+  // Actions/copy have all outputs considered when they're a data dep.
+ if (is_target_data_dep &&
+ (target->output_type() == Target::ACTION ||
+ target->output_type() == Target::ACTION_FOREACH ||
+ target->output_type() == Target::COPY_FILES)) {
+ std::vector<SourceFile> outputs;
+ target->action_values().GetOutputsAsSourceFiles(target, &outputs);
+ for (const auto& output_file : outputs)
+ AddIfNew(output_file.value(), target, deps, found_files);
+ }
+
+ // Non-data dependencies (both public and private).
+ for (const auto& dep_pair : target->GetDeps(Target::DEPS_LINKED)) {
+ if (dep_pair.ptr->output_type() == Target::EXECUTABLE)
+ continue; // Skip executables that aren't data deps.
+ RecursiveCollectRuntimeDeps(dep_pair.ptr, false,
+ deps, seen_targets, found_files);
+ }
+
+ // Data dependencies.
+ for (const auto& dep_pair : target->data_deps()) {
+ RecursiveCollectRuntimeDeps(dep_pair.ptr, true,
+ deps, seen_targets, found_files);
+ }
+}
+
+bool CollectRuntimeDepsFromFlag(const Builder& builder,
+ RuntimeDepsVector* files_to_write,
+ Err* err) {
+ std::string deps_target_list_file =
+ base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(
+ switches::kRuntimeDepsListFile);
+
+ if (deps_target_list_file.empty())
+ return true;
+
+ std::string list_contents;
+ ScopedTrace load_trace(TraceItem::TRACE_FILE_LOAD, deps_target_list_file);
+ if (!base::ReadFileToString(UTF8ToFilePath(deps_target_list_file),
+ &list_contents)) {
+ *err = Err(Location(),
+ std::string("File for --") + switches::kRuntimeDepsListFile +
+ " doesn't exist.",
+ "The file given was \"" + deps_target_list_file + "\"");
+ return false;
+ }
+ load_trace.Done();
+
+ SourceDir root_dir("//");
+ Label default_toolchain_label = builder.loader()->GetDefaultToolchain();
+ for (const auto& line :
+ base::SplitString(list_contents, "\n", base::TRIM_WHITESPACE,
+ base::SPLIT_WANT_ALL)) {
+ if (line.empty())
+ continue;
+ Label label = Label::Resolve(root_dir, default_toolchain_label,
+ Value(nullptr, line), err);
+ if (err->has_error())
+ return false;
+
+ const Item* item = builder.GetItem(label);
+ const Target* target = item ? item->AsTarget() : nullptr;
+ if (!target) {
+ *err = Err(Location(), "The label \"" + label.GetUserVisibleName(true) +
+ "\" isn't a target.",
+ "When reading the line:\n " + line + "\n"
+ "from the --" + switches::kRuntimeDepsListFile + "=" +
+ deps_target_list_file);
+ return false;
+ }
+
+ OutputFile output_file =
+ OutputFile(GetMainOutput(target).value() + ".runtime_deps");
+ files_to_write->push_back(std::make_pair(output_file, target));
+ }
+ return true;
+}
+
+bool WriteRuntimeDepsFile(const OutputFile& output_file,
+ const Target* target,
+ Err* err) {
+ SourceFile output_as_source =
+ output_file.AsSourceFile(target->settings()->build_settings());
+ base::FilePath data_deps_file =
+ target->settings()->build_settings()->GetFullPath(output_as_source);
+
+ std::stringstream contents;
+ for (const auto& pair : ComputeRuntimeDeps(target))
+ contents << pair.first.value() << std::endl;
+
+ ScopedTrace trace(TraceItem::TRACE_FILE_WRITE, output_as_source.value());
+ return WriteFileIfChanged(data_deps_file, contents.str(), err);
+}
+
+} // namespace
+
+const char kRuntimeDeps_Help[] =
+ "Runtime dependencies\n"
+ "\n"
+ " Runtime dependencies of a target are exposed via the \"runtime_deps\"\n"
+ " category of \"gn desc\" (see \"gn help desc\") or they can be written\n"
+ " at build generation time via write_runtime_deps(), or\n"
+ " --runtime-deps-list-file (see \"gn help --runtime-deps-list-file\").\n"
+ "\n"
+ " To a first approximation, the runtime dependencies of a target are\n"
+ " the set of \"data\" files, data directories, and the shared libraries\n"
+ " from all transitive dependencies. Executables, shared libraries, and\n"
+ " loadable modules are considered runtime dependencies of themselves.\n"
+ "\n"
+ "Executables\n"
+ "\n"
+ " Executable targets and those executable targets' transitive\n"
+ " dependencies are not considered unless that executable is listed in\n"
+ " \"data_deps\". Otherwise, GN assumes that the executable (and\n"
+ " everything it requires) is a build-time dependency only.\n"
+ "\n"
+ "Actions and copies\n"
+ "\n"
+ " Action and copy targets that are listed as \"data_deps\" will have all\n"
+ " of their outputs and data files considered as runtime dependencies.\n"
+ " Action and copy targets that are \"deps\" or \"public_deps\" will have\n"
+ " only their data files considered as runtime dependencies. These\n"
+ " targets can list an output file in both the \"outputs\" and \"data\"\n"
+ " lists to force an output file as a runtime dependency in all cases.\n"
+ "\n"
+ " The different rules for deps and data_deps are to express build-time\n"
+ " (deps) vs. run-time (data_deps) outputs. If GN counted all build-time\n"
+ " copy steps as data dependencies, there would be a lot of extra stuff,\n"
+ " and if GN counted all run-time dependencies as regular deps, the\n"
+ " build's parallelism would be unnecessarily constrained.\n"
+ "\n"
+ " This rule can sometimes lead to unintuitive results. For example,\n"
+ " given the three targets:\n"
+ " A --[data_deps]--> B --[deps]--> ACTION\n"
+ " GN would say that A does not have runtime deps on the result of the\n"
+ " ACTION, which is often correct. But the purpose of the B target might\n"
+ " be to collect many actions into one logic unit, and the \"data\"-ness\n"
+ " of A's dependency is lost. Solutions:\n"
+ "\n"
+    "   - List the outputs of the action in its data section (if the\n"
+ " results of that action are always runtime files).\n"
+ " - Have B list the action in data_deps (if the outputs of the actions\n"
+ " are always runtime files).\n"
+ " - Have B list the action in both deps and data deps (if the outputs\n"
+ " might be used in both contexts and you don't care about unnecessary\n"
+ " entries in the list of files required at runtime).\n"
+ " - Split B into run-time and build-time versions with the appropriate\n"
+ " \"deps\" for each.\n"
+ "\n"
+ "Static libraries and source sets\n"
+ "\n"
+ " The results of static_library or source_set targets are not considered\n"
+ " runtime dependencies since these are assumed to be intermediate\n"
+ " targets only. If you need to list a static library as a runtime\n"
+ " dependency, you can manually compute the .a/.lib file name for the\n"
+ " current platform and list it in the \"data\" list of a target\n"
+ " (possibly on the static library target itself).\n"
+ "\n"
+ "Multiple outputs\n"
+ "\n"
+ " When a tool produces more than one output, only the first output\n"
+ " is considered. For example, a shared library target may produce a\n"
+ " .dll and a .lib file on Windows. Only the .dll file will be considered\n"
+ " a runtime dependency. This applies only to linker tools, scripts and\n"
+ " copy steps with multiple outputs will also get all outputs listed.\n";
+
+RuntimeDepsVector ComputeRuntimeDeps(const Target* target) {
+ RuntimeDepsVector result;
+ std::map<const Target*, bool> seen_targets;
+ std::set<OutputFile> found_files;
+
+  // The initial target is not considered a data dependency so that an action's
+ // outputs (if the current target is an action) are not automatically
+ // considered data deps.
+ RecursiveCollectRuntimeDeps(target, false,
+ &result, &seen_targets, &found_files);
+ return result;
+}
+
+bool WriteRuntimeDepsFilesIfNecessary(const Builder& builder, Err* err) {
+ RuntimeDepsVector files_to_write;
+ if (!CollectRuntimeDepsFromFlag(builder, &files_to_write, err))
+ return false;
+
+ // Files scheduled by write_runtime_deps.
+ for (const Target* target : g_scheduler->GetWriteRuntimeDepsTargets()) {
+ files_to_write.push_back(
+ std::make_pair(target->write_runtime_deps_output(), target));
+ }
+
+ for (const auto& entry : files_to_write) {
+ // Currently this writes all runtime deps files sequentially. We generally
+ // expect few of these. We can run this on the worker pool if it looks
+ // like it's talking a long time.
+ if (!WriteRuntimeDepsFile(entry.first, entry.second, err))
+ return false;
+ }
+ return true;
+}
diff --git a/chromium/tools/gn/runtime_deps.h b/chromium/tools/gn/runtime_deps.h
new file mode 100644
index 00000000000..8592677b6dd
--- /dev/null
+++ b/chromium/tools/gn/runtime_deps.h
@@ -0,0 +1,28 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_RUNTIME_DEPS_H
+#define TOOLS_GN_RUNTIME_DEPS_H
+
+#include <utility>
+#include <vector>
+
+class Builder;
+class Err;
+class OutputFile;
+class Target;
+
+extern const char kRuntimeDeps_Help[];
+
+// Computes the runtime dependencies of the given target. The result is a list
+// of pairs listing the runtime dependency and the target that the runtime
+// dependency is from (for blaming).
+std::vector<std::pair<OutputFile, const Target*>> ComputeRuntimeDeps(
+ const Target* target);
+
+// Writes all runtime deps files requested on the command line, or does nothing
+// if no files were specified.
+bool WriteRuntimeDepsFilesIfNecessary(const Builder& builder, Err* err);
+
+#endif // TOOLS_GN_RUNTIME_DEPS_H
diff --git a/chromium/tools/gn/runtime_deps_unittest.cc b/chromium/tools/gn/runtime_deps_unittest.cc
new file mode 100644
index 00000000000..915fbc28302
--- /dev/null
+++ b/chromium/tools/gn/runtime_deps_unittest.cc
@@ -0,0 +1,284 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include <algorithm>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/runtime_deps.h"
+#include "tools/gn/target.h"
+#include "tools/gn/test_with_scope.h"
+
+namespace {
+
+void InitTargetWithType(TestWithScope& setup,
+ Target* target,
+ Target::OutputType type) {
+ target->set_output_type(type);
+ target->visibility().SetPublic();
+ target->SetToolchain(setup.toolchain());
+}
+
+// Convenience function to make the correct kind of pair.
+std::pair<OutputFile, const Target*> MakePair(const char* str,
+ const Target* t) {
+ return std::pair<OutputFile, const Target*>(OutputFile(str), t);
+}
+
+std::string GetVectorDescription(
+ const std::vector<std::pair<OutputFile, const Target*>>& v) {
+ std::string result;
+ for (size_t i = 0; i < v.size(); i++) {
+ if (i != 0)
+ result.append(", ");
+ result.append("\"" + v[i].first.value() + "\"");
+ }
+ return result;
+}
+
+} // namespace
+
+// Tests an exe depending on different types of libraries.
+TEST(RuntimeDeps, Libs) {
+ TestWithScope setup;
+ Err err;
+
+ // Dependency hierarchy: main(exe) -> static library
+ // -> shared library
+ // -> loadable module
+ // -> source set
+
+ Target stat(setup.settings(), Label(SourceDir("//"), "stat"));
+ InitTargetWithType(setup, &stat, Target::STATIC_LIBRARY);
+ stat.data().push_back("//stat.dat");
+ ASSERT_TRUE(stat.OnResolved(&err));
+
+ Target shared(setup.settings(), Label(SourceDir("//"), "shared"));
+ InitTargetWithType(setup, &shared, Target::SHARED_LIBRARY);
+ shared.data().push_back("//shared.dat");
+ ASSERT_TRUE(shared.OnResolved(&err));
+
+ Target loadable(setup.settings(), Label(SourceDir("//"), "loadable"));
+ InitTargetWithType(setup, &loadable, Target::LOADABLE_MODULE);
+ loadable.data().push_back("//loadable.dat");
+ ASSERT_TRUE(loadable.OnResolved(&err));
+
+ Target set(setup.settings(), Label(SourceDir("//"), "set"));
+ InitTargetWithType(setup, &set, Target::SOURCE_SET);
+ set.data().push_back("//set.dat");
+ ASSERT_TRUE(set.OnResolved(&err));
+
+ Target main(setup.settings(), Label(SourceDir("//"), "main"));
+ InitTargetWithType(setup, &main, Target::EXECUTABLE);
+ main.private_deps().push_back(LabelTargetPair(&stat));
+ main.private_deps().push_back(LabelTargetPair(&shared));
+ main.private_deps().push_back(LabelTargetPair(&loadable));
+ main.private_deps().push_back(LabelTargetPair(&set));
+ main.data().push_back("//main.dat");
+ ASSERT_TRUE(main.OnResolved(&err));
+
+ std::vector<std::pair<OutputFile, const Target*>> result =
+ ComputeRuntimeDeps(&main);
+
+ // The result should have deps of main, all 5 dat files, libshared.so, and
+ // libloadable.so.
+ ASSERT_EQ(8u, result.size()) << GetVectorDescription(result);
+
+ // The first one should always be the main exe.
+ EXPECT_TRUE(MakePair("./main", &main) == result[0]);
+
+ // The rest of the ordering is undefined. First the data files.
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("../../stat.dat", &stat)) !=
+ result.end()) << GetVectorDescription(result);
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("../../shared.dat", &shared)) !=
+ result.end()) << GetVectorDescription(result);
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("../../loadable.dat", &loadable)) !=
+ result.end()) << GetVectorDescription(result);
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("../../set.dat", &set)) !=
+ result.end()) << GetVectorDescription(result);
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("../../main.dat", &main)) !=
+ result.end()) << GetVectorDescription(result);
+
+ // Check the static library and loadable module.
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("./libshared.so", &shared)) !=
+ result.end()) << GetVectorDescription(result);
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("./libloadable.so", &loadable)) !=
+ result.end()) << GetVectorDescription(result);
+}
+
+// Tests that executables that aren't listed as data deps aren't included in
+// the output, but executables that are data deps are included.
+TEST(RuntimeDeps, ExeDataDep) {
+ TestWithScope setup;
+ Err err;
+
+ // Dependency hierarchy: main(exe) -> datadep(exe) -> final_in(source set)
+ // -> dep(exe) -> final_out(source set)
+ // The final_in/out targets each have data files. final_in's should be
+ // included, final_out's should not be.
+
+ Target final_in(setup.settings(), Label(SourceDir("//"), "final_in"));
+ InitTargetWithType(setup, &final_in, Target::SOURCE_SET);
+ final_in.data().push_back("//final_in.dat");
+ ASSERT_TRUE(final_in.OnResolved(&err));
+
+ Target datadep(setup.settings(), Label(SourceDir("//"), "datadep"));
+ InitTargetWithType(setup, &datadep, Target::EXECUTABLE);
+ datadep.private_deps().push_back(LabelTargetPair(&final_in));
+ ASSERT_TRUE(datadep.OnResolved(&err));
+
+ Target final_out(setup.settings(), Label(SourceDir("//"), "final_out"));
+ InitTargetWithType(setup, &final_out, Target::SOURCE_SET);
+ final_out.data().push_back("//final_out.dat");
+ ASSERT_TRUE(final_out.OnResolved(&err));
+
+ Target dep(setup.settings(), Label(SourceDir("//"), "dep"));
+ InitTargetWithType(setup, &dep, Target::EXECUTABLE);
+ dep.private_deps().push_back(LabelTargetPair(&final_out));
+ ASSERT_TRUE(dep.OnResolved(&err));
+
+ Target main(setup.settings(), Label(SourceDir("//"), "main"));
+ InitTargetWithType(setup, &main, Target::EXECUTABLE);
+ main.private_deps().push_back(LabelTargetPair(&dep));
+ main.data_deps().push_back(LabelTargetPair(&datadep));
+ ASSERT_TRUE(main.OnResolved(&err));
+
+ std::vector<std::pair<OutputFile, const Target*>> result =
+ ComputeRuntimeDeps(&main);
+
+ // The result should have deps of main, datadep, final_in.dat
+ ASSERT_EQ(3u, result.size()) << GetVectorDescription(result);
+
+ // The first one should always be the main exe.
+ EXPECT_TRUE(MakePair("./main", &main) == result[0]);
+
+ // The rest of the ordering is undefined.
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("./datadep", &datadep)) !=
+ result.end()) << GetVectorDescription(result);
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("../../final_in.dat", &final_in)) !=
+ result.end()) << GetVectorDescription(result);
+}
+
+// Tests that action and copy outputs are considered if they're data deps, but
+// not if they're regular deps. Action and copy "data" files are always
+// included.
+TEST(RuntimeDeps, ActionOutputs) {
+ TestWithScope setup;
+ Err err;
+
+ // Dependency hierarchy: main(exe) -> datadep (action)
+ // -> datadep_copy (copy)
+ // -> dep (action)
+ // -> dep_copy (copy)
+
+ Target datadep(setup.settings(), Label(SourceDir("//"), "datadep"));
+ InitTargetWithType(setup, &datadep, Target::ACTION);
+ datadep.data().push_back("//datadep.data");
+ datadep.action_values().outputs() =
+ SubstitutionList::MakeForTest("//datadep.output");
+ ASSERT_TRUE(datadep.OnResolved(&err));
+
+ Target datadep_copy(setup.settings(), Label(SourceDir("//"), "datadep_copy"));
+ InitTargetWithType(setup, &datadep_copy, Target::COPY_FILES);
+ datadep_copy.sources().push_back(SourceFile("//input"));
+ datadep_copy.data().push_back("//datadep_copy.data");
+ datadep_copy.action_values().outputs() =
+ SubstitutionList::MakeForTest("//datadep_copy.output");
+ ASSERT_TRUE(datadep_copy.OnResolved(&err));
+
+ Target dep(setup.settings(), Label(SourceDir("//"), "dep"));
+ InitTargetWithType(setup, &dep, Target::ACTION);
+ dep.data().push_back("//dep.data");
+ dep.action_values().outputs() =
+ SubstitutionList::MakeForTest("//dep.output");
+ ASSERT_TRUE(dep.OnResolved(&err));
+
+ Target dep_copy(setup.settings(), Label(SourceDir("//"), "dep_copy"));
+ InitTargetWithType(setup, &dep_copy, Target::COPY_FILES);
+ dep_copy.sources().push_back(SourceFile("//input"));
+ dep_copy.data().push_back("//dep_copy/data/"); // Tests a directory.
+ dep_copy.action_values().outputs() =
+ SubstitutionList::MakeForTest("//dep_copy.output");
+ ASSERT_TRUE(dep_copy.OnResolved(&err));
+
+ Target main(setup.settings(), Label(SourceDir("//"), "main"));
+ InitTargetWithType(setup, &main, Target::EXECUTABLE);
+ main.private_deps().push_back(LabelTargetPair(&dep));
+ main.private_deps().push_back(LabelTargetPair(&dep_copy));
+ main.data_deps().push_back(LabelTargetPair(&datadep));
+ main.data_deps().push_back(LabelTargetPair(&datadep_copy));
+ ASSERT_TRUE(main.OnResolved(&err));
+
+ std::vector<std::pair<OutputFile, const Target*>> result =
+ ComputeRuntimeDeps(&main);
+
+ // The result should have deps of main, both datadeps files, but only
+ // the data file from dep.
+ ASSERT_EQ(7u, result.size()) << GetVectorDescription(result);
+
+ // The first one should always be the main exe.
+ EXPECT_TRUE(MakePair("./main", &main) == result[0]);
+
+ // The rest of the ordering is undefined.
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("../../datadep.data", &datadep)) !=
+ result.end()) << GetVectorDescription(result);
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("../../datadep_copy.data", &datadep_copy)) !=
+ result.end()) << GetVectorDescription(result);
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("../../datadep.output", &datadep)) !=
+ result.end()) << GetVectorDescription(result);
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("../../datadep_copy.output", &datadep_copy)) !=
+ result.end()) << GetVectorDescription(result);
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("../../dep.data", &dep)) !=
+ result.end()) << GetVectorDescription(result);
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("../../dep_copy/data/", &dep_copy)) !=
+ result.end()) << GetVectorDescription(result);
+
+ // Explicitly asking for the runtime deps of an action target only includes
+ // the data and not all outputs.
+ result = ComputeRuntimeDeps(&dep);
+ ASSERT_EQ(1u, result.size());
+ EXPECT_TRUE(MakePair("../../dep.data", &dep) == result[0]);
+}
+
+// Tests that a dependency duplicated in regular and data deps is processed
+// as a data dep.
+TEST(RuntimeDeps, Dupe) {
+ TestWithScope setup;
+ Err err;
+
+ Target action(setup.settings(), Label(SourceDir("//"), "action"));
+ InitTargetWithType(setup, &action, Target::ACTION);
+ action.action_values().outputs() =
+ SubstitutionList::MakeForTest("//action.output");
+ ASSERT_TRUE(action.OnResolved(&err));
+
+ Target target(setup.settings(), Label(SourceDir("//"), "foo"));
+ InitTargetWithType(setup, &target, Target::EXECUTABLE);
+ target.private_deps().push_back(LabelTargetPair(&action));
+ target.data_deps().push_back(LabelTargetPair(&action));
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ // The results should be the executable and the copy output.
+ std::vector<std::pair<OutputFile, const Target*>> result =
+ ComputeRuntimeDeps(&target);
+ EXPECT_TRUE(std::find(result.begin(), result.end(),
+ MakePair("../../action.output", &action)) !=
+ result.end()) << GetVectorDescription(result);
+}
diff --git a/chromium/tools/gn/scheduler.cc b/chromium/tools/gn/scheduler.cc
new file mode 100644
index 00000000000..a711df1cf02
--- /dev/null
+++ b/chromium/tools/gn/scheduler.cc
@@ -0,0 +1,237 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/scheduler.h"
+
+#include <algorithm>
+
+#include "base/bind.h"
+#include "base/command_line.h"
+#include "base/strings/string_number_conversions.h"
+#include "build/build_config.h"
+#include "tools/gn/standard_out.h"
+#include "tools/gn/switches.h"
+#include "tools/gn/target.h"
+
+#if defined(OS_WIN)
+#include <windows.h>
+#else
+#include <unistd.h>
+#endif
+
+Scheduler* g_scheduler = nullptr;
+
+namespace {
+
+#if defined(OS_WIN)
+int GetCPUCount() {
+ SYSTEM_INFO sysinfo;
+ ::GetSystemInfo(&sysinfo);
+ return sysinfo.dwNumberOfProcessors;
+}
+#else
+int GetCPUCount() {
+ return static_cast<int>(sysconf(_SC_NPROCESSORS_ONLN));
+}
+#endif
+
+int GetThreadCount() {
+ std::string thread_count =
+ base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(
+ switches::kThreads);
+
+ // See if an override was specified on the command line.
+ int result;
+ if (!thread_count.empty() && base::StringToInt(thread_count, &result))
+ return result;
+
+ // Base the default number of worker threads on number of cores in the
+ // system. When building large projects, the speed can be limited by how fast
+ // the main thread can dispatch work and connect the dependency graph. If
+ // there are too many worker threads, the main thread can be starved and it
+ // will run slower overall.
+ //
+ // One less worker thread than the number of physical CPUs seems to be a
+ // good value, both theoretically and experimentally. But always use at
+ // least three workers to prevent us from being too sensitive to I/O latency
+ // on low-end systems.
+ int num_cores = GetCPUCount() / 2; // Almost all CPUs now are hyperthreaded.
+ return std::max(num_cores - 1, 3);
+}
+
+} // namespace
+
+Scheduler::Scheduler()
+ : pool_(new base::SequencedWorkerPool(GetThreadCount(), "worker_")),
+ input_file_manager_(new InputFileManager),
+ verbose_logging_(false),
+ work_count_(0),
+ is_failed_(false),
+ has_been_shutdown_(false) {
+ g_scheduler = this;
+}
+
+Scheduler::~Scheduler() {
+ if (!has_been_shutdown_)
+ pool_->Shutdown();
+ g_scheduler = nullptr;
+}
+
+bool Scheduler::Run() {
+ runner_.Run();
+ bool local_is_failed;
+ {
+ base::AutoLock lock(lock_);
+ local_is_failed = is_failed();
+ has_been_shutdown_ = true;
+ }
+ // Don't do this inside the lock since it will block on the workers, which
+ // may be in turn waiting on the lock.
+ pool_->Shutdown();
+ return !local_is_failed;
+}
+
+void Scheduler::Log(const std::string& verb, const std::string& msg) {
+ if (base::MessageLoop::current() == &main_loop_) {
+ LogOnMainThread(verb, msg);
+ } else {
+ // The run loop always joins on the sub threads, so the lifetime of this
+ // object outlives the invocations of this function, hence "unretained".
+ main_loop_.PostTask(FROM_HERE,
+ base::Bind(&Scheduler::LogOnMainThread,
+ base::Unretained(this), verb, msg));
+ }
+}
+
+void Scheduler::FailWithError(const Err& err) {
+ DCHECK(err.has_error());
+ {
+ base::AutoLock lock(lock_);
+
+ if (is_failed_ || has_been_shutdown_)
+ return; // Ignore errors once we see one.
+ is_failed_ = true;
+ }
+
+ if (base::MessageLoop::current() == &main_loop_) {
+ FailWithErrorOnMainThread(err);
+ } else {
+ // The run loop always joins on the sub threads, so the lifetime of this
+ // object outlives the invocations of this function, hence "unretained".
+ main_loop_.PostTask(FROM_HERE,
+ base::Bind(&Scheduler::FailWithErrorOnMainThread,
+ base::Unretained(this), err));
+ }
+}
+
+void Scheduler::ScheduleWork(const base::Closure& work) {
+ IncrementWorkCount();
+ pool_->PostWorkerTaskWithShutdownBehavior(
+ FROM_HERE, base::Bind(&Scheduler::DoWork,
+ base::Unretained(this), work),
+ base::SequencedWorkerPool::BLOCK_SHUTDOWN);
+}
+
+void Scheduler::AddGenDependency(const base::FilePath& file) {
+ base::AutoLock lock(lock_);
+ gen_dependencies_.push_back(file);
+}
+
+std::vector<base::FilePath> Scheduler::GetGenDependencies() const {
+ base::AutoLock lock(lock_);
+ return gen_dependencies_;
+}
+
+void Scheduler::AddWrittenFile(const SourceFile& file) {
+ base::AutoLock lock(lock_);
+ written_files_.push_back(file);
+}
+
+void Scheduler::AddUnknownGeneratedInput(const Target* target,
+ const SourceFile& file) {
+ base::AutoLock lock(lock_);
+ unknown_generated_inputs_.insert(std::make_pair(file, target));
+}
+
+void Scheduler::AddWriteRuntimeDepsTarget(const Target* target) {
+ base::AutoLock lock(lock_);
+ write_runtime_deps_targets_.push_back(target);
+}
+
+std::vector<const Target*> Scheduler::GetWriteRuntimeDepsTargets() const {
+ base::AutoLock lock(lock_);
+ return write_runtime_deps_targets_;
+}
+
+bool Scheduler::IsFileGeneratedByWriteRuntimeDeps(
+ const OutputFile& file) const {
+ base::AutoLock lock(lock_);
+ // Number of targets should be quite small, so brute-force search is fine.
+ for (const Target* target : write_runtime_deps_targets_) {
+ if (file == target->write_runtime_deps_output()) {
+ return true;
+ }
+ }
+ return false;
+}
+
+std::multimap<SourceFile, const Target*>
+ Scheduler::GetUnknownGeneratedInputs() const {
+ base::AutoLock lock(lock_);
+
+ // Remove all unknown inputs that were written files. These are OK as inputs
+ // to build steps since they were written as a side-effect of running GN.
+ //
+ // It's assumed that this function is called once during cleanup to check for
+ // errors, so performing this work in the lock doesn't matter.
+ std::multimap<SourceFile, const Target*> filtered = unknown_generated_inputs_;
+ for (const SourceFile& file : written_files_)
+ filtered.erase(file);
+
+ return filtered;
+}
+
+void Scheduler::ClearUnknownGeneratedInputsAndWrittenFiles() {
+ base::AutoLock lock(lock_);
+ unknown_generated_inputs_.clear();
+ written_files_.clear();
+}
+
+void Scheduler::IncrementWorkCount() {
+ base::AtomicRefCountInc(&work_count_);
+}
+
+void Scheduler::DecrementWorkCount() {
+ if (!base::AtomicRefCountDec(&work_count_)) {
+ if (base::MessageLoop::current() == &main_loop_) {
+ OnComplete();
+ } else {
+ main_loop_.PostTask(FROM_HERE,
+ base::Bind(&Scheduler::OnComplete,
+ base::Unretained(this)));
+ }
+ }
+}
+
+void Scheduler::LogOnMainThread(const std::string& verb,
+ const std::string& msg) {
+ OutputString(verb, DECORATION_YELLOW);
+ OutputString(" " + msg + "\n");
+}
+
+void Scheduler::FailWithErrorOnMainThread(const Err& err) {
+ err.PrintToStdout();
+ runner_.Quit();
+}
+
+void Scheduler::DoWork(const base::Closure& closure) {
+ closure.Run();
+ DecrementWorkCount();
+}
+
+void Scheduler::OnComplete() {
+ // Should be called on the main thread.
+ DCHECK(base::MessageLoop::current() == main_loop());
+ runner_.Quit();
+}
diff --git a/chromium/tools/gn/scheduler.h b/chromium/tools/gn/scheduler.h
new file mode 100644
index 00000000000..a9bb0365122
--- /dev/null
+++ b/chromium/tools/gn/scheduler.h
@@ -0,0 +1,132 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_SCHEDULER_H_
+#define TOOLS_GN_SCHEDULER_H_
+
+#include <map>
+
+#include "base/atomic_ref_count.h"
+#include "base/files/file_path.h"
+#include "base/macros.h"
+#include "base/message_loop/message_loop.h"
+#include "base/run_loop.h"
+#include "base/synchronization/lock.h"
+#include "base/threading/sequenced_worker_pool.h"
+#include "tools/gn/input_file_manager.h"
+#include "tools/gn/label.h"
+#include "tools/gn/source_file.h"
+#include "tools/gn/token.h"
+
+class Target;
+
+// Maintains the thread pool and error state.
+class Scheduler {
+ public:
+ Scheduler();
+ ~Scheduler();
+
+ bool Run();
+
+ base::MessageLoop* main_loop() { return &main_loop_; }
+ base::SequencedWorkerPool* pool() { return pool_.get(); }
+
+ InputFileManager* input_file_manager() { return input_file_manager_.get(); }
+
+ bool verbose_logging() const { return verbose_logging_; }
+ void set_verbose_logging(bool v) { verbose_logging_ = v; }
+
+ // TODO(brettw) data race on this access (benign?).
+ bool is_failed() const { return is_failed_; }
+
+ void Log(const std::string& verb, const std::string& msg);
+ void FailWithError(const Err& err);
+
+ void ScheduleWork(const base::Closure& work);
+
+ void Shutdown();
+
+ // Declares that the given file was read and affected the build output.
+ //
+ // TODO(brettw) this is global rather than per-BuildSettings. If we
+ // start using >1 build settings, then we probably want this to take a
+  // BuildSettings object so we know the dependency on a per-build basis.
+ // If moved, most of the Add/Get functions below should move as well.
+ void AddGenDependency(const base::FilePath& file);
+ std::vector<base::FilePath> GetGenDependencies() const;
+
+ // Tracks calls to write_file for resolving with the unknown generated
+ // inputs (see AddUnknownGeneratedInput below).
+ void AddWrittenFile(const SourceFile& file);
+
+ // Schedules a file to be written due to a target setting write_runtime_deps.
+ void AddWriteRuntimeDepsTarget(const Target* entry);
+ std::vector<const Target*> GetWriteRuntimeDepsTargets() const;
+ bool IsFileGeneratedByWriteRuntimeDeps(const OutputFile& file) const;
+
+ // Unknown generated inputs are files that a target declares as an input
+ // in the output directory, but which aren't generated by any dependency.
+ //
+ // Some of these files will be files written by write_file and will be
+  // GenDependencies (see AddWrittenFile above). These are OK and include
+  // things like response files for scripts. Other cases will be ones where
+ // the file is generated by a target that's not a dependency.
+ //
+ // In order to distinguish these two cases, the checking for these input
+ // files needs to be done after all targets are complete. This also has the
+ // nice side effect that if a target generates the file we can find it and
+ // tell the user which dependency is missing.
+ //
+ // The result returned by GetUnknownGeneratedInputs will not count any files
+ // that were written by write_file during execution.
+ void AddUnknownGeneratedInput(const Target* target, const SourceFile& file);
+ std::multimap<SourceFile, const Target*> GetUnknownGeneratedInputs() const;
+ void ClearUnknownGeneratedInputsAndWrittenFiles(); // For testing.
+
+ // We maintain a count of the things we need to do that works like a
+ // refcount. When this reaches 0, the program exits.
+ void IncrementWorkCount();
+ void DecrementWorkCount();
+
+ private:
+ void LogOnMainThread(const std::string& verb, const std::string& msg);
+ void FailWithErrorOnMainThread(const Err& err);
+
+ void DoTargetFileWrite(const Target* target);
+
+ void DoWork(const base::Closure& closure);
+
+ void OnComplete();
+
+ base::MessageLoop main_loop_;
+ scoped_refptr<base::SequencedWorkerPool> pool_;
+
+ scoped_refptr<InputFileManager> input_file_manager_;
+
+ base::RunLoop runner_;
+
+ bool verbose_logging_;
+
+ base::AtomicRefCount work_count_;
+
+ mutable base::Lock lock_;
+ bool is_failed_;
+
+ // Used to track whether the worker pool has been shutdown. This is necessary
+ // to clean up after tests that make a scheduler but don't run the message
+ // loop.
+ bool has_been_shutdown_;
+
+ // Protected by the lock. See the corresponding Add/Get functions above.
+ std::vector<base::FilePath> gen_dependencies_;
+ std::vector<SourceFile> written_files_;
+ std::vector<const Target*> write_runtime_deps_targets_;
+ std::multimap<SourceFile, const Target*> unknown_generated_inputs_;
+
+ DISALLOW_COPY_AND_ASSIGN(Scheduler);
+};
+
+extern Scheduler* g_scheduler;
+
+#endif // TOOLS_GN_SCHEDULER_H_
diff --git a/chromium/tools/gn/scope.cc b/chromium/tools/gn/scope.cc
new file mode 100644
index 00000000000..d3d29d44927
--- /dev/null
+++ b/chromium/tools/gn/scope.cc
@@ -0,0 +1,521 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/scope.h"
+
+#include "base/logging.h"
+#include "base/stl_util.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/template.h"
+
+namespace {
+
+// Flags set in the mode_flags_ of a scope. If a bit is set, it applies
+// recursively to all dependent scopes.
+const unsigned kProcessingBuildConfigFlag = 1;
+const unsigned kProcessingImportFlag = 2;
+
+// Returns true if this variable name should be considered private. Private
+// values start with an underscore, and are not imported from "gni" files
+// when processing an import.
+bool IsPrivateVar(const base::StringPiece& name) {
+ return name.empty() || name[0] == '_';
+}
+
+} // namespace
+
+// Defaults to all false, which are the things least likely to cause errors.
+Scope::MergeOptions::MergeOptions()
+ : clobber_existing(false),
+ skip_private_vars(false),
+ mark_dest_used(false) {
+}
+
+Scope::MergeOptions::~MergeOptions() {
+}
+
+Scope::ProgrammaticProvider::~ProgrammaticProvider() {
+ scope_->RemoveProvider(this);
+}
+
+Scope::Scope(const Settings* settings)
+ : const_containing_(nullptr),
+ mutable_containing_(nullptr),
+ settings_(settings),
+ mode_flags_(0),
+ item_collector_(nullptr) {
+}
+
+Scope::Scope(Scope* parent)
+ : const_containing_(nullptr),
+ mutable_containing_(parent),
+ settings_(parent->settings()),
+ mode_flags_(0),
+ item_collector_(nullptr) {
+}
+
+Scope::Scope(const Scope* parent)
+ : const_containing_(parent),
+ mutable_containing_(nullptr),
+ settings_(parent->settings()),
+ mode_flags_(0),
+ item_collector_(nullptr) {
+}
+
+Scope::~Scope() {
+ STLDeleteContainerPairSecondPointers(target_defaults_.begin(),
+ target_defaults_.end());
+}
+
+const Value* Scope::GetValue(const base::StringPiece& ident,
+ bool counts_as_used) {
+ // First check for programmatically-provided values.
+ for (const auto& provider : programmatic_providers_) {
+ const Value* v = provider->GetProgrammaticValue(ident);
+ if (v)
+ return v;
+ }
+
+ RecordMap::iterator found = values_.find(ident);
+ if (found != values_.end()) {
+ if (counts_as_used)
+ found->second.used = true;
+ return &found->second.value;
+ }
+
+ // Search in the parent scope.
+ if (const_containing_)
+ return const_containing_->GetValue(ident);
+ if (mutable_containing_)
+ return mutable_containing_->GetValue(ident, counts_as_used);
+ return nullptr;
+}
+
+Value* Scope::GetMutableValue(const base::StringPiece& ident,
+ bool counts_as_used) {
+ // Don't do programmatic values, which are not mutable.
+ RecordMap::iterator found = values_.find(ident);
+ if (found != values_.end()) {
+ if (counts_as_used)
+ found->second.used = true;
+ return &found->second.value;
+ }
+
+ // Search in the parent mutable scope, but not const one.
+ if (mutable_containing_)
+ return mutable_containing_->GetMutableValue(ident, counts_as_used);
+ return nullptr;
+}
+
+Value* Scope::GetValueForcedToCurrentScope(const base::StringPiece& ident,
+ const ParseNode* set_node) {
+ RecordMap::iterator found = values_.find(ident);
+ if (found != values_.end())
+ return &found->second.value; // Already have in the current scope.
+
+ // Search in the parent scope.
+ if (containing()) {
+ const Value* in_containing = containing()->GetValue(ident);
+ if (in_containing) {
+ // Promote to current scope.
+ return SetValue(ident, *in_containing, set_node);
+ }
+ }
+ return nullptr;
+}
+
+base::StringPiece Scope::GetStorageKey(const base::StringPiece& ident) const {
+ RecordMap::const_iterator found = values_.find(ident);
+ if (found != values_.end())
+ return found->first;
+
+ // Search in parent scope.
+ if (containing())
+ return containing()->GetStorageKey(ident);
+ return base::StringPiece();
+}
+
+const Value* Scope::GetValue(const base::StringPiece& ident) const {
+ RecordMap::const_iterator found = values_.find(ident);
+ if (found != values_.end())
+ return &found->second.value;
+ if (containing())
+ return containing()->GetValue(ident);
+ return nullptr;
+}
+
+Value* Scope::SetValue(const base::StringPiece& ident,
+ const Value& v,
+ const ParseNode* set_node) {
+ Record& r = values_[ident]; // Clears any existing value.
+ r.value = v;
+ r.value.set_origin(set_node);
+ return &r.value;
+}
+
+void Scope::RemoveIdentifier(const base::StringPiece& ident) {
+ RecordMap::iterator found = values_.find(ident);
+ if (found != values_.end())
+ values_.erase(found);
+}
+
+void Scope::RemovePrivateIdentifiers() {
+ // Do it in two phases to avoid mutating while iterating. Our hash map is
+ // currently backed by several different vendor-specific implementations and
+ // I'm not sure if all of them support mutating while iterating. Since this
+ // is not perf-critical, do the safe thing.
+ std::vector<base::StringPiece> to_remove;
+ for (const auto& cur : values_) {
+ if (IsPrivateVar(cur.first))
+ to_remove.push_back(cur.first);
+ }
+
+ for (const auto& cur : to_remove)
+ values_.erase(cur);
+}
+
+bool Scope::AddTemplate(const std::string& name, const Template* templ) {
+ if (GetTemplate(name))
+ return false;
+ templates_[name] = templ;
+ return true;
+}
+
+const Template* Scope::GetTemplate(const std::string& name) const {
+ TemplateMap::const_iterator found = templates_.find(name);
+ if (found != templates_.end())
+ return found->second.get();
+ if (containing())
+ return containing()->GetTemplate(name);
+ return nullptr;
+}
+
+void Scope::MarkUsed(const base::StringPiece& ident) {
+ RecordMap::iterator found = values_.find(ident);
+ if (found == values_.end()) {
+ NOTREACHED();
+ return;
+ }
+ found->second.used = true;
+}
+
+void Scope::MarkAllUsed() {
+ for (auto& cur : values_)
+ cur.second.used = true;
+}
+
+void Scope::MarkUnused(const base::StringPiece& ident) {
+ RecordMap::iterator found = values_.find(ident);
+ if (found == values_.end()) {
+ NOTREACHED();
+ return;
+ }
+ found->second.used = false;
+}
+
+bool Scope::IsSetButUnused(const base::StringPiece& ident) const {
+ RecordMap::const_iterator found = values_.find(ident);
+ if (found != values_.end()) {
+ if (!found->second.used) {
+ return true;
+ }
+ }
+ return false;
+}
+
+bool Scope::CheckForUnusedVars(Err* err) const {
+ for (const auto& pair : values_) {
+ if (!pair.second.used) {
+ std::string help = "You set the variable \"" + pair.first.as_string() +
+ "\" here and it was unused before it went\nout of scope.";
+
+ const BinaryOpNode* binary = pair.second.value.origin()->AsBinaryOp();
+ if (binary && binary->op().type() == Token::EQUAL) {
+ // Make a nicer error message for normal var sets.
+ *err = Err(binary->left()->GetRange(), "Assignment had no effect.",
+ help);
+ } else {
+ // This will happen for internally-generated variables.
+ *err = Err(pair.second.value.origin(), "Assignment had no effect.",
+ help);
+ }
+ return false;
+ }
+ }
+ return true;
+}
+
+void Scope::GetCurrentScopeValues(KeyValueMap* output) const {
+ for (const auto& pair : values_)
+ (*output)[pair.first] = pair.second.value;
+}
+
+bool Scope::NonRecursiveMergeTo(Scope* dest,
+ const MergeOptions& options,
+ const ParseNode* node_for_err,
+ const char* desc_for_err,
+ Err* err) const {
+ // Values.
+ for (const auto& pair : values_) {
+ const base::StringPiece& current_name = pair.first;
+ if (options.skip_private_vars && IsPrivateVar(current_name))
+ continue; // Skip this private var.
+ if (!options.excluded_values.empty() &&
+ options.excluded_values.find(current_name.as_string()) !=
+ options.excluded_values.end()) {
+ continue; // Skip this excluded value.
+ }
+
+ const Value& new_value = pair.second.value;
+ if (!options.clobber_existing) {
+ const Value* existing_value = dest->GetValue(current_name);
+ if (existing_value && new_value != *existing_value) {
+ // Value present in both the source and the dest.
+ std::string desc_string(desc_for_err);
+ *err = Err(node_for_err, "Value collision.",
+ "This " + desc_string + " contains \"" + current_name.as_string() +
+ "\"");
+ err->AppendSubErr(Err(pair.second.value, "defined here.",
+ "Which would clobber the one in your current scope"));
+ err->AppendSubErr(Err(*existing_value, "defined here.",
+ "Executing " + desc_string + " should not conflict with anything "
+ "in the current\nscope unless the values are identical."));
+ return false;
+ }
+ }
+ dest->values_[current_name] = pair.second;
+
+ if (options.mark_dest_used)
+ dest->MarkUsed(current_name);
+ }
+
+ // Target defaults are owning pointers.
+ for (const auto& pair : target_defaults_) {
+ const std::string& current_name = pair.first;
+ if (!options.excluded_values.empty() &&
+ options.excluded_values.find(current_name) !=
+ options.excluded_values.end()) {
+ continue; // Skip the excluded value.
+ }
+
+ if (!options.clobber_existing) {
+ if (dest->GetTargetDefaults(current_name)) {
+ // TODO(brettw) it would be nice to know the origin of a
+ // set_target_defaults so we can give locations for the colliding target
+ // defaults.
+ std::string desc_string(desc_for_err);
+ *err = Err(node_for_err, "Target defaults collision.",
+ "This " + desc_string + " contains target defaults for\n"
+ "\"" + current_name + "\" which would clobber one for the\n"
+ "same target type in your current scope. It's unfortunate that I'm "
+ "too stupid\nto tell you the location of where the target defaults "
+ "were set. Usually\nthis happens in the BUILDCONFIG.gn file.");
+ return false;
+ }
+ }
+
+ // Be careful to delete any pointer we're about to clobber.
+ Scope** dest_scope = &dest->target_defaults_[current_name];
+ if (*dest_scope)
+ delete *dest_scope;
+ *dest_scope = new Scope(settings_);
+ pair.second->NonRecursiveMergeTo(*dest_scope, options, node_for_err,
+ "<SHOULDN'T HAPPEN>", err);
+ }
+
+ // Sources assignment filter.
+ if (sources_assignment_filter_) {
+ if (!options.clobber_existing) {
+ if (dest->GetSourcesAssignmentFilter()) {
+ // Sources assignment filter present in both the source and the dest.
+ std::string desc_string(desc_for_err);
+ *err = Err(node_for_err, "Assignment filter collision.",
+ "The " + desc_string + " contains a sources_assignment_filter "
+ "which\nwould clobber the one in your current scope.");
+ return false;
+ }
+ }
+ dest->sources_assignment_filter_.reset(
+ new PatternList(*sources_assignment_filter_));
+ }
+
+ // Templates.
+ for (const auto& pair : templates_) {
+ const std::string& current_name = pair.first;
+ if (options.skip_private_vars && IsPrivateVar(current_name))
+ continue; // Skip this private template.
+ if (!options.excluded_values.empty() &&
+ options.excluded_values.find(current_name) !=
+ options.excluded_values.end()) {
+ continue; // Skip the excluded value.
+ }
+
+ if (!options.clobber_existing) {
+ const Template* existing_template = dest->GetTemplate(current_name);
+ // Since templates are refcounted, we can check if it's the same one by
+ // comparing pointers.
+ if (existing_template && pair.second.get() != existing_template) {
+ // Rule present in both the source and the dest, and they're not the
+ // same one.
+ std::string desc_string(desc_for_err);
+ *err = Err(node_for_err, "Template collision.",
+ "This " + desc_string + " contains a template \"" +
+ current_name + "\"");
+ err->AppendSubErr(Err(pair.second->GetDefinitionRange(),
+ "defined here.",
+ "Which would clobber the one in your current scope"));
+ err->AppendSubErr(Err(existing_template->GetDefinitionRange(),
+ "defined here.",
+ "Executing " + desc_string + " should not conflict with anything "
+ "in the current\nscope."));
+ return false;
+ }
+ }
+
+ // Be careful to delete any pointer we're about to clobber.
+ dest->templates_[current_name] = pair.second;
+ }
+
+ return true;
+}
+
+std::unique_ptr<Scope> Scope::MakeClosure() const {
+ std::unique_ptr<Scope> result;
+ if (const_containing_) {
+ // We reached the top of the mutable scope stack. The result scope just
+ // references the const scope (which will never change).
+ result.reset(new Scope(const_containing_));
+ } else if (mutable_containing_) {
+ // There are more nested mutable scopes. Recursively go up the stack to
+ // get the closure.
+ result = mutable_containing_->MakeClosure();
+ } else {
+ // This is a standalone scope, just copy it.
+ result.reset(new Scope(settings_));
+ }
+
+ // Want to clobber since we've flattened some nested scopes, and our parent
+ // scope may have a duplicate value set.
+ MergeOptions options;
+ options.clobber_existing = true;
+
+ // Add in our variables and we're done.
+ Err err;
+ NonRecursiveMergeTo(result.get(), options, nullptr, "<SHOULDN'T HAPPEN>",
+ &err);
+ DCHECK(!err.has_error());
+ return result;
+}
+
+Scope* Scope::MakeTargetDefaults(const std::string& target_type) {
+ if (GetTargetDefaults(target_type))
+ return nullptr;
+
+ Scope** dest = &target_defaults_[target_type];
+ if (*dest) {
+ NOTREACHED(); // Already set.
+ return *dest;
+ }
+ *dest = new Scope(settings_);
+ return *dest;
+}
+
+const Scope* Scope::GetTargetDefaults(const std::string& target_type) const {
+ NamedScopeMap::const_iterator found = target_defaults_.find(target_type);
+ if (found != target_defaults_.end())
+ return found->second;
+ if (containing())
+ return containing()->GetTargetDefaults(target_type);
+ return nullptr;
+}
+
+const PatternList* Scope::GetSourcesAssignmentFilter() const {
+ if (sources_assignment_filter_)
+ return sources_assignment_filter_.get();
+ if (containing())
+ return containing()->GetSourcesAssignmentFilter();
+ return nullptr;
+}
+
+void Scope::SetProcessingBuildConfig() {
+ DCHECK((mode_flags_ & kProcessingBuildConfigFlag) == 0);
+ mode_flags_ |= kProcessingBuildConfigFlag;
+}
+
+void Scope::ClearProcessingBuildConfig() {
+ DCHECK(mode_flags_ & kProcessingBuildConfigFlag);
+ mode_flags_ &= ~(kProcessingBuildConfigFlag);
+}
+
+bool Scope::IsProcessingBuildConfig() const {
+ if (mode_flags_ & kProcessingBuildConfigFlag)
+ return true;
+ if (containing())
+ return containing()->IsProcessingBuildConfig();
+ return false;
+}
+
+void Scope::SetProcessingImport() {
+ DCHECK((mode_flags_ & kProcessingImportFlag) == 0);
+ mode_flags_ |= kProcessingImportFlag;
+}
+
+void Scope::ClearProcessingImport() {
+ DCHECK(mode_flags_ & kProcessingImportFlag);
+ mode_flags_ &= ~(kProcessingImportFlag);
+}
+
+bool Scope::IsProcessingImport() const {
+ if (mode_flags_ & kProcessingImportFlag)
+ return true;
+ if (containing())
+ return containing()->IsProcessingImport();
+ return false;
+}
+
+const SourceDir& Scope::GetSourceDir() const {
+ if (!source_dir_.is_null())
+ return source_dir_;
+ if (containing())
+ return containing()->GetSourceDir();
+ return source_dir_;
+}
+
+Scope::ItemVector* Scope::GetItemCollector() {
+ if (item_collector_)
+ return item_collector_;
+ if (mutable_containing())
+ return mutable_containing()->GetItemCollector();
+ return nullptr;
+}
+
+void Scope::SetProperty(const void* key, void* value) {
+ if (!value) {
+ DCHECK(properties_.find(key) != properties_.end());
+ properties_.erase(key);
+ } else {
+ properties_[key] = value;
+ }
+}
+
+void* Scope::GetProperty(const void* key, const Scope** found_on_scope) const {
+ PropertyMap::const_iterator found = properties_.find(key);
+ if (found != properties_.end()) {
+ if (found_on_scope)
+ *found_on_scope = this;
+ return found->second;
+ }
+ if (containing())
+ return containing()->GetProperty(key, found_on_scope);
+ return nullptr;
+}
+
+void Scope::AddProvider(ProgrammaticProvider* p) {
+ programmatic_providers_.insert(p);
+}
+
+void Scope::RemoveProvider(ProgrammaticProvider* p) {
+ DCHECK(programmatic_providers_.find(p) != programmatic_providers_.end());
+ programmatic_providers_.erase(p);
+}
diff --git a/chromium/tools/gn/scope.h b/chromium/tools/gn/scope.h
new file mode 100644
index 00000000000..72aa0c3e2fb
--- /dev/null
+++ b/chromium/tools/gn/scope.h
@@ -0,0 +1,364 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_SCOPE_H_
+#define TOOLS_GN_SCOPE_H_
+
+#include <map>
+#include <memory>
+#include <set>
+#include <utility>
+
+#include "base/containers/hash_tables.h"
+#include "base/macros.h"
+#include "base/memory/ref_counted.h"
+#include "base/memory/scoped_vector.h"
+#include "tools/gn/err.h"
+#include "tools/gn/pattern.h"
+#include "tools/gn/source_dir.h"
+#include "tools/gn/value.h"
+
+class FunctionCallNode;
+class ImportManager;
+class Item;
+class ParseNode;
+class Settings;
+class TargetManager;
+class Template;
+
+// Scope for the script execution.
+//
+// Scopes are nested. Writing goes into the toplevel scope, reading checks
+// values recursively down the stack until a match is found or there are no
+// more containing scopes.
+//
+// A containing scope can be const or non-const. The const containing scope is
+// used primarily to refer to the master build config which is shared across
+// many invocations. A const containing scope, however, prevents us from
+// marking variables "used" which prevents us from issuing errors on unused
+// variables. So you should use a non-const containing scope whenever possible.
+class Scope {
+ public:
+ typedef base::hash_map<base::StringPiece, Value, base::StringPieceHash>
+ KeyValueMap;
+ // Holds an owning list of Items.
+ typedef ScopedVector<Item> ItemVector;
+
+ // Allows code to provide values for built-in variables. This class will
+ // automatically register itself on construction and deregister itself on
+ // destruction.
+ class ProgrammaticProvider {
+ public:
+ explicit ProgrammaticProvider(Scope* scope) : scope_(scope) {
+ scope_->AddProvider(this);
+ }
+ virtual ~ProgrammaticProvider();
+
+ // Returns a non-null value if the given value can be programmatically
+ // generated, or NULL if there is none.
+ virtual const Value* GetProgrammaticValue(
+ const base::StringPiece& ident) = 0;
+
+ protected:
+ Scope* scope_;
+ };
+
+ // Options for configuring scope merges.
+ struct MergeOptions {
+ MergeOptions();
+ ~MergeOptions();
+
+    // When set, all existing values in the destination scope will be
+ // overwritten.
+ //
+ // When false, it will be an error to merge a variable into another scope
+ // where a variable with the same name is already set. The exception is
+ // if both of the variables have the same value (which happens if you
+ // somehow multiply import the same file, for example). This case will be
+ // ignored since there is nothing getting lost.
+ bool clobber_existing;
+
+ // When true, private variables (names beginning with an underscore) will
+ // be copied to the destination scope. When false, private values will be
+ // skipped.
+ bool skip_private_vars;
+
+ // When set, values copied to the destination scope will be marked as used
+ // so won't trigger an unused variable warning. You want this when doing an
+ // import, for example, or files that don't need a variable from the .gni
+ // file will throw an error.
+ bool mark_dest_used;
+
+ // When set, those variables are not merged.
+ std::set<std::string> excluded_values;
+ };
+
+ // Creates an empty toplevel scope.
+ explicit Scope(const Settings* settings);
+
+ // Creates a dependent scope.
+ explicit Scope(Scope* parent);
+ explicit Scope(const Scope* parent);
+
+ ~Scope();
+
+ const Settings* settings() const { return settings_; }
+
+  // See the const_/mutable_containing_ var declarations below. Yes, it's a
+ // bit weird that we can have a const pointer to the "mutable" one.
+ Scope* mutable_containing() { return mutable_containing_; }
+ const Scope* mutable_containing() const { return mutable_containing_; }
+ const Scope* const_containing() const { return const_containing_; }
+ const Scope* containing() const {
+ return mutable_containing_ ? mutable_containing_ : const_containing_;
+ }
+
+ // Returns NULL if there's no such value.
+ //
+ // counts_as_used should be set if the variable is being read in a way that
+ // should count for unused variable checking.
+ const Value* GetValue(const base::StringPiece& ident,
+ bool counts_as_used);
+ const Value* GetValue(const base::StringPiece& ident) const;
+
+ // Returns the requested value as a mutable one if possible. If the value
+ // is not found in a mutable scope, then returns null. Note that the value
+ // could still exist in a const scope, so GetValue() could still return
+ // non-null in this case.
+ //
+ // Say you have a local scope that then refers to the const root scope from
+ // the master build config. You can't change the values from the master
+ // build config (it's read-only so it can be read from multiple threads
+ // without locking). Read-only operations would work on values from the root
+ // scope, but write operations would only work on values in the derived
+ // scope(s).
+ //
+ // Be careful when calling this. It's not normally correct to modify values,
+ // but you should instead do a new Set each time.
+ //
+ // Consider this code:
+ // a = 5
+ // {
+ // a = 6
+ // }
+ // The 6 should get set on the nested scope rather than modify the value
+ // in the outer one.
+ Value* GetMutableValue(const base::StringPiece& ident, bool counts_as_used);
+
+ // Same as GetValue, but if the value exists in a parent scope, we'll copy
+ // it to the current scope. If the return value is non-null, the value is
+  // guaranteed to be set in the current scope. Generally this will be used
+ // if the calling code is planning on modifying the value in-place.
+ //
+ // Since this is used when doing read-modifies, we never count this access
+ // as reading the variable, since we assume it will be written to.
+ Value* GetValueForcedToCurrentScope(const base::StringPiece& ident,
+ const ParseNode* set_node);
+
+ // Returns the StringPiece used to identify the value. This string piece
+ // will have the same contents as "ident" passed in, but may point to a
+ // different underlying buffer. This is useful because this StringPiece is
+ // static and won't be deleted for the life of the program, so it can be used
+ // as keys in places that may outlive a temporary. It will return an empty
+  // string for programmatic and nonexistent values.
+ base::StringPiece GetStorageKey(const base::StringPiece& ident) const;
+
+ // The set_node indicates the statement that caused the set, for displaying
+ // errors later. Returns a pointer to the value in the current scope (a copy
+ // is made for storage).
+ Value* SetValue(const base::StringPiece& ident,
+ const Value& v,
+ const ParseNode* set_node);
+
+ // Removes the value with the given identifier if it exists on the current
+ // scope. This does not search recursive scopes. Does nothing if not found.
+ void RemoveIdentifier(const base::StringPiece& ident);
+
+ // Removes from this scope all identifiers and templates that are considered
+ // private.
+ void RemovePrivateIdentifiers();
+
+ // Templates associated with this scope. A template can only be set once, so
+ // AddTemplate will fail and return false if a rule with that name already
+ // exists. GetTemplate returns NULL if the rule doesn't exist, and it will
+  // check all containing scopes recursively.
+ bool AddTemplate(const std::string& name, const Template* templ);
+ const Template* GetTemplate(const std::string& name) const;
+
+ // Marks the given identifier as (un)used in the current scope.
+ void MarkUsed(const base::StringPiece& ident);
+ void MarkAllUsed();
+ void MarkUnused(const base::StringPiece& ident);
+
+ // Checks to see if the scope has a var set that hasn't been used. This is
+ // called before replacing the var with a different one. It does not check
+ // containing scopes.
+ //
+  // If the identifier is present but hasn't been used, return true.
+ bool IsSetButUnused(const base::StringPiece& ident) const;
+
+ // Checks the scope to see if any values were set but not used, and fills in
+ // the error and returns false if they were.
+ bool CheckForUnusedVars(Err* err) const;
+
+ // Returns all values set in the current scope, without going to the parent
+ // scopes.
+ void GetCurrentScopeValues(KeyValueMap* output) const;
+
+ // Copies this scope's values into the destination. Values from the
+ // containing scope(s) (normally shadowed into the current one) will not be
+ // copied, neither will the reference to the containing scope (this is why
+ // it's "non-recursive").
+ //
+ // This is used in different contexts. When generating the error, the given
+ // parse node will be blamed, and the given desc will be used to describe
+ // the operation that doesn't support doing this. For example, desc_for_err
+ // would be "import" when doing an import, and the error string would say
+ // something like "The import contains...".
+ bool NonRecursiveMergeTo(Scope* dest,
+ const MergeOptions& options,
+ const ParseNode* node_for_err,
+ const char* desc_for_err,
+ Err* err) const;
+
+ // Constructs a scope that is a copy of the current one. Nested scopes will
+ // be collapsed until we reach a const containing scope. Private values will
+ // be included. The resulting closure will reference the const containing
+ // scope as its containing scope (since we assume the const scope won't
+ // change, we don't have to copy its values).
+ std::unique_ptr<Scope> MakeClosure() const;
+
+ // Makes an empty scope with the given name. Returns NULL if the name is
+ // already set.
+ Scope* MakeTargetDefaults(const std::string& target_type);
+
+ // Gets the scope associated with the given target name, or null if it hasn't
+ // been set.
+ const Scope* GetTargetDefaults(const std::string& target_type) const;
+
+ // Filter to apply when the sources variable is assigned. May return NULL.
+ const PatternList* GetSourcesAssignmentFilter() const;
+ void set_sources_assignment_filter(std::unique_ptr<PatternList> f) {
+ sources_assignment_filter_ = std::move(f);
+ }
+
+ // Indicates if we're currently processing the build configuration file.
+ // This is true when processing the config file for any toolchain.
+ //
+ // To set or clear the flag, it must currently be in the opposite state in
+ // the current scope. Note that querying the state of the flag recursively
+ // checks all containing scopes until it reaches the top or finds the flag
+ // set.
+ void SetProcessingBuildConfig();
+ void ClearProcessingBuildConfig();
+ bool IsProcessingBuildConfig() const;
+
+ // Indicates if we're currently processing an import file.
+ //
+  // See SetProcessingBuildConfig for how flags work.
+ void SetProcessingImport();
+ void ClearProcessingImport();
+ bool IsProcessingImport() const;
+
+ // The source directory associated with this scope. This will check embedded
+ // scopes until it finds a nonempty source directory. This will default to
+ // an empty dir if no containing scope has a source dir set.
+ const SourceDir& GetSourceDir() const;
+ void set_source_dir(const SourceDir& d) { source_dir_ = d; }
+
+ // The item collector is where Items (Targets, Configs, etc.) go that have
+ // been defined. If a scope can generate items, this non-owning pointer will
+ // point to the storage for such items. The creator of this scope will be
+ // responsible for setting up the collector and then dealing with the
+ // collected items once execution of the context is complete.
+ //
+ // The items in a scope are collected as we go and then dispatched at the end
+ // of execution of a scope so that we can query the previously-generated
+ // targets (like getting the outputs).
+ //
+ // This can be null if the current scope can not generate items (like for
+ // imports and such).
+ //
+ // When retrieving the collector, the non-const scopes are recursively
+ // queried. The collector is not copied for closures, etc.
+ void set_item_collector(ItemVector* collector) {
+ item_collector_ = collector;
+ }
+ ItemVector* GetItemCollector();
+
+ // Properties are opaque pointers that code can use to set state on a Scope
+ // that it can retrieve later.
+ //
+ // The key should be a pointer to some use-case-specific object (to avoid
+ // collisions, otherwise it doesn't matter). Memory management is up to the
+ // setter. Setting the value to NULL will delete the property.
+ //
+ // Getting a property recursively searches all scopes, and the optional
+ // |found_on_scope| variable will be filled with the actual scope containing
+ // the key (if the pointer is non-NULL).
+ void SetProperty(const void* key, void* value);
+ void* GetProperty(const void* key, const Scope** found_on_scope) const;
+
+ private:
+ friend class ProgrammaticProvider;
+
+ struct Record {
+ Record() : used(false) {}
+ explicit Record(const Value& v) : used(false), value(v) {}
+
+ bool used; // Set to true when the variable is used.
+ Value value;
+ };
+
+ void AddProvider(ProgrammaticProvider* p);
+ void RemoveProvider(ProgrammaticProvider* p);
+
+ // Scopes can have no containing scope (both null), a mutable containing
+ // scope, or a const containing scope. The reason is that when we're doing
+ // a new target, we want to refer to the base_config scope which will be read
+ // by multiple threads at the same time, so we REALLY want it to be const.
+  // When you just do a nested {}, however, we sometimes want to be able to
+ // change things (especially marking unused vars).
+ const Scope* const_containing_;
+ Scope* mutable_containing_;
+
+ const Settings* settings_;
+
+ // Bits set for different modes. See the flag definitions in the .cc file
+ // for more.
+ unsigned mode_flags_;
+
+ typedef base::hash_map<base::StringPiece, Record, base::StringPieceHash>
+ RecordMap;
+ RecordMap values_;
+
+ // Owning pointers. Note that this can't use string pieces since the names
+ // are constructed from Values which might be deallocated before this goes
+ // out of scope.
+ typedef base::hash_map<std::string, Scope*> NamedScopeMap;
+ NamedScopeMap target_defaults_;
+
+ // Null indicates not set and that we should fallback to the containing
+ // scope's filter.
+ std::unique_ptr<PatternList> sources_assignment_filter_;
+
+ // Owning pointers, must be deleted.
+ typedef std::map<std::string, scoped_refptr<const Template> > TemplateMap;
+ TemplateMap templates_;
+
+ ItemVector* item_collector_;
+
+ // Opaque pointers. See SetProperty() above.
+ typedef std::map<const void*, void*> PropertyMap;
+ PropertyMap properties_;
+
+ typedef std::set<ProgrammaticProvider*> ProviderSet;
+ ProviderSet programmatic_providers_;
+
+ SourceDir source_dir_;
+
+ DISALLOW_COPY_AND_ASSIGN(Scope);
+};
+
+#endif // TOOLS_GN_SCOPE_H_
diff --git a/chromium/tools/gn/scope_per_file_provider.cc b/chromium/tools/gn/scope_per_file_provider.cc
new file mode 100644
index 00000000000..d2dea11e645
--- /dev/null
+++ b/chromium/tools/gn/scope_per_file_provider.cc
@@ -0,0 +1,116 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/scope_per_file_provider.h"
+
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/source_file.h"
+#include "tools/gn/value.h"
+#include "tools/gn/variables.h"
+
+ScopePerFileProvider::ScopePerFileProvider(Scope* scope,
+ bool allow_target_vars)
+ : ProgrammaticProvider(scope),
+ allow_target_vars_(allow_target_vars) {
+}
+
+ScopePerFileProvider::~ScopePerFileProvider() {
+}
+
+const Value* ScopePerFileProvider::GetProgrammaticValue(
+ const base::StringPiece& ident) {
+ if (ident == variables::kCurrentToolchain)
+ return GetCurrentToolchain();
+ if (ident == variables::kDefaultToolchain)
+ return GetDefaultToolchain();
+ if (ident == variables::kPythonPath)
+ return GetPythonPath();
+
+ if (ident == variables::kRootBuildDir)
+ return GetRootBuildDir();
+ if (ident == variables::kRootGenDir)
+ return GetRootGenDir();
+ if (ident == variables::kRootOutDir)
+ return GetRootOutDir();
+
+ if (allow_target_vars_) {
+ if (ident == variables::kTargetGenDir)
+ return GetTargetGenDir();
+ if (ident == variables::kTargetOutDir)
+ return GetTargetOutDir();
+ }
+ return nullptr;
+}
+
+const Value* ScopePerFileProvider::GetCurrentToolchain() {
+ if (!current_toolchain_) {
+ current_toolchain_.reset(new Value(
+ nullptr,
+ scope_->settings()->toolchain_label().GetUserVisibleName(false)));
+ }
+ return current_toolchain_.get();
+}
+
+const Value* ScopePerFileProvider::GetDefaultToolchain() {
+ if (!default_toolchain_) {
+ default_toolchain_.reset(new Value(
+ nullptr,
+ scope_->settings()->default_toolchain_label().GetUserVisibleName(
+ false)));
+ }
+ return default_toolchain_.get();
+}
+
+const Value* ScopePerFileProvider::GetPythonPath() {
+ if (!python_path_) {
+ python_path_.reset(new Value(
+ nullptr,
+ FilePathToUTF8(scope_->settings()->build_settings()->python_path())));
+ }
+ return python_path_.get();
+}
+
+const Value* ScopePerFileProvider::GetRootBuildDir() {
+ if (!root_build_dir_) {
+ root_build_dir_.reset(new Value(
+ nullptr, DirectoryWithNoLastSlash(
+ scope_->settings()->build_settings()->build_dir())));
+ }
+ return root_build_dir_.get();
+}
+
+const Value* ScopePerFileProvider::GetRootGenDir() {
+ if (!root_gen_dir_) {
+ root_gen_dir_.reset(new Value(
+ nullptr,
+ DirectoryWithNoLastSlash(GetToolchainGenDir(scope_->settings()))));
+ }
+ return root_gen_dir_.get();
+}
+
+const Value* ScopePerFileProvider::GetRootOutDir() {
+ if (!root_out_dir_) {
+ root_out_dir_.reset(new Value(
+ nullptr,
+ DirectoryWithNoLastSlash(GetToolchainOutputDir(scope_->settings()))));
+ }
+ return root_out_dir_.get();
+}
+
+const Value* ScopePerFileProvider::GetTargetGenDir() {
+ if (!target_gen_dir_) {
+ target_gen_dir_.reset(
+ new Value(nullptr, DirectoryWithNoLastSlash(GetCurrentGenDir(scope_))));
+ }
+ return target_gen_dir_.get();
+}
+
+const Value* ScopePerFileProvider::GetTargetOutDir() {
+ if (!target_out_dir_) {
+ target_out_dir_.reset(new Value(
+ nullptr, DirectoryWithNoLastSlash(GetCurrentOutputDir(scope_))));
+ }
+ return target_out_dir_.get();
+}
diff --git a/chromium/tools/gn/scope_per_file_provider.h b/chromium/tools/gn/scope_per_file_provider.h
new file mode 100644
index 00000000000..ac0d8720615
--- /dev/null
+++ b/chromium/tools/gn/scope_per_file_provider.h
@@ -0,0 +1,51 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_SCOPE_PER_FILE_PROVIDER_H_
+#define TOOLS_GN_SCOPE_PER_FILE_PROVIDER_H_
+
+#include <memory>
+
+#include "base/macros.h"
+#include "tools/gn/scope.h"
+
+// ProgrammaticProvider for a scope to provide it with per-file built-in
+// variable support.
+class ScopePerFileProvider : public Scope::ProgrammaticProvider {
+ public:
+ // allow_target_vars allows the target-related variables to get resolved.
+ // When allow_target_vars is unset, the target-related values will be
+ // undefined to GN script.
+ ScopePerFileProvider(Scope* scope, bool allow_target_vars);
+ ~ScopePerFileProvider() override;
+
+ // ProgrammaticProvider implementation.
+ const Value* GetProgrammaticValue(const base::StringPiece& ident) override;
+
+ private:
+ const Value* GetCurrentToolchain();
+ const Value* GetDefaultToolchain();
+ const Value* GetPythonPath();
+ const Value* GetRootBuildDir();
+ const Value* GetRootGenDir();
+ const Value* GetRootOutDir();
+ const Value* GetTargetGenDir();
+ const Value* GetTargetOutDir();
+
+ bool allow_target_vars_;
+
+ // All values are lazily created.
+ std::unique_ptr<Value> current_toolchain_;
+ std::unique_ptr<Value> default_toolchain_;
+ std::unique_ptr<Value> python_path_;
+ std::unique_ptr<Value> root_build_dir_;
+ std::unique_ptr<Value> root_gen_dir_;
+ std::unique_ptr<Value> root_out_dir_;
+ std::unique_ptr<Value> target_gen_dir_;
+ std::unique_ptr<Value> target_out_dir_;
+
+ DISALLOW_COPY_AND_ASSIGN(ScopePerFileProvider);
+};
+
+#endif // TOOLS_GN_SCOPE_PER_FILE_PROVIDER_H_
diff --git a/chromium/tools/gn/scope_per_file_provider_unittest.cc b/chromium/tools/gn/scope_per_file_provider_unittest.cc
new file mode 100644
index 00000000000..d016a1e1f5c
--- /dev/null
+++ b/chromium/tools/gn/scope_per_file_provider_unittest.cc
@@ -0,0 +1,55 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/scope_per_file_provider.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/test_with_scope.h"
+#include "tools/gn/toolchain.h"
+#include "tools/gn/variables.h"
+
+TEST(ScopePerFileProvider, Expected) {
+ TestWithScope test;
+
+// Prevent horrible wrapping of calls below.
+#define GPV(val) provider.GetProgrammaticValue(val)->string_value()
+
+ // Test the default toolchain.
+ {
+ Scope scope(test.settings());
+ scope.set_source_dir(SourceDir("//source/"));
+ ScopePerFileProvider provider(&scope, true);
+
+ EXPECT_EQ("//toolchain:default", GPV(variables::kCurrentToolchain));
+ // TODO(brettw) this test harness does not set up the Toolchain manager
+ // which is the source of this value, so we can't test this yet.
+ //EXPECT_EQ("//toolchain:default", GPV(variables::kDefaultToolchain));
+ EXPECT_EQ("//out/Debug", GPV(variables::kRootBuildDir));
+ EXPECT_EQ("//out/Debug/gen", GPV(variables::kRootGenDir));
+ EXPECT_EQ("//out/Debug", GPV(variables::kRootOutDir));
+ EXPECT_EQ("//out/Debug/gen/source", GPV(variables::kTargetGenDir));
+ EXPECT_EQ("//out/Debug/obj/source", GPV(variables::kTargetOutDir));
+ }
+
+ // Test some with an alternate toolchain.
+ {
+ Settings settings(test.build_settings(), "tc/");
+ Toolchain toolchain(&settings, Label(SourceDir("//toolchain/"), "tc"));
+ settings.set_toolchain_label(toolchain.label());
+
+ Scope scope(&settings);
+ scope.set_source_dir(SourceDir("//source/"));
+ ScopePerFileProvider provider(&scope, true);
+
+ EXPECT_EQ("//toolchain:tc", GPV(variables::kCurrentToolchain));
+ // See above.
+ //EXPECT_EQ("//toolchain:default", GPV(variables::kDefaultToolchain));
+ EXPECT_EQ("//out/Debug", GPV(variables::kRootBuildDir));
+ EXPECT_EQ("//out/Debug/tc/gen", GPV(variables::kRootGenDir));
+ EXPECT_EQ("//out/Debug/tc", GPV(variables::kRootOutDir));
+ EXPECT_EQ("//out/Debug/tc/gen/source", GPV(variables::kTargetGenDir));
+ EXPECT_EQ("//out/Debug/tc/obj/source", GPV(variables::kTargetOutDir));
+ }
+}
diff --git a/chromium/tools/gn/scope_unittest.cc b/chromium/tools/gn/scope_unittest.cc
new file mode 100644
index 00000000000..de2005a5144
--- /dev/null
+++ b/chromium/tools/gn/scope_unittest.cc
@@ -0,0 +1,295 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/template.h"
+#include "tools/gn/test_with_scope.h"
+
+namespace {
+
+bool HasStringValueEqualTo(const Scope* scope,
+ const char* name,
+ const char* expected_value) {
+ const Value* value = scope->GetValue(name);
+ if (!value)
+ return false;
+ if (value->type() != Value::STRING)
+ return false;
+ return value->string_value() == expected_value;
+}
+
+} // namespace
+
+TEST(Scope, NonRecursiveMergeTo) {
+ TestWithScope setup;
+
+ // Make a pretend parse node with proper tracking that we can blame for the
+ // given value.
+ InputFile input_file(SourceFile("//foo"));
+ Token assignment_token(Location(&input_file, 1, 1, 1), Token::STRING,
+ "\"hello\"");
+ LiteralNode assignment;
+ assignment.set_value(assignment_token);
+
+ // Add some values to the scope.
+ Value old_value(&assignment, "hello");
+ setup.scope()->SetValue("v", old_value, &assignment);
+ base::StringPiece private_var_name("_private");
+ setup.scope()->SetValue(private_var_name, old_value, &assignment);
+
+ // Add some templates to the scope.
+ FunctionCallNode templ_definition;
+ scoped_refptr<Template> templ(new Template(setup.scope(), &templ_definition));
+ setup.scope()->AddTemplate("templ", templ.get());
+ scoped_refptr<Template> private_templ(
+ new Template(setup.scope(), &templ_definition));
+ setup.scope()->AddTemplate("_templ", private_templ.get());
+
+ // Detect collisions of values' values.
+ {
+ Scope new_scope(setup.settings());
+ Value new_value(&assignment, "goodbye");
+ new_scope.SetValue("v", new_value, &assignment);
+
+ Err err;
+ EXPECT_FALSE(setup.scope()->NonRecursiveMergeTo(
+ &new_scope, Scope::MergeOptions(),
+ &assignment, "error", &err));
+ EXPECT_TRUE(err.has_error());
+ }
+
+ // Template name collisions.
+ {
+ Scope new_scope(setup.settings());
+
+ scoped_refptr<Template> new_templ(
+ new Template(&new_scope, &templ_definition));
+ new_scope.AddTemplate("templ", new_templ.get());
+
+ Err err;
+ EXPECT_FALSE(setup.scope()->NonRecursiveMergeTo(
+ &new_scope, Scope::MergeOptions(), &assignment, "error", &err));
+ EXPECT_TRUE(err.has_error());
+ }
+
+ // The clobber flag should just overwrite colliding values.
+ {
+ Scope new_scope(setup.settings());
+ Value new_value(&assignment, "goodbye");
+ new_scope.SetValue("v", new_value, &assignment);
+
+ Err err;
+ Scope::MergeOptions options;
+ options.clobber_existing = true;
+ EXPECT_TRUE(setup.scope()->NonRecursiveMergeTo(
+ &new_scope, options, &assignment, "error", &err));
+ EXPECT_FALSE(err.has_error());
+
+ const Value* found_value = new_scope.GetValue("v");
+ ASSERT_TRUE(found_value);
+ EXPECT_TRUE(old_value == *found_value);
+ }
+
+ // Clobber flag for templates.
+ {
+ Scope new_scope(setup.settings());
+
+ scoped_refptr<Template> new_templ(
+ new Template(&new_scope, &templ_definition));
+ new_scope.AddTemplate("templ", new_templ.get());
+ Scope::MergeOptions options;
+ options.clobber_existing = true;
+
+ Err err;
+ EXPECT_TRUE(setup.scope()->NonRecursiveMergeTo(
+ &new_scope, options, &assignment, "error", &err));
+ EXPECT_FALSE(err.has_error());
+
+ const Template* found_value = new_scope.GetTemplate("templ");
+ ASSERT_TRUE(found_value);
+ EXPECT_TRUE(templ.get() == found_value);
+ }
+
+ // Don't flag values that technically collide but have the same value.
+ {
+ Scope new_scope(setup.settings());
+ Value new_value(&assignment, "hello");
+ new_scope.SetValue("v", new_value, &assignment);
+
+ Err err;
+ EXPECT_TRUE(setup.scope()->NonRecursiveMergeTo(
+ &new_scope, Scope::MergeOptions(), &assignment, "error", &err));
+ EXPECT_FALSE(err.has_error());
+ }
+
+ // Templates that technically collide but are the same.
+ {
+ Scope new_scope(setup.settings());
+
+ scoped_refptr<Template> new_templ(
+ new Template(&new_scope, &templ_definition));
+ new_scope.AddTemplate("templ", templ.get());
+
+ Err err;
+ EXPECT_TRUE(setup.scope()->NonRecursiveMergeTo(
+ &new_scope, Scope::MergeOptions(), &assignment, "error", &err));
+ EXPECT_FALSE(err.has_error());
+ }
+
+ // Copy private values and templates.
+ {
+ Scope new_scope(setup.settings());
+
+ Err err;
+ EXPECT_TRUE(setup.scope()->NonRecursiveMergeTo(
+ &new_scope, Scope::MergeOptions(), &assignment, "error", &err));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_TRUE(new_scope.GetValue(private_var_name));
+ EXPECT_TRUE(new_scope.GetTemplate("_templ"));
+ }
+
+ // Skip private values and templates.
+ {
+ Scope new_scope(setup.settings());
+
+ Err err;
+ Scope::MergeOptions options;
+ options.skip_private_vars = true;
+ EXPECT_TRUE(setup.scope()->NonRecursiveMergeTo(
+ &new_scope, options, &assignment, "error", &err));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_FALSE(new_scope.GetValue(private_var_name));
+ EXPECT_FALSE(new_scope.GetTemplate("_templ"));
+ }
+
+ // Don't mark used.
+ {
+ Scope new_scope(setup.settings());
+
+ Err err;
+ Scope::MergeOptions options;
+ EXPECT_TRUE(setup.scope()->NonRecursiveMergeTo(
+ &new_scope, options, &assignment, "error", &err));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_FALSE(new_scope.CheckForUnusedVars(&err));
+ EXPECT_TRUE(err.has_error());
+ }
+
+ // Mark dest used.
+ {
+ Scope new_scope(setup.settings());
+
+ Err err;
+ Scope::MergeOptions options;
+ options.mark_dest_used = true;
+ EXPECT_TRUE(setup.scope()->NonRecursiveMergeTo(
+ &new_scope, options, &assignment, "error", &err));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_TRUE(new_scope.CheckForUnusedVars(&err));
+ EXPECT_FALSE(err.has_error());
+ }
+}
+
+TEST(Scope, MakeClosure) {
+ // Create 3 nested scopes [const root from setup] <- nested1 <- nested2.
+ TestWithScope setup;
+
+ // Make a pretend parse node with proper tracking that we can blame for the
+ // given value.
+ InputFile input_file(SourceFile("//foo"));
+ Token assignment_token(Location(&input_file, 1, 1, 1), Token::STRING,
+ "\"hello\"");
+ LiteralNode assignment;
+ assignment.set_value(assignment_token);
+ setup.scope()->SetValue("on_root", Value(&assignment, "on_root"),
+ &assignment);
+
+ // Root scope should be const from the nested caller's perspective.
+ Scope nested1(static_cast<const Scope*>(setup.scope()));
+ nested1.SetValue("on_one", Value(&assignment, "on_one"), &assignment);
+
+ Scope nested2(&nested1);
+ nested2.SetValue("on_one", Value(&assignment, "on_two"), &assignment);
+ nested2.SetValue("on_two", Value(&assignment, "on_two2"), &assignment);
+
+ // Making a closure from the root scope.
+ std::unique_ptr<Scope> result = setup.scope()->MakeClosure();
+ EXPECT_FALSE(result->containing()); // Should have no containing scope.
+ EXPECT_TRUE(result->GetValue("on_root")); // Value should be copied.
+
+ // Making a closure from the second nested scope.
+ result = nested2.MakeClosure();
+ EXPECT_EQ(setup.scope(),
+ result->containing()); // Containing scope should be the root.
+ EXPECT_TRUE(HasStringValueEqualTo(result.get(), "on_root", "on_root"));
+ EXPECT_TRUE(HasStringValueEqualTo(result.get(), "on_one", "on_two"));
+ EXPECT_TRUE(HasStringValueEqualTo(result.get(), "on_two", "on_two2"));
+}
+
+TEST(Scope, GetMutableValue) {
+ TestWithScope setup;
+
+ // Make a pretend parse node with proper tracking that we can blame for the
+ // given value.
+ InputFile input_file(SourceFile("//foo"));
+ Token assignment_token(Location(&input_file, 1, 1, 1), Token::STRING,
+ "\"hello\"");
+ LiteralNode assignment;
+ assignment.set_value(assignment_token);
+
+ const char kOnConst[] = "on_const";
+ const char kOnMutable1[] = "on_mutable1";
+ const char kOnMutable2[] = "on_mutable2";
+
+ Value value(&assignment, "hello");
+
+ // Create a root scope with one value.
+ Scope root_scope(setup.settings());
+ root_scope.SetValue(kOnConst, value, &assignment);
+
+ // Create a first nested scope with a different value.
+ const Scope* const_root_scope = &root_scope;
+ Scope mutable_scope1(const_root_scope);
+ mutable_scope1.SetValue(kOnMutable1, value, &assignment);
+
+ // Create a second nested scope with a different value.
+ Scope mutable_scope2(&mutable_scope1);
+ mutable_scope2.SetValue(kOnMutable2, value, &assignment);
+
+ // Check getting root scope values.
+ EXPECT_TRUE(mutable_scope2.GetValue(kOnConst, true));
+ EXPECT_FALSE(mutable_scope2.GetMutableValue(kOnConst, true));
+
+ // Test reading a value from scope 1.
+ Value* mutable1_result = mutable_scope2.GetMutableValue(kOnMutable1, false);
+ ASSERT_TRUE(mutable1_result);
+ EXPECT_TRUE(*mutable1_result == value);
+
+ // Make sure CheckForUnusedVars works on scope1 (we didn't mark the value as
+ // used in the previous step).
+ Err err;
+ EXPECT_FALSE(mutable_scope1.CheckForUnusedVars(&err));
+ mutable1_result = mutable_scope2.GetMutableValue(kOnMutable1, true);
+ EXPECT_TRUE(mutable1_result);
+ err = Err();
+ EXPECT_TRUE(mutable_scope1.CheckForUnusedVars(&err));
+
+ // Test reading a value from scope 2.
+ Value* mutable2_result = mutable_scope2.GetMutableValue(kOnMutable2, true);
+ ASSERT_TRUE(mutable2_result);
+ EXPECT_TRUE(*mutable2_result == value);
+}
+
+TEST(Scope, RemovePrivateIdentifiers) {
+ TestWithScope setup;
+ setup.scope()->SetValue("a", Value(nullptr, true), nullptr);
+ setup.scope()->SetValue("_b", Value(nullptr, true), nullptr);
+
+ setup.scope()->RemovePrivateIdentifiers();
+ EXPECT_TRUE(setup.scope()->GetValue("a"));
+ EXPECT_FALSE(setup.scope()->GetValue("_b"));
+}
diff --git a/chromium/tools/gn/settings.cc b/chromium/tools/gn/settings.cc
new file mode 100644
index 00000000000..034e601a347
--- /dev/null
+++ b/chromium/tools/gn/settings.cc
@@ -0,0 +1,35 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/settings.h"
+
+#include "base/logging.h"
+#include "build/build_config.h"
+#include "tools/gn/filesystem_utils.h"
+
+Settings::Settings(const BuildSettings* build_settings,
+ const std::string& output_subdir_name)
+ : build_settings_(build_settings),
+ import_manager_(),
+ base_config_(this),
+ greedy_target_generation_(false) {
+ if (output_subdir_name.empty()) {
+ toolchain_output_dir_ = build_settings->build_dir();
+ } else {
+ // We guarantee this ends in a slash.
+ DCHECK(output_subdir_name[output_subdir_name.size() - 1] == '/');
+ toolchain_output_subdir_.value().append(output_subdir_name);
+
+ DCHECK(!build_settings->build_dir().is_null());
+ toolchain_output_dir_ = SourceDir(build_settings->build_dir().value() +
+ toolchain_output_subdir_.value());
+ }
+ // The output dir will be null in some tests and when invoked to parse
+ // one-off data without doing generation.
+ if (!toolchain_output_dir_.is_null())
+ toolchain_gen_dir_ = SourceDir(toolchain_output_dir_.value() + "gen/");
+}
+
+Settings::~Settings() {
+}
diff --git a/chromium/tools/gn/settings.h b/chromium/tools/gn/settings.h
new file mode 100644
index 00000000000..919255136b5
--- /dev/null
+++ b/chromium/tools/gn/settings.h
@@ -0,0 +1,116 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_SETTINGS_H_
+#define TOOLS_GN_SETTINGS_H_
+
+#include "base/files/file_path.h"
+#include "base/macros.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/import_manager.h"
+#include "tools/gn/output_file.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/source_dir.h"
+#include "tools/gn/toolchain.h"
+
+// Holds the settings for one toolchain invocation. There will be one
+// Settings object for each toolchain type, each referring to the same
+// BuildSettings object for shared stuff.
+//
+// The Settings object is const once it is constructed, which allows us to
+// use it from multiple threads during target generation without locking (which
+// is important, because it gets used a lot).
+//
+// The Toolchain object holds the set of stuff that is set by the toolchain
+// declaration, which obviously needs to be set later when we actually parse
+// the file with the toolchain declaration in it.
+class Settings {
+ public:
+ // Constructs toolchain settings.
+ //
+ // The output_subdir_name is the name we should use for the subdirectory in
+ // the build output directory for this toolchain's outputs. The default
+ // toolchain would use an empty string (it goes in the root build dir).
+ // Otherwise, it must end in a slash.
+ Settings(const BuildSettings* build_settings,
+ const std::string& output_subdir_name);
+ ~Settings();
+
+ const BuildSettings* build_settings() const { return build_settings_; }
+
+ const Label& toolchain_label() const { return toolchain_label_; }
+ void set_toolchain_label(const Label& l) { toolchain_label_ = l; }
+
+ const Label& default_toolchain_label() const {
+ return default_toolchain_label_;
+ }
+ void set_default_toolchain_label(const Label& default_label) {
+ default_toolchain_label_ = default_label;
+ }
+
+ // Indicates if this corresponds to the default toolchain.
+ bool is_default() const {
+ return toolchain_label_ == default_toolchain_label_;
+ }
+
+ const OutputFile& toolchain_output_subdir() const {
+ return toolchain_output_subdir_;
+ }
+ const SourceDir& toolchain_output_dir() const {
+ return toolchain_output_dir_;
+ }
+
+ // Directory for generated files.
+ const SourceDir& toolchain_gen_dir() const {
+ return toolchain_gen_dir_;
+ }
+
+ // The import manager caches the result of executing imported files in the
+ // context of a given settings object.
+ //
+ // See the ItemTree getter in GlobalSettings for why this doesn't return a
+ // const pointer.
+ ImportManager& import_manager() const { return import_manager_; }
+
+ const Scope* base_config() const { return &base_config_; }
+ Scope* base_config() { return &base_config_; }
+
+ // Set to true when every target we encounter should be generated. False
+ // means that only targets that have a dependency from (directly or
+ // indirectly) some magic root node are actually generated. See the comments
+ // on ItemTree for more.
+ bool greedy_target_generation() const {
+ return greedy_target_generation_;
+ }
+ void set_greedy_target_generation(bool gtg) {
+ greedy_target_generation_ = gtg;
+ }
+
+ private:
+ const BuildSettings* build_settings_;
+
+ Label toolchain_label_;
+ Label default_toolchain_label_;
+
+ mutable ImportManager import_manager_;
+
+ // The subdirectory inside the build output for this toolchain. For the
+ // default toolchain, this will be empty (since the default toolchain's
+ // output directory is the same as the build directory). When nonempty, this
+ // is guaranteed to end in a slash.
+ OutputFile toolchain_output_subdir_;
+
+ // Full source file path to the toolchain output directory.
+ SourceDir toolchain_output_dir_;
+
+ SourceDir toolchain_gen_dir_;
+
+ Scope base_config_;
+
+ bool greedy_target_generation_;
+
+ DISALLOW_COPY_AND_ASSIGN(Settings);
+};
+
+#endif // TOOLS_GN_SETTINGS_H_
diff --git a/chromium/tools/gn/setup.cc b/chromium/tools/gn/setup.cc
new file mode 100644
index 00000000000..384a2af17ea
--- /dev/null
+++ b/chromium/tools/gn/setup.cc
@@ -0,0 +1,731 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/setup.h"
+
+#include <stdlib.h>
+#include <algorithm>
+#include <sstream>
+#include <utility>
+
+#include "base/bind.h"
+#include "base/command_line.h"
+#include "base/files/file_path.h"
+#include "base/files/file_util.h"
+#include "base/process/launch.h"
+#include "base/strings/string_split.h"
+#include "base/strings/string_util.h"
+#include "base/strings/sys_string_conversions.h"
+#include "base/strings/utf_string_conversions.h"
+#include "build/build_config.h"
+#include "tools/gn/commands.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/parser.h"
+#include "tools/gn/source_dir.h"
+#include "tools/gn/source_file.h"
+#include "tools/gn/standard_out.h"
+#include "tools/gn/switches.h"
+#include "tools/gn/tokenizer.h"
+#include "tools/gn/trace.h"
+#include "tools/gn/value.h"
+#include "tools/gn/value_extractors.h"
+
+#if defined(OS_WIN)
+#include <windows.h>
+#endif
+
+extern const char kDotfile_Help[] =
+ ".gn file\n"
+ "\n"
+ " When gn starts, it will search the current directory and parent\n"
+ " directories for a file called \".gn\". This indicates the source root.\n"
+ " You can override this detection by using the --root command-line\n"
+ " argument\n"
+ "\n"
+ " The .gn file in the source root will be executed. The syntax is the\n"
+ " same as a buildfile, but with very limited build setup-specific\n"
+ " meaning.\n"
+ "\n"
+ " If you specify --root, by default GN will look for the file .gn in\n"
+ " that directory. If you want to specify a different file, you can\n"
+ " additionally pass --dotfile:\n"
+ "\n"
+ " gn gen out/Debug --root=/home/build --dotfile=/home/my_gn_file.gn\n"
+ "\n"
+ "Variables\n"
+ "\n"
+ " buildconfig [required]\n"
+ " Label of the build config file. This file will be used to set up\n"
+ " the build file execution environment for each toolchain.\n"
+ "\n"
+ " check_targets [optional]\n"
+ " A list of labels and label patterns that should be checked when\n"
+ " running \"gn check\" or \"gn gen --check\". If unspecified, all\n"
+ " targets will be checked. If it is the empty list, no targets will\n"
+ " be checked.\n"
+ "\n"
+ " The format of this list is identical to that of \"visibility\"\n"
+ " so see \"gn help visibility\" for examples.\n"
+ "\n"
+ " exec_script_whitelist [optional]\n"
+ " A list of .gn/.gni files (not labels) that have permission to call\n"
+ " the exec_script function. If this list is defined, calls to\n"
+ " exec_script will be checked against this list and GN will fail if\n"
+ " the current file isn't in the list.\n"
+ "\n"
+ " This is to allow the use of exec_script to be restricted since\n"
+ " is easy to use inappropriately. Wildcards are not supported.\n"
+ " Files in the secondary_source tree (if defined) should be\n"
+ " referenced by ignoring the secondary tree and naming them as if\n"
+ " they are in the main tree.\n"
+ "\n"
+ " If unspecified, the ability to call exec_script is unrestricted.\n"
+ "\n"
+ " Example:\n"
+ " exec_script_whitelist = [\n"
+ " \"//base/BUILD.gn\",\n"
+ " \"//build/my_config.gni\",\n"
+ " ]\n"
+ "\n"
+ " root [optional]\n"
+ " Label of the root build target. The GN build will start by loading\n"
+ " the build file containing this target name. This defaults to\n"
+ " \"//:\" which will cause the file //BUILD.gn to be loaded.\n"
+ "\n"
+ " secondary_source [optional]\n"
+ " Label of an alternate directory tree to find input files. When\n"
+ " searching for a BUILD.gn file (or the build config file discussed\n"
+ " above), the file will first be looked for in the source root.\n"
+ " If it's not found, the secondary source root will be checked\n"
+ " (which would contain a parallel directory hierarchy).\n"
+ "\n"
+ " This behavior is intended to be used when BUILD.gn files can't be\n"
+ " checked in to certain source directories for whatever reason.\n"
+ "\n"
+ " The secondary source root must be inside the main source tree.\n"
+ "\n"
+ "Example .gn file contents\n"
+ "\n"
+ " buildconfig = \"//build/config/BUILDCONFIG.gn\"\n"
+ "\n"
+ " check_targets = [\n"
+ " \"//doom_melon/*\", # Check everything in this subtree.\n"
+ " \"//tools:mind_controlling_ant\", # Check this specific target.\n"
+ " ]\n"
+ "\n"
+ " root = \"//:root\"\n"
+ "\n"
+ " secondary_source = \"//build/config/temporary_buildfiles/\"\n";
+
+namespace {
+
+const base::FilePath::CharType kGnFile[] = FILE_PATH_LITERAL(".gn");
+
+base::FilePath FindDotFile(const base::FilePath& current_dir) {
+ base::FilePath try_this_file = current_dir.Append(kGnFile);
+ if (base::PathExists(try_this_file))
+ return try_this_file;
+
+ base::FilePath with_no_slash = current_dir.StripTrailingSeparators();
+ base::FilePath up_one_dir = with_no_slash.DirName();
+ if (up_one_dir == current_dir)
+ return base::FilePath(); // Got to the top.
+
+ return FindDotFile(up_one_dir);
+}
+
+// Called on any thread. Post the item to the builder on the main thread.
+void ItemDefinedCallback(base::MessageLoop* main_loop,
+ scoped_refptr<Builder> builder,
+ std::unique_ptr<Item> item) {
+ DCHECK(item);
+ main_loop->PostTask(FROM_HERE, base::Bind(&Builder::ItemDefined, builder,
+ base::Passed(&item)));
+}
+
+void DecrementWorkCount() {
+ g_scheduler->DecrementWorkCount();
+}
+
+#if defined(OS_WIN)
+
+// Given the path to a batch file that runs Python, extracts the name of the
+// executable actually implementing Python. Generally people write a batch file
+// to put something named "python" on the path, which then just redirects to
+// a python.exe somewhere else. This step decodes that setup. On failure,
+// returns empty path.
+base::FilePath PythonBatToExe(const base::FilePath& bat_path) {
+ // Note exciting double-quoting to allow spaces. The /c switch seems to check
+ // for quotes around the whole thing and then deletes them. If you want to
+ // quote the first argument in addition (to allow for spaces in the Python
+ // path, you need *another* set of quotes around that, likewise, we need
+ // two quotes at the end.
+ base::string16 command = L"cmd.exe /c \"\"";
+ command.append(bat_path.value());
+ command.append(L"\" -c \"import sys; print sys.executable\"\"");
+
+ std::string python_path;
+ if (base::GetAppOutput(command, &python_path)) {
+ base::TrimWhitespaceASCII(python_path, base::TRIM_ALL, &python_path);
+
+ // Python uses the system multibyte code page for sys.executable.
+ base::FilePath exe_path(base::SysNativeMBToWide(python_path));
+
+ // Check for reasonable output, cmd may have output an error message.
+ if (base::PathExists(exe_path))
+ return exe_path;
+ }
+ return base::FilePath();
+}
+
+const base::char16 kPythonExeName[] = L"python.exe";
+const base::char16 kPythonBatName[] = L"python.bat";
+
+base::FilePath FindWindowsPython() {
+ base::char16 current_directory[MAX_PATH];
+ ::GetCurrentDirectory(MAX_PATH, current_directory);
+
+ // First search for python.exe in the current directory.
+ base::FilePath cur_dir_candidate_exe =
+ base::FilePath(current_directory).Append(kPythonExeName);
+ if (base::PathExists(cur_dir_candidate_exe))
+ return cur_dir_candidate_exe;
+
+ // Get the path.
+ const base::char16 kPathEnvVarName[] = L"Path";
+ DWORD path_length = ::GetEnvironmentVariable(kPathEnvVarName, nullptr, 0);
+ if (path_length == 0)
+ return base::FilePath();
+ std::unique_ptr<base::char16[]> full_path(new base::char16[path_length]);
+ DWORD actual_path_length =
+ ::GetEnvironmentVariable(kPathEnvVarName, full_path.get(), path_length);
+ CHECK_EQ(path_length, actual_path_length + 1);
+
+ // Search for python.exe in the path.
+ for (const auto& component : base::SplitStringPiece(
+ base::StringPiece16(full_path.get(), path_length), L";",
+ base::TRIM_WHITESPACE, base::SPLIT_WANT_NONEMPTY)) {
+ base::FilePath candidate_exe =
+ base::FilePath(component).Append(kPythonExeName);
+ if (base::PathExists(candidate_exe))
+ return candidate_exe;
+
+ // Also allow python.bat, but convert into the .exe.
+ base::FilePath candidate_bat =
+ base::FilePath(component).Append(kPythonBatName);
+ if (base::PathExists(candidate_bat)) {
+ base::FilePath python_exe = PythonBatToExe(candidate_bat);
+ if (!python_exe.empty())
+ return python_exe;
+ }
+ }
+ return base::FilePath();
+}
+#endif
+
+// Expands all ./, ../, and symbolic links in the given path.
+bool GetRealPath(const base::FilePath& path, base::FilePath* out) {
+#if defined(OS_POSIX)
+ char buf[PATH_MAX];
+ if (!realpath(path.value().c_str(), buf)) {
+ return false;
+ }
+ *out = base::FilePath(buf);
+#else
+ // Do nothing on a non-POSIX system.
+ *out = path;
+#endif
+ return true;
+}
+
+} // namespace
+
+const char Setup::kBuildArgFileName[] = "args.gn";
+
+Setup::Setup()
+ : build_settings_(),
+ loader_(new LoaderImpl(&build_settings_)),
+ builder_(new Builder(loader_.get())),
+ root_build_file_("//BUILD.gn"),
+ check_public_headers_(false),
+ dotfile_settings_(&build_settings_, std::string()),
+ dotfile_scope_(&dotfile_settings_),
+ fill_arguments_(true) {
+ dotfile_settings_.set_toolchain_label(Label());
+ build_settings_.set_item_defined_callback(
+ base::Bind(&ItemDefinedCallback, scheduler_.main_loop(), builder_));
+
+ loader_->set_complete_callback(base::Bind(&DecrementWorkCount));
+ // The scheduler's main loop wasn't created when the Loader was created, so
+ // we need to set it now.
+ loader_->set_main_loop(scheduler_.main_loop());
+}
+
+Setup::~Setup() {
+}
+
+bool Setup::DoSetup(const std::string& build_dir, bool force_create) {
+ base::CommandLine* cmdline = base::CommandLine::ForCurrentProcess();
+
+ scheduler_.set_verbose_logging(cmdline->HasSwitch(switches::kVerbose));
+ if (cmdline->HasSwitch(switches::kTime) ||
+ cmdline->HasSwitch(switches::kTracelog))
+ EnableTracing();
+
+ ScopedTrace setup_trace(TraceItem::TRACE_SETUP, "DoSetup");
+
+ if (!FillSourceDir(*cmdline))
+ return false;
+ if (!RunConfigFile())
+ return false;
+ if (!FillOtherConfig(*cmdline))
+ return false;
+
+ // Must be after FillSourceDir to resolve.
+ if (!FillBuildDir(build_dir, !force_create))
+ return false;
+
+ // Check for unused variables in the .gn file.
+ Err err;
+ if (!dotfile_scope_.CheckForUnusedVars(&err)) {
+ err.PrintToStdout();
+ return false;
+ }
+
+ if (fill_arguments_) {
+ if (!FillArguments(*cmdline))
+ return false;
+ }
+ FillPythonPath();
+
+ return true;
+}
+
+bool Setup::Run() {
+ RunPreMessageLoop();
+ if (!scheduler_.Run())
+ return false;
+ return RunPostMessageLoop();
+}
+
+SourceFile Setup::GetBuildArgFile() const {
+ return SourceFile(build_settings_.build_dir().value() + kBuildArgFileName);
+}
+
+void Setup::RunPreMessageLoop() {
+ // Load the root build file.
+ loader_->Load(root_build_file_, LocationRange(), Label());
+
+  // Will be decremented when the loader is drained.
+ g_scheduler->IncrementWorkCount();
+}
+
+bool Setup::RunPostMessageLoop() {
+ Err err;
+ if (build_settings_.check_for_bad_items()) {
+ if (!builder_->CheckForBadItems(&err)) {
+ err.PrintToStdout();
+ return false;
+ }
+
+ if (!build_settings_.build_args().VerifyAllOverridesUsed(&err)) {
+ // TODO(brettw) implement a system of warnings. Until we have a better
+ // system, print the error but don't return failure.
+ err.PrintToStdout();
+ return true;
+ }
+ }
+
+ if (check_public_headers_) {
+ std::vector<const Target*> all_targets = builder_->GetAllResolvedTargets();
+ std::vector<const Target*> to_check;
+ if (check_patterns()) {
+ commands::FilterTargetsByPatterns(all_targets, *check_patterns(),
+ &to_check);
+ } else {
+ to_check = all_targets;
+ }
+
+ if (!commands::CheckPublicHeaders(&build_settings_, all_targets,
+ to_check, false)) {
+ return false;
+ }
+ }
+
+ // Write out tracing and timing if requested.
+ const base::CommandLine* cmdline = base::CommandLine::ForCurrentProcess();
+ if (cmdline->HasSwitch(switches::kTime))
+ PrintLongHelp(SummarizeTraces());
+ if (cmdline->HasSwitch(switches::kTracelog))
+ SaveTraces(cmdline->GetSwitchValuePath(switches::kTracelog));
+
+ return true;
+}
+
+bool Setup::FillArguments(const base::CommandLine& cmdline) {
+ // Use the args on the command line if specified, and save them. Do this even
+ // if the list is empty (this means clear any defaults).
+ if (cmdline.HasSwitch(switches::kArgs)) {
+ if (!FillArgsFromCommandLine(cmdline.GetSwitchValueASCII(switches::kArgs)))
+ return false;
+ SaveArgsToFile();
+ return true;
+ }
+
+ // No command line args given, use the arguments from the build dir (if any).
+ return FillArgsFromFile();
+}
+
+bool Setup::FillArgsFromCommandLine(const std::string& args) {
+ args_input_file_.reset(new InputFile(SourceFile()));
+ args_input_file_->SetContents(args);
+ args_input_file_->set_friendly_name("the command-line \"--args\"");
+ return FillArgsFromArgsInputFile();
+}
+
+bool Setup::FillArgsFromFile() {
+ ScopedTrace setup_trace(TraceItem::TRACE_SETUP, "Load args file");
+
+ SourceFile build_arg_source_file = GetBuildArgFile();
+ base::FilePath build_arg_file =
+ build_settings_.GetFullPath(build_arg_source_file);
+
+ std::string contents;
+ if (!base::ReadFileToString(build_arg_file, &contents))
+ return true; // File doesn't exist, continue with default args.
+
+ // Add a dependency on the build arguments file. If this changes, we want
+ // to re-generate the build.
+ g_scheduler->AddGenDependency(build_arg_file);
+
+ if (contents.empty())
+ return true; // Empty file, do nothing.
+
+ args_input_file_.reset(new InputFile(build_arg_source_file));
+ args_input_file_->SetContents(contents);
+ args_input_file_->set_friendly_name(
+ "build arg file (use \"gn args <out_dir>\" to edit)");
+
+ setup_trace.Done(); // Only want to count the load as part of the trace.
+ return FillArgsFromArgsInputFile();
+}
+
+bool Setup::FillArgsFromArgsInputFile() {
+ ScopedTrace setup_trace(TraceItem::TRACE_SETUP, "Parse args");
+
+ Err err;
+ args_tokens_ = Tokenizer::Tokenize(args_input_file_.get(), &err);
+ if (err.has_error()) {
+ err.PrintToStdout();
+ return false;
+ }
+
+ args_root_ = Parser::Parse(args_tokens_, &err);
+ if (err.has_error()) {
+ err.PrintToStdout();
+ return false;
+ }
+
+ Scope arg_scope(&dotfile_settings_);
+ args_root_->Execute(&arg_scope, &err);
+ if (err.has_error()) {
+ err.PrintToStdout();
+ return false;
+ }
+
+ // Save the result of the command args.
+ Scope::KeyValueMap overrides;
+ arg_scope.GetCurrentScopeValues(&overrides);
+ build_settings_.build_args().AddArgOverrides(overrides);
+ return true;
+}
+
+bool Setup::SaveArgsToFile() {
+ ScopedTrace setup_trace(TraceItem::TRACE_SETUP, "Save args file");
+
+ std::ostringstream stream;
+ for (const auto& pair : build_settings_.build_args().GetAllOverrides()) {
+ stream << pair.first.as_string() << " = " << pair.second.ToString(true);
+ stream << std::endl;
+ }
+
+ // For the first run, the build output dir might not be created yet, so do
+ // that so we can write a file into it. Ignore errors, we'll catch the error
+ // when we try to write a file to it below.
+ base::FilePath build_arg_file =
+ build_settings_.GetFullPath(GetBuildArgFile());
+ base::CreateDirectory(build_arg_file.DirName());
+
+ std::string contents = stream.str();
+#if defined(OS_WIN)
+  // Use Windows line endings for this file since it will often open in
+ // Notepad which can't handle Unix ones.
+ base::ReplaceSubstringsAfterOffset(&contents, 0, "\n", "\r\n");
+#endif
+ if (base::WriteFile(build_arg_file, contents.c_str(),
+ static_cast<int>(contents.size())) == -1) {
+ Err(Location(), "Args file could not be written.",
+ "The file is \"" + FilePathToUTF8(build_arg_file) +
+ "\"").PrintToStdout();
+ return false;
+ }
+
+ // Add a dependency on the build arguments file. If this changes, we want
+ // to re-generate the build.
+ g_scheduler->AddGenDependency(build_arg_file);
+
+ return true;
+}
+
+bool Setup::FillSourceDir(const base::CommandLine& cmdline) {
+ // Find the .gn file.
+ base::FilePath root_path;
+
+ // Prefer the command line args to the config file.
+ base::FilePath relative_root_path =
+ cmdline.GetSwitchValuePath(switches::kRoot);
+ if (!relative_root_path.empty()) {
+ root_path = base::MakeAbsoluteFilePath(relative_root_path);
+ if (root_path.empty()) {
+ Err(Location(), "Root source path not found.",
+ "The path \"" + FilePathToUTF8(relative_root_path) +
+ "\" doesn't exist.").PrintToStdout();
+ return false;
+ }
+
+ // When --root is specified, an alternate --dotfile can also be set.
+ // --dotfile should be a real file path and not a "//foo" source-relative
+ // path.
+ base::FilePath dot_file_path =
+ cmdline.GetSwitchValuePath(switches::kDotfile);
+ if (dot_file_path.empty()) {
+ dotfile_name_ = root_path.Append(kGnFile);
+ } else {
+ dotfile_name_ = base::MakeAbsoluteFilePath(dot_file_path);
+ if (dotfile_name_.empty()) {
+ Err(Location(), "Could not load dotfile.",
+ "The file \"" + FilePathToUTF8(dot_file_path) +
+ "\" cound't be loaded.").PrintToStdout();
+ return false;
+ }
+ }
+ } else {
+ // In the default case, look for a dotfile and that also tells us where the
+ // source root is.
+ base::FilePath cur_dir;
+ base::GetCurrentDirectory(&cur_dir);
+ dotfile_name_ = FindDotFile(cur_dir);
+ if (dotfile_name_.empty()) {
+ Err(Location(), "Can't find source root.",
+ "I could not find a \".gn\" file in the current directory or any "
+ "parent,\nand the --root command-line argument was not specified.")
+ .PrintToStdout();
+ return false;
+ }
+ root_path = dotfile_name_.DirName();
+ }
+
+ base::FilePath root_realpath;
+ if (!GetRealPath(root_path, &root_realpath)) {
+ Err(Location(), "Can't get the real root path.",
+ "I could not get the real path of \"" + FilePathToUTF8(root_path) +
+ "\".").PrintToStdout();
+ return false;
+ }
+ if (scheduler_.verbose_logging())
+ scheduler_.Log("Using source root", FilePathToUTF8(root_realpath));
+ build_settings_.SetRootPath(root_realpath);
+
+ return true;
+}
+
+bool Setup::FillBuildDir(const std::string& build_dir, bool require_exists) {
+ Err err;
+ SourceDir resolved =
+ SourceDirForCurrentDirectory(build_settings_.root_path()).
+ ResolveRelativeDir(Value(nullptr, build_dir), &err,
+ build_settings_.root_path_utf8());
+ if (err.has_error()) {
+ err.PrintToStdout();
+ return false;
+ }
+
+ base::FilePath build_dir_path = build_settings_.GetFullPath(resolved);
+ if (!base::CreateDirectory(build_dir_path)) {
+ Err(Location(), "Can't create the build dir.",
+ "I could not create the build dir \"" + FilePathToUTF8(build_dir_path) +
+ "\".").PrintToStdout();
+ return false;
+ }
+ base::FilePath build_dir_realpath;
+ if (!GetRealPath(build_dir_path, &build_dir_realpath)) {
+ Err(Location(), "Can't get the real build dir path.",
+ "I could not get the real path of \"" + FilePathToUTF8(build_dir_path) +
+ "\".").PrintToStdout();
+ return false;
+ }
+ resolved = SourceDirForPath(build_settings_.root_path(),
+ build_dir_realpath);
+
+ if (scheduler_.verbose_logging())
+ scheduler_.Log("Using build dir", resolved.value());
+
+ if (require_exists) {
+ if (!base::PathExists(build_dir_path.Append(
+ FILE_PATH_LITERAL("build.ninja")))) {
+ Err(Location(), "Not a build directory.",
+ "This command requires an existing build directory. I interpreted "
+ "your input\n\"" + build_dir + "\" as:\n " +
+ FilePathToUTF8(build_dir_path) +
+ "\nwhich doesn't seem to contain a previously-generated build.")
+ .PrintToStdout();
+ return false;
+ }
+ }
+
+ build_settings_.SetBuildDir(resolved);
+ return true;
+}
+
+void Setup::FillPythonPath() {
+ // Trace this since it tends to be a bit slow on Windows.
+ ScopedTrace setup_trace(TraceItem::TRACE_SETUP, "Fill Python Path");
+#if defined(OS_WIN)
+ base::FilePath python_path = FindWindowsPython();
+ if (python_path.empty()) {
+ scheduler_.Log("WARNING", "Could not find python on path, using "
+ "just \"python.exe\"");
+ python_path = base::FilePath(kPythonExeName);
+ }
+ build_settings_.set_python_path(python_path.NormalizePathSeparatorsTo('/'));
+#else
+ build_settings_.set_python_path(base::FilePath("python"));
+#endif
+}
+
+bool Setup::RunConfigFile() {
+ if (scheduler_.verbose_logging())
+ scheduler_.Log("Got dotfile", FilePathToUTF8(dotfile_name_));
+
+ dotfile_input_file_.reset(new InputFile(SourceFile("//.gn")));
+ if (!dotfile_input_file_->Load(dotfile_name_)) {
+ Err(Location(), "Could not load dotfile.",
+ "The file \"" + FilePathToUTF8(dotfile_name_) + "\" cound't be loaded")
+ .PrintToStdout();
+ return false;
+ }
+
+ Err err;
+ dotfile_tokens_ = Tokenizer::Tokenize(dotfile_input_file_.get(), &err);
+ if (err.has_error()) {
+ err.PrintToStdout();
+ return false;
+ }
+
+ dotfile_root_ = Parser::Parse(dotfile_tokens_, &err);
+ if (err.has_error()) {
+ err.PrintToStdout();
+ return false;
+ }
+
+ dotfile_root_->Execute(&dotfile_scope_, &err);
+ if (err.has_error()) {
+ err.PrintToStdout();
+ return false;
+ }
+
+ return true;
+}
+
+bool Setup::FillOtherConfig(const base::CommandLine& cmdline) {
+ Err err;
+ SourceDir current_dir("//");
+
+ // Secondary source path, read from the config file if present.
+ // Read from the config file if present.
+ const Value* secondary_value =
+ dotfile_scope_.GetValue("secondary_source", true);
+ if (secondary_value) {
+ if (!secondary_value->VerifyTypeIs(Value::STRING, &err)) {
+ err.PrintToStdout();
+ return false;
+ }
+ build_settings_.SetSecondarySourcePath(
+ SourceDir(secondary_value->string_value()));
+ }
+
+ // Root build file.
+ const Value* root_value = dotfile_scope_.GetValue("root", true);
+ if (root_value) {
+ if (!root_value->VerifyTypeIs(Value::STRING, &err)) {
+ err.PrintToStdout();
+ return false;
+ }
+
+ Label root_target_label =
+ Label::Resolve(current_dir, Label(), *root_value, &err);
+ if (err.has_error()) {
+ err.PrintToStdout();
+ return false;
+ }
+
+ root_build_file_ = Loader::BuildFileForLabel(root_target_label);
+ }
+
+ // Build config file.
+ const Value* build_config_value =
+ dotfile_scope_.GetValue("buildconfig", true);
+ if (!build_config_value) {
+ Err(Location(), "No build config file.",
+ "Your .gn file (\"" + FilePathToUTF8(dotfile_name_) + "\")\n"
+ "didn't specify a \"buildconfig\" value.").PrintToStdout();
+ return false;
+ } else if (!build_config_value->VerifyTypeIs(Value::STRING, &err)) {
+ err.PrintToStdout();
+ return false;
+ }
+ build_settings_.set_build_config_file(
+ SourceFile(build_config_value->string_value()));
+
+ // Targets to check.
+ const Value* check_targets_value =
+ dotfile_scope_.GetValue("check_targets", true);
+ if (check_targets_value) {
+ check_patterns_.reset(new std::vector<LabelPattern>);
+ ExtractListOfLabelPatterns(*check_targets_value, current_dir,
+ check_patterns_.get(), &err);
+ if (err.has_error()) {
+ err.PrintToStdout();
+ return false;
+ }
+ }
+
+ // Fill exec_script_whitelist.
+ const Value* exec_script_whitelist_value =
+ dotfile_scope_.GetValue("exec_script_whitelist", true);
+ if (exec_script_whitelist_value) {
+ // Fill the list of targets to check.
+ if (!exec_script_whitelist_value->VerifyTypeIs(Value::LIST, &err)) {
+ err.PrintToStdout();
+ return false;
+ }
+ std::unique_ptr<std::set<SourceFile>> whitelist(new std::set<SourceFile>);
+ for (const auto& item : exec_script_whitelist_value->list_value()) {
+ if (!item.VerifyTypeIs(Value::STRING, &err)) {
+ err.PrintToStdout();
+ return false;
+ }
+ whitelist->insert(current_dir.ResolveRelativeFile(item, &err));
+ if (err.has_error()) {
+ err.PrintToStdout();
+ return false;
+ }
+ }
+ build_settings_.set_exec_script_whitelist(std::move(whitelist));
+ }
+
+ return true;
+}
diff --git a/chromium/tools/gn/setup.h b/chromium/tools/gn/setup.h
new file mode 100644
index 00000000000..4f2d8859951
--- /dev/null
+++ b/chromium/tools/gn/setup.h
@@ -0,0 +1,167 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_SETUP_H_
+#define TOOLS_GN_SETUP_H_
+
+#include <memory>
+#include <vector>
+
+#include "base/files/file_path.h"
+#include "base/macros.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/builder.h"
+#include "tools/gn/label_pattern.h"
+#include "tools/gn/loader.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/token.h"
+#include "tools/gn/toolchain.h"
+
+class InputFile;
+class ParseNode;
+
+namespace base {
+class CommandLine;
+}
+
+extern const char kDotfile_Help[];
+
+// Helper class to setup the build settings and environment for the various
+// commands to run.
+class Setup {
+ public:
+ Setup();
+ ~Setup();
+
+ // Configures the build for the current command line. On success returns
+ // true. On failure, prints the error and returns false.
+ //
+ // The parameter is the string the user specified for the build directory. We
+  // will try to interpret this as a SourceDir if possible, and will fail if it
+ // is malformed.
+ //
+ // With force_create = false, setup will fail if the build directory doesn't
+  // already exist with an args file in it. With force_create set to true, the
+ // directory will be created if necessary. Commands explicitly doing
+ // generation should set this to true to create it, but querying commands
+ // should set it to false to prevent creating oddly-named directories in case
+ // the user omits the build directory argument (which is easy to do).
+ bool DoSetup(const std::string& build_dir, bool force_create);
+
+ // Runs the load, returning true on success. On failure, prints the error
+ // and returns false. This includes both RunPreMessageLoop() and
+ // RunPostMessageLoop().
+ bool Run();
+
+ Scheduler& scheduler() { return scheduler_; }
+
+ // Returns the file used to store the build arguments. Note that the path
+ // might not exist.
+ SourceFile GetBuildArgFile() const;
+
+ // Sets whether the build arguments should be filled during setup from the
+ // command line/build argument file. This will be true by default. The use
+ // case for setting it to false is when editing build arguments, we don't
+ // want to rely on them being valid.
+ void set_fill_arguments(bool fa) { fill_arguments_ = fa; }
+
+ // After a successful run, setting this will additionally cause the public
+ // headers to be checked. Defaults to false.
+ void set_check_public_headers(bool s) {
+ check_public_headers_ = s;
+ }
+
+ // Read from the .gn file, these are the targets to check. If the .gn file
+ // does not specify anything, this will be null. If the .gn file specifies
+ // the empty list, this will be non-null but empty.
+ const std::vector<LabelPattern>* check_patterns() const {
+ return check_patterns_.get();
+ }
+
+ BuildSettings& build_settings() { return build_settings_; }
+ Builder* builder() { return builder_.get(); }
+ LoaderImpl* loader() { return loader_.get(); }
+
+ // Name of the file in the root build directory that contains the build
+  // arguments.
+ static const char kBuildArgFileName[];
+
+ private:
+ // Performs the two sets of operations to run the generation before and after
+ // the message loop is run.
+ void RunPreMessageLoop();
+ bool RunPostMessageLoop();
+
+ // Fills build arguments. Returns true on success.
+ bool FillArguments(const base::CommandLine& cmdline);
+
+ // Fills the build arguments from the command line or from the build arg file.
+ bool FillArgsFromCommandLine(const std::string& args);
+ bool FillArgsFromFile();
+
+ // Given an already-loaded args_input_file_, parses and saves the resulting
+ // arguments. Backend for the different FillArgs variants.
+ bool FillArgsFromArgsInputFile();
+
+ // Writes the build arguments to the build arg file.
+ bool SaveArgsToFile();
+
+ // Fills the root directory into the settings. Returns true on success.
+ bool FillSourceDir(const base::CommandLine& cmdline);
+
+ // Fills the build directory given the value the user has specified.
+ // Must happen after FillSourceDir so we can resolve source-relative
+  // paths. If require_exists is true, it will fail if the dir doesn't exist.
+ bool FillBuildDir(const std::string& build_dir, bool require_exists);
+
+ // Fills the python path portion of the command line. On failure, sets
+ // it to just "python".
+ void FillPythonPath();
+
+ // Run config file.
+ bool RunConfigFile();
+
+ bool FillOtherConfig(const base::CommandLine& cmdline);
+
+ BuildSettings build_settings_;
+ scoped_refptr<LoaderImpl> loader_;
+ scoped_refptr<Builder> builder_;
+
+ SourceFile root_build_file_;
+
+ bool check_public_headers_;
+
+ // See getter for info.
+ std::unique_ptr<std::vector<LabelPattern>> check_patterns_;
+
+ Scheduler scheduler_;
+
+ // These settings and toolchain are used to interpret the command line and
+ // dot file.
+ Settings dotfile_settings_;
+ Scope dotfile_scope_;
+
+ // State for invoking the dotfile.
+ base::FilePath dotfile_name_;
+ std::unique_ptr<InputFile> dotfile_input_file_;
+ std::vector<Token> dotfile_tokens_;
+ std::unique_ptr<ParseNode> dotfile_root_;
+
+ // Set to true when we should populate the build arguments from the command
+ // line or build argument file. See setter above.
+ bool fill_arguments_;
+
+ // State for invoking the command line args. We specifically want to keep
+ // this around for the entire run so that Values can blame to the command
+ // line when we issue errors about them.
+ std::unique_ptr<InputFile> args_input_file_;
+ std::vector<Token> args_tokens_;
+ std::unique_ptr<ParseNode> args_root_;
+
+ DISALLOW_COPY_AND_ASSIGN(Setup);
+};
+
+#endif // TOOLS_GN_SETUP_H_
diff --git a/chromium/tools/gn/source_dir.cc b/chromium/tools/gn/source_dir.cc
new file mode 100644
index 00000000000..41c8931ae95
--- /dev/null
+++ b/chromium/tools/gn/source_dir.cc
@@ -0,0 +1,220 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/source_dir.h"
+
+#include "base/logging.h"
+#include "build/build_config.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/source_file.h"
+
+namespace {
+
+void AssertValueSourceDirString(const std::string& s) {
+ if (!s.empty()) {
+#if defined(OS_WIN)
+ DCHECK(s[0] == '/' ||
+ (s.size() > 2 && s[0] != '/' && s[1] == ':' && IsSlash(s[2])));
+#else
+ DCHECK(s[0] == '/');
+#endif
+ DCHECK(EndsWithSlash(s)) << s;
+ }
+}
+
+} // namespace
+
+SourceDir::SourceDir() {
+}
+
+SourceDir::SourceDir(const base::StringPiece& p)
+ : value_(p.data(), p.size()) {
+ if (!EndsWithSlash(value_))
+ value_.push_back('/');
+ AssertValueSourceDirString(value_);
+}
+
+SourceDir::SourceDir(SwapIn, std::string* s) {
+ value_.swap(*s);
+ if (!EndsWithSlash(value_))
+ value_.push_back('/');
+ AssertValueSourceDirString(value_);
+}
+
+SourceDir::~SourceDir() {
+}
+
+SourceFile SourceDir::ResolveRelativeFile(
+ const Value& p,
+ Err* err,
+ const base::StringPiece& source_root) const {
+ SourceFile ret;
+ if (!p.VerifyTypeIs(Value::STRING, err))
+ return ret;
+
+ // It's an error to resolve an empty string or one that is a directory
+ // (indicated by a trailing slash) because this is the function that expects
+ // to return a file.
+ const std::string& str = p.string_value();
+ if (str.empty()) {
+ *err = Err(p, "Empty file path.",
+ "You can't use empty strings as file paths. That's just wrong.");
+ return ret;
+ } else if (str[str.size() - 1] == '/') {
+ *err = Err(p, "File path ends in a slash.",
+ "You specified the path\n " + str + "\n"
+ "and it ends in a slash, indicating you think it's a directory."
+ "\nBut here you're supposed to be listing a file.");
+ return ret;
+ }
+
+ if (str.size() >= 2 && str[0] == '/' && str[1] == '/') {
+ // Source-relative.
+ ret.value_.assign(str.data(), str.size());
+ NormalizePath(&ret.value_, source_root);
+ return ret;
+ } else if (IsPathAbsolute(str)) {
+ if (source_root.empty() ||
+ !MakeAbsolutePathRelativeIfPossible(source_root, str, &ret.value_)) {
+#if defined(OS_WIN)
+ // On Windows we'll accept "C:\foo" as an absolute path, which we want
+ // to convert to "/C:..." here.
+ if (str[0] != '/')
+ ret.value_ = "/";
+#endif
+ ret.value_.append(str.data(), str.size());
+ }
+ NormalizePath(&ret.value_);
+ return ret;
+ }
+
+ if (!source_root.empty()) {
+ std::string absolute =
+ FilePathToUTF8(Resolve(UTF8ToFilePath(source_root)).AppendASCII(
+ str).value());
+ NormalizePath(&absolute);
+ if (!MakeAbsolutePathRelativeIfPossible(source_root, absolute,
+ &ret.value_)) {
+#if defined(OS_WIN)
+ // On Windows we'll accept "C:\foo" as an absolute path, which we want
+ // to convert to "/C:..." here.
+ if (absolute[0] != '/')
+ ret.value_ = "/";
+#endif
+ ret.value_.append(absolute.data(), absolute.size());
+ }
+ return ret;
+ }
+
+ // With no source_root_, there's nothing we can do about
+ // e.g. p=../../../path/to/file and value_=//source and we'll
+  // erroneously return //file.
+ ret.value_.reserve(value_.size() + str.size());
+ ret.value_.assign(value_);
+ ret.value_.append(str.data(), str.size());
+
+ NormalizePath(&ret.value_);
+ return ret;
+}
+
+SourceDir SourceDir::ResolveRelativeDir(
+ const Value& p,
+ Err* err,
+ const base::StringPiece& source_root) const {
+ if (!p.VerifyTypeIs(Value::STRING, err))
+ return SourceDir();
+ return ResolveRelativeDir(p, p.string_value(), err, source_root);
+}
+
+SourceDir SourceDir::ResolveRelativeDir(
+ const Value& blame_but_dont_use,
+ const base::StringPiece& str,
+ Err* err,
+ const base::StringPiece& source_root) const {
+ SourceDir ret;
+
+ if (str.empty()) {
+ *err = Err(blame_but_dont_use, "Empty directory path.",
+ "You can't use empty strings as directories. "
+ "That's just wrong.");
+ return ret;
+ }
+
+ if (str.size() >= 2 && str[0] == '/' && str[1] == '/') {
+ // Source-relative.
+ ret.value_.assign(str.data(), str.size());
+ if (!EndsWithSlash(ret.value_))
+ ret.value_.push_back('/');
+ NormalizePath(&ret.value_, source_root);
+ return ret;
+ } else if (IsPathAbsolute(str)) {
+ if (source_root.empty() ||
+ !MakeAbsolutePathRelativeIfPossible(source_root, str, &ret.value_)) {
+#if defined(OS_WIN)
+ if (str[0] != '/') // See the file case for why we do this check.
+ ret.value_ = "/";
+#endif
+ ret.value_.append(str.data(), str.size());
+ }
+ NormalizePath(&ret.value_);
+ if (!EndsWithSlash(ret.value_))
+ ret.value_.push_back('/');
+ return ret;
+ }
+
+ if (!source_root.empty()) {
+ std::string absolute =
+ FilePathToUTF8(Resolve(UTF8ToFilePath(source_root)).AppendASCII(
+ str.as_string()).value());
+ NormalizePath(&absolute);
+ if (!MakeAbsolutePathRelativeIfPossible(source_root, absolute,
+ &ret.value_)) {
+#if defined(OS_WIN)
+ if (absolute[0] != '/') // See the file case for why we do this check.
+ ret.value_ = "/";
+#endif
+ ret.value_.append(absolute.data(), absolute.size());
+ }
+ if (!EndsWithSlash(ret.value_))
+ ret.value_.push_back('/');
+ return ret;
+ }
+
+ ret.value_.reserve(value_.size() + str.size());
+ ret.value_.assign(value_);
+ ret.value_.append(str.data(), str.size());
+
+ NormalizePath(&ret.value_);
+ if (!EndsWithSlash(ret.value_))
+ ret.value_.push_back('/');
+ AssertValueSourceDirString(ret.value_);
+
+ return ret;
+}
+
+base::FilePath SourceDir::Resolve(const base::FilePath& source_root) const {
+ if (is_null())
+ return base::FilePath();
+
+ std::string converted;
+ if (is_system_absolute()) {
+ if (value_.size() > 2 && value_[2] == ':') {
+ // Windows path, strip the leading slash.
+ converted.assign(&value_[1], value_.size() - 1);
+ } else {
+ converted.assign(value_);
+ }
+ return base::FilePath(UTF8ToFilePath(converted));
+ }
+
+  // Strip the double-leading slash for source-relative paths.
+ converted.assign(&value_[2], value_.size() - 2);
+ return source_root.Append(UTF8ToFilePath(converted))
+ .NormalizePathSeparatorsTo('/');
+}
+
+void SourceDir::SwapValue(std::string* v) {
+ value_.swap(*v);
+ AssertValueSourceDirString(value_);
+}
diff --git a/chromium/tools/gn/source_dir.h b/chromium/tools/gn/source_dir.h
new file mode 100644
index 00000000000..422fa89c7ef
--- /dev/null
+++ b/chromium/tools/gn/source_dir.h
@@ -0,0 +1,134 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_SOURCE_DIR_H_
+#define TOOLS_GN_SOURCE_DIR_H_
+
+#include <stddef.h>
+
+#include <algorithm>
+#include <string>
+
+#include "base/containers/hash_tables.h"
+#include "base/files/file_path.h"
+#include "base/logging.h"
+#include "base/strings/string_piece.h"
+
+class Err;
+class SourceFile;
+class Value;
+
+// Represents a directory within the source tree. Source dirs begin and end in
+// slashes.
+//
+// If there is one slash at the beginning, it will mean a system-absolute file
+// path. On Windows, absolute system paths will be of the form "/C:/foo/bar".
+//
+// Two slashes at the beginning indicate a path relative to the source root.
+class SourceDir {
+ public:
+ enum SwapIn { SWAP_IN };
+
+ SourceDir();
+ explicit SourceDir(const base::StringPiece& p);
+ // Swaps the given string in without copies. The given string will be empty
+ // after this call.
+ SourceDir(SwapIn, std::string* s);
+ ~SourceDir();
+
+ // Resolves a file or dir name relative to this source directory. Will return
+  // an empty SourceDir/File on error and set the given *err pointer (required).
+ // Empty input is always an error.
+ //
+ // If source_root is supplied, these functions will additionally handle the
+ // case where the input is a system-absolute but still inside the source
+ // tree. This is the case for some external tools.
+ SourceFile ResolveRelativeFile(
+ const Value& p,
+ Err* err,
+ const base::StringPiece& source_root = base::StringPiece()) const;
+ SourceDir ResolveRelativeDir(
+ const Value& p,
+ Err* err,
+ const base::StringPiece& source_root = base::StringPiece()) const;
+
+ // Like ResolveRelativeDir but takes a separate value (which gets blamed)
+ // and string to use (in cases where a substring has been extracted from the
+ // value, as with label resolution).
+ SourceDir ResolveRelativeDir(
+ const Value& blame_but_dont_use,
+ const base::StringPiece& p,
+ Err* err,
+ const base::StringPiece& source_root = base::StringPiece()) const;
+
+ // Resolves this source file relative to some given source root. Returns
+ // an empty file path on error.
+ base::FilePath Resolve(const base::FilePath& source_root) const;
+
+ bool is_null() const { return value_.empty(); }
+ const std::string& value() const { return value_; }
+
+ // Returns true if this path starts with a "//" which indicates a path
+ // from the source root.
+ bool is_source_absolute() const {
+ return value_.size() >= 2 && value_[0] == '/' && value_[1] == '/';
+ }
+
+ // Returns true if this path starts with a single slash which indicates a
+ // system-absolute path.
+ bool is_system_absolute() const {
+ return !is_source_absolute();
+ }
+
+ // Returns a source-absolute path starting with only one slash at the
+ // beginning (normally source-absolute paths start with two slashes to mark
+ // them as such). This is normally used when concatenating directories
+ // together.
+ //
+ // This function asserts that the directory is actually source-absolute. The
+ // return value points into our buffer.
+ base::StringPiece SourceAbsoluteWithOneSlash() const {
+ CHECK(is_source_absolute());
+ return base::StringPiece(&value_[1], value_.size() - 1);
+ }
+
+ void SwapValue(std::string* v);
+
+ bool operator==(const SourceDir& other) const {
+ return value_ == other.value_;
+ }
+ bool operator!=(const SourceDir& other) const {
+ return !operator==(other);
+ }
+ bool operator<(const SourceDir& other) const {
+ return value_ < other.value_;
+ }
+
+ void swap(SourceDir& other) {
+ value_.swap(other.value_);
+ }
+
+ private:
+ friend class SourceFile;
+ std::string value_;
+
+ // Copy & assign supported.
+};
+
+namespace BASE_HASH_NAMESPACE {
+
+template<> struct hash<SourceDir> {
+ std::size_t operator()(const SourceDir& v) const {
+ hash<std::string> h;
+ return h(v.value());
+ }
+};
+
+} // namespace BASE_HASH_NAMESPACE
+
+inline void swap(SourceDir& lhs, SourceDir& rhs) {
+ lhs.swap(rhs);
+}
+
+#endif // TOOLS_GN_SOURCE_DIR_H_
diff --git a/chromium/tools/gn/source_dir_unittest.cc b/chromium/tools/gn/source_dir_unittest.cc
new file mode 100644
index 00000000000..80f9a5f6d4b
--- /dev/null
+++ b/chromium/tools/gn/source_dir_unittest.cc
@@ -0,0 +1,187 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "build/build_config.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/err.h"
+#include "tools/gn/source_dir.h"
+#include "tools/gn/source_file.h"
+#include "tools/gn/value.h"
+
+TEST(SourceDir, ResolveRelativeFile) {
+ Err err;
+ SourceDir base("//base/");
+#if defined(OS_WIN)
+ base::StringPiece source_root("C:/source/root");
+#else
+ base::StringPiece source_root("/source/root");
+#endif
+
+ // Empty input is an error.
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, std::string()), &err, source_root) == SourceFile());
+ EXPECT_TRUE(err.has_error());
+
+ // These things are directories, so should be an error.
+ err = Err();
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "//foo/bar/"), &err, source_root) == SourceFile());
+ EXPECT_TRUE(err.has_error());
+
+ err = Err();
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "bar/"), &err, source_root) == SourceFile());
+ EXPECT_TRUE(err.has_error());
+
+ // Absolute paths should be passed unchanged.
+ err = Err();
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "//foo"), &err, source_root) == SourceFile("//foo"));
+ EXPECT_FALSE(err.has_error());
+
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "/foo"), &err, source_root) == SourceFile("/foo"));
+ EXPECT_FALSE(err.has_error());
+
+ // Basic relative stuff.
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "foo"), &err, source_root) == SourceFile("//base/foo"));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "./foo"), &err, source_root) == SourceFile("//base/foo"));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "../foo"), &err, source_root) == SourceFile("//foo"));
+ EXPECT_FALSE(err.has_error());
+
+ // If the given relative path points outside the source root, we
+ // expect an absolute path.
+#if defined(OS_WIN)
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "../../foo"), &err, source_root) ==
+ SourceFile("/C:/source/foo"));
+ EXPECT_FALSE(err.has_error());
+
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "//../foo"), &err, source_root) ==
+ SourceFile("/C:/source/foo"));
+ EXPECT_FALSE(err.has_error());
+
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "//../root/foo"), &err, source_root) ==
+ SourceFile("/C:/source/root/foo"));
+ EXPECT_FALSE(err.has_error());
+
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "//../../../foo/bar"), &err, source_root) ==
+ SourceFile("/foo/bar"));
+ EXPECT_FALSE(err.has_error());
+#else
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "../../foo"), &err, source_root) ==
+ SourceFile("/source/foo"));
+ EXPECT_FALSE(err.has_error());
+
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "//../foo"), &err, source_root) ==
+ SourceFile("/source/foo"));
+ EXPECT_FALSE(err.has_error());
+
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "//../root/foo"), &err, source_root) ==
+ SourceFile("/source/root/foo"));
+ EXPECT_FALSE(err.has_error());
+
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "//../../../foo/bar"), &err, source_root) ==
+ SourceFile("/foo/bar"));
+ EXPECT_FALSE(err.has_error());
+#endif
+
+#if defined(OS_WIN)
+ // Note that we don't canonicalize the backslashes to forward slashes.
+ // This could potentially be changed in the future which would mean we should
+ // just change the expected result.
+ EXPECT_TRUE(base.ResolveRelativeFile(
+ Value(nullptr, "C:\\foo\\bar.txt"), &err, source_root) ==
+ SourceFile("/C:/foo/bar.txt"));
+ EXPECT_FALSE(err.has_error());
+#endif
+}
+
+TEST(SourceDir, ResolveRelativeDir) {
+ Err err;
+ SourceDir base("//base/");
+#if defined(OS_WIN)
+ base::StringPiece source_root("C:/source/root");
+#else
+ base::StringPiece source_root("/source/root");
+#endif
+
+ // Empty input is an error.
+ EXPECT_TRUE(base.ResolveRelativeDir(
+ Value(nullptr, std::string()), &err, source_root) == SourceDir());
+ EXPECT_TRUE(err.has_error());
+
+ // Absolute paths should be passed unchanged.
+ err = Err();
+ EXPECT_TRUE(base.ResolveRelativeDir(
+ Value(nullptr, "//foo"), &err, source_root) == SourceDir("//foo/"));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_TRUE(base.ResolveRelativeDir(
+ Value(nullptr, "/foo"), &err, source_root) == SourceDir("/foo/"));
+ EXPECT_FALSE(err.has_error());
+
+ // Basic relative stuff.
+ EXPECT_TRUE(base.ResolveRelativeDir(
+ Value(nullptr, "foo"), &err, source_root) == SourceDir("//base/foo/"));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_TRUE(base.ResolveRelativeDir(
+ Value(nullptr, "./foo"), &err, source_root) == SourceDir("//base/foo/"));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_TRUE(base.ResolveRelativeDir(
+ Value(nullptr, "../foo"), &err, source_root) == SourceDir("//foo/"));
+ EXPECT_FALSE(err.has_error());
+
+ // If the given relative path points outside the source root, we
+ // expect an absolute path.
+#if defined(OS_WIN)
+ EXPECT_TRUE(base.ResolveRelativeDir(
+ Value(nullptr, "../../foo"), &err, source_root) ==
+ SourceDir("/C:/source/foo/"));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_TRUE(base.ResolveRelativeDir(
+ Value(nullptr, "//../foo"), &err, source_root) ==
+ SourceDir("/C:/source/foo/"));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_TRUE(base.ResolveRelativeDir(
+ Value(nullptr, "//.."), &err, source_root) ==
+ SourceDir("/C:/source/"));
+ EXPECT_FALSE(err.has_error());
+#else
+ EXPECT_TRUE(base.ResolveRelativeDir(
+ Value(nullptr, "../../foo"), &err, source_root) ==
+ SourceDir("/source/foo/"));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_TRUE(base.ResolveRelativeDir(
+ Value(nullptr, "//../foo"), &err, source_root) ==
+ SourceDir("/source/foo/"));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_TRUE(base.ResolveRelativeDir(
+ Value(nullptr, "//.."), &err, source_root) ==
+ SourceDir("/source/"));
+ EXPECT_FALSE(err.has_error());
+#endif
+
+#if defined(OS_WIN)
+ // Canonicalize the existing backslashes to forward slashes and add a
+ // leading slash if necessary.
+ EXPECT_TRUE(base.ResolveRelativeDir(
+ Value(nullptr, "\\C:\\foo"), &err) == SourceDir("/C:/foo/"));
+ EXPECT_FALSE(err.has_error());
+ EXPECT_TRUE(base.ResolveRelativeDir(
+ Value(nullptr, "C:\\foo"), &err) == SourceDir("/C:/foo/"));
+ EXPECT_FALSE(err.has_error());
+#endif
+}
diff --git a/chromium/tools/gn/source_file.cc b/chromium/tools/gn/source_file.cc
new file mode 100644
index 00000000000..41a602d2f01
--- /dev/null
+++ b/chromium/tools/gn/source_file.cc
@@ -0,0 +1,85 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/source_file.h"
+
+#include "base/logging.h"
+#include "build/build_config.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/source_dir.h"
+
+namespace {
+
+void AssertValueSourceFileString(const std::string& s) {
+#if defined(OS_WIN)
+ DCHECK(s[0] == '/' ||
+ (s.size() > 2 && s[0] != '/' && s[1] == ':' && IsSlash(s[2])));
+#else
+ DCHECK(s[0] == '/');
+#endif
+ DCHECK(!EndsWithSlash(s)) << s;
+}
+
+} // namespace
+
+SourceFile::SourceFile() {
+}
+
+SourceFile::SourceFile(const base::StringPiece& p)
+ : value_(p.data(), p.size()) {
+ DCHECK(!value_.empty());
+ AssertValueSourceFileString(value_);
+ NormalizePath(&value_);
+}
+
+SourceFile::SourceFile(SwapIn, std::string* value) {
+ value_.swap(*value);
+ DCHECK(!value_.empty());
+ AssertValueSourceFileString(value_);
+ NormalizePath(&value_);
+}
+
+SourceFile::~SourceFile() {
+}
+
+std::string SourceFile::GetName() const {
+ if (is_null())
+ return std::string();
+
+ DCHECK(value_.find('/') != std::string::npos);
+ size_t last_slash = value_.rfind('/');
+ return std::string(&value_[last_slash + 1],
+ value_.size() - last_slash - 1);
+}
+
+SourceDir SourceFile::GetDir() const {
+ if (is_null())
+ return SourceDir();
+
+ DCHECK(value_.find('/') != std::string::npos);
+ size_t last_slash = value_.rfind('/');
+ return SourceDir(base::StringPiece(&value_[0], last_slash + 1));
+}
+
+base::FilePath SourceFile::Resolve(const base::FilePath& source_root) const {
+ if (is_null())
+ return base::FilePath();
+
+ std::string converted;
+ if (is_system_absolute()) {
+ if (value_.size() > 2 && value_[2] == ':') {
+ // Windows path, strip the leading slash.
+ converted.assign(&value_[1], value_.size() - 1);
+ } else {
+ converted.assign(value_);
+ }
+ return base::FilePath(UTF8ToFilePath(converted));
+ }
+
+ converted.assign(&value_[2], value_.size() - 2);
+ if (source_root.empty())
+ return UTF8ToFilePath(converted).NormalizePathSeparatorsTo('/');
+ return source_root.Append(UTF8ToFilePath(converted))
+ .NormalizePathSeparatorsTo('/');
+}
diff --git a/chromium/tools/gn/source_file.h b/chromium/tools/gn/source_file.h
new file mode 100644
index 00000000000..87fd5900b0f
--- /dev/null
+++ b/chromium/tools/gn/source_file.h
@@ -0,0 +1,108 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_SOURCE_FILE_H_
+#define TOOLS_GN_SOURCE_FILE_H_
+
+#include <stddef.h>
+
+#include <algorithm>
+#include <string>
+
+#include "base/containers/hash_tables.h"
+#include "base/files/file_path.h"
+#include "base/logging.h"
+#include "base/strings/string_piece.h"
+
+class SourceDir;
+
+// Represents a file within the source tree. Always begins in a slash, never
+// ends in one.
+class SourceFile {
+ public:
+ enum SwapIn { SWAP_IN };
+
+ SourceFile();
+
+ // Takes a known absolute source file. Always begins in a slash.
+ explicit SourceFile(const base::StringPiece& p);
+
+ // Constructs from the given string by swapping in the contents of the given
+ // value. The value will be the empty string after this call.
+ SourceFile(SwapIn, std::string* value);
+
+ ~SourceFile();
+
+ bool is_null() const { return value_.empty(); }
+ const std::string& value() const { return value_; }
+
+ // Returns everything after the last slash.
+ std::string GetName() const;
+ SourceDir GetDir() const;
+
+ // Resolves this source file relative to some given source root. Returns
+ // an empty file path on error.
+ base::FilePath Resolve(const base::FilePath& source_root) const;
+
+ // Returns true if this file starts with a "//" which indicates a path
+ // from the source root.
+ bool is_source_absolute() const {
+ return value_.size() >= 2 && value_[0] == '/' && value_[1] == '/';
+ }
+
+ // Returns true if this file starts with a single slash which indicates a
+ // system-absolute path.
+ bool is_system_absolute() const {
+ return !is_source_absolute();
+ }
+
+ // Returns a source-absolute path starting with only one slash at the
+ // beginning (normally source-absolute paths start with two slashes to mark
+ // them as such). This is normally used when concatenating names together.
+ //
+ // This function asserts that the file is actually source-absolute. The
+ // return value points into our buffer.
+ base::StringPiece SourceAbsoluteWithOneSlash() const {
+ CHECK(is_source_absolute());
+ return base::StringPiece(&value_[1], value_.size() - 1);
+ }
+
+ bool operator==(const SourceFile& other) const {
+ return value_ == other.value_;
+ }
+ bool operator!=(const SourceFile& other) const {
+ return !operator==(other);
+ }
+ bool operator<(const SourceFile& other) const {
+ return value_ < other.value_;
+ }
+
+ void swap(SourceFile& other) {
+ value_.swap(other.value_);
+ }
+
+ private:
+ friend class SourceDir;
+
+ std::string value_;
+
+ // Copy & assign supported.
+};
+
+namespace BASE_HASH_NAMESPACE {
+
+template<> struct hash<SourceFile> {
+ std::size_t operator()(const SourceFile& v) const {
+ hash<std::string> h;
+ return h(v.value());
+ }
+};
+
+} // namespace BASE_HASH_NAMESPACE
+
+inline void swap(SourceFile& lhs, SourceFile& rhs) {
+ lhs.swap(rhs);
+}
+
+#endif // TOOLS_GN_SOURCE_FILE_H_
diff --git a/chromium/tools/gn/source_file_type.cc b/chromium/tools/gn/source_file_type.cc
new file mode 100644
index 00000000000..72b872c5cae
--- /dev/null
+++ b/chromium/tools/gn/source_file_type.cc
@@ -0,0 +1,33 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/source_file_type.h"
+
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/source_file.h"
+
+SourceFileType GetSourceFileType(const SourceFile& file) {
+ base::StringPiece extension = FindExtension(&file.value());
+ if (extension == "cc" || extension == "cpp" || extension == "cxx")
+ return SOURCE_CPP;
+ if (extension == "h")
+ return SOURCE_H;
+ if (extension == "c")
+ return SOURCE_C;
+ if (extension == "m")
+ return SOURCE_M;
+ if (extension == "mm")
+ return SOURCE_MM;
+ if (extension == "rc")
+ return SOURCE_RC;
+ if (extension == "S" || extension == "s" || extension == "asm")
+ return SOURCE_S;
+ if (extension == "o" || extension == "obj")
+ return SOURCE_O;
+ if (extension == "def")
+ return SOURCE_DEF;
+
+ return SOURCE_UNKNOWN;
+}
+
diff --git a/chromium/tools/gn/source_file_type.h b/chromium/tools/gn/source_file_type.h
new file mode 100644
index 00000000000..c43b4324443
--- /dev/null
+++ b/chromium/tools/gn/source_file_type.h
@@ -0,0 +1,31 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_SOURCE_FILE_TYPE_H_
+#define TOOLS_GN_SOURCE_FILE_TYPE_H_
+
+class SourceFile;
+
+// This should be sequential integers starting from 0 so they can be used as
+// array indices.
+enum SourceFileType {
+ SOURCE_UNKNOWN = 0,
+ SOURCE_ASM,
+ SOURCE_C,
+ SOURCE_CPP,
+ SOURCE_H,
+ SOURCE_M,
+ SOURCE_MM,
+ SOURCE_S,
+ SOURCE_RC,
+ SOURCE_O, // Object files can be inputs, too. Also counts .obj.
+ SOURCE_DEF,
+
+ // Must be last.
+ SOURCE_NUMTYPES,
+};
+
+SourceFileType GetSourceFileType(const SourceFile& file);
+
+#endif // TOOLS_GN_SOURCE_FILE_TYPE_H_
diff --git a/chromium/tools/gn/source_file_unittest.cc b/chromium/tools/gn/source_file_unittest.cc
new file mode 100644
index 00000000000..8a9218f5d28
--- /dev/null
+++ b/chromium/tools/gn/source_file_unittest.cc
@@ -0,0 +1,19 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/source_file.h"
+
+// The SourceFile object should normalize the input passed to the constructor.
+// The normalizer unit test checks for all the weird edge cases for normalizing
+// so here just check that it gets called.
+TEST(SourceFile, Normalize) {
+ SourceFile a("//foo/../bar.cc");
+ EXPECT_EQ("//bar.cc", a.value());
+
+ std::string b_str("//foo/././../bar.cc");
+ SourceFile b(SourceFile::SwapIn(), &b_str);
+ EXPECT_TRUE(b_str.empty()); // Should have been swapped in.
+ EXPECT_EQ("//bar.cc", b.value());
+}
diff --git a/chromium/tools/gn/standard_out.cc b/chromium/tools/gn/standard_out.cc
new file mode 100644
index 00000000000..6f7dcc8ff6f
--- /dev/null
+++ b/chromium/tools/gn/standard_out.cc
@@ -0,0 +1,285 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/standard_out.h"
+
+#include <stddef.h>
+
+#include <vector>
+
+#include "base/command_line.h"
+#include "base/logging.h"
+#include "base/strings/string_piece.h"
+#include "base/strings/string_split.h"
+#include "base/strings/string_util.h"
+#include "build/build_config.h"
+#include "tools/gn/switches.h"
+
+#if defined(OS_WIN)
+#include <windows.h>
+#else
+#include <stdio.h>
+#include <unistd.h>
+#endif
+
+namespace {
+
+bool initialized = false;
+
+#if defined(OS_WIN)
+HANDLE hstdout;
+WORD default_attributes;
+#endif
+bool is_console = false;
+
+bool is_markdown = false;
+
+void EnsureInitialized() {
+ if (initialized)
+ return;
+ initialized = true;
+
+ const base::CommandLine* cmdline = base::CommandLine::ForCurrentProcess();
+ if (cmdline->HasSwitch(switches::kMarkdown)) {
+ // Output help in Markdown's syntax, not color-highlighted.
+ is_markdown = true;
+ }
+
+ if (cmdline->HasSwitch(switches::kNoColor)) {
+ // Force color off.
+ is_console = false;
+ return;
+ }
+
+#if defined(OS_WIN)
+ // On Windows, we can't force the color on. If the output handle isn't a
+ // console, there's nothing we can do about it.
+ hstdout = ::GetStdHandle(STD_OUTPUT_HANDLE);
+ CONSOLE_SCREEN_BUFFER_INFO info;
+ is_console = !!::GetConsoleScreenBufferInfo(hstdout, &info);
+ default_attributes = info.wAttributes;
+#else
+ if (cmdline->HasSwitch(switches::kColor))
+ is_console = true;
+ else
+ is_console = isatty(fileno(stdout));
+#endif
+}
+
+#if !defined(OS_WIN)
+void WriteToStdOut(const std::string& output) {
+ size_t written_bytes = fwrite(output.data(), 1, output.size(), stdout);
+ DCHECK_EQ(output.size(), written_bytes);
+}
+#endif // !defined(OS_WIN)
+
+void OutputMarkdownDec(TextDecoration dec) {
+ // The markdown rendering turns "dim" text to italics and any
+ // other colored text to bold.
+
+#if defined(OS_WIN)
+ DWORD written = 0;
+ if (dec == DECORATION_DIM)
+ ::WriteFile(hstdout, "*", 1, &written, nullptr);
+ else if (dec != DECORATION_NONE)
+ ::WriteFile(hstdout, "**", 2, &written, nullptr);
+#else
+ if (dec == DECORATION_DIM)
+ WriteToStdOut("*");
+ else if (dec != DECORATION_NONE)
+ WriteToStdOut("**");
+#endif
+}
+
+} // namespace
+
+#if defined(OS_WIN)
+
+void OutputString(const std::string& output, TextDecoration dec) {
+ EnsureInitialized();
+ DWORD written = 0;
+
+ if (is_markdown) {
+ OutputMarkdownDec(dec);
+ } else if (is_console) {
+ switch (dec) {
+ case DECORATION_NONE:
+ break;
+ case DECORATION_DIM:
+ ::SetConsoleTextAttribute(hstdout, FOREGROUND_INTENSITY);
+ break;
+ case DECORATION_RED:
+ ::SetConsoleTextAttribute(hstdout,
+ FOREGROUND_RED | FOREGROUND_INTENSITY);
+ break;
+ case DECORATION_GREEN:
+ // Keep green non-bold.
+ ::SetConsoleTextAttribute(hstdout, FOREGROUND_GREEN);
+ break;
+ case DECORATION_BLUE:
+ ::SetConsoleTextAttribute(hstdout,
+ FOREGROUND_BLUE | FOREGROUND_INTENSITY);
+ break;
+ case DECORATION_YELLOW:
+ ::SetConsoleTextAttribute(hstdout,
+ FOREGROUND_RED | FOREGROUND_GREEN);
+ break;
+ }
+ }
+
+ std::string tmpstr = output;
+ if (is_markdown && dec == DECORATION_YELLOW) {
+ // https://code.google.com/p/gitiles/issues/detail?id=77
+ // Gitiles will replace "--" with an em dash in non-code text.
+ // Figuring out all instances of this might be difficult, but we can
+ // at least escape the instances where this shows up in a heading.
+ base::ReplaceSubstringsAfterOffset(&tmpstr, 0, "--", "\\--");
+ }
+ ::WriteFile(hstdout, tmpstr.c_str(), static_cast<DWORD>(tmpstr.size()),
+ &written, nullptr);
+
+ if (is_markdown) {
+ OutputMarkdownDec(dec);
+ } else if (is_console) {
+ ::SetConsoleTextAttribute(hstdout, default_attributes);
+ }
+}
+
+#else
+
+void OutputString(const std::string& output, TextDecoration dec) {
+ EnsureInitialized();
+ if (is_markdown) {
+ OutputMarkdownDec(dec);
+ } else if (is_console) {
+ switch (dec) {
+ case DECORATION_NONE:
+ break;
+ case DECORATION_DIM:
+ WriteToStdOut("\e[2m");
+ break;
+ case DECORATION_RED:
+ WriteToStdOut("\e[31m\e[1m");
+ break;
+ case DECORATION_GREEN:
+ WriteToStdOut("\e[32m");
+ break;
+ case DECORATION_BLUE:
+ WriteToStdOut("\e[34m\e[1m");
+ break;
+ case DECORATION_YELLOW:
+ WriteToStdOut("\e[33m\e[1m");
+ break;
+ }
+ }
+
+ std::string tmpstr = output;
+ if (is_markdown && dec == DECORATION_YELLOW) {
+ // https://code.google.com/p/gitiles/issues/detail?id=77
+ // Gitiles will replace "--" with an em dash in non-code text.
+ // Figuring out all instances of this might be difficult, but we can
+ // at least escape the instances where this shows up in a heading.
+ base::ReplaceSubstringsAfterOffset(&tmpstr, 0, "--", "\\--");
+ }
+ WriteToStdOut(tmpstr.data());
+
+ if (is_markdown) {
+ OutputMarkdownDec(dec);
+ } else if (is_console && dec != DECORATION_NONE) {
+ WriteToStdOut("\e[0m");
+ }
+}
+
+#endif
+
+void PrintShortHelp(const std::string& line) {
+ EnsureInitialized();
+
+ size_t colon_offset = line.find(':');
+ size_t first_normal = 0;
+ if (colon_offset != std::string::npos) {
+ OutputString(" " + line.substr(0, colon_offset), DECORATION_YELLOW);
+ first_normal = colon_offset;
+ }
+
+ // See if the colon is followed by a " [" and if so, dim the contents of [ ].
+ if (first_normal > 0 &&
+ line.size() > first_normal + 2 &&
+ line[first_normal + 1] == ' ' && line[first_normal + 2] == '[') {
+ size_t begin_bracket = first_normal + 2;
+ OutputString(": ");
+ first_normal = line.find(']', begin_bracket);
+ if (first_normal == std::string::npos)
+ first_normal = line.size();
+ else
+ first_normal++;
+ OutputString(line.substr(begin_bracket, first_normal - begin_bracket),
+ DECORATION_DIM);
+ }
+
+ OutputString(line.substr(first_normal) + "\n");
+}
+
+void PrintLongHelp(const std::string& text) {
+ EnsureInitialized();
+
+ bool first_header = true;
+ bool in_body = false;
+ for (const std::string& line : base::SplitString(
+ text, "\n", base::KEEP_WHITESPACE, base::SPLIT_WANT_ALL)) {
+ // Check for a heading line.
+ if (!line.empty() && line[0] != ' ') {
+ if (is_markdown) {
+ // GN's block-level formatting is converted to markdown as follows:
+ // * The first heading is treated as an H2.
+ // * Subsequent heading are treated as H3s.
+ // * Any other text is wrapped in a code block and displayed as-is.
+ //
+ // Span-level formatting (the decorations) is converted inside
+ // OutputString().
+ if (in_body) {
+ OutputString("```\n\n", DECORATION_NONE);
+ in_body = false;
+ }
+
+ if (first_header) {
+ OutputString("## ", DECORATION_NONE);
+ first_header = false;
+ } else {
+ OutputString("### ", DECORATION_NONE);
+ }
+ }
+
+ // Highlight up to the colon (if any).
+ size_t chars_to_highlight = line.find(':');
+ if (chars_to_highlight == std::string::npos)
+ chars_to_highlight = line.size();
+
+ OutputString(line.substr(0, chars_to_highlight), DECORATION_YELLOW);
+ OutputString(line.substr(chars_to_highlight) + "\n");
+ continue;
+ } else if (is_markdown && !line.empty() && !in_body) {
+ OutputString("```\n", DECORATION_NONE);
+ in_body = true;
+ }
+
+ // Check for a comment.
+ TextDecoration dec = DECORATION_NONE;
+ for (const auto& elem : line) {
+ if (elem == '#' && !is_markdown) {
+ // Got a comment, draw dimmed.
+ dec = DECORATION_DIM;
+ break;
+ } else if (elem != ' ') {
+ break;
+ }
+ }
+
+ OutputString(line + "\n", dec);
+ }
+
+ if (is_markdown && in_body)
+ OutputString("\n```\n");
+}
+
diff --git a/chromium/tools/gn/standard_out.h b/chromium/tools/gn/standard_out.h
new file mode 100644
index 00000000000..9ccbe52b4e4
--- /dev/null
+++ b/chromium/tools/gn/standard_out.h
@@ -0,0 +1,35 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_STANDARD_OUT_H_
+#define TOOLS_GN_STANDARD_OUT_H_
+
+#include <string>
+
+enum TextDecoration {
+ DECORATION_NONE = 0,
+ DECORATION_DIM,
+ DECORATION_RED,
+ DECORATION_GREEN,
+ DECORATION_BLUE,
+ DECORATION_YELLOW
+};
+
+void OutputString(const std::string& output,
+ TextDecoration dec = DECORATION_NONE);
+
+// Prints a line for a command, assuming there is a colon. Everything before
+// the colon is the command (and is highlighted). After the colon if there is
+// a square bracket, the contents of the bracket is dimmed.
+//
+// The line is indented 2 spaces.
+void PrintShortHelp(const std::string& line);
+
+// Rules:
+// - Lines beginning with non-whitespace are highlighted up to the first
+// colon (or the whole line if not).
+// - Lines whose first non-whitespace character is a # are dimmed.
+void PrintLongHelp(const std::string& text);
+
+#endif // TOOLS_GN_STANDARD_OUT_H_
diff --git a/chromium/tools/gn/string_utils.cc b/chromium/tools/gn/string_utils.cc
new file mode 100644
index 00000000000..5e8068111a5
--- /dev/null
+++ b/chromium/tools/gn/string_utils.cc
@@ -0,0 +1,345 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/string_utils.h"
+
+#include <stddef.h>
+#include <cctype>
+
+#include "base/strings/string_number_conversions.h"
+#include "tools/gn/err.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/parser.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/token.h"
+#include "tools/gn/tokenizer.h"
+#include "tools/gn/value.h"
+
+namespace {
+
+// Constructs an Err indicating a range inside a string. We assume that the
+// token has quotes around it that are not counted by the offset.
+Err ErrInsideStringToken(const Token& token, size_t offset, size_t size,
+ const std::string& msg,
+ const std::string& help = std::string()) {
+ // The "+1" is skipping over the " at the beginning of the token.
+ int int_offset = static_cast<int>(offset);
+ Location begin_loc(token.location().file(),
+ token.location().line_number(),
+ token.location().column_number() + int_offset + 1,
+ token.location().byte() + int_offset + 1);
+ Location end_loc(
+ token.location().file(),
+ token.location().line_number(),
+ token.location().column_number() + int_offset + 1 +
+ static_cast<int>(size),
+ token.location().byte() + int_offset + 1 + static_cast<int>(size));
+ return Err(LocationRange(begin_loc, end_loc), msg, help);
+}
+
+// Notes about expression interpolation. This is based loosely on Dart but is
+// slightly less flexible. In Dart, seeing the ${ in a string is something
+// the toplevel parser knows about, and it will recurse into the block
+// treating it as a first-class {...} block. So even things like this work:
+// "hello ${"foo}"*2+"bar"}" => "hello foo}foo}bar"
+// (you can see it did not get confused by the nested strings or the nested "}"
+// inside the block).
+//
+// This is cool but complicates the parser for almost no benefit for this
+// non-general-purpose programming language. The main reason expressions are
+// supported here at all are to support "${scope.variable}" and "${list[0]}",
+// neither of which have any of these edge-cases.
+//
+// In this simplified approach, we search for the terminating '}' and execute
+// the result. This means we can't support any expressions with embedded '}'
+// or '"'. To keep people from getting confused about what's supported and
+// what's not, only identifier and accessor expressions are allowed (neither
+// of these run into any of these edge-cases).
+bool AppendInterpolatedExpression(Scope* scope,
+ const Token& token,
+ const char* input,
+ size_t begin_offset,
+ size_t end_offset,
+ std::string* output,
+ Err* err) {
+ SourceFile empty_source_file; // Prevent most vexing parse.
+ InputFile input_file(empty_source_file);
+ input_file.SetContents(
+ std::string(&input[begin_offset], end_offset - begin_offset));
+
+ // Tokenize.
+ std::vector<Token> tokens = Tokenizer::Tokenize(&input_file, err);
+ if (err->has_error()) {
+ // The error will point into our temporary buffer, rewrite it to refer
+ // to the original token. This will make the location information less
+ // precise, but generally there won't be complicated things in string
+ // interpolations.
+ *err = ErrInsideStringToken(token, begin_offset, end_offset - begin_offset,
+ err->message(), err->help_text());
+ return false;
+ }
+
+ // Parse.
+ std::unique_ptr<ParseNode> node = Parser::ParseExpression(tokens, err);
+ if (err->has_error()) {
+ // Rewrite error as above.
+ *err = ErrInsideStringToken(token, begin_offset, end_offset - begin_offset,
+ err->message(), err->help_text());
+ return false;
+ }
+ if (!(node->AsIdentifier() || node->AsAccessor())) {
+ *err = ErrInsideStringToken(token, begin_offset, end_offset - begin_offset,
+ "Invalid string interpolation.",
+ "The thing inside the ${} must be an identifier ${foo},\n"
+ "a scope access ${foo.bar}, or a list access ${foo[0]}.");
+ return false;
+ }
+
+ // Evaluate.
+ Value result = node->Execute(scope, err);
+ if (err->has_error()) {
+ // Rewrite error as above.
+ *err = ErrInsideStringToken(token, begin_offset, end_offset - begin_offset,
+ err->message(), err->help_text());
+ return false;
+ }
+
+ output->append(result.ToString(false));
+ return true;
+}
+
+bool AppendInterpolatedIdentifier(Scope* scope,
+ const Token& token,
+ const char* input,
+ size_t begin_offset,
+ size_t end_offset,
+ std::string* output,
+ Err* err) {
+ base::StringPiece identifier(&input[begin_offset],
+ end_offset - begin_offset);
+ const Value* value = scope->GetValue(identifier, true);
+ if (!value) {
+ // We assume the input points inside the token.
+ *err = ErrInsideStringToken(
+ token, identifier.data() - token.value().data() - 1, identifier.size(),
+ "Undefined identifier in string expansion.",
+ std::string("\"") + identifier + "\" is not currently in scope.");
+ return false;
+ }
+
+ output->append(value->ToString(false));
+ return true;
+}
+
+// Handles string interpolations: $identifier and ${expression}
+//
+// |*i| is the index into |input| after the $. This will be updated to point to
+// the last character consumed on success. The token is the original string
+// to blame on failure.
+//
+// On failure, returns false and sets the error. On success, appends the
+// result of the interpolation to |*output|.
+bool AppendStringInterpolation(Scope* scope,
+ const Token& token,
+ const char* input, size_t size,
+ size_t* i,
+ std::string* output,
+ Err* err) {
+ size_t dollars_index = *i - 1;
+
+ if (input[*i] == '{') {
+ // Bracketed expression.
+ (*i)++;
+ size_t begin_offset = *i;
+
+ // Find the closing } and check for non-identifier chars. Don't need to
+ // bother checking for the more-restricted first character of an identifier
+ // since the {} unambiguously denotes the range, and identifiers with
+ // invalid names just won't be found later.
+ bool has_non_ident_chars = false;
+ while (*i < size && input[*i] != '}') {
+ has_non_ident_chars |= Tokenizer::IsIdentifierContinuingChar(input[*i]);
+ (*i)++;
+ }
+ if (*i == size) {
+ *err = ErrInsideStringToken(token, dollars_index, *i - dollars_index,
+ "Unterminated ${...");
+ return false;
+ }
+
+ // In the common case, the thing inside the {} will actually be a
+ // simple identifier. Avoid all the complicated parsing of accessors
+ // in this case.
+ if (!has_non_ident_chars) {
+ return AppendInterpolatedIdentifier(scope, token, input, begin_offset,
+ *i, output, err);
+ }
+ return AppendInterpolatedExpression(scope, token, input, begin_offset, *i,
+ output, err);
+ }
+
+ // Simple identifier.
+ // The first char of an identifier is more restricted.
+ if (!Tokenizer::IsIdentifierFirstChar(input[*i])) {
+ *err = ErrInsideStringToken(
+ token, dollars_index, *i - dollars_index + 1,
+ "$ not followed by an identifier char.",
+ "If you want a literal $ use \"\\$\".");
+ return false;
+ }
+ size_t begin_offset = *i;
+ (*i)++;
+
+ // Find the first non-identifier char following the string.
+ while (*i < size && Tokenizer::IsIdentifierContinuingChar(input[*i]))
+ (*i)++;
+ size_t end_offset = *i;
+ (*i)--; // Back up to mark the last character consumed.
+ return AppendInterpolatedIdentifier(scope, token, input, begin_offset,
+ end_offset, output, err);
+}
+
+// Handles a hex literal: $0xFF
+//
+// |*i| is the index into |input| after the $. This will be updated to point to
+// the last character consumed on success. The token is the original string
+// to blame on failure.
+//
+// On failure, returns false and sets the error. On success, appends the
+// char with the given hex value to |*output|.
+bool AppendHexByte(Scope* scope,
+ const Token& token,
+ const char* input, size_t size,
+ size_t* i,
+ std::string* output,
+ Err* err) {
+ size_t dollars_index = *i - 1;
+ // "$0" is already known to exist.
+ if (*i + 3 >= size || input[*i + 1] != 'x' || !std::isxdigit(input[*i + 2]) ||
+ !std::isxdigit(input[*i + 3])) {
+ *err = ErrInsideStringToken(
+ token, dollars_index, *i - dollars_index + 1,
+ "Invalid hex character. Hex values must look like 0xFF.");
+ return false;
+ }
+ int value = 0;
+ if (!base::HexStringToInt(base::StringPiece(&input[*i + 2], 2), &value)) {
+ *err = ErrInsideStringToken(token, dollars_index, *i - dollars_index + 1,
+ "Could not convert hex value.");
+ return false;
+ }
+ *i += 3;
+ output->push_back(value);
+ return true;
+}
+
+} // namespace
+
+bool ExpandStringLiteral(Scope* scope,
+ const Token& literal,
+ Value* result,
+ Err* err) {
+ DCHECK(literal.type() == Token::STRING);
+ DCHECK(literal.value().size() > 1); // Should include quotes.
+ DCHECK(result->type() == Value::STRING); // Should be already set.
+
+ // The token includes the surrounding quotes, so strip those off.
+ const char* input = &literal.value().data()[1];
+ size_t size = literal.value().size() - 2;
+
+ std::string& output = result->string_value();
+ output.reserve(size);
+ for (size_t i = 0; i < size; i++) {
+ if (input[i] == '\\') {
+ if (i < size - 1) {
+ switch (input[i + 1]) {
+ case '\\':
+ case '"':
+ case '$':
+ output.push_back(input[i + 1]);
+ i++;
+ continue;
+ default: // Everything else has no meaning: pass the literal.
+ break;
+ }
+ }
+ output.push_back(input[i]);
+ } else if (input[i] == '$') {
+ i++;
+ if (i == size) {
+ *err = ErrInsideStringToken(literal, i - 1, 1, "$ at end of string.",
+ "I was expecting an identifier, 0xFF, or {...} after the $.");
+ return false;
+ }
+ if (input[i] == '0') {
+ if (!AppendHexByte(scope, literal, input, size, &i, &output, err))
+ return false;
+ } else if (!AppendStringInterpolation(scope, literal, input, size, &i,
+ &output, err))
+ return false;
+ } else {
+ output.push_back(input[i]);
+ }
+ }
+ return true;
+}
+
+size_t EditDistance(const base::StringPiece& s1,
+ const base::StringPiece& s2,
+ size_t max_edit_distance) {
+ // The algorithm implemented below is the "classic"
+ // dynamic-programming algorithm for computing the Levenshtein
+ // distance, which is described here:
+ //
+ // http://en.wikipedia.org/wiki/Levenshtein_distance
+ //
+ // Although the algorithm is typically described using an m x n
+ // array, only one row plus one element are used at a time, so this
+ // implementation just keeps one vector for the row. To update one entry,
+ // only the entries to the left, top, and top-left are needed. The left
+ // entry is in row[x-1], the top entry is what's in row[x] from the last
+ // iteration, and the top-left entry is stored in previous.
+ size_t m = s1.size();
+ size_t n = s2.size();
+
+ std::vector<size_t> row(n + 1);
+ for (size_t i = 1; i <= n; ++i)
+ row[i] = i;
+
+ for (size_t y = 1; y <= m; ++y) {
+ row[0] = y;
+ size_t best_this_row = row[0];
+
+ size_t previous = y - 1;
+ for (size_t x = 1; x <= n; ++x) {
+ size_t old_row = row[x];
+ row[x] = std::min(previous + (s1[y - 1] == s2[x - 1] ? 0u : 1u),
+ std::min(row[x - 1], row[x]) + 1u);
+ previous = old_row;
+ best_this_row = std::min(best_this_row, row[x]);
+ }
+
+ if (max_edit_distance && best_this_row > max_edit_distance)
+ return max_edit_distance + 1;
+ }
+
+ return row[n];
+}
+
+base::StringPiece SpellcheckString(
+ const base::StringPiece& text,
+ const std::vector<base::StringPiece>& words) {
+ const size_t kMaxValidEditDistance = 3u;
+
+ size_t min_distance = kMaxValidEditDistance + 1u;
+ base::StringPiece result;
+ for (base::StringPiece word : words) {
+ size_t distance = EditDistance(word, text, kMaxValidEditDistance);
+ if (distance < min_distance) {
+ min_distance = distance;
+ result = word;
+ }
+ }
+ return result;
+}
diff --git a/chromium/tools/gn/string_utils.h b/chromium/tools/gn/string_utils.h
new file mode 100644
index 00000000000..744714a754a
--- /dev/null
+++ b/chromium/tools/gn/string_utils.h
@@ -0,0 +1,53 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_STRING_UTILS_H_
+#define TOOLS_GN_STRING_UTILS_H_
+
+#include <vector>
+
+#include "base/strings/string_piece.h"
+
+class Err;
+class Scope;
+class Token;
+class Value;
+
+inline std::string operator+(const std::string& a, const base::StringPiece& b) {
+ std::string ret;
+ ret.reserve(a.size() + b.size());
+ ret.assign(a);
+ ret.append(b.data(), b.size());
+ return ret;
+}
+
+inline std::string operator+(const base::StringPiece& a, const std::string& b) {
+ std::string ret;
+ ret.reserve(a.size() + b.size());
+ ret.assign(a.data(), a.size());
+ ret.append(b);
+ return ret;
+}
+
+// Unescapes and expands variables in the given literal, writing the result
+// to the given value. On error, sets |err| and returns false.
+bool ExpandStringLiteral(Scope* scope,
+ const Token& literal,
+ Value* result,
+ Err* err);
+
+// Returns the minimum number of inserts, deletes, and replacements of
+// characters needed to transform s1 to s2, or max_edit_distance + 1 if
+// transforming s1 into s2 isn't possible in at most max_edit_distance steps.
+size_t EditDistance(const base::StringPiece& s1,
+ const base::StringPiece& s2,
+ size_t max_edit_distance);
+
+// Given a string |text| and a vector of correctly-spelled strings |words|,
+// returns the first string in |words| closest to |text|, or an empty
+// StringPiece if none of the strings in |words| is close.
+base::StringPiece SpellcheckString(const base::StringPiece& text,
+ const std::vector<base::StringPiece>& words);
+
+#endif // TOOLS_GN_STRING_UTILS_H_
diff --git a/chromium/tools/gn/string_utils_unittest.cc b/chromium/tools/gn/string_utils_unittest.cc
new file mode 100644
index 00000000000..eb021a8d5c7
--- /dev/null
+++ b/chromium/tools/gn/string_utils_unittest.cc
@@ -0,0 +1,155 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/string_utils.h"
+
+#include <stdint.h>
+#include <utility>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/err.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/token.h"
+#include "tools/gn/value.h"
+
+namespace {
+
+bool CheckExpansionCase(const char* input, const char* expected, bool success) {
+ Scope scope(static_cast<const Settings*>(nullptr));
+ int64_t one = 1;
+ scope.SetValue("one", Value(nullptr, one), nullptr);
+ scope.SetValue("onestring", Value(nullptr, "one"), nullptr);
+
+ // Nested scope called "onescope" with a value "one" inside it.
+ std::unique_ptr<Scope> onescope(
+ new Scope(static_cast<const Settings*>(nullptr)));
+ onescope->SetValue("one", Value(nullptr, one), nullptr);
+ scope.SetValue("onescope", Value(nullptr, std::move(onescope)), nullptr);
+
+ // List called "onelist" with one value that maps to 1.
+ Value onelist(nullptr, Value::LIST);
+ onelist.list_value().push_back(Value(nullptr, one));
+ scope.SetValue("onelist", onelist, nullptr);
+
+ // Construct the string token, which includes the quotes.
+ std::string literal_string;
+ literal_string.push_back('"');
+ literal_string.append(input);
+ literal_string.push_back('"');
+ Token literal(Location(), Token::STRING, literal_string);
+
+ Value result(nullptr, Value::STRING);
+ Err err;
+ bool ret = ExpandStringLiteral(&scope, literal, &result, &err);
+
+ // Err and return value should agree.
+ EXPECT_NE(ret, err.has_error());
+
+ if (ret != success)
+ return false;
+
+ if (!success)
+ return true; // Don't check result on failure.
+ // Compare the expanded result against the expectation.
+ return result.string_value() == expected;
+}
+
+} // namespace
+
+TEST(StringUtils, ExpandStringLiteralIdentifier) {
+ EXPECT_TRUE(CheckExpansionCase("", "", true));
+ EXPECT_TRUE(CheckExpansionCase("hello", "hello", true));
+ EXPECT_TRUE(CheckExpansionCase("hello #$one", "hello #1", true));
+ EXPECT_TRUE(CheckExpansionCase("hello #$one/two", "hello #1/two", true));
+ EXPECT_TRUE(CheckExpansionCase("hello #${one}", "hello #1", true));
+ EXPECT_TRUE(CheckExpansionCase("hello #${one}one", "hello #1one", true));
+ EXPECT_TRUE(CheckExpansionCase("hello #${one}$one", "hello #11", true));
+ EXPECT_TRUE(CheckExpansionCase("$onestring${one}$one", "one11", true));
+ EXPECT_TRUE(CheckExpansionCase("$onescope", "{\n one = 1\n}", true));
+ EXPECT_TRUE(CheckExpansionCase("$onelist", "[1]", true));
+
+ // Hex values
+ EXPECT_TRUE(CheckExpansionCase("$0x0AA", "\x0A""A", true));
+ EXPECT_TRUE(CheckExpansionCase("$0x0a$0xfF", "\x0A\xFF", true));
+
+ // Errors
+ EXPECT_TRUE(CheckExpansionCase("hello #$", nullptr, false));
+ EXPECT_TRUE(CheckExpansionCase("hello #$%", nullptr, false));
+ EXPECT_TRUE(CheckExpansionCase("hello #${", nullptr, false));
+ EXPECT_TRUE(CheckExpansionCase("hello #${}", nullptr, false));
+ EXPECT_TRUE(CheckExpansionCase("hello #$nonexistant", nullptr, false));
+ EXPECT_TRUE(CheckExpansionCase("hello #${unterminated", nullptr, false));
+ EXPECT_TRUE(CheckExpansionCase("hex truncated: $0", nullptr, false));
+ EXPECT_TRUE(CheckExpansionCase("hex truncated: $0x", nullptr, false));
+ EXPECT_TRUE(CheckExpansionCase("hex truncated: $0x0", nullptr, false));
+ EXPECT_TRUE(CheckExpansionCase("hex with bad char: $0a", nullptr, false));
+ EXPECT_TRUE(CheckExpansionCase("hex with bad char: $0x1z", nullptr, false));
+ EXPECT_TRUE(CheckExpansionCase("hex with bad char: $0xz1", nullptr, false));
+
+ // Unknown backslash values aren't special.
+ EXPECT_TRUE(CheckExpansionCase("\\", "\\", true));
+ EXPECT_TRUE(CheckExpansionCase("\\b", "\\b", true));
+
+ // Backslashes escape some special things. \"\$\\ -> "$\ Note that gtest
+ // doesn't like this escape sequence so we have to put it out-of-line.
+ const char* in = "\\\"\\$\\\\";
+ const char* out = "\"$\\";
+ EXPECT_TRUE(CheckExpansionCase(in, out, true));
+}
+
+TEST(StringUtils, ExpandStringLiteralExpression) {
+ // Accessing the scope.
+ EXPECT_TRUE(CheckExpansionCase("hello #${onescope.one}", "hello #1", true));
+ EXPECT_TRUE(CheckExpansionCase("hello #${onescope.two}", nullptr, false));
+
+ // Accessing the list.
+ EXPECT_TRUE(CheckExpansionCase("hello #${onelist[0]}", "hello #1", true));
+ EXPECT_TRUE(CheckExpansionCase("hello #${onelist[1]}", nullptr, false));
+
+ // Trying some other (otherwise valid) expressions should fail.
+ EXPECT_TRUE(CheckExpansionCase("${1 + 2}", nullptr, false));
+ EXPECT_TRUE(CheckExpansionCase("${print(1)}", nullptr, false));
+}
+
+TEST(StringUtils, EditDistance) {
+ EXPECT_EQ(3u, EditDistance("doom melon", "dune melon", 100));
+ EXPECT_EQ(2u, EditDistance("doom melon", "dune melon", 1));
+
+ EXPECT_EQ(2u, EditDistance("ab", "ba", 100));
+ EXPECT_EQ(2u, EditDistance("ba", "ab", 100));
+
+ EXPECT_EQ(2u, EditDistance("ananas", "banana", 100));
+ EXPECT_EQ(2u, EditDistance("banana", "ananas", 100));
+
+ EXPECT_EQ(2u, EditDistance("unclear", "nuclear", 100));
+ EXPECT_EQ(2u, EditDistance("nuclear", "unclear", 100));
+
+ EXPECT_EQ(3u, EditDistance("chrome", "chromium", 100));
+ EXPECT_EQ(3u, EditDistance("chromium", "chrome", 100));
+
+ EXPECT_EQ(4u, EditDistance("", "abcd", 100));
+ EXPECT_EQ(4u, EditDistance("abcd", "", 100));
+
+ EXPECT_EQ(4u, EditDistance("xxx", "xxxxxxx", 100));
+ EXPECT_EQ(4u, EditDistance("xxxxxxx", "xxx", 100));
+
+ EXPECT_EQ(7u, EditDistance("yyy", "xxxxxxx", 100));
+ EXPECT_EQ(7u, EditDistance("xxxxxxx", "yyy", 100));
+}
+
+TEST(StringUtils, SpellcheckString) {
+ std::vector<base::StringPiece> words;
+ words.push_back("your");
+ words.push_back("bravado");
+ words.push_back("won\'t");
+ words.push_back("help");
+ words.push_back("you");
+ words.push_back("now");
+
+ EXPECT_EQ("help", SpellcheckString("halp", words));
+
+ // barbados has an edit distance of 4 from bravado, so there's no suggestion.
+ EXPECT_TRUE(SpellcheckString("barbados", words).empty());
+}
diff --git a/chromium/tools/gn/substitution_list.cc b/chromium/tools/gn/substitution_list.cc
new file mode 100644
index 00000000000..517f327fe4d
--- /dev/null
+++ b/chromium/tools/gn/substitution_list.cc
@@ -0,0 +1,72 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/substitution_list.h"
+
+#include <stddef.h>
+#include <string.h>
+
+#include "tools/gn/value.h"
+
+SubstitutionList::SubstitutionList() {
+}
+
+SubstitutionList::SubstitutionList(const SubstitutionList& other) = default;
+
+SubstitutionList::~SubstitutionList() {
+}
+
+bool SubstitutionList::Parse(const Value& value, Err* err) {
+ if (!value.VerifyTypeIs(Value::LIST, err))
+ return false;
+
+ const std::vector<Value>& input_list = value.list_value();
+ list_.resize(input_list.size());
+ for (size_t i = 0; i < input_list.size(); i++) {
+ if (!list_[i].Parse(input_list[i], err))
+ return false;
+ }
+
+ SubstitutionBits bits;
+ FillRequiredTypes(&bits);
+ bits.FillVector(&required_types_);
+ return true;
+}
+
+bool SubstitutionList::Parse(const std::vector<std::string>& values,
+ const ParseNode* origin,
+ Err* err) {
+ list_.resize(values.size());
+ for (size_t i = 0; i < values.size(); i++) {
+ if (!list_[i].Parse(values[i], origin, err))
+ return false;
+ }
+
+ SubstitutionBits bits;
+ FillRequiredTypes(&bits);
+ bits.FillVector(&required_types_);
+ return true;
+}
+
+SubstitutionList SubstitutionList::MakeForTest(
+ const char* a,
+ const char* b,
+ const char* c) {
+ std::vector<std::string> input_strings;
+ input_strings.push_back(a);
+ if (b)
+ input_strings.push_back(b);
+ if (c)
+ input_strings.push_back(c);
+
+ Err err;
+ SubstitutionList result;
+ result.Parse(input_strings, nullptr, &err);
+ return result;
+}
+
+void SubstitutionList::FillRequiredTypes(SubstitutionBits* bits) const {
+ for (const auto& item : list_)
+ item.FillRequiredTypes(bits);
+}
diff --git a/chromium/tools/gn/substitution_list.h b/chromium/tools/gn/substitution_list.h
new file mode 100644
index 00000000000..eaf8a614bd6
--- /dev/null
+++ b/chromium/tools/gn/substitution_list.h
@@ -0,0 +1,47 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_SUBSTITUTION_LIST_H_
+#define TOOLS_GN_SUBSTITUTION_LIST_H_
+
+#include <string>
+#include <vector>
+
+#include "tools/gn/substitution_pattern.h"
+
+// Represents a list of strings with {{substitution_patterns}} in them.
+class SubstitutionList {
+ public:
+ SubstitutionList();
+ SubstitutionList(const SubstitutionList& other);
+ ~SubstitutionList();
+
+ bool Parse(const Value& value, Err* err);
+ bool Parse(const std::vector<std::string>& values,
+ const ParseNode* origin,
+ Err* err);
+
+ // Makes a SubstitutionList from the given hardcoded patterns.
+ static SubstitutionList MakeForTest(
+ const char* a,
+ const char* b = nullptr,
+ const char* c = nullptr);
+
+ const std::vector<SubstitutionPattern>& list() const { return list_; }
+
+ // Returns a list of all substitution types used by the patterns in this
+ // list, with the exception of LITERAL.
+ const std::vector<SubstitutionType>& required_types() const {
+ return required_types_;
+ }
+
+ void FillRequiredTypes(SubstitutionBits* bits) const;
+
+ private:
+ std::vector<SubstitutionPattern> list_;
+
+ std::vector<SubstitutionType> required_types_;
+};
+
+#endif // TOOLS_GN_SUBSTITUTION_LIST_H_
diff --git a/chromium/tools/gn/substitution_pattern.cc b/chromium/tools/gn/substitution_pattern.cc
new file mode 100644
index 00000000000..c8e54899844
--- /dev/null
+++ b/chromium/tools/gn/substitution_pattern.cc
@@ -0,0 +1,152 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/substitution_pattern.h"
+
+#include <stddef.h>
+
+#include "base/strings/string_number_conversions.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/err.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/value.h"
+
+SubstitutionPattern::Subrange::Subrange()
+ : type(SUBSTITUTION_LITERAL) {
+}
+
+SubstitutionPattern::Subrange::Subrange(SubstitutionType t,
+ const std::string& l)
+ : type(t),
+ literal(l) {
+}
+
+SubstitutionPattern::Subrange::~Subrange() {
+}
+
+SubstitutionPattern::SubstitutionPattern() : origin_(nullptr) {
+}
+
+SubstitutionPattern::SubstitutionPattern(const SubstitutionPattern& other) =
+ default;
+
+SubstitutionPattern::~SubstitutionPattern() {
+}
+
+bool SubstitutionPattern::Parse(const Value& value, Err* err) {
+ if (!value.VerifyTypeIs(Value::STRING, err))
+ return false;
+ return Parse(value.string_value(), value.origin(), err);
+}
+
+bool SubstitutionPattern::Parse(const std::string& str,
+ const ParseNode* origin,
+ Err* err) {
+ DCHECK(ranges_.empty()); // Should only be called once.
+
+ size_t cur = 0;
+ while (true) {
+ size_t next = str.find("{{", cur);
+
+ // Pick up everything from the previous spot to here as a literal.
+ if (next == std::string::npos) {
+ if (cur != str.size())
+ ranges_.push_back(Subrange(SUBSTITUTION_LITERAL, str.substr(cur)));
+ break;
+ } else if (next > cur) {
+ ranges_.push_back(
+ Subrange(SUBSTITUTION_LITERAL, str.substr(cur, next - cur)));
+ }
+
+ // Find which specific pattern this corresponds to.
+ bool found_match = false;
+ for (size_t i = SUBSTITUTION_FIRST_PATTERN;
+ i < SUBSTITUTION_NUM_TYPES; i++) {
+ const char* cur_pattern = kSubstitutionNames[i];
+ size_t cur_len = strlen(cur_pattern);
+ if (str.compare(next, cur_len, cur_pattern) == 0) {
+ ranges_.push_back(Subrange(static_cast<SubstitutionType>(i)));
+ cur = next + cur_len;
+ found_match = true;
+ break;
+ }
+ }
+
+ // Expect all occurrences of {{ to resolve to a pattern.
+ if (!found_match) {
+ // Could make this error message more friendly if it comes up a lot. But
+ // most people will not be writing substitution patterns and the code
+ // to exactly indicate the error location is tricky.
+ *err = Err(origin, "Unknown substitution pattern",
+ "Found a {{ at offset " +
+ base::SizeTToString(next) +
+ " and did not find a known substitution following it.");
+ ranges_.clear();
+ return false;
+ }
+ }
+
+ origin_ = origin;
+
+ // Fill required types vector.
+ SubstitutionBits bits;
+ FillRequiredTypes(&bits);
+ bits.FillVector(&required_types_);
+ return true;
+}
+
+// static
+SubstitutionPattern SubstitutionPattern::MakeForTest(const char* str) {
+ Err err;
+ SubstitutionPattern pattern;
+ CHECK(pattern.Parse(str, nullptr, &err)) << err.message();
+ return pattern;
+}
+
+std::string SubstitutionPattern::AsString() const {
+ std::string result;
+ for (const auto& elem : ranges_) {
+ if (elem.type == SUBSTITUTION_LITERAL)
+ result.append(elem.literal);
+ else
+ result.append(kSubstitutionNames[elem.type]);
+ }
+ return result;
+}
+
+void SubstitutionPattern::FillRequiredTypes(SubstitutionBits* bits) const {
+ for (const auto& elem : ranges_) {
+ if (elem.type != SUBSTITUTION_LITERAL)
+ bits->used[static_cast<size_t>(elem.type)] = true;
+ }
+}
+
+bool SubstitutionPattern::IsInOutputDir(const BuildSettings* build_settings,
+ Err* err) const {
+ if (ranges_.empty()) {
+ *err = Err(origin_, "This is empty but I was expecting an output file.");
+ return false;
+ }
+
+ if (ranges_[0].type == SUBSTITUTION_LITERAL) {
+ // If the first thing is a literal, it must start with the output dir.
+ if (!EnsureStringIsInOutputDir(
+ build_settings->build_dir(),
+ ranges_[0].literal, origin_, err))
+ return false;
+ } else {
+ // Otherwise, the first subrange must be a pattern that expands to
+ // something in the output directory.
+ if (!SubstitutionIsInOutputDir(ranges_[0].type)) {
+ *err = Err(origin_,
+ "File is not inside output directory.",
+ "The given file should be in the output directory. Normally you\n"
+ "would specify\n\"$target_out_dir/foo\" or "
+ "\"{{source_gen_dir}}/foo\".");
+ return false;
+ }
+ }
+
+ return true;
+}
diff --git a/chromium/tools/gn/substitution_pattern.h b/chromium/tools/gn/substitution_pattern.h
new file mode 100644
index 00000000000..538980660fc
--- /dev/null
+++ b/chromium/tools/gn/substitution_pattern.h
@@ -0,0 +1,78 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_SUBSTITUTION_PATTERN_H_
+#define TOOLS_GN_SUBSTITUTION_PATTERN_H_
+
+#include <string>
+#include <vector>
+
+#include "tools/gn/substitution_type.h"
+
+class BuildSettings;
+class Err;
+class ParseNode;
+class Value;
+
+// Represents a string with {{substitution_patterns}} in them.
+class SubstitutionPattern {
+ public:
+ struct Subrange {
+ Subrange();
+ explicit Subrange(SubstitutionType t, const std::string& l = std::string());
+ ~Subrange();
+
+ inline bool operator==(const Subrange& other) const {
+ return type == other.type && literal == other.literal;
+ }
+
+ SubstitutionType type;
+
+ // When type_ == LITERAL, this specifies the literal.
+ std::string literal;
+ };
+
+ SubstitutionPattern();
+ SubstitutionPattern(const SubstitutionPattern& other);
+ ~SubstitutionPattern();
+
+ // Parses the given string and fills in the pattern. The pattern must only
+ // be initialized once. On failure, returns false and sets the error.
+ bool Parse(const Value& value, Err* err);
+ bool Parse(const std::string& str, const ParseNode* origin, Err* err);
+
+ // Makes a pattern given a hardcoded string. Will assert if the string is
+ // not a valid pattern.
+ static SubstitutionPattern MakeForTest(const char* str);
+
+ // Returns the pattern as a string with substitutions in them.
+ std::string AsString() const;
+
+ // Sets the bits in the given vector corresponding to the substitutions used
+ // by this pattern. SUBSTITUTION_LITERAL is ignored.
+ void FillRequiredTypes(SubstitutionBits* bits) const;
+
+ // Checks whether this pattern resolves to something in the output directory
+ // for the given build settings. If not, returns false and fills in the given
+ // error.
+ bool IsInOutputDir(const BuildSettings* build_settings,
+ Err* err) const;
+
+ // Returns a vector listing the substitutions used by this pattern, not
+ // counting SUBSTITUTION_LITERAL.
+ const std::vector<SubstitutionType>& required_types() const {
+ return required_types_;
+ }
+
+ const std::vector<Subrange>& ranges() const { return ranges_; }
+ bool empty() const { return ranges_.empty(); }
+
+ private:
+ std::vector<Subrange> ranges_;
+ const ParseNode* origin_;
+
+ std::vector<SubstitutionType> required_types_;
+};
+
+#endif // TOOLS_GN_SUBSTITUTION_PATTERN_H_
diff --git a/chromium/tools/gn/substitution_pattern_unittest.cc b/chromium/tools/gn/substitution_pattern_unittest.cc
new file mode 100644
index 00000000000..9f606af0e7b
--- /dev/null
+++ b/chromium/tools/gn/substitution_pattern_unittest.cc
@@ -0,0 +1,49 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/err.h"
+#include "tools/gn/substitution_pattern.h"
+
+TEST(SubstitutionPattern, ParseLiteral) {
+ SubstitutionPattern pattern;
+ Err err;
+ EXPECT_TRUE(pattern.Parse("This is a literal", nullptr, &err));
+ EXPECT_FALSE(err.has_error());
+ ASSERT_EQ(1u, pattern.ranges().size());
+ EXPECT_EQ(SUBSTITUTION_LITERAL, pattern.ranges()[0].type);
+ EXPECT_EQ("This is a literal", pattern.ranges()[0].literal);
+}
+
+TEST(SubstitutionPattern, ParseComplex) {
+ SubstitutionPattern pattern;
+ Err err;
+ EXPECT_TRUE(pattern.Parse(
+ "AA{{source}}{{source_name_part}}BB{{source_file_part}}", nullptr, &err));
+ EXPECT_FALSE(err.has_error());
+ ASSERT_EQ(5u, pattern.ranges().size());
+
+ EXPECT_EQ(SUBSTITUTION_LITERAL, pattern.ranges()[0].type);
+ EXPECT_EQ("AA", pattern.ranges()[0].literal);
+ EXPECT_EQ(SUBSTITUTION_SOURCE, pattern.ranges()[1].type);
+ EXPECT_EQ(SUBSTITUTION_SOURCE_NAME_PART, pattern.ranges()[2].type);
+ EXPECT_EQ(SUBSTITUTION_LITERAL, pattern.ranges()[3].type);
+ EXPECT_EQ("BB", pattern.ranges()[3].literal);
+ EXPECT_EQ(SUBSTITUTION_SOURCE_FILE_PART, pattern.ranges()[4].type);
+}
+
+TEST(SubstitutionPattern, ParseErrors) {
+ SubstitutionPattern pattern;
+ Err err;
+ EXPECT_FALSE(pattern.Parse("AA{{source", nullptr, &err));
+ EXPECT_TRUE(err.has_error());
+
+ err = Err();
+ EXPECT_FALSE(pattern.Parse("{{source_of_evil}}", nullptr, &err));
+ EXPECT_TRUE(err.has_error());
+
+ err = Err();
+ EXPECT_FALSE(pattern.Parse("{{source{{source}}", nullptr, &err));
+ EXPECT_TRUE(err.has_error());
+}
diff --git a/chromium/tools/gn/substitution_type.cc b/chromium/tools/gn/substitution_type.cc
new file mode 100644
index 00000000000..e1ea14de947
--- /dev/null
+++ b/chromium/tools/gn/substitution_type.cc
@@ -0,0 +1,230 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/substitution_type.h"
+
+#include <stddef.h>
+#include <stdlib.h>
+
+#include "tools/gn/err.h"
+
+const char* kSubstitutionNames[SUBSTITUTION_NUM_TYPES] = {
+ "<<literal>>", // SUBSTITUTION_LITERAL
+
+ "{{source}}", // SUBSTITUTION_SOURCE
+ "{{output}}", // SUBSTITUTION_OUTPUT
+
+ "{{source_name_part}}", // SUBSTITUTION_SOURCE_NAME_PART
+ "{{source_file_part}}", // SUBSTITUTION_SOURCE_FILE_PART
+ "{{source_dir}}", // SUBSTITUTION_SOURCE_DIR
+ "{{source_root_relative_dir}}", // SUBSTITUTION_SOURCE_ROOT_RELATIVE_DIR
+ "{{source_gen_dir}}", // SUBSTITUTION_SOURCE_GEN_DIR
+ "{{source_out_dir}}", // SUBSTITUTION_SOURCE_OUT_DIR
+
+ "{{label}}", // SUBSTITUTION_LABEL
+ "{{label_name}}", // SUBSTITUTION_LABEL_NAME
+ "{{root_gen_dir}}", // SUBSTITUTION_ROOT_GEN_DIR
+ "{{root_out_dir}}", // SUBSTITUTION_ROOT_OUT_DIR
+ "{{target_gen_dir}}", // SUBSTITUTION_TARGET_GEN_DIR
+ "{{target_out_dir}}", // SUBSTITUTION_TARGET_OUT_DIR
+ "{{target_output_name}}", // SUBSTITUTION_TARGET_OUTPUT_NAME
+
+ "{{asmflags}}", // SUBSTITUTION_ASMFLAGS
+ "{{cflags}}", // SUBSTITUTION_CFLAGS
+ "{{cflags_c}}", // SUBSTITUTION_CFLAGS_C
+ "{{cflags_cc}}", // SUBSTITUTION_CFLAGS_CC
+ "{{cflags_objc}}", // SUBSTITUTION_CFLAGS_OBJC
+ "{{cflags_objcc}}", // SUBSTITUTION_CFLAGS_OBJCC
+ "{{defines}}", // SUBSTITUTION_DEFINES
+ "{{include_dirs}}", // SUBSTITUTION_INCLUDE_DIRS
+
+ "{{inputs}}", // SUBSTITUTION_LINKER_INPUTS
+ "{{inputs_newline}}", // SUBSTITUTION_LINKER_INPUTS_NEWLINE
+ "{{ldflags}}", // SUBSTITUTION_LDFLAGS
+ "{{libs}}", // SUBSTITUTION_LIBS
+ "{{output_extension}}", // SUBSTITUTION_OUTPUT_EXTENSION
+ "{{solibs}}", // SUBSTITUTION_SOLIBS
+
+ "{{bundle_root_dir}}", // SUBSTITUTION_BUNDLE_ROOT_DIR
+ "{{bundle_resources_dir}}", // SUBSTITUTION_BUNDLE_RESOURCES_DIR
+ "{{bundle_executable_dir}}", // SUBSTITUTION_BUNDLE_EXECUTABLE_DIR
+ "{{bundle_plugins_dir}}", // SUBSTITUTION_BUNDLE_PLUGINS_DIR
+
+ "{{response_file_name}}", // SUBSTITUTION_RSP_FILE_NAME
+};
+
+const char* kSubstitutionNinjaNames[SUBSTITUTION_NUM_TYPES] = {
+ nullptr, // SUBSTITUTION_LITERAL
+
+ "in", // SUBSTITUTION_SOURCE
+ "out", // SUBSTITUTION_OUTPUT
+
+ "source_name_part", // SUBSTITUTION_SOURCE_NAME_PART
+ "source_file_part", // SUBSTITUTION_SOURCE_FILE_PART
+ "source_dir", // SUBSTITUTION_SOURCE_DIR
+ "source_root_relative_dir", // SUBSTITUTION_SOURCE_ROOT_RELATIVE_DIR
+ "source_gen_dir", // SUBSTITUTION_SOURCE_GEN_DIR
+ "source_out_dir", // SUBSTITUTION_SOURCE_OUT_DIR
+
+ "label", // SUBSTITUTION_LABEL
+ "label_name", // SUBSTITUTION_LABEL_NAME
+ "root_gen_dir", // SUBSTITUTION_ROOT_GEN_DIR
+ "root_out_dir", // SUBSTITUTION_ROOT_OUT_DIR
+ "target_gen_dir", // SUBSTITUTION_TARGET_GEN_DIR
+ "target_out_dir", // SUBSTITUTION_TARGET_OUT_DIR
+ "target_output_name", // SUBSTITUTION_TARGET_OUTPUT_NAME
+
+ "asmflags", // SUBSTITUTION_ASMFLAGS
+ "cflags", // SUBSTITUTION_CFLAGS
+ "cflags_c", // SUBSTITUTION_CFLAGS_C
+ "cflags_cc", // SUBSTITUTION_CFLAGS_CC
+ "cflags_objc", // SUBSTITUTION_CFLAGS_OBJC
+ "cflags_objcc", // SUBSTITUTION_CFLAGS_OBJCC
+ "defines", // SUBSTITUTION_DEFINES
+ "include_dirs", // SUBSTITUTION_INCLUDE_DIRS
+
+ // LINKER_INPUTS expands to the same Ninja var as SUBSTITUTION_SOURCE. These
+ // are used in different contexts and are named differently to keep things
+ // clear, but they both expand to the "set of input files" for a build rule.
+ "in", // SUBSTITUTION_LINKER_INPUTS
+ "in_newline", // SUBSTITUTION_LINKER_INPUTS_NEWLINE
+ "ldflags", // SUBSTITUTION_LDFLAGS
+ "libs", // SUBSTITUTION_LIBS
+ "output_extension", // SUBSTITUTION_OUTPUT_EXTENSION
+ "solibs", // SUBSTITUTION_SOLIBS
+
+ "bundle_root_dir", // SUBSTITUTION_BUNDLE_ROOT_DIR
+ "bundle_resources_dir", // SUBSTITUTION_BUNDLE_RESOURCES_DIR
+ "bundle_executable_dir", // SUBSTITUTION_BUNDLE_EXECUTABLE_DIR
+ "bundle_plugins_dir", // SUBSTITUTION_BUNDLE_PLUGINS_DIR
+
+ "rspfile", // SUBSTITUTION_RSP_FILE_NAME
+};
+
+SubstitutionBits::SubstitutionBits() : used() {
+}
+
+void SubstitutionBits::MergeFrom(const SubstitutionBits& other) {
+ for (size_t i = 0; i < SUBSTITUTION_NUM_TYPES; i++)
+ used[i] |= other.used[i];
+}
+
+void SubstitutionBits::FillVector(std::vector<SubstitutionType>* vect) const {
+ for (size_t i = SUBSTITUTION_FIRST_PATTERN; i < SUBSTITUTION_NUM_TYPES; i++) {
+ if (used[i])
+ vect->push_back(static_cast<SubstitutionType>(i));
+ }
+}
+
+bool SubstitutionIsInOutputDir(SubstitutionType type) {
+ return type == SUBSTITUTION_SOURCE_GEN_DIR ||
+ type == SUBSTITUTION_SOURCE_OUT_DIR ||
+ type == SUBSTITUTION_ROOT_GEN_DIR ||
+ type == SUBSTITUTION_ROOT_OUT_DIR ||
+ type == SUBSTITUTION_TARGET_GEN_DIR ||
+ type == SUBSTITUTION_TARGET_OUT_DIR;
+}
+
+bool SubstitutionIsInBundleDir(SubstitutionType type) {
+ return type == SUBSTITUTION_BUNDLE_ROOT_DIR ||
+ type == SUBSTITUTION_BUNDLE_RESOURCES_DIR ||
+ type == SUBSTITUTION_BUNDLE_EXECUTABLE_DIR ||
+ type == SUBSTITUTION_BUNDLE_PLUGINS_DIR;
+}
+
+bool IsValidBundleDataSubstitution(SubstitutionType type) {
+ return type == SUBSTITUTION_LITERAL ||
+ type == SUBSTITUTION_SOURCE_NAME_PART ||
+ type == SUBSTITUTION_SOURCE_FILE_PART ||
+ type == SUBSTITUTION_SOURCE_ROOT_RELATIVE_DIR ||
+ type == SUBSTITUTION_BUNDLE_ROOT_DIR ||
+ type == SUBSTITUTION_BUNDLE_RESOURCES_DIR ||
+ type == SUBSTITUTION_BUNDLE_EXECUTABLE_DIR ||
+ type == SUBSTITUTION_BUNDLE_PLUGINS_DIR;
+}
+
+bool IsValidSourceSubstitution(SubstitutionType type) {
+ return type == SUBSTITUTION_LITERAL ||
+ type == SUBSTITUTION_SOURCE ||
+ type == SUBSTITUTION_SOURCE_NAME_PART ||
+ type == SUBSTITUTION_SOURCE_FILE_PART ||
+ type == SUBSTITUTION_SOURCE_DIR ||
+ type == SUBSTITUTION_SOURCE_ROOT_RELATIVE_DIR ||
+ type == SUBSTITUTION_SOURCE_GEN_DIR ||
+ type == SUBSTITUTION_SOURCE_OUT_DIR;
+}
+
+bool IsValidToolSubstitution(SubstitutionType type) {
+ return type == SUBSTITUTION_LITERAL ||
+ type == SUBSTITUTION_OUTPUT ||
+ type == SUBSTITUTION_LABEL ||
+ type == SUBSTITUTION_LABEL_NAME ||
+ type == SUBSTITUTION_ROOT_GEN_DIR ||
+ type == SUBSTITUTION_ROOT_OUT_DIR ||
+ type == SUBSTITUTION_TARGET_GEN_DIR ||
+ type == SUBSTITUTION_TARGET_OUT_DIR ||
+ type == SUBSTITUTION_TARGET_OUTPUT_NAME;
+}
+
+bool IsValidCompilerSubstitution(SubstitutionType type) {
+ return IsValidToolSubstitution(type) ||
+ IsValidSourceSubstitution(type) ||
+ type == SUBSTITUTION_SOURCE ||
+ type == SUBSTITUTION_ASMFLAGS ||
+ type == SUBSTITUTION_CFLAGS ||
+ type == SUBSTITUTION_CFLAGS_C ||
+ type == SUBSTITUTION_CFLAGS_CC ||
+ type == SUBSTITUTION_CFLAGS_OBJC ||
+ type == SUBSTITUTION_CFLAGS_OBJCC ||
+ type == SUBSTITUTION_DEFINES ||
+ type == SUBSTITUTION_INCLUDE_DIRS;
+}
+
+bool IsValidCompilerOutputsSubstitution(SubstitutionType type) {
+ // All tool types except "output" (which would be infinitely recursive).
+ return (IsValidToolSubstitution(type) && type != SUBSTITUTION_OUTPUT) ||
+ IsValidSourceSubstitution(type);
+}
+
+bool IsValidLinkerSubstitution(SubstitutionType type) {
+ return IsValidToolSubstitution(type) ||
+ type == SUBSTITUTION_LINKER_INPUTS ||
+ type == SUBSTITUTION_LINKER_INPUTS_NEWLINE ||
+ type == SUBSTITUTION_LDFLAGS ||
+ type == SUBSTITUTION_LIBS ||
+ type == SUBSTITUTION_OUTPUT_EXTENSION ||
+ type == SUBSTITUTION_SOLIBS;
+}
+
+bool IsValidLinkerOutputsSubstitution(SubstitutionType type) {
+ // All valid compiler outputs plus the output extension.
+ return IsValidCompilerOutputsSubstitution(type) ||
+ type == SUBSTITUTION_OUTPUT_EXTENSION;
+}
+
+bool IsValidCopySubstitution(SubstitutionType type) {
+ return IsValidToolSubstitution(type) ||
+ type == SUBSTITUTION_SOURCE;
+}
+
+bool IsValidCompileXCassetsSubstitution(SubstitutionType type) {
+ return IsValidToolSubstitution(type) ||
+ type == SUBSTITUTION_LINKER_INPUTS;
+}
+
+bool EnsureValidSourcesSubstitutions(
+ const std::vector<SubstitutionType>& types,
+ const ParseNode* origin,
+ Err* err) {
+ for (size_t i = 0; i < types.size(); i++) {
+ if (!IsValidSourceSubstitution(types[i])) {
+ *err = Err(origin, "Invalid substitution type.",
+ "The substitution " + std::string(kSubstitutionNames[types[i]]) +
+ " isn't valid for something\n"
+ "operating on a source file such as this.");
+ return false;
+ }
+ }
+ return true;
+}
diff --git a/chromium/tools/gn/substitution_type.h b/chromium/tools/gn/substitution_type.h
new file mode 100644
index 00000000000..3f7ec971abd
--- /dev/null
+++ b/chromium/tools/gn/substitution_type.h
@@ -0,0 +1,129 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_SUBSTITUTION_TYPE_H_
+#define TOOLS_GN_SUBSTITUTION_TYPE_H_
+
+#include <vector>
+
+class Err;
+class ParseNode;
+
+// Keep kSubstitutionNames, kSubstitutionNinjaNames and the
+// IsValid*Substitution functions in sync if you change anything here.
+enum SubstitutionType {
+ SUBSTITUTION_LITERAL = 0,
+
+  // The index of the first pattern. To loop over all patterns, go from here
+ // until NUM_TYPES.
+ SUBSTITUTION_FIRST_PATTERN,
+
+ // These map to Ninja's {in} and {out} variables.
+ SUBSTITUTION_SOURCE = SUBSTITUTION_FIRST_PATTERN, // {{source}}
+ SUBSTITUTION_OUTPUT, // {{output}}
+
+ // Valid for all compiler tools.
+ SUBSTITUTION_SOURCE_NAME_PART, // {{source_name_part}}
+ SUBSTITUTION_SOURCE_FILE_PART, // {{source_file_part}}
+ SUBSTITUTION_SOURCE_DIR, // {{source_dir}}
+ SUBSTITUTION_SOURCE_ROOT_RELATIVE_DIR, // {{root_relative_dir}}
+ SUBSTITUTION_SOURCE_GEN_DIR, // {{source_gen_dir}}
+ SUBSTITUTION_SOURCE_OUT_DIR, // {{source_out_dir}}
+
+ // Valid for all compiler and linker tools. These depend on the target and
+ // do not vary on a per-file basis.
+ SUBSTITUTION_LABEL, // {{label}}
+ SUBSTITUTION_LABEL_NAME, // {{label_name}}
+ SUBSTITUTION_ROOT_GEN_DIR, // {{root_gen_dir}}
+ SUBSTITUTION_ROOT_OUT_DIR, // {{root_out_dir}}
+ SUBSTITUTION_TARGET_GEN_DIR, // {{target_gen_dir}}
+ SUBSTITUTION_TARGET_OUT_DIR, // {{target_out_dir}}
+ SUBSTITUTION_TARGET_OUTPUT_NAME, // {{target_output_name}}
+
+ // Valid for compiler tools.
+ SUBSTITUTION_ASMFLAGS, // {{asmflags}}
+ SUBSTITUTION_CFLAGS, // {{cflags}}
+ SUBSTITUTION_CFLAGS_C, // {{cflags_c}}
+ SUBSTITUTION_CFLAGS_CC, // {{cflags_cc}}
+ SUBSTITUTION_CFLAGS_OBJC, // {{cflags_objc}}
+ SUBSTITUTION_CFLAGS_OBJCC, // {{cflags_objcc}}
+ SUBSTITUTION_DEFINES, // {{defines}}
+ SUBSTITUTION_INCLUDE_DIRS, // {{include_dirs}}
+
+ // Valid for linker tools.
+ SUBSTITUTION_LINKER_INPUTS, // {{inputs}}
+ SUBSTITUTION_LINKER_INPUTS_NEWLINE, // {{inputs_newline}}
+ SUBSTITUTION_LDFLAGS, // {{ldflags}}
+ SUBSTITUTION_LIBS, // {{libs}}
+ SUBSTITUTION_OUTPUT_EXTENSION, // {{output_extension}}
+ SUBSTITUTION_SOLIBS, // {{solibs}}
+
+ // Valid for bundle_data targets.
+ SUBSTITUTION_BUNDLE_ROOT_DIR, // {{bundle_root_dir}}
+ SUBSTITUTION_BUNDLE_RESOURCES_DIR, // {{bundle_resources_dir}}
+ SUBSTITUTION_BUNDLE_EXECUTABLE_DIR, // {{bundle_executable_dir}}
+ SUBSTITUTION_BUNDLE_PLUGINS_DIR, // {{bundle_plugins_dir}}
+
+ // Used only for the args of actions.
+ SUBSTITUTION_RSP_FILE_NAME, // {{response_file_name}}
+
+ SUBSTITUTION_NUM_TYPES // Must be last.
+};
+
+// An array of size SUBSTITUTION_NUM_TYPES that lists the names of the
+// substitution patterns, including the curly braces. So, for example,
+// kSubstitutionNames[SUBSTITUTION_SOURCE] == "{{source}}".
+extern const char* kSubstitutionNames[SUBSTITUTION_NUM_TYPES];
+
+// Ninja variables corresponding to each substitution. These do not include
+// the dollar sign.
+extern const char* kSubstitutionNinjaNames[SUBSTITUTION_NUM_TYPES];
+
+// A wrapper around an array of flags indicating whether a given substitution
+// type is required in some context. By convention, the LITERAL type bit is
+// not set.
+struct SubstitutionBits {
+ SubstitutionBits();
+
+ // Merges any bits set in the given "other" to this one. This object will
+ // then be the union of all bits in the two lists.
+ void MergeFrom(const SubstitutionBits& other);
+
+ // Converts the substitution type bitfield (with a true set for each required
+ // item) to a vector of the types listed. Does not include LITERAL.
+ void FillVector(std::vector<SubstitutionType>* vect) const;
+
+ bool used[SUBSTITUTION_NUM_TYPES];
+};
+
+// Returns true if the given substitution pattern references the output
+// directory. This is used to check strings that begin with a substitution to
+// verify that they produce a file in the output directory.
+bool SubstitutionIsInOutputDir(SubstitutionType type);
+
+// Returns true if the given substitution pattern references the bundle
+// directory. This is used to check strings that begin with a substitution to
+// verify that they produce a file in the bundle directory.
+bool SubstitutionIsInBundleDir(SubstitutionType type);
+
+// Returns true if the given substitution is valid for the named purpose.
+bool IsValidBundleDataSubstitution(SubstitutionType type);
+bool IsValidSourceSubstitution(SubstitutionType type);
+// Both compiler and linker tools.
+bool IsValidToolSubstitution(SubstitutionType type);
+bool IsValidCompilerSubstitution(SubstitutionType type);
+bool IsValidCompilerOutputsSubstitution(SubstitutionType type);
+bool IsValidLinkerSubstitution(SubstitutionType type);
+bool IsValidLinkerOutputsSubstitution(SubstitutionType type);
+bool IsValidCopySubstitution(SubstitutionType type);
+bool IsValidCompileXCassetsSubstitution(SubstitutionType type);
+
+// Like the "IsValid..." version above but checks a list of types and sets a
+// an error blaming the given source if the test fails.
+bool EnsureValidSourcesSubstitutions(
+ const std::vector<SubstitutionType>& types,
+ const ParseNode* origin,
+ Err* err);
+
+#endif // TOOLS_GN_SUBSTITUTION_TYPE_H_
diff --git a/chromium/tools/gn/substitution_writer.cc b/chromium/tools/gn/substitution_writer.cc
new file mode 100644
index 00000000000..23d6914ac4e
--- /dev/null
+++ b/chromium/tools/gn/substitution_writer.cc
@@ -0,0 +1,563 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/substitution_writer.h"
+
+#include "tools/gn/build_settings.h"
+#include "tools/gn/escape.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/output_file.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/source_file.h"
+#include "tools/gn/string_utils.h"
+#include "tools/gn/substitution_list.h"
+#include "tools/gn/substitution_pattern.h"
+#include "tools/gn/target.h"
+
+namespace {
+
+// Sets the given directory string to the destination, trimming any trailing
+// slash from the directory (SourceDirs and OutputFiles representing
+// directories will end in a trailing slash). If the directory is empty,
+// it will be replaced with a ".".
+void SetDirOrDotWithNoSlash(const std::string& dir, std::string* dest) {
+ if (!dir.empty() && dir[dir.size() - 1] == '/')
+ dest->assign(dir.data(), dir.size() - 1);
+ else
+ dest->assign(dir);
+
+ if (dest->empty())
+ dest->push_back('.');
+}
+
+} // namespace
+
+const char kSourceExpansion_Help[] =
+ "How Source Expansion Works\n"
+ "\n"
+ " Source expansion is used for the action_foreach and copy target types\n"
+ " to map source file names to output file names or arguments.\n"
+ "\n"
+ " To perform source expansion in the outputs, GN maps every entry in the\n"
+ " sources to every entry in the outputs list, producing the cross\n"
+ " product of all combinations, expanding placeholders (see below).\n"
+ "\n"
+ " Source expansion in the args works similarly, but performing the\n"
+ " placeholder substitution produces a different set of arguments for\n"
+ " each invocation of the script.\n"
+ "\n"
+ " If no placeholders are found, the outputs or args list will be treated\n"
+ " as a static list of literal file names that do not depend on the\n"
+ " sources.\n"
+ "\n"
+ " See \"gn help copy\" and \"gn help action_foreach\" for more on how\n"
+ " this is applied.\n"
+ "\n"
+ "Placeholders\n"
+ "\n"
+ " {{source}}\n"
+ " The name of the source file including directory (*). This will\n"
+ " generally be used for specifying inputs to a script in the\n"
+ " \"args\" variable.\n"
+ " \"//foo/bar/baz.txt\" => \"../../foo/bar/baz.txt\"\n"
+ "\n"
+ " {{source_file_part}}\n"
+ " The file part of the source including the extension.\n"
+ " \"//foo/bar/baz.txt\" => \"baz.txt\"\n"
+ "\n"
+ " {{source_name_part}}\n"
+ " The filename part of the source file with no directory or\n"
+ " extension. This will generally be used for specifying a\n"
+ " transformation from a source file to a destination file with the\n"
+ " same name but different extension.\n"
+ " \"//foo/bar/baz.txt\" => \"baz\"\n"
+ "\n"
+ " {{source_dir}}\n"
+ " The directory (*) containing the source file with no\n"
+ " trailing slash.\n"
+ " \"//foo/bar/baz.txt\" => \"../../foo/bar\"\n"
+ "\n"
+ " {{source_root_relative_dir}}\n"
+ " The path to the source file's directory relative to the source\n"
+ " root, with no leading \"//\" or trailing slashes. If the path is\n"
+ " system-absolute, (beginning in a single slash) this will just\n"
+ " return the path with no trailing slash. This value will always\n"
+ " be the same, regardless of whether it appears in the \"outputs\"\n"
+ " or \"args\" section.\n"
+ " \"//foo/bar/baz.txt\" => \"foo/bar\"\n"
+ "\n"
+ " {{source_gen_dir}}\n"
+ " The generated file directory (*) corresponding to the source\n"
+ " file's path. This will be different than the target's generated\n"
+ " file directory if the source file is in a different directory\n"
+ " than the BUILD.gn file.\n"
+ " \"//foo/bar/baz.txt\" => \"gen/foo/bar\"\n"
+ "\n"
+ " {{source_out_dir}}\n"
+ " The object file directory (*) corresponding to the source file's\n"
+ " path, relative to the build directory. this us be different than\n"
+ " the target's out directory if the source file is in a different\n"
+ " directory than the build.gn file.\n"
+ " \"//foo/bar/baz.txt\" => \"obj/foo/bar\"\n"
+ "\n"
+ "(*) Note on directories\n"
+ "\n"
+ " Paths containing directories (except the source_root_relative_dir)\n"
+ " will be different depending on what context the expansion is evaluated\n"
+ " in. Generally it should \"just work\" but it means you can't\n"
+ " concatenate strings containing these values with reasonable results.\n"
+ "\n"
+ " Details: source expansions can be used in the \"outputs\" variable,\n"
+ " the \"args\" variable, and in calls to \"process_file_template\". The\n"
+ " \"args\" are passed to a script which is run from the build directory,\n"
+ " so these directories will relative to the build directory for the\n"
+ " script to find. In the other cases, the directories will be source-\n"
+ " absolute (begin with a \"//\") because the results of those expansions\n"
+ " will be handled by GN internally.\n"
+ "\n"
+ "Examples\n"
+ "\n"
+ " Non-varying outputs:\n"
+ " action(\"hardcoded_outputs\") {\n"
+ " sources = [ \"input1.idl\", \"input2.idl\" ]\n"
+ " outputs = [ \"$target_out_dir/output1.dat\",\n"
+ " \"$target_out_dir/output2.dat\" ]\n"
+ " }\n"
+ " The outputs in this case will be the two literal files given.\n"
+ "\n"
+ " Varying outputs:\n"
+ " action_foreach(\"varying_outputs\") {\n"
+ " sources = [ \"input1.idl\", \"input2.idl\" ]\n"
+ " outputs = [ \"{{source_gen_dir}}/{{source_name_part}}.h\",\n"
+ " \"{{source_gen_dir}}/{{source_name_part}}.cc\" ]\n"
+ " }\n"
+ " Performing source expansion will result in the following output names:\n"
+ " //out/Debug/obj/mydirectory/input1.h\n"
+ " //out/Debug/obj/mydirectory/input1.cc\n"
+ " //out/Debug/obj/mydirectory/input2.h\n"
+ " //out/Debug/obj/mydirectory/input2.cc\n";
+
+// static
+void SubstitutionWriter::WriteWithNinjaVariables(
+ const SubstitutionPattern& pattern,
+ const EscapeOptions& escape_options,
+ std::ostream& out) {
+ // The result needs to be quoted as if it was one string, but the $ for
+ // the inserted Ninja variables can't be escaped. So write to a buffer with
+ // no quoting, and then quote the whole thing if necessary.
+ EscapeOptions no_quoting(escape_options);
+ no_quoting.inhibit_quoting = true;
+
+ bool needs_quotes = false;
+ std::string result;
+ for (const auto& range : pattern.ranges()) {
+ if (range.type == SUBSTITUTION_LITERAL) {
+ result.append(EscapeString(range.literal, no_quoting, &needs_quotes));
+ } else {
+ result.append("${");
+ result.append(kSubstitutionNinjaNames[range.type]);
+ result.append("}");
+ }
+ }
+
+ if (needs_quotes && !escape_options.inhibit_quoting)
+ out << "\"" << result << "\"";
+ else
+ out << result;
+}
+
+// static
+void SubstitutionWriter::GetListAsSourceFiles(
+ const SubstitutionList& list,
+ std::vector<SourceFile>* output) {
+ for (const auto& pattern : list.list()) {
+ CHECK(pattern.ranges().size() == 1 &&
+ pattern.ranges()[0].type == SUBSTITUTION_LITERAL)
+ << "The substitution pattern \""
+ << pattern.AsString()
+ << "\" was expected to be a literal with no {{substitutions}}.";
+ const std::string& literal = pattern.ranges()[0].literal;
+ CHECK(literal.size() >= 1 && literal[0] == '/')
+ << "The result of the pattern \""
+ << pattern.AsString()
+ << "\" was not an absolute path.";
+ output->push_back(SourceFile(literal));
+ }
+}
+
+// static
+void SubstitutionWriter::GetListAsOutputFiles(
+ const Settings* settings,
+ const SubstitutionList& list,
+ std::vector<OutputFile>* output) {
+ std::vector<SourceFile> output_as_sources;
+ GetListAsSourceFiles(list, &output_as_sources);
+ for (const auto& file : output_as_sources)
+ output->push_back(OutputFile(settings->build_settings(), file));
+}
+
+// static
+SourceFile SubstitutionWriter::ApplyPatternToSource(
+ const Settings* settings,
+ const SubstitutionPattern& pattern,
+ const SourceFile& source) {
+ std::string result_value = ApplyPatternToSourceAsString(
+ settings, pattern, source);
+ CHECK(!result_value.empty() && result_value[0] == '/')
+ << "The result of the pattern \""
+ << pattern.AsString()
+ << "\" was not a path beginning in \"/\" or \"//\".";
+ return SourceFile(SourceFile::SWAP_IN, &result_value);
+}
+
+// static
+std::string SubstitutionWriter::ApplyPatternToSourceAsString(
+ const Settings* settings,
+ const SubstitutionPattern& pattern,
+ const SourceFile& source) {
+ std::string result_value;
+ for (const auto& subrange : pattern.ranges()) {
+ if (subrange.type == SUBSTITUTION_LITERAL) {
+ result_value.append(subrange.literal);
+ } else {
+ result_value.append(
+ GetSourceSubstitution(settings, source, subrange.type,
+ OUTPUT_ABSOLUTE, SourceDir()));
+ }
+ }
+ return result_value;
+}
+
+// static
+OutputFile SubstitutionWriter::ApplyPatternToSourceAsOutputFile(
+ const Settings* settings,
+ const SubstitutionPattern& pattern,
+ const SourceFile& source) {
+ SourceFile result_as_source = ApplyPatternToSource(settings, pattern, source);
+ return OutputFile(settings->build_settings(), result_as_source);
+}
+
+// static
+void SubstitutionWriter::ApplyListToSource(
+ const Settings* settings,
+ const SubstitutionList& list,
+ const SourceFile& source,
+ std::vector<SourceFile>* output) {
+ for (const auto& item : list.list())
+ output->push_back(ApplyPatternToSource(settings, item, source));
+}
+
+// static
+void SubstitutionWriter::ApplyListToSourceAsString(
+ const Settings* settings,
+ const SubstitutionList& list,
+ const SourceFile& source,
+ std::vector<std::string>* output) {
+ for (const auto& item : list.list())
+ output->push_back(ApplyPatternToSourceAsString(settings, item, source));
+}
+
+// static
+void SubstitutionWriter::ApplyListToSourceAsOutputFile(
+ const Settings* settings,
+ const SubstitutionList& list,
+ const SourceFile& source,
+ std::vector<OutputFile>* output) {
+ for (const auto& item : list.list())
+ output->push_back(ApplyPatternToSourceAsOutputFile(settings, item, source));
+}
+
+// static
+void SubstitutionWriter::ApplyListToSources(
+ const Settings* settings,
+ const SubstitutionList& list,
+ const std::vector<SourceFile>& sources,
+ std::vector<SourceFile>* output) {
+ output->clear();
+ for (const auto& source : sources)
+ ApplyListToSource(settings, list, source, output);
+}
+
+// static
+void SubstitutionWriter::ApplyListToSourcesAsString(
+ const Settings* settings,
+ const SubstitutionList& list,
+ const std::vector<SourceFile>& sources,
+ std::vector<std::string>* output) {
+ output->clear();
+ for (const auto& source : sources)
+ ApplyListToSourceAsString(settings, list, source, output);
+}
+
+// static
+void SubstitutionWriter::ApplyListToSourcesAsOutputFile(
+ const Settings* settings,
+ const SubstitutionList& list,
+ const std::vector<SourceFile>& sources,
+ std::vector<OutputFile>* output) {
+ output->clear();
+ for (const auto& source : sources)
+ ApplyListToSourceAsOutputFile(settings, list, source, output);
+}
+
+// static
+void SubstitutionWriter::WriteNinjaVariablesForSource(
+ const Settings* settings,
+ const SourceFile& source,
+ const std::vector<SubstitutionType>& types,
+ const EscapeOptions& escape_options,
+ std::ostream& out) {
+ for (const auto& type : types) {
+ // Don't write SOURCE since that just maps to Ninja's $in variable, which
+ // is implicit in the rule. RESPONSE_FILE_NAME is written separately
+ // only when writing target rules since it can never be used in any
+ // other context (like process_file_template).
+ if (type != SUBSTITUTION_SOURCE && type != SUBSTITUTION_RSP_FILE_NAME) {
+ out << " " << kSubstitutionNinjaNames[type] << " = ";
+ EscapeStringToStream(
+ out,
+ GetSourceSubstitution(settings, source, type, OUTPUT_RELATIVE,
+ settings->build_settings()->build_dir()),
+ escape_options);
+ out << std::endl;
+ }
+ }
+}
+
+// static
+std::string SubstitutionWriter::GetSourceSubstitution(
+ const Settings* settings,
+ const SourceFile& source,
+ SubstitutionType type,
+ OutputStyle output_style,
+ const SourceDir& relative_to) {
+ std::string to_rebase;
+ switch (type) {
+ case SUBSTITUTION_SOURCE:
+ if (source.is_system_absolute())
+ return source.value();
+ to_rebase = source.value();
+ break;
+
+ case SUBSTITUTION_SOURCE_NAME_PART:
+ return FindFilenameNoExtension(&source.value()).as_string();
+
+ case SUBSTITUTION_SOURCE_FILE_PART:
+ return source.GetName();
+
+ case SUBSTITUTION_SOURCE_DIR:
+ if (source.is_system_absolute())
+ return DirectoryWithNoLastSlash(source.GetDir());
+ to_rebase = DirectoryWithNoLastSlash(source.GetDir());
+ break;
+
+ case SUBSTITUTION_SOURCE_ROOT_RELATIVE_DIR:
+ if (source.is_system_absolute())
+ return DirectoryWithNoLastSlash(source.GetDir());
+ return RebasePath(
+ DirectoryWithNoLastSlash(source.GetDir()), SourceDir("//"),
+ settings->build_settings()->root_path_utf8());
+
+ case SUBSTITUTION_SOURCE_GEN_DIR:
+ to_rebase = DirectoryWithNoLastSlash(
+ GetGenDirForSourceDir(settings, source.GetDir()));
+ break;
+
+ case SUBSTITUTION_SOURCE_OUT_DIR:
+ to_rebase = DirectoryWithNoLastSlash(
+ GetOutputDirForSourceDir(settings, source.GetDir()));
+ break;
+
+ default:
+ NOTREACHED()
+ << "Unsupported substitution for this function: "
+ << kSubstitutionNames[type];
+ return std::string();
+ }
+
+ // If we get here, the result is a path that should be made relative or
+ // absolute according to the output_style. Other cases (just file name or
+ // extension extraction) will have been handled via early return above.
+ if (output_style == OUTPUT_ABSOLUTE)
+ return to_rebase;
+ return RebasePath(to_rebase, relative_to,
+ settings->build_settings()->root_path_utf8());
+}
+
+// static
+OutputFile SubstitutionWriter::ApplyPatternToTargetAsOutputFile(
+ const Target* target,
+ const Tool* tool,
+ const SubstitutionPattern& pattern) {
+ std::string result_value;
+ for (const auto& subrange : pattern.ranges()) {
+ if (subrange.type == SUBSTITUTION_LITERAL) {
+ result_value.append(subrange.literal);
+ } else {
+ std::string subst;
+ CHECK(GetTargetSubstitution(target, subrange.type, &subst));
+ result_value.append(subst);
+ }
+ }
+ return OutputFile(result_value);
+}
+
+// static
+void SubstitutionWriter::ApplyListToTargetAsOutputFile(
+ const Target* target,
+ const Tool* tool,
+ const SubstitutionList& list,
+ std::vector<OutputFile>* output) {
+ for (const auto& item : list.list())
+ output->push_back(ApplyPatternToTargetAsOutputFile(target, tool, item));
+}
+
+// static
+bool SubstitutionWriter::GetTargetSubstitution(
+ const Target* target,
+ SubstitutionType type,
+ std::string* result) {
+ switch (type) {
+ case SUBSTITUTION_LABEL:
+ // Only include the toolchain for non-default toolchains.
+ *result = target->label().GetUserVisibleName(
+ !target->settings()->is_default());
+ break;
+ case SUBSTITUTION_LABEL_NAME:
+ *result = target->label().name();
+ break;
+ case SUBSTITUTION_ROOT_GEN_DIR:
+ SetDirOrDotWithNoSlash(
+ GetToolchainGenDirAsOutputFile(target->settings()).value(),
+ result);
+ break;
+ case SUBSTITUTION_ROOT_OUT_DIR:
+ SetDirOrDotWithNoSlash(
+ target->settings()->toolchain_output_subdir().value(),
+ result);
+ break;
+ case SUBSTITUTION_TARGET_GEN_DIR:
+ SetDirOrDotWithNoSlash(
+ GetTargetGenDirAsOutputFile(target).value(),
+ result);
+ break;
+ case SUBSTITUTION_TARGET_OUT_DIR:
+ SetDirOrDotWithNoSlash(
+ GetTargetOutputDirAsOutputFile(target).value(),
+ result);
+ break;
+ case SUBSTITUTION_TARGET_OUTPUT_NAME:
+ *result = target->GetComputedOutputName();
+ break;
+ default:
+ return false;
+ }
+ return true;
+}
+
+// static
+std::string SubstitutionWriter::GetTargetSubstitution(
+ const Target* target,
+ SubstitutionType type) {
+ std::string result;
+ GetTargetSubstitution(target, type, &result);
+ return result;
+}
+
+// static
+OutputFile SubstitutionWriter::ApplyPatternToCompilerAsOutputFile(
+ const Target* target,
+ const SourceFile& source,
+ const SubstitutionPattern& pattern) {
+ OutputFile result;
+ for (const auto& subrange : pattern.ranges()) {
+ if (subrange.type == SUBSTITUTION_LITERAL) {
+ result.value().append(subrange.literal);
+ } else {
+ result.value().append(
+ GetCompilerSubstitution(target, source, subrange.type));
+ }
+ }
+ return result;
+}
+
+// static
+void SubstitutionWriter::ApplyListToCompilerAsOutputFile(
+ const Target* target,
+ const SourceFile& source,
+ const SubstitutionList& list,
+ std::vector<OutputFile>* output) {
+ for (const auto& item : list.list())
+ output->push_back(ApplyPatternToCompilerAsOutputFile(target, source, item));
+}
+
+// static
+std::string SubstitutionWriter::GetCompilerSubstitution(
+ const Target* target,
+ const SourceFile& source,
+ SubstitutionType type) {
+ // First try the common tool ones.
+ std::string result;
+ if (GetTargetSubstitution(target, type, &result))
+ return result;
+
+ // Fall-through to the source ones.
+ return GetSourceSubstitution(
+ target->settings(), source, type, OUTPUT_RELATIVE,
+ target->settings()->build_settings()->build_dir());
+}
+
+// static
+OutputFile SubstitutionWriter::ApplyPatternToLinkerAsOutputFile(
+ const Target* target,
+ const Tool* tool,
+ const SubstitutionPattern& pattern) {
+ OutputFile result;
+ for (const auto& subrange : pattern.ranges()) {
+ if (subrange.type == SUBSTITUTION_LITERAL) {
+ result.value().append(subrange.literal);
+ } else {
+ result.value().append(GetLinkerSubstitution(target, tool, subrange.type));
+ }
+ }
+ return result;
+}
+
+// static
+void SubstitutionWriter::ApplyListToLinkerAsOutputFile(
+ const Target* target,
+ const Tool* tool,
+ const SubstitutionList& list,
+ std::vector<OutputFile>* output) {
+ for (const auto& item : list.list())
+ output->push_back(ApplyPatternToLinkerAsOutputFile(target, tool, item));
+}
+
+// static
+std::string SubstitutionWriter::GetLinkerSubstitution(
+ const Target* target,
+ const Tool* tool,
+ SubstitutionType type) {
+ // First try the common tool ones.
+ std::string result;
+ if (GetTargetSubstitution(target, type, &result))
+ return result;
+
+ // Fall-through to the linker-specific ones.
+ switch (type) {
+ case SUBSTITUTION_OUTPUT_EXTENSION:
+ // Use the extension provided on the target if specified, otherwise
+ // fall back on the default. Note that the target's output extension
+ // does not include the dot but the tool's does.
+ if (!target->output_extension_set())
+ return tool->default_output_extension();
+ if (target->output_extension().empty())
+ return std::string(); // Explicitly set to no extension.
+ return std::string(".") + target->output_extension();
+
+ default:
+ NOTREACHED();
+ return std::string();
+ }
+}
diff --git a/chromium/tools/gn/substitution_writer.h b/chromium/tools/gn/substitution_writer.h
new file mode 100644
index 00000000000..a450bbbc03c
--- /dev/null
+++ b/chromium/tools/gn/substitution_writer.h
@@ -0,0 +1,229 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_SUBSTITUTION_WRITER_H_
+#define TOOLS_GN_SUBSTITUTION_WRITER_H_
+
+#include <iosfwd>
+#include <vector>
+
+#include "tools/gn/substitution_type.h"
+
+struct EscapeOptions;
+class OutputFile;
+class Settings;
+class SourceDir;
+class SourceFile;
+class SubstitutionList;
+class SubstitutionPattern;
+class Target;
+class Tool;
+
+// Help text for script source expansion.
+extern const char kSourceExpansion_Help[];
+
+// This class handles writing or applying substitution patterns to strings.
+//
+// There are several different uses:
+//
+// - Source substitutions: These are used to compute action_foreach
+// outputs and arguments. Functions are provided to expand these in terms
+// of both OutputFiles (for writing Ninja files) as well as SourceFiles
+// (for computing lists used by code).
+//
+// - Target substitutions: These are specific to the target+tool combination
+// and are shared between the compiler and linker ones. It includes things
+// like the target_gen_dir.
+//
+// - Compiler substitutions: These are used to compute compiler outputs.
+// It includes all source substitutions (since they depend on the various
+// parts of the source file) as well as the target substitutions.
+//
+// - Linker substitutions: These are used to compute linker outputs. It
+// includes the target substitutions.
+//
+// The compiler and linker specific substitutions do NOT include the various
+// cflags, ldflags, libraries, etc. These are written by the ninja target
+// writer since they depend on traversing the dependency tree.
+class SubstitutionWriter {
+ public:
+ enum OutputStyle {
+ OUTPUT_ABSOLUTE, // Dirs will be absolute "//foo/bar".
+ OUTPUT_RELATIVE, // Dirs will be relative to a given directory.
+ };
+
+ // Writes the pattern to the given stream with no special handling, and with
+ // Ninja variables replacing the patterns.
+ static void WriteWithNinjaVariables(
+ const SubstitutionPattern& pattern,
+ const EscapeOptions& escape_options,
+ std::ostream& out);
+
+ // NOP substitutions ---------------------------------------------------------
+
+ // Converts the given SubstitutionList to OutputFiles assuming there are
+ // no substitutions (it will assert if there are). This is used for cases
+ // like actions where the outputs are explicit, but the list is stored as
+ // a SubstitutionList.
+ static void GetListAsSourceFiles(
+ const SubstitutionList& list,
+ std::vector<SourceFile>* output);
+ static void GetListAsOutputFiles(
+ const Settings* settings,
+ const SubstitutionList& list,
+ std::vector<OutputFile>* output);
+
+ // Source substitutions -----------------------------------------------------
+
+ // Applies the substitution pattern to a source file, returning the result
+ // as either a string, a SourceFile or an OutputFile. If the result is
+ // expected to be a SourceFile or an OutputFile, this will CHECK if the
+ // result isn't in the correct directory. The caller should validate this
+ // first (see for example IsFileInOuputDir).
+ static SourceFile ApplyPatternToSource(
+ const Settings* settings,
+ const SubstitutionPattern& pattern,
+ const SourceFile& source);
+ static std::string ApplyPatternToSourceAsString(
+ const Settings* settings,
+ const SubstitutionPattern& pattern,
+ const SourceFile& source);
+ static OutputFile ApplyPatternToSourceAsOutputFile(
+ const Settings* settings,
+ const SubstitutionPattern& pattern,
+ const SourceFile& source);
+
+ // Applies the substitution list to a source, APPENDING the result to the
+ // given output vector. It works this way so one can call multiple times to
+ // apply to multiple files and create a list. The result can either be
+ // SourceFiles or OutputFiles.
+ static void ApplyListToSource(
+ const Settings* settings,
+ const SubstitutionList& list,
+ const SourceFile& source,
+ std::vector<SourceFile>* output);
+ static void ApplyListToSourceAsString(
+ const Settings* settings,
+ const SubstitutionList& list,
+ const SourceFile& source,
+ std::vector<std::string>* output);
+ static void ApplyListToSourceAsOutputFile(
+ const Settings* settings,
+ const SubstitutionList& list,
+ const SourceFile& source,
+ std::vector<OutputFile>* output);
+
+ // Like ApplyListToSource but applies the list to all sources and replaces
+ // rather than appends the output (this produces the complete output).
+ static void ApplyListToSources(
+ const Settings* settings,
+ const SubstitutionList& list,
+ const std::vector<SourceFile>& sources,
+ std::vector<SourceFile>* output);
+ static void ApplyListToSourcesAsString(
+ const Settings* settings,
+ const SubstitutionList& list,
+ const std::vector<SourceFile>& sources,
+ std::vector<std::string>* output);
+ static void ApplyListToSourcesAsOutputFile(
+ const Settings* settings,
+ const SubstitutionList& list,
+ const std::vector<SourceFile>& sources,
+ std::vector<OutputFile>* output);
+
+ // Given a list of source replacement types used, writes the Ninja variable
+ // definitions for the given source file to use for those replacements. The
+ // variables will be indented two spaces. Since this is for writing to
+ // Ninja files, paths will be relative to the build dir, and no definition
+ // for {{source}} will be written since that maps to Ninja's implicit $in
+ // variable.
+ static void WriteNinjaVariablesForSource(
+ const Settings* settings,
+ const SourceFile& source,
+ const std::vector<SubstitutionType>& types,
+ const EscapeOptions& escape_options,
+ std::ostream& out);
+
+ // Extracts the given type of substitution related to a source file from the
+ // given source file. If output_style is OUTPUT_RELATIVE, relative_to
+ // indicates the directory that the relative directories should be relative
+ // to, otherwise it is ignored.
+ static std::string GetSourceSubstitution(
+ const Settings* settings,
+ const SourceFile& source,
+ SubstitutionType type,
+ OutputStyle output_style,
+ const SourceDir& relative_to);
+
+ // Target substitutions ------------------------------------------------------
+ //
+ // Handles the target substitutions that apply to both compiler and linker
+ // tools.
+ static OutputFile ApplyPatternToTargetAsOutputFile(
+ const Target* target,
+ const Tool* tool,
+ const SubstitutionPattern& pattern);
+ static void ApplyListToTargetAsOutputFile(
+ const Target* target,
+ const Tool* tool,
+ const SubstitutionList& list,
+ std::vector<OutputFile>* output);
+
+ // This function is slightly different than the other substitution getters
+ // since it can handle failure (since it is designed to be used by the
+ // compiler and linker ones which will fall through if it's not a common tool
+ // one).
+ static bool GetTargetSubstitution(
+ const Target* target,
+ SubstitutionType type,
+ std::string* result);
+ static std::string GetTargetSubstitution(
+ const Target* target,
+ SubstitutionType type);
+
+ // Compiler substitutions ----------------------------------------------------
+ //
+ // A compiler substitution allows both source and tool substitutions. These
+ // are used to compute output names for compiler tools.
+
+ static OutputFile ApplyPatternToCompilerAsOutputFile(
+ const Target* target,
+ const SourceFile& source,
+ const SubstitutionPattern& pattern);
+ static void ApplyListToCompilerAsOutputFile(
+ const Target* target,
+ const SourceFile& source,
+ const SubstitutionList& list,
+ std::vector<OutputFile>* output);
+
+ // Like GetSourceSubstitution but for strings based on the target or
+ // toolchain. This type of result will always be relative to the build
+ // directory.
+ static std::string GetCompilerSubstitution(
+ const Target* target,
+ const SourceFile& source,
+ SubstitutionType type);
+
+ // Linker substitutions ------------------------------------------------------
+
+ static OutputFile ApplyPatternToLinkerAsOutputFile(
+ const Target* target,
+ const Tool* tool,
+ const SubstitutionPattern& pattern);
+ static void ApplyListToLinkerAsOutputFile(
+ const Target* target,
+ const Tool* tool,
+ const SubstitutionList& list,
+ std::vector<OutputFile>* output);
+
+ // Like GetSourceSubstitution but for strings based on the target or
+ // toolchain. This type of result will always be relative to the build
+ // directory.
+ static std::string GetLinkerSubstitution(
+ const Target* target,
+ const Tool* tool,
+ SubstitutionType type);
+};
+
+#endif // TOOLS_GN_SUBSTITUTION_WRITER_H_
diff --git a/chromium/tools/gn/substitution_writer_unittest.cc b/chromium/tools/gn/substitution_writer_unittest.cc
new file mode 100644
index 00000000000..77374f1a313
--- /dev/null
+++ b/chromium/tools/gn/substitution_writer_unittest.cc
@@ -0,0 +1,281 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <sstream>
+
+#include "build/build_config.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/err.h"
+#include "tools/gn/escape.h"
+#include "tools/gn/substitution_list.h"
+#include "tools/gn/substitution_pattern.h"
+#include "tools/gn/substitution_writer.h"
+#include "tools/gn/target.h"
+#include "tools/gn/test_with_scope.h"
+
+TEST(SubstitutionWriter, GetListAs) {
+ TestWithScope setup;
+
+ SubstitutionList list = SubstitutionList::MakeForTest(
+ "//foo/bar/a.cc",
+ "//foo/bar/b.cc");
+
+ std::vector<SourceFile> sources;
+ SubstitutionWriter::GetListAsSourceFiles(list, &sources);
+ ASSERT_EQ(2u, sources.size());
+ EXPECT_EQ("//foo/bar/a.cc", sources[0].value());
+ EXPECT_EQ("//foo/bar/b.cc", sources[1].value());
+
+ std::vector<OutputFile> outputs;
+ SubstitutionWriter::GetListAsOutputFiles(setup.settings(), list, &outputs);
+ ASSERT_EQ(2u, outputs.size());
+ EXPECT_EQ("../../foo/bar/a.cc", outputs[0].value());
+ EXPECT_EQ("../../foo/bar/b.cc", outputs[1].value());
+}
+
+TEST(SubstitutionWriter, ApplyPatternToSource) {
+ TestWithScope setup;
+
+ SubstitutionPattern pattern;
+ Err err;
+ ASSERT_TRUE(pattern.Parse("{{source_gen_dir}}/{{source_name_part}}.tmp",
+ nullptr, &err));
+
+ SourceFile result = SubstitutionWriter::ApplyPatternToSource(
+ setup.settings(), pattern, SourceFile("//foo/bar/myfile.txt"));
+ ASSERT_EQ("//out/Debug/gen/foo/bar/myfile.tmp", result.value());
+}
+
+TEST(SubstitutionWriter, ApplyPatternToSourceAsOutputFile) {
+ TestWithScope setup;
+
+ SubstitutionPattern pattern;
+ Err err;
+ ASSERT_TRUE(pattern.Parse("{{source_gen_dir}}/{{source_name_part}}.tmp",
+ nullptr, &err));
+
+ OutputFile result = SubstitutionWriter::ApplyPatternToSourceAsOutputFile(
+ setup.settings(), pattern, SourceFile("//foo/bar/myfile.txt"));
+ ASSERT_EQ("gen/foo/bar/myfile.tmp", result.value());
+}
+
+TEST(SubstitutionWriter, WriteNinjaVariablesForSource) {
+ TestWithScope setup;
+
+ std::vector<SubstitutionType> types;
+ types.push_back(SUBSTITUTION_SOURCE);
+ types.push_back(SUBSTITUTION_SOURCE_NAME_PART);
+ types.push_back(SUBSTITUTION_SOURCE_DIR);
+
+ EscapeOptions options;
+ options.mode = ESCAPE_NONE;
+
+ std::ostringstream out;
+ SubstitutionWriter::WriteNinjaVariablesForSource(
+ setup.settings(), SourceFile("//foo/bar/baz.txt"), types, options, out);
+
+ // The "source" should be skipped since that will expand to $in which is
+ // implicit.
+ EXPECT_EQ(
+ " source_name_part = baz\n"
+ " source_dir = ../../foo/bar\n",
+ out.str());
+}
+
+TEST(SubstitutionWriter, WriteWithNinjaVariables) {
+ Err err;
+ SubstitutionPattern pattern;
+ ASSERT_TRUE(pattern.Parse("-i {{source}} --out=bar\"{{source_name_part}}\".o",
+ nullptr, &err));
+ EXPECT_FALSE(err.has_error());
+
+ EscapeOptions options;
+ options.mode = ESCAPE_NONE;
+
+ std::ostringstream out;
+ SubstitutionWriter::WriteWithNinjaVariables(pattern, options, out);
+
+ EXPECT_EQ(
+ "-i ${in} --out=bar\"${source_name_part}\".o",
+ out.str());
+}
+
+TEST(SubstitutionWriter, SourceSubstitutions) {
+ TestWithScope setup;
+
+ // Call to get substitutions relative to the build dir.
+ #define GetRelSubst(str, what) \
+ SubstitutionWriter::GetSourceSubstitution( \
+ setup.settings(), \
+ SourceFile(str), \
+ what, \
+ SubstitutionWriter::OUTPUT_RELATIVE, \
+ setup.settings()->build_settings()->build_dir())
+
+ // Call to get absolute directory substitutions.
+ #define GetAbsSubst(str, what) \
+ SubstitutionWriter::GetSourceSubstitution( \
+ setup.settings(), \
+ SourceFile(str), \
+ what, \
+ SubstitutionWriter::OUTPUT_ABSOLUTE, \
+ SourceDir())
+
+ // Try all possible templates with a normal looking string.
+ EXPECT_EQ("../../foo/bar/baz.txt",
+ GetRelSubst("//foo/bar/baz.txt", SUBSTITUTION_SOURCE));
+ EXPECT_EQ("//foo/bar/baz.txt",
+ GetAbsSubst("//foo/bar/baz.txt", SUBSTITUTION_SOURCE));
+
+ EXPECT_EQ("baz",
+ GetRelSubst("//foo/bar/baz.txt", SUBSTITUTION_SOURCE_NAME_PART));
+ EXPECT_EQ("baz",
+ GetAbsSubst("//foo/bar/baz.txt", SUBSTITUTION_SOURCE_NAME_PART));
+
+ EXPECT_EQ("baz.txt",
+ GetRelSubst("//foo/bar/baz.txt", SUBSTITUTION_SOURCE_FILE_PART));
+ EXPECT_EQ("baz.txt",
+ GetAbsSubst("//foo/bar/baz.txt", SUBSTITUTION_SOURCE_FILE_PART));
+
+ EXPECT_EQ("../../foo/bar",
+ GetRelSubst("//foo/bar/baz.txt", SUBSTITUTION_SOURCE_DIR));
+ EXPECT_EQ("//foo/bar",
+ GetAbsSubst("//foo/bar/baz.txt", SUBSTITUTION_SOURCE_DIR));
+
+ EXPECT_EQ("foo/bar", GetRelSubst("//foo/bar/baz.txt",
+ SUBSTITUTION_SOURCE_ROOT_RELATIVE_DIR));
+ EXPECT_EQ("foo/bar", GetAbsSubst("//foo/bar/baz.txt",
+ SUBSTITUTION_SOURCE_ROOT_RELATIVE_DIR));
+
+ EXPECT_EQ("gen/foo/bar",
+ GetRelSubst("//foo/bar/baz.txt", SUBSTITUTION_SOURCE_GEN_DIR));
+ EXPECT_EQ("//out/Debug/gen/foo/bar",
+ GetAbsSubst("//foo/bar/baz.txt", SUBSTITUTION_SOURCE_GEN_DIR));
+
+ EXPECT_EQ("obj/foo/bar",
+ GetRelSubst("//foo/bar/baz.txt", SUBSTITUTION_SOURCE_OUT_DIR));
+ EXPECT_EQ("//out/Debug/obj/foo/bar",
+ GetAbsSubst("//foo/bar/baz.txt", SUBSTITUTION_SOURCE_OUT_DIR));
+
+ // Operations on an absolute path.
+ EXPECT_EQ("/baz.txt", GetRelSubst("/baz.txt", SUBSTITUTION_SOURCE));
+ EXPECT_EQ("/.", GetRelSubst("/baz.txt", SUBSTITUTION_SOURCE_DIR));
+ EXPECT_EQ("gen/ABS_PATH",
+ GetRelSubst("/baz.txt", SUBSTITUTION_SOURCE_GEN_DIR));
+ EXPECT_EQ("obj/ABS_PATH",
+ GetRelSubst("/baz.txt", SUBSTITUTION_SOURCE_OUT_DIR));
+#if defined(OS_WIN)
+ EXPECT_EQ("gen/ABS_PATH/C",
+ GetRelSubst("/C:/baz.txt", SUBSTITUTION_SOURCE_GEN_DIR));
+ EXPECT_EQ("obj/ABS_PATH/C",
+ GetRelSubst("/C:/baz.txt", SUBSTITUTION_SOURCE_OUT_DIR));
+#endif
+
+ EXPECT_EQ(".",
+ GetRelSubst("//baz.txt", SUBSTITUTION_SOURCE_ROOT_RELATIVE_DIR));
+
+ #undef GetAbsSubst
+ #undef GetRelSubst
+}
+
+TEST(SubstitutionWriter, TargetSubstitutions) {
+ TestWithScope setup;
+ Err err;
+
+ Target target(setup.settings(), Label(SourceDir("//foo/bar/"), "baz"));
+ target.set_output_type(Target::STATIC_LIBRARY);
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ std::string result;
+ EXPECT_TRUE(SubstitutionWriter::GetTargetSubstitution(
+ &target, SUBSTITUTION_LABEL, &result));
+ EXPECT_EQ("//foo/bar:baz", result);
+
+ EXPECT_TRUE(SubstitutionWriter::GetTargetSubstitution(
+ &target, SUBSTITUTION_LABEL_NAME, &result));
+ EXPECT_EQ("baz", result);
+
+ EXPECT_TRUE(SubstitutionWriter::GetTargetSubstitution(
+ &target, SUBSTITUTION_ROOT_GEN_DIR, &result));
+ EXPECT_EQ("gen", result);
+
+ EXPECT_TRUE(SubstitutionWriter::GetTargetSubstitution(
+ &target, SUBSTITUTION_ROOT_OUT_DIR, &result));
+ EXPECT_EQ(".", result);
+
+ EXPECT_TRUE(SubstitutionWriter::GetTargetSubstitution(
+ &target, SUBSTITUTION_TARGET_GEN_DIR, &result));
+ EXPECT_EQ("gen/foo/bar", result);
+
+ EXPECT_TRUE(SubstitutionWriter::GetTargetSubstitution(
+ &target, SUBSTITUTION_TARGET_OUT_DIR, &result));
+ EXPECT_EQ("obj/foo/bar", result);
+
+ EXPECT_TRUE(SubstitutionWriter::GetTargetSubstitution(
+ &target, SUBSTITUTION_TARGET_OUTPUT_NAME, &result));
+ EXPECT_EQ("libbaz", result);
+}
+
+TEST(SubstitutionWriter, CompilerSubstitutions) {
+ TestWithScope setup;
+ Err err;
+
+ Target target(setup.settings(), Label(SourceDir("//foo/bar/"), "baz"));
+ target.set_output_type(Target::STATIC_LIBRARY);
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ // The compiler substitution is just source + target combined. So test one
+ // of each of those classes of things to make sure this is hooked up.
+ EXPECT_EQ("file",
+ SubstitutionWriter::GetCompilerSubstitution(
+ &target, SourceFile("//foo/bar/file.txt"),
+ SUBSTITUTION_SOURCE_NAME_PART));
+ EXPECT_EQ("gen/foo/bar",
+ SubstitutionWriter::GetCompilerSubstitution(
+ &target, SourceFile("//foo/bar/file.txt"),
+ SUBSTITUTION_TARGET_GEN_DIR));
+}
+
+TEST(SubstitutionWriter, LinkerSubstitutions) {
+ TestWithScope setup;
+ Err err;
+
+ Target target(setup.settings(), Label(SourceDir("//foo/bar/"), "baz"));
+ target.set_output_type(Target::SHARED_LIBRARY);
+ target.SetToolchain(setup.toolchain());
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ const Tool* tool = setup.toolchain()->GetToolForTargetFinalOutput(&target);
+
+ // The compiler substitution is just target + OUTPUT_EXTENSION combined. So
+ // test one target one plus the output extension.
+ EXPECT_EQ(".so",
+ SubstitutionWriter::GetLinkerSubstitution(
+ &target, tool, SUBSTITUTION_OUTPUT_EXTENSION));
+ EXPECT_EQ("gen/foo/bar",
+ SubstitutionWriter::GetLinkerSubstitution(
+ &target, tool, SUBSTITUTION_TARGET_GEN_DIR));
+
+ // Test that we handle paths that end up in the root build dir properly
+ // (no leading "./" or "/").
+ SubstitutionPattern pattern;
+ ASSERT_TRUE(pattern.Parse("{{root_out_dir}}/{{target_output_name}}.so",
+ nullptr, &err));
+
+ OutputFile output = SubstitutionWriter::ApplyPatternToLinkerAsOutputFile(
+ &target, tool, pattern);
+ EXPECT_EQ("./libbaz.so", output.value());
+
+ // Output extensions can be overridden.
+ target.set_output_extension("extension");
+ EXPECT_EQ(".extension",
+ SubstitutionWriter::GetLinkerSubstitution(
+ &target, tool, SUBSTITUTION_OUTPUT_EXTENSION));
+ target.set_output_extension("");
+ EXPECT_EQ("",
+ SubstitutionWriter::GetLinkerSubstitution(
+ &target, tool, SUBSTITUTION_OUTPUT_EXTENSION));
+}
diff --git a/chromium/tools/gn/switches.cc b/chromium/tools/gn/switches.cc
new file mode 100644
index 00000000000..df167b30aa5
--- /dev/null
+++ b/chromium/tools/gn/switches.cc
@@ -0,0 +1,242 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/switches.h"
+
+namespace switches {
+
+const char kArgs[] = "args";
+const char kArgs_HelpShort[] =
+ "--args: Specifies build arguments overrides.";
+const char kArgs_Help[] =
+ "--args: Specifies build arguments overrides.\n"
+ "\n"
+ " See \"gn help buildargs\" for an overview of how build arguments work.\n"
+ "\n"
+ " Most operations take a build directory. The build arguments are taken\n"
+ " from the previous build done in that directory. If a command specifies\n"
+ " --args, it will override the previous arguments stored in the build\n"
+ " directory, and use the specified ones.\n"
+ "\n"
+ " The args specified will be saved to the build directory for subsequent\n"
+ " commands. Specifying --args=\"\" will clear all build arguments.\n"
+ "\n"
+ "Formatting\n"
+ "\n"
+ " The value of the switch is interpreted in GN syntax. For typical usage\n"
+ " of string arguments, you will need to be careful about escaping of\n"
+ " quotes.\n"
+ "\n"
+ "Examples\n"
+ "\n"
+ " gn gen out/Default --args=\"foo=\\\"bar\\\"\"\n"
+ "\n"
+ " gn gen out/Default --args='foo=\"bar\" enable=true blah=7'\n"
+ "\n"
+ " gn check out/Default --args=\"\"\n"
+ " Clears existing build args from the directory.\n"
+ "\n"
+ " gn desc out/Default --args=\"some_list=[1, false, \\\"foo\\\"]\"\n";
+
+#define COLOR_HELP_LONG \
+ "--[no]color: Forces colored output on or off.\n"\
+ "\n"\
+ " Normally GN will try to detect whether it is outputting to a terminal\n"\
+ " and will enable or disable color accordingly. Use of these switches\n"\
+ " will override the default.\n"\
+ "\n"\
+ "Examples\n"\
+ "\n"\
+ " gn gen out/Default --color\n"\
+ "\n"\
+ " gn gen out/Default --nocolor\n"
+const char kColor[] = "color";
+const char kColor_HelpShort[] =
+ "--color: Force colored output.";
+const char kColor_Help[] = COLOR_HELP_LONG;
+
+const char kDotfile[] = "dotfile";
+const char kDotfile_HelpShort[] =
+ "--dotfile: Override the name of the \".gn\" file.";
+const char kDotfile_Help[] =
+ "--dotfile: Override the name of the \".gn\" file.\n"
+ "\n"
+ " Normally GN loads the \".gn\"file from the source root for some basic\n"
+ " configuration (see \"gn help dotfile\"). This flag allows you to\n"
+ " use a different file.\n"
+ "\n"
+ " Note that this interacts with \"--root\" in a possibly incorrect way.\n"
+ " It would be nice to test the edge cases and document or fix.\n";
+
+const char kMarkdown[] = "markdown";
+const char kMarkdown_HelpShort[] =
+ "--markdown: write the output in the Markdown format.";
+const char kMarkdown_Help[] =
+ "--markdown: write the output in the Markdown format.\n";
+
+const char kNoColor[] = "nocolor";
+const char kNoColor_HelpShort[] =
+ "--nocolor: Force non-colored output.";
+const char kNoColor_Help[] = COLOR_HELP_LONG;
+
+const char kQuiet[] = "q";
+const char kQuiet_HelpShort[] =
+ "-q: Quiet mode. Don't print output on success.";
+const char kQuiet_Help[] =
+ "-q: Quiet mode. Don't print output on success.\n"
+ "\n"
+ " This is useful when running as a part of another script.\n";
+
+const char kRoot[] = "root";
+const char kRoot_HelpShort[] =
+ "--root: Explicitly specify source root.";
+const char kRoot_Help[] =
+ "--root: Explicitly specify source root.\n"
+ "\n"
+ " Normally GN will look up in the directory tree from the current\n"
+ " directory to find a \".gn\" file. The source root directory specifies\n"
+ " the meaning of \"//\" beginning with paths, and the BUILD.gn file\n"
+ " in that directory will be the first thing loaded.\n"
+ "\n"
+ " Specifying --root allows GN to do builds in a specific directory\n"
+ " regardless of the current directory.\n"
+ "\n"
+ "Examples\n"
+ "\n"
+ " gn gen //out/Default --root=/home/baracko/src\n"
+ "\n"
+ " gn desc //out/Default --root=\"C:\\Users\\BObama\\My Documents\\foo\"\n";
+
+const char kRuntimeDepsListFile[] = "runtime-deps-list-file";
+const char kRuntimeDepsListFile_HelpShort[] =
+ "--runtime-deps-list-file: Save runtime dependencies for targets in file.";
+const char kRuntimeDepsListFile_Help[] =
+ "--runtime-deps-list-file: Save runtime dependencies for targets in file.\n"
+ "\n"
+ " --runtime-deps-list-file=<filename>\n"
+ "\n"
+ " Where <filename> is a text file consisting of the labels, one per\n"
+ " line, of the targets for which runtime dependencies are desired.\n"
+ "\n"
+ " See \"gn help runtime_deps\" for a description of how runtime\n"
+ " dependencies are computed.\n"
+ "\n"
+ "Runtime deps output file\n"
+ "\n"
+ " For each target requested, GN will write a separate runtime dependency\n"
+ " file. The runtime dependency file will be in the output directory\n"
+ " alongside the output file of the target, with a \".runtime_deps\"\n"
+ " extension. For example, if the target \"//foo:bar\" is listed in the\n"
+ " input file, and that target produces an output file \"bar.so\", GN\n"
+ " will create a file \"bar.so.runtime_deps\" in the build directory.\n"
+ "\n"
+ " If a source set, action, copy, or group is listed, the runtime deps\n"
+ " file will correspond to the .stamp file corresponding to that target.\n"
+ " This is probably not useful; the use-case for this feature is\n"
+ " generally executable targets.\n"
+ "\n"
+ " The runtime dependency file will list one file per line, with no\n"
+ " escaping. The files will be relative to the root_build_dir. The first\n"
+ " line of the file will be the main output file of the target itself\n"
+ " (in the above example, \"bar.so\").\n";
+
+const char kThreads[] = "threads";
+const char kThreads_HelpShort[] =
+ "--threads: Specify number of worker threads.";
+const char kThreads_Help[] =
+ "--threads: Specify number of worker threads.\n"
+ "\n"
+ " GN runs many threads to load and run build files. This can make\n"
+ " debugging challenging. Or you may want to experiment with different\n"
+ " values to see how it affects performance.\n"
+ "\n"
+ " The parameter is the number of worker threads. This does not count the\n"
+ " main thread (so there are always at least two).\n"
+ "\n"
+ "Examples\n"
+ "\n"
+ " gen gen out/Default --threads=1\n";
+
+const char kTime[] = "time";
+const char kTime_HelpShort[] =
+ "--time: Outputs a summary of how long everything took.";
+const char kTime_Help[] =
+ "--time: Outputs a summary of how long everything took.\n"
+ "\n"
+ " Hopefully self-explanatory.\n"
+ "\n"
+ "Examples\n"
+ "\n"
+ " gn gen out/Default --time\n";
+
+const char kTracelog[] = "tracelog";
+const char kTracelog_HelpShort[] =
+ "--tracelog: Writes a Chrome-compatible trace log to the given file.";
+const char kTracelog_Help[] =
+ "--tracelog: Writes a Chrome-compatible trace log to the given file.\n"
+ "\n"
+ " The trace log will show file loads, executions, scripts, and writes.\n"
+ " This allows performance analysis of the generation step.\n"
+ "\n"
+ " To view the trace, open Chrome and navigate to \"chrome://tracing/\",\n"
+ " then press \"Load\" and specify the file you passed to this parameter.\n"
+ "\n"
+ "Examples\n"
+ "\n"
+ " gn gen out/Default --tracelog=mytrace.trace\n";
+
+const char kVerbose[] = "v";
+const char kVerbose_HelpShort[] =
+ "-v: Verbose logging.";
+const char kVerbose_Help[] =
+ "-v: Verbose logging.\n"
+ "\n"
+ " This will spew logging events to the console for debugging issues.\n"
+ " Good luck!\n";
+
+const char kVersion[] = "version";
+const char kVersion_HelpShort[] =
+ "--version: Prints the GN version number and exits.";
+// It's impossible to see this since gn_main prints the version and exits
+// immediately if this switch is used.
+const char kVersion_Help[] = "";
+
+// -----------------------------------------------------------------------------
+
+SwitchInfo::SwitchInfo()
+ : short_help(""),
+ long_help("") {
+}
+
+SwitchInfo::SwitchInfo(const char* short_help, const char* long_help)
+ : short_help(short_help),
+ long_help(long_help) {
+}
+
+#define INSERT_VARIABLE(var) \
+ info_map[k##var] = SwitchInfo(k##var##_HelpShort, k##var##_Help);
+
+const SwitchInfoMap& GetSwitches() {
+ static SwitchInfoMap info_map;
+ if (info_map.empty()) {
+ INSERT_VARIABLE(Args)
+ INSERT_VARIABLE(Color)
+ INSERT_VARIABLE(Dotfile)
+ INSERT_VARIABLE(Markdown)
+ INSERT_VARIABLE(NoColor)
+ INSERT_VARIABLE(Root)
+ INSERT_VARIABLE(Quiet)
+ INSERT_VARIABLE(RuntimeDepsListFile)
+ INSERT_VARIABLE(Threads)
+ INSERT_VARIABLE(Time)
+ INSERT_VARIABLE(Tracelog)
+ INSERT_VARIABLE(Verbose)
+ INSERT_VARIABLE(Version)
+ }
+ return info_map;
+}
+
+#undef INSERT_VARIABLE
+
+} // namespace switches
diff --git a/chromium/tools/gn/switches.h b/chromium/tools/gn/switches.h
new file mode 100644
index 00000000000..3dc5b9f0f25
--- /dev/null
+++ b/chromium/tools/gn/switches.h
@@ -0,0 +1,85 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_SWITCHES_H_
+#define TOOLS_GN_SWITCHES_H_
+
+#include <map>
+
+#include "base/strings/string_piece.h"
+
+namespace switches {
+
+struct SwitchInfo {
+ SwitchInfo();
+ SwitchInfo(const char* short_help,
+ const char* long_help);
+
+ const char* short_help;
+ const char* long_help;
+};
+
+typedef std::map<base::StringPiece, SwitchInfo> SwitchInfoMap;
+
+// Returns the mapping of all global switches.
+const SwitchInfoMap& GetSwitches();
+
+// This file contains global switches. If a command takes specific ones only
+// to that command, just put them in that command's .cc file.
+
+extern const char kArgs[];
+extern const char kArgs_HelpShort[];
+extern const char kArgs_Help[];
+
+extern const char kColor[];
+extern const char kColor_HelpShort[];
+extern const char kColor_Help[];
+
+extern const char kDotfile[];
+extern const char kDotfile_HelpShort[];
+extern const char kDotfile_Help[];
+
+extern const char kMarkdown[];
+extern const char kMarkdown_HelpShort[];
+extern const char kMarkdown_Help[];
+
+extern const char kNoColor[];
+extern const char kNoColor_HelpShort[];
+extern const char kNoColor_Help[];
+
+extern const char kQuiet[];
+extern const char kQuiet_HelpShort[];
+extern const char kQuiet_Help[];
+
+extern const char kRoot[];
+extern const char kRoot_HelpShort[];
+extern const char kRoot_Help[];
+
+extern const char kRuntimeDepsListFile[];
+extern const char kRuntimeDepsListFile_HelpShort[];
+extern const char kRuntimeDepsListFile_Help[];
+
+extern const char kThreads[];
+extern const char kThreads_HelpShort[];
+extern const char kThreads_Help[];
+
+extern const char kTime[];
+extern const char kTime_HelpShort[];
+extern const char kTime_Help[];
+
+extern const char kTracelog[];
+extern const char kTracelog_HelpShort[];
+extern const char kTracelog_Help[];
+
+extern const char kVerbose[];
+extern const char kVerbose_HelpShort[];
+extern const char kVerbose_Help[];
+
+extern const char kVersion[];
+extern const char kVersion_HelpShort[];
+extern const char kVersion_Help[];
+
+} // namespace switches
+
+#endif // TOOLS_GN_SWITCHES_H_
diff --git a/chromium/tools/gn/target.cc b/chromium/tools/gn/target.cc
new file mode 100644
index 00000000000..18c353eac05
--- /dev/null
+++ b/chromium/tools/gn/target.cc
@@ -0,0 +1,788 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/target.h"
+
+#include <stddef.h>
+
+#include <algorithm>
+
+#include "base/bind.h"
+#include "base/strings/string_util.h"
+#include "base/strings/stringprintf.h"
+#include "tools/gn/config_values_extractors.h"
+#include "tools/gn/deps_iterator.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/source_file_type.h"
+#include "tools/gn/substitution_writer.h"
+#include "tools/gn/tool.h"
+#include "tools/gn/toolchain.h"
+#include "tools/gn/trace.h"
+
+namespace {
+
+typedef std::set<const Config*> ConfigSet;
+
+// Merges the public configs from the given target to the given config list.
+void MergePublicConfigsFrom(const Target* from_target,
+ UniqueVector<LabelConfigPair>* dest) {
+ const UniqueVector<LabelConfigPair>& pub = from_target->public_configs();
+ dest->Append(pub.begin(), pub.end());
+}
+
+// Like MergePublicConfigsFrom above except does the "all dependent" ones. This
+// additionally adds all configs to the all_dependent_configs_ of the dest
+// target given in *all_dest.
+void MergeAllDependentConfigsFrom(const Target* from_target,
+ UniqueVector<LabelConfigPair>* dest,
+ UniqueVector<LabelConfigPair>* all_dest) {
+ for (const auto& pair : from_target->all_dependent_configs()) {
+ all_dest->push_back(pair);
+ dest->push_back(pair);
+ }
+}
+
+Err MakeTestOnlyError(const Target* from, const Target* to) {
+ return Err(from->defined_from(), "Test-only dependency not allowed.",
+ from->label().GetUserVisibleName(false) + "\n"
+ "which is NOT marked testonly can't depend on\n" +
+ to->label().GetUserVisibleName(false) + "\n"
+ "which is marked testonly. Only targets with \"testonly = true\"\n"
+ "can depend on other test-only targets.\n"
+ "\n"
+ "Either mark it test-only or don't do this dependency.");
+}
+
+Err MakeStaticLibDepsError(const Target* from, const Target* to) {
+ return Err(from->defined_from(),
+ "Complete static libraries can't depend on static libraries.",
+ from->label().GetUserVisibleName(false) +
+ "\n"
+ "which is a complete static library can't depend on\n" +
+ to->label().GetUserVisibleName(false) +
+ "\n"
+ "which is a static library.\n"
+ "\n"
+ "Use source sets for intermediate targets instead.");
+}
+
+// Set check_private_deps to true for the first invocation since a target
+// can see all of its dependencies. For recursive invocations this will be set
+// to false to follow only public dependency paths.
+//
+// Pass a pointer to an empty set for the first invocation. This will be used
+// to avoid duplicate checking.
+//
+// Checking of object files is optional because it is much slower. This allows
+// us to check targets for normal outputs, and then as a second pass check
+// object files (since we know it will be an error otherwise). This allows
+// us to avoid computing all object file names in the common case.
+bool EnsureFileIsGeneratedByDependency(const Target* target,
+ const OutputFile& file,
+ bool check_private_deps,
+ bool consider_object_files,
+ bool check_data_deps,
+ std::set<const Target*>* seen_targets) {
+ if (seen_targets->find(target) != seen_targets->end())
+ return false; // Already checked this one and it's not found.
+ seen_targets->insert(target);
+
+ // Assume that we have relatively few generated inputs so brute-force
+ // searching here is OK. If this becomes a bottleneck, consider storing
+ // computed_outputs as a hash set.
+ for (const OutputFile& cur : target->computed_outputs()) {
+ if (file == cur)
+ return true;
+ }
+
+ if (file == target->write_runtime_deps_output())
+ return true;
+
+ // Check binary target intermediate files if requested.
+ if (consider_object_files && target->IsBinary()) {
+ std::vector<OutputFile> source_outputs;
+ for (const SourceFile& source : target->sources()) {
+ Toolchain::ToolType tool_type;
+ if (!target->GetOutputFilesForSource(source, &tool_type, &source_outputs))
+ continue;
+ if (std::find(source_outputs.begin(), source_outputs.end(), file) !=
+ source_outputs.end())
+ return true;
+ }
+ }
+
+ if (check_data_deps) {
+ check_data_deps = false; // Consider only direct data_deps.
+ for (const auto& pair : target->data_deps()) {
+ if (EnsureFileIsGeneratedByDependency(pair.ptr, file, false,
+ consider_object_files,
+ check_data_deps, seen_targets))
+ return true; // Found a path.
+ }
+ }
+
+ // Check all public dependencies (don't do data ones since those are
+ // runtime-only).
+ for (const auto& pair : target->public_deps()) {
+ if (EnsureFileIsGeneratedByDependency(pair.ptr, file, false,
+ consider_object_files,
+ check_data_deps, seen_targets))
+ return true; // Found a path.
+ }
+
+ // Only check private deps if requested.
+ if (check_private_deps) {
+ for (const auto& pair : target->private_deps()) {
+ if (EnsureFileIsGeneratedByDependency(pair.ptr, file, false,
+ consider_object_files,
+ check_data_deps, seen_targets))
+ return true; // Found a path.
+ }
+ if (target->output_type() == Target::CREATE_BUNDLE) {
+ for (const auto& dep : target->bundle_data().bundle_deps()) {
+ if (EnsureFileIsGeneratedByDependency(dep, file, false,
+ consider_object_files,
+ check_data_deps, seen_targets))
+ return true; // Found a path.
+ }
+ }
+ }
+ return false;
+}
+
+// check_this indicates if the given target should be matched against the
+// patterns. It should be set to false for the first call since assert_no_deps
+// shouldn't match the target itself.
+//
+// visited should point to an empty set, this will be used to prevent
+// multiple visits.
+//
+// *failure_path_str will be filled with a string describing the path of the
+// dependency failure, and failure_pattern will indicate the pattern in
+// assert_no that matched the target.
+//
+// Returns true if everything is OK. failure_path_str and failure_pattern_index
+// will be unchanged in this case.
+bool RecursiveCheckAssertNoDeps(const Target* target,
+ bool check_this,
+ const std::vector<LabelPattern>& assert_no,
+ std::set<const Target*>* visited,
+ std::string* failure_path_str,
+ const LabelPattern** failure_pattern) {
+ static const char kIndentPath[] = " ";
+
+ if (visited->find(target) != visited->end())
+ return true; // Already checked this target.
+ visited->insert(target);
+
+ if (check_this) {
+ // Check this target against the given list of patterns.
+ for (const LabelPattern& pattern : assert_no) {
+ if (pattern.Matches(target->label())) {
+ // Found a match.
+ *failure_pattern = &pattern;
+ *failure_path_str =
+ kIndentPath + target->label().GetUserVisibleName(false);
+ return false;
+ }
+ }
+ }
+
+ // Recursively check dependencies.
+ for (const auto& pair : target->GetDeps(Target::DEPS_ALL)) {
+ if (pair.ptr->output_type() == Target::EXECUTABLE)
+ continue;
+ if (!RecursiveCheckAssertNoDeps(pair.ptr, true, assert_no, visited,
+ failure_path_str, failure_pattern)) {
+ // To reconstruct the path, prepend the current target to the error.
+ std::string prepend_path =
+ kIndentPath + target->label().GetUserVisibleName(false) + " ->\n";
+ failure_path_str->insert(0, prepend_path);
+ return false;
+ }
+ }
+
+ return true;
+}
+
+} // namespace
+
+Target::Target(const Settings* settings, const Label& label)
+ : Item(settings, label),
+ output_type_(UNKNOWN),
+ output_prefix_override_(false),
+ output_extension_set_(false),
+ all_headers_public_(true),
+ check_includes_(true),
+ complete_static_lib_(false),
+ testonly_(false),
+ toolchain_(nullptr) {
+}
+
+Target::~Target() {
+}
+
+// static
+const char* Target::GetStringForOutputType(OutputType type) {
+ switch (type) {
+ case UNKNOWN:
+ return "Unknown";
+ case GROUP:
+ return "Group";
+ case EXECUTABLE:
+ return "Executable";
+ case LOADABLE_MODULE:
+ return "Loadable module";
+ case SHARED_LIBRARY:
+ return "Shared library";
+ case STATIC_LIBRARY:
+ return "Static library";
+ case SOURCE_SET:
+ return "Source set";
+ case COPY_FILES:
+ return "Copy";
+ case ACTION:
+ return "Action";
+ case ACTION_FOREACH:
+ return "ActionForEach";
+ case BUNDLE_DATA:
+ return "Bundle data";
+ case CREATE_BUNDLE:
+ return "Create bundle";
+ default:
+ return "";
+ }
+}
+
+Target* Target::AsTarget() {
+ return this;
+}
+
+const Target* Target::AsTarget() const {
+ return this;
+}
+
+bool Target::OnResolved(Err* err) {
+ DCHECK(output_type_ != UNKNOWN);
+ DCHECK(toolchain_) << "Toolchain should have been set before resolving.";
+
+ ScopedTrace trace(TraceItem::TRACE_ON_RESOLVED, label());
+ trace.SetToolchain(settings()->toolchain_label());
+
+ // Copy this target's own dependent and public configs to the list of configs
+ // applying to it.
+ configs_.Append(all_dependent_configs_.begin(), all_dependent_configs_.end());
+ MergePublicConfigsFrom(this, &configs_);
+
+ // Copy public configs from all dependencies into the list of configs
+ // applying to this target (configs_).
+ PullDependentTargetConfigs();
+
+ // Copies public dependencies' public configs to this target's public
+ // configs. These configs have already been applied to this target by
+ // PullDependentTargetConfigs above, along with the public configs from
+ // private deps. This step re-exports them as public configs for targets that
+ // depend on this one.
+ for (const auto& dep : public_deps_) {
+ public_configs_.Append(dep.ptr->public_configs().begin(),
+ dep.ptr->public_configs().end());
+ }
+
+ // Copy our own libs and lib_dirs to the final set. This will be from our
+ // target and all of our configs. We do this specially since these must be
+ // inherited through the dependency tree (other flags don't work this way).
+ //
+ // This needs to happen after we pull dependent target configs for the
+ // public config's libs to be included here. And it needs to happen
+ // before pulling the dependent target libs so the libs are in the correct
+ // order (local ones first, then the dependency's).
+ for (ConfigValuesIterator iter(this); !iter.done(); iter.Next()) {
+ const ConfigValues& cur = iter.cur();
+ all_lib_dirs_.append(cur.lib_dirs().begin(), cur.lib_dirs().end());
+ all_libs_.append(cur.libs().begin(), cur.libs().end());
+ }
+
+ PullRecursiveBundleData();
+ PullDependentTargetLibs();
+ PullRecursiveHardDeps();
+ if (!ResolvePrecompiledHeaders(err))
+ return false;
+
+ FillOutputFiles();
+
+ if (settings()->build_settings()->check_for_bad_items()) {
+ if (!CheckVisibility(err))
+ return false;
+ if (!CheckTestonly(err))
+ return false;
+ if (!CheckNoNestedStaticLibs(err))
+ return false;
+ if (!CheckAssertNoDeps(err))
+ return false;
+ CheckSourcesGenerated();
+ }
+
+ return true;
+}
+
+bool Target::IsBinary() const {
+ return output_type_ == EXECUTABLE ||
+ output_type_ == SHARED_LIBRARY ||
+ output_type_ == LOADABLE_MODULE ||
+ output_type_ == STATIC_LIBRARY ||
+ output_type_ == SOURCE_SET;
+}
+
+bool Target::IsLinkable() const {
+ return output_type_ == STATIC_LIBRARY || output_type_ == SHARED_LIBRARY;
+}
+
+bool Target::IsFinal() const {
+ return output_type_ == EXECUTABLE ||
+ output_type_ == SHARED_LIBRARY ||
+ output_type_ == LOADABLE_MODULE ||
+ output_type_ == ACTION ||
+ output_type_ == ACTION_FOREACH ||
+ output_type_ == COPY_FILES ||
+ output_type_ == CREATE_BUNDLE ||
+ (output_type_ == STATIC_LIBRARY && complete_static_lib_);
+}
+
+DepsIteratorRange Target::GetDeps(DepsIterationType type) const {
+ if (type == DEPS_LINKED) {
+ return DepsIteratorRange(DepsIterator(
+ &public_deps_, &private_deps_, nullptr));
+ }
+ // All deps.
+ return DepsIteratorRange(DepsIterator(
+ &public_deps_, &private_deps_, &data_deps_));
+}
+
+std::string Target::GetComputedOutputName() const {
+ DCHECK(toolchain_)
+ << "Toolchain must be specified before getting the computed output name.";
+
+ const std::string& name = output_name_.empty() ? label().name()
+ : output_name_;
+
+ std::string result;
+ const Tool* tool = toolchain_->GetToolForTargetFinalOutput(this);
+ if (tool) {
+ // Only add the prefix if the name doesn't already have it and it's not
+ // being overridden.
+ if (!output_prefix_override_ &&
+ !base::StartsWith(name, tool->output_prefix(),
+ base::CompareCase::SENSITIVE))
+ result = tool->output_prefix();
+ }
+ result.append(name);
+ return result;
+}
+
+bool Target::SetToolchain(const Toolchain* toolchain, Err* err) {
+ DCHECK(!toolchain_);
+ DCHECK_NE(UNKNOWN, output_type_);
+ toolchain_ = toolchain;
+
+ const Tool* tool = toolchain->GetToolForTargetFinalOutput(this);
+ if (tool)
+ return true;
+
+ // Tool not specified for this target type.
+ if (err) {
+ *err = Err(defined_from(), "This target uses an undefined tool.",
+ base::StringPrintf(
+ "The target %s\n"
+ "of type \"%s\"\n"
+ "uses toolchain %s\n"
+ "which doesn't have the tool \"%s\" defined.\n\n"
+ "Alas, I can not continue.",
+ label().GetUserVisibleName(false).c_str(),
+ GetStringForOutputType(output_type_),
+ label().GetToolchainLabel().GetUserVisibleName(false).c_str(),
+ Toolchain::ToolTypeToName(
+ toolchain->GetToolTypeForTargetFinalOutput(this)).c_str()));
+ }
+ return false;
+}
+
+bool Target::GetOutputFilesForSource(const SourceFile& source,
+ Toolchain::ToolType* computed_tool_type,
+ std::vector<OutputFile>* outputs) const {
+ outputs->clear();
+ *computed_tool_type = Toolchain::TYPE_NONE;
+
+ SourceFileType file_type = GetSourceFileType(source);
+ if (file_type == SOURCE_UNKNOWN)
+ return false;
+ if (file_type == SOURCE_O) {
+ // Object files just get passed to the output and not compiled.
+ outputs->push_back(OutputFile(settings()->build_settings(), source));
+ return true;
+ }
+
+ *computed_tool_type = toolchain_->GetToolTypeForSourceType(file_type);
+ if (*computed_tool_type == Toolchain::TYPE_NONE)
+ return false; // No tool for this file (it's a header file or something).
+ const Tool* tool = toolchain_->GetTool(*computed_tool_type);
+ if (!tool)
+ return false; // Tool does not apply for this toolchain/file.
+
+ // Figure out what output(s) this compiler produces.
+ SubstitutionWriter::ApplyListToCompilerAsOutputFile(
+ this, source, tool->outputs(), outputs);
+ return !outputs->empty();
+}
+
+void Target::PullDependentTargetConfigsFrom(const Target* dep) {
+ MergeAllDependentConfigsFrom(dep, &configs_, &all_dependent_configs_);
+ MergePublicConfigsFrom(dep, &configs_);
+}
+
+void Target::PullDependentTargetConfigs() {
+ for (const auto& pair : GetDeps(DEPS_LINKED))
+ PullDependentTargetConfigsFrom(pair.ptr);
+}
+
+void Target::PullDependentTargetLibsFrom(const Target* dep, bool is_public) {
+ // Direct dependent libraries.
+ if (dep->output_type() == STATIC_LIBRARY ||
+ dep->output_type() == SHARED_LIBRARY ||
+ dep->output_type() == SOURCE_SET)
+ inherited_libraries_.Append(dep, is_public);
+
+ if (dep->output_type() == SHARED_LIBRARY) {
+ // Shared library dependencies are inherited across public shared
+ // library boundaries.
+ //
+ // In this case:
+ // EXE -> INTERMEDIATE_SHLIB --[public]--> FINAL_SHLIB
+ // The EXE will also link to FINAL_SHLIB. The public dependency means
+ // that the EXE can use the headers in FINAL_SHLIB so the FINAL_SHLIB
+ // will need to appear on EXE's link line.
+ //
+ // However, if the dependency is private:
+ // EXE -> INTERMEDIATE_SHLIB --[private]--> FINAL_SHLIB
+ // the dependency will not be propagated because INTERMEDIATE_SHLIB is
+ // not granting permission to call functions from FINAL_SHLIB. If EXE
+ // wants to use functions (and link to) FINAL_SHLIB, it will need to do
+ // so explicitly.
+ //
+ // Static libraries and source sets aren't inherited across shared
+ // library boundaries because they will be linked into the shared
+ // library.
+ inherited_libraries_.AppendPublicSharedLibraries(
+ dep->inherited_libraries(), is_public);
+ } else if (!dep->IsFinal()) {
+ // The current target isn't linked, so propagate linked deps and
+ // libraries up the dependency tree.
+ inherited_libraries_.AppendInherited(dep->inherited_libraries(), is_public);
+
+ // Inherited library settings.
+ all_lib_dirs_.append(dep->all_lib_dirs());
+ all_libs_.append(dep->all_libs());
+ }
+}
+
+void Target::PullDependentTargetLibs() {
+ for (const auto& dep : public_deps_)
+ PullDependentTargetLibsFrom(dep.ptr, true);
+ for (const auto& dep : private_deps_)
+ PullDependentTargetLibsFrom(dep.ptr, false);
+}
+
+void Target::PullRecursiveHardDeps() {
+ for (const auto& pair : GetDeps(DEPS_LINKED)) {
+ // Direct hard dependencies.
+ if (pair.ptr->hard_dep())
+ recursive_hard_deps_.insert(pair.ptr);
+
+ // Recursive hard dependencies of all dependencies.
+ recursive_hard_deps_.insert(pair.ptr->recursive_hard_deps().begin(),
+ pair.ptr->recursive_hard_deps().end());
+ }
+}
+
+void Target::PullRecursiveBundleData() {
+ for (const auto& pair : GetDeps(DEPS_LINKED)) {
+ // Don't propagate bundle_data once they are added to a bundle.
+ if (pair.ptr->output_type() == CREATE_BUNDLE)
+ continue;
+
+ // Direct dependency on a bundle_data target.
+ if (pair.ptr->output_type() == BUNDLE_DATA)
+ bundle_data_.AddBundleData(pair.ptr);
+
+ // Recursive bundle_data information from all dependencies.
+ for (const auto& target : pair.ptr->bundle_data().bundle_deps())
+ bundle_data_.AddBundleData(target);
+ }
+
+ bundle_data_.OnTargetResolved(this);
+}
+
+void Target::FillOutputFiles() {
+ const Tool* tool = toolchain_->GetToolForTargetFinalOutput(this);
+ bool check_tool_outputs = false;
+ switch (output_type_) {
+ case GROUP:
+ case BUNDLE_DATA:
+ case CREATE_BUNDLE:
+ case SOURCE_SET:
+ case COPY_FILES:
+ case ACTION:
+ case ACTION_FOREACH: {
+ // These don't get linked to and use stamps which should be the first
+ // entry in the outputs. These stamps are named
+ // "<target_out_dir>/<targetname>.stamp".
+ dependency_output_file_ = GetTargetOutputDirAsOutputFile(this);
+ dependency_output_file_.value().append(GetComputedOutputName());
+ dependency_output_file_.value().append(".stamp");
+ break;
+ }
+ case EXECUTABLE:
+ case LOADABLE_MODULE:
+ // Executables and loadable modules don't get linked to, but the first
+ // output is used for dependency management.
+ CHECK_GE(tool->outputs().list().size(), 1u);
+ check_tool_outputs = true;
+ dependency_output_file_ =
+ SubstitutionWriter::ApplyPatternToLinkerAsOutputFile(
+ this, tool, tool->outputs().list()[0]);
+ break;
+ case STATIC_LIBRARY:
+ // Static libraries both have dependencies and linking going off of the
+ // first output.
+ CHECK(tool->outputs().list().size() >= 1);
+ check_tool_outputs = true;
+ link_output_file_ = dependency_output_file_ =
+ SubstitutionWriter::ApplyPatternToLinkerAsOutputFile(
+ this, tool, tool->outputs().list()[0]);
+ break;
+ case SHARED_LIBRARY:
+ CHECK(tool->outputs().list().size() >= 1);
+ check_tool_outputs = true;
+ if (tool->link_output().empty() && tool->depend_output().empty()) {
+ // Default behavior, use the first output file for both.
+ link_output_file_ = dependency_output_file_ =
+ SubstitutionWriter::ApplyPatternToLinkerAsOutputFile(
+ this, tool, tool->outputs().list()[0]);
+ } else {
+ // Use the tool-specified ones.
+ if (!tool->link_output().empty()) {
+ link_output_file_ =
+ SubstitutionWriter::ApplyPatternToLinkerAsOutputFile(
+ this, tool, tool->link_output());
+ }
+ if (!tool->depend_output().empty()) {
+ dependency_output_file_ =
+ SubstitutionWriter::ApplyPatternToLinkerAsOutputFile(
+ this, tool, tool->depend_output());
+ }
+ }
+ if (tool->runtime_link_output().empty()) {
+ runtime_link_output_file_ = link_output_file_;
+ } else {
+ runtime_link_output_file_ =
+ SubstitutionWriter::ApplyPatternToLinkerAsOutputFile(
+ this, tool, tool->runtime_link_output());
+ }
+ break;
+ case UNKNOWN:
+ default:
+ NOTREACHED();
+ }
+
+ // Count anything generated from bundle_data dependencies.
+ if (output_type_ == CREATE_BUNDLE)
+ bundle_data_.GetOutputFiles(settings(), &computed_outputs_);
+
+ // Count all outputs from this tool as something generated by this target.
+ if (check_tool_outputs) {
+ SubstitutionWriter::ApplyListToLinkerAsOutputFile(
+ this, tool, tool->outputs(), &computed_outputs_);
+
+ // Output names aren't canonicalized in the same way that source files
+ // are. For example, the tool outputs often use
+ // {{some_var}}/{{output_name}} which expands to "./foo", but this won't
+ // match "foo" which is what we'll compute when converting a SourceFile to
+ // an OutputFile.
+ for (auto& out : computed_outputs_)
+ NormalizePath(&out.value());
+ }
+
+ // Also count anything the target has declared to be an output.
+ std::vector<SourceFile> outputs_as_sources;
+ action_values_.GetOutputsAsSourceFiles(this, &outputs_as_sources);
+ for (const SourceFile& out : outputs_as_sources)
+ computed_outputs_.push_back(OutputFile(settings()->build_settings(), out));
+}
+
+bool Target::ResolvePrecompiledHeaders(Err* err) {
+ // Precompiled headers are stored on a ConfigValues struct. This way, the
+ // build can set all the precompiled header settings in a config and apply
+ // it to many targets. Likewise, the precompiled header values may be
+ // specified directly on a target.
+ //
+ // Unlike other values on configs which are lists that just get concatenated,
+ // the precompiled header settings are unique values. We allow them to be
+ // specified anywhere, but if they are specified in more than one place all
+ // places must match.
+
+ // Track where the current settings came from for issuing errors.
+ const Label* pch_header_settings_from = NULL;
+ if (config_values_.has_precompiled_headers())
+ pch_header_settings_from = &label();
+
+ for (ConfigValuesIterator iter(this); !iter.done(); iter.Next()) {
+ if (!iter.GetCurrentConfig())
+ continue; // Skip the one on the target itself.
+
+ const Config* config = iter.GetCurrentConfig();
+ const ConfigValues& cur = config->resolved_values();
+ if (!cur.has_precompiled_headers())
+ continue; // This one has no precompiled header info, skip.
+
+ if (config_values_.has_precompiled_headers()) {
+ // Already have a precompiled header values, the settings must match.
+ if (config_values_.precompiled_header() != cur.precompiled_header() ||
+ config_values_.precompiled_source() != cur.precompiled_source()) {
+ *err = Err(defined_from(),
+ "Precompiled header setting conflict.",
+ "The target " + label().GetUserVisibleName(false) + "\n"
+ "has conflicting precompiled header settings.\n"
+ "\n"
+ "From " + pch_header_settings_from->GetUserVisibleName(false) +
+ "\n header: " + config_values_.precompiled_header() +
+ "\n source: " + config_values_.precompiled_source().value() +
+ "\n\n"
+ "From " + config->label().GetUserVisibleName(false) +
+ "\n header: " + cur.precompiled_header() +
+ "\n source: " + cur.precompiled_source().value());
+ return false;
+ }
+ } else {
+ // Have settings from a config, apply them to ourselves.
+ pch_header_settings_from = &config->label();
+ config_values_.set_precompiled_header(cur.precompiled_header());
+ config_values_.set_precompiled_source(cur.precompiled_source());
+ }
+ }
+
+ return true;
+}
+
+bool Target::CheckVisibility(Err* err) const {
+ for (const auto& pair : GetDeps(DEPS_ALL)) {
+ if (!Visibility::CheckItemVisibility(this, pair.ptr, err))
+ return false;
+ }
+ return true;
+}
+
+bool Target::CheckTestonly(Err* err) const {
+ // If the current target is marked testonly, it can include both testonly
+ // and non-testonly targets, so there's nothing to check.
+ if (testonly())
+ return true;
+
+ // Verify no deps have "testonly" set.
+ for (const auto& pair : GetDeps(DEPS_ALL)) {
+ if (pair.ptr->testonly()) {
+ *err = MakeTestOnlyError(this, pair.ptr);
+ return false;
+ }
+ }
+
+ return true;
+}
+
+bool Target::CheckNoNestedStaticLibs(Err* err) const {
+ // If the current target is not a complete static library, it can depend on
+ // static library targets with no problem.
+ if (!(output_type() == Target::STATIC_LIBRARY && complete_static_lib()))
+ return true;
+
+ // Verify no deps are static libraries.
+ for (const auto& pair : GetDeps(DEPS_ALL)) {
+ if (pair.ptr->output_type() == Target::STATIC_LIBRARY) {
+ *err = MakeStaticLibDepsError(this, pair.ptr);
+ return false;
+ }
+ }
+
+ // Verify no inherited libraries are static libraries.
+ for (const auto& lib : inherited_libraries().GetOrdered()) {
+ if (lib->output_type() == Target::STATIC_LIBRARY) {
+ *err = MakeStaticLibDepsError(this, lib);
+ return false;
+ }
+ }
+ return true;
+}
+
+bool Target::CheckAssertNoDeps(Err* err) const {
+ if (assert_no_deps_.empty())
+ return true;
+
+ std::set<const Target*> visited;
+ std::string failure_path_str;
+ const LabelPattern* failure_pattern = nullptr;
+
+ if (!RecursiveCheckAssertNoDeps(this, false, assert_no_deps_, &visited,
+ &failure_path_str, &failure_pattern)) {
+ *err = Err(defined_from(), "assert_no_deps failed.",
+ label().GetUserVisibleName(false) +
+ " has an assert_no_deps entry:\n " +
+ failure_pattern->Describe() +
+ "\nwhich fails for the dependency path:\n" +
+ failure_path_str);
+ return false;
+ }
+ return true;
+}
+
+void Target::CheckSourcesGenerated() const {
+ // Checks that any inputs or sources to this target that are in the build
+ // directory are generated by a target that this one transitively depends on
+ // in some way. We already guarantee that all generated files are written
+ // to the build dir.
+ //
+ // See Scheduler::AddUnknownGeneratedInput's declaration for more.
+ for (const SourceFile& file : sources_)
+ CheckSourceGenerated(file);
+ for (const SourceFile& file : inputs_)
+ CheckSourceGenerated(file);
+ // TODO(agrieve): Check all_libs_ here as well (those that are source files).
+ // http://crbug.com/571731
+}
+
+void Target::CheckSourceGenerated(const SourceFile& source) const {
+ if (!IsStringInOutputDir(settings()->build_settings()->build_dir(),
+ source.value()))
+ return; // Not in output dir, this is OK.
+
+ // Tell the scheduler about unknown files. This will be noted for later so
+ // the list of files written by the GN build itself (often response files)
+ // can be filtered out of this list.
+ OutputFile out_file(settings()->build_settings(), source);
+ std::set<const Target*> seen_targets;
+ bool check_data_deps = false;
+ bool consider_object_files = false;
+ if (!EnsureFileIsGeneratedByDependency(this, out_file, true,
+ consider_object_files, check_data_deps,
+ &seen_targets)) {
+ seen_targets.clear();
+ // Allow dependency to be through data_deps for files generated by gn.
+ check_data_deps = g_scheduler->IsFileGeneratedByWriteRuntimeDeps(out_file);
+ // Check object files (much slower and very rare) only if the "normal"
+ // output check failed.
+ consider_object_files = !check_data_deps;
+ if (!EnsureFileIsGeneratedByDependency(this, out_file, true,
+ consider_object_files,
+ check_data_deps, &seen_targets))
+ g_scheduler->AddUnknownGeneratedInput(this, source);
+ }
+}
diff --git a/chromium/tools/gn/target.h b/chromium/tools/gn/target.h
new file mode 100644
index 00000000000..fdd6f1a6821
--- /dev/null
+++ b/chromium/tools/gn/target.h
@@ -0,0 +1,395 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_TARGET_H_
+#define TOOLS_GN_TARGET_H_
+
+#include <set>
+#include <string>
+#include <vector>
+
+#include "base/gtest_prod_util.h"
+#include "base/logging.h"
+#include "base/macros.h"
+#include "tools/gn/action_values.h"
+#include "tools/gn/bundle_data.h"
+#include "tools/gn/config_values.h"
+#include "tools/gn/inherited_libraries.h"
+#include "tools/gn/item.h"
+#include "tools/gn/label_pattern.h"
+#include "tools/gn/label_ptr.h"
+#include "tools/gn/lib_file.h"
+#include "tools/gn/ordered_set.h"
+#include "tools/gn/output_file.h"
+#include "tools/gn/source_file.h"
+#include "tools/gn/toolchain.h"
+#include "tools/gn/unique_vector.h"
+
+class DepsIteratorRange;
+class InputFile;
+class Settings;
+class Token;
+class Toolchain;
+
+class Target : public Item {
+ public:
+ enum OutputType {
+ UNKNOWN,
+ GROUP,
+ EXECUTABLE,
+ SHARED_LIBRARY,
+ LOADABLE_MODULE,
+ STATIC_LIBRARY,
+ SOURCE_SET,
+ COPY_FILES,
+ ACTION,
+ ACTION_FOREACH,
+ BUNDLE_DATA,
+ CREATE_BUNDLE,
+ };
+
+ enum DepsIterationType {
+ DEPS_ALL, // Iterates through all public, private, and data deps.
+ DEPS_LINKED, // Iterates through all non-data dependencies.
+ };
+
+ typedef std::vector<SourceFile> FileList;
+ typedef std::vector<std::string> StringVector;
+
+ Target(const Settings* settings, const Label& label);
+ ~Target() override;
+
+ // Returns a string naming the output type.
+ static const char* GetStringForOutputType(OutputType type);
+
+ // Item overrides.
+ Target* AsTarget() override;
+ const Target* AsTarget() const override;
+ bool OnResolved(Err* err) override;
+
+ OutputType output_type() const { return output_type_; }
+ void set_output_type(OutputType t) { output_type_ = t; }
+
+ // True for targets that compile source code (all types of libraries and
+ // executables).
+ bool IsBinary() const;
+
+ // Can be linked into other targets.
+ bool IsLinkable() const;
+
+ // True if the target links dependencies rather than propagating them up the graph.
+ // This is also true of action and copy steps even though they don't link
+ // dependencies, because they also don't propagate libraries up.
+ bool IsFinal() const;
+
+ // Will be the empty string to use the target label as the output name.
+ // See GetComputedOutputName().
+ const std::string& output_name() const { return output_name_; }
+ void set_output_name(const std::string& name) { output_name_ = name; }
+
+ // Returns the output name for this target, which is the output_name if
+ // specified, or the target label if not.
+ //
+ // Because this depends on the tool for this target, the toolchain must
+ // have been set before calling.
+ std::string GetComputedOutputName() const;
+
+ bool output_prefix_override() const { return output_prefix_override_; }
+ void set_output_prefix_override(bool prefix_override) {
+ output_prefix_override_ = prefix_override;
+ }
+
+ // The output extension is really a tri-state: unset (output_extension_set
+ // is false and the string is empty, meaning the default extension should be
+ // used), the output extension is set but empty (output should have no
+ // extension) and the output extension is set but nonempty (use the given
+ // extension).
+ const std::string& output_extension() const { return output_extension_; }
+ void set_output_extension(const std::string& extension) {
+ output_extension_ = extension;
+ output_extension_set_ = true;
+ }
+ bool output_extension_set() const {
+ return output_extension_set_;
+ }
+
+ const FileList& sources() const { return sources_; }
+ FileList& sources() { return sources_; }
+
+ // Set to true when all sources are public. This is the default. In this case
+ // the public headers list should be empty.
+ bool all_headers_public() const { return all_headers_public_; }
+ void set_all_headers_public(bool p) { all_headers_public_ = p; }
+
+ // When all_headers_public is false, this is the list of public headers. It
+ // could be empty which would mean no headers are public.
+ const FileList& public_headers() const { return public_headers_; }
+ FileList& public_headers() { return public_headers_; }
+
+ // Whether this target's includes should be checked by "gn check".
+ bool check_includes() const { return check_includes_; }
+ void set_check_includes(bool ci) { check_includes_ = ci; }
+
+ // Whether this static_library target should have code linked in.
+ bool complete_static_lib() const { return complete_static_lib_; }
+ void set_complete_static_lib(bool complete) {
+ DCHECK_EQ(STATIC_LIBRARY, output_type_);
+ complete_static_lib_ = complete;
+ }
+
+ bool testonly() const { return testonly_; }
+ void set_testonly(bool value) { testonly_ = value; }
+
+ OutputFile write_runtime_deps_output() const {
+ return write_runtime_deps_output_;
+ }
+ void set_write_runtime_deps_output(const OutputFile& value) {
+ write_runtime_deps_output_ = value;
+ }
+
+ // Compile-time extra dependencies.
+ const FileList& inputs() const { return inputs_; }
+ FileList& inputs() { return inputs_; }
+
+ // Runtime dependencies. These are "file-like things" that can either be
+ // directories or files. They do not need to exist, these are just passed as
+ // runtime dependencies to external test systems as necessary.
+ const std::vector<std::string>& data() const { return data_; }
+ std::vector<std::string>& data() { return data_; }
+
+ // Information about the bundle. Only valid for CREATE_BUNDLE target after
+ // they have been resolved.
+ const BundleData& bundle_data() const { return bundle_data_; }
+ BundleData& bundle_data() { return bundle_data_; }
+
+ // Returns true if targets depending on this one should have an order
+ // dependency.
+ bool hard_dep() const {
+ return output_type_ == ACTION ||
+ output_type_ == ACTION_FOREACH ||
+ output_type_ == COPY_FILES ||
+ output_type_ == CREATE_BUNDLE;
+ }
+
+ // Returns the iterator range which can be used in range-based for loops
+ // to iterate over multiple types of deps in one loop:
+ // for (const auto& pair : target->GetDeps(Target::DEPS_ALL)) ...
+ DepsIteratorRange GetDeps(DepsIterationType type) const;
+
+ // Linked private dependencies.
+ const LabelTargetVector& private_deps() const { return private_deps_; }
+ LabelTargetVector& private_deps() { return private_deps_; }
+
+ // Linked public dependencies.
+ const LabelTargetVector& public_deps() const { return public_deps_; }
+ LabelTargetVector& public_deps() { return public_deps_; }
+
+ // Non-linked dependencies.
+ const LabelTargetVector& data_deps() const { return data_deps_; }
+ LabelTargetVector& data_deps() { return data_deps_; }
+
+ // List of configs that this class inherits settings from. Once a target is
+ // resolved, this will also list all-dependent and public configs.
+ const UniqueVector<LabelConfigPair>& configs() const { return configs_; }
+ UniqueVector<LabelConfigPair>& configs() { return configs_; }
+
+ // List of configs that all dependencies (direct and indirect) of this
+ // target get. These configs are not added to this target. Note that due
+ // to the way this is computed, there may be duplicates in this list.
+ const UniqueVector<LabelConfigPair>& all_dependent_configs() const {
+ return all_dependent_configs_;
+ }
+ UniqueVector<LabelConfigPair>& all_dependent_configs() {
+ return all_dependent_configs_;
+ }
+
+ // List of configs that targets depending directly on this one get. These
+ // configs are also added to this target.
+ const UniqueVector<LabelConfigPair>& public_configs() const {
+ return public_configs_;
+ }
+ UniqueVector<LabelConfigPair>& public_configs() {
+ return public_configs_;
+ }
+
+ // Dependencies that can include files from this target.
+ const std::set<Label>& allow_circular_includes_from() const {
+ return allow_circular_includes_from_;
+ }
+ std::set<Label>& allow_circular_includes_from() {
+ return allow_circular_includes_from_;
+ }
+
+ const InheritedLibraries& inherited_libraries() const {
+ return inherited_libraries_;
+ }
+
+ // This config represents the configuration set directly on this target.
+ ConfigValues& config_values() { return config_values_; }
+ const ConfigValues& config_values() const { return config_values_; }
+
+ ActionValues& action_values() { return action_values_; }
+ const ActionValues& action_values() const { return action_values_; }
+
+ const OrderedSet<SourceDir>& all_lib_dirs() const { return all_lib_dirs_; }
+ const OrderedSet<LibFile>& all_libs() const { return all_libs_; }
+
+ const std::set<const Target*>& recursive_hard_deps() const {
+ return recursive_hard_deps_;
+ }
+
+ std::vector<LabelPattern>& assert_no_deps() {
+ return assert_no_deps_;
+ }
+ const std::vector<LabelPattern>& assert_no_deps() const {
+ return assert_no_deps_;
+ }
+
+ // The toolchain is only known once this target is resolved (all of its
+ // dependencies are known). They will be null until then. Generally, this can
+ // only be used during target writing.
+ const Toolchain* toolchain() const { return toolchain_; }
+
+ // Sets the toolchain. The toolchain must include a tool for this target
+ // or the error will be set and the function will return false. Unusually,
+ // this function's "err" output is optional since this is commonly used
+ // frequently by unit tests which become needlessly verbose.
+ bool SetToolchain(const Toolchain* toolchain, Err* err = nullptr);
+
+ // Once this target has been resolved, all outputs from the target will be
+ // listed here. This will include things listed in the "outputs" for an
+ // action or a copy step, and the output library or executable file(s) from
+ // binary targets.
+ //
+ // It will NOT include stamp files and object files.
+ const std::vector<OutputFile>& computed_outputs() const {
+ return computed_outputs_;
+ }
+
+ // Returns outputs from this target. The link output file is the one that
+ // other targets link to when they depend on this target. This will only be
+ // valid for libraries and will be empty for all other target types.
+ //
+ // The dependency output file is the file that should be used to express
+ // a dependency on this one. It could be the same as the link output file
+ // (this will be the case for static libraries). For shared libraries it
+ // could be the same or different than the link output file, depending on the
+ // system. For actions this will be the stamp file.
+ //
+ // These are only known once the target is resolved and will be empty before
+ // that. This is a cache of the files to prevent every target that depends on
+ // a given library from recomputing the same pattern.
+ const OutputFile& link_output_file() const {
+ return link_output_file_;
+ }
+ const OutputFile& dependency_output_file() const {
+ return dependency_output_file_;
+ }
+ const OutputFile& runtime_link_output_file() const {
+ return runtime_link_output_file_;
+ }
+
+ // Computes the set of output files resulting from compiling the given source
+ // file. If the file can be compiled and the tool exists, fills the outputs
+ // in and writes the tool type to computed_tool_type. If the file is not
+ // compilable, returns false.
+ //
+ // The function can succeed with a "NONE" tool type for object files which
+ // are just passed to the output. The output will always be overwritten, not
+ // appended to.
+ bool GetOutputFilesForSource(const SourceFile& source,
+ Toolchain::ToolType* computed_tool_type,
+ std::vector<OutputFile>* outputs) const;
+
+ private:
+ FRIEND_TEST_ALL_PREFIXES(Target, ResolvePrecompiledHeaders);
+
+ // Pulls necessary information from dependencies to this one when all
+ // dependencies have been resolved.
+ void PullDependentTargetConfigsFrom(const Target* dep);
+ void PullDependentTargetConfigs();
+ void PullDependentTargetLibsFrom(const Target* dep, bool is_public);
+ void PullDependentTargetLibs();
+ void PullRecursiveHardDeps();
+ void PullRecursiveBundleData();
+
+ // Fills the link and dependency output files when a target is resolved.
+ void FillOutputFiles();
+
+ // Checks precompiled headers from configs and makes sure the resulting
+ // values are in config_values_.
+ bool ResolvePrecompiledHeaders(Err* err);
+
+ // Validates the given thing when a target is resolved.
+ bool CheckVisibility(Err* err) const;
+ bool CheckTestonly(Err* err) const;
+ bool CheckNoNestedStaticLibs(Err* err) const;
+ bool CheckAssertNoDeps(Err* err) const;
+ void CheckSourcesGenerated() const;
+ void CheckSourceGenerated(const SourceFile& source) const;
+
+ OutputType output_type_;
+ std::string output_name_;
+ bool output_prefix_override_;
+ std::string output_extension_;
+ bool output_extension_set_;
+
+ FileList sources_;
+ bool all_headers_public_;
+ FileList public_headers_;
+ bool check_includes_;
+ bool complete_static_lib_;
+ bool testonly_;
+ FileList inputs_;
+ std::vector<std::string> data_;
+ BundleData bundle_data_;
+ OutputFile write_runtime_deps_output_;
+
+ LabelTargetVector private_deps_;
+ LabelTargetVector public_deps_;
+ LabelTargetVector data_deps_;
+
+ // See getters for more info.
+ UniqueVector<LabelConfigPair> configs_;
+ UniqueVector<LabelConfigPair> all_dependent_configs_;
+ UniqueVector<LabelConfigPair> public_configs_;
+
+ std::set<Label> allow_circular_includes_from_;
+
+ // Static libraries, shared libraries, and source sets from transitive deps
+ // that need to be linked.
+ InheritedLibraries inherited_libraries_;
+
+ // These libs and dirs are inherited from statically linked deps and all
+ // configs applying to this target.
+ OrderedSet<SourceDir> all_lib_dirs_;
+ OrderedSet<LibFile> all_libs_;
+
+ // All hard deps from this target and all dependencies. Filled in when this
+ // target is marked resolved. This will not include the current target.
+ std::set<const Target*> recursive_hard_deps_;
+
+ std::vector<LabelPattern> assert_no_deps_;
+
+ // Used for all binary targets. The precompiled header values in this struct
+ // will be resolved to the ones to use for this target, if precompiled
+ // headers are used.
+ ConfigValues config_values_;
+
+ // Used for action[_foreach] targets.
+ ActionValues action_values_;
+
+ // Toolchain used by this target. Null until target is resolved.
+ const Toolchain* toolchain_;
+
+ // Output files. Empty until the target is resolved.
+ std::vector<OutputFile> computed_outputs_;
+ OutputFile link_output_file_;
+ OutputFile dependency_output_file_;
+ OutputFile runtime_link_output_file_;
+
+ DISALLOW_COPY_AND_ASSIGN(Target);
+};
+
+#endif // TOOLS_GN_TARGET_H_
diff --git a/chromium/tools/gn/target_generator.cc b/chromium/tools/gn/target_generator.cc
new file mode 100644
index 00000000000..2132a5b06bd
--- /dev/null
+++ b/chromium/tools/gn/target_generator.cc
@@ -0,0 +1,408 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/target_generator.h"
+
+#include <stddef.h>
+
+#include "tools/gn/action_target_generator.h"
+#include "tools/gn/binary_target_generator.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/bundle_data_target_generator.h"
+#include "tools/gn/config.h"
+#include "tools/gn/copy_target_generator.h"
+#include "tools/gn/create_bundle_target_generator.h"
+#include "tools/gn/err.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/group_target_generator.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/token.h"
+#include "tools/gn/value.h"
+#include "tools/gn/value_extractors.h"
+#include "tools/gn/variables.h"
+
+TargetGenerator::TargetGenerator(Target* target,
+ Scope* scope,
+ const FunctionCallNode* function_call,
+ Err* err)
+ : target_(target),
+ scope_(scope),
+ function_call_(function_call),
+ err_(err) {
+}
+
+TargetGenerator::~TargetGenerator() {
+}
+
+void TargetGenerator::Run() {
+ // All target types use these.
+ if (!FillDependentConfigs())
+ return;
+
+ if (!FillData())
+ return;
+
+ if (!FillDependencies())
+ return;
+
+ if (!FillTestonly())
+ return;
+
+ if (!FillAssertNoDeps())
+ return;
+
+ if (!Visibility::FillItemVisibility(target_, scope_, err_))
+ return;
+
+ if (!FillWriteRuntimeDeps())
+ return;
+
+ // Do type-specific generation.
+ DoRun();
+}
+
+// static
+void TargetGenerator::GenerateTarget(Scope* scope,
+ const FunctionCallNode* function_call,
+ const std::vector<Value>& args,
+ const std::string& output_type,
+ Err* err) {
+ // Name is the argument to the function.
+ if (args.size() != 1u || args[0].type() != Value::STRING) {
+ *err = Err(function_call,
+ "Target generator requires one string argument.",
+ "Otherwise I'm not sure what to call this target.");
+ return;
+ }
+
+ // The location of the target is the directory name with no slash at the end.
+ // FIXME(brettw) validate name.
+ const Label& toolchain_label = ToolchainLabelForScope(scope);
+ Label label(scope->GetSourceDir(), args[0].string_value(),
+ toolchain_label.dir(), toolchain_label.name());
+
+ if (g_scheduler->verbose_logging())
+ g_scheduler->Log("Defining target", label.GetUserVisibleName(true));
+
+ std::unique_ptr<Target> target(new Target(scope->settings(), label));
+ target->set_defined_from(function_call);
+
+ // Create and call out to the proper generator.
+ if (output_type == functions::kBundleData) {
+ BundleDataTargetGenerator generator(
+ target.get(), scope, function_call, err);
+ generator.Run();
+ } else if (output_type == functions::kCreateBundle) {
+ CreateBundleTargetGenerator generator(target.get(), scope, function_call,
+ err);
+ generator.Run();
+ } else if (output_type == functions::kCopy) {
+ CopyTargetGenerator generator(target.get(), scope, function_call, err);
+ generator.Run();
+ } else if (output_type == functions::kAction) {
+ ActionTargetGenerator generator(target.get(), scope, function_call,
+ Target::ACTION, err);
+ generator.Run();
+ } else if (output_type == functions::kActionForEach) {
+ ActionTargetGenerator generator(target.get(), scope, function_call,
+ Target::ACTION_FOREACH, err);
+ generator.Run();
+ } else if (output_type == functions::kExecutable) {
+ BinaryTargetGenerator generator(target.get(), scope, function_call,
+ Target::EXECUTABLE, err);
+ generator.Run();
+ } else if (output_type == functions::kGroup) {
+ GroupTargetGenerator generator(target.get(), scope, function_call, err);
+ generator.Run();
+ } else if (output_type == functions::kLoadableModule) {
+ BinaryTargetGenerator generator(target.get(), scope, function_call,
+ Target::LOADABLE_MODULE, err);
+ generator.Run();
+ } else if (output_type == functions::kSharedLibrary) {
+ BinaryTargetGenerator generator(target.get(), scope, function_call,
+ Target::SHARED_LIBRARY, err);
+ generator.Run();
+ } else if (output_type == functions::kSourceSet) {
+ BinaryTargetGenerator generator(target.get(), scope, function_call,
+ Target::SOURCE_SET, err);
+ generator.Run();
+ } else if (output_type == functions::kStaticLibrary) {
+ BinaryTargetGenerator generator(target.get(), scope, function_call,
+ Target::STATIC_LIBRARY, err);
+ generator.Run();
+ } else {
+ *err = Err(function_call, "Not a known target type",
+ "I am very confused by the target type \"" + output_type + "\"");
+ }
+
+ if (err->has_error())
+ return;
+
+ // Save this target for the file.
+ Scope::ItemVector* collector = scope->GetItemCollector();
+ if (!collector) {
+ *err = Err(function_call, "Can't define a target in this context.");
+ return;
+ }
+ collector->push_back(target.release());
+}
+
+const BuildSettings* TargetGenerator::GetBuildSettings() const {
+ return scope_->settings()->build_settings();
+}
+
+bool TargetGenerator::FillSources() {
+ const Value* value = scope_->GetValue(variables::kSources, true);
+ if (!value)
+ return true;
+
+ Target::FileList dest_sources;
+ if (!ExtractListOfRelativeFiles(scope_->settings()->build_settings(), *value,
+ scope_->GetSourceDir(), &dest_sources, err_))
+ return false;
+ target_->sources().swap(dest_sources);
+ return true;
+}
+
+bool TargetGenerator::FillPublic() {
+ const Value* value = scope_->GetValue(variables::kPublic, true);
+ if (!value)
+ return true;
+
+ // If the public headers are defined, don't default to public.
+ target_->set_all_headers_public(false);
+
+ Target::FileList dest_public;
+ if (!ExtractListOfRelativeFiles(scope_->settings()->build_settings(), *value,
+ scope_->GetSourceDir(), &dest_public, err_))
+ return false;
+ target_->public_headers().swap(dest_public);
+ return true;
+}
+
+bool TargetGenerator::FillInputs() {
+ const Value* value = scope_->GetValue(variables::kInputs, true);
+ if (!value)
+ return true;
+
+ Target::FileList dest_inputs;
+ if (!ExtractListOfRelativeFiles(scope_->settings()->build_settings(), *value,
+ scope_->GetSourceDir(), &dest_inputs, err_))
+ return false;
+ target_->inputs().swap(dest_inputs);
+ return true;
+}
+
+bool TargetGenerator::FillConfigs() {
+ return FillGenericConfigs(variables::kConfigs, &target_->configs());
+}
+
+bool TargetGenerator::FillDependentConfigs() {
+ if (!FillGenericConfigs(variables::kAllDependentConfigs,
+ &target_->all_dependent_configs()))
+ return false;
+
+ if (!FillGenericConfigs(variables::kPublicConfigs,
+ &target_->public_configs()))
+ return false;
+
+ return true;
+}
+
+bool TargetGenerator::FillData() {
+ const Value* value = scope_->GetValue(variables::kData, true);
+ if (!value)
+ return true;
+ if (!value->VerifyTypeIs(Value::LIST, err_))
+ return false;
+
+ const std::vector<Value>& input_list = value->list_value();
+ std::vector<std::string>& output_list = target_->data();
+ output_list.reserve(input_list.size());
+
+ const SourceDir& dir = scope_->GetSourceDir();
+ const std::string& root_path =
+ scope_->settings()->build_settings()->root_path_utf8();
+
+ for (size_t i = 0; i < input_list.size(); i++) {
+ const Value& input = input_list[i];
+ if (!input.VerifyTypeIs(Value::STRING, err_))
+ return false;
+ const std::string& input_str = input.string_value();
+
+ // Treat each input as either a file or a directory, depending on the
+ // last character.
+ if (!input_str.empty() && input_str[input_str.size() - 1] == '/') {
+ // Resolve as directory.
+ SourceDir resolved =
+ dir.ResolveRelativeDir(input, input_str, err_, root_path);
+ if (err_->has_error())
+ return false;
+ output_list.push_back(resolved.value());
+ } else {
+ // Resolve as file.
+ SourceFile resolved = dir.ResolveRelativeFile(input, err_, root_path);
+ if (err_->has_error())
+ return false;
+ output_list.push_back(resolved.value());
+ }
+ }
+ return true;
+}
+
+bool TargetGenerator::FillDependencies() {
+ if (!FillGenericDeps(variables::kDeps, &target_->private_deps()))
+ return false;
+ if (!FillGenericDeps(variables::kPublicDeps, &target_->public_deps()))
+ return false;
+ if (!FillGenericDeps(variables::kDataDeps, &target_->data_deps()))
+ return false;
+
+ // "data_deps" was previously named "datadeps". For backwards-compat, read
+ // the old one if no "data_deps" were specified.
+ if (!scope_->GetValue(variables::kDataDeps, false)) {
+ if (!FillGenericDeps("datadeps", &target_->data_deps()))
+ return false;
+ }
+
+ return true;
+}
+
+bool TargetGenerator::FillTestonly() {
+ const Value* value = scope_->GetValue(variables::kTestonly, true);
+ if (value) {
+ if (!value->VerifyTypeIs(Value::BOOLEAN, err_))
+ return false;
+ target_->set_testonly(value->boolean_value());
+ }
+ return true;
+}
+
+bool TargetGenerator::FillAssertNoDeps() {
+ const Value* value = scope_->GetValue(variables::kAssertNoDeps, true);
+ if (value) {
+ return ExtractListOfLabelPatterns(*value, scope_->GetSourceDir(),
+ &target_->assert_no_deps(), err_);
+ }
+ return true;
+}
+
+bool TargetGenerator::FillOutputs(bool allow_substitutions) {
+  const Value* value = scope_->GetValue(variables::kOutputs, true);
+  if (!value)
+    return true;
+
+  SubstitutionList& outputs = target_->action_values().outputs();
+  if (!outputs.Parse(*value, err_))
+    return false;
+
+  if (!allow_substitutions) {
+    // Verify no substitutions were actually used.
+    if (!outputs.required_types().empty()) {
+      *err_ = Err(*value, "Source expansions not allowed here.",
+          "The outputs of this target used source {{expansions}} but this "
+          "target type\ndoesn't support them. Just express the outputs "
+          "literally.");
+      return false;
+    }
+  }
+
+  // Check the substitutions used are valid for this purpose.
+  if (!EnsureValidSourcesSubstitutions(outputs.required_types(),
+                                       value->origin(), err_))
+    return false;
+
+  // Validate that outputs are in the output dir.
+  CHECK(outputs.list().size() == value->list_value().size());
+  for (size_t i = 0; i < outputs.list().size(); i++) {
+    if (!EnsureSubstitutionIsInOutputDir(outputs.list()[i],
+                                         value->list_value()[i]))
+      return false;
+  }
+  return true;
+}
+
+bool TargetGenerator::FillCheckIncludes() {
+ const Value* value = scope_->GetValue(variables::kCheckIncludes, true);
+ if (!value)
+ return true;
+ if (!value->VerifyTypeIs(Value::BOOLEAN, err_))
+ return false;
+ target_->set_check_includes(value->boolean_value());
+ return true;
+}
+
+bool TargetGenerator::EnsureSubstitutionIsInOutputDir(
+ const SubstitutionPattern& pattern,
+ const Value& original_value) {
+ if (pattern.ranges().empty()) {
+ // Pattern is empty, error out (this prevents weirdness below).
+ *err_ = Err(original_value, "This has an empty value in it.");
+ return false;
+ }
+
+ if (pattern.ranges()[0].type == SUBSTITUTION_LITERAL) {
+ // If the first thing is a literal, it must start with the output dir.
+ if (!EnsureStringIsInOutputDir(
+ GetBuildSettings()->build_dir(),
+ pattern.ranges()[0].literal, original_value.origin(), err_))
+ return false;
+ } else {
+ // Otherwise, the first subrange must be a pattern that expands to
+ // something in the output directory.
+ if (!SubstitutionIsInOutputDir(pattern.ranges()[0].type)) {
+ *err_ = Err(original_value,
+ "File is not inside output directory.",
+ "The given file should be in the output directory. Normally you\n"
+ "would specify\n\"$target_out_dir/foo\" or "
+ "\"{{source_gen_dir}}/foo\".");
+ return false;
+ }
+ }
+
+ return true;
+}
+
+bool TargetGenerator::FillGenericConfigs(const char* var_name,
+ UniqueVector<LabelConfigPair>* dest) {
+ const Value* value = scope_->GetValue(var_name, true);
+ if (value) {
+ ExtractListOfUniqueLabels(*value, scope_->GetSourceDir(),
+ ToolchainLabelForScope(scope_), dest, err_);
+ }
+ return !err_->has_error();
+}
+
+bool TargetGenerator::FillGenericDeps(const char* var_name,
+ LabelTargetVector* dest) {
+ const Value* value = scope_->GetValue(var_name, true);
+ if (value) {
+ ExtractListOfLabels(*value, scope_->GetSourceDir(),
+ ToolchainLabelForScope(scope_), dest, err_);
+ }
+ return !err_->has_error();
+}
+
+bool TargetGenerator::FillWriteRuntimeDeps() {
+ const Value* value = scope_->GetValue(variables::kWriteRuntimeDeps, true);
+ if (!value)
+ return true;
+
+ // Compute the file name and make sure it's in the output dir.
+ SourceFile source_file = scope_->GetSourceDir().ResolveRelativeFile(
+ *value, err_, GetBuildSettings()->root_path_utf8());
+ if (err_->has_error())
+ return false;
+ if (!EnsureStringIsInOutputDir(GetBuildSettings()->build_dir(),
+ source_file.value(), value->origin(), err_))
+ return false;
+ OutputFile output_file(GetBuildSettings(), source_file);
+ target_->set_write_runtime_deps_output(output_file);
+
+ g_scheduler->AddWriteRuntimeDepsTarget(target_);
+ return true;
+}
diff --git a/chromium/tools/gn/target_generator.h b/chromium/tools/gn/target_generator.h
new file mode 100644
index 00000000000..a3bdd58e013
--- /dev/null
+++ b/chromium/tools/gn/target_generator.h
@@ -0,0 +1,85 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_TARGET_GENERATOR_H_
+#define TOOLS_GN_TARGET_GENERATOR_H_
+
+#include <string>
+#include <vector>
+
+#include "base/macros.h"
+#include "tools/gn/label_ptr.h"
+#include "tools/gn/unique_vector.h"
+
+class BuildSettings;
+class Err;
+class FunctionCallNode;
+class Scope;
+class SubstitutionPattern;
+class Value;
+
+// Fills the variables in a Target object from a Scope (the result of a script
+// execution). Target-type-specific derivations of this class will be used
+// for each different type of function call. This class implements the common
+// behavior.
+class TargetGenerator {
+ public:
+ TargetGenerator(Target* target,
+ Scope* scope,
+ const FunctionCallNode* function_call,
+ Err* err);
+ virtual ~TargetGenerator();
+
+ void Run();
+
+ // The function call is the parse tree node that invoked the target.
+ // err() will be set on failure.
+ static void GenerateTarget(Scope* scope,
+ const FunctionCallNode* function_call,
+ const std::vector<Value>& args,
+ const std::string& output_type,
+ Err* err);
+
+ protected:
+ // Derived classes implement this to do type-specific generation.
+ virtual void DoRun() = 0;
+
+ const BuildSettings* GetBuildSettings() const;
+
+ bool FillSources();
+ bool FillPublic();
+ bool FillInputs();
+ bool FillConfigs();
+ bool FillOutputs(bool allow_substitutions);
+ bool FillCheckIncludes();
+
+  // Returns true if the given pattern will expand to a file in the output
+ // directory. If not, returns false and sets the error, blaming the given
+ // Value.
+ bool EnsureSubstitutionIsInOutputDir(const SubstitutionPattern& pattern,
+ const Value& original_value);
+
+ Target* target_;
+ Scope* scope_;
+ const FunctionCallNode* function_call_;
+ Err* err_;
+
+ private:
+ bool FillDependentConfigs(); // Includes all types of dependent configs.
+ bool FillData();
+ bool FillDependencies(); // Includes data dependencies.
+ bool FillTestonly();
+ bool FillAssertNoDeps();
+ bool FillWriteRuntimeDeps();
+
+ // Reads configs/deps from the given var name, and uses the given setting on
+ // the target to save them.
+ bool FillGenericConfigs(const char* var_name,
+ UniqueVector<LabelConfigPair>* dest);
+ bool FillGenericDeps(const char* var_name, LabelTargetVector* dest);
+
+ DISALLOW_COPY_AND_ASSIGN(TargetGenerator);
+};
+
+#endif // TOOLS_GN_TARGET_GENERATOR_H_
diff --git a/chromium/tools/gn/target_unittest.cc b/chromium/tools/gn/target_unittest.cc
new file mode 100644
index 00000000000..e2e41a8708c
--- /dev/null
+++ b/chromium/tools/gn/target_unittest.cc
@@ -0,0 +1,885 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/target.h"
+
+#include <utility>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/config.h"
+#include "tools/gn/scheduler.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/test_with_scope.h"
+#include "tools/gn/toolchain.h"
+
+namespace {
+
+// Asserts that the current global scheduler has a single unknown generated
+// file with the given name from the given target.
+void AssertSchedulerHasOneUnknownFileMatching(const Target* target,
+ const SourceFile& file) {
+ auto unknown = g_scheduler->GetUnknownGeneratedInputs();
+ ASSERT_EQ(1u, unknown.size()); // Should be one unknown file.
+ auto found = unknown.find(file);
+ ASSERT_TRUE(found != unknown.end()) << file.value();
+ EXPECT_TRUE(target == found->second)
+ << "Target doesn't match. Expected\n "
+ << target->label().GetUserVisibleName(false)
+ << "\nBut got\n " << found->second->label().GetUserVisibleName(false);
+}
+
+} // namespace
+
+// Tests that lib[_dir]s are inherited across deps boundaries for static
+// libraries but not executables.
+TEST(Target, LibInheritance) {
+ TestWithScope setup;
+ Err err;
+
+ const LibFile lib("foo");
+ const SourceDir libdir("/foo_dir/");
+
+ // Leaf target with ldflags set.
+ TestTarget z(setup, "//foo:z", Target::STATIC_LIBRARY);
+ z.config_values().libs().push_back(lib);
+ z.config_values().lib_dirs().push_back(libdir);
+ ASSERT_TRUE(z.OnResolved(&err));
+
+ // All lib[_dir]s should be set when target is resolved.
+ ASSERT_EQ(1u, z.all_libs().size());
+ EXPECT_EQ(lib, z.all_libs()[0]);
+ ASSERT_EQ(1u, z.all_lib_dirs().size());
+ EXPECT_EQ(libdir, z.all_lib_dirs()[0]);
+
+ // Shared library target should inherit the libs from the static library
+ // and its own. Its own flag should be before the inherited one.
+ const LibFile second_lib("bar");
+ const SourceDir second_libdir("/bar_dir/");
+ TestTarget shared(setup, "//foo:shared", Target::SHARED_LIBRARY);
+ shared.config_values().libs().push_back(second_lib);
+ shared.config_values().lib_dirs().push_back(second_libdir);
+ shared.private_deps().push_back(LabelTargetPair(&z));
+ ASSERT_TRUE(shared.OnResolved(&err));
+
+ ASSERT_EQ(2u, shared.all_libs().size());
+ EXPECT_EQ(second_lib, shared.all_libs()[0]);
+ EXPECT_EQ(lib, shared.all_libs()[1]);
+ ASSERT_EQ(2u, shared.all_lib_dirs().size());
+ EXPECT_EQ(second_libdir, shared.all_lib_dirs()[0]);
+ EXPECT_EQ(libdir, shared.all_lib_dirs()[1]);
+
+ // Executable target shouldn't get either by depending on shared.
+ TestTarget exec(setup, "//foo:exec", Target::EXECUTABLE);
+ exec.private_deps().push_back(LabelTargetPair(&shared));
+ ASSERT_TRUE(exec.OnResolved(&err));
+ EXPECT_EQ(0u, exec.all_libs().size());
+ EXPECT_EQ(0u, exec.all_lib_dirs().size());
+}
+
+// Test all_dependent_configs and public_config inheritance.
+TEST(Target, DependentConfigs) {
+ TestWithScope setup;
+ Err err;
+
+ // Set up a dependency chain of a -> b -> c
+ TestTarget a(setup, "//foo:a", Target::EXECUTABLE);
+ TestTarget b(setup, "//foo:b", Target::STATIC_LIBRARY);
+ TestTarget c(setup, "//foo:c", Target::STATIC_LIBRARY);
+ a.private_deps().push_back(LabelTargetPair(&b));
+ b.private_deps().push_back(LabelTargetPair(&c));
+
+ // Normal non-inherited config.
+ Config config(setup.settings(), Label(SourceDir("//foo/"), "config"));
+ ASSERT_TRUE(config.OnResolved(&err));
+ c.configs().push_back(LabelConfigPair(&config));
+
+ // All dependent config.
+ Config all(setup.settings(), Label(SourceDir("//foo/"), "all"));
+ ASSERT_TRUE(all.OnResolved(&err));
+ c.all_dependent_configs().push_back(LabelConfigPair(&all));
+
+ // Direct dependent config.
+ Config direct(setup.settings(), Label(SourceDir("//foo/"), "direct"));
+ ASSERT_TRUE(direct.OnResolved(&err));
+ c.public_configs().push_back(LabelConfigPair(&direct));
+
+ ASSERT_TRUE(c.OnResolved(&err));
+ ASSERT_TRUE(b.OnResolved(&err));
+ ASSERT_TRUE(a.OnResolved(&err));
+
+ // B should have gotten both dependent configs from C.
+ ASSERT_EQ(2u, b.configs().size());
+ EXPECT_EQ(&all, b.configs()[0].ptr);
+ EXPECT_EQ(&direct, b.configs()[1].ptr);
+ ASSERT_EQ(1u, b.all_dependent_configs().size());
+ EXPECT_EQ(&all, b.all_dependent_configs()[0].ptr);
+
+ // A should have just gotten the "all" dependent config from C.
+ ASSERT_EQ(1u, a.configs().size());
+ EXPECT_EQ(&all, a.configs()[0].ptr);
+ EXPECT_EQ(&all, a.all_dependent_configs()[0].ptr);
+
+  // Making an alternate A and B with B forwarding the direct dependents.
+ TestTarget a_fwd(setup, "//foo:a_fwd", Target::EXECUTABLE);
+ TestTarget b_fwd(setup, "//foo:b_fwd", Target::STATIC_LIBRARY);
+ a_fwd.private_deps().push_back(LabelTargetPair(&b_fwd));
+ b_fwd.private_deps().push_back(LabelTargetPair(&c));
+
+ ASSERT_TRUE(b_fwd.OnResolved(&err));
+ ASSERT_TRUE(a_fwd.OnResolved(&err));
+
+  // A_fwd should have gotten just the all_dependent config.
+ ASSERT_EQ(1u, a_fwd.configs().size());
+ EXPECT_EQ(&all, a_fwd.configs()[0].ptr);
+ ASSERT_EQ(1u, a_fwd.all_dependent_configs().size());
+ EXPECT_EQ(&all, a_fwd.all_dependent_configs()[0].ptr);
+}
+
+TEST(Target, InheritLibs) {
+ TestWithScope setup;
+ Err err;
+
+ // Create a dependency chain:
+ // A (executable) -> B (shared lib) -> C (static lib) -> D (source set)
+ TestTarget a(setup, "//foo:a", Target::EXECUTABLE);
+ TestTarget b(setup, "//foo:b", Target::SHARED_LIBRARY);
+ TestTarget c(setup, "//foo:c", Target::STATIC_LIBRARY);
+ TestTarget d(setup, "//foo:d", Target::SOURCE_SET);
+ a.private_deps().push_back(LabelTargetPair(&b));
+ b.private_deps().push_back(LabelTargetPair(&c));
+ c.private_deps().push_back(LabelTargetPair(&d));
+
+ ASSERT_TRUE(d.OnResolved(&err));
+ ASSERT_TRUE(c.OnResolved(&err));
+ ASSERT_TRUE(b.OnResolved(&err));
+ ASSERT_TRUE(a.OnResolved(&err));
+
+ // C should have D in its inherited libs.
+ std::vector<const Target*> c_inherited = c.inherited_libraries().GetOrdered();
+ ASSERT_EQ(1u, c_inherited.size());
+ EXPECT_EQ(&d, c_inherited[0]);
+
+ // B should have C and D in its inherited libs.
+ std::vector<const Target*> b_inherited = b.inherited_libraries().GetOrdered();
+ ASSERT_EQ(2u, b_inherited.size());
+ EXPECT_EQ(&c, b_inherited[0]);
+ EXPECT_EQ(&d, b_inherited[1]);
+
+ // A should have B in its inherited libs, but not any others (the shared
+ // library will include the static library and source set).
+ std::vector<const Target*> a_inherited = a.inherited_libraries().GetOrdered();
+ ASSERT_EQ(1u, a_inherited.size());
+ EXPECT_EQ(&b, a_inherited[0]);
+}
+
+TEST(Target, InheritCompleteStaticLib) {
+ TestWithScope setup;
+ Err err;
+
+ // Create a dependency chain:
+ // A (executable) -> B (complete static lib) -> C (source set)
+ TestTarget a(setup, "//foo:a", Target::EXECUTABLE);
+ TestTarget b(setup, "//foo:b", Target::STATIC_LIBRARY);
+ b.set_complete_static_lib(true);
+ TestTarget c(setup, "//foo:c", Target::SOURCE_SET);
+ a.public_deps().push_back(LabelTargetPair(&b));
+ b.public_deps().push_back(LabelTargetPair(&c));
+
+ ASSERT_TRUE(c.OnResolved(&err));
+ ASSERT_TRUE(b.OnResolved(&err));
+ ASSERT_TRUE(a.OnResolved(&err));
+
+ // B should have C in its inherited libs.
+ std::vector<const Target*> b_inherited = b.inherited_libraries().GetOrdered();
+ ASSERT_EQ(1u, b_inherited.size());
+ EXPECT_EQ(&c, b_inherited[0]);
+
+ // A should have B in its inherited libs, but not any others (the complete
+ // static library will include the source set).
+ std::vector<const Target*> a_inherited = a.inherited_libraries().GetOrdered();
+ EXPECT_EQ(1u, a_inherited.size());
+ EXPECT_EQ(&b, a_inherited[0]);
+}
+
+TEST(Target, InheritCompleteStaticLibNoDirectStaticLibDeps) {
+ TestWithScope setup;
+ Err err;
+
+ // Create a dependency chain:
+ // A (complete static lib) -> B (static lib)
+ TestTarget a(setup, "//foo:a", Target::STATIC_LIBRARY);
+ a.set_complete_static_lib(true);
+ TestTarget b(setup, "//foo:b", Target::STATIC_LIBRARY);
+
+ a.public_deps().push_back(LabelTargetPair(&b));
+ ASSERT_TRUE(b.OnResolved(&err));
+ ASSERT_FALSE(a.OnResolved(&err));
+}
+
+TEST(Target, InheritCompleteStaticLibNoIheritedStaticLibDeps) {
+ TestWithScope setup;
+ Err err;
+
+ // Create a dependency chain:
+ // A (complete static lib) -> B (source set) -> C (static lib)
+ TestTarget a(setup, "//foo:a", Target::STATIC_LIBRARY);
+ a.set_complete_static_lib(true);
+ TestTarget b(setup, "//foo:b", Target::SOURCE_SET);
+ TestTarget c(setup, "//foo:c", Target::STATIC_LIBRARY);
+
+ a.public_deps().push_back(LabelTargetPair(&b));
+ b.public_deps().push_back(LabelTargetPair(&c));
+
+ ASSERT_TRUE(c.OnResolved(&err));
+ ASSERT_TRUE(b.OnResolved(&err));
+ ASSERT_FALSE(a.OnResolved(&err));
+}
+
+TEST(Target, NoActionDepPropgation) {
+ TestWithScope setup;
+ Err err;
+
+ // Create a dependency chain:
+ // A (exe) -> B (action) -> C (source_set)
+ {
+ TestTarget a(setup, "//foo:a", Target::EXECUTABLE);
+ TestTarget b(setup, "//foo:b", Target::ACTION);
+ TestTarget c(setup, "//foo:c", Target::SOURCE_SET);
+
+ a.private_deps().push_back(LabelTargetPair(&b));
+ b.private_deps().push_back(LabelTargetPair(&c));
+
+ ASSERT_TRUE(c.OnResolved(&err));
+ ASSERT_TRUE(b.OnResolved(&err));
+ ASSERT_TRUE(a.OnResolved(&err));
+
+ // The executable should not have inherited the source set across the
+ // action.
+ std::vector<const Target*> libs = a.inherited_libraries().GetOrdered();
+ ASSERT_TRUE(libs.empty());
+ }
+}
+
+TEST(Target, GetComputedOutputName) {
+  TestWithScope setup;
+  Err err;
+
+  // Basic target with no prefix (executable type tool in the TestWithScope has
+  // no prefix) or output name.
+  TestTarget basic(setup, "//foo:bar", Target::EXECUTABLE);
+  ASSERT_TRUE(basic.OnResolved(&err));
+  EXPECT_EQ("bar", basic.GetComputedOutputName());
+
+  // Target with no prefix but an output name.
+  TestTarget with_name(setup, "//foo:bar", Target::EXECUTABLE);
+  with_name.set_output_name("myoutput");
+  ASSERT_TRUE(with_name.OnResolved(&err));
+  EXPECT_EQ("myoutput", with_name.GetComputedOutputName());
+
+  // Target with a "lib" prefix (the static library tool in the TestWithScope
+  // should specify a "lib" output prefix).
+  TestTarget with_prefix(setup, "//foo:bar", Target::STATIC_LIBRARY);
+  ASSERT_TRUE(with_prefix.OnResolved(&err));
+  EXPECT_EQ("libbar", with_prefix.GetComputedOutputName());
+
+  // Target with a "lib" prefix that already has it applied. The prefix should
+  // not duplicate something already in the target name.
+  TestTarget dup_prefix(setup, "//foo:bar", Target::STATIC_LIBRARY);
+  dup_prefix.set_output_name("libbar");
+  ASSERT_TRUE(dup_prefix.OnResolved(&err));
+  EXPECT_EQ("libbar", dup_prefix.GetComputedOutputName());
+
+  // Target with an output prefix override should not have a prefix.
+  TestTarget override_prefix(setup, "//foo:bar", Target::SHARED_LIBRARY);
+  override_prefix.set_output_prefix_override(true);
+  ASSERT_TRUE(override_prefix.OnResolved(&err));  // Was dup_prefix (a bug).
+  EXPECT_EQ("bar", override_prefix.GetComputedOutputName());
+}
+
+// Test visibility failure case.
+TEST(Target, VisibilityFails) {
+ TestWithScope setup;
+ Err err;
+
+ TestTarget b(setup, "//private:b", Target::STATIC_LIBRARY);
+ b.visibility().SetPrivate(b.label().dir());
+ ASSERT_TRUE(b.OnResolved(&err));
+
+ // Make a target depending on "b". The dependency must have an origin to mark
+ // it as user-set so we check visibility. This check should fail.
+ TestTarget a(setup, "//app:a", Target::EXECUTABLE);
+ a.private_deps().push_back(LabelTargetPair(&b));
+ IdentifierNode origin; // Dummy origin.
+ a.private_deps()[0].origin = &origin;
+ ASSERT_FALSE(a.OnResolved(&err));
+}
+
+// Test visibility with a single data_dep.
+TEST(Target, VisibilityDatadeps) {
+ TestWithScope setup;
+ Err err;
+
+ TestTarget b(setup, "//public:b", Target::STATIC_LIBRARY);
+ ASSERT_TRUE(b.OnResolved(&err));
+
+ // Make a target depending on "b". The dependency must have an origin to mark
+ // it as user-set so we check visibility. This check should fail.
+ TestTarget a(setup, "//app:a", Target::EXECUTABLE);
+ a.data_deps().push_back(LabelTargetPair(&b));
+ IdentifierNode origin; // Dummy origin.
+ a.data_deps()[0].origin = &origin;
+ ASSERT_TRUE(a.OnResolved(&err)) << err.help_text();
+}
+
+// Tests that A -> Group -> B where the group is visible from A but B isn't,
+// passes visibility even though the group's deps get expanded into A.
+TEST(Target, VisibilityGroup) {
+  TestWithScope setup;
+  Err err;
+
+  IdentifierNode origin; // Dummy origin.
+
+  // B has private visibility. This lets the group see it since the group is in
+  // the same directory.
+  TestTarget b(setup, "//private:b", Target::STATIC_LIBRARY);
+  b.visibility().SetPrivate(b.label().dir());
+  ASSERT_TRUE(b.OnResolved(&err));
+
+  // The group has public visibility and depends on b.
+  TestTarget g(setup, "//public:g", Target::GROUP);
+  g.private_deps().push_back(LabelTargetPair(&b));
+  g.private_deps()[0].origin = &origin;
+  ASSERT_TRUE(g.OnResolved(&err));  // Was b.OnResolved (already resolved above).
+
+  // Make a target depending on "g". This should succeed.
+  TestTarget a(setup, "//app:a", Target::EXECUTABLE);
+  a.private_deps().push_back(LabelTargetPair(&g));
+  a.private_deps()[0].origin = &origin;
+  ASSERT_TRUE(a.OnResolved(&err));
+}
+
+// Verifies that only testonly targets can depend on other testonly targets.
+// Many of the above dependency checking cases covered the non-testonly
+// case.
+TEST(Target, Testonly) {
+ TestWithScope setup;
+ Err err;
+
+ // "testlib" is a test-only library.
+ TestTarget testlib(setup, "//test:testlib", Target::STATIC_LIBRARY);
+ testlib.set_testonly(true);
+ ASSERT_TRUE(testlib.OnResolved(&err));
+
+ // "test" is a test-only executable depending on testlib, this is OK.
+ TestTarget test(setup, "//test:test", Target::EXECUTABLE);
+ test.set_testonly(true);
+ test.private_deps().push_back(LabelTargetPair(&testlib));
+ ASSERT_TRUE(test.OnResolved(&err));
+
+ // "product" is a non-test depending on testlib. This should fail.
+ TestTarget product(setup, "//app:product", Target::EXECUTABLE);
+ product.set_testonly(false);
+ product.private_deps().push_back(LabelTargetPair(&testlib));
+ ASSERT_FALSE(product.OnResolved(&err));
+}
+
+TEST(Target, PublicConfigs) {
+ TestWithScope setup;
+ Err err;
+
+ Label pub_config_label(SourceDir("//a/"), "pubconfig");
+ Config pub_config(setup.settings(), pub_config_label);
+ LibFile lib_name("testlib");
+ pub_config.own_values().libs().push_back(lib_name);
+ ASSERT_TRUE(pub_config.OnResolved(&err));
+
+ // This is the destination target that has a public config.
+ TestTarget dest(setup, "//a:a", Target::SOURCE_SET);
+ dest.public_configs().push_back(LabelConfigPair(&pub_config));
+ ASSERT_TRUE(dest.OnResolved(&err));
+
+ // This target has a public dependency on dest.
+ TestTarget pub(setup, "//a:pub", Target::SOURCE_SET);
+ pub.public_deps().push_back(LabelTargetPair(&dest));
+ ASSERT_TRUE(pub.OnResolved(&err));
+
+ // Depending on the target with the public dependency should forward dest's
+  // public configs on to the current target.
+ TestTarget dep_on_pub(setup, "//a:dop", Target::SOURCE_SET);
+ dep_on_pub.private_deps().push_back(LabelTargetPair(&pub));
+ ASSERT_TRUE(dep_on_pub.OnResolved(&err));
+ ASSERT_EQ(1u, dep_on_pub.configs().size());
+ EXPECT_EQ(&pub_config, dep_on_pub.configs()[0].ptr);
+
+ // Libs have special handling, check that they were forwarded from the
+ // public config to all_libs.
+ ASSERT_EQ(1u, dep_on_pub.all_libs().size());
+ ASSERT_EQ(lib_name, dep_on_pub.all_libs()[0]);
+
+  // This target has a private dependency on dest for forwarding configs.
+ TestTarget forward(setup, "//a:f", Target::SOURCE_SET);
+ forward.private_deps().push_back(LabelTargetPair(&dest));
+ ASSERT_TRUE(forward.OnResolved(&err));
+}
+
+// Tests that different link/depend outputs work for solink tools.
+TEST(Target, LinkAndDepOutputs) {
+ TestWithScope setup;
+ Err err;
+
+ Toolchain toolchain(setup.settings(), Label(SourceDir("//tc/"), "tc"));
+
+ std::unique_ptr<Tool> solink_tool(new Tool());
+ solink_tool->set_output_prefix("lib");
+ solink_tool->set_default_output_extension(".so");
+
+ const char kLinkPattern[] =
+ "{{root_out_dir}}/{{target_output_name}}{{output_extension}}";
+ SubstitutionPattern link_output = SubstitutionPattern::MakeForTest(
+ kLinkPattern);
+ solink_tool->set_link_output(link_output);
+
+ const char kDependPattern[] =
+ "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.TOC";
+ SubstitutionPattern depend_output = SubstitutionPattern::MakeForTest(
+ kDependPattern);
+ solink_tool->set_depend_output(depend_output);
+
+ solink_tool->set_outputs(SubstitutionList::MakeForTest(
+ kLinkPattern, kDependPattern));
+
+ toolchain.SetTool(Toolchain::TYPE_SOLINK, std::move(solink_tool));
+
+ Target target(setup.settings(), Label(SourceDir("//a/"), "a"));
+ target.set_output_type(Target::SHARED_LIBRARY);
+ target.SetToolchain(&toolchain);
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ EXPECT_EQ("./liba.so", target.link_output_file().value());
+ EXPECT_EQ("./liba.so.TOC", target.dependency_output_file().value());
+ EXPECT_EQ("./liba.so", target.runtime_link_output_file().value());
+}
+
+// Tests that runtime_link output works without an explicit link_output for
+// solink tools.
+TEST(Target, RuntimeLinkOuput) {
+ TestWithScope setup;
+ Err err;
+
+ Toolchain toolchain(setup.settings(), Label(SourceDir("//tc/"), "tc"));
+
+ std::unique_ptr<Tool> solink_tool(new Tool());
+ solink_tool->set_output_prefix("");
+ solink_tool->set_default_output_extension(".dll");
+
+ const char kLibPattern[] =
+ "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.lib";
+ SubstitutionPattern lib_output =
+ SubstitutionPattern::MakeForTest(kLibPattern);
+
+ const char kDllPattern[] =
+ "{{root_out_dir}}/{{target_output_name}}{{output_extension}}";
+ SubstitutionPattern dll_output =
+ SubstitutionPattern::MakeForTest(kDllPattern);
+
+ solink_tool->set_outputs(
+ SubstitutionList::MakeForTest(kLibPattern, kDllPattern));
+
+ solink_tool->set_runtime_link_output(dll_output);
+
+ toolchain.SetTool(Toolchain::TYPE_SOLINK, std::move(solink_tool));
+
+ Target target(setup.settings(), Label(SourceDir("//a/"), "a"));
+ target.set_output_type(Target::SHARED_LIBRARY);
+ target.SetToolchain(&toolchain);
+ ASSERT_TRUE(target.OnResolved(&err));
+
+ EXPECT_EQ("./a.dll.lib", target.link_output_file().value());
+ EXPECT_EQ("./a.dll.lib", target.dependency_output_file().value());
+ EXPECT_EQ("./a.dll", target.runtime_link_output_file().value());
+}
+
+// Shared libraries should be inherited across public shared library
+// boundaries.
+TEST(Target, SharedInheritance) {
+ TestWithScope setup;
+ Err err;
+
+ // Create two leaf shared libraries.
+ TestTarget pub(setup, "//foo:pub", Target::SHARED_LIBRARY);
+ ASSERT_TRUE(pub.OnResolved(&err));
+
+ TestTarget priv(setup, "//foo:priv", Target::SHARED_LIBRARY);
+ ASSERT_TRUE(priv.OnResolved(&err));
+
+ // Intermediate shared library with the leaf shared libraries as
+ // dependencies, one public, one private.
+ TestTarget inter(setup, "//foo:inter", Target::SHARED_LIBRARY);
+ inter.public_deps().push_back(LabelTargetPair(&pub));
+ inter.private_deps().push_back(LabelTargetPair(&priv));
+ ASSERT_TRUE(inter.OnResolved(&err));
+
+ // The intermediate shared library should have both "pub" and "priv" in its
+ // inherited libraries.
+ std::vector<const Target*> inter_inherited =
+ inter.inherited_libraries().GetOrdered();
+ ASSERT_EQ(2u, inter_inherited.size());
+ EXPECT_EQ(&pub, inter_inherited[0]);
+ EXPECT_EQ(&priv, inter_inherited[1]);
+
+ // Make a toplevel executable target depending on the intermediate one.
+ TestTarget exe(setup, "//foo:exe", Target::SHARED_LIBRARY);
+ exe.private_deps().push_back(LabelTargetPair(&inter));
+ ASSERT_TRUE(exe.OnResolved(&err));
+
+ // The exe's inherited libraries should be "inter" (because it depended
+ // directly on it) and "pub" (because inter depended publicly on it).
+ std::vector<const Target*> exe_inherited =
+ exe.inherited_libraries().GetOrdered();
+ ASSERT_EQ(2u, exe_inherited.size());
+ EXPECT_EQ(&inter, exe_inherited[0]);
+ EXPECT_EQ(&pub, exe_inherited[1]);
+}
+
+TEST(Target, GeneratedInputs) {
+ Scheduler scheduler;
+ TestWithScope setup;
+ Err err;
+
+ SourceFile generated_file("//out/Debug/generated.cc");
+
+ // This target has a generated input and no dependency makes it.
+ TestTarget non_existent_generator(setup, "//foo:non_existent_generator",
+ Target::EXECUTABLE);
+ non_existent_generator.sources().push_back(generated_file);
+ EXPECT_TRUE(non_existent_generator.OnResolved(&err)) << err.message();
+ AssertSchedulerHasOneUnknownFileMatching(&non_existent_generator,
+ generated_file);
+ scheduler.ClearUnknownGeneratedInputsAndWrittenFiles();
+
+ // Make a target that generates the file.
+ TestTarget generator(setup, "//foo:generator", Target::ACTION);
+ generator.action_values().outputs() =
+ SubstitutionList::MakeForTest(generated_file.value().c_str());
+ err = Err();
+ EXPECT_TRUE(generator.OnResolved(&err)) << err.message();
+
+ // A target that depends on the generator that uses the file as a source
+ // should be OK. This uses a private dep (will be used later).
+ TestTarget existent_generator(setup, "//foo:existent_generator",
+ Target::SHARED_LIBRARY);
+ existent_generator.sources().push_back(generated_file);
+ existent_generator.private_deps().push_back(LabelTargetPair(&generator));
+ EXPECT_TRUE(existent_generator.OnResolved(&err)) << err.message();
+ EXPECT_TRUE(scheduler.GetUnknownGeneratedInputs().empty());
+
+ // A target that depends on the previous one should *not* be allowed to
+ // use the generated file, because existent_generator used private deps.
+ // This is:
+ // indirect_private --> existent_generator --[private]--> generator
+ TestTarget indirect_private(setup, "//foo:indirect_private",
+ Target::EXECUTABLE);
+ indirect_private.sources().push_back(generated_file);
+ indirect_private.public_deps().push_back(
+ LabelTargetPair(&existent_generator));
+ EXPECT_TRUE(indirect_private.OnResolved(&err));
+ AssertSchedulerHasOneUnknownFileMatching(&indirect_private, generated_file);
+ scheduler.ClearUnknownGeneratedInputsAndWrittenFiles();
+
+ // Now make a chain like the above but with all public deps, it should be OK.
+ TestTarget existent_public(setup, "//foo:existent_public",
+ Target::SHARED_LIBRARY);
+ existent_public.public_deps().push_back(LabelTargetPair(&generator));
+ EXPECT_TRUE(existent_public.OnResolved(&err)) << err.message();
+ TestTarget indirect_public(setup, "//foo:indirect_public",
+ Target::EXECUTABLE);
+ indirect_public.sources().push_back(generated_file);
+ indirect_public.public_deps().push_back(LabelTargetPair(&existent_public));
+ EXPECT_TRUE(indirect_public.OnResolved(&err)) << err.message();
+ EXPECT_TRUE(scheduler.GetUnknownGeneratedInputs().empty());
+}
+
+// This is sort of a Scheduler test, but is related to the above test more.
+TEST(Target, WriteFileGeneratedInputs) {
+ Scheduler scheduler;
+ TestWithScope setup;
+ Err err;
+
+ SourceFile generated_file("//out/Debug/generated.data");
+
+ // This target has a generated input and no dependency makes it.
+ TestTarget non_existent_generator(setup, "//foo:non_existent_generator",
+ Target::EXECUTABLE);
+ non_existent_generator.sources().push_back(generated_file);
+ EXPECT_TRUE(non_existent_generator.OnResolved(&err));
+ AssertSchedulerHasOneUnknownFileMatching(&non_existent_generator,
+ generated_file);
+ scheduler.ClearUnknownGeneratedInputsAndWrittenFiles();
+
+  // This target has a generated file and we've declared we write it.
+ TestTarget existent_generator(setup, "//foo:existent_generator",
+ Target::EXECUTABLE);
+ existent_generator.sources().push_back(generated_file);
+ EXPECT_TRUE(existent_generator.OnResolved(&err));
+ scheduler.AddWrittenFile(generated_file);
+
+ // Should be OK.
+ EXPECT_TRUE(scheduler.GetUnknownGeneratedInputs().empty());
+}
+
+TEST(Target, WriteRuntimeDepsGeneratedInputs) {
+ Scheduler scheduler;
+ TestWithScope setup;
+ Err err;
+
+ SourceFile source_file("//out/Debug/generated.runtime_deps");
+ OutputFile output_file(setup.build_settings(), source_file);
+
+ TestTarget generator(setup, "//foo:generator", Target::EXECUTABLE);
+ generator.set_write_runtime_deps_output(output_file);
+ g_scheduler->AddWriteRuntimeDepsTarget(&generator);
+
+ TestTarget middle_data_dep(setup, "//foo:middle", Target::EXECUTABLE);
+ middle_data_dep.data_deps().push_back(LabelTargetPair(&generator));
+
+ // This target has a generated input and no dependency makes it.
+ TestTarget dep_missing(setup, "//foo:no_dep", Target::EXECUTABLE);
+ dep_missing.sources().push_back(source_file);
+ EXPECT_TRUE(dep_missing.OnResolved(&err));
+ AssertSchedulerHasOneUnknownFileMatching(&dep_missing, source_file);
+ scheduler.ClearUnknownGeneratedInputsAndWrittenFiles();
+
+  // This target has a generated file and we've directly depended on it.
+ TestTarget dep_present(setup, "//foo:with_dep", Target::EXECUTABLE);
+ dep_present.sources().push_back(source_file);
+ dep_present.private_deps().push_back(LabelTargetPair(&generator));
+ EXPECT_TRUE(dep_present.OnResolved(&err));
+ EXPECT_TRUE(scheduler.GetUnknownGeneratedInputs().empty());
+
+  // This target has a generated file and we've indirectly depended on it
+ // via data_deps.
+ TestTarget dep_indirect(setup, "//foo:with_dep", Target::EXECUTABLE);
+ dep_indirect.sources().push_back(source_file);
+ dep_indirect.data_deps().push_back(LabelTargetPair(&middle_data_dep));
+ EXPECT_TRUE(dep_indirect.OnResolved(&err));
+ AssertSchedulerHasOneUnknownFileMatching(&dep_indirect, source_file);
+ scheduler.ClearUnknownGeneratedInputsAndWrittenFiles();
+
+  // This target has a generated file and we've directly depended on it
+ // via data_deps.
+ TestTarget data_dep_present(setup, "//foo:with_dep", Target::EXECUTABLE);
+ data_dep_present.sources().push_back(source_file);
+ data_dep_present.data_deps().push_back(LabelTargetPair(&generator));
+ EXPECT_TRUE(data_dep_present.OnResolved(&err));
+ EXPECT_TRUE(scheduler.GetUnknownGeneratedInputs().empty());
+}
+
+// Tests that intermediate object files generated by binary targets are also
+// considered generated for the purposes of input checking. Above, we tested
+// the failure cases for generated inputs, so here only test .o files that are
+// present.
+TEST(Target, ObjectGeneratedInputs) {
+ Scheduler scheduler;
+ TestWithScope setup;
+ Err err;
+
+ // This target compiles the source.
+ SourceFile source_file("//source.cc");
+ TestTarget source_generator(setup, "//:source_target", Target::SOURCE_SET);
+ source_generator.sources().push_back(source_file);
+ EXPECT_TRUE(source_generator.OnResolved(&err));
+
+ // This is the object file that the test toolchain generates for the source.
+ SourceFile object_file("//out/Debug/obj/source_target.source.o");
+
+ TestTarget final_target(setup, "//:final", Target::ACTION);
+ final_target.inputs().push_back(object_file);
+ EXPECT_TRUE(final_target.OnResolved(&err));
+
+ AssertSchedulerHasOneUnknownFileMatching(&final_target, object_file);
+}
+
+TEST(Target, ResolvePrecompiledHeaders) {
+ TestWithScope setup;
+ Err err;
+
+ Target target(setup.settings(), Label(SourceDir("//foo/"), "bar"));
+
+ // Target with no settings, no configs, should be a no-op.
+ EXPECT_TRUE(target.ResolvePrecompiledHeaders(&err));
+
+ // Config with PCH values.
+ Config config_1(setup.settings(), Label(SourceDir("//foo/"), "c1"));
+ std::string pch_1("pch.h");
+ SourceFile pcs_1("//pcs.cc");
+ config_1.own_values().set_precompiled_header(pch_1);
+ config_1.own_values().set_precompiled_source(pcs_1);
+ ASSERT_TRUE(config_1.OnResolved(&err));
+ target.configs().push_back(LabelConfigPair(&config_1));
+
+ // No PCH info specified on target, but the config specifies one, the
+ // values should get copied to the target.
+ EXPECT_TRUE(target.ResolvePrecompiledHeaders(&err));
+ EXPECT_EQ(pch_1, target.config_values().precompiled_header());
+ EXPECT_TRUE(target.config_values().precompiled_source() == pcs_1);
+
+ // Now both target and config have matching PCH values. Resolving again
+ // should be a no-op since they all match.
+ EXPECT_TRUE(target.ResolvePrecompiledHeaders(&err));
+ EXPECT_TRUE(target.config_values().precompiled_header() == pch_1);
+ EXPECT_TRUE(target.config_values().precompiled_source() == pcs_1);
+
+ // Second config with different PCH values.
+ Config config_2(setup.settings(), Label(SourceDir("//foo/"), "c2"));
+ std::string pch_2("pch2.h");
+ SourceFile pcs_2("//pcs2.cc");
+ config_2.own_values().set_precompiled_header(pch_2);
+ config_2.own_values().set_precompiled_source(pcs_2);
+ ASSERT_TRUE(config_2.OnResolved(&err));
+ target.configs().push_back(LabelConfigPair(&config_2));
+
+ // This should be an error since they don't match.
+ EXPECT_FALSE(target.ResolvePrecompiledHeaders(&err));
+
+ // Make sure the proper labels are blamed.
+ EXPECT_EQ(
+ "The target //foo:bar\n"
+ "has conflicting precompiled header settings.\n"
+ "\n"
+ "From //foo:bar\n"
+ " header: pch.h\n"
+ " source: //pcs.cc\n"
+ "\n"
+ "From //foo:c2\n"
+ " header: pch2.h\n"
+ " source: //pcs2.cc",
+ err.help_text());
+}
+
+TEST(Target, AssertNoDeps) {
+ TestWithScope setup;
+ Err err;
+
+ // A target.
+ TestTarget a(setup, "//a", Target::SHARED_LIBRARY);
+ ASSERT_TRUE(a.OnResolved(&err));
+
+ // B depends on A and has an assert_no_deps for a random dir.
+ TestTarget b(setup, "//b", Target::SHARED_LIBRARY);
+ b.private_deps().push_back(LabelTargetPair(&a));
+ b.assert_no_deps().push_back(LabelPattern(
+ LabelPattern::RECURSIVE_DIRECTORY, SourceDir("//disallowed/"),
+ std::string(), Label()));
+ ASSERT_TRUE(b.OnResolved(&err));
+
+ LabelPattern disallow_a(LabelPattern::RECURSIVE_DIRECTORY, SourceDir("//a/"),
+ std::string(), Label());
+
+ // C depends on B and disallows depending on A. This should fail.
+ TestTarget c(setup, "//c", Target::EXECUTABLE);
+ c.private_deps().push_back(LabelTargetPair(&b));
+ c.assert_no_deps().push_back(disallow_a);
+ ASSERT_FALSE(c.OnResolved(&err));
+
+ // Validate the error message has the proper path.
+ EXPECT_EQ(
+ "//c:c has an assert_no_deps entry:\n"
+ " //a/*\n"
+ "which fails for the dependency path:\n"
+ " //c:c ->\n"
+ " //b:b ->\n"
+ " //a:a",
+ err.help_text());
+ err = Err();
+
+ // Add an intermediate executable with: exe -> b -> a
+ TestTarget exe(setup, "//exe", Target::EXECUTABLE);
+ exe.private_deps().push_back(LabelTargetPair(&b));
+ ASSERT_TRUE(exe.OnResolved(&err));
+
+ // D depends on the executable and disallows depending on A. Since there is
+ // an intermediate executable, this should be OK.
+ TestTarget d(setup, "//d", Target::EXECUTABLE);
+ d.private_deps().push_back(LabelTargetPair(&exe));
+ d.assert_no_deps().push_back(disallow_a);
+ ASSERT_TRUE(d.OnResolved(&err));
+
+ // A2 disallows depending on anything in its own directory, but the
+ // assertions should not match the target itself so this should be OK.
+ TestTarget a2(setup, "//a:a2", Target::EXECUTABLE);
+ a2.assert_no_deps().push_back(disallow_a);
+ ASSERT_TRUE(a2.OnResolved(&err));
+}
+
+TEST(Target, PullRecursiveBundleData) {
+ TestWithScope setup;
+ Err err;
+
+ // We have the following dependency graph:
+ // A (create_bundle) -> B (bundle_data)
+ // \-> C (create_bundle) -> D (bundle_data)
+ // \-> E (group) -> F (bundle_data)
+ // \-> B (bundle_data)
+ TestTarget a(setup, "//foo:a", Target::CREATE_BUNDLE);
+ TestTarget b(setup, "//foo:b", Target::BUNDLE_DATA);
+ TestTarget c(setup, "//foo:c", Target::CREATE_BUNDLE);
+ TestTarget d(setup, "//foo:d", Target::BUNDLE_DATA);
+ TestTarget e(setup, "//foo:e", Target::GROUP);
+ TestTarget f(setup, "//foo:f", Target::BUNDLE_DATA);
+ a.public_deps().push_back(LabelTargetPair(&b));
+ a.public_deps().push_back(LabelTargetPair(&c));
+ a.public_deps().push_back(LabelTargetPair(&e));
+ c.public_deps().push_back(LabelTargetPair(&d));
+ e.public_deps().push_back(LabelTargetPair(&f));
+ e.public_deps().push_back(LabelTargetPair(&b));
+
+ b.sources().push_back(SourceFile("//foo/b1.txt"));
+ b.sources().push_back(SourceFile("//foo/b2.txt"));
+ b.action_values().outputs() = SubstitutionList::MakeForTest(
+ "{{bundle_resources_dir}}/{{source_file_part}}");
+ ASSERT_TRUE(b.OnResolved(&err));
+
+ d.sources().push_back(SourceFile("//foo/d.txt"));
+ d.action_values().outputs() = SubstitutionList::MakeForTest(
+ "{{bundle_resources_dir}}/{{source_file_part}}");
+ ASSERT_TRUE(d.OnResolved(&err));
+
+ f.sources().push_back(SourceFile("//foo/f1.txt"));
+ f.sources().push_back(SourceFile("//foo/f2.txt"));
+ f.sources().push_back(SourceFile("//foo/f3.txt"));
+ f.sources().push_back(
+ SourceFile("//foo/Foo.xcassets/foo.imageset/Contents.json"));
+ f.sources().push_back(
+ SourceFile("//foo/Foo.xcassets/foo.imageset/FooEmpty-29.png"));
+ f.sources().push_back(
+ SourceFile("//foo/Foo.xcassets/foo.imageset/FooEmpty-29@2x.png"));
+ f.sources().push_back(
+ SourceFile("//foo/Foo.xcassets/foo.imageset/FooEmpty-29@3x.png"));
+ f.action_values().outputs() = SubstitutionList::MakeForTest(
+ "{{bundle_resources_dir}}/{{source_file_part}}");
+ ASSERT_TRUE(f.OnResolved(&err));
+
+ ASSERT_TRUE(e.OnResolved(&err));
+ ASSERT_TRUE(c.OnResolved(&err));
+ ASSERT_TRUE(a.OnResolved(&err));
+
+ // A gets its data from B and F.
+ ASSERT_EQ(a.bundle_data().file_rules().size(), 2u);
+ ASSERT_EQ(a.bundle_data().file_rules()[0].sources().size(), 2u);
+ ASSERT_EQ(a.bundle_data().file_rules()[1].sources().size(), 3u);
+ ASSERT_EQ(a.bundle_data().asset_catalog_sources().size(), 4u);
+ ASSERT_EQ(a.bundle_data().bundle_deps().size(), 2u);
+
+ // C gets its data from D.
+ ASSERT_EQ(c.bundle_data().file_rules().size(), 1u);
+ ASSERT_EQ(c.bundle_data().file_rules()[0].sources().size(), 1u);
+ ASSERT_EQ(c.bundle_data().bundle_deps().size(), 1u);
+
+ // E does not have any bundle_data information but gets a list of
+ // bundle_deps to propagate them during target resolution.
+ ASSERT_TRUE(e.bundle_data().file_rules().empty());
+ ASSERT_TRUE(e.bundle_data().asset_catalog_sources().empty());
+ ASSERT_EQ(e.bundle_data().bundle_deps().size(), 2u);
+}
diff --git a/chromium/tools/gn/template.cc b/chromium/tools/gn/template.cc
new file mode 100644
index 00000000000..8b8ae692bc4
--- /dev/null
+++ b/chromium/tools/gn/template.cc
@@ -0,0 +1,125 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/template.h"
+
+#include <utility>
+
+#include "tools/gn/err.h"
+#include "tools/gn/functions.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/scope_per_file_provider.h"
+#include "tools/gn/value.h"
+
+Template::Template(const Scope* scope, const FunctionCallNode* def)
+ : closure_(scope->MakeClosure()),
+ definition_(def) {
+}
+
+Template::Template(std::unique_ptr<Scope> scope, const FunctionCallNode* def)
+ : closure_(std::move(scope)), definition_(def) {}
+
+Template::~Template() {
+}
+
+Value Template::Invoke(Scope* scope,
+ const FunctionCallNode* invocation,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) const {
+ // Don't allow templates to be executed from imported files. Imports are for
+ // simple values only.
+ if (!EnsureNotProcessingImport(invocation, scope, err))
+ return Value();
+
+ // First run the invocation's block. Need to allocate the scope on the heap
+ // so we can pass ownership to the template.
+ std::unique_ptr<Scope> invocation_scope(new Scope(scope));
+ if (!FillTargetBlockScope(scope, invocation,
+ invocation->function().value().as_string(),
+ block, args, invocation_scope.get(), err))
+ return Value();
+
+ {
+ // Don't allow the block of the template invocation to include other
+ // targets configs, or template invocations. This must only be applied
+ // to the invoker's block rather than the whole function because the
+ // template execution itself must be able to define targets, etc.
+ NonNestableBlock non_nestable(scope, invocation, "template invocation");
+ if (!non_nestable.Enter(err))
+ return Value();
+
+ block->Execute(invocation_scope.get(), err);
+ if (err->has_error())
+ return Value();
+ }
+
+ // Set up the scope to run the template and set the current directory for the
+ // template (which ScopePerFileProvider uses to base the target-related
+ // variables target_gen_dir and target_out_dir on) to be that of the invoker.
+ // This way, files don't have to be rebased and target_*_dir works the way
+  // people expect (otherwise it's too easy to be putting generated files in the
+ // gen dir corresponding to an imported file).
+ Scope template_scope(closure_.get());
+ template_scope.set_source_dir(scope->GetSourceDir());
+
+ ScopePerFileProvider per_file_provider(&template_scope, true);
+
+ // Targets defined in the template go in the collector for the invoking file.
+ template_scope.set_item_collector(scope->GetItemCollector());
+
+ // We jump through some hoops to avoid copying the invocation scope when
+ // setting it in the template scope (since the invocation scope may have
+ // large lists of source files in it and could be expensive to copy).
+ //
+ // Scope.SetValue will copy the value which will in turn copy the scope, but
+ // if we instead create a value and then set the scope on it, the copy can
+ // be avoided.
+ const char kInvoker[] = "invoker";
+ template_scope.SetValue(kInvoker, Value(nullptr, std::unique_ptr<Scope>()),
+ invocation);
+ Value* invoker_value = template_scope.GetMutableValue(kInvoker, false);
+ invoker_value->SetScopeValue(std::move(invocation_scope));
+ template_scope.set_source_dir(scope->GetSourceDir());
+
+ const base::StringPiece target_name("target_name");
+ template_scope.SetValue(target_name,
+ Value(invocation, args[0].string_value()),
+ invocation);
+
+ // Actually run the template code.
+ Value result =
+ definition_->block()->Execute(&template_scope, err);
+ if (err->has_error()) {
+ // If there was an error, append the caller location so the error message
+ // displays a stack trace of how it got here.
+ err->AppendSubErr(Err(invocation, "whence it was called."));
+ return Value();
+ }
+
+ // Check for unused variables in the invocation scope. This will find typos
+ // of things the caller meant to pass to the template but the template didn't
+ // read out.
+ //
+ // This is a bit tricky because it's theoretically possible for the template
+ // to overwrite the value of "invoker" and free the Scope owned by the
+ // value. So we need to look it up again and don't do anything if it doesn't
+ // exist.
+ invoker_value = template_scope.GetMutableValue(kInvoker, false);
+ if (invoker_value && invoker_value->type() == Value::SCOPE) {
+ if (!invoker_value->scope_value()->CheckForUnusedVars(err))
+ return Value();
+ }
+
+ // Check for unused variables in the template itself.
+ if (!template_scope.CheckForUnusedVars(err))
+ return Value();
+
+ return result;
+}
+
+LocationRange Template::GetDefinitionRange() const {
+ return definition_->GetRange();
+}
diff --git a/chromium/tools/gn/template.h b/chromium/tools/gn/template.h
new file mode 100644
index 00000000000..a79d82f368b
--- /dev/null
+++ b/chromium/tools/gn/template.h
@@ -0,0 +1,56 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_TEMPLATE_H_
+#define TOOLS_GN_TEMPLATE_H_
+
+#include <memory>
+#include <vector>
+
+#include "base/memory/ref_counted.h"
+
+class BlockNode;
+class Err;
+class FunctionCallNode;
+class LocationRange;
+class Scope;
+class Value;
+
+// Represents the information associated with a template() call in GN, which
+// includes a closure and the code to run when the template is invoked.
+//
+// This class is immutable so we can reference it from multiple threads without
+// locking. Normally, this will be associated with a .gni file and then a
+// reference will be taken by each .gn file that imports it. These files might
+// execute the template in parallel.
+class Template : public base::RefCountedThreadSafe<Template> {
+ public:
+ // Makes a new closure based on the given scope.
+ Template(const Scope* scope, const FunctionCallNode* def);
+
+ // Takes ownership of a previously-constructed closure.
+ Template(std::unique_ptr<Scope> closure, const FunctionCallNode* def);
+
+ // Invoke the template. The values correspond to the state of the code
+ // invoking the template.
+ Value Invoke(Scope* scope,
+ const FunctionCallNode* invocation,
+ const std::vector<Value>& args,
+ BlockNode* block,
+ Err* err) const;
+
+ // Returns the location range where this template was defined.
+ LocationRange GetDefinitionRange() const;
+
+ private:
+ friend class base::RefCountedThreadSafe<Template>;
+
+ Template();
+ ~Template();
+
+ std::unique_ptr<Scope> closure_;
+ const FunctionCallNode* definition_;
+};
+
+#endif // TOOLS_GN_TEMPLATE_H_
diff --git a/chromium/tools/gn/template_unittest.cc b/chromium/tools/gn/template_unittest.cc
new file mode 100644
index 00000000000..905dc7bb048
--- /dev/null
+++ b/chromium/tools/gn/template_unittest.cc
@@ -0,0 +1,93 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/strings/string_number_conversions.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/test_with_scope.h"
+
+TEST(Template, Basic) {
+ TestWithScope setup;
+ TestParseInput input(
+ "template(\"foo\") {\n"
+ " print(target_name)\n"
+ " print(invoker.bar)\n"
+ "}\n"
+ "foo(\"lala\") {\n"
+ " bar = 42\n"
+ "}");
+ ASSERT_FALSE(input.has_error());
+
+ Err err;
+ input.parsed()->Execute(setup.scope(), &err);
+ ASSERT_FALSE(err.has_error()) << err.message();
+
+ EXPECT_EQ("lala\n42\n", setup.print_output());
+}
+
+TEST(Template, UnusedTargetNameShouldThrowError) {
+ TestWithScope setup;
+ TestParseInput input(
+ "template(\"foo\") {\n"
+ " print(invoker.bar)\n"
+ "}\n"
+ "foo(\"lala\") {\n"
+ " bar = 42\n"
+ "}");
+ ASSERT_FALSE(input.has_error());
+
+ Err err;
+ input.parsed()->Execute(setup.scope(), &err);
+ EXPECT_TRUE(err.has_error());
+}
+
+TEST(Template, UnusedInvokerShouldThrowError) {
+ TestWithScope setup;
+ TestParseInput input(
+ "template(\"foo\") {\n"
+ " print(target_name)\n"
+ "}\n"
+ "foo(\"lala\") {\n"
+ " bar = 42\n"
+ "}");
+ ASSERT_FALSE(input.has_error());
+
+ Err err;
+ input.parsed()->Execute(setup.scope(), &err);
+ EXPECT_TRUE(err.has_error());
+}
+
+TEST(Template, UnusedVarInInvokerShouldThrowError) {
+ TestWithScope setup;
+ TestParseInput input(
+ "template(\"foo\") {\n"
+ " print(target_name)\n"
+ " print(invoker.bar)\n"
+ "}\n"
+ "foo(\"lala\") {\n"
+ " bar = 42\n"
+ " baz = [ \"foo\" ]\n"
+ "}");
+ ASSERT_FALSE(input.has_error());
+
+ Err err;
+ input.parsed()->Execute(setup.scope(), &err);
+ EXPECT_TRUE(err.has_error());
+}
+
+// Previous versions of the template implementation would copy templates by
+// value when it makes a closure. Doing a sequence of them means that every new
+// one copies all previous ones, which gives a significant blow-up in memory.
+// If this test doesn't crash with out-of-memory, it passed.
+TEST(Template, MemoryBlowUp) {
+ TestWithScope setup;
+ std::string code;
+ for (int i = 0; i < 100; i++)
+ code += "template(\"test" + base::IntToString(i) + "\") {}\n";
+
+ TestParseInput input(code);
+
+ Err err;
+ input.parsed()->Execute(setup.scope(), &err);
+ ASSERT_FALSE(input.has_error());
+}
diff --git a/chromium/tools/gn/test_with_scope.cc b/chromium/tools/gn/test_with_scope.cc
new file mode 100644
index 00000000000..32d5dd67585
--- /dev/null
+++ b/chromium/tools/gn/test_with_scope.cc
@@ -0,0 +1,192 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/test_with_scope.h"
+
+#include <utility>
+
+#include "base/bind.h"
+#include "tools/gn/parser.h"
+#include "tools/gn/tokenizer.h"
+
+TestWithScope::TestWithScope()
+ : build_settings_(),
+ settings_(&build_settings_, std::string()),
+ toolchain_(&settings_, Label(SourceDir("//toolchain/"), "default")),
+ scope_(&settings_),
+ scope_progammatic_provider_(&scope_, true) {
+ build_settings_.SetBuildDir(SourceDir("//out/Debug/"));
+ build_settings_.set_print_callback(
+ base::Bind(&TestWithScope::AppendPrintOutput, base::Unretained(this)));
+
+ settings_.set_toolchain_label(toolchain_.label());
+ settings_.set_default_toolchain_label(toolchain_.label());
+
+ SetupToolchain(&toolchain_);
+}
+
+TestWithScope::~TestWithScope() {
+}
+
+Label TestWithScope::ParseLabel(const std::string& str) const {
+ Err err;
+ Label result = Label::Resolve(SourceDir("//"), toolchain_.label(),
+ Value(nullptr, str), &err);
+ CHECK(!err.has_error());
+ return result;
+}
+
+// static
+void TestWithScope::SetupToolchain(Toolchain* toolchain) {
+ Err err;
+
+ // CC
+ std::unique_ptr<Tool> cc_tool(new Tool);
+ SetCommandForTool(
+ "cc {{source}} {{cflags}} {{cflags_c}} {{defines}} {{include_dirs}} "
+ "-o {{output}}",
+ cc_tool.get());
+ cc_tool->set_outputs(SubstitutionList::MakeForTest(
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o"));
+ toolchain->SetTool(Toolchain::TYPE_CC, std::move(cc_tool));
+
+ // CXX
+ std::unique_ptr<Tool> cxx_tool(new Tool);
+ SetCommandForTool(
+ "c++ {{source}} {{cflags}} {{cflags_cc}} {{defines}} {{include_dirs}} "
+ "-o {{output}}",
+ cxx_tool.get());
+ cxx_tool->set_outputs(SubstitutionList::MakeForTest(
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o"));
+ toolchain->SetTool(Toolchain::TYPE_CXX, std::move(cxx_tool));
+
+ // OBJC
+ std::unique_ptr<Tool> objc_tool(new Tool);
+ SetCommandForTool(
+ "objcc {{source}} {{cflags}} {{cflags_objc}} {{defines}} "
+ "{{include_dirs}} -o {{output}}",
+ objc_tool.get());
+ objc_tool->set_outputs(SubstitutionList::MakeForTest(
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o"));
+ toolchain->SetTool(Toolchain::TYPE_OBJC, std::move(objc_tool));
+
+  // OBJCXX
+ std::unique_ptr<Tool> objcxx_tool(new Tool);
+ SetCommandForTool(
+ "objcxx {{source}} {{cflags}} {{cflags_objcc}} {{defines}} "
+ "{{include_dirs}} -o {{output}}",
+ objcxx_tool.get());
+ objcxx_tool->set_outputs(SubstitutionList::MakeForTest(
+ "{{source_out_dir}}/{{target_output_name}}.{{source_name_part}}.o"));
+ toolchain->SetTool(Toolchain::TYPE_OBJCXX, std::move(objcxx_tool));
+
+ // Don't use RC and ASM tools in unit tests yet. Add here if needed.
+
+ // ALINK
+ std::unique_ptr<Tool> alink_tool(new Tool);
+ SetCommandForTool("ar {{output}} {{source}}", alink_tool.get());
+ alink_tool->set_lib_switch("-l");
+ alink_tool->set_lib_dir_switch("-L");
+ alink_tool->set_output_prefix("lib");
+ alink_tool->set_outputs(SubstitutionList::MakeForTest(
+ "{{target_out_dir}}/{{target_output_name}}.a"));
+ toolchain->SetTool(Toolchain::TYPE_ALINK, std::move(alink_tool));
+
+ // SOLINK
+ std::unique_ptr<Tool> solink_tool(new Tool);
+ SetCommandForTool("ld -shared -o {{target_output_name}}.so {{inputs}} "
+ "{{ldflags}} {{libs}}", solink_tool.get());
+ solink_tool->set_lib_switch("-l");
+ solink_tool->set_lib_dir_switch("-L");
+ solink_tool->set_output_prefix("lib");
+ solink_tool->set_default_output_extension(".so");
+ solink_tool->set_outputs(SubstitutionList::MakeForTest(
+ "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"));
+ toolchain->SetTool(Toolchain::TYPE_SOLINK, std::move(solink_tool));
+
+ // SOLINK_MODULE
+ std::unique_ptr<Tool> solink_module_tool(new Tool);
+ SetCommandForTool("ld -bundle -o {{target_output_name}}.so {{inputs}} "
+ "{{ldflags}} {{libs}}", solink_module_tool.get());
+ solink_module_tool->set_lib_switch("-l");
+ solink_module_tool->set_lib_dir_switch("-L");
+ solink_module_tool->set_output_prefix("lib");
+ solink_module_tool->set_default_output_extension(".so");
+ solink_module_tool->set_outputs(SubstitutionList::MakeForTest(
+ "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"));
+ toolchain->SetTool(Toolchain::TYPE_SOLINK_MODULE,
+ std::move(solink_module_tool));
+
+ // LINK
+ std::unique_ptr<Tool> link_tool(new Tool);
+ SetCommandForTool("ld -o {{target_output_name}} {{source}} "
+ "{{ldflags}} {{libs}}", link_tool.get());
+ link_tool->set_lib_switch("-l");
+ link_tool->set_lib_dir_switch("-L");
+ link_tool->set_outputs(SubstitutionList::MakeForTest(
+ "{{root_out_dir}}/{{target_output_name}}"));
+ toolchain->SetTool(Toolchain::TYPE_LINK, std::move(link_tool));
+
+ // STAMP
+ std::unique_ptr<Tool> stamp_tool(new Tool);
+ SetCommandForTool("touch {{output}}", stamp_tool.get());
+ toolchain->SetTool(Toolchain::TYPE_STAMP, std::move(stamp_tool));
+
+ // COPY
+ std::unique_ptr<Tool> copy_tool(new Tool);
+ SetCommandForTool("cp {{source}} {{output}}", copy_tool.get());
+ toolchain->SetTool(Toolchain::TYPE_COPY, std::move(copy_tool));
+
+ // COPY_BUNDLE_DATA
+ std::unique_ptr<Tool> copy_bundle_data_tool(new Tool);
+ SetCommandForTool("cp {{source}} {{output}}", copy_bundle_data_tool.get());
+ toolchain->SetTool(Toolchain::TYPE_COPY_BUNDLE_DATA,
+ std::move(copy_bundle_data_tool));
+
+ // COMPILE_XCASSETS
+ std::unique_ptr<Tool> compile_xcassets_tool(new Tool);
+ SetCommandForTool("touch {{output}}", compile_xcassets_tool.get());
+ toolchain->SetTool(Toolchain::TYPE_COMPILE_XCASSETS,
+ std::move(compile_xcassets_tool));
+
+ toolchain->ToolchainSetupComplete();
+}
+
+// static
+void TestWithScope::SetCommandForTool(const std::string& cmd, Tool* tool) {
+ Err err;
+ SubstitutionPattern command;
+ command.Parse(cmd, nullptr, &err);
+ CHECK(!err.has_error())
+ << "Couldn't parse \"" << cmd << "\", " << "got " << err.message();
+ tool->set_command(command);
+}
+
+void TestWithScope::AppendPrintOutput(const std::string& str) {
+ print_output_.append(str);
+}
+
+TestParseInput::TestParseInput(const std::string& input)
+ : input_file_(SourceFile("//test")) {
+ input_file_.SetContents(input);
+
+ tokens_ = Tokenizer::Tokenize(&input_file_, &parse_err_);
+ if (!parse_err_.has_error())
+ parsed_ = Parser::Parse(tokens_, &parse_err_);
+}
+
+TestParseInput::~TestParseInput() {
+}
+
+TestTarget::TestTarget(const TestWithScope& setup,
+ const std::string& label_string,
+ Target::OutputType type)
+ : Target(setup.settings(), setup.ParseLabel(label_string)) {
+ visibility().SetPublic();
+ set_output_type(type);
+ SetToolchain(setup.toolchain());
+}
+
+TestTarget::~TestTarget() {
+}
diff --git a/chromium/tools/gn/test_with_scope.h b/chromium/tools/gn/test_with_scope.h
new file mode 100644
index 00000000000..fa6fb4b324b
--- /dev/null
+++ b/chromium/tools/gn/test_with_scope.h
@@ -0,0 +1,113 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_TEST_WITH_SCOPE_H_
+#define TOOLS_GN_TEST_WITH_SCOPE_H_
+
+#include <string>
+#include <vector>
+
+#include "base/macros.h"
+#include "tools/gn/build_settings.h"
+#include "tools/gn/err.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/scope_per_file_provider.h"
+#include "tools/gn/settings.h"
+#include "tools/gn/target.h"
+#include "tools/gn/token.h"
+#include "tools/gn/toolchain.h"
+#include "tools/gn/value.h"
+
// A helper class for setting up a Scope that a test can use. It makes a
// toolchain and sets up all the build state.
class TestWithScope {
 public:
  TestWithScope();
  ~TestWithScope();

  BuildSettings* build_settings() { return &build_settings_; }
  Settings* settings() { return &settings_; }
  const Settings* settings() const { return &settings_; }
  Toolchain* toolchain() { return &toolchain_; }
  const Toolchain* toolchain() const { return &toolchain_; }
  Scope* scope() { return &scope_; }

  // This buffer accumulates output from any print() commands executed in the
  // context of this test. Note that the implementation of this is not
  // threadsafe so don't write tests that call print from multiple threads.
  std::string& print_output() { return print_output_; }

  // Parse the given string into a label in the default toolchain. This will
  // assert if the label isn't valid (this is intended for hardcoded labels).
  Label ParseLabel(const std::string& str) const;

  // Fills in the tools for the given toolchain with reasonable default values.
  // The toolchain in this object will be automatically set up with this
  // function, it is exposed to allow tests to get the same functionality for
  // other toolchains they make.
  static void SetupToolchain(Toolchain* toolchain);

  // Sets the given text command on the given tool, parsing it as a
  // substitution pattern. This will assert if the input is malformed. This is
  // designed to help setting up Tools for tests.
  static void SetCommandForTool(const std::string& cmd, Tool* tool);

 private:
  // Sink for print(); installed so tests can capture output (see
  // print_output() above).
  void AppendPrintOutput(const std::string& str);

  BuildSettings build_settings_;
  Settings settings_;
  Toolchain toolchain_;
  Scope scope_;

  // Supplies the scope with built-in variables like root_out_dir.
  // NOTE(review): member name has a typo ("progammatic" -> "programmatic").
  ScopePerFileProvider scope_progammatic_provider_;

  // Accumulated print() output; see print_output().
  std::string print_output_;

  DISALLOW_COPY_AND_ASSIGN(TestWithScope);
};
+
// Helper class to treat some string input as a file.
//
// Instantiate it with the contents you want, be sure to check for error, and
// then you can execute the ParseNode or whatever.
class TestParseInput {
 public:
  explicit TestParseInput(const std::string& input);
  ~TestParseInput();

  // Indicates whether and what error occurred during tokenizing and parsing.
  bool has_error() const { return parse_err_.has_error(); }
  const Err& parse_err() const { return parse_err_; }

  const InputFile& input_file() const { return input_file_; }
  const std::vector<Token>& tokens() const { return tokens_; }
  const ParseNode* parsed() const { return parsed_.get(); }

 private:
  // Backing file; tokens_ reference its contents, so it must stay alive.
  InputFile input_file_;

  std::vector<Token> tokens_;
  std::unique_ptr<ParseNode> parsed_;

  // Holds the first error from either tokenizing or parsing, if any.
  Err parse_err_;

  DISALLOW_COPY_AND_ASSIGN(TestParseInput);
};
+
// Shortcut for creating targets for tests that take the test setup, a pretty-
// style label, and a target type and sets everything up. The target will
// default to public visibility.
class TestTarget : public Target {
 public:
  TestTarget(const TestWithScope& setup,
             const std::string& label_string,
             Target::OutputType type);
  ~TestTarget() override;
};
+
+#endif // TOOLS_GN_TEST_WITH_SCOPE_H_
diff --git a/chromium/tools/gn/token.cc b/chromium/tools/gn/token.cc
new file mode 100644
index 00000000000..6721619d0d6
--- /dev/null
+++ b/chromium/tools/gn/token.cc
@@ -0,0 +1,28 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/token.h"
+
+#include "base/logging.h"
+
// Default-constructed tokens are INVALID placeholders with an empty value.
Token::Token() : type_(INVALID), value_() {
}

// |v| is a non-owning view into the input file's contents; the file must
// outlive this token.
Token::Token(const Location& location,
             Type t,
             const base::StringPiece& v)
    : type_(t),
      value_(v),
      location_(location) {
}

// Copying is cheap: the value is a non-owning StringPiece.
Token::Token(const Token& other) = default;
+
+bool Token::IsIdentifierEqualTo(const char* v) const {
+ return type_ == IDENTIFIER && value_ == v;
+}
+
+bool Token::IsStringEqualTo(const char* v) const {
+ return type_ == STRING && value_ == v;
+}
diff --git a/chromium/tools/gn/token.h b/chromium/tools/gn/token.h
new file mode 100644
index 00000000000..24c4e9c2e10
--- /dev/null
+++ b/chromium/tools/gn/token.h
@@ -0,0 +1,86 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_TOKEN_H_
+#define TOOLS_GN_TOKEN_H_
+
+#include "base/strings/string_piece.h"
+#include "tools/gn/location.h"
+
// A single lexical token produced by the Tokenizer. Holds its classified
// type, a non-owning view of its raw text, and where it appeared.
class Token {
 public:
  enum Type {
    INVALID,
    INTEGER,      // 123
    STRING,       // "blah"
    TRUE_TOKEN,   // Not "TRUE" to avoid collisions with #define in windows.h.
    FALSE_TOKEN,

    // Various operators.
    EQUAL,
    PLUS,
    MINUS,
    PLUS_EQUALS,
    MINUS_EQUALS,
    EQUAL_EQUAL,
    NOT_EQUAL,
    LESS_EQUAL,
    GREATER_EQUAL,
    LESS_THAN,
    GREATER_THAN,
    BOOLEAN_AND,
    BOOLEAN_OR,
    BANG,
    DOT,

    LEFT_PAREN,
    RIGHT_PAREN,
    LEFT_BRACKET,
    RIGHT_BRACKET,
    LEFT_BRACE,
    RIGHT_BRACE,

    IF,
    ELSE,
    IDENTIFIER, // foo
    COMMA,  // ,
    UNCLASSIFIED_COMMENT,  // #...\n, of unknown style (will be converted
                           // to one below)
    LINE_COMMENT,      // #...\n on a line alone.
    SUFFIX_COMMENT,    // #...\n on a line following other code.
    BLOCK_COMMENT,     // #...\n line comment, but free-standing.

    UNCLASSIFIED_OPERATOR,

    NUM_TYPES
  };

  Token();
  Token(const Location& location, Type t, const base::StringPiece& v);
  Token(const Token& other);

  Type type() const { return type_; }
  // Raw token text; non-owning view into the input file's contents.
  const base::StringPiece& value() const { return value_; }
  const Location& location() const { return location_; }
  void set_location(Location location) { location_ = location; }
  // Range covering the token: from its start location to one past its last
  // character (column and byte offset advance by the value's length).
  LocationRange range() const {
    return LocationRange(
        location_,
        Location(location_.file(),
                 location_.line_number(),
                 location_.column_number() + static_cast<int>(value_.size()),
                 location_.byte() + static_cast<int>(value_.size())));
  }

  // Helper functions for comparing this token to something.
  bool IsIdentifierEqualTo(const char* v) const;
  bool IsStringEqualTo(const char* v) const;

 private:
  Type type_;
  base::StringPiece value_;
  Location location_;
};
+
+#endif // TOOLS_GN_TOKEN_H_
diff --git a/chromium/tools/gn/tokenizer.cc b/chromium/tools/gn/tokenizer.cc
new file mode 100644
index 00000000000..0568becbc8d
--- /dev/null
+++ b/chromium/tools/gn/tokenizer.cc
@@ -0,0 +1,410 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/tokenizer.h"
+
+#include "base/logging.h"
+#include "base/strings/string_util.h"
+#include "tools/gn/input_file.h"
+
+namespace {
+
+bool CouldBeTwoCharOperatorBegin(char c) {
+ return c == '<' || c == '>' || c == '!' || c == '=' || c == '-' ||
+ c == '+' || c == '|' || c == '&';
+}
+
+bool CouldBeTwoCharOperatorEnd(char c) {
+ return c == '=' || c == '|' || c == '&';
+}
+
+bool CouldBeOneCharOperator(char c) {
+ return c == '=' || c == '<' || c == '>' || c == '+' || c == '!' ||
+ c == ':' || c == '|' || c == '&' || c == '-';
+}
+
+bool CouldBeOperator(char c) {
+ return CouldBeOneCharOperator(c) || CouldBeTwoCharOperatorBegin(c);
+}
+
+bool IsScoperChar(char c) {
+ return c == '(' || c == ')' || c == '[' || c == ']' || c == '{' || c == '}';
+}
+
+Token::Type GetSpecificOperatorType(base::StringPiece value) {
+ if (value == "=")
+ return Token::EQUAL;
+ if (value == "+")
+ return Token::PLUS;
+ if (value == "-")
+ return Token::MINUS;
+ if (value == "+=")
+ return Token::PLUS_EQUALS;
+ if (value == "-=")
+ return Token::MINUS_EQUALS;
+ if (value == "==")
+ return Token::EQUAL_EQUAL;
+ if (value == "!=")
+ return Token::NOT_EQUAL;
+ if (value == "<=")
+ return Token::LESS_EQUAL;
+ if (value == ">=")
+ return Token::GREATER_EQUAL;
+ if (value == "<")
+ return Token::LESS_THAN;
+ if (value == ">")
+ return Token::GREATER_THAN;
+ if (value == "&&")
+ return Token::BOOLEAN_AND;
+ if (value == "||")
+ return Token::BOOLEAN_OR;
+ if (value == "!")
+ return Token::BANG;
+ if (value == ".")
+ return Token::DOT;
+ return Token::INVALID;
+}
+
+} // namespace
+
// |input_file| must outlive this tokenizer and all generated tokens (tokens
// hold views into its contents). Errors are reported through |err|.
// Line and column numbers are 1-based.
Tokenizer::Tokenizer(const InputFile* input_file, Err* err)
    : input_file_(input_file),
      input_(input_file->contents()),
      err_(err),
      cur_(0),
      line_number_(1),
      column_number_(1) {
}

Tokenizer::~Tokenizer() {
}
+
+// static
+std::vector<Token> Tokenizer::Tokenize(const InputFile* input_file, Err* err) {
+ Tokenizer t(input_file, err);
+ return t.Run();
+}
+
// Main tokenizing loop: produces the token stream for the whole input, or an
// empty vector with *err_ set on the first error.
std::vector<Token> Tokenizer::Run() {
  DCHECK(tokens_.empty());
  while (!done()) {
    AdvanceToNextToken();
    if (done())
      break;
    Location location = GetCurrentLocation();

    // Classify by first character, then consume the whole token.
    Token::Type type = ClassifyCurrent();
    if (type == Token::INVALID) {
      *err_ = GetErrorForInvalidToken(location);
      break;
    }
    size_t token_begin = cur_;
    AdvanceToEndOfToken(location, type);
    if (has_error())
      break;
    size_t token_end = cur_;

    // Non-owning view over the consumed bytes.
    base::StringPiece token_value(&input_.data()[token_begin],
                                  token_end - token_begin);

    // Refine provisional classifications: operators get their specific type,
    // keywords are separated from identifiers, comments get a style.
    if (type == Token::UNCLASSIFIED_OPERATOR) {
      type = GetSpecificOperatorType(token_value);
    } else if (type == Token::IDENTIFIER) {
      if (token_value == "if")
        type = Token::IF;
      else if (token_value == "else")
        type = Token::ELSE;
      else if (token_value == "true")
        type = Token::TRUE_TOKEN;
      else if (token_value == "false")
        type = Token::FALSE_TOKEN;
    } else if (type == Token::UNCLASSIFIED_COMMENT) {
      if (AtStartOfLine(token_begin) &&
          // If it's a standalone comment, but is a continuation of a comment on
          // a previous line, then instead make it a continued suffix comment.
          (tokens_.empty() || tokens_.back().type() != Token::SUFFIX_COMMENT ||
           tokens_.back().location().line_number() + 1 !=
               location.line_number() ||
           tokens_.back().location().column_number() !=
               location.column_number())) {
        type = Token::LINE_COMMENT;
        if (!at_end())  // Could be EOF.
          Advance();  // The current \n.
        // If this comment is separated from the next syntax element, then we
        // want to tag it as a block comment. This will become a standalone
        // statement at the parser level to keep this comment separate, rather
        // than attached to the subsequent statement.
        while (!at_end() && IsCurrentWhitespace()) {
          if (IsCurrentNewline()) {
            type = Token::BLOCK_COMMENT;
            break;
          }
          Advance();
        }
      } else {
        type = Token::SUFFIX_COMMENT;
      }
    }

    tokens_.push_back(Token(location, type, token_value));
  }
  // Don't return a partial token stream on error.
  if (err_->has_error())
    tokens_.clear();
  return tokens_;
}
+
+// static
+size_t Tokenizer::ByteOffsetOfNthLine(const base::StringPiece& buf, int n) {
+ DCHECK_GT(n, 0);
+
+ if (n == 1)
+ return 0;
+
+ int cur_line = 1;
+ size_t cur_byte = 0;
+ while (cur_byte < buf.size()) {
+ if (IsNewline(buf, cur_byte)) {
+ cur_line++;
+ if (cur_line == n)
+ return cur_byte + 1;
+ }
+ cur_byte++;
+ }
+ return static_cast<size_t>(-1);
+}
+
// static
bool Tokenizer::IsNewline(const base::StringPiece& buffer, size_t offset) {
  DCHECK(offset < buffer.size());
  // We may need more logic here to handle different line ending styles.
  return buffer[offset] == '\n';
}

// static
// Identifiers start with a letter or underscore.
bool Tokenizer::IsIdentifierFirstChar(char c) {
  return base::IsAsciiAlpha(c) || c == '_';
}

// static
bool Tokenizer::IsIdentifierContinuingChar(char c) {
  // Also allow digits after the first char.
  return IsIdentifierFirstChar(c) || base::IsAsciiDigit(c);
}

// Skips whitespace up to the start of the next token (or end of input).
void Tokenizer::AdvanceToNextToken() {
  while (!at_end() && IsCurrentWhitespace())
    Advance();
}
+
// Provisionally classifies the token starting at the current character by
// looking at that character alone (and one lookahead for '-'). The check
// order is significant: e.g. '-' is excluded from the operator check so it
// can be treated as a negative-number prefix below.
Token::Type Tokenizer::ClassifyCurrent() const {
  DCHECK(!at_end());
  char next_char = cur_char();
  if (base::IsAsciiDigit(next_char))
    return Token::INTEGER;
  if (next_char == '"')
    return Token::STRING;

  // Note: '-' handled specially below.
  if (next_char != '-' && CouldBeOperator(next_char))
    return Token::UNCLASSIFIED_OPERATOR;

  if (IsIdentifierFirstChar(next_char))
    return Token::IDENTIFIER;

  if (next_char == '[')
    return Token::LEFT_BRACKET;
  if (next_char == ']')
    return Token::RIGHT_BRACKET;
  if (next_char == '(')
    return Token::LEFT_PAREN;
  if (next_char == ')')
    return Token::RIGHT_PAREN;
  if (next_char == '{')
    return Token::LEFT_BRACE;
  if (next_char == '}')
    return Token::RIGHT_BRACE;

  if (next_char == '.')
    return Token::DOT;
  if (next_char == ',')
    return Token::COMMA;

  if (next_char == '#')
    return Token::UNCLASSIFIED_COMMENT;

  // For the case of '-' differentiate between a negative number and anything
  // else.
  if (next_char == '-') {
    if (!CanIncrement())
      return Token::UNCLASSIFIED_OPERATOR;  // Just the minus before end of
                                            // file.
    char following_char = input_[cur_ + 1];
    if (base::IsAsciiDigit(following_char))
      return Token::INTEGER;
    return Token::UNCLASSIFIED_OPERATOR;
  }

  return Token::INVALID;
}
+
// Consumes the rest of the token whose type was provisionally classified by
// ClassifyCurrent(). On malformed input this sets *err_ and may leave the
// position mid-token; Run() checks has_error() after calling this.
void Tokenizer::AdvanceToEndOfToken(const Location& location,
                                    Token::Type type) {
  switch (type) {
    case Token::INTEGER:
      // First char may be '-'; subsequent chars must be digits.
      do {
        Advance();
      } while (!at_end() && base::IsAsciiDigit(cur_char()));
      if (!at_end()) {
        // Require the char after a number to be some kind of space, scope,
        // or operator.
        char c = cur_char();
        if (!IsCurrentWhitespace() && !CouldBeOperator(c) &&
            !IsScoperChar(c) && c != ',') {
          *err_ = Err(GetCurrentLocation(),
                      "This is not a valid number.",
                      "Learn to count.");
          // Highlight the number.
          err_->AppendRange(LocationRange(location, GetCurrentLocation()));
        }
      }
      break;

    case Token::STRING: {
      char initial = cur_char();
      Advance();  // Advance past initial "
      for (;;) {
        if (at_end()) {
          *err_ = Err(LocationRange(location, GetCurrentLocation()),
                      "Unterminated string literal.",
                      "Don't leave me hanging like this!");
          break;
        }
        if (IsCurrentStringTerminator(initial)) {
          Advance();  // Skip past last "
          break;
        } else if (IsCurrentNewline()) {
          // Sets the error but keeps scanning to the terminator or EOF; the
          // caller checks has_error() once this returns.
          *err_ = Err(LocationRange(location, GetCurrentLocation()),
                      "Newline in string constant.");
        }
        Advance();
      }
      break;
    }

    case Token::UNCLASSIFIED_OPERATOR:
      // Some operators are two characters, some are one.
      if (CouldBeTwoCharOperatorBegin(cur_char())) {
        if (CanIncrement() && CouldBeTwoCharOperatorEnd(input_[cur_ + 1]))
          Advance();
      }
      Advance();
      break;

    case Token::IDENTIFIER:
      while (!at_end() && IsIdentifierContinuingChar(cur_char()))
        Advance();
      break;

    case Token::LEFT_BRACKET:
    case Token::RIGHT_BRACKET:
    case Token::LEFT_BRACE:
    case Token::RIGHT_BRACE:
    case Token::LEFT_PAREN:
    case Token::RIGHT_PAREN:
    case Token::DOT:
    case Token::COMMA:
      Advance();  // All are one char.
      break;

    case Token::UNCLASSIFIED_COMMENT:
      // Eat to EOL.
      while (!at_end() && !IsCurrentNewline())
        Advance();
      break;

    case Token::INVALID:
    default:
      // ClassifyCurrent() should never hand us these types.
      *err_ = Err(location, "Everything is all messed up",
                  "Please insert system disk in drive A: and press any key.");
      NOTREACHED();
      return;
  }
}
+
+bool Tokenizer::AtStartOfLine(size_t location) const {
+ while (location > 0) {
+ --location;
+ char c = input_[location];
+ if (c == '\n')
+ return true;
+ if (c != ' ')
+ return false;
+ }
+ return true;
+}
+
+bool Tokenizer::IsCurrentWhitespace() const {
+ DCHECK(!at_end());
+ char c = input_[cur_];
+ // Note that tab (0x09), vertical tab (0x0B), and formfeed (0x0C) are illegal.
+ return c == 0x0A || c == 0x0D || c == 0x20;
+}
+
+bool Tokenizer::IsCurrentStringTerminator(char quote_char) const {
+ DCHECK(!at_end());
+ if (cur_char() != quote_char)
+ return false;
+
+ // Check for escaping. \" is not a string terminator, but \\" is. Count
+ // the number of preceeding backslashes.
+ int num_backslashes = 0;
+ for (int i = static_cast<int>(cur_) - 1; i >= 0 && input_[i] == '\\'; i--)
+ num_backslashes++;
+
+ // Even backslashes mean that they were escaping each other and don't count
+ // as escaping this quote.
+ return (num_backslashes % 2) == 0;
+}
+
bool Tokenizer::IsCurrentNewline() const {
  return IsNewline(input_, cur_);
}

// Advances one byte, maintaining the 1-based line/column bookkeeping.
void Tokenizer::Advance() {
  DCHECK(cur_ < input_.size());
  if (IsCurrentNewline()) {
    line_number_++;
    column_number_ = 1;
  } else {
    column_number_++;
  }
  cur_++;
}

// Returns the location (line, column, byte offset) of the current byte.
Location Tokenizer::GetCurrentLocation() const {
  return Location(
      input_file_, line_number_, column_number_, static_cast<int>(cur_));
}
+
+Err Tokenizer::GetErrorForInvalidToken(const Location& location) const {
+ std::string help;
+ if (cur_char() == ';') {
+ // Semicolon.
+ help = "Semicolons are not needed, delete this one.";
+ } else if (cur_char() == '\t') {
+ // Tab.
+ help = "You got a tab character in here. Tabs are evil. "
+ "Convert to spaces.";
+ } else if (cur_char() == '/' && cur_ + 1 < input_.size() &&
+ (input_[cur_ + 1] == '/' || input_[cur_ + 1] == '*')) {
+ // Different types of comments.
+ help = "Comments should start with # instead";
+ } else if (cur_char() == '\'') {
+ help = "Strings are delimited by \" characters, not apostrophes.";
+ } else {
+ help = "I have no idea what this is.";
+ }
+
+ return Err(location, "Invalid token.", help);
+}
diff --git a/chromium/tools/gn/tokenizer.h b/chromium/tools/gn/tokenizer.h
new file mode 100644
index 00000000000..9b6ef33c417
--- /dev/null
+++ b/chromium/tools/gn/tokenizer.h
@@ -0,0 +1,90 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_TOKENIZER_H_
+#define TOOLS_GN_TOKENIZER_H_
+
+#include <stddef.h>
+
+#include <vector>
+
+#include "base/macros.h"
+#include "base/strings/string_piece.h"
+#include "tools/gn/err.h"
+#include "tools/gn/token.h"
+
+class InputFile;
+
// Converts the text of an InputFile into a stream of classified Tokens.
// Use via the static Tokenize() entry point; instances are internal.
class Tokenizer {
 public:
  static std::vector<Token> Tokenize(const InputFile* input_file, Err* err);

  // Counts lines in the given buffer (the first line is "1") and returns
  // the byte offset of the beginning of that line, or (size_t)-1 if there
  // aren't that many lines in the file. Note that this will return the byte
  // one past the end of the input if the last character is a newline.
  //
  // This is a helper function for error output so that the tokenizer's
  // notion of lines can be used elsewhere.
  static size_t ByteOffsetOfNthLine(const base::StringPiece& buf, int n);

  // Returns true if the given offset of the string piece counts as a newline.
  // The offset must be in the buffer.
  static bool IsNewline(const base::StringPiece& buffer, size_t offset);

  static bool IsIdentifierFirstChar(char c);

  static bool IsIdentifierContinuingChar(char c);

 private:
  // InputFile must outlive the tokenizer and all generated tokens.
  Tokenizer(const InputFile* input_file, Err* err);
  ~Tokenizer();

  // Tokenizes the whole input; returns an empty vector on error.
  std::vector<Token> Run();

  void AdvanceToNextToken();
  // Classifies the token starting at the current character.
  Token::Type ClassifyCurrent() const;
  void AdvanceToEndOfToken(const Location& location, Token::Type type);

  // Whether from this location back to the beginning of the line is only
  // whitespace. |location| should be the first character of the token to be
  // checked.
  bool AtStartOfLine(size_t location) const;

  bool IsCurrentWhitespace() const;
  bool IsCurrentNewline() const;
  bool IsCurrentStringTerminator(char quote_char) const;

  // True when it is safe to look at the next character (cur_ + 1).
  bool CanIncrement() const { return cur_ < input_.size(); }

  // Increments the current location by one.
  void Advance();

  // Returns the current character in the file as a location.
  Location GetCurrentLocation() const;

  Err GetErrorForInvalidToken(const Location& location) const;

  bool done() const { return at_end() || has_error(); }

  bool at_end() const { return cur_ == input_.size(); }
  char cur_char() const { return input_[cur_]; }

  bool has_error() const { return err_->has_error(); }

  std::vector<Token> tokens_;

  const InputFile* input_file_;
  // Non-owning view of input_file_'s contents.
  const base::StringPiece input_;
  Err* err_;
  size_t cur_;  // Byte offset into input buffer.

  // 1-based position of cur_, kept in sync by Advance().
  int line_number_;
  int column_number_;

  DISALLOW_COPY_AND_ASSIGN(Tokenizer);
};
+
+#endif // TOOLS_GN_TOKENIZER_H_
diff --git a/chromium/tools/gn/tokenizer_unittest.cc b/chromium/tools/gn/tokenizer_unittest.cc
new file mode 100644
index 00000000000..1f34c334662
--- /dev/null
+++ b/chromium/tools/gn/tokenizer_unittest.cc
@@ -0,0 +1,228 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/input_file.h"
+#include "tools/gn/token.h"
+#include "tools/gn/tokenizer.h"
+
+namespace {
+
+struct TokenExpectation {
+ Token::Type type;
+ const char* value;
+};
+
+template<size_t len>
+bool CheckTokenizer(const char* input, const TokenExpectation (&expect)[len]) {
+ InputFile input_file(SourceFile("/test"));
+ input_file.SetContents(input);
+
+ Err err;
+ std::vector<Token> results = Tokenizer::Tokenize(&input_file, &err);
+
+ if (results.size() != len)
+ return false;
+ for (size_t i = 0; i < len; i++) {
+ if (expect[i].type != results[i].type())
+ return false;
+ if (expect[i].value != results[i].value())
+ return false;
+ }
+ return true;
+}
+
+} // namespace
+
// Empty input and whitespace-only input must produce no tokens.
TEST(Tokenizer, Empty) {
  InputFile empty_string_input(SourceFile("/test"));
  empty_string_input.SetContents("");

  Err err;
  std::vector<Token> results = Tokenizer::Tokenize(&empty_string_input, &err);
  EXPECT_TRUE(results.empty());

  InputFile whitespace_input(SourceFile("/test"));
  whitespace_input.SetContents("    \r \n \r\n");

  results = Tokenizer::Tokenize(&whitespace_input, &err);
  EXPECT_TRUE(results.empty());
}

// A bare word tokenizes as a single identifier, surrounding space ignored.
TEST(Tokenizer, Identifier) {
  TokenExpectation one_ident[] = {
    { Token::IDENTIFIER, "foo" }
  };
  EXPECT_TRUE(CheckTokenizer("  foo ", one_ident));
}

// Positive and negative integers each tokenize as one INTEGER token.
TEST(Tokenizer, Integer) {
  TokenExpectation integers[] = {
    { Token::INTEGER, "123" },
    { Token::INTEGER, "-123" }
  };
  EXPECT_TRUE(CheckTokenizer("  123 -123 ", integers));
}

// A '-' directly before digits starts a new negative number, even with no
// separating space.
TEST(Tokenizer, IntegerNoSpace) {
  TokenExpectation integers[] = {
    { Token::INTEGER, "123" },
    { Token::INTEGER, "-123" }
  };
  EXPECT_TRUE(CheckTokenizer("  123-123 ", integers));
}
+
// String token values keep their quotes; escaped quotes and trailing escaped
// backslashes stay inside one token.
TEST(Tokenizer, String) {
  TokenExpectation strings[] = {
    { Token::STRING, "\"foo\"" },
    { Token::STRING, "\"bar\\\"baz\"" },
    { Token::STRING, "\"asdf\\\\\"" }
  };
  EXPECT_TRUE(CheckTokenizer("  \"foo\" \"bar\\\"baz\" \"asdf\\\\\" ",
              strings));
}

// Every one- and two-character operator gets its specific token type.
TEST(Tokenizer, Operator) {
  TokenExpectation operators[] = {
    { Token::MINUS, "-" },
    { Token::PLUS, "+" },
    { Token::EQUAL, "=" },
    { Token::PLUS_EQUALS, "+=" },
    { Token::MINUS_EQUALS, "-=" },
    { Token::NOT_EQUAL, "!=" },
    { Token::EQUAL_EQUAL, "==" },
    { Token::LESS_THAN, "<" },
    { Token::GREATER_THAN, ">" },
    { Token::LESS_EQUAL, "<=" },
    { Token::GREATER_EQUAL, ">=" },
    { Token::BANG, "!" },
    { Token::BOOLEAN_OR, "||" },
    { Token::BOOLEAN_AND, "&&" },
    { Token::DOT, "." },
    { Token::COMMA, "," },
  };
  EXPECT_TRUE(CheckTokenizer("- + = += -= != ==  < > <= >= ! || && . ,",
              operators));
}

// Brackets, braces, and parens each produce their own token type, with or
// without separating whitespace.
TEST(Tokenizer, Scoper) {
  TokenExpectation scopers[] = {
    { Token::LEFT_BRACE, "{" },
    { Token::LEFT_BRACKET, "[" },
    { Token::RIGHT_BRACKET, "]" },
    { Token::RIGHT_BRACE, "}" },
    { Token::LEFT_PAREN, "(" },
    { Token::RIGHT_PAREN, ")" },
  };
  EXPECT_TRUE(CheckTokenizer("{[ ]} ()", scopers));
}
+
// A realistic snippet mixing identifiers, strings, scopers, and an integer.
TEST(Tokenizer, FunctionCall) {
  TokenExpectation fn[] = {
    { Token::IDENTIFIER, "fun" },
    { Token::LEFT_PAREN, "(" },
    { Token::STRING, "\"foo\"" },
    { Token::RIGHT_PAREN, ")" },
    { Token::LEFT_BRACE, "{" },
    { Token::IDENTIFIER, "foo" },
    { Token::EQUAL, "=" },
    { Token::INTEGER, "12" },
    { Token::RIGHT_BRACE, "}" },
  };
  EXPECT_TRUE(CheckTokenizer("fun(\"foo\") {\nfoo = 12}", fn));
}

// Tokens carry 1-based line/column plus the absolute byte offset.
TEST(Tokenizer, Locations) {
  InputFile input(SourceFile("/test"));
  input.SetContents("1 2 \"three\"\n  4");
  Err err;
  std::vector<Token> results = Tokenizer::Tokenize(&input, &err);

  ASSERT_EQ(4u, results.size());
  ASSERT_TRUE(results[0].location() == Location(&input, 1, 1, 1));
  ASSERT_TRUE(results[1].location() == Location(&input, 1, 3, 3));
  ASSERT_TRUE(results[2].location() == Location(&input, 1, 5, 5));
  ASSERT_TRUE(results[3].location() == Location(&input, 2, 3, 8));
}

TEST(Tokenizer, ByteOffsetOfNthLine) {
  EXPECT_EQ(0u, Tokenizer::ByteOffsetOfNthLine("foo", 1));

  // Windows and Posix have different line endings, so check the byte at the
  // location rather than the offset.
  char input1[] = "aaa\nxaa\n\nya";
  EXPECT_EQ('x', input1[Tokenizer::ByteOffsetOfNthLine(input1, 2)]);
  EXPECT_EQ('y', input1[Tokenizer::ByteOffsetOfNthLine(input1, 4)]);

  char input2[3];
  input2[0] = 'a';
  input2[1] = '\n';  // Manually set to avoid Windows double-byte endings.
  input2[2] = 0;
  EXPECT_EQ(0u, Tokenizer::ByteOffsetOfNthLine(input2, 1));
  EXPECT_EQ(2u, Tokenizer::ByteOffsetOfNthLine(input2, 2));
}
+
// Comments are styled by position: line-leading ones become LINE_COMMENT,
// ones following code on the same line become SUFFIX_COMMENT.
TEST(Tokenizer, Comments) {
  TokenExpectation fn[] = {
    { Token::LINE_COMMENT, "# Stuff" },
    { Token::IDENTIFIER, "fun" },
    { Token::LEFT_PAREN, "(" },
    { Token::STRING, "\"foo\"" },
    { Token::RIGHT_PAREN, ")" },
    { Token::LEFT_BRACE, "{" },
    { Token::SUFFIX_COMMENT, "# Things" },
    { Token::LINE_COMMENT, "#Wee" },
    { Token::IDENTIFIER, "foo" },
    { Token::EQUAL, "=" },
    { Token::INTEGER, "12" },
    { Token::SUFFIX_COMMENT, "#Zip" },
    { Token::RIGHT_BRACE, "}" },
  };
  EXPECT_TRUE(CheckTokenizer(
      "# Stuff\n"
      "fun(\"foo\") {  # Things\n"
      "#Wee\n"
      "foo = 12 #Zip\n"
      "}",
      fn));
}

TEST(Tokenizer, CommentsContinued) {
  // In the first test, the comments aren't horizontally aligned, so they're
  // considered separate. In the second test, they are, so "B" is a
  // continuation of "A" (another SUFFIX comment).
  TokenExpectation fn1[] = {
    { Token::IDENTIFIER, "fun" },
    { Token::LEFT_PAREN, "(" },
    { Token::STRING, "\"foo\"" },
    { Token::RIGHT_PAREN, ")" },
    { Token::LEFT_BRACE, "{" },
    { Token::SUFFIX_COMMENT, "# A" },
    { Token::LINE_COMMENT, "# B" },
    { Token::RIGHT_BRACE, "}" },
  };
  EXPECT_TRUE(CheckTokenizer(
      "fun(\"foo\") {  # A\n"
      "  # B\n"
      "}",
      fn1));

  TokenExpectation fn2[] = {
    { Token::IDENTIFIER, "fun" },
    { Token::LEFT_PAREN, "(" },
    { Token::STRING, "\"foo\"" },
    { Token::RIGHT_PAREN, ")" },
    { Token::LEFT_BRACE, "{" },
    { Token::SUFFIX_COMMENT, "# A" },
    { Token::SUFFIX_COMMENT, "# B" },
    { Token::RIGHT_BRACE, "}" },
  };
  EXPECT_TRUE(CheckTokenizer(
      "fun(\"foo\") {  # A\n"
      "              # B\n"  // Note that these are aligned, the \"s move A out.
      "}",
      fn2));
}
diff --git a/chromium/tools/gn/tool.cc b/chromium/tools/gn/tool.cc
new file mode 100644
index 00000000000..139c03294c3
--- /dev/null
+++ b/chromium/tools/gn/tool.cc
@@ -0,0 +1,29 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/tool.h"
+
// Tools start incomplete (mutable) with GCC-style deps and no PCH; the other
// members default-construct to empty patterns/strings.
Tool::Tool()
    : depsformat_(DEPS_GCC),
      precompiled_header_type_(PCH_NONE),
      restat_(false),
      complete_(false) {
}

Tool::~Tool() {
}
+
// Freezes the tool and aggregates the substitutions used by every pattern so
// substitution_bits() can answer without re-scanning. Must be called exactly
// once, after all setters.
void Tool::SetComplete() {
  DCHECK(!complete_);
  complete_ = true;

  command_.FillRequiredTypes(&substitution_bits_);
  depfile_.FillRequiredTypes(&substitution_bits_);
  description_.FillRequiredTypes(&substitution_bits_);
  outputs_.FillRequiredTypes(&substitution_bits_);
  link_output_.FillRequiredTypes(&substitution_bits_);
  depend_output_.FillRequiredTypes(&substitution_bits_);
  rspfile_.FillRequiredTypes(&substitution_bits_);
  rspfile_content_.FillRequiredTypes(&substitution_bits_);
  // NOTE(review): runtime_link_output_ is the only pattern member not folded
  // in here — confirm whether that is intentional.
}
diff --git a/chromium/tools/gn/tool.h b/chromium/tools/gn/tool.h
new file mode 100644
index 00000000000..9360ce66ef6
--- /dev/null
+++ b/chromium/tools/gn/tool.h
@@ -0,0 +1,211 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_TOOL_H_
+#define TOOLS_GN_TOOL_H_
+
+#include <string>
+
+#include "base/logging.h"
+#include "base/macros.h"
+#include "tools/gn/substitution_list.h"
+#include "tools/gn/substitution_pattern.h"
+
+class Tool {
+ public:
+ enum DepsFormat {
+ DEPS_GCC = 0,
+ DEPS_MSVC = 1
+ };
+
+ enum PrecompiledHeaderType {
+ PCH_NONE = 0,
+ PCH_GCC = 1,
+ PCH_MSVC = 2
+ };
+
+ Tool();
+ ~Tool();
+
+ // Getters/setters ----------------------------------------------------------
+ //
+ // After the tool has had its attributes set, the caller must call
+ // SetComplete(), at which point no other changes can be made.
+
+ // Command to run.
+ const SubstitutionPattern& command() const {
+ return command_;
+ }
+ void set_command(const SubstitutionPattern& cmd) {
+ DCHECK(!complete_);
+ command_ = cmd;
+ }
+
+ // Should include a leading "." if nonempty.
+ const std::string& default_output_extension() const {
+ return default_output_extension_;
+ }
+ void set_default_output_extension(const std::string& ext) {
+ DCHECK(!complete_);
+ DCHECK(ext.empty() || ext[0] == '.');
+ default_output_extension_ = ext;
+ }
+
+ // Dependency file (if supported).
+ const SubstitutionPattern& depfile() const {
+ return depfile_;
+ }
+ void set_depfile(const SubstitutionPattern& df) {
+ DCHECK(!complete_);
+ depfile_ = df;
+ }
+
+ DepsFormat depsformat() const {
+ return depsformat_;
+ }
+ void set_depsformat(DepsFormat f) {
+ DCHECK(!complete_);
+ depsformat_ = f;
+ }
+
+ PrecompiledHeaderType precompiled_header_type() const {
+ return precompiled_header_type_;
+ }
+ void set_precompiled_header_type(PrecompiledHeaderType pch_type) {
+ precompiled_header_type_ = pch_type;
+ }
+
+ const SubstitutionPattern& description() const {
+ return description_;
+ }
+ void set_description(const SubstitutionPattern& desc) {
+ DCHECK(!complete_);
+ description_ = desc;
+ }
+
+ const std::string& lib_switch() const {
+ return lib_switch_;
+ }
+ void set_lib_switch(const std::string& s) {
+ DCHECK(!complete_);
+ lib_switch_ = s;
+ }
+
+ const std::string& lib_dir_switch() const {
+ return lib_dir_switch_;
+ }
+ void set_lib_dir_switch(const std::string& s) {
+ DCHECK(!complete_);
+ lib_dir_switch_ = s;
+ }
+
+ const SubstitutionList& outputs() const {
+ return outputs_;
+ }
+ void set_outputs(const SubstitutionList& out) {
+ DCHECK(!complete_);
+ outputs_ = out;
+ }
+
+ // Should match files in the outputs() if nonempty.
+ const SubstitutionPattern& link_output() const {
+ return link_output_;
+ }
+ void set_link_output(const SubstitutionPattern& link_out) {
+ DCHECK(!complete_);
+ link_output_ = link_out;
+ }
+
+ const SubstitutionPattern& depend_output() const {
+ return depend_output_;
+ }
+ void set_depend_output(const SubstitutionPattern& dep_out) {
+ DCHECK(!complete_);
+ depend_output_ = dep_out;
+ }
+
+ const SubstitutionPattern& runtime_link_output() const {
+ return runtime_link_output_;
+ }
+ void set_runtime_link_output(const SubstitutionPattern& run_out) {
+ DCHECK(!complete_);
+ runtime_link_output_ = run_out;
+ }
+
+ const std::string& output_prefix() const {
+ return output_prefix_;
+ }
+ void set_output_prefix(const std::string& s) {
+ DCHECK(!complete_);
+ output_prefix_ = s;
+ }
+
+ bool restat() const {
+ return restat_;
+ }
+ void set_restat(bool r) {
+ DCHECK(!complete_);
+ restat_ = r;
+ }
+
+ const SubstitutionPattern& rspfile() const {
+ return rspfile_;
+ }
+ void set_rspfile(const SubstitutionPattern& rsp) {
+ DCHECK(!complete_);
+ rspfile_ = rsp;
+ }
+
+ const SubstitutionPattern& rspfile_content() const {
+ return rspfile_content_;
+ }
+ void set_rspfile_content(const SubstitutionPattern& content) {
+ DCHECK(!complete_);
+ rspfile_content_ = content;
+ }
+
+ // Other functions ----------------------------------------------------------
+
+ // Called when the toolchain is saving this tool, after everything is filled
+ // in.
+ void SetComplete();
+
+ // Returns true if this tool has separate outputs for dependency tracking
+ // and linking.
+ bool has_separate_solink_files() const {
+ return !link_output_.empty() || !depend_output_.empty();
+ }
+
+ // Substitutions required by this tool.
+ const SubstitutionBits& substitution_bits() const {
+ DCHECK(complete_);
+ return substitution_bits_;
+ }
+
+ private:
+ SubstitutionPattern command_;
+ std::string default_output_extension_;
+ SubstitutionPattern depfile_;
+ DepsFormat depsformat_;
+ PrecompiledHeaderType precompiled_header_type_;
+ SubstitutionPattern description_;
+ std::string lib_switch_;
+ std::string lib_dir_switch_;
+ SubstitutionList outputs_;
+ SubstitutionPattern link_output_;
+ SubstitutionPattern depend_output_;
+ SubstitutionPattern runtime_link_output_;
+ std::string output_prefix_;
+ bool restat_;
+ SubstitutionPattern rspfile_;
+ SubstitutionPattern rspfile_content_;
+
+ bool complete_;
+
+ SubstitutionBits substitution_bits_;
+
+ DISALLOW_COPY_AND_ASSIGN(Tool);
+};
+
+#endif // TOOLS_GN_TOOL_H_
diff --git a/chromium/tools/gn/toolchain.cc b/chromium/tools/gn/toolchain.cc
new file mode 100644
index 00000000000..14d2bfd1595
--- /dev/null
+++ b/chromium/tools/gn/toolchain.cc
@@ -0,0 +1,174 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/toolchain.h"
+
+#include <stddef.h>
+#include <string.h>
+#include <utility>
+
+#include "base/logging.h"
+#include "tools/gn/target.h"
+#include "tools/gn/value.h"
+
+const char* Toolchain::kToolCc = "cc";
+const char* Toolchain::kToolCxx = "cxx";
+const char* Toolchain::kToolObjC = "objc";
+const char* Toolchain::kToolObjCxx = "objcxx";
+const char* Toolchain::kToolRc = "rc";
+const char* Toolchain::kToolAsm = "asm";
+const char* Toolchain::kToolAlink = "alink";
+const char* Toolchain::kToolSolink = "solink";
+const char* Toolchain::kToolSolinkModule = "solink_module";
+const char* Toolchain::kToolLink = "link";
+const char* Toolchain::kToolStamp = "stamp";
+const char* Toolchain::kToolCopy = "copy";
+const char* Toolchain::kToolCopyBundleData = "copy_bundle_data";
+const char* Toolchain::kToolCompileXCAssets = "compile_xcassets";
+
+Toolchain::Toolchain(const Settings* settings, const Label& label)
+ : Item(settings, label),
+ concurrent_links_(0),
+ setup_complete_(false) {
+}
+
+Toolchain::~Toolchain() {
+}
+
+Toolchain* Toolchain::AsToolchain() {
+ return this;
+}
+
+const Toolchain* Toolchain::AsToolchain() const {
+ return this;
+}
+
+// static
+Toolchain::ToolType Toolchain::ToolNameToType(const base::StringPiece& str) {
+ if (str == kToolCc) return TYPE_CC;
+ if (str == kToolCxx) return TYPE_CXX;
+ if (str == kToolObjC) return TYPE_OBJC;
+ if (str == kToolObjCxx) return TYPE_OBJCXX;
+ if (str == kToolRc) return TYPE_RC;
+ if (str == kToolAsm) return TYPE_ASM;
+ if (str == kToolAlink) return TYPE_ALINK;
+ if (str == kToolSolink) return TYPE_SOLINK;
+ if (str == kToolSolinkModule) return TYPE_SOLINK_MODULE;
+ if (str == kToolLink) return TYPE_LINK;
+ if (str == kToolStamp) return TYPE_STAMP;
+ if (str == kToolCopy) return TYPE_COPY;
+ if (str == kToolCopyBundleData) return TYPE_COPY_BUNDLE_DATA;
+ if (str == kToolCompileXCAssets) return TYPE_COMPILE_XCASSETS;
+ return TYPE_NONE;
+}
+
+// static
+std::string Toolchain::ToolTypeToName(ToolType type) {
+ switch (type) {
+ case TYPE_CC: return kToolCc;
+ case TYPE_CXX: return kToolCxx;
+ case TYPE_OBJC: return kToolObjC;
+ case TYPE_OBJCXX: return kToolObjCxx;
+ case TYPE_RC: return kToolRc;
+ case TYPE_ASM: return kToolAsm;
+ case TYPE_ALINK: return kToolAlink;
+ case TYPE_SOLINK: return kToolSolink;
+ case TYPE_SOLINK_MODULE: return kToolSolinkModule;
+ case TYPE_LINK: return kToolLink;
+ case TYPE_STAMP: return kToolStamp;
+ case TYPE_COPY: return kToolCopy;
+ case TYPE_COPY_BUNDLE_DATA: return kToolCopyBundleData;
+ case TYPE_COMPILE_XCASSETS: return kToolCompileXCAssets;
+ default:
+ NOTREACHED();
+ return std::string();
+ }
+}
+
+const Tool* Toolchain::GetTool(ToolType type) const {
+ DCHECK(type != TYPE_NONE);
+ return tools_[static_cast<size_t>(type)].get();
+}
+
+void Toolchain::SetTool(ToolType type, std::unique_ptr<Tool> t) {
+ DCHECK(type != TYPE_NONE);
+ DCHECK(!tools_[type].get());
+ t->SetComplete();
+ tools_[type] = std::move(t);
+}
+
+void Toolchain::ToolchainSetupComplete() {
+ // Collect required bits from all tools.
+ for (const auto& tool : tools_) {
+ if (tool)
+ substitution_bits_.MergeFrom(tool->substitution_bits());
+ }
+
+ setup_complete_ = true;
+}
+
+// static
+Toolchain::ToolType Toolchain::GetToolTypeForSourceType(SourceFileType type) {
+ switch (type) {
+ case SOURCE_C:
+ return TYPE_CC;
+ case SOURCE_CPP:
+ return TYPE_CXX;
+ case SOURCE_M:
+ return TYPE_OBJC;
+ case SOURCE_MM:
+ return TYPE_OBJCXX;
+ case SOURCE_ASM:
+ case SOURCE_S:
+ return TYPE_ASM;
+ case SOURCE_RC:
+ return TYPE_RC;
+ case SOURCE_UNKNOWN:
+ case SOURCE_H:
+ case SOURCE_O:
+ case SOURCE_DEF:
+ return TYPE_NONE;
+ default:
+ NOTREACHED();
+ return TYPE_NONE;
+ }
+}
+
+const Tool* Toolchain::GetToolForSourceType(SourceFileType type) {
+ return tools_[GetToolTypeForSourceType(type)].get();
+}
+
+// static
+Toolchain::ToolType Toolchain::GetToolTypeForTargetFinalOutput(
+ const Target* target) {
+ // The contents of this list might be surprising (i.e. stamp tool for copy
+ // rules). See the header for why.
+ switch (target->output_type()) {
+ case Target::GROUP:
+ return TYPE_STAMP;
+ case Target::EXECUTABLE:
+ return Toolchain::TYPE_LINK;
+ case Target::SHARED_LIBRARY:
+ return Toolchain::TYPE_SOLINK;
+ case Target::LOADABLE_MODULE:
+ return Toolchain::TYPE_SOLINK_MODULE;
+ case Target::STATIC_LIBRARY:
+ return Toolchain::TYPE_ALINK;
+ case Target::SOURCE_SET:
+ return TYPE_STAMP;
+ case Target::ACTION:
+ case Target::ACTION_FOREACH:
+ case Target::BUNDLE_DATA:
+ case Target::CREATE_BUNDLE:
+ case Target::COPY_FILES:
+ return TYPE_STAMP;
+ default:
+ NOTREACHED();
+ return Toolchain::TYPE_NONE;
+ }
+}
+
+const Tool* Toolchain::GetToolForTargetFinalOutput(const Target* target) const {
+ return tools_[GetToolTypeForTargetFinalOutput(target)].get();
+}
diff --git a/chromium/tools/gn/toolchain.h b/chromium/tools/gn/toolchain.h
new file mode 100644
index 00000000000..0c207262622
--- /dev/null
+++ b/chromium/tools/gn/toolchain.h
@@ -0,0 +1,136 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_TOOLCHAIN_H_
+#define TOOLS_GN_TOOLCHAIN_H_
+
+#include <memory>
+
+#include "base/logging.h"
+#include "base/strings/string_piece.h"
+#include "tools/gn/item.h"
+#include "tools/gn/label_ptr.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/source_file_type.h"
+#include "tools/gn/substitution_type.h"
+#include "tools/gn/tool.h"
+#include "tools/gn/value.h"
+
+// Holds information on a specific toolchain. This data is filled in when we
+// encounter a toolchain definition.
+//
+// This class is an Item so it can participate in dependency management. In
+// particular, when a target uses a toolchain, it should have a dependency on
+// that toolchain's object so that we can be sure we loaded the toolchain
+// before generating the build for that target.
+//
+// Note on threadsafety: The label of the toolchain never changes so can
+// safely be accessed from any thread at any time (we do this when asking for
+// the toolchain name). But the values in the toolchain do, so these can't
+// be accessed until this Item is resolved.
+class Toolchain : public Item {
+ public:
+ enum ToolType {
+ TYPE_NONE = 0,
+ TYPE_CC,
+ TYPE_CXX,
+ TYPE_OBJC,
+ TYPE_OBJCXX,
+ TYPE_RC,
+ TYPE_ASM,
+ TYPE_ALINK,
+ TYPE_SOLINK,
+ TYPE_SOLINK_MODULE,
+ TYPE_LINK,
+ TYPE_STAMP,
+ TYPE_COPY,
+ TYPE_COPY_BUNDLE_DATA,
+ TYPE_COMPILE_XCASSETS,
+
+ TYPE_NUMTYPES // Must be last.
+ };
+
+ static const char* kToolCc;
+ static const char* kToolCxx;
+ static const char* kToolObjC;
+ static const char* kToolObjCxx;
+ static const char* kToolRc;
+ static const char* kToolAsm;
+ static const char* kToolAlink;
+ static const char* kToolSolink;
+ static const char* kToolSolinkModule;
+ static const char* kToolLink;
+ static const char* kToolStamp;
+ static const char* kToolCopy;
+ static const char* kToolCopyBundleData;
+ static const char* kToolCompileXCAssets;
+
+ Toolchain(const Settings* settings, const Label& label);
+ ~Toolchain() override;
+
+ // Item overrides.
+ Toolchain* AsToolchain() override;
+ const Toolchain* AsToolchain() const override;
+
+ // Returns TYPE_NONE on failure.
+ static ToolType ToolNameToType(const base::StringPiece& str);
+ static std::string ToolTypeToName(ToolType type);
+
+ // Returns null if the tool hasn't been defined.
+ const Tool* GetTool(ToolType type) const;
+
+ // Set a tool. When all tools are configured, you should call
+ // ToolchainSetupComplete().
+ void SetTool(ToolType type, std::unique_ptr<Tool> t);
+
+ // Does final setup on the toolchain once all tools are known.
+ void ToolchainSetupComplete();
+
+ // Targets that must be resolved before compiling any targets.
+ const LabelTargetVector& deps() const { return deps_; }
+ LabelTargetVector& deps() { return deps_; }
+
+ // Specifies build argument overrides that will be set on the base scope. It
+ // will be as if these arguments were passed in on the command line. This
+ // allows a toolchain to override the OS type of the default toolchain or
+ // pass in other settings.
+ Scope::KeyValueMap& args() { return args_; }
+ const Scope::KeyValueMap& args() const { return args_; }
+
+ // Returns the tool for compiling the given source file type.
+ static ToolType GetToolTypeForSourceType(SourceFileType type);
+ const Tool* GetToolForSourceType(SourceFileType type);
+
+ // Returns the tool that produces the final output for the given target type.
+ // This isn't necessarily the tool you would expect. For copy target, this
+ // will return the stamp tool instead since the final output of a copy
+ // target is to stamp the set of copies done so there is one output.
+ static ToolType GetToolTypeForTargetFinalOutput(const Target* target);
+ const Tool* GetToolForTargetFinalOutput(const Target* target) const;
+
+ const SubstitutionBits& substitution_bits() const {
+ DCHECK(setup_complete_);
+ return substitution_bits_;
+ }
+
+ void set_concurrent_links(int cl) { concurrent_links_ = cl; }
+ int concurrent_links() const { return concurrent_links_; }
+
+ private:
+ std::unique_ptr<Tool> tools_[TYPE_NUMTYPES];
+
+ // How many links to run in parallel. Only the default toolchain's version of
+ // this variable applies.
+ int concurrent_links_;
+
+ bool setup_complete_;
+
+ // Substitutions used by the tools in this toolchain.
+ SubstitutionBits substitution_bits_;
+
+ LabelTargetVector deps_;
+ Scope::KeyValueMap args_;
+};
+
+#endif // TOOLS_GN_TOOLCHAIN_H_
diff --git a/chromium/tools/gn/trace.cc b/chromium/tools/gn/trace.cc
new file mode 100644
index 00000000000..13ebf6202c5
--- /dev/null
+++ b/chromium/tools/gn/trace.cc
@@ -0,0 +1,328 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/trace.h"
+
+#include <stddef.h>
+
+#include <algorithm>
+#include <map>
+#include <sstream>
+#include <vector>
+
+#include "base/command_line.h"
+#include "base/files/file_path.h"
+#include "base/files/file_util.h"
+#include "base/json/string_escape.h"
+#include "base/logging.h"
+#include "base/macros.h"
+#include "base/strings/stringprintf.h"
+#include "base/synchronization/lock.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/label.h"
+
+namespace {
+
+class TraceLog {
+ public:
+ TraceLog() {
+ events_.reserve(16384);
+ }
+ // Trace items leaked intentionally.
+
+ void Add(TraceItem* item) {
+ base::AutoLock lock(lock_);
+ events_.push_back(item);
+ }
+
+ // Returns a copy for threadsafety.
+ std::vector<TraceItem*> events() const { return events_; }
+
+ private:
+ base::Lock lock_;
+
+ std::vector<TraceItem*> events_;
+
+ DISALLOW_COPY_AND_ASSIGN(TraceLog);
+};
+
+TraceLog* trace_log = nullptr;
+
+struct Coalesced {
+ Coalesced() : name_ptr(nullptr), total_duration(0.0), count(0) {}
+
+ const std::string* name_ptr; // Pointer to a string with the name in it.
+ double total_duration;
+ int count;
+};
+
+bool DurationGreater(const TraceItem* a, const TraceItem* b) {
+ return a->delta() > b->delta();
+}
+
+bool CoalescedDurationGreater(const Coalesced& a, const Coalesced& b) {
+ return a.total_duration > b.total_duration;
+}
+
+void SummarizeParses(std::vector<const TraceItem*>& loads,
+ std::ostream& out) {
+ out << "File parse times: (time in ms, name)\n";
+
+ std::sort(loads.begin(), loads.end(), &DurationGreater);
+ for (const auto& load : loads) {
+ out << base::StringPrintf(" %8.2f ", load->delta().InMillisecondsF());
+ out << load->name() << std::endl;
+ }
+}
+
+void SummarizeCoalesced(std::vector<const TraceItem*>& items,
+ std::ostream& out) {
+ // Group by file name.
+ std::map<std::string, Coalesced> coalesced;
+ for (const auto& item : items) {
+ Coalesced& c = coalesced[item->name()];
+ c.name_ptr = &item->name();
+ c.total_duration += item->delta().InMillisecondsF();
+ c.count++;
+ }
+
+ // Sort by duration.
+ std::vector<Coalesced> sorted;
+ for (const auto& pair : coalesced)
+ sorted.push_back(pair.second);
+ std::sort(sorted.begin(), sorted.end(), &CoalescedDurationGreater);
+
+ for (const auto& cur : sorted) {
+ out << base::StringPrintf(" %8.2f %d ", cur.total_duration, cur.count);
+ out << *cur.name_ptr << std::endl;
+ }
+}
+
+void SummarizeFileExecs(std::vector<const TraceItem*>& execs,
+ std::ostream& out) {
+ out << "File execute times: (total time in ms, # executions, name)\n";
+ SummarizeCoalesced(execs, out);
+}
+
+void SummarizeScriptExecs(std::vector<const TraceItem*>& execs,
+ std::ostream& out) {
+ out << "Script execute times: (total time in ms, # executions, name)\n";
+ SummarizeCoalesced(execs, out);
+}
+
+} // namespace
+
+TraceItem::TraceItem(Type type,
+ const std::string& name,
+ base::PlatformThreadId thread_id)
+ : type_(type),
+ name_(name),
+ thread_id_(thread_id) {
+}
+
+TraceItem::~TraceItem() {
+}
+
+ScopedTrace::ScopedTrace(TraceItem::Type t, const std::string& name)
+ : item_(nullptr), done_(false) {
+ if (trace_log) {
+ item_ = new TraceItem(t, name, base::PlatformThread::CurrentId());
+ item_->set_begin(base::TimeTicks::Now());
+ }
+}
+
+ScopedTrace::ScopedTrace(TraceItem::Type t, const Label& label)
+ : item_(nullptr), done_(false) {
+ if (trace_log) {
+ item_ = new TraceItem(t, label.GetUserVisibleName(false),
+ base::PlatformThread::CurrentId());
+ item_->set_begin(base::TimeTicks::Now());
+ }
+}
+
+ScopedTrace::~ScopedTrace() {
+ Done();
+}
+
+void ScopedTrace::SetToolchain(const Label& label) {
+ if (item_)
+ item_->set_toolchain(label.GetUserVisibleName(false));
+}
+
+void ScopedTrace::SetCommandLine(const base::CommandLine& cmdline) {
+ if (item_)
+ item_->set_cmdline(FilePathToUTF8(cmdline.GetArgumentsString()));
+}
+
+void ScopedTrace::Done() {
+ if (!done_) {
+ done_ = true;
+ if (trace_log) {
+ item_->set_end(base::TimeTicks::Now());
+ AddTrace(item_);
+ }
+ }
+}
+
+void EnableTracing() {
+ if (!trace_log)
+ trace_log = new TraceLog;
+}
+
+void AddTrace(TraceItem* item) {
+ trace_log->Add(item);
+}
+
+std::string SummarizeTraces() {
+ if (!trace_log)
+ return std::string();
+
+ std::vector<TraceItem*> events = trace_log->events();
+
+ // Classify all events.
+ std::vector<const TraceItem*> parses;
+ std::vector<const TraceItem*> file_execs;
+ std::vector<const TraceItem*> script_execs;
+ std::vector<const TraceItem*> check_headers;
+ int headers_checked = 0;
+ for (const auto& event : events) {
+ switch (event->type()) {
+ case TraceItem::TRACE_FILE_PARSE:
+ parses.push_back(event);
+ break;
+ case TraceItem::TRACE_FILE_EXECUTE:
+ file_execs.push_back(event);
+ break;
+ case TraceItem::TRACE_SCRIPT_EXECUTE:
+ script_execs.push_back(event);
+ break;
+ case TraceItem::TRACE_CHECK_HEADERS:
+ check_headers.push_back(event);
+ break;
+ case TraceItem::TRACE_CHECK_HEADER:
+ headers_checked++;
+ break;
+ case TraceItem::TRACE_SETUP:
+ case TraceItem::TRACE_FILE_LOAD:
+ case TraceItem::TRACE_FILE_WRITE:
+ case TraceItem::TRACE_DEFINE_TARGET:
+ case TraceItem::TRACE_ON_RESOLVED:
+ break; // Ignore these for the summary.
+ }
+ }
+
+ std::ostringstream out;
+ SummarizeParses(parses, out);
+ out << std::endl;
+ SummarizeFileExecs(file_execs, out);
+ out << std::endl;
+ SummarizeScriptExecs(script_execs, out);
+ out << std::endl;
+
+ // Generally there will only be one header check, but it's theoretically
+ // possible for more than one to run if more than one build is going in
+ // parallel. Just report the total of all of them.
+ if (!check_headers.empty()) {
+ double check_headers_time = 0;
+ for (const auto& cur : check_headers)
+ check_headers_time += cur->delta().InMillisecondsF();
+
+ out << "Header check time: (total time in ms, files checked)\n";
+ out << base::StringPrintf(" %8.2f %d\n",
+ check_headers_time, headers_checked);
+ }
+
+ return out.str();
+}
+
+void SaveTraces(const base::FilePath& file_name) {
+ std::ostringstream out;
+
+ out << "{\"traceEvents\":[";
+
+ std::string quote_buffer; // Allocate outside loop to prevent reallocating.
+
+ // Write main thread metadata (assume this is being written on the main
+ // thread).
+ out << "{\"pid\":0,\"tid\":" << base::PlatformThread::CurrentId();
+ out << ",\"ts\":0,\"ph\":\"M\",";
+ out << "\"name\":\"thread_name\",\"args\":{\"name\":\"Main thread\"}},";
+
+ std::vector<TraceItem*> events = trace_log->events();
+ for (size_t i = 0; i < events.size(); i++) {
+ const TraceItem& item = *events[i];
+
+ if (i != 0)
+ out << ",";
+ out << "{\"pid\":0,\"tid\":" << item.thread_id();
+ out << ",\"ts\":" << item.begin().ToInternalValue();
+ out << ",\"ph\":\"X\""; // "X" = complete event with begin & duration.
+ out << ",\"dur\":" << item.delta().InMicroseconds();
+
+ quote_buffer.resize(0);
+ base::EscapeJSONString(item.name(), true, &quote_buffer);
+ out << ",\"name\":" << quote_buffer;
+
+ out << ",\"cat\":";
+ switch (item.type()) {
+ case TraceItem::TRACE_SETUP:
+ out << "\"setup\"";
+ break;
+ case TraceItem::TRACE_FILE_LOAD:
+ out << "\"load\"";
+ break;
+ case TraceItem::TRACE_FILE_PARSE:
+ out << "\"parse\"";
+ break;
+ case TraceItem::TRACE_FILE_EXECUTE:
+ out << "\"file_exec\"";
+ break;
+ case TraceItem::TRACE_FILE_WRITE:
+ out << "\"file_write\"";
+ break;
+ case TraceItem::TRACE_SCRIPT_EXECUTE:
+ out << "\"script_exec\"";
+ break;
+ case TraceItem::TRACE_DEFINE_TARGET:
+ out << "\"define\"";
+ break;
+ case TraceItem::TRACE_ON_RESOLVED:
+ out << "\"onresolved\"";
+ break;
+ case TraceItem::TRACE_CHECK_HEADER:
+ out << "\"hdr\"";
+ break;
+ case TraceItem::TRACE_CHECK_HEADERS:
+ out << "\"header_check\"";
+ break;
+ }
+
+ if (!item.toolchain().empty() || !item.cmdline().empty()) {
+ out << ",\"args\":{";
+ bool needs_comma = false;
+ if (!item.toolchain().empty()) {
+ quote_buffer.resize(0);
+ base::EscapeJSONString(item.toolchain(), true, &quote_buffer);
+ out << "\"toolchain\":" << quote_buffer;
+ needs_comma = true;
+ }
+ if (!item.cmdline().empty()) {
+ quote_buffer.resize(0);
+ base::EscapeJSONString(item.cmdline(), true, &quote_buffer);
+ if (needs_comma)
+ out << ",";
+ out << "\"cmdline\":" << quote_buffer;
+ needs_comma = true;
+ }
+ out << "}";
+ }
+ out << "}";
+ }
+
+ out << "]}";
+
+ std::string out_str = out.str();
+ base::WriteFile(file_name, out_str.data(),
+ static_cast<int>(out_str.size()));
+}
diff --git a/chromium/tools/gn/trace.h b/chromium/tools/gn/trace.h
new file mode 100644
index 00000000000..de8ab09d490
--- /dev/null
+++ b/chromium/tools/gn/trace.h
@@ -0,0 +1,101 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_TRACE_H_
+#define TOOLS_GN_TRACE_H_
+
+#include <string>
+
+#include "base/macros.h"
+#include "base/threading/platform_thread.h"
+#include "base/time/time.h"
+
+class Label;
+
+namespace base {
+class CommandLine;
+class FilePath;
+}
+
+class TraceItem {
+ public:
+ enum Type {
+ TRACE_SETUP,
+ TRACE_FILE_LOAD,
+ TRACE_FILE_PARSE,
+ TRACE_FILE_EXECUTE,
+ TRACE_FILE_WRITE,
+ TRACE_SCRIPT_EXECUTE,
+ TRACE_DEFINE_TARGET,
+ TRACE_ON_RESOLVED,
+ TRACE_CHECK_HEADER, // One file.
+ TRACE_CHECK_HEADERS, // All files.
+ };
+
+ TraceItem(Type type,
+ const std::string& name,
+ base::PlatformThreadId thread_id);
+ ~TraceItem();
+
+ Type type() const { return type_; }
+ const std::string& name() const { return name_; }
+ base::PlatformThreadId thread_id() const { return thread_id_; }
+
+ base::TimeTicks begin() const { return begin_; }
+ void set_begin(base::TimeTicks b) { begin_ = b; }
+ base::TimeTicks end() const { return end_; }
+ void set_end(base::TimeTicks e) { end_ = e; }
+
+ base::TimeDelta delta() const { return end_ - begin_; }
+
+ // Optional toolchain label.
+ const std::string& toolchain() const { return toolchain_; }
+ void set_toolchain(const std::string& t) { toolchain_ = t; }
+
+ // Optional command line.
+ const std::string& cmdline() const { return cmdline_; }
+ void set_cmdline(const std::string& c) { cmdline_ = c; }
+
+ private:
+ Type type_;
+ std::string name_;
+ base::PlatformThreadId thread_id_;
+
+ base::TimeTicks begin_;
+ base::TimeTicks end_;
+
+ std::string toolchain_;
+ std::string cmdline_;
+};
+
+class ScopedTrace {
+ public:
+ ScopedTrace(TraceItem::Type t, const std::string& name);
+ ScopedTrace(TraceItem::Type t, const Label& label);
+ ~ScopedTrace();
+
+ void SetToolchain(const Label& label);
+ void SetCommandLine(const base::CommandLine& cmdline);
+
+ void Done();
+
+ private:
+ TraceItem* item_;
+ bool done_;
+};
+
+// Call to turn tracing on. It's off by default.
+void EnableTracing();
+
+// Adds a trace event to the log. Takes ownership of the pointer.
+void AddTrace(TraceItem* item);
+
+// Returns a summary of the current traces, or the empty string if tracing is
+// not enabled.
+std::string SummarizeTraces();
+
+// Saves the current traces to the given filename in JSON format.
+void SaveTraces(const base::FilePath& file_name);
+
+#endif // TOOLS_GN_TRACE_H_
diff --git a/chromium/tools/gn/tutorial/hello.cc b/chromium/tools/gn/tutorial/hello.cc
new file mode 100644
index 00000000000..8b25cd9db82
--- /dev/null
+++ b/chromium/tools/gn/tutorial/hello.cc
@@ -0,0 +1,17 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/tutorial/hello.h"
+
+#include <stdio.h>
+
+void Hello(const char* who) {
+ printf("Hello, %s.\n", who);
+}
+
+#if defined(TWO_PEOPLE)
+void Hello(const char* one, const char* two) {
+ printf("Hello, %s and %s.\n", one, two);
+}
+#endif
diff --git a/chromium/tools/gn/tutorial/hello.h b/chromium/tools/gn/tutorial/hello.h
new file mode 100644
index 00000000000..253fe241048
--- /dev/null
+++ b/chromium/tools/gn/tutorial/hello.h
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_TUTORIAL_HELLO_H_
+#define TOOLS_GN_TUTORIAL_HELLO_H_
+
+void Hello(const char* who);
+
+#if defined(TWO_PEOPLE)
+void Hello(const char* one, const char* two);
+#endif
+
+#endif // TOOLS_GN_TUTORIAL_HELLO_H_
diff --git a/chromium/tools/gn/tutorial/hello_world.cc b/chromium/tools/gn/tutorial/hello_world.cc
new file mode 100644
index 00000000000..84ffc3fc387
--- /dev/null
+++ b/chromium/tools/gn/tutorial/hello_world.cc
@@ -0,0 +1,10 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+int main() {
+ printf("Hello, world.\n");
+ return 0;
+}
diff --git a/chromium/tools/gn/tutorial/say_hello.cc b/chromium/tools/gn/tutorial/say_hello.cc
new file mode 100644
index 00000000000..d8092bd4dae
--- /dev/null
+++ b/chromium/tools/gn/tutorial/say_hello.cc
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/tutorial/hello.h"
+
+int main() {
+#if defined(TWO_PEOPLE)
+ Hello("Bill", "Joy");
+#else
+ Hello("everyone");
+#endif
+ return 0;
+}
diff --git a/chromium/tools/gn/unique_vector.h b/chromium/tools/gn/unique_vector.h
new file mode 100644
index 00000000000..e629ce2da7c
--- /dev/null
+++ b/chromium/tools/gn/unique_vector.h
@@ -0,0 +1,178 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_UNIQUE_VECTOR_H_
+#define TOOLS_GN_UNIQUE_VECTOR_H_
+
+#include <stddef.h>
+
+#include <algorithm>
+
+#include "base/containers/hash_tables.h"
+
+namespace internal {
+
+// This class allows us to insert things by reference into a hash table which
+// avoids copies. The hash function of a UniquifyRef is that of the object it
+// points to.
+//
+// There are two ways it can store data: (1) by (vector*, index) which is used
+// to refer to the array in UniqueVector and make it work even when the
+// underlying vector is reallocated; (2) by T* which is used to do lookups
+// into the hash table of things that aren't in a vector yet.
+//
+// It also caches the hash value which allows us to query and then insert
+// without recomputing the hash.
+template<typename T>
+class UniquifyRef {
+ public:
+ UniquifyRef()
+ : value_(nullptr),
+ vect_(nullptr),
+ index_(static_cast<size_t>(-1)),
+ hash_val_(0) {
+ }
+
+ // Initialize with a pointer to a value.
+ explicit UniquifyRef(const T* v)
+ : value_(v),
+ vect_(nullptr),
+ index_(static_cast<size_t>(-1)) {
+ FillHashValue();
+ }
+
+ // Initialize with an array + index.
+ UniquifyRef(const std::vector<T>* v, size_t i)
+ : value_(nullptr),
+ vect_(v),
+ index_(i) {
+ FillHashValue();
+ }
+
+ // Initialize with an array + index and a known hash value to prevent
+ // re-hashing.
+ UniquifyRef(const std::vector<T>* v, size_t i, size_t hash_value)
+ : value_(nullptr),
+ vect_(v),
+ index_(i),
+ hash_val_(hash_value) {
+ }
+
+ const T& value() const { return value_ ? *value_ : (*vect_)[index_]; }
+ size_t hash_val() const { return hash_val_; }
+ size_t index() const { return index_; }
+
+ private:
+ void FillHashValue() {
+ BASE_HASH_NAMESPACE::hash<T> h;
+ hash_val_ = h(value());
+ }
+
+ // When non-null, points to the object.
+ const T* value_;
+
+ // When value is null these are used.
+ const std::vector<T>* vect_;
+ size_t index_;
+
+ size_t hash_val_;
+};
+
+template<typename T> inline bool operator==(const UniquifyRef<T>& a,
+ const UniquifyRef<T>& b) {
+ return a.value() == b.value();
+}
+
+template<typename T> inline bool operator<(const UniquifyRef<T>& a,
+ const UniquifyRef<T>& b) {
+ return a.value() < b.value();
+}
+
+} // namespace internal
+
+namespace BASE_HASH_NAMESPACE {
+
+template<typename T> struct hash< internal::UniquifyRef<T> > {
+ std::size_t operator()(const internal::UniquifyRef<T>& v) const {
+ return v.hash_val();
+ }
+};
+
+} // namespace BASE_HASH_NAMESPACE
+
+// An ordered set optimized for GN's usage. Such sets are used to store lists
+// of configs and libraries, and are appended to but not randomly inserted
+// into.
+template<typename T>
+class UniqueVector {
+ public:
+ typedef std::vector<T> Vector;
+ typedef typename Vector::iterator iterator;
+ typedef typename Vector::const_iterator const_iterator;
+
+ const Vector& vector() const { return vector_; }
+ size_t size() const { return vector_.size(); }
+ bool empty() const { return vector_.empty(); }
+ void clear() { vector_.clear(); set_.clear(); }
+ void reserve(size_t s) { vector_.reserve(s); }
+
+ const T& operator[](size_t index) const { return vector_[index]; }
+
+ const_iterator begin() const { return vector_.begin(); }
+ iterator begin() { return vector_.begin(); }
+ const_iterator end() const { return vector_.end(); }
+ iterator end() { return vector_.end(); }
+
+ // Returns true if the item was appended, false if it already existed (and
+ // thus the vector was not modified).
+ bool push_back(const T& t) {
+ Ref ref(&t);
+ if (set_.find(ref) != set_.end())
+ return false; // Already have this one.
+
+ vector_.push_back(t);
+ set_.insert(Ref(&vector_, vector_.size() - 1, ref.hash_val()));
+ return true;
+ }
+
+ // Like push_back but swaps in the type to avoid a copy.
+ bool PushBackViaSwap(T* t) {
+ using std::swap;
+
+ Ref ref(t);
+ if (set_.find(ref) != set_.end())
+ return false; // Already have this one.
+
+ size_t new_index = vector_.size();
+ vector_.resize(new_index + 1);
+ swap(vector_[new_index], *t);
+ set_.insert(Ref(&vector_, vector_.size() - 1, ref.hash_val()));
+ return true;
+ }
+
+ // Appends a range of items from an iterator.
+ template<typename iter> void Append(const iter& begin, const iter& end) {
+ for (iter i = begin; i != end; ++i)
+ push_back(*i);
+ }
+
+ // Returns the index of the item matching the given value in the list, or
+ // (size_t)(-1) if it's not found.
+ size_t IndexOf(const T& t) const {
+ Ref ref(&t);
+ typename HashSet::const_iterator found = set_.find(ref);
+ if (found == set_.end())
+ return static_cast<size_t>(-1);
+ return found->index();
+ }
+
+ private:
+ typedef internal::UniquifyRef<T> Ref;
+ typedef base::hash_set<Ref> HashSet;
+
+ HashSet set_;
+ Vector vector_;
+};
+
+#endif // TOOLS_GN_UNIQUE_VECTOR_H_
diff --git a/chromium/tools/gn/unique_vector_unittest.cc b/chromium/tools/gn/unique_vector_unittest.cc
new file mode 100644
index 00000000000..d735423ff4d
--- /dev/null
+++ b/chromium/tools/gn/unique_vector_unittest.cc
@@ -0,0 +1,45 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include <algorithm>
+
+#include "base/time/time.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/unique_vector.h"
+
+TEST(UniqueVector, PushBack) {
+ UniqueVector<int> foo;
+ EXPECT_TRUE(foo.push_back(1));
+ EXPECT_FALSE(foo.push_back(1));
+ EXPECT_TRUE(foo.push_back(2));
+ EXPECT_TRUE(foo.push_back(0));
+ EXPECT_FALSE(foo.push_back(2));
+ EXPECT_FALSE(foo.push_back(1));
+
+ EXPECT_EQ(3u, foo.size());
+ EXPECT_EQ(1, foo[0]);
+ EXPECT_EQ(2, foo[1]);
+ EXPECT_EQ(0, foo[2]);
+
+ // Verify those results with IndexOf as well.
+ EXPECT_EQ(0u, foo.IndexOf(1));
+ EXPECT_EQ(1u, foo.IndexOf(2));
+ EXPECT_EQ(2u, foo.IndexOf(0));
+ EXPECT_EQ(static_cast<size_t>(-1), foo.IndexOf(99));
+}
+
+TEST(UniqueVector, PushBackViaSwap) {
+ UniqueVector<std::string> vect;
+ std::string a("a");
+ EXPECT_TRUE(vect.PushBackViaSwap(&a));
+ EXPECT_EQ("", a);
+
+ a = "a";
+ EXPECT_FALSE(vect.PushBackViaSwap(&a));
+ EXPECT_EQ("a", a);
+
+ EXPECT_EQ(0u, vect.IndexOf("a"));
+}
diff --git a/chromium/tools/gn/value.cc b/chromium/tools/gn/value.cc
new file mode 100644
index 00000000000..1bf0bb7a302
--- /dev/null
+++ b/chromium/tools/gn/value.cc
@@ -0,0 +1,223 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/value.h"
+
+#include <stddef.h>
+#include <utility>
+
+#include "base/strings/string_number_conversions.h"
+#include "base/strings/string_util.h"
+#include "tools/gn/scope.h"
+
+Value::Value()
+ : type_(NONE),
+ boolean_value_(false),
+ int_value_(0),
+ origin_(nullptr) {
+}
+
+Value::Value(const ParseNode* origin, Type t)
+ : type_(t),
+ boolean_value_(false),
+ int_value_(0),
+ origin_(origin) {
+}
+
+Value::Value(const ParseNode* origin, bool bool_val)
+ : type_(BOOLEAN),
+ boolean_value_(bool_val),
+ int_value_(0),
+ origin_(origin) {
+}
+
+Value::Value(const ParseNode* origin, int64_t int_val)
+ : type_(INTEGER),
+ boolean_value_(false),
+ int_value_(int_val),
+ origin_(origin) {
+}
+
+Value::Value(const ParseNode* origin, std::string str_val)
+ : type_(STRING),
+ string_value_(),
+ boolean_value_(false),
+ int_value_(0),
+ origin_(origin) {
+ string_value_.swap(str_val);
+}
+
+Value::Value(const ParseNode* origin, const char* str_val)
+ : type_(STRING),
+ string_value_(str_val),
+ boolean_value_(false),
+ int_value_(0),
+ origin_(origin) {
+}
+
+Value::Value(const ParseNode* origin, std::unique_ptr<Scope> scope)
+ : type_(SCOPE),
+ string_value_(),
+ boolean_value_(false),
+ int_value_(0),
+ scope_value_(std::move(scope)),
+ origin_(origin) {}
+
+Value::Value(const Value& other)
+ : type_(other.type_),
+ string_value_(other.string_value_),
+ boolean_value_(other.boolean_value_),
+ int_value_(other.int_value_),
+ list_value_(other.list_value_),
+ origin_(other.origin_) {
+ if (type() == SCOPE && other.scope_value_.get())
+ scope_value_ = other.scope_value_->MakeClosure();
+}
+
+Value::~Value() {
+}
+
+Value& Value::operator=(const Value& other) {
+ type_ = other.type_;
+ string_value_ = other.string_value_;
+ boolean_value_ = other.boolean_value_;
+ int_value_ = other.int_value_;
+ list_value_ = other.list_value_;
+ if (type() == SCOPE && other.scope_value_.get())
+ scope_value_ = other.scope_value_->MakeClosure();
+ origin_ = other.origin_;
+ return *this;
+}
+
+// static
+const char* Value::DescribeType(Type t) {
+ switch (t) {
+ case NONE:
+ return "none";
+ case BOOLEAN:
+ return "boolean";
+ case INTEGER:
+ return "integer";
+ case STRING:
+ return "string";
+ case LIST:
+ return "list";
+ case SCOPE:
+ return "scope";
+ default:
+ NOTREACHED();
+ return "UNKNOWN";
+ }
+}
+
+void Value::SetScopeValue(std::unique_ptr<Scope> scope) {
+ DCHECK(type_ == SCOPE);
+ scope_value_ = std::move(scope);
+}
+
+std::string Value::ToString(bool quote_string) const {
+ switch (type_) {
+ case NONE:
+ return "<void>";
+ case BOOLEAN:
+ return boolean_value_ ? "true" : "false";
+ case INTEGER:
+ return base::Int64ToString(int_value_);
+ case STRING:
+ if (quote_string) {
+ std::string result = "\"";
+ bool hanging_backslash = false;
+ for (char ch : string_value_) {
+ // If the last character was a literal backslash and the next
+ // character could form a valid escape sequence, we need to insert
+ // an extra backslash to prevent that.
+ if (hanging_backslash && (ch == '$' || ch == '"' || ch == '\\'))
+ result += '\\';
+ // If the next character is a dollar sign or double quote, it needs
+ // to be escaped; otherwise it can be printed as is.
+ if (ch == '$' || ch == '"')
+ result += '\\';
+ result += ch;
+ hanging_backslash = (ch == '\\');
+ }
+ // Again, we need to prevent the closing double quotes from becoming
+ // an escape sequence.
+ if (hanging_backslash)
+ result += '\\';
+ result += '"';
+ return result;
+ }
+ return string_value_;
+ case LIST: {
+ std::string result = "[";
+ for (size_t i = 0; i < list_value_.size(); i++) {
+ if (i > 0)
+ result += ", ";
+ result += list_value_[i].ToString(true);
+ }
+ result.push_back(']');
+ return result;
+ }
+ case SCOPE: {
+ Scope::KeyValueMap scope_values;
+ scope_value_->GetCurrentScopeValues(&scope_values);
+ if (scope_values.empty())
+ return std::string("{ }");
+
+ std::string result = "{\n";
+ for (const auto& pair : scope_values) {
+ result += " " + pair.first.as_string() + " = " +
+ pair.second.ToString(true) + "\n";
+ }
+ result += "}";
+
+ return result;
+ }
+ }
+ return std::string();
+}
+
+bool Value::VerifyTypeIs(Type t, Err* err) const {
+ if (type_ == t)
+ return true;
+
+ *err = Err(origin(),
+ std::string("This is not a ") + DescribeType(t) + ".",
+ std::string("Instead I see a ") + DescribeType(type_) + " = " +
+ ToString(true));
+ return false;
+}
+
+bool Value::operator==(const Value& other) const {
+ if (type_ != other.type_)
+ return false;
+
+ switch (type_) {
+ case Value::BOOLEAN:
+ return boolean_value() == other.boolean_value();
+ case Value::INTEGER:
+ return int_value() == other.int_value();
+ case Value::STRING:
+ return string_value() == other.string_value();
+ case Value::LIST:
+ if (list_value().size() != other.list_value().size())
+ return false;
+ for (size_t i = 0; i < list_value().size(); i++) {
+ if (list_value()[i] != other.list_value()[i])
+ return false;
+ }
+ return true;
+ case Value::SCOPE:
+ // Scopes are always considered not equal because there's currently
+ // no use case for comparing them, and it requires a bunch of complex
+ // iteration code.
+ return false;
+ default:
+ return false;
+ }
+}
+
+bool Value::operator!=(const Value& other) const {
+ return !operator==(other);
+}
diff --git a/chromium/tools/gn/value.h b/chromium/tools/gn/value.h
new file mode 100644
index 00000000000..a8a83fb7c8c
--- /dev/null
+++ b/chromium/tools/gn/value.h
@@ -0,0 +1,133 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_VALUE_H_
+#define TOOLS_GN_VALUE_H_
+
+#include <stdint.h>
+
+#include <map>
+#include <memory>
+
+#include "base/logging.h"
+#include "base/macros.h"
+#include "tools/gn/err.h"
+
+class ParseNode;
+class Scope;
+
+// Represents a variable value in the interpreter.
+class Value {
+ public:
+ enum Type {
+ NONE = 0,
+ BOOLEAN,
+ INTEGER,
+ STRING,
+ LIST,
+ SCOPE,
+ };
+
+ Value();
+ Value(const ParseNode* origin, Type t);
+ Value(const ParseNode* origin, bool bool_val);
+ Value(const ParseNode* origin, int64_t int_val);
+ Value(const ParseNode* origin, std::string str_val);
+ Value(const ParseNode* origin, const char* str_val);
+ // Values "shouldn't" have null scopes when type == Scope, so be sure to
+ // always set one. However, this is not asserted since there are some
+ // use-cases for creating values and immediately setting the scope on it. So
+ // you can pass a null scope here if you promise to set it before any other
+ // code gets it (code will generally assume the scope is not null).
+ Value(const ParseNode* origin, std::unique_ptr<Scope> scope);
+
+ Value(const Value& other);
+ ~Value();
+
+ Value& operator=(const Value& other);
+
+ Type type() const { return type_; }
+
+ // Returns a string describing the given type.
+ static const char* DescribeType(Type t);
+
+ // Returns the node that made this. May be NULL.
+ const ParseNode* origin() const { return origin_; }
+ void set_origin(const ParseNode* o) { origin_ = o; }
+
+ bool& boolean_value() {
+ DCHECK(type_ == BOOLEAN);
+ return boolean_value_;
+ }
+ const bool& boolean_value() const {
+ DCHECK(type_ == BOOLEAN);
+ return boolean_value_;
+ }
+
+ int64_t& int_value() {
+ DCHECK(type_ == INTEGER);
+ return int_value_;
+ }
+ const int64_t& int_value() const {
+ DCHECK(type_ == INTEGER);
+ return int_value_;
+ }
+
+ std::string& string_value() {
+ DCHECK(type_ == STRING);
+ return string_value_;
+ }
+ const std::string& string_value() const {
+ DCHECK(type_ == STRING);
+ return string_value_;
+ }
+
+ std::vector<Value>& list_value() {
+ DCHECK(type_ == LIST);
+ return list_value_;
+ }
+ const std::vector<Value>& list_value() const {
+ DCHECK(type_ == LIST);
+ return list_value_;
+ }
+
+ Scope* scope_value() {
+ DCHECK(type_ == SCOPE);
+ return scope_value_.get();
+ }
+ const Scope* scope_value() const {
+ DCHECK(type_ == SCOPE);
+ return scope_value_.get();
+ }
+ void SetScopeValue(std::unique_ptr<Scope> scope);
+
+ // Converts the given value to a string. Returns true if strings should be
+ // quoted or the ToString of a string should be the string itself. If the
+ // string is quoted, it will also enable escaping.
+ std::string ToString(bool quote_strings) const;
+
+ // Verifies that the value is of the given type. If it isn't, returns
+ // false and sets the error.
+ bool VerifyTypeIs(Type t, Err* err) const;
+
+ // Compares values. Only the "value" is compared, not the origin.
+ bool operator==(const Value& other) const;
+ bool operator!=(const Value& other) const;
+
+ private:
+ // This are a lot of objects associated with every Value that need
+ // initialization and tear down every time. It might be more efficient to
+ // create a union of ManualConstructor objects (see SmallMap) and only
+ // use the one we care about.
+ Type type_;
+ std::string string_value_;
+ bool boolean_value_;
+ int64_t int_value_;
+ std::vector<Value> list_value_;
+ std::unique_ptr<Scope> scope_value_;
+
+ const ParseNode* origin_;
+};
+
+#endif // TOOLS_GN_VALUE_H_
diff --git a/chromium/tools/gn/value_extractors.cc b/chromium/tools/gn/value_extractors.cc
new file mode 100644
index 00000000000..ff009ce1b59
--- /dev/null
+++ b/chromium/tools/gn/value_extractors.cc
@@ -0,0 +1,257 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/value_extractors.h"
+
+#include <stddef.h>
+
+#include "tools/gn/build_settings.h"
+#include "tools/gn/err.h"
+#include "tools/gn/label.h"
+#include "tools/gn/source_dir.h"
+#include "tools/gn/source_file.h"
+#include "tools/gn/target.h"
+#include "tools/gn/value.h"
+
+namespace {
+
+// Sets the error and returns false on failure.
+template<typename T, class Converter>
+bool ListValueExtractor(const Value& value,
+ std::vector<T>* dest,
+ Err* err,
+ const Converter& converter) {
+ if (!value.VerifyTypeIs(Value::LIST, err))
+ return false;
+ const std::vector<Value>& input_list = value.list_value();
+ dest->resize(input_list.size());
+ for (size_t i = 0; i < input_list.size(); i++) {
+ if (!converter(input_list[i], &(*dest)[i], err))
+ return false;
+ }
+ return true;
+}
+
+// Like the above version but extracts to a UniqueVector and sets the error if
+// there are duplicates.
+template<typename T, class Converter>
+bool ListValueUniqueExtractor(const Value& value,
+ UniqueVector<T>* dest,
+ Err* err,
+ const Converter& converter) {
+ if (!value.VerifyTypeIs(Value::LIST, err))
+ return false;
+ const std::vector<Value>& input_list = value.list_value();
+
+ for (const auto& item : input_list) {
+ T new_one;
+ if (!converter(item, &new_one, err))
+ return false;
+ if (!dest->push_back(new_one)) {
+ // Already in the list, throw error.
+ *err = Err(item, "Duplicate item in list");
+ size_t previous_index = dest->IndexOf(new_one);
+ err->AppendSubErr(Err(input_list[previous_index],
+ "This was the previous definition."));
+ return false;
+ }
+ }
+ return true;
+}
+
+struct RelativeFileConverter {
+ RelativeFileConverter(const BuildSettings* build_settings_in,
+ const SourceDir& current_dir_in)
+ : build_settings(build_settings_in),
+ current_dir(current_dir_in) {
+ }
+ bool operator()(const Value& v, SourceFile* out, Err* err) const {
+ *out = current_dir.ResolveRelativeFile(v, err,
+ build_settings->root_path_utf8());
+ return !err->has_error();
+ }
+ const BuildSettings* build_settings;
+ const SourceDir& current_dir;
+};
+
+struct LibFileConverter {
+ LibFileConverter(const BuildSettings* build_settings_in,
+ const SourceDir& current_dir_in)
+ : build_settings(build_settings_in),
+ current_dir(current_dir_in) {
+ }
+ bool operator()(const Value& v, LibFile* out, Err* err) const {
+ if (!v.VerifyTypeIs(Value::STRING, err))
+ return false;
+ if (v.string_value().find('/') == std::string::npos) {
+ *out = LibFile(v.string_value());
+ } else {
+ *out = LibFile(current_dir.ResolveRelativeFile(
+ v, err, build_settings->root_path_utf8()));
+ }
+ return !err->has_error();
+ }
+ const BuildSettings* build_settings;
+ const SourceDir& current_dir;
+};
+
+struct RelativeDirConverter {
+ RelativeDirConverter(const BuildSettings* build_settings_in,
+ const SourceDir& current_dir_in)
+ : build_settings(build_settings_in),
+ current_dir(current_dir_in) {
+ }
+ bool operator()(const Value& v, SourceDir* out, Err* err) const {
+ *out = current_dir.ResolveRelativeDir(v, err,
+ build_settings->root_path_utf8());
+ return true;
+ }
+ const BuildSettings* build_settings;
+ const SourceDir& current_dir;
+};
+
+// Fills in a label.
+template<typename T> struct LabelResolver {
+ LabelResolver(const SourceDir& current_dir_in,
+ const Label& current_toolchain_in)
+ : current_dir(current_dir_in),
+ current_toolchain(current_toolchain_in) {}
+ bool operator()(const Value& v, Label* out, Err* err) const {
+ if (!v.VerifyTypeIs(Value::STRING, err))
+ return false;
+ *out = Label::Resolve(current_dir, current_toolchain, v, err);
+ return !err->has_error();
+ }
+ const SourceDir& current_dir;
+ const Label& current_toolchain;
+};
+
+// Fills the label part of a LabelPtrPair, leaving the pointer null.
+template<typename T> struct LabelPtrResolver {
+ LabelPtrResolver(const SourceDir& current_dir_in,
+ const Label& current_toolchain_in)
+ : current_dir(current_dir_in),
+ current_toolchain(current_toolchain_in) {}
+ bool operator()(const Value& v, LabelPtrPair<T>* out, Err* err) const {
+ if (!v.VerifyTypeIs(Value::STRING, err))
+ return false;
+ out->label = Label::Resolve(current_dir, current_toolchain, v, err);
+ out->origin = v.origin();
+ return !err->has_error();
+ }
+ const SourceDir& current_dir;
+ const Label& current_toolchain;
+};
+
+struct LabelPatternResolver {
+ LabelPatternResolver(const SourceDir& current_dir_in)
+ : current_dir(current_dir_in) {
+ }
+ bool operator()(const Value& v, LabelPattern* out, Err* err) const {
+ *out = LabelPattern::GetPattern(current_dir, v, err);
+ return !err->has_error();
+ }
+ const SourceDir& current_dir;
+};
+
+} // namespace
+
+bool ExtractListOfStringValues(const Value& value,
+ std::vector<std::string>* dest,
+ Err* err) {
+ if (!value.VerifyTypeIs(Value::LIST, err))
+ return false;
+ const std::vector<Value>& input_list = value.list_value();
+ dest->reserve(input_list.size());
+ for (const auto& item : input_list) {
+ if (!item.VerifyTypeIs(Value::STRING, err))
+ return false;
+ dest->push_back(item.string_value());
+ }
+ return true;
+}
+
+bool ExtractListOfRelativeFiles(const BuildSettings* build_settings,
+ const Value& value,
+ const SourceDir& current_dir,
+ std::vector<SourceFile>* files,
+ Err* err) {
+ return ListValueExtractor(value, files, err,
+ RelativeFileConverter(build_settings, current_dir));
+}
+
+bool ExtractListOfLibs(const BuildSettings* build_settings,
+ const Value& value,
+ const SourceDir& current_dir,
+ std::vector<LibFile>* libs,
+ Err* err) {
+ return ListValueExtractor(value, libs, err,
+ LibFileConverter(build_settings, current_dir));
+}
+
+bool ExtractListOfRelativeDirs(const BuildSettings* build_settings,
+ const Value& value,
+ const SourceDir& current_dir,
+ std::vector<SourceDir>* dest,
+ Err* err) {
+ return ListValueExtractor(value, dest, err,
+ RelativeDirConverter(build_settings, current_dir));
+}
+
+bool ExtractListOfLabels(const Value& value,
+ const SourceDir& current_dir,
+ const Label& current_toolchain,
+ LabelTargetVector* dest,
+ Err* err) {
+ return ListValueExtractor(value, dest, err,
+ LabelPtrResolver<Target>(current_dir,
+ current_toolchain));
+}
+
+bool ExtractListOfUniqueLabels(const Value& value,
+ const SourceDir& current_dir,
+ const Label& current_toolchain,
+ UniqueVector<Label>* dest,
+ Err* err) {
+ return ListValueUniqueExtractor(value, dest, err,
+ LabelResolver<Config>(current_dir,
+ current_toolchain));
+}
+
+bool ExtractListOfUniqueLabels(const Value& value,
+ const SourceDir& current_dir,
+ const Label& current_toolchain,
+ UniqueVector<LabelConfigPair>* dest,
+ Err* err) {
+ return ListValueUniqueExtractor(value, dest, err,
+ LabelPtrResolver<Config>(current_dir,
+ current_toolchain));
+}
+
+bool ExtractListOfUniqueLabels(const Value& value,
+ const SourceDir& current_dir,
+ const Label& current_toolchain,
+ UniqueVector<LabelTargetPair>* dest,
+ Err* err) {
+ return ListValueUniqueExtractor(value, dest, err,
+ LabelPtrResolver<Target>(current_dir,
+ current_toolchain));
+}
+
+bool ExtractRelativeFile(const BuildSettings* build_settings,
+ const Value& value,
+ const SourceDir& current_dir,
+ SourceFile* file,
+ Err* err) {
+ RelativeFileConverter converter(build_settings, current_dir);
+ return converter(value, file, err);
+}
+
+bool ExtractListOfLabelPatterns(const Value& value,
+ const SourceDir& current_dir,
+ std::vector<LabelPattern>* patterns,
+ Err* err) {
+ return ListValueExtractor(value, patterns, err,
+ LabelPatternResolver(current_dir));
+}
diff --git a/chromium/tools/gn/value_extractors.h b/chromium/tools/gn/value_extractors.h
new file mode 100644
index 00000000000..1e426502036
--- /dev/null
+++ b/chromium/tools/gn/value_extractors.h
@@ -0,0 +1,89 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_VALUE_EXTRACTORS_H_
+#define TOOLS_GN_VALUE_EXTRACTORS_H_
+
+#include <string>
+#include <vector>
+
+#include "tools/gn/label_ptr.h"
+#include "tools/gn/lib_file.h"
+#include "tools/gn/unique_vector.h"
+
+class BuildSettings;
+class Err;
+class Label;
+class LabelPattern;
+class SourceDir;
+class SourceFile;
+class Value;
+
+// On failure, returns false and sets the error.
+bool ExtractListOfStringValues(const Value& value,
+ std::vector<std::string>* dest,
+ Err* err);
+
+// Looks for a list of source files relative to a given current dir.
+bool ExtractListOfRelativeFiles(const BuildSettings* build_settings,
+ const Value& value,
+ const SourceDir& current_dir,
+ std::vector<SourceFile>* files,
+ Err* err);
+
+// Extracts a list of libraries. When they contain a "/" they are treated as
+// source paths and are otherwise treated as plain strings.
+bool ExtractListOfLibs(const BuildSettings* build_settings,
+ const Value& value,
+ const SourceDir& current_dir,
+ std::vector<LibFile>* libs,
+ Err* err);
+
+// Looks for a list of source directories relative to a given current dir.
+bool ExtractListOfRelativeDirs(const BuildSettings* build_settings,
+ const Value& value,
+ const SourceDir& current_dir,
+ std::vector<SourceDir>* dest,
+ Err* err);
+
+// Extracts the list of labels and their origins to the given vector. Only the
+// labels are filled in, the ptr for each pair in the vector will be null.
+bool ExtractListOfLabels(const Value& value,
+ const SourceDir& current_dir,
+ const Label& current_toolchain,
+ LabelTargetVector* dest,
+ Err* err);
+
+// Extracts the list of labels and their origins to the given vector. For the
+// version taking Label*Pair, only the labels are filled in, the ptr for each
+// pair in the vector will be null. Sets an error and returns false if a label
+// is maformed or there are duplicates.
+bool ExtractListOfUniqueLabels(const Value& value,
+ const SourceDir& current_dir,
+ const Label& current_toolchain,
+ UniqueVector<Label>* dest,
+ Err* err);
+bool ExtractListOfUniqueLabels(const Value& value,
+ const SourceDir& current_dir,
+ const Label& current_toolchain,
+ UniqueVector<LabelConfigPair>* dest,
+ Err* err);
+bool ExtractListOfUniqueLabels(const Value& value,
+ const SourceDir& current_dir,
+ const Label& current_toolchain,
+ UniqueVector<LabelTargetPair>* dest,
+ Err* err);
+
+bool ExtractRelativeFile(const BuildSettings* build_settings,
+ const Value& value,
+ const SourceDir& current_dir,
+ SourceFile* file,
+ Err* err);
+
+bool ExtractListOfLabelPatterns(const Value& value,
+ const SourceDir& current_dir,
+ std::vector<LabelPattern>* patterns,
+ Err* err);
+
+#endif // TOOLS_GN_VALUE_EXTRACTORS_H_
diff --git a/chromium/tools/gn/value_unittest.cc b/chromium/tools/gn/value_unittest.cc
new file mode 100644
index 00000000000..9fefaf7948b
--- /dev/null
+++ b/chromium/tools/gn/value_unittest.cc
@@ -0,0 +1,43 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdint.h>
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/test_with_scope.h"
+#include "tools/gn/value.h"
+
+TEST(Value, ToString) {
+ Value strval(nullptr, "hi\" $me\\you\\$\\\"");
+ EXPECT_EQ("hi\" $me\\you\\$\\\"", strval.ToString(false));
+ EXPECT_EQ("\"hi\\\" \\$me\\you\\\\\\$\\\\\\\"\"", strval.ToString(true));
+
+ // crbug.com/470217
+ Value strval2(nullptr, "\\foo\\\\bar\\");
+ EXPECT_EQ("\"\\foo\\\\\\bar\\\\\"", strval2.ToString(true));
+
+ // Void type.
+ EXPECT_EQ("<void>", Value().ToString(false));
+
+ // Test lists, bools, and ints.
+ Value listval(nullptr, Value::LIST);
+ listval.list_value().push_back(Value(nullptr, "hi\"me"));
+ listval.list_value().push_back(Value(nullptr, true));
+ listval.list_value().push_back(Value(nullptr, false));
+ listval.list_value().push_back(Value(nullptr, static_cast<int64_t>(42)));
+ // Printing lists always causes embedded strings to be quoted (ignoring the
+ // quote flag), or else they wouldn't make much sense.
+ EXPECT_EQ("[\"hi\\\"me\", true, false, 42]", listval.ToString(false));
+ EXPECT_EQ("[\"hi\\\"me\", true, false, 42]", listval.ToString(true));
+
+ // Scopes.
+ TestWithScope setup;
+ Scope* scope = new Scope(setup.scope());
+ Value scopeval(nullptr, std::unique_ptr<Scope>(scope));
+ EXPECT_EQ("{ }", scopeval.ToString(false));
+
+ scope->SetValue("a", Value(nullptr, static_cast<int64_t>(42)), nullptr);
+ scope->SetValue("b", Value(nullptr, "hello, world"), nullptr);
+ EXPECT_EQ("{\n a = 42\n b = \"hello, world\"\n}", scopeval.ToString(false));
+}
diff --git a/chromium/tools/gn/variables.cc b/chromium/tools/gn/variables.cc
new file mode 100644
index 00000000000..4c5fae1adf6
--- /dev/null
+++ b/chromium/tools/gn/variables.cc
@@ -0,0 +1,1660 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/variables.h"
+
+namespace variables {
+
+// Built-in variables ----------------------------------------------------------
+
+const char kHostCpu[] = "host_cpu";
+const char kHostCpu_HelpShort[] =
+ "host_cpu: [string] The processor architecture that GN is running on.";
+const char kHostCpu_Help[] =
+ "host_cpu: The processor architecture that GN is running on.\n"
+ "\n"
+ " This is value is exposed so that cross-compile toolchains can\n"
+ " access the host architecture when needed.\n"
+ "\n"
+ " The value should generally be considered read-only, but it can be\n"
+ " overriden in order to handle unusual cases where there might\n"
+ " be multiple plausible values for the host architecture (e.g., if\n"
+ " you can do either 32-bit or 64-bit builds). The value is not used\n"
+ " internally by GN for any purpose.\n"
+ "\n"
+ "Some possible values:\n"
+ " - \"x64\"\n"
+ " - \"x86\"\n";
+
+const char kHostOs[] = "host_os";
+const char kHostOs_HelpShort[] =
+ "host_os: [string] The operating system that GN is running on.";
+const char kHostOs_Help[] =
+ "host_os: [string] The operating system that GN is running on.\n"
+ "\n"
+ " This value is exposed so that cross-compiles can access the host\n"
+ " build system's settings.\n"
+ "\n"
+ " This value should generally be treated as read-only. It, however,\n"
+ " is not used internally by GN for any purpose.\n"
+ "\n"
+ "Some possible values:\n"
+ " - \"linux\"\n"
+ " - \"mac\"\n"
+ " - \"win\"\n";
+
+const char kTargetCpu[] = "target_cpu";
+const char kTargetCpu_HelpShort[] =
+ "target_cpu: [string] The desired cpu architecture for the build.";
+const char kTargetCpu_Help[] =
+ "target_cpu: The desired cpu architecture for the build.\n"
+ "\n"
+ " This value should be used to indicate the desired architecture for\n"
+ " the primary objects of the build. It will match the cpu architecture\n"
+ " of the default toolchain.\n"
+ "\n"
+ " In many cases, this is the same as \"host_cpu\", but in the case\n"
+ " of cross-compiles, this can be set to something different. This \n"
+ " value is different from \"current_cpu\" in that it can be referenced\n"
+ " from inside any toolchain. This value can also be ignored if it is\n"
+ " not needed or meaningful for a project.\n"
+ "\n"
+ " This value is not used internally by GN for any purpose, so it\n"
+ " may be set to whatever value is needed for the build.\n"
+ " GN defaults this value to the empty string (\"\") and the\n"
+ " configuration files should set it to an appropriate value\n"
+ " (e.g., setting it to the value of \"host_cpu\") if it is not\n"
+ " overridden on the command line or in the args.gn file.\n"
+ "\n"
+ " Where practical, use one of the following list of common values:\n"
+ "\n"
+ "Possible values:\n"
+ " - \"x86\"\n"
+ " - \"x64\"\n"
+ " - \"arm\"\n"
+ " - \"arm64\"\n"
+ " - \"mipsel\"\n";
+
+const char kTargetOs[] = "target_os";
+const char kTargetOs_HelpShort[] =
+ "target_os: [string] The desired operating system for the build.";
+const char kTargetOs_Help[] =
+ "target_os: The desired operating system for the build.\n"
+ "\n"
+ " This value should be used to indicate the desired operating system\n"
+ " for the primary object(s) of the build. It will match the OS of\n"
+ " the default toolchain.\n"
+ "\n"
+ " In many cases, this is the same as \"host_os\", but in the case of\n"
+ " cross-compiles, it may be different. This variable differs from\n"
+ " \"current_os\" in that it can be referenced from inside any\n"
+ " toolchain and will always return the initial value.\n"
+ "\n"
+ " This should be set to the most specific value possible. So,\n"
+ " \"android\" or \"chromeos\" should be used instead of \"linux\"\n"
+ " where applicable, even though Android and ChromeOS are both Linux\n"
+ " variants. This can mean that one needs to write\n"
+ "\n"
+ " if (target_os == \"android\" || target_os == \"linux\") {\n"
+ " # ...\n"
+ " }\n"
+ "\n"
+ " and so forth.\n"
+ "\n"
+ " This value is not used internally by GN for any purpose, so it\n"
+ " may be set to whatever value is needed for the build.\n"
+ " GN defaults this value to the empty string (\"\") and the\n"
+ " configuration files should set it to an appropriate value\n"
+ " (e.g., setting it to the value of \"host_os\") if it is not\n"
+ " set via the command line or in the args.gn file.\n"
+ "\n"
+ " Where practical, use one of the following list of common values:\n"
+ "\n"
+ "Possible values:\n"
+ " - \"android\"\n"
+ " - \"chromeos\"\n"
+ " - \"ios\"\n"
+ " - \"linux\"\n"
+ " - \"nacl\"\n"
+ " - \"mac\"\n"
+ " - \"win\"\n";
+
+const char kCurrentCpu[] = "current_cpu";
+const char kCurrentCpu_HelpShort[] =
+ "current_cpu: [string] The processor architecture of the current "
+ "toolchain.";
+const char kCurrentCpu_Help[] =
+ "current_cpu: The processor architecture of the current toolchain.\n"
+ "\n"
+ " The build configuration usually sets this value based on the value\n"
+ " of \"host_cpu\" (see \"gn help host_cpu\") and then threads\n"
+ " this through the toolchain definitions to ensure that it always\n"
+ " reflects the appropriate value.\n"
+ "\n"
+ " This value is not used internally by GN for any purpose. It is\n"
+ " set it to the empty string (\"\") by default but is declared so\n"
+ " that it can be overridden on the command line if so desired.\n"
+ "\n"
+ " See \"gn help target_cpu\" for a list of common values returned.\n";
+
+const char kCurrentOs[] = "current_os";
+const char kCurrentOs_HelpShort[] =
+ "current_os: [string] The operating system of the current toolchain.";
+const char kCurrentOs_Help[] =
+ "current_os: The operating system of the current toolchain.\n"
+ "\n"
+ " The build configuration usually sets this value based on the value\n"
+ " of \"target_os\" (see \"gn help target_os\"), and then threads this\n"
+ " through the toolchain definitions to ensure that it always reflects\n"
+ " the appropriate value.\n"
+ "\n"
+ " This value is not used internally by GN for any purpose. It is\n"
+ " set it to the empty string (\"\") by default but is declared so\n"
+ " that it can be overridden on the command line if so desired.\n"
+ "\n"
+ " See \"gn help target_os\" for a list of common values returned.\n";
+
+const char kCurrentToolchain[] = "current_toolchain";
+const char kCurrentToolchain_HelpShort[] =
+ "current_toolchain: [string] Label of the current toolchain.";
+const char kCurrentToolchain_Help[] =
+ "current_toolchain: Label of the current toolchain.\n"
+ "\n"
+ " A fully-qualified label representing the current toolchain. You can\n"
+ " use this to make toolchain-related decisions in the build. See also\n"
+ " \"default_toolchain\".\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " if (current_toolchain == \"//build:64_bit_toolchain\") {\n"
+ " executable(\"output_thats_64_bit_only\") {\n"
+ " ...\n";
+
+const char kDefaultToolchain[] = "default_toolchain";
+const char kDefaultToolchain_HelpShort[] =
+ "default_toolchain: [string] Label of the default toolchain.";
+const char kDefaultToolchain_Help[] =
+ "default_toolchain: [string] Label of the default toolchain.\n"
+ "\n"
+ " A fully-qualified label representing the default toolchain, which may\n"
+ " not necessarily be the current one (see \"current_toolchain\").\n";
+
+const char kPythonPath[] = "python_path";
+const char kPythonPath_HelpShort[] =
+ "python_path: [string] Absolute path of Python.";
+const char kPythonPath_Help[] =
+ "python_path: Absolute path of Python.\n"
+ "\n"
+ " Normally used in toolchain definitions if running some command\n"
+ " requires Python. You will normally not need this when invoking scripts\n"
+ " since GN automatically finds it for you.\n";
+
+const char kRootBuildDir[] = "root_build_dir";
+const char kRootBuildDir_HelpShort[] =
+ "root_build_dir: [string] Directory where build commands are run.";
+const char kRootBuildDir_Help[] =
+ "root_build_dir: [string] Directory where build commands are run.\n"
+ "\n"
+ " This is the root build output directory which will be the current\n"
+ " directory when executing all compilers and scripts.\n"
+ "\n"
+ " Most often this is used with rebase_path (see \"gn help rebase_path\")\n"
+ " to convert arguments to be relative to a script's current directory.\n";
+
+const char kRootGenDir[] = "root_gen_dir";
+const char kRootGenDir_HelpShort[] =
+ "root_gen_dir: [string] Directory for the toolchain's generated files.";
+const char kRootGenDir_Help[] =
+ "root_gen_dir: Directory for the toolchain's generated files.\n"
+ "\n"
+ " Absolute path to the root of the generated output directory tree for\n"
+ " the current toolchain. An example would be \"//out/Debug/gen\" for the\n"
+ " default toolchain, or \"//out/Debug/arm/gen\" for the \"arm\"\n"
+ " toolchain.\n"
+ "\n"
+ " This is primarily useful for setting up include paths for generated\n"
+ " files. If you are passing this to a script, you will want to pass it\n"
+ " through rebase_path() (see \"gn help rebase_path\") to convert it\n"
+ " to be relative to the build directory.\n"
+ "\n"
+ " See also \"target_gen_dir\" which is usually a better location for\n"
+ " generated files. It will be inside the root generated dir.\n";
+
+// Help text for the built-in "root_out_dir" variable ("gn help
+// root_out_dir"): root of the current toolchain's output tree.
+const char kRootOutDir[] = "root_out_dir";
+const char kRootOutDir_HelpShort[] =
+ "root_out_dir: [string] Root directory for toolchain output files.";
+const char kRootOutDir_Help[] =
+ "root_out_dir: [string] Root directory for toolchain output files.\n"
+ "\n"
+ " Absolute path to the root of the output directory tree for the current\n"
+ " toolchain. It will not have a trailing slash.\n"
+ "\n"
+ " For the default toolchain this will be the same as the root_build_dir.\n"
+ " An example would be \"//out/Debug\" for the default toolchain, or\n"
+ " \"//out/Debug/arm\" for the \"arm\" toolchain.\n"
+ "\n"
+ " This is primarily useful for setting up script calls. If you are\n"
+ " passing this to a script, you will want to pass it through\n"
+ " rebase_path() (see \"gn help rebase_path\") to convert it\n"
+ " to be relative to the build directory.\n"
+ "\n"
+ " See also \"target_out_dir\" which is usually a better location for\n"
+ " output files. It will be inside the root output dir.\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " action(\"myscript\") {\n"
+ " # Pass the output dir to the script.\n"
+ " args = [ \"-o\", rebase_path(root_out_dir, root_build_dir) ]\n"
+ " }\n";
+
+// Help text for the built-in "target_gen_dir" variable ("gn help
+// target_gen_dir"): per-target generated-file directory.
+const char kTargetGenDir[] = "target_gen_dir";
+const char kTargetGenDir_HelpShort[] =
+ "target_gen_dir: [string] Directory for a target's generated files.";
+const char kTargetGenDir_Help[] =
+ "target_gen_dir: Directory for a target's generated files.\n"
+ "\n"
+ " Absolute path to the target's generated file directory. This will be\n"
+ " the \"root_gen_dir\" followed by the relative path to the current\n"
+ " build file. If your file is in \"//tools/doom_melon\" then\n"
+ " target_gen_dir would be \"//out/Debug/gen/tools/doom_melon\". It will\n"
+ " not have a trailing slash.\n"
+ "\n"
+ " This is primarily useful for setting up include paths for generated\n"
+ " files. If you are passing this to a script, you will want to pass it\n"
+ " through rebase_path() (see \"gn help rebase_path\") to convert it\n"
+ " to be relative to the build directory.\n"
+ "\n"
+ " See also \"gn help root_gen_dir\".\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " action(\"myscript\") {\n"
+ " # Pass the generated output dir to the script.\n"
+ " args = [ \"-o\", rebase_path(target_gen_dir, root_build_dir) ]"
+ "\n"
+ " }\n";
+
+// Help text for the built-in "target_out_dir" variable ("gn help
+// target_out_dir"): per-target object/output directory.
+const char kTargetOutDir[] = "target_out_dir";
+const char kTargetOutDir_HelpShort[] =
+ "target_out_dir: [string] Directory for target output files.";
+const char kTargetOutDir_Help[] =
+ "target_out_dir: [string] Directory for target output files.\n"
+ "\n"
+ " Absolute path to the target's generated file directory. If your\n"
+ " current target is in \"//tools/doom_melon\" then this value might be\n"
+ " \"//out/Debug/obj/tools/doom_melon\". It will not have a trailing\n"
+ " slash.\n"
+ "\n"
+ " This is primarily useful for setting up arguments for calling\n"
+ " scripts. If you are passing this to a script, you will want to pass it\n"
+ " through rebase_path() (see \"gn help rebase_path\") to convert it\n"
+ " to be relative to the build directory.\n"
+ "\n"
+ " See also \"gn help root_out_dir\".\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " action(\"myscript\") {\n"
+ " # Pass the output dir to the script.\n"
+ " args = [ \"-o\", rebase_path(target_out_dir, root_build_dir) ]"
+ "\n"
+ " }\n";
+
+// Target variables ------------------------------------------------------------
+
+// Shared help fragment describing the order in which flags and values from
+// a target and its configs are applied. Appended to several *_Help strings
+// below. Fix: "occurance" -> "occurrence" (typo in user-visible help text).
+#define COMMON_ORDERING_HELP \
+ "\n" \
+ "Ordering of flags and values\n" \
+ "\n" \
+ " 1. Those set on the current target (not in a config).\n" \
+ " 2. Those set on the \"configs\" on the target in order that the\n" \
+ " configs appear in the list.\n" \
+ " 3. Those set on the \"all_dependent_configs\" on the target in order\n" \
+ " that the configs appear in the list.\n" \
+ " 4. Those set on the \"public_configs\" on the target in order that\n" \
+ " those configs appear in the list.\n" \
+ " 5. all_dependent_configs pulled from dependencies, in the order of\n" \
+ " the \"deps\" list. This is done recursively. If a config appears\n" \
+ " more than once, only the first occurrence will be used.\n" \
+ " 6. public_configs pulled from dependencies, in the order of the\n" \
+ " \"deps\" list. If a dependency is public, they will be applied\n" \
+ " recursively.\n"
+
+// Help text for the "all_dependent_configs" target variable: configs
+// force-applied to all (transitive) dependents of the target.
+const char kAllDependentConfigs[] = "all_dependent_configs";
+const char kAllDependentConfigs_HelpShort[] =
+ "all_dependent_configs: [label list] Configs to be forced on dependents.";
+const char kAllDependentConfigs_Help[] =
+ "all_dependent_configs: Configs to be forced on dependents.\n"
+ "\n"
+ " A list of config labels.\n"
+ "\n"
+ " All targets depending on this one, and recursively, all targets\n"
+ " depending on those, will have the configs listed in this variable\n"
+ " added to them. These configs will also apply to the current target.\n"
+ "\n"
+ " This addition happens in a second phase once a target and all of its\n"
+ " dependencies have been resolved. Therefore, a target will not see\n"
+ " these force-added configs in their \"configs\" variable while the\n"
+ " script is running, and then can not be removed. As a result, this\n"
+ " capability should generally only be used to add defines and include\n"
+ " directories necessary to compile a target's headers.\n"
+ "\n"
+ " See also \"public_configs\".\n"
+ COMMON_ORDERING_HELP;
+
+// Help text for the "allow_circular_includes_from" target variable: lets
+// listed deps include headers from this target against the dep direction.
+const char kAllowCircularIncludesFrom[] = "allow_circular_includes_from";
+const char kAllowCircularIncludesFrom_HelpShort[] =
+ "allow_circular_includes_from: [label list] Permit includes from deps.";
+const char kAllowCircularIncludesFrom_Help[] =
+ "allow_circular_includes_from: Permit includes from deps.\n"
+ "\n"
+ " A list of target labels. Must be a subset of the target's \"deps\".\n"
+ " These targets will be permitted to include headers from the current\n"
+ " target despite the dependency going in the opposite direction.\n"
+ "\n"
+ " When you use this, both targets must be included in a final binary\n"
+ " for it to link. To keep linker errors from happening, it is good\n"
+ " practice to have all external dependencies depend only on one of\n"
+ " the two targets, and to set the visibility on the other to enforce\n"
+ " this. Thus the targets will always be linked together in any output.\n"
+ "\n"
+ "Details\n"
+ "\n"
+ " Normally, for a file in target A to include a file from target B,\n"
+ " A must list B as a dependency. This invariant is enforced by the\n"
+ " \"gn check\" command (and the --check flag to \"gn gen\" -- see\n"
+ " \"gn help check\").\n"
+ "\n"
+ " Sometimes, two targets might be the same unit for linking purposes\n"
+ " (two source sets or static libraries that would always be linked\n"
+ " together in a final executable or shared library) and they each\n"
+ " include headers from the other: you want A to be able to include B's\n"
+ " headers, and B to include A's headers. This is not an ideal situation\n"
+ " but is sometimes unavoidable.\n"
+ "\n"
+ " This list, if specified, lists which of the dependencies of the\n"
+ " current target can include header files from the current target.\n"
+ " That is, if A depends on B, B can only include headers from A if it is\n"
+ " in A's allow_circular_includes_from list. Normally includes must\n"
+ " follow the direction of dependencies, this flag allows them to go\n"
+ " in the opposite direction.\n"
+ "\n"
+ "Danger\n"
+ "\n"
+ " In the above example, A's headers are likely to include headers from\n"
+ " A's dependencies. Those dependencies may have public_configs that\n"
+ " apply flags, defines, and include paths that make those headers work\n"
+ " properly.\n"
+ "\n"
+ " With allow_circular_includes_from, B can include A's headers, and\n"
+ " transitively from A's dependencies, without having the dependencies\n"
+ " that would bring in the public_configs those headers need. The result\n"
+ " may be errors or inconsistent builds.\n"
+ "\n"
+ " So when you use allow_circular_includes_from, make sure that any\n"
+ " compiler settings, flags, and include directories are the same between\n"
+ " both targets (consider putting such things in a shared config they can\n"
+ " both reference). Make sure the dependencies are also the same (you\n"
+ " might consider a group to collect such dependencies they both\n"
+ " depend on).\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " source_set(\"a\") {\n"
+ " deps = [ \":b\", \":a_b_shared_deps\" ]\n"
+ " allow_circular_includes_from = [ \":b\" ]\n"
+ " ...\n"
+ " }\n"
+ "\n"
+ " source_set(\"b\") {\n"
+ " deps = [ \":a_b_shared_deps\" ]\n"
+ " # Sources here can include headers from a despite lack of deps.\n"
+ " ...\n"
+ " }\n"
+ "\n"
+ " group(\"a_b_shared_deps\") {\n"
+ " public_deps = [ \":c\" ]\n"
+ " }\n";
+
+// Help text for the "args" variable: command-line arguments for
+// action/action_foreach targets.
+const char kArgs[] = "args";
+const char kArgs_HelpShort[] =
+ "args: [string list] Arguments passed to an action.";
+const char kArgs_Help[] =
+ "args: Arguments passed to an action.\n"
+ "\n"
+ " For action and action_foreach targets, args is the list of arguments\n"
+ " to pass to the script. Typically you would use source expansion (see\n"
+ " \"gn help source_expansion\") to insert the source file names.\n"
+ "\n"
+ " See also \"gn help action\" and \"gn help action_foreach\".\n";
+
+// Help text for the "assert_no_deps" target variable: label patterns that
+// must not appear in the target's transitive dependencies.
+const char kAssertNoDeps[] = "assert_no_deps";
+const char kAssertNoDeps_HelpShort[] =
+ "assert_no_deps: [label pattern list] Ensure no deps on these targets.";
+const char kAssertNoDeps_Help[] =
+ "assert_no_deps: Ensure no deps on these targets.\n"
+ "\n"
+ " A list of label patterns.\n"
+ "\n"
+ " This list is a list of patterns that must not match any of the\n"
+ " transitive dependencies of the target. These include all public,\n"
+ " private, and data dependencies, and cross shared library boundaries.\n"
+ " This allows you to express that undesirable code isn't accidentally\n"
+ " added to downstream dependencies in a way that might otherwise be\n"
+ " difficult to notice.\n"
+ "\n"
+ " Checking does not cross executable boundaries. If a target depends on\n"
+ " an executable, it's assumed that the executable is a tool that is\n"
+ " producing part of the build rather than something that is linked and\n"
+ " distributed. This allows assert_no_deps to express what is distributed\n"
+ " in the final target rather than depend on the internal build steps\n"
+ " (which may include non-distributable code).\n"
+ "\n"
+ " See \"gn help label_pattern\" for the format of the entries in the\n"
+ " list. These patterns allow blacklisting individual targets or whole\n"
+ " directory hierarchies.\n"
+ "\n"
+ " Sometimes it is desirable to enforce that many targets have no\n"
+ " dependencies on a target or set of targets. One efficient way to\n"
+ " express this is to create a group with the assert_no_deps rule on\n"
+ " it, and make that group depend on all targets you want to apply that\n"
+ " assertion to.\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " executable(\"doom_melon\") {\n"
+ " deps = [ \"//foo:bar\" ]\n"
+ " ...\n"
+ " assert_no_deps = [\n"
+ " \"//evil/*\", # Don't link any code from the evil directory.\n"
+ " \"//foo:test_support\", # This target is also disallowed.\n"
+ " ]\n"
+ " }\n";
+
+// Help text for the "bundle_root_dir" variable used by create_bundle /
+// bundle_data targets ({{bundle_root_dir}} expansion).
+const char kBundleRootDir[] = "bundle_root_dir";
+const char kBundleRootDir_HelpShort[] =
+ "bundle_root_dir: Expansion of {{bundle_root_dir}} in create_bundle.";
+const char kBundleRootDir_Help[] =
+ "bundle_root_dir: Expansion of {{bundle_root_dir}} in create_bundle.\n"
+ "\n"
+ " A string corresponding to a path in root_build_dir.\n"
+ "\n"
+ " This string is used by the \"create_bundle\" target to expand the\n"
+ " {{bundle_root_dir}} of the \"bundle_data\" target it depends on.\n"
+ " This must correspond to a path under root_build_dir.\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " bundle_data(\"info_plist\") {\n"
+ " sources = [ \"Info.plist\" ]\n"
+ " outputs = [ \"{{bundle_root_dir}}/Info.plist\" ]\n"
+ " }\n"
+ "\n"
+ " create_bundle(\"doom_melon.app\") {\n"
+ " deps = [ \":info_plist\" ]\n"
+ " bundle_root_dir = root_build_dir + \"/doom_melon.app/Contents\"\n"
+ " bundle_resources_dir = bundle_root_dir + \"/Resources\"\n"
+ " bundle_executable_dir = bundle_root_dir + \"/MacOS\"\n"
+ " bundle_plugins_dir = bundle_root_dir + \"/PlugIns\"\n"
+ " }\n";
+
+// Help text for the "bundle_resources_dir" variable
+// ({{bundle_resources_dir}} expansion in create_bundle).
+const char kBundleResourcesDir[] = "bundle_resources_dir";
+const char kBundleResourcesDir_HelpShort[] =
+ "bundle_resources_dir: "
+ "Expansion of {{bundle_resources_dir}} in create_bundle.";
+const char kBundleResourcesDir_Help[] =
+ "bundle_resources_dir: "
+ "Expansion of {{bundle_resources_dir}} in create_bundle.\n"
+ "\n"
+ " A string corresponding to a path in $root_build_dir.\n"
+ "\n"
+ " This string is used by the \"create_bundle\" target to expand the\n"
+ " {{bundle_resources_dir}} of the \"bundle_data\" target it depends on.\n"
+ " This must correspond to a path under \"bundle_root_dir\".\n"
+ "\n"
+ " See \"gn help bundle_root_dir\" for examples.\n";
+
+// Help text for the "bundle_executable_dir" variable
+// ({{bundle_executable_dir}} expansion in create_bundle).
+// Fix: HelpShort was missing its trailing period, unlike every sibling
+// bundle_*_dir HelpShort string in this file.
+const char kBundleExecutableDir[] = "bundle_executable_dir";
+const char kBundleExecutableDir_HelpShort[] =
+ "bundle_executable_dir: "
+ "Expansion of {{bundle_executable_dir}} in create_bundle.";
+const char kBundleExecutableDir_Help[] =
+ "bundle_executable_dir: "
+ "Expansion of {{bundle_executable_dir}} in create_bundle.\n"
+ "\n"
+ " A string corresponding to a path in $root_build_dir.\n"
+ "\n"
+ " This string is used by the \"create_bundle\" target to expand the\n"
+ " {{bundle_executable_dir}} of the \"bundle_data\" target it depends on.\n"
+ " This must correspond to a path under \"bundle_root_dir\".\n"
+ "\n"
+ " See \"gn help bundle_root_dir\" for examples.\n";
+
+// Help text for the "bundle_plugins_dir" variable
+// ({{bundle_plugins_dir}} expansion in create_bundle).
+const char kBundlePlugInsDir[] = "bundle_plugins_dir";
+const char kBundlePlugInsDir_HelpShort[] =
+ "bundle_plugins_dir: "
+ "Expansion of {{bundle_plugins_dir}} in create_bundle.";
+const char kBundlePlugInsDir_Help[] =
+ "bundle_plugins_dir: "
+ "Expansion of {{bundle_plugins_dir}} in create_bundle.\n"
+ "\n"
+ " A string corresponding to a path in $root_build_dir.\n"
+ "\n"
+ " This string is used by the \"create_bundle\" target to expand the\n"
+ " {{bundle_plugins_dir}} of the \"bundle_data\" target it depends on.\n"
+ " This must correspond to a path under \"bundle_root_dir\".\n"
+ "\n"
+ " See \"gn help bundle_root_dir\" for examples.\n";
+
+// Help text for "cflags". kCommonCflagsHelp is shared by all cflags_*
+// variants below (their *_Help pointers alias it).
+const char kCflags[] = "cflags";
+const char kCflags_HelpShort[] =
+ "cflags: [string list] Flags passed to all C compiler variants.";
+const char kCommonCflagsHelp[] =
+ "cflags*: Flags passed to the C compiler.\n"
+ "\n"
+ " A list of strings.\n"
+ "\n"
+ " \"cflags\" are passed to all invocations of the C, C++, Objective C,\n"
+ " and Objective C++ compilers.\n"
+ "\n"
+ " To target one of these variants individually, use \"cflags_c\",\n"
+ " \"cflags_cc\", \"cflags_objc\", and \"cflags_objcc\",\n"
+ " respectively. These variant-specific versions of cflags* will be\n"
+ " appended on the compiler command line after \"cflags\".\n"
+ "\n"
+ " See also \"asmflags\" for flags for assembly-language files.\n"
+ COMMON_ORDERING_HELP;
+const char* kCflags_Help = kCommonCflagsHelp;
+
+// Help text for the "asmflags" variable: flags for assembler invocations.
+const char kAsmflags[] = "asmflags";
+const char kAsmflags_HelpShort[] =
+ "asmflags: [string list] Flags passed to the assembler.";
+const char* kAsmflags_Help =
+ "asmflags: Flags passed to the assembler.\n"
+ "\n"
+ " A list of strings.\n"
+ "\n"
+ " \"asmflags\" are passed to any invocation of a tool that takes an\n"
+ " .asm or .S file as input.\n"
+ COMMON_ORDERING_HELP;
+
+// The per-language cflags variants. All of them share kCommonCflagsHelp
+// (defined above) as their long help text.
+const char kCflagsC[] = "cflags_c";
+const char kCflagsC_HelpShort[] =
+ "cflags_c: [string list] Flags passed to the C compiler.";
+const char* kCflagsC_Help = kCommonCflagsHelp;
+
+const char kCflagsCC[] = "cflags_cc";
+const char kCflagsCC_HelpShort[] =
+ "cflags_cc: [string list] Flags passed to the C++ compiler.";
+const char* kCflagsCC_Help = kCommonCflagsHelp;
+
+const char kCflagsObjC[] = "cflags_objc";
+const char kCflagsObjC_HelpShort[] =
+ "cflags_objc: [string list] Flags passed to the Objective C compiler.";
+const char* kCflagsObjC_Help = kCommonCflagsHelp;
+
+const char kCflagsObjCC[] = "cflags_objcc";
+const char kCflagsObjCC_HelpShort[] =
+ "cflags_objcc: [string list] Flags passed to the Objective C++ compiler.";
+const char* kCflagsObjCC_Help = kCommonCflagsHelp;
+
+// Help text for the "check_includes" target variable: opts a target's
+// files out of "gn check" include checking.
+const char kCheckIncludes[] = "check_includes";
+const char kCheckIncludes_HelpShort[] =
+ "check_includes: [boolean] Controls whether a target's files are checked.";
+const char kCheckIncludes_Help[] =
+ "check_includes: [boolean] Controls whether a target's files are checked.\n"
+ "\n"
+ " When true (the default), the \"gn check\" command (as well as\n"
+ " \"gn gen\" with the --check flag) will check this target's sources\n"
+ " and headers for proper dependencies.\n"
+ "\n"
+ " When false, the files in this target will be skipped by default.\n"
+ " This does not affect other targets that depend on the current target,\n"
+ " it just skips checking the includes of the current target's files.\n"
+ "\n"
+ " If there are a few conditionally included headers that trip up\n"
+ " checking, you can exclude headers individually by annotating them with\n"
+ " \"nogncheck\" (see \"gn help nogncheck\").\n"
+ "\n"
+ " The topic \"gn help check\" has general information on how checking\n"
+ " works and advice on how to pass a check in problematic cases.\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " source_set(\"busted_includes\") {\n"
+ " # This target's includes are messed up, exclude it from checking.\n"
+ " check_includes = false\n"
+ " ...\n"
+ " }\n";
+
+// Help text for the "complete_static_lib" target variable.
+// Fix: the paragraph ending "...depend on other static / libraries." was
+// interrupted by two stray lines about listing a header in more than one
+// target (text from an unrelated help topic), making it nonsense. The
+// stray lines are removed and the split sentence rejoined.
+const char kCompleteStaticLib[] = "complete_static_lib";
+const char kCompleteStaticLib_HelpShort[] =
+ "complete_static_lib: [boolean] Links all deps into a static library.";
+const char kCompleteStaticLib_Help[] =
+ "complete_static_lib: [boolean] Links all deps into a static library.\n"
+ "\n"
+ " A static library normally doesn't include code from dependencies, but\n"
+ " instead forwards the static libraries and source sets in its deps up\n"
+ " the dependency chain until a linkable target (an executable or shared\n"
+ " library) is reached. The final linkable target only links each static\n"
+ " library once, even if it appears more than once in its dependency\n"
+ " graph.\n"
+ "\n"
+ " In some cases the static library might be the final desired output.\n"
+ " For example, you may be producing a static library for distribution to\n"
+ " third parties. In this case, the static library should include code\n"
+ " for all dependencies in one complete package. Since GN does not unpack\n"
+ " static libraries to forward their contents up the dependency chain,\n"
+ " it is an error for complete static libraries to depend on other static\n"
+ " libraries.\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " static_library(\"foo\") {\n"
+ " complete_static_lib = true\n"
+ " deps = [ \"bar\" ]\n"
+ " }\n";
+
+// Help text for the "configs" variable, applicable to both targets and
+// (as sub-configs) to configs themselves.
+const char kConfigs[] = "configs";
+const char kConfigs_HelpShort[] =
+ "configs: [label list] Configs applying to this target or config.";
+const char kConfigs_Help[] =
+ "configs: Configs applying to this target or config.\n"
+ "\n"
+ " A list of config labels.\n"
+ "\n"
+ "Configs on a target\n"
+ "\n"
+ " When used on a target, the include_dirs, defines, etc. in each config\n"
+ " are appended in the order they appear to the compile command for each\n"
+ " file in the target. They will appear after the include_dirs, defines,\n"
+ " etc. that the target sets directly.\n"
+ "\n"
+ " Since configs apply after the values set on a target, directly setting\n"
+ " a compiler flag will prepend it to the command line. If you want to\n"
+ " append a flag instead, you can put that flag in a one-off config and\n"
+ " append that config to the target's configs list.\n"
+ "\n"
+ " The build configuration script will generally set up the default\n"
+ " configs applying to a given target type (see \"set_defaults\").\n"
+ " When a target is being defined, it can add to or remove from this\n"
+ " list.\n"
+ "\n"
+ "Configs on a config\n"
+ "\n"
+ " It is possible to create composite configs by specifying configs on a\n"
+ " config. One might do this to forward values, or to factor out blocks\n"
+ " of settings from very large configs into more manageable named chunks.\n"
+ "\n"
+ " In this case, the composite config is expanded to be the concatenation\n"
+ " of its own values, and in order, the values from its sub-configs\n"
+ " *before* anything else happens. This has some ramifications:\n"
+ "\n"
+ " - A target has no visibility into a config's sub-configs. Target\n"
+ " code only sees the name of the composite config. It can't remove\n"
+ " sub-configs or opt in to only parts of it. The composite config may\n"
+ " not even be defined before the target is.\n"
+ "\n"
+ " - You can get duplication of values if a config is listed twice, say,\n"
+ " on a target and in a sub-config that also applies. In other cases,\n"
+ " the configs applying to a target are de-duped. It's expected that\n"
+ " if a config is listed as a sub-config that it is only used in that\n"
+ " context. (Note that it's possible to fix this and de-dupe, but it's\n"
+ " not normally relevant and complicates the implementation.)\n"
+ COMMON_ORDERING_HELP
+ "\n"
+ "Example\n"
+ "\n"
+ " # Configs on a target.\n"
+ " source_set(\"foo\") {\n"
+ " # Don't use the default RTTI config that BUILDCONFIG applied to us.\n"
+ " configs -= [ \"//build:no_rtti\" ]\n"
+ "\n"
+ " # Add some of our own settings.\n"
+ " configs += [ \":mysettings\" ]\n"
+ " }\n"
+ "\n"
+ " # Create a default_optimization config that forwards to one of a set\n"
+ " # of more specialized configs depending on build flags. This pattern\n"
+ " # is useful because it allows a target to opt in to either a default\n"
+ " # set, or a more specific set, while avoid duplicating the settings in\n"
+ " # two places.\n"
+ " config(\"super_optimization\") {\n"
+ " cflags = [ ... ]\n"
+ " }\n"
+ " config(\"default_optimization\") {\n"
+ " if (optimize_everything) {\n"
+ " configs = [ \":super_optimization\" ]\n"
+ " } else {\n"
+ " configs = [ \":no_optimization\" ]\n"
+ " }\n"
+ " }\n";
+
+// Help text for the "console" action variable (ninja console pool).
+// Fixes: HelpShort used "console [boolean]:" instead of the
+// "console: [boolean] ..." format every other HelpShort in this file
+// follows, and one help line had a stray trailing space before its \n.
+const char kConsole[] = "console";
+const char kConsole_HelpShort[] =
+ "console: [boolean] Run this action in the console pool.";
+const char kConsole_Help[] =
+ "console: Run this action in the console pool.\n"
+ "\n"
+ " Boolean. Defaults to false.\n"
+ "\n"
+ " Actions marked \"console = true\" will be run in the built-in ninja\n"
+ " \"console\" pool. They will have access to real stdin and stdout, and\n"
+ " output will not be buffered by ninja. This can be useful for\n"
+ " long-running actions with progress logs, or actions that require user\n"
+ " input.\n"
+ "\n"
+ " Only one console pool target can run at any one time in Ninja. Refer\n"
+ " to the Ninja documentation on the console pool for more info.\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " action(\"long_action_with_progress_logs\") {\n"
+ " console = true\n"
+ " }\n";
+
+// Help text for the "data" target variable: runtime data file deps.
+// Fix: grammar error "directories are be listed" -> "directories should
+// be listed" in the user-visible help text.
+const char kData[] = "data";
+const char kData_HelpShort[] =
+ "data: [file list] Runtime data file dependencies.";
+const char kData_Help[] =
+ "data: Runtime data file dependencies.\n"
+ "\n"
+ " Lists files or directories required to run the given target. These are\n"
+ " typically data files or directories of data files. The paths are\n"
+ " interpreted as being relative to the current build file. Since these\n"
+ " are runtime dependencies, they do not affect which targets are built\n"
+ " or when. To declare input files to a script, use \"inputs\".\n"
+ "\n"
+ " Appearing in the \"data\" section does not imply any special handling\n"
+ " such as copying them to the output directory. This is just used for\n"
+ " declaring runtime dependencies. Runtime dependencies can be queried\n"
+ " using the \"runtime_deps\" category of \"gn desc\" or written during\n"
+ " build generation via \"--runtime-deps-list-file\".\n"
+ "\n"
+ " GN doesn't require data files to exist at build-time. So actions that\n"
+ " produce files that are in turn runtime dependencies can list those\n"
+ " generated files both in the \"outputs\" list as well as the \"data\"\n"
+ " list.\n"
+ "\n"
+ " By convention, directories should be listed with a trailing slash:\n"
+ " data = [ \"test/data/\" ]\n"
+ " However, no verification is done on these so GN doesn't enforce this.\n"
+ " The paths are just rebased and passed along when requested.\n"
+ "\n"
+ " See \"gn help runtime_deps\" for how these are used.\n";
+
+// Help text for the "data_deps" target variable: non-linked runtime deps.
+const char kDataDeps[] = "data_deps";
+const char kDataDeps_HelpShort[] =
+ "data_deps: [label list] Non-linked dependencies.";
+const char kDataDeps_Help[] =
+ "data_deps: Non-linked dependencies.\n"
+ "\n"
+ " A list of target labels.\n"
+ "\n"
+ " Specifies dependencies of a target that are not actually linked into\n"
+ " the current target. Such dependencies will be built and will be\n"
+ " available at runtime.\n"
+ "\n"
+ " This is normally used for things like plugins or helper programs that\n"
+ " a target needs at runtime.\n"
+ "\n"
+ " See also \"gn help deps\" and \"gn help data\".\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " executable(\"foo\") {\n"
+ " deps = [ \"//base\" ]\n"
+ " data_deps = [ \"//plugins:my_runtime_plugin\" ]\n"
+ " }\n";
+
+// Help text for the "defines" variable: C preprocessor defines.
+const char kDefines[] = "defines";
+const char kDefines_HelpShort[] =
+ "defines: [string list] C preprocessor defines.";
+const char kDefines_Help[] =
+ "defines: C preprocessor defines.\n"
+ "\n"
+ " A list of strings\n"
+ "\n"
+ " These strings will be passed to the C/C++ compiler as #defines. The\n"
+ " strings may or may not include an \"=\" to assign a value.\n"
+ COMMON_ORDERING_HELP
+ "\n"
+ "Example\n"
+ "\n"
+ " defines = [ \"AWESOME_FEATURE\", \"LOG_LEVEL=3\" ]\n";
+
+// Help text for the "depfile" action variable: Makefile-style .d output.
+// Fix: the paragraph ending "...according to the input." was missing its
+// terminating \n, so the blank-line separator used between every other
+// paragraph in this file collapsed and the next paragraph ran on.
+const char kDepfile[] = "depfile";
+const char kDepfile_HelpShort[] =
+ "depfile: [string] File name for input dependencies for actions.";
+const char kDepfile_Help[] =
+ "depfile: [string] File name for input dependencies for actions.\n"
+ "\n"
+ " If nonempty, this string specifies that the current action or\n"
+ " action_foreach target will generate the given \".d\" file containing\n"
+ " the dependencies of the input. Empty or unset means that the script\n"
+ " doesn't generate the files.\n"
+ "\n"
+ " The .d file should go in the target output directory. If you have more\n"
+ " than one source file that the script is being run over, you can use\n"
+ " the output file expansions described in \"gn help action_foreach\" to\n"
+ " name the .d file according to the input.\n"
+ "\n"
+ " The format is that of a Makefile, and all of the paths should be\n"
+ " relative to the root build directory.\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " action_foreach(\"myscript_target\") {\n"
+ " script = \"myscript.py\"\n"
+ " sources = [ ... ]\n"
+ "\n"
+ " # Locate the depfile in the output directory named like the\n"
+ " # inputs but with a \".d\" appended.\n"
+ " depfile = \"$relative_target_output_dir/{{source_name}}.d\"\n"
+ "\n"
+ " # Say our script uses \"-o <d file>\" to indicate the depfile.\n"
+ " args = [ \"{{source}}\", \"-o\", depfile ]\n"
+ " }\n";
+
+// Help text for the "deps" target variable: private linked dependencies.
+// Fix: "dependant"/"dependants" normalized to "dependent"/"dependents",
+// the spelling used everywhere else in this file (e.g. the
+// all_dependent_configs help above).
+const char kDeps[] = "deps";
+const char kDeps_HelpShort[] =
+ "deps: [label list] Private linked dependencies.";
+const char kDeps_Help[] =
+ "deps: Private linked dependencies.\n"
+ "\n"
+ " A list of target labels.\n"
+ "\n"
+ " Specifies private dependencies of a target. Private dependencies are\n"
+ " propagated up the dependency tree and linked to dependent targets, but\n"
+ " do not grant the ability to include headers from the dependency.\n"
+ " Public configs are not forwarded.\n"
+ "\n"
+ "Details of dependency propagation\n"
+ "\n"
+ " Source sets, shared libraries, and non-complete static libraries\n"
+ " will be propagated up the dependency tree across groups, non-complete\n"
+ " static libraries and source sets.\n"
+ "\n"
+ " Executables, shared libraries, and complete static libraries will\n"
+ " link all propagated targets and stop propagation. Actions and copy\n"
+ " steps also stop propagation, allowing them to take a library as an\n"
+ " input but not force dependents to link to it.\n"
+ "\n"
+ " Propagation of all_dependent_configs and public_configs happens\n"
+ " independently of target type. all_dependent_configs are always\n"
+ " propagated across all types of targets, and public_configs\n"
+ " are always propagated across public deps of all types of targets.\n"
+ "\n"
+ " Data dependencies are propagated differently. See\n"
+ " \"gn help data_deps\" and \"gn help runtime_deps\".\n"
+ "\n"
+ " See also \"public_deps\".\n";
+
+// Help text for the "include_dirs" variable: extra include search paths.
+const char kIncludeDirs[] = "include_dirs";
+const char kIncludeDirs_HelpShort[] =
+ "include_dirs: [directory list] Additional include directories.";
+const char kIncludeDirs_Help[] =
+ "include_dirs: Additional include directories.\n"
+ "\n"
+ " A list of source directories.\n"
+ "\n"
+ " The directories in this list will be added to the include path for\n"
+ " the files in the affected target.\n"
+ COMMON_ORDERING_HELP
+ "\n"
+ "Example\n"
+ "\n"
+ " include_dirs = [ \"src/include\", \"//third_party/foo\" ]\n";
+
+// Help text for the "inputs" variable: additional compile-time deps,
+// mainly for action/action_foreach targets.
+const char kInputs[] = "inputs";
+const char kInputs_HelpShort[] =
+ "inputs: [file list] Additional compile-time dependencies.";
+const char kInputs_Help[] =
+ "inputs: Additional compile-time dependencies.\n"
+ "\n"
+ " Inputs are compile-time dependencies of the current target. This means\n"
+ " that all inputs must be available before compiling any of the sources\n"
+ " or executing any actions.\n"
+ "\n"
+ " Inputs are typically only used for action and action_foreach targets.\n"
+ "\n"
+ "Inputs for actions\n"
+ "\n"
+ " For action and action_foreach targets, inputs should be the inputs to\n"
+ " script that don't vary. These should be all .py files that the script\n"
+ " uses via imports (the main script itself will be an implicit dependency"
+ "\n"
+ " of the action so need not be listed).\n"
+ "\n"
+ " For action targets, inputs and sources are treated the same, but from\n"
+ " a style perspective, it's recommended to follow the same rule as\n"
+ " action_foreach and put helper files in the inputs, and the data used\n"
+ " by the script (if any) in sources.\n"
+ "\n"
+ " Note that another way to declare input dependencies from an action\n"
+ " is to have the action write a depfile (see \"gn help depfile\"). This\n"
+ " allows the script to dynamically write input dependencies, that might\n"
+ " not be known until actually executing the script. This is more\n"
+ " efficient than doing processing while running GN to determine the\n"
+ " inputs, and is easier to keep in-sync than hardcoding the list.\n"
+ "\n"
+ "Script input gotchas\n"
+ "\n"
+ " It may be tempting to write a script that enumerates all files in a\n"
+ " directory as inputs. Don't do this! Even if you specify all the files\n"
+ " in the inputs or sources in the GN target (or worse, enumerate the\n"
+ " files in an exec_script call when running GN, which will be slow), the\n"
+ " dependencies will be broken.\n"
+ "\n"
+ " The problem happens if a file is ever removed because the inputs are\n"
+ " not listed on the command line to the script. Because the script\n"
+ " hasn't changed and all inputs are up-to-date, the script will not\n"
+ " re-run and you will get a stale build. Instead, either list all\n"
+ " inputs on the command line to the script, or if there are many, create\n"
+ " a separate list file that the script reads. As long as this file is\n"
+ " listed in the inputs, the build will detect when it has changed in any\n"
+ " way and the action will re-run.\n"
+ "\n"
+ "Inputs for binary targets\n"
+ "\n"
+ " Any input dependencies will be resolved before compiling any sources.\n"
+ " Normally, all actions that a target depends on will be run before any\n"
+ " files in a target are compiled. So if you depend on generated headers,\n"
+ " you do not typically need to list them in the inputs section.\n"
+ "\n"
+ " Inputs for binary targets will be treated as order-only dependencies,\n"
+ " meaning that they will be forced up-to-date before compiling or\n"
+ " any files in the target, but changes in the inputs will not\n"
+ " necessarily force the target to compile. This is because it is\n"
+ " expected that the compiler will report the precise list of input\n"
+ " dependencies required to recompile each file once the initial build\n"
+ " is done.\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " action(\"myscript\") {\n"
+ " script = \"domything.py\"\n"
+ " inputs = [ \"input.data\" ]\n"
+ " }\n";
+
+const char kLdflags[] = "ldflags";
+const char kLdflags_HelpShort[] =
+ "ldflags: [string list] Flags passed to the linker.";
+const char kLdflags_Help[] =
+ "ldflags: Flags passed to the linker.\n"
+ "\n"
+ " A list of strings.\n"
+ "\n"
+ " These flags are passed on the command-line to the linker and generally\n"
+ " specify various linking options. Most targets will not need these and\n"
+ " will use \"libs\" and \"lib_dirs\" instead.\n"
+ "\n"
+ " ldflags are NOT pushed to dependents, so applying ldflags to source\n"
+ " sets or static libraries will be a no-op. If you want to apply ldflags\n"
+ " to dependent targets, put them in a config and set it in the\n"
+ " all_dependent_configs or public_configs.\n"
+ COMMON_ORDERING_HELP;
+
+#define COMMON_LIB_INHERITANCE_HELP \
+ "\n" \
+ " libs and lib_dirs work differently than other flags in two respects.\n" \
+ " First, they are inherited across static library boundaries until a\n" \
+ " shared library or executable target is reached. Second, they are\n" \
+ " uniquified so each one is only passed once (the first instance of it\n" \
+ " will be the one used).\n"
+
+#define LIBS_AND_LIB_DIRS_ORDERING_HELP \
+ "\n" \
+ " For \"libs\" and \"lib_dirs\" only, the values propagated from\n" \
+ " dependencies (as described above) are applied last assuming they\n" \
+ " are not already in the list.\n"
+
+const char kLibDirs[] = "lib_dirs";
+const char kLibDirs_HelpShort[] =
+ "lib_dirs: [directory list] Additional library directories.";
+const char kLibDirs_Help[] =
+ "lib_dirs: Additional library directories.\n"
+ "\n"
+ " A list of directories.\n"
+ "\n"
+ " Specifies additional directories passed to the linker for searching\n"
+ " for the required libraries. If an item is not an absolute path, it\n"
+ " will be treated as being relative to the current build file.\n"
+ COMMON_LIB_INHERITANCE_HELP
+ COMMON_ORDERING_HELP
+ LIBS_AND_LIB_DIRS_ORDERING_HELP
+ "\n"
+ "Example\n"
+ "\n"
+ " lib_dirs = [ \"/usr/lib/foo\", \"lib/doom_melon\" ]\n";
+
+const char kLibs[] = "libs";
+const char kLibs_HelpShort[] =
+ "libs: [string list] Additional libraries to link.";
+const char kLibs_Help[] =
+ "libs: Additional libraries to link.\n"
+ "\n"
+ " A list of library names or library paths.\n"
+ "\n"
+ " These libraries will be linked into the final binary (executable or\n"
+ " shared library) containing the current target.\n"
+ COMMON_LIB_INHERITANCE_HELP
+ "\n"
+ "Types of libs\n"
+ "\n"
+ " There are several different things that can be expressed in libs:\n"
+ "\n"
+ " File paths\n"
+ " Values containing '/' will be treated as references to files in\n"
+ " the checkout. They will be rebased to be relative to the build\n"
+ " directory and specified in the \"libs\" for linker tools. This\n"
+ " facility should be used for libraries that are checked in to the\n"
+ " version control. For libraries that are generated by the build,\n"
+ " use normal GN deps to link them.\n"
+ "\n"
+ " System libraries\n"
+ " Values not containing '/' will be treated as system library names.\n"
+ " These will be passed unmodified to the linker and prefixed with\n"
+ " the \"lib_prefix\" attribute of the linker tool. Generally you\n"
+ " would set the \"lib_dirs\" so the given library is found. Your\n"
+ " BUILD.gn file should not specify the switch (like \"-l\"): this\n"
+ " will be encoded in the \"lib_prefix\" of the tool.\n"
+ "\n"
+ " Apple frameworks\n"
+ " System libraries ending in \".framework\" will be special-cased:\n"
+ " the switch \"-framework\" will be prepended instead of the\n"
+ " lib_prefix, and the \".framework\" suffix will be trimmed. This is\n"
+ " to support the way Mac links framework dependencies.\n"
+ COMMON_ORDERING_HELP
+ LIBS_AND_LIB_DIRS_ORDERING_HELP
+ "\n"
+ "Examples\n"
+ "\n"
+ " On Windows:\n"
+ " libs = [ \"ctl3d.lib\" ]\n"
+ "\n"
+ " On Linux:\n"
+ " libs = [ \"ld\" ]\n";
+
+const char kOutputExtension[] = "output_extension";
+const char kOutputExtension_HelpShort[] =
+ "output_extension: [string] Value to use for the output's file extension.";
+const char kOutputExtension_Help[] =
+ "output_extension: Value to use for the output's file extension.\n"
+ "\n"
+ " Normally the file extension for a target is based on the target\n"
+ " type and the operating system, but in rare cases you will need to\n"
+ " override the name (for example to use \"libfreetype.so.6\" instead\n"
+ " of libfreetype.so on Linux).\n"
+ "\n"
+ " This value should not include a leading dot. If undefined, the default\n"
+ " specified on the tool will be used. If set to the empty string, no\n"
+ " output extension will be used.\n"
+ "\n"
+ " The output_extension will be used to set the \"{{output_extension}}\"\n"
+ " expansion which the linker tool will generally use to specify the\n"
+ " output file name. See \"gn help tool\".\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " shared_library(\"freetype\") {\n"
+ " if (is_linux) {\n"
+ " # Call the output \"libfreetype.so.6\"\n"
+ " output_extension = \"so.6\"\n"
+ " }\n"
+ " ...\n"
+ " }\n"
+ "\n"
+ " # On Windows, generate a \"mysettings.cpl\" control panel applet.\n"
+ " # Control panel applets are actually special shared libraries.\n"
+ " if (is_win) {\n"
+ " shared_library(\"mysettings\") {\n"
+ " output_extension = \"cpl\"\n"
+ " ...\n"
+ " }\n"
+ " }\n";
+
+const char kOutputName[] = "output_name";
+const char kOutputName_HelpShort[] =
+ "output_name: [string] Name for the output file other than the default.";
+const char kOutputName_Help[] =
+ "output_name: Define a name for the output file other than the default.\n"
+ "\n"
+ " Normally the output name of a target will be based on the target name,\n"
+ " so the target \"//foo/bar:bar_unittests\" will generate an output\n"
+ " file such as \"bar_unittests.exe\" (using Windows as an example).\n"
+ "\n"
+ " Sometimes you will want an alternate name to avoid collisions or\n"
+ " if the internal name isn't appropriate for public distribution.\n"
+ "\n"
+ " The output name should have no extension or prefixes, these will be\n"
+ " added using the default system rules. For example, on Linux an output\n"
+ " name of \"foo\" will produce a shared library \"libfoo.so\". There\n"
+ " is no way to override the output prefix of a linker tool on a per-\n"
+ " target basis. If you need more flexibility, create a copy target\n"
+ " to produce the file you want.\n"
+ "\n"
+ " This variable is valid for all binary output target types.\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " static_library(\"doom_melon\") {\n"
+ " output_name = \"fluffy_bunny\"\n"
+ " }\n";
+
+const char kOutputPrefixOverride[] = "output_prefix_override";
+const char kOutputPrefixOverride_HelpShort[] =
+ "output_prefix_override: [boolean] Don't use prefix for output name.";
+const char kOutputPrefixOverride_Help[] =
+ "output_prefix_override: Don't use prefix for output name.\n"
+ "\n"
+ " A boolean that overrides the output prefix for a target. Defaults to\n"
+ " false.\n"
+ "\n"
+ " Some systems use prefixes for the names of the final target output\n"
+ " file. The normal example is \"libfoo.so\" on Linux for a target\n"
+ " named \"foo\".\n"
+ "\n"
+ " The output prefix for a given target type is specified on the linker\n"
+ " tool (see \"gn help tool\"). Sometimes this prefix is undesired.\n"
+ "\n"
+ " See also \"gn help output_extension\".\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " shared_library(\"doom_melon\") {\n"
+ " # Normally this will produce \"libdoom_melon.so\" on Linux.\n"
+ " # Setting this flag will produce \"doom_melon.so\".\n"
+ " output_prefix_override = true\n"
+ " ...\n"
+ " }\n";
+
+const char kOutputs[] = "outputs";
+const char kOutputs_HelpShort[] =
+ "outputs: [file list] Output files for actions and copy targets.";
+const char kOutputs_Help[] =
+ "outputs: Output files for actions and copy targets.\n"
+ "\n"
+ " Outputs is valid for \"copy\", \"action\", and \"action_foreach\"\n"
+ " target types and indicates the resulting files. Outputs must always\n"
+ " refer to files in the build directory.\n"
+ "\n"
+ " copy\n"
+ " Copy targets should have exactly one entry in the outputs list. If\n"
+ " there is exactly one source, this can be a literal file name or a\n"
+ " source expansion. If there is more than one source, this must\n"
+ " contain a source expansion to map a single input name to a single\n"
+ " output name. See \"gn help copy\".\n"
+ "\n"
+ " action_foreach\n"
+ " Action_foreach targets must always use source expansions to map\n"
+ " input files to output files. There can be more than one output,\n"
+ " which means that each invocation of the script will produce a set of\n"
+ " files (presumably based on the name of the input file). See\n"
+ " \"gn help action_foreach\".\n"
+ "\n"
+ " action\n"
+ " Action targets (excluding action_foreach) must list literal output\n"
+ " file(s) with no source expansions. See \"gn help action\".\n";
+
+const char kPrecompiledHeader[] = "precompiled_header";
+const char kPrecompiledHeader_HelpShort[] =
+ "precompiled_header: [string] Header file to precompile.";
+const char kPrecompiledHeader_Help[] =
+ "precompiled_header: [string] Header file to precompile.\n"
+ "\n"
+ " Precompiled headers will be used when a target specifies this\n"
+ " value, or a config applying to this target specifies this value.\n"
+ " In addition, the tool corresponding to the source files must also\n"
+ " specify precompiled headers (see \"gn help tool\"). The tool\n"
+ " will also specify what type of precompiled headers to use.\n"
+ "\n"
+ " The precompiled header/source variables can be specified on a target\n"
+ " or a config, but must be the same for all configs applying to a given\n"
+ " target since a target can only have one precompiled header.\n"
+ "\n"
+ "MSVC precompiled headers\n"
+ "\n"
+ " When using MSVC-style precompiled headers, the \"precompiled_header\"\n"
+ " value is a string corresponding to the header. This is NOT a path\n"
+ " to a file that GN recognizes, but rather the exact string that appears\n"
+ " in quotes after an #include line in source code. The compiler will\n"
+ " match this string against includes or forced includes (/FI).\n"
+ "\n"
+ " MSVC also requires a source file to compile the header with. This must\n"
+ " be specified by the \"precompiled_source\" value. In contrast to the\n"
+ " header value, this IS a GN-style file name, and tells GN which source\n"
+ " file to compile to make the .pch file used for subsequent compiles.\n"
+ "\n"
+ " If you use both C and C++ sources, the precompiled header and source\n"
+ " file will be compiled using both tools. You will want to make sure\n"
+ " to wrap C++ includes in __cplusplus #ifdefs so the file will compile\n"
+ " in C mode.\n"
+ "\n"
+ " For example, if the toolchain specifies MSVC headers:\n"
+ "\n"
+ " toolchain(\"vc_x64\") {\n"
+ " ...\n"
+ " tool(\"cxx\") {\n"
+ " precompiled_header_type = \"msvc\"\n"
+ " ...\n"
+ "\n"
+ " You might make a config like this:\n"
+ "\n"
+ " config(\"use_precompiled_headers\") {\n"
+ " precompiled_header = \"build/precompile.h\"\n"
+ " precompiled_source = \"//build/precompile.cc\"\n"
+ "\n"
+ " # Either your source files should #include \"build/precompile.h\"\n"
+ " # first, or you can do this to force-include the header.\n"
+ " cflags = [ \"/FI$precompiled_header\" ]\n"
+ " }\n"
+ "\n"
+ " And then define a target that uses the config:\n"
+ "\n"
+ " executable(\"doom_melon\") {\n"
+ " configs += [ \":use_precompiled_headers\" ]\n"
+ " ...\n"
+ "\n";
+
+const char kPrecompiledSource[] = "precompiled_source";
+const char kPrecompiledSource_HelpShort[] =
+ "precompiled_source: [file name] Source file to precompile.";
+const char kPrecompiledSource_Help[] =
+ "precompiled_source: [file name] Source file to precompile.\n"
+ "\n"
+ " The source file that goes along with the precompiled_header when\n"
+ " using \"msvc\"-style precompiled headers. It will be implicitly added\n"
+ " to the sources of the target. See \"gn help precompiled_header\".\n";
+
+const char kPublic[] = "public";
+const char kPublic_HelpShort[] =
+ "public: [file list] Declare public header files for a target.";
+const char kPublic_Help[] =
+ "public: Declare public header files for a target.\n"
+ "\n"
+ " A list of files that other targets can include. These permissions are\n"
+ " checked via the \"check\" command (see \"gn help check\").\n"
+ "\n"
+ " If no public files are declared, other targets (assuming they have\n"
+ " visibility to depend on this target) can include any file in the\n"
+ " sources list. If this variable is defined on a target, dependent\n"
+ " targets may only include files on this whitelist.\n"
+ "\n"
+ " Header file permissions are also subject to visibility. A target\n"
+ " must be visible to another target to include any files from it at all\n"
+ " and the public headers indicate which subset of those files are\n"
+ " permitted. See \"gn help visibility\" for more.\n"
+ "\n"
+ " Public files are inherited through the dependency tree. So if there is\n"
+ " a dependency A -> B -> C, then A can include C's public headers.\n"
+ " However, the same is NOT true of visibility, so unless A is in C's\n"
+ " visibility list, the include will be rejected.\n"
+ "\n"
+ " GN only knows about files declared in the \"sources\" and \"public\"\n"
+ " sections of targets. If a file is included that is not known to the\n"
+ " build, it will be allowed.\n"
+ "\n"
+ "Examples\n"
+ "\n"
+ " These exact files are public:\n"
+ " public = [ \"foo.h\", \"bar.h\" ]\n"
+ "\n"
+ " No files are public (no targets may include headers from this one):\n"
+ " public = []\n";
+
+const char kPublicConfigs[] = "public_configs";
+const char kPublicConfigs_HelpShort[] =
+ "public_configs: [label list] Configs applied to dependents.";
+const char kPublicConfigs_Help[] =
+ "public_configs: Configs to be applied on dependents.\n"
+ "\n"
+ " A list of config labels.\n"
+ "\n"
+ " Targets directly depending on this one will have the configs listed in\n"
+ " this variable added to them. These configs will also apply to the\n"
+ " current target.\n"
+ "\n"
+ " This addition happens in a second phase once a target and all of its\n"
+ " dependencies have been resolved. Therefore, a target will not see\n"
+ " these force-added configs in their \"configs\" variable while the\n"
+ " script is running, and then can not be removed. As a result, this\n"
+ " capability should generally only be used to add defines and include\n"
+ " directories necessary to compile a target's headers.\n"
+ "\n"
+ " See also \"all_dependent_configs\".\n"
+ COMMON_ORDERING_HELP;
+
+const char kPublicDeps[] = "public_deps";
+const char kPublicDeps_HelpShort[] =
+ "public_deps: [label list] Declare public dependencies.";
+const char kPublicDeps_Help[] =
+ "public_deps: Declare public dependencies.\n"
+ "\n"
+ " Public dependencies are like private dependencies (see\n"
+ " \"gn help deps\") but additionally express that the current target\n"
+ " exposes the listed deps as part of its public API.\n"
+ "\n"
+ " This has several ramifications:\n"
+ "\n"
+ " - public_configs that are part of the dependency are forwarded\n"
+ " to direct dependents.\n"
+ "\n"
+ " - Public headers in the dependency are usable by dependents\n"
+ " (includes do not require a direct dependency or visibility).\n"
+ "\n"
+ " - If the current target is a shared library, other shared libraries\n"
+ " that it publicly depends on (directly or indirectly) are\n"
+ " propagated up the dependency tree to dependents for linking.\n"
+ "\n"
+ "Discussion\n"
+ "\n"
+ " Say you have three targets: A -> B -> C. C's visibility may allow\n"
+ " B to depend on it but not A. Normally, this would prevent A from\n"
+ " including any headers from C, and C's public_configs would apply\n"
+ " only to B.\n"
+ "\n"
+ " If B lists C in its public_deps instead of regular deps, A will now\n"
+ " inherit C's public_configs and the ability to include C's public\n"
+ " headers.\n"
+ "\n"
+ " Generally if you are writing a target B and you include C's headers\n"
+ " as part of B's public headers, or targets depending on B should\n"
+ " consider B and C to be part of a unit, you should use public_deps\n"
+ " instead of deps.\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " # This target can include files from \"c\" but not from\n"
+ " # \"super_secret_implementation_details\".\n"
+ " executable(\"a\") {\n"
+ " deps = [ \":b\" ]\n"
+ " }\n"
+ "\n"
+ " shared_library(\"b\") {\n"
+ " deps = [ \":super_secret_implementation_details\" ]\n"
+ " public_deps = [ \":c\" ]\n"
+ " }\n";
+
+const char kResponseFileContents[] = "response_file_contents";
+const char kResponseFileContents_HelpShort[] =
+ "response_file_contents: [string list] Contents of .rsp file for actions.";
+const char kResponseFileContents_Help[] =
+ "response_file_contents: Contents of a response file for actions.\n"
+ "\n"
+ " Sometimes the arguments passed to a script can be too long for the\n"
+ " system's command-line capabilities. This is especially the case on\n"
+ " Windows where the maximum command-line length is less than 8K. A\n"
+ " response file allows you to pass an unlimited amount of data to a\n"
+ " script in a temporary file for an action or action_foreach target.\n"
+ "\n"
+ " If the response_file_contents variable is defined and non-empty, the\n"
+ " list will be treated as script args (including possibly substitution\n"
+ " patterns) that will be written to a temporary file at build time.\n"
+ " The name of the temporary file will be substituted for\n"
+ " \"{{response_file_name}}\" in the script args.\n"
+ "\n"
+ " The response file contents will always be quoted and escaped\n"
+ " according to Unix shell rules. To parse the response file, the Python\n"
+ " script should use \"shlex.split(file_contents)\".\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " action(\"process_lots_of_files\") {\n"
+ " script = \"process.py\",\n"
+ " inputs = [ ... huge list of files ... ]\n"
+ "\n"
+ " # Write all the inputs to a response file for the script. Also,\n"
+ " # make the paths relative to the script working directory.\n"
+ " response_file_contents = rebase_path(inputs, root_build_dir)\n"
+ "\n"
+ " # The script expects the name of the response file in --file-list.\n"
+ " args = [\n"
+ " \"--enable-foo\",\n"
+ " \"--file-list={{response_file_name}}\",\n"
+ " ]\n"
+ " }\n";
+
+const char kScript[] = "script";
+const char kScript_HelpShort[] =
+ "script: [file name] Script file for actions.";
+const char kScript_Help[] =
+ "script: Script file for actions.\n"
+ "\n"
+ " An absolute or buildfile-relative file name of a Python script to run\n"
+ " for action and action_foreach targets (see \"gn help action\" and\n"
+ " \"gn help action_foreach\").\n";
+
+const char kSources[] = "sources";
+const char kSources_HelpShort[] =
+ "sources: [file list] Source files for a target.";
+const char kSources_Help[] =
+ "sources: Source files for a target\n"
+ "\n"
+ " A list of files. Non-absolute paths will be resolved relative to the\n"
+ " current build file.\n"
+ "\n"
+ "Sources for binary targets\n"
+ "\n"
+ " For binary targets (source sets, executables, and libraries), the\n"
+ " known file types will be compiled with the associated tools. Unknown\n"
+ " file types and headers will be skipped. However, you should still\n"
+ " list all C/C++ header files so GN knows about the existence of those\n"
+ " files for the purposes of include checking.\n"
+ "\n"
+ " As a special case, a file ending in \".def\" will be treated as a\n"
+ " Windows module definition file. It will be appended to the link\n"
+ " line with a preceding \"/DEF:\" string. There must be at most one\n"
+ " .def file in a target and they do not cross dependency boundaries\n"
+ " (so specifying a .def file in a static library or source set will have\n"
+ " no effect on the executable or shared library they're linked into).\n"
+ "\n"
+ "Sources for non-binary targets\n"
+ "\n"
+ " action_foreach\n"
+ " The sources are the set of files that the script will be executed\n"
+ " over. The script will run once per file.\n"
+ "\n"
+ " action\n"
+ " The sources will be treated the same as inputs. See "
+ "\"gn help inputs\"\n"
+ " for more information and usage advice.\n"
+ "\n"
+ " copy\n"
+ " The sources are the source files to copy.\n";
+
+const char kTestonly[] = "testonly";
+const char kTestonly_HelpShort[] =
+ "testonly: [boolean] Declares a target must only be used for testing.";
+const char kTestonly_Help[] =
+ "testonly: Declares a target must only be used for testing.\n"
+ "\n"
+ " Boolean. Defaults to false.\n"
+ "\n"
+ " When a target is marked \"testonly = true\", it must only be depended\n"
+ " on by other test-only targets. Otherwise, GN will issue an error\n"
+ " that the dependency is not allowed.\n"
+ "\n"
+ " This feature is intended to prevent accidentally shipping test code\n"
+ " in a final product.\n"
+ "\n"
+ "Example\n"
+ "\n"
+ " source_set(\"test_support\") {\n"
+ " testonly = true\n"
+ " ...\n"
+ " }\n";
+
+const char kVisibility[] = "visibility";
+const char kVisibility_HelpShort[] =
+ "visibility: [label list] A list of labels that can depend on a target.";
+const char kVisibility_Help[] =
+ "visibility: A list of labels that can depend on a target.\n"
+ "\n"
+ " A list of labels and label patterns that define which targets can\n"
+ " depend on the current one. These permissions are checked via the\n"
+ " \"check\" command (see \"gn help check\").\n"
+ "\n"
+ " If visibility is not defined, it defaults to public (\"*\").\n"
+ "\n"
+ " If visibility is defined, only the targets with labels that match it\n"
+ " can depend on the current target. The empty list means no targets\n"
+ " can depend on the current target.\n"
+ "\n"
+ " Tip: Often you will want the same visibility for all targets in a\n"
+ " BUILD file. In this case you can just put the definition at the top,\n"
+ " outside of any target, and the targets will inherit that scope and see\n"
+ " the definition.\n"
+ "\n"
+ "Patterns\n"
+ "\n"
+ " See \"gn help label_pattern\" for more details on what types of\n"
+ " patterns are supported. If a toolchain is specified, only targets\n"
+ " in that toolchain will be matched. If a toolchain is not specified on\n"
+ " a pattern, targets in all toolchains will be matched.\n"
+ "\n"
+ "Examples\n"
+ "\n"
+ " Only targets in the current buildfile (\"private\"):\n"
+ " visibility = [ \":*\" ]\n"
+ "\n"
+ " No targets (used for targets that should be leaf nodes):\n"
+ " visibility = []\n"
+ "\n"
+ " Any target (\"public\", the default):\n"
+ " visibility = [ \"*\" ]\n"
+ "\n"
+ " All targets in the current directory and any subdirectory:\n"
+ " visibility = [ \"./*\" ]\n"
+ "\n"
+ " Any target in \"//bar/BUILD.gn\":\n"
+ " visibility = [ \"//bar:*\" ]\n"
+ "\n"
+ " Any target in \"//bar/\" or any subdirectory thereof:\n"
+ " visibility = [ \"//bar/*\" ]\n"
+ "\n"
+ " Just these specific targets:\n"
+ " visibility = [ \":mything\", \"//foo:something_else\" ]\n"
+ "\n"
+ " Any target in the current directory and any subdirectory thereof, plus\n"
+ " any targets in \"//bar/\" and any subdirectory thereof.\n"
+ " visibility = [ \"./*\", \"//bar/*\" ]\n";
+
+const char kWriteRuntimeDeps[] = "write_runtime_deps";
+const char kWriteRuntimeDeps_HelpShort[] =
+ "write_runtime_deps: Writes the target's runtime_deps to the given path.";
+const char kWriteRuntimeDeps_Help[] =
+ "write_runtime_deps: Writes the target's runtime_deps to the given path.\n"
+ "\n"
+ " Does not synchronously write the file, but rather schedules it\n"
+ " to be written at the end of generation.\n"
+ "\n"
+ " If the file exists and the contents are identical to that being\n"
+ " written, the file will not be updated. This will prevent unnecessary\n"
+ " rebuilds of targets that depend on this file.\n"
+ "\n"
+ " Path must be within the output directory.\n"
+ "\n"
+ " See \"gn help runtime_deps\" for how the runtime dependencies are\n"
+ " computed.\n"
+ "\n"
+ " The format of this file will list one file per line with no escaping.\n"
+ " The files will be relative to the root_build_dir. The first line of\n"
+ " the file will be the main output file of the target itself. The file\n"
+ " contents will be the same as requesting the runtime deps be written on\n"
+ " the command line (see \"gn help --runtime-deps-list-file\").\n";
+
+// -----------------------------------------------------------------------------
+
+VariableInfo::VariableInfo()
+ : help_short(""),
+ help("") {
+}
+
+VariableInfo::VariableInfo(const char* in_help_short, const char* in_help)
+ : help_short(in_help_short),
+ help(in_help) {
+}
+
+#define INSERT_VARIABLE(var) \
+ info_map[k##var] = VariableInfo(k##var##_HelpShort, k##var##_Help);
+
+const VariableInfoMap& GetBuiltinVariables() {
+ static VariableInfoMap info_map;
+ if (info_map.empty()) {
+ INSERT_VARIABLE(CurrentCpu)
+ INSERT_VARIABLE(CurrentOs)
+ INSERT_VARIABLE(CurrentToolchain)
+ INSERT_VARIABLE(DefaultToolchain)
+ INSERT_VARIABLE(HostCpu)
+ INSERT_VARIABLE(HostOs)
+ INSERT_VARIABLE(PythonPath)
+ INSERT_VARIABLE(RootBuildDir)
+ INSERT_VARIABLE(RootGenDir)
+ INSERT_VARIABLE(RootOutDir)
+ INSERT_VARIABLE(TargetCpu)
+ INSERT_VARIABLE(TargetOs)
+ INSERT_VARIABLE(TargetGenDir)
+ INSERT_VARIABLE(TargetOutDir)
+ }
+ return info_map;
+}
+
+const VariableInfoMap& GetTargetVariables() {
+ static VariableInfoMap info_map;
+ if (info_map.empty()) {
+ INSERT_VARIABLE(AllDependentConfigs)
+ INSERT_VARIABLE(AllowCircularIncludesFrom)
+ INSERT_VARIABLE(Args)
+ INSERT_VARIABLE(Asmflags)
+ INSERT_VARIABLE(AssertNoDeps)
+ INSERT_VARIABLE(BundleRootDir)
+ INSERT_VARIABLE(BundleResourcesDir)
+ INSERT_VARIABLE(BundleExecutableDir)
+ INSERT_VARIABLE(BundlePlugInsDir)
+ INSERT_VARIABLE(Cflags)
+ INSERT_VARIABLE(CflagsC)
+ INSERT_VARIABLE(CflagsCC)
+ INSERT_VARIABLE(CflagsObjC)
+ INSERT_VARIABLE(CflagsObjCC)
+ INSERT_VARIABLE(CheckIncludes)
+ INSERT_VARIABLE(CompleteStaticLib)
+ INSERT_VARIABLE(Configs)
+ INSERT_VARIABLE(Console)
+ INSERT_VARIABLE(Data)
+ INSERT_VARIABLE(DataDeps)
+ INSERT_VARIABLE(Defines)
+ INSERT_VARIABLE(Depfile)
+ INSERT_VARIABLE(Deps)
+ INSERT_VARIABLE(IncludeDirs)
+ INSERT_VARIABLE(Inputs)
+ INSERT_VARIABLE(Ldflags)
+ INSERT_VARIABLE(Libs)
+ INSERT_VARIABLE(LibDirs)
+ INSERT_VARIABLE(OutputExtension)
+ INSERT_VARIABLE(OutputName)
+ INSERT_VARIABLE(OutputPrefixOverride)
+ INSERT_VARIABLE(Outputs)
+ INSERT_VARIABLE(PrecompiledHeader)
+ INSERT_VARIABLE(PrecompiledSource)
+ INSERT_VARIABLE(Public)
+ INSERT_VARIABLE(PublicConfigs)
+ INSERT_VARIABLE(PublicDeps)
+ INSERT_VARIABLE(ResponseFileContents)
+ INSERT_VARIABLE(Script)
+ INSERT_VARIABLE(Sources)
+ INSERT_VARIABLE(Testonly)
+ INSERT_VARIABLE(Visibility)
+ INSERT_VARIABLE(WriteRuntimeDeps)
+ }
+ return info_map;
+}
+
+#undef INSERT_VARIABLE
+
+} // namespace variables
diff --git a/chromium/tools/gn/variables.h b/chromium/tools/gn/variables.h
new file mode 100644
index 00000000000..08fea1ab82b
--- /dev/null
+++ b/chromium/tools/gn/variables.h
@@ -0,0 +1,269 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_VARIABLES_H_
+#define TOOLS_GN_VARIABLES_H_
+
+#include <map>
+
+#include "base/strings/string_piece.h"
+
+namespace variables {
+
+// Builtin vars ----------------------------------------------------------------
+
+extern const char kHostCpu[];
+extern const char kHostCpu_HelpShort[];
+extern const char kHostCpu_Help[];
+
+extern const char kHostOs[];
+extern const char kHostOs_HelpShort[];
+extern const char kHostOs_Help[];
+
+extern const char kCurrentCpu[];
+extern const char kCurrentCpu_HelpShort[];
+extern const char kCurrentCpu_Help[];
+
+extern const char kCurrentOs[];
+extern const char kCurrentOs_HelpShort[];
+extern const char kCurrentOs_Help[];
+
+extern const char kCurrentToolchain[];
+extern const char kCurrentToolchain_HelpShort[];
+extern const char kCurrentToolchain_Help[];
+
+extern const char kDefaultToolchain[];
+extern const char kDefaultToolchain_HelpShort[];
+extern const char kDefaultToolchain_Help[];
+
+extern const char kPythonPath[];
+extern const char kPythonPath_HelpShort[];
+extern const char kPythonPath_Help[];
+
+extern const char kRootBuildDir[];
+extern const char kRootBuildDir_HelpShort[];
+extern const char kRootBuildDir_Help[];
+
+extern const char kRootGenDir[];
+extern const char kRootGenDir_HelpShort[];
+extern const char kRootGenDir_Help[];
+
+extern const char kRootOutDir[];
+extern const char kRootOutDir_HelpShort[];
+extern const char kRootOutDir_Help[];
+
+extern const char kTargetCpu[];
+extern const char kTargetCpu_HelpShort[];
+extern const char kTargetCpu_Help[];
+
+extern const char kTargetOs[];
+extern const char kTargetOs_HelpShort[];
+extern const char kTargetOs_Help[];
+
+extern const char kTargetGenDir[];
+extern const char kTargetGenDir_HelpShort[];
+extern const char kTargetGenDir_Help[];
+
+extern const char kTargetOutDir[];
+extern const char kTargetOutDir_HelpShort[];
+extern const char kTargetOutDir_Help[];
+
+// Target vars -----------------------------------------------------------------
+
+extern const char kAllDependentConfigs[];
+extern const char kAllDependentConfigs_HelpShort[];
+extern const char kAllDependentConfigs_Help[];
+
+extern const char kAllowCircularIncludesFrom[];
+extern const char kAllowCircularIncludesFrom_HelpShort[];
+extern const char kAllowCircularIncludesFrom_Help[];
+
+extern const char kArgs[];
+extern const char kArgs_HelpShort[];
+extern const char kArgs_Help[];
+
+extern const char kAsmflags[];
+extern const char kAsmflags_HelpShort[];
+extern const char* kAsmflags_Help;
+
+extern const char kAssertNoDeps[];
+extern const char kAssertNoDeps_HelpShort[];
+extern const char kAssertNoDeps_Help[];
+
+extern const char kBundleRootDir[];
+extern const char kBundleRootDir_HelpShort[];
+extern const char kBundleRootDir_Help[];
+
+extern const char kBundleResourcesDir[];
+extern const char kBundleResourcesDir_HelpShort[];
+extern const char kBundleResourcesDir_Help[];
+
+extern const char kBundleExecutableDir[];
+extern const char kBundleExecutableDir_HelpShort[];
+extern const char kBundleExecutableDir_Help[];
+
+extern const char kBundlePlugInsDir[];
+extern const char kBundlePlugInsDir_HelpShort[];
+extern const char kBundlePlugInsDir_Help[];
+
+extern const char kCflags[];
+extern const char kCflags_HelpShort[];
+extern const char* kCflags_Help;
+
+extern const char kCflagsC[];
+extern const char kCflagsC_HelpShort[];
+extern const char* kCflagsC_Help;
+
+extern const char kCflagsCC[];
+extern const char kCflagsCC_HelpShort[];
+extern const char* kCflagsCC_Help;
+
+extern const char kCflagsObjC[];
+extern const char kCflagsObjC_HelpShort[];
+extern const char* kCflagsObjC_Help;
+
+extern const char kCflagsObjCC[];
+extern const char kCflagsObjCC_HelpShort[];
+extern const char* kCflagsObjCC_Help;
+
+extern const char kCheckIncludes[];
+extern const char kCheckIncludes_HelpShort[];
+extern const char kCheckIncludes_Help[];
+
+extern const char kCompleteStaticLib[];
+extern const char kCompleteStaticLib_HelpShort[];
+extern const char kCompleteStaticLib_Help[];
+
+extern const char kConfigs[];
+extern const char kConfigs_HelpShort[];
+extern const char kConfigs_Help[];
+
+extern const char kConsole[];
+extern const char kConsole_HelpShort[];
+extern const char kConsole_Help[];
+
+extern const char kData[];
+extern const char kData_HelpShort[];
+extern const char kData_Help[];
+
+extern const char kDataDeps[];
+extern const char kDataDeps_HelpShort[];
+extern const char kDataDeps_Help[];
+
+extern const char kDefines[];
+extern const char kDefines_HelpShort[];
+extern const char kDefines_Help[];
+
+extern const char kDepfile[];
+extern const char kDepfile_HelpShort[];
+extern const char kDepfile_Help[];
+
+extern const char kDeps[];
+extern const char kDeps_HelpShort[];
+extern const char kDeps_Help[];
+
+extern const char kIncludeDirs[];
+extern const char kIncludeDirs_HelpShort[];
+extern const char kIncludeDirs_Help[];
+
+extern const char kInputs[];
+extern const char kInputs_HelpShort[];
+extern const char kInputs_Help[];
+
+extern const char kLdflags[];
+extern const char kLdflags_HelpShort[];
+extern const char kLdflags_Help[];
+
+extern const char kLibDirs[];
+extern const char kLibDirs_HelpShort[];
+extern const char kLibDirs_Help[];
+
+extern const char kLibs[];
+extern const char kLibs_HelpShort[];
+extern const char kLibs_Help[];
+
+extern const char kOutputExtension[];
+extern const char kOutputExtension_HelpShort[];
+extern const char kOutputExtension_Help[];
+
+extern const char kOutputName[];
+extern const char kOutputName_HelpShort[];
+extern const char kOutputName_Help[];
+
+extern const char kOutputPrefixOverride[];
+extern const char kOutputPrefixOverride_HelpShort[];
+extern const char kOutputPrefixOverride_Help[];
+
+extern const char kOutputs[];
+extern const char kOutputs_HelpShort[];
+extern const char kOutputs_Help[];
+
+extern const char kPrecompiledHeader[];
+extern const char kPrecompiledHeader_HelpShort[];
+extern const char kPrecompiledHeader_Help[];
+
+extern const char kPrecompiledSource[];
+extern const char kPrecompiledSource_HelpShort[];
+extern const char kPrecompiledSource_Help[];
+
+extern const char kPublic[];
+extern const char kPublic_HelpShort[];
+extern const char kPublic_Help[];
+
+extern const char kPublicConfigs[];
+extern const char kPublicConfigs_HelpShort[];
+extern const char kPublicConfigs_Help[];
+
+extern const char kPublicDeps[];
+extern const char kPublicDeps_HelpShort[];
+extern const char kPublicDeps_Help[];
+
+extern const char kResponseFileContents[];
+extern const char kResponseFileContents_HelpShort[];
+extern const char kResponseFileContents_Help[];
+
+extern const char kScript[];
+extern const char kScript_HelpShort[];
+extern const char kScript_Help[];
+
+extern const char kSources[];
+extern const char kSources_HelpShort[];
+extern const char kSources_Help[];
+
+extern const char kTestonly[];
+extern const char kTestonly_HelpShort[];
+extern const char kTestonly_Help[];
+
+extern const char kVisibility[];
+extern const char kVisibility_HelpShort[];
+extern const char kVisibility_Help[];
+
+extern const char kWriteRuntimeDeps[];
+extern const char kWriteRuntimeDeps_HelpShort[];
+extern const char kWriteRuntimeDeps_Help[];
+
+// -----------------------------------------------------------------------------
+
+struct VariableInfo {
+ VariableInfo();
+ VariableInfo(const char* in_help_short,
+ const char* in_help);
+
+ const char* help_short;
+ const char* help;
+};
+
+typedef std::map<base::StringPiece, VariableInfo> VariableInfoMap;
+
+// Returns the built-in readonly variables.
+// Note: this is used only for help so this getter is not threadsafe.
+const VariableInfoMap& GetBuiltinVariables();
+
+// Returns the variables used by target generators.
+// Note: this is used only for help so this getter is not threadsafe.
+const VariableInfoMap& GetTargetVariables();
+
+} // namespace variables
+
+#endif // TOOLS_GN_VARIABLES_H_
diff --git a/chromium/tools/gn/visibility.cc b/chromium/tools/gn/visibility.cc
new file mode 100644
index 00000000000..a61a5cd9c45
--- /dev/null
+++ b/chromium/tools/gn/visibility.cc
@@ -0,0 +1,110 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/visibility.h"
+
+#include "base/strings/string_piece.h"
+#include "base/strings/string_util.h"
+#include "tools/gn/err.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/item.h"
+#include "tools/gn/label.h"
+#include "tools/gn/scope.h"
+#include "tools/gn/value.h"
+#include "tools/gn/variables.h"
+
+Visibility::Visibility() {
+}
+
+Visibility::~Visibility() {
+}
+
+bool Visibility::Set(const SourceDir& current_dir,
+ const Value& value,
+ Err* err) {
+ patterns_.clear();
+
+ if (!value.VerifyTypeIs(Value::LIST, err)) {
+ CHECK(err->has_error());
+ return false;
+ }
+
+ for (const auto& item : value.list_value()) {
+ patterns_.push_back(LabelPattern::GetPattern(current_dir, item, err));
+ if (err->has_error())
+ return false;
+ }
+ return true;
+}
+
+void Visibility::SetPublic() {
+ patterns_.clear();
+ patterns_.push_back(
+ LabelPattern(LabelPattern::RECURSIVE_DIRECTORY, SourceDir(),
+ std::string(), Label()));
+}
+
+void Visibility::SetPrivate(const SourceDir& current_dir) {
+ patterns_.clear();
+ patterns_.push_back(
+ LabelPattern(LabelPattern::DIRECTORY, current_dir, std::string(),
+ Label()));
+}
+
+bool Visibility::CanSeeMe(const Label& label) const {
+ for (const auto& pattern : patterns_) {
+ if (pattern.Matches(label))
+ return true;
+ }
+ return false;
+}
+
+std::string Visibility::Describe(int indent, bool include_brackets) const {
+ std::string outer_indent_string(indent, ' ');
+
+ if (patterns_.empty())
+ return outer_indent_string + "[] (no visibility)\n";
+
+ std::string result;
+
+ std::string inner_indent_string = outer_indent_string;
+ if (include_brackets) {
+ result += outer_indent_string + "[\n";
+ // Indent the insides more if brackets are requested.
+ inner_indent_string += " ";
+ }
+
+ for (const auto& pattern : patterns_)
+ result += inner_indent_string + pattern.Describe() + "\n";
+
+ if (include_brackets)
+ result += outer_indent_string + "]\n";
+ return result;
+}
+
+// static
+bool Visibility::CheckItemVisibility(const Item* from,
+ const Item* to,
+ Err* err) {
+ if (!to->visibility().CanSeeMe(from->label())) {
+ std::string to_label = to->label().GetUserVisibleName(false);
+ *err = Err(from->defined_from(), "Dependency not allowed.",
+ "The item " + from->label().GetUserVisibleName(false) + "\n"
+ "can not depend on " + to_label + "\n"
+ "because it is not in " + to_label + "'s visibility list: " +
+ to->visibility().Describe(0, true));
+ return false;
+ }
+ return true;
+}
+
+// static
+bool Visibility::FillItemVisibility(Item* item, Scope* scope, Err* err) {
+ const Value* vis_value = scope->GetValue(variables::kVisibility, true);
+ if (vis_value)
+ item->visibility().Set(scope->GetSourceDir(), *vis_value, err);
+ else // Default to public.
+ item->visibility().SetPublic();
+ return !err->has_error();
+}
diff --git a/chromium/tools/gn/visibility.h b/chromium/tools/gn/visibility.h
new file mode 100644
index 00000000000..d6d7f1ffe01
--- /dev/null
+++ b/chromium/tools/gn/visibility.h
@@ -0,0 +1,60 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_VISIBILITY_H_
+#define TOOLS_GN_VISIBILITY_H_
+
+#include <vector>
+
+#include "base/macros.h"
+#include "tools/gn/label_pattern.h"
+#include "tools/gn/source_dir.h"
+
+class Err;
+class Item;
+class Label;
+class Scope;
+class Value;
+
// Holds an item's visibility list: a set of label patterns naming who may
// depend on it. An item is visible to a label if any pattern matches it.
class Visibility {
 public:
  // Defaults to an empty pattern list, which matches nothing (see CanSeeMe()
  // in the .cc file). Callers normally follow up with Set()/SetPublic()/
  // SetPrivate().
  Visibility();
  ~Visibility();

  // Set the visibility to the thing specified by the given value. On failure,
  // returns false and sets the error.
  bool Set(const SourceDir& current_dir, const Value& value, Err* err);

  // Sets the visibility to be public.
  void SetPublic();

  // Sets the visibility to be private to the given directory.
  void SetPrivate(const SourceDir& current_dir);

  // Returns true if the target with the given label can depend on one with the
  // current visibility.
  bool CanSeeMe(const Label& label) const;

  // Returns a string listing the visibility. |indent| number of spaces will
  // be added on the left side of the output. If |include_brackets| is set, the
  // result will be wrapped in "[ ]" and the contents further indented. The
  // result will end in a newline.
  std::string Describe(int indent, bool include_brackets) const;

  // Helper function to check visibility between the given two items. If
  // to is invisible to from, returns false and sets the error.
  static bool CheckItemVisibility(const Item* from, const Item* to, Err* err);

  // Helper function to fill an item's visibility from the "visibility" value
  // in the current scope.
  static bool FillItemVisibility(Item* item, Scope* scope, Err* err);

 private:
  // Visible if any one of these patterns matches.
  std::vector<LabelPattern> patterns_;

  DISALLOW_COPY_AND_ASSIGN(Visibility);
};
+
+#endif // TOOLS_GN_VISIBILITY_H_
diff --git a/chromium/tools/gn/visibility_unittest.cc b/chromium/tools/gn/visibility_unittest.cc
new file mode 100644
index 00000000000..e120f74a040
--- /dev/null
+++ b/chromium/tools/gn/visibility_unittest.cc
@@ -0,0 +1,52 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/err.h"
+#include "tools/gn/label.h"
+#include "tools/gn/value.h"
+#include "tools/gn/visibility.h"
+
+TEST(Visibility, CanSeeMe) {
+ Value list(nullptr, Value::LIST);
+ list.list_value().push_back(Value(nullptr, "//rec/*")); // Recursive.
+ list.list_value().push_back(Value(nullptr, "//dir:*")); // One dir.
+ list.list_value().push_back(Value(nullptr, "//my:name")); // Exact match.
+
+ Err err;
+ Visibility vis;
+ ASSERT_TRUE(vis.Set(SourceDir("//"), list, &err));
+
+ EXPECT_FALSE(vis.CanSeeMe(Label(SourceDir("//random/"), "thing")));
+ EXPECT_FALSE(vis.CanSeeMe(Label(SourceDir("//my/"), "notname")));
+
+ EXPECT_TRUE(vis.CanSeeMe(Label(SourceDir("//my/"), "name")));
+ EXPECT_TRUE(vis.CanSeeMe(Label(SourceDir("//rec/"), "anything")));
+ EXPECT_TRUE(vis.CanSeeMe(Label(SourceDir("//rec/a/"), "anything")));
+ EXPECT_TRUE(vis.CanSeeMe(Label(SourceDir("//rec/b/"), "anything")));
+ EXPECT_TRUE(vis.CanSeeMe(Label(SourceDir("//dir/"), "anything")));
+ EXPECT_FALSE(vis.CanSeeMe(Label(SourceDir("//dir/a/"), "anything")));
+ EXPECT_FALSE(vis.CanSeeMe(Label(SourceDir("//directory/"), "anything")));
+}
+
+TEST(Visibility, Public) {
+ Err err;
+ Visibility vis;
+
+ Value list(nullptr, Value::LIST);
+ list.list_value().push_back(Value(nullptr, "*"));
+ ASSERT_TRUE(vis.Set(SourceDir("//"), list, &err));
+
+ EXPECT_TRUE(vis.CanSeeMe(Label(SourceDir("//random/"), "thing")));
+ EXPECT_TRUE(vis.CanSeeMe(Label(SourceDir("//"), "")));
+}
+
+TEST(Visibility, Private) {
+ Err err;
+ Visibility vis;
+ ASSERT_TRUE(vis.Set(SourceDir("//"), Value(nullptr, Value::LIST), &err));
+
+ EXPECT_FALSE(vis.CanSeeMe(Label(SourceDir("//random/"), "thing")));
+ EXPECT_FALSE(vis.CanSeeMe(Label(SourceDir("//"), "")));
+}
diff --git a/chromium/tools/gn/visual_studio_utils.cc b/chromium/tools/gn/visual_studio_utils.cc
new file mode 100644
index 00000000000..894472213dc
--- /dev/null
+++ b/chromium/tools/gn/visual_studio_utils.cc
@@ -0,0 +1,117 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/visual_studio_utils.h"
+
+#include "base/md5.h"
+#include "base/strings/string_util.h"
+
// Out-of-line defaulted constructor/destructor; all members are std::strings
// that start out empty (see visual_studio_utils.h).
CompilerOptions::CompilerOptions() = default;

CompilerOptions::~CompilerOptions() = default;
+
+std::string MakeGuid(const std::string& entry_path, const std::string& seed) {
+ std::string str = base::ToUpperASCII(base::MD5String(seed + entry_path));
+ return '{' + str.substr(0, 8) + '-' + str.substr(8, 4) + '-' +
+ str.substr(12, 4) + '-' + str.substr(16, 4) + '-' +
+ str.substr(20, 12) + '}';
+}
+
// Helper macros for ParseCompilerOption() below. Note that both macros
// contain a |return| statement: when |condition| holds they update |options|
// and return from the *enclosing function*, skipping the additional_options
// catch-all at the bottom. SetOption overwrites the member; AppendOption
// accumulates values, appending |separator| after each one.

#define SetOption(condition, member, value) \
  if (condition) { \
    options->member = value; \
    return; \
  }

#define AppendOption(condition, member, value, separator) \
  if (condition) { \
    options->member += value + separator; \
    return; \
  }

// Parses a single MSVC-style compiler flag and records the recognized setting
// in |options|. Unrecognized flags (and anything not starting with '/') are
// collected verbatim, space-terminated, in options->additional_options.
void ParseCompilerOption(const std::string& cflag, CompilerOptions* options) {
  if (cflag.size() > 2 && cflag[0] == '/') {
    switch (cflag[1]) {
      case 'F':
        // /FI<file>: forced include file.
        AppendOption(cflag.size() > 3 && cflag[2] == 'I', forced_include_files,
                     cflag.substr(3), ';')
        break;

      case 'G':
        // /GS enables, /GS- disables the buffer security check.
        if (cflag[2] == 'S') {
          SetOption(cflag.size() == 3, buffer_security_check, "true")
          SetOption(cflag.size() == 4 && cflag[3] == '-',
                    buffer_security_check, "false")
        }
        break;

      case 'M':
        // /MD[d], /MT[d]: C runtime library selection.
        switch (cflag[2]) {
          case 'D':
            SetOption(cflag.size() == 3, runtime_library, "MultiThreadedDLL")
            SetOption(cflag.size() == 4 && cflag[3] == 'd', runtime_library,
                      "MultiThreadedDebugDLL")
            break;

          case 'T':
            SetOption(cflag.size() == 3, runtime_library, "MultiThreaded")
            SetOption(cflag.size() == 4 && cflag[3] == 'd', runtime_library,
                      "MultiThreadedDebug")
            break;
        }
        break;

      case 'O':
        // /O1, /O2, /Od, /Ox: optimization level.
        switch (cflag[2]) {
          case '1':
            SetOption(cflag.size() == 3, optimization, "MinSpace")
            break;

          case '2':
            SetOption(cflag.size() == 3, optimization, "MaxSpeed")
            break;

          case 'd':
            SetOption(cflag.size() == 3, optimization, "Disabled")
            break;

          case 'x':
            SetOption(cflag.size() == 3, optimization, "Full")
            break;
        }
        break;

      case 'T':
        // Skip flags that cause treating all source files as C and C++ files.
        if (cflag.size() == 3 && (cflag[2] == 'C' || cflag[2] == 'P'))
          return;
        break;

      case 'W':
        // /W0../W4: warning level; /WX: treat warnings as errors.
        switch (cflag[2]) {
          case '0':
          case '1':
          case '2':
          case '3':
          case '4':
            SetOption(cflag.size() == 3, warning_level,
                      std::string("Level") + cflag[2])
            break;

          case 'X':
            SetOption(cflag.size() == 3, treat_warning_as_error, "true")
            break;
        }
        break;

      case 'w':
        // /wd<number>: disable a specific warning.
        AppendOption(cflag.size() > 3 && cflag[2] == 'd',
                     disable_specific_warnings, cflag.substr(3), ';')
        break;
    }
  }

  // Put everything else into additional_options.
  options->additional_options += cflag + ' ';
}
diff --git a/chromium/tools/gn/visual_studio_utils.h b/chromium/tools/gn/visual_studio_utils.h
new file mode 100644
index 00000000000..b91b2183741
--- /dev/null
+++ b/chromium/tools/gn/visual_studio_utils.h
@@ -0,0 +1,37 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_VISUAL_STUDIO_UTILS_H_
+#define TOOLS_GN_VISUAL_STUDIO_UTILS_H_
+
+#include <string>
+
// Some compiler options which will be written to the project file. We don't
// need to specify all options because the generated project file is going to
// be used only for compilation of a single file; for the real build, ninja
// files are used.
struct CompilerOptions {
  CompilerOptions();
  ~CompilerOptions();

  // Empty string means "not set". additional_options, forced_include_files
  // and disable_specific_warnings accumulate every matching flag (see
  // ParseCompilerOption() in the .cc file); the others keep the value of the
  // last matching flag.
  std::string additional_options;
  std::string buffer_security_check;
  std::string forced_include_files;
  std::string disable_specific_warnings;
  std::string optimization;
  std::string runtime_library;
  std::string treat_warning_as_error;
  std::string warning_level;
};
+
+// Generates something which looks like a GUID, but depends only on the name
+// and seed. This means the same name / seed will always generate the same
+// GUID, so that projects and solutions which refer to each other can
+// deterministically compute the GUID they need to reference. It also means
+// that the GUID will not change when the project for a target is rebuilt.
+std::string MakeGuid(const std::string& entry_path, const std::string& seed);
+
+// Parses |cflag| value and stores it in |options|.
+void ParseCompilerOption(const std::string& cflag, CompilerOptions* options);
+
+#endif // TOOLS_GN_VISUAL_STUDIO_UTILS_H_
diff --git a/chromium/tools/gn/visual_studio_utils_unittest.cc b/chromium/tools/gn/visual_studio_utils_unittest.cc
new file mode 100644
index 00000000000..c4e25305452
--- /dev/null
+++ b/chromium/tools/gn/visual_studio_utils_unittest.cc
@@ -0,0 +1,94 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/visual_studio_utils.h"
+
+#include "base/location.h"
+#include "base/strings/string_util.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+TEST(VisualStudioUtils, MakeGuid) {
+ std::string pattern = "{xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx}";
+ std::string guid = MakeGuid(__FILE__, "foo");
+ ASSERT_EQ(pattern.size(), guid.size());
+ for (size_t i = 0; i < pattern.size(); ++i) {
+ if (pattern[i] == 'x')
+ ASSERT_TRUE(base::IsAsciiAlpha(guid[i]) || base::IsAsciiDigit(guid[i]));
+ else
+ ASSERT_EQ(pattern[i], guid[i]);
+ }
+
+ // Calling function again should produce the same GUID.
+ ASSERT_EQ(guid, MakeGuid(__FILE__, "foo"));
+
+ // GUIDs should be different if path or seed is different.
+ ASSERT_NE(guid, MakeGuid(std::string(__FILE__) + ".txt", "foo"));
+ ASSERT_NE(guid, MakeGuid(__FILE__, "bar"));
+}
+
+TEST(VisualStudioUtils, ParseCompilerOption) {
+ CompilerOptions options;
+ ParseCompilerOption("/FIinclude.h", &options);
+ ParseCompilerOption("/FIC:/path/file.h", &options);
+ ASSERT_EQ("include.h;C:/path/file.h;", options.forced_include_files);
+
+ CHECK(options.buffer_security_check.empty());
+ ParseCompilerOption("/GS", &options);
+ ASSERT_EQ("true", options.buffer_security_check);
+ ParseCompilerOption("/GS-", &options);
+ ASSERT_EQ("false", options.buffer_security_check);
+
+ CHECK(options.runtime_library.empty());
+ ParseCompilerOption("/MD", &options);
+ ASSERT_EQ("MultiThreadedDLL", options.runtime_library);
+ ParseCompilerOption("/MDd", &options);
+ ASSERT_EQ("MultiThreadedDebugDLL", options.runtime_library);
+ ParseCompilerOption("/MT", &options);
+ ASSERT_EQ("MultiThreaded", options.runtime_library);
+ ParseCompilerOption("/MTd", &options);
+ ASSERT_EQ("MultiThreadedDebug", options.runtime_library);
+
+ CHECK(options.optimization.empty());
+ ParseCompilerOption("/O1", &options);
+ ASSERT_EQ("MinSpace", options.optimization);
+ ParseCompilerOption("/O2", &options);
+ ASSERT_EQ("MaxSpeed", options.optimization);
+ ParseCompilerOption("/Od", &options);
+ ASSERT_EQ("Disabled", options.optimization);
+ ParseCompilerOption("/Ox", &options);
+ ASSERT_EQ("Full", options.optimization);
+
+ CHECK(options.additional_options.empty());
+ ParseCompilerOption("/TC", &options);
+ ASSERT_TRUE(options.additional_options.empty());
+ ParseCompilerOption("/TP", &options);
+ ASSERT_TRUE(options.additional_options.empty());
+
+ CHECK(options.warning_level.empty());
+ ParseCompilerOption("/W0", &options);
+ ASSERT_EQ("Level0", options.warning_level);
+ ParseCompilerOption("/W1", &options);
+ ASSERT_EQ("Level1", options.warning_level);
+ ParseCompilerOption("/W2", &options);
+ ASSERT_EQ("Level2", options.warning_level);
+ ParseCompilerOption("/W3", &options);
+ ASSERT_EQ("Level3", options.warning_level);
+ ParseCompilerOption("/W4", &options);
+ ASSERT_EQ("Level4", options.warning_level);
+
+ CHECK(options.treat_warning_as_error.empty());
+ ParseCompilerOption("/WX", &options);
+ ASSERT_EQ("true", options.treat_warning_as_error);
+
+ CHECK(options.disable_specific_warnings.empty());
+ ParseCompilerOption("/wd1234", &options);
+ ParseCompilerOption("/wd56", &options);
+ ASSERT_EQ("1234;56;", options.disable_specific_warnings);
+
+ CHECK(options.additional_options.empty());
+ ParseCompilerOption("/MP", &options);
+ ParseCompilerOption("/bigobj", &options);
+ ParseCompilerOption("/Zc:sizedDealloc", &options);
+ ASSERT_EQ("/MP /bigobj /Zc:sizedDealloc ", options.additional_options);
+}
diff --git a/chromium/tools/gn/visual_studio_writer.cc b/chromium/tools/gn/visual_studio_writer.cc
new file mode 100644
index 00000000000..e0483465f2f
--- /dev/null
+++ b/chromium/tools/gn/visual_studio_writer.cc
@@ -0,0 +1,805 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/visual_studio_writer.h"
+
+#include <algorithm>
+#include <map>
+#include <memory>
+#include <set>
+#include <string>
+
+#include "base/logging.h"
+#include "base/strings/string_split.h"
+#include "base/strings/string_util.h"
+#include "base/strings/utf_string_conversions.h"
+#include "tools/gn/builder.h"
+#include "tools/gn/commands.h"
+#include "tools/gn/config.h"
+#include "tools/gn/config_values_extractors.h"
+#include "tools/gn/filesystem_utils.h"
+#include "tools/gn/label_pattern.h"
+#include "tools/gn/parse_tree.h"
+#include "tools/gn/path_output.h"
+#include "tools/gn/source_file_type.h"
+#include "tools/gn/standard_out.h"
+#include "tools/gn/target.h"
+#include "tools/gn/variables.h"
+#include "tools/gn/visual_studio_utils.h"
+#include "tools/gn/xml_element_writer.h"
+
+#if defined(OS_WIN)
+#include "base/win/registry.h"
+#endif
+
namespace {

// Streams each value followed by ';', producing the semicolon-separated
// lists used by MSBuild properties.
struct SemicolonSeparatedWriter {
  void operator()(const std::string& value, std::ostream& out) const {
    out << value + ';';
  }
};

// Writes an include directory (without trailing slash) followed by ';'.
// Holds a non-owning reference; the PathOutput must outlive the writer.
struct IncludeDirWriter {
  explicit IncludeDirWriter(PathOutput& path_output)
      : path_output_(path_output) {}
  ~IncludeDirWriter() = default;

  void operator()(const SourceDir& dir, std::ostream& out) const {
    path_output_.WriteDir(out, dir, PathOutput::DIR_NO_LAST_SLASH);
    out << ";";
  }

  PathOutput& path_output_;
};

// Writes a single source file path. Holds non-owning references; both the
// PathOutput and the SourceFile must outlive the writer.
struct SourceFileWriter {
  SourceFileWriter(PathOutput& path_output, const SourceFile& source_file)
      : path_output_(path_output), source_file_(source_file) {}
  ~SourceFileWriter() = default;

  void operator()(std::ostream& out) const {
    path_output_.WriteFile(out, source_file_);
  }

  PathOutput& path_output_;
  const SourceFile& source_file_;
};

// Version-dependent strings written into the generated projects/solution.
const char kToolsetVersionVs2013[] = "v120";  // Visual Studio 2013
const char kToolsetVersionVs2015[] = "v140";  // Visual Studio 2015
const char kProjectVersionVs2013[] = "12.0";  // Visual Studio 2013
const char kProjectVersionVs2015[] = "14.0";  // Visual Studio 2015
const char kVersionStringVs2013[] = "Visual Studio 2013";  // Visual Studio 2013
const char kVersionStringVs2015[] = "Visual Studio 2015";  // Visual Studio 2015
const char kWindowsKitsVersion[] = "10";                   // Windows 10 SDK
const char kWindowsKitsIncludeVersion[] = "10.0.10586.0";  // Windows 10 SDK

// Fixed entry-type GUIDs used in .sln files (presumably the well-known
// Visual Studio project/folder type identifiers — values are VS-defined).
const char kGuidTypeProject[] = "{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}";
const char kGuidTypeFolder[] = "{2150E333-8FDC-42A3-9474-1A3956D46DE8}";
// Seeds fed to MakeGuid() so the three kinds of entries never collide.
const char kGuidSeedProject[] = "project";
const char kGuidSeedFolder[] = "folder";
const char kGuidSeedFilter[] = "filter";

// Name of the single build configuration written into each project.
const char kConfigurationName[] = "GN";
+
// Returns the semicolon-terminated list of Windows 10 SDK include
// directories ("shared", "um" and "winrt"). On Windows the SDK root is read
// from the registry; elsewhere, or if the registry lookup fails, a default
// installation path is assumed.
std::string GetWindowsKitsIncludeDirs() {
  std::string kits_path;

#if defined(OS_WIN)
  // Check both the native and the WOW6432Node registry views for the root.
  const base::char16* const subkeys[] = {
      L"SOFTWARE\\Microsoft\\Windows Kits\\Installed Roots",
      L"SOFTWARE\\Wow6432Node\\Microsoft\\Windows Kits\\Installed Roots"};

  base::string16 value_name =
      base::ASCIIToUTF16("KitsRoot") + base::ASCIIToUTF16(kWindowsKitsVersion);

  for (const base::char16* subkey : subkeys) {
    base::win::RegKey key(HKEY_LOCAL_MACHINE, subkey, KEY_READ);
    base::string16 value;
    if (key.ReadValue(value_name.c_str(), &value) == ERROR_SUCCESS) {
      kits_path = base::UTF16ToUTF8(value);
      break;
    }
  }
#endif  // OS_WIN

  if (kits_path.empty()) {
    // Fall back to the default installation location.
    kits_path = std::string("C:\\Program Files (x86)\\Windows Kits\\") +
                kWindowsKitsVersion + "\\";
  }

  return kits_path + "Include\\" + kWindowsKitsIncludeVersion + "\\shared;" +
         kits_path + "Include\\" + kWindowsKitsIncludeVersion + "\\um;" +
         kits_path + "Include\\" + kWindowsKitsIncludeVersion + "\\winrt;";
}
+
+std::string GetConfigurationType(const Target* target, Err* err) {
+ switch (target->output_type()) {
+ case Target::EXECUTABLE:
+ return "Application";
+ case Target::SHARED_LIBRARY:
+ case Target::LOADABLE_MODULE:
+ return "DynamicLibrary";
+ case Target::STATIC_LIBRARY:
+ case Target::SOURCE_SET:
+ return "StaticLibrary";
+ case Target::GROUP:
+ return "Utility";
+
+ default:
+ *err = Err(Location(),
+ "Visual Studio doesn't support '" + target->label().name() +
+ "' target output type: " +
+ Target::GetStringForOutputType(target->output_type()));
+ return std::string();
+ }
+}
+
+void ParseCompilerOptions(const std::vector<std::string>& cflags,
+ CompilerOptions* options) {
+ for (const std::string& flag : cflags)
+ ParseCompilerOption(flag, options);
+}
+
+void ParseCompilerOptions(const Target* target, CompilerOptions* options) {
+ for (ConfigValuesIterator iter(target); !iter.done(); iter.Next()) {
+ ParseCompilerOptions(iter.cur().cflags(), options);
+ ParseCompilerOptions(iter.cur().cflags_c(), options);
+ ParseCompilerOptions(iter.cur().cflags_cc(), options);
+ }
+}
+
+// Returns a string piece pointing into the input string identifying the parent
+// directory path, excluding the last slash. Note that the input pointer must
+// outlive the output.
+base::StringPiece FindParentDir(const std::string* path) {
+ DCHECK(path && !path->empty());
+ for (int i = static_cast<int>(path->size()) - 2; i >= 0; --i) {
+ if (IsSlash((*path)[i]))
+ return base::StringPiece(path->data(), i);
+ }
+ return base::StringPiece();
+}
+
+} // namespace
+
// The leading-underscore parameters distinguish constructor arguments from
// the identically-named public members they initialize.
VisualStudioWriter::SolutionEntry::SolutionEntry(const std::string& _name,
                                                 const std::string& _path,
                                                 const std::string& _guid)
    : name(_name), path(_path), guid(_guid), parent_folder(nullptr) {}

VisualStudioWriter::SolutionEntry::~SolutionEntry() = default;

VisualStudioWriter::SolutionProject::SolutionProject(
    const std::string& _name,
    const std::string& _path,
    const std::string& _guid,
    const std::string& _label_dir_path,
    const std::string& _config_platform)
    : SolutionEntry(_name, _path, _guid),
      label_dir_path(_label_dir_path),
      config_platform(_config_platform) {}

VisualStudioWriter::SolutionProject::~SolutionProject() = default;
+
VisualStudioWriter::VisualStudioWriter(const BuildSettings* build_settings,
                                       const char* config_platform,
                                       Version version)
    : build_settings_(build_settings),
      config_platform_(config_platform),
      ninja_path_output_(build_settings->build_dir(),
                         build_settings->root_path_utf8(),
                         EscapingMode::ESCAPE_NINJA_COMMAND) {
  // Select the project/toolset/branding strings for the requested VS version.
  switch (version) {
    case Version::Vs2013:
      project_version_ = kProjectVersionVs2013;
      toolset_version_ = kToolsetVersionVs2013;
      version_string_ = kVersionStringVs2013;
      break;
    case Version::Vs2015:
      project_version_ = kProjectVersionVs2015;
      toolset_version_ = kToolsetVersionVs2015;
      version_string_ = kVersionStringVs2015;
      break;
    default:
      NOTREACHED() << "Not a valid Visual Studio Version: " << version;
  }

  // Computed once; the same SDK include list is written into every project.
  windows_kits_include_dirs_ = GetWindowsKitsIncludeDirs();
}

// projects_ and folders_ own their entries as raw pointers; delete them here.
VisualStudioWriter::~VisualStudioWriter() {
  STLDeleteContainerPointers(projects_.begin(), projects_.end());
  STLDeleteContainerPointers(folders_.begin(), folders_.end());
}
+
// Entry point for "gn gen --ide=vs": writes a .vcxproj/.filters pair for each
// eligible resolved target, then a solution file referencing them all.
// |dir_filters| is a semicolon-separated list of label patterns limiting
// which targets get projects; empty means all resolved targets.
// static
bool VisualStudioWriter::RunAndWriteFiles(const BuildSettings* build_settings,
                                          Builder* builder,
                                          Version version,
                                          const std::string& sln_name,
                                          const std::string& dir_filters,
                                          Err* err) {
  std::vector<const Target*> targets;
  if (dir_filters.empty()) {
    targets = builder->GetAllResolvedTargets();
  } else {
    // Parse each filter token into a LabelPattern relative to the current
    // directory, then keep only the targets matching some pattern.
    std::vector<std::string> tokens = base::SplitString(
        dir_filters, ";", base::TRIM_WHITESPACE, base::SPLIT_WANT_NONEMPTY);
    SourceDir root_dir =
        SourceDirForCurrentDirectory(build_settings->root_path());

    std::vector<LabelPattern> filters;
    for (const std::string& token : tokens) {
      LabelPattern pattern =
          LabelPattern::GetPattern(root_dir, Value(nullptr, token), err);
      if (err->has_error())
        return false;
      filters.push_back(pattern);
    }

    commands::FilterTargetsByPatterns(builder->GetAllResolvedTargets(), filters,
                                      &targets);
  }

  const char* config_platform = "Win32";

  // Assume the "target_cpu" variable does not change between different
  // toolchains.
  if (!targets.empty()) {
    const Scope* scope = targets.front()->settings()->base_config();
    const Value* target_cpu_value = scope->GetValue(variables::kTargetCpu);
    if (target_cpu_value != nullptr &&
        target_cpu_value->string_value() == "x64")
      config_platform = "x64";
  }

  VisualStudioWriter writer(build_settings, config_platform, version);
  writer.projects_.reserve(targets.size());
  writer.folders_.reserve(targets.size());

  for (const Target* target : targets) {
    // Skip actions and bundle targets.
    if (target->output_type() == Target::COPY_FILES ||
        target->output_type() == Target::ACTION ||
        target->output_type() == Target::ACTION_FOREACH ||
        target->output_type() == Target::BUNDLE_DATA) {
      continue;
    }

    if (!writer.WriteProjectFiles(target, err))
      return false;
  }

  if (writer.projects_.empty()) {
    *err = Err(Location(), "No Visual Studio projects generated.");
    return false;
  }

  // Sort projects so they appear always in the same order in solution file.
  // Otherwise solution file is rewritten and reloaded by Visual Studio.
  std::sort(writer.projects_.begin(), writer.projects_.end(),
            [](const SolutionEntry* a, const SolutionEntry* b) {
              return a->path < b->path;
            });

  writer.ResolveSolutionFolders();
  return writer.WriteSolutionFile(sln_name, err);
}
+
+bool VisualStudioWriter::WriteProjectFiles(const Target* target, Err* err) {
+ std::string project_name = target->label().name();
+ const char* project_config_platform = config_platform_;
+ if (!target->settings()->is_default()) {
+ project_name += "_" + target->toolchain()->label().name();
+ const Value* value =
+ target->settings()->base_config()->GetValue(variables::kCurrentCpu);
+ if (value != nullptr && value->string_value() == "x64")
+ project_config_platform = "x64";
+ else
+ project_config_platform = "Win32";
+ }
+
+ SourceFile target_file = GetTargetOutputDir(target).ResolveRelativeFile(
+ Value(nullptr, project_name + ".vcxproj"), err);
+ if (target_file.is_null())
+ return false;
+
+ base::FilePath vcxproj_path = build_settings_->GetFullPath(target_file);
+ std::string vcxproj_path_str = FilePathToUTF8(vcxproj_path);
+
+ projects_.push_back(new SolutionProject(
+ project_name, vcxproj_path_str,
+ MakeGuid(vcxproj_path_str, kGuidSeedProject),
+ FilePathToUTF8(build_settings_->GetFullPath(target->label().dir())),
+ project_config_platform));
+
+ std::stringstream vcxproj_string_out;
+ if (!WriteProjectFileContents(vcxproj_string_out, *projects_.back(), target,
+ err)) {
+ projects_.pop_back();
+ return false;
+ }
+
+ // Only write the content to the file if it's different. That is
+ // both a performance optimization and more importantly, prevents
+ // Visual Studio from reloading the projects.
+ if (!WriteFileIfChanged(vcxproj_path, vcxproj_string_out.str(), err))
+ return false;
+
+ base::FilePath filters_path = UTF8ToFilePath(vcxproj_path_str + ".filters");
+ std::stringstream filters_string_out;
+ WriteFiltersFileContents(filters_string_out, target);
+ return WriteFileIfChanged(filters_path, filters_string_out.str(), err);
+}
+
+bool VisualStudioWriter::WriteProjectFileContents(
+ std::ostream& out,
+ const SolutionProject& solution_project,
+ const Target* target,
+ Err* err) {
+ PathOutput path_output(GetTargetOutputDir(target),
+ build_settings_->root_path_utf8(),
+ EscapingMode::ESCAPE_NONE);
+
+ out << "<?xml version=\"1.0\" encoding=\"utf-8\"?>" << std::endl;
+ XmlElementWriter project(
+ out, "Project",
+ XmlAttributes("DefaultTargets", "Build")
+ .add("ToolsVersion", project_version_)
+ .add("xmlns", "http://schemas.microsoft.com/developer/msbuild/2003"));
+
+ {
+ std::unique_ptr<XmlElementWriter> configurations = project.SubElement(
+ "ItemGroup", XmlAttributes("Label", "ProjectConfigurations"));
+ std::unique_ptr<XmlElementWriter> project_config =
+ configurations->SubElement(
+ "ProjectConfiguration",
+ XmlAttributes("Include", std::string(kConfigurationName) + '|' +
+ solution_project.config_platform));
+ project_config->SubElement("Configuration")->Text(kConfigurationName);
+ project_config->SubElement("Platform")
+ ->Text(solution_project.config_platform);
+ }
+
+ {
+ std::unique_ptr<XmlElementWriter> globals =
+ project.SubElement("PropertyGroup", XmlAttributes("Label", "Globals"));
+ globals->SubElement("ProjectGuid")->Text(solution_project.guid);
+ globals->SubElement("Keyword")->Text("Win32Proj");
+ globals->SubElement("RootNamespace")->Text(target->label().name());
+ globals->SubElement("IgnoreWarnCompileDuplicatedFilename")->Text("true");
+ globals->SubElement("PreferredToolArchitecture")->Text("x64");
+ }
+
+ project.SubElement(
+ "Import", XmlAttributes("Project",
+ "$(VCTargetsPath)\\Microsoft.Cpp.Default.props"));
+
+ {
+ std::unique_ptr<XmlElementWriter> configuration = project.SubElement(
+ "PropertyGroup", XmlAttributes("Label", "Configuration"));
+ configuration->SubElement("CharacterSet")->Text("Unicode");
+ std::string configuration_type = GetConfigurationType(target, err);
+ if (configuration_type.empty())
+ return false;
+ configuration->SubElement("ConfigurationType")->Text(configuration_type);
+ }
+
+ {
+ std::unique_ptr<XmlElementWriter> locals =
+ project.SubElement("PropertyGroup", XmlAttributes("Label", "Locals"));
+ locals->SubElement("PlatformToolset")->Text(toolset_version_);
+ }
+
+ project.SubElement(
+ "Import",
+ XmlAttributes("Project", "$(VCTargetsPath)\\Microsoft.Cpp.props"));
+ project.SubElement(
+ "Import",
+ XmlAttributes("Project",
+ "$(VCTargetsPath)\\BuildCustomizations\\masm.props"));
+ project.SubElement("ImportGroup",
+ XmlAttributes("Label", "ExtensionSettings"));
+
+ {
+ std::unique_ptr<XmlElementWriter> property_sheets = project.SubElement(
+ "ImportGroup", XmlAttributes("Label", "PropertySheets"));
+ property_sheets->SubElement(
+ "Import",
+ XmlAttributes(
+ "Condition",
+ "exists('$(UserRootDir)\\Microsoft.Cpp.$(Platform).user.props')")
+ .add("Label", "LocalAppDataPlatform")
+ .add("Project",
+ "$(UserRootDir)\\Microsoft.Cpp.$(Platform).user.props"));
+ }
+
+ project.SubElement("PropertyGroup", XmlAttributes("Label", "UserMacros"));
+
+ {
+ std::unique_ptr<XmlElementWriter> properties =
+ project.SubElement("PropertyGroup");
+ {
+ std::unique_ptr<XmlElementWriter> out_dir =
+ properties->SubElement("OutDir");
+ path_output.WriteDir(out_dir->StartContent(false),
+ build_settings_->build_dir(),
+ PathOutput::DIR_NO_LAST_SLASH);
+ }
+ properties->SubElement("TargetName")->Text("$(ProjectName)");
+ if (target->output_type() != Target::GROUP) {
+ properties->SubElement("TargetPath")
+ ->Text("$(OutDir)\\$(ProjectName)$(TargetExt)");
+ }
+ }
+
+ {
+ std::unique_ptr<XmlElementWriter> item_definitions =
+ project.SubElement("ItemDefinitionGroup");
+ {
+ std::unique_ptr<XmlElementWriter> cl_compile =
+ item_definitions->SubElement("ClCompile");
+ {
+ std::unique_ptr<XmlElementWriter> include_dirs =
+ cl_compile->SubElement("AdditionalIncludeDirectories");
+ RecursiveTargetConfigToStream<SourceDir>(
+ target, &ConfigValues::include_dirs, IncludeDirWriter(path_output),
+ include_dirs->StartContent(false));
+ include_dirs->Text(windows_kits_include_dirs_ +
+ "$(VSInstallDir)\\VC\\atlmfc\\include;" +
+ "%(AdditionalIncludeDirectories)");
+ }
+ CompilerOptions options;
+ ParseCompilerOptions(target, &options);
+ if (!options.additional_options.empty()) {
+ cl_compile->SubElement("AdditionalOptions")
+ ->Text(options.additional_options + "%(AdditionalOptions)");
+ }
+ if (!options.buffer_security_check.empty()) {
+ cl_compile->SubElement("BufferSecurityCheck")
+ ->Text(options.buffer_security_check);
+ }
+ cl_compile->SubElement("CompileAsWinRT")->Text("false");
+ cl_compile->SubElement("DebugInformationFormat")->Text("ProgramDatabase");
+ if (!options.disable_specific_warnings.empty()) {
+ cl_compile->SubElement("DisableSpecificWarnings")
+ ->Text(options.disable_specific_warnings +
+ "%(DisableSpecificWarnings)");
+ }
+ cl_compile->SubElement("ExceptionHandling")->Text("false");
+ if (!options.forced_include_files.empty()) {
+ cl_compile->SubElement("ForcedIncludeFiles")
+ ->Text(options.forced_include_files);
+ }
+ cl_compile->SubElement("MinimalRebuild")->Text("false");
+ if (!options.optimization.empty())
+ cl_compile->SubElement("Optimization")->Text(options.optimization);
+ if (target->config_values().has_precompiled_headers()) {
+ cl_compile->SubElement("PrecompiledHeader")->Text("Use");
+ cl_compile->SubElement("PrecompiledHeaderFile")
+ ->Text(target->config_values().precompiled_header());
+ } else {
+ cl_compile->SubElement("PrecompiledHeader")->Text("NotUsing");
+ }
+ {
+ std::unique_ptr<XmlElementWriter> preprocessor_definitions =
+ cl_compile->SubElement("PreprocessorDefinitions");
+ RecursiveTargetConfigToStream<std::string>(
+ target, &ConfigValues::defines, SemicolonSeparatedWriter(),
+ preprocessor_definitions->StartContent(false));
+ preprocessor_definitions->Text("%(PreprocessorDefinitions)");
+ }
+ if (!options.runtime_library.empty())
+ cl_compile->SubElement("RuntimeLibrary")->Text(options.runtime_library);
+ if (!options.treat_warning_as_error.empty()) {
+ cl_compile->SubElement("TreatWarningAsError")
+ ->Text(options.treat_warning_as_error);
+ }
+ if (!options.warning_level.empty())
+ cl_compile->SubElement("WarningLevel")->Text(options.warning_level);
+ }
+
+ // We don't include resource compilation and link options as ninja files
+ // are used to generate real build.
+ }
+
+ {
+ std::unique_ptr<XmlElementWriter> group = project.SubElement("ItemGroup");
+ if (!target->config_values().precompiled_source().is_null()) {
+ group
+ ->SubElement(
+ "ClCompile", "Include",
+ SourceFileWriter(path_output,
+ target->config_values().precompiled_source()))
+ ->SubElement("PrecompiledHeader")
+ ->Text("Create");
+ }
+
+ for (const SourceFile& file : target->sources()) {
+ SourceFileType type = GetSourceFileType(file);
+ if (type == SOURCE_H || type == SOURCE_CPP || type == SOURCE_C) {
+ group->SubElement(type == SOURCE_H ? "ClInclude" : "ClCompile",
+ "Include", SourceFileWriter(path_output, file));
+ }
+ }
+ }
+
+ project.SubElement(
+ "Import",
+ XmlAttributes("Project", "$(VCTargetsPath)\\Microsoft.Cpp.targets"));
+ project.SubElement(
+ "Import",
+ XmlAttributes("Project",
+ "$(VCTargetsPath)\\BuildCustomizations\\masm.targets"));
+ project.SubElement("ImportGroup", XmlAttributes("Label", "ExtensionTargets"));
+
+ std::string ninja_target = GetNinjaTarget(target);
+
+ {
+ std::unique_ptr<XmlElementWriter> build =
+ project.SubElement("Target", XmlAttributes("Name", "Build"));
+ build->SubElement(
+ "Exec", XmlAttributes("Command",
+ "call ninja.exe -C $(OutDir) " + ninja_target));
+ }
+
+ {
+ std::unique_ptr<XmlElementWriter> clean =
+ project.SubElement("Target", XmlAttributes("Name", "Clean"));
+ clean->SubElement(
+ "Exec",
+ XmlAttributes("Command",
+ "call ninja.exe -C $(OutDir) -tclean " + ninja_target));
+ }
+
+ return true;
+}
+
+void VisualStudioWriter::WriteFiltersFileContents(std::ostream& out,
+ const Target* target) {
+ out << "<?xml version=\"1.0\" encoding=\"utf-8\"?>" << std::endl;
+ XmlElementWriter project(
+ out, "Project",
+ XmlAttributes("ToolsVersion", "4.0")
+ .add("xmlns", "http://schemas.microsoft.com/developer/msbuild/2003"));
+
+ std::ostringstream files_out;
+
+ {
+ std::unique_ptr<XmlElementWriter> filters_group =
+ project.SubElement("ItemGroup");
+ XmlElementWriter files_group(files_out, "ItemGroup", XmlAttributes(), 2);
+
+ // File paths are relative to vcxproj files which are generated to out dirs.
+ // Filters tree structure need to reflect source directories and be relative
+ // to target file. We need two path outputs then.
+ PathOutput file_path_output(GetTargetOutputDir(target),
+ build_settings_->root_path_utf8(),
+ EscapingMode::ESCAPE_NONE);
+ PathOutput filter_path_output(target->label().dir(),
+ build_settings_->root_path_utf8(),
+ EscapingMode::ESCAPE_NONE);
+
+ std::set<std::string> processed_filters;
+
+ for (const SourceFile& file : target->sources()) {
+ SourceFileType type = GetSourceFileType(file);
+ if (type == SOURCE_H || type == SOURCE_CPP || type == SOURCE_C) {
+ std::unique_ptr<XmlElementWriter> cl_item = files_group.SubElement(
+ type == SOURCE_H ? "ClInclude" : "ClCompile", "Include",
+ SourceFileWriter(file_path_output, file));
+
+ std::ostringstream target_relative_out;
+ filter_path_output.WriteFile(target_relative_out, file);
+ std::string target_relative_path = target_relative_out.str();
+ ConvertPathToSystem(&target_relative_path);
+ base::StringPiece filter_path = FindParentDir(&target_relative_path);
+
+ if (!filter_path.empty()) {
+ std::string filter_path_str = filter_path.as_string();
+ while (processed_filters.find(filter_path_str) ==
+ processed_filters.end()) {
+ auto it = processed_filters.insert(filter_path_str).first;
+ filters_group
+ ->SubElement("Filter",
+ XmlAttributes("Include", filter_path_str))
+ ->SubElement("UniqueIdentifier")
+ ->Text(MakeGuid(filter_path_str, kGuidSeedFilter));
+ filter_path_str = FindParentDir(&(*it)).as_string();
+ if (filter_path_str.empty())
+ break;
+ }
+ cl_item->SubElement("Filter")->Text(filter_path);
+ }
+ }
+ }
+ }
+
+ project.Text(files_out.str());
+}
+
+bool VisualStudioWriter::WriteSolutionFile(const std::string& sln_name,
+ Err* err) {
+ std::string name = sln_name.empty() ? "all" : sln_name;
+ SourceFile sln_file = build_settings_->build_dir().ResolveRelativeFile(
+ Value(nullptr, name + ".sln"), err);
+ if (sln_file.is_null())
+ return false;
+
+ base::FilePath sln_path = build_settings_->GetFullPath(sln_file);
+
+ std::stringstream string_out;
+ WriteSolutionFileContents(string_out, sln_path.DirName());
+
+ // Only write the content to the file if it's different. That is
+ // both a performance optimization and more importantly, prevents
+ // Visual Studio from reloading the projects.
+ return WriteFileIfChanged(sln_path, string_out.str(), err);
+}
+
+void VisualStudioWriter::WriteSolutionFileContents(
+ std::ostream& out,
+ const base::FilePath& solution_dir_path) {
+ out << "Microsoft Visual Studio Solution File, Format Version 12.00"
+ << std::endl;
+ out << "# " << version_string_ << std::endl;
+
+ SourceDir solution_dir(FilePathToUTF8(solution_dir_path));
+ for (const SolutionEntry* folder : folders_) {
+ out << "Project(\"" << kGuidTypeFolder << "\") = \"(" << folder->name
+ << ")\", \"" << RebasePath(folder->path, solution_dir) << "\", \""
+ << folder->guid << "\"" << std::endl;
+ out << "EndProject" << std::endl;
+ }
+
+ for (const SolutionEntry* project : projects_) {
+ out << "Project(\"" << kGuidTypeProject << "\") = \"" << project->name
+ << "\", \"" << RebasePath(project->path, solution_dir) << "\", \""
+ << project->guid << "\"" << std::endl;
+ out << "EndProject" << std::endl;
+ }
+
+ out << "Global" << std::endl;
+
+ out << "\tGlobalSection(SolutionConfigurationPlatforms) = preSolution"
+ << std::endl;
+ const std::string config_mode_prefix = std::string(kConfigurationName) + '|';
+ const std::string config_mode = config_mode_prefix + config_platform_;
+ out << "\t\t" << config_mode << " = " << config_mode << std::endl;
+ out << "\tEndGlobalSection" << std::endl;
+
+ out << "\tGlobalSection(ProjectConfigurationPlatforms) = postSolution"
+ << std::endl;
+ for (const SolutionProject* project : projects_) {
+ const std::string project_config_mode =
+ config_mode_prefix + project->config_platform;
+ out << "\t\t" << project->guid << '.' << config_mode
+ << ".ActiveCfg = " << project_config_mode << std::endl;
+ out << "\t\t" << project->guid << '.' << config_mode
+ << ".Build.0 = " << project_config_mode << std::endl;
+ }
+ out << "\tEndGlobalSection" << std::endl;
+
+ out << "\tGlobalSection(SolutionProperties) = preSolution" << std::endl;
+ out << "\t\tHideSolutionNode = FALSE" << std::endl;
+ out << "\tEndGlobalSection" << std::endl;
+
+ out << "\tGlobalSection(NestedProjects) = preSolution" << std::endl;
+ for (const SolutionEntry* folder : folders_) {
+ if (folder->parent_folder) {
+ out << "\t\t" << folder->guid << " = " << folder->parent_folder->guid
+ << std::endl;
+ }
+ }
+ for (const SolutionEntry* project : projects_) {
+ out << "\t\t" << project->guid << " = " << project->parent_folder->guid
+ << std::endl;
+ }
+ out << "\tEndGlobalSection" << std::endl;
+
+ out << "EndGlobal" << std::endl;
+}
+
+void VisualStudioWriter::ResolveSolutionFolders() {
+ root_folder_path_.clear();
+
+ // Get all project directories. Create solution folder for each directory.
+ std::map<base::StringPiece, SolutionEntry*> processed_paths;
+ for (SolutionProject* project : projects_) {
+ base::StringPiece folder_path = project->label_dir_path;
+ if (IsSlash(folder_path[folder_path.size() - 1]))
+ folder_path = folder_path.substr(0, folder_path.size() - 1);
+ auto it = processed_paths.find(folder_path);
+ if (it != processed_paths.end()) {
+ project->parent_folder = it->second;
+ } else {
+ std::string folder_path_str = folder_path.as_string();
+ SolutionEntry* folder = new SolutionEntry(
+ FindLastDirComponent(SourceDir(folder_path)).as_string(),
+ folder_path_str, MakeGuid(folder_path_str, kGuidSeedFolder));
+ folders_.push_back(folder);
+ project->parent_folder = folder;
+ processed_paths[folder_path] = folder;
+
+ if (root_folder_path_.empty()) {
+ root_folder_path_ = folder_path_str;
+ } else {
+ size_t common_prefix_len = 0;
+ size_t max_common_length =
+ std::min(root_folder_path_.size(), folder_path.size());
+ size_t i;
+ for (i = common_prefix_len; i < max_common_length; ++i) {
+ if (IsSlash(root_folder_path_[i]) && IsSlash(folder_path[i]))
+ common_prefix_len = i + 1;
+ else if (root_folder_path_[i] != folder_path[i])
+ break;
+ }
+ if (i == max_common_length &&
+ (i == folder_path.size() || IsSlash(folder_path[i])))
+ common_prefix_len = max_common_length;
+ if (common_prefix_len < root_folder_path_.size()) {
+ if (IsSlash(root_folder_path_[common_prefix_len - 1]))
+ --common_prefix_len;
+ root_folder_path_ = root_folder_path_.substr(0, common_prefix_len);
+ }
+ }
+ }
+ }
+
+ // Create also all parent folders up to |root_folder_path_|.
+ SolutionFolders additional_folders;
+ for (SolutionEntry* folder : folders_) {
+ if (folder->path == root_folder_path_)
+ continue;
+
+ base::StringPiece parent_path;
+ while ((parent_path = FindParentDir(&folder->path)) != root_folder_path_) {
+ auto it = processed_paths.find(parent_path);
+ if (it != processed_paths.end()) {
+ folder = it->second;
+ } else {
+ folder = new SolutionEntry(
+ FindLastDirComponent(SourceDir(parent_path)).as_string(),
+ parent_path.as_string(),
+ MakeGuid(parent_path.as_string(), kGuidSeedFolder));
+ additional_folders.push_back(folder);
+ processed_paths[parent_path] = folder;
+ }
+ }
+ }
+ folders_.insert(folders_.end(), additional_folders.begin(),
+ additional_folders.end());
+
+ // Sort folders by path.
+ std::sort(folders_.begin(), folders_.end(),
+ [](const SolutionEntry* a, const SolutionEntry* b) {
+ return a->path < b->path;
+ });
+
+ // Match subfolders with their parents. Since |folders_| are sorted by path we
+ // know that parent folder always precedes its children in vector.
+ SolutionFolders parents;
+ for (SolutionEntry* folder : folders_) {
+ while (!parents.empty()) {
+ if (base::StartsWith(folder->path, parents.back()->path,
+ base::CompareCase::SENSITIVE)) {
+ folder->parent_folder = parents.back();
+ break;
+ } else {
+ parents.pop_back();
+ }
+ }
+ parents.push_back(folder);
+ }
+}
+
+std::string VisualStudioWriter::GetNinjaTarget(const Target* target) {
+ std::ostringstream ninja_target_out;
+ DCHECK(!target->dependency_output_file().value().empty());
+ ninja_path_output_.WriteFile(ninja_target_out,
+ target->dependency_output_file());
+ return ninja_target_out.str();
+}
diff --git a/chromium/tools/gn/visual_studio_writer.h b/chromium/tools/gn/visual_studio_writer.h
new file mode 100644
index 00000000000..5b578ed3cba
--- /dev/null
+++ b/chromium/tools/gn/visual_studio_writer.h
@@ -0,0 +1,137 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_VISUAL_STUDIO_WRITER_H_
+#define TOOLS_GN_VISUAL_STUDIO_WRITER_H_
+
+#include <iosfwd>
+#include <string>
+#include <vector>
+
+#include "base/gtest_prod_util.h"
+#include "base/macros.h"
+#include "tools/gn/path_output.h"
+
+namespace base {
+class FilePath;
+}
+
+class Builder;
+class BuildSettings;
+class Err;
+class Target;
+
+class VisualStudioWriter {
+ public:
+ enum Version {
+ Vs2013 = 1, // Visual Studio 2013
+ Vs2015 // Visual Studio 2015
+ };
+
+ // Writes Visual Studio project and solution files. |sln_name| is the optional
+ // solution file name ("all" is used if not specified). |dir_filters| is
+ // optional semicolon-separated list of label patterns used to limit the set
+ // of generated projects. Only matching targets will be included to the
+ // solution. On failure will populate |err| and will return false.
+ static bool RunAndWriteFiles(const BuildSettings* build_settings,
+ Builder* builder,
+ Version version,
+ const std::string& sln_name,
+ const std::string& dir_filters,
+ Err* err);
+
+ private:
+ FRIEND_TEST_ALL_PREFIXES(VisualStudioWriterTest, ResolveSolutionFolders);
+ FRIEND_TEST_ALL_PREFIXES(VisualStudioWriterTest,
+ ResolveSolutionFolders_AbsPath);
+
+ // Solution project or folder.
+ struct SolutionEntry {
+ SolutionEntry(const std::string& name,
+ const std::string& path,
+ const std::string& guid);
+ virtual ~SolutionEntry();
+
+ // Entry name. For projects must be unique in the solution.
+ std::string name;
+ // Absolute project file or folder directory path.
+ std::string path;
+ // GUID-like string.
+ std::string guid;
+ // Pointer to parent folder. nullptr if entry has no parent.
+ SolutionEntry* parent_folder;
+ };
+
+ struct SolutionProject : public SolutionEntry {
+ SolutionProject(const std::string& name,
+ const std::string& path,
+ const std::string& guid,
+ const std::string& label_dir_path,
+ const std::string& config_platform);
+ ~SolutionProject() override;
+
+ // Absolute label dir path.
+ std::string label_dir_path;
+ // Configuration platform. May be different than solution config platform.
+ std::string config_platform;
+ };
+
+ using SolutionProjects = std::vector<SolutionProject*>;
+ using SolutionFolders = std::vector<SolutionEntry*>;
+
+ VisualStudioWriter(const BuildSettings* build_settings,
+ const char* config_platform,
+ Version version);
+ ~VisualStudioWriter();
+
+ bool WriteProjectFiles(const Target* target, Err* err);
+ bool WriteProjectFileContents(std::ostream& out,
+ const SolutionProject& solution_project,
+ const Target* target,
+ Err* err);
+ void WriteFiltersFileContents(std::ostream& out, const Target* target);
+ bool WriteSolutionFile(const std::string& sln_name, Err* err);
+ void WriteSolutionFileContents(std::ostream& out,
+ const base::FilePath& solution_dir_path);
+
+ // Resolves all solution folders (parent folders for projects) into |folders_|
+ // and updates |root_folder_dir_|. Also sets |parent_folder| for |projects_|.
+ void ResolveSolutionFolders();
+
+ std::string GetNinjaTarget(const Target* target);
+
+ const BuildSettings* build_settings_;
+
+ // Toolset version.
+ const char* toolset_version_;
+
+ // Project version.
+ const char* project_version_;
+
+ // Visual Studio version string.
+ const char* version_string_;
+
+ // Platform for solution configuration (Win32, x64). Some projects may be
+ // configured for different platform.
+ const char* config_platform_;
+
+ // All projects contained by solution.
+ SolutionProjects projects_;
+
+ // Absolute root solution folder path.
+ std::string root_folder_path_;
+
+ // Folders for all solution projects.
+ SolutionFolders folders_;
+
+ // Semicolon-separated Windows SDK include directories.
+ std::string windows_kits_include_dirs_;
+
+ // Path formatter for ninja targets.
+ PathOutput ninja_path_output_;
+
+ DISALLOW_COPY_AND_ASSIGN(VisualStudioWriter);
+};
+
+#endif // TOOLS_GN_VISUAL_STUDIO_WRITER_H_
diff --git a/chromium/tools/gn/visual_studio_writer_unittest.cc b/chromium/tools/gn/visual_studio_writer_unittest.cc
new file mode 100644
index 00000000000..d70117fd0c2
--- /dev/null
+++ b/chromium/tools/gn/visual_studio_writer_unittest.cc
@@ -0,0 +1,148 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/visual_studio_writer.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+#include "tools/gn/test_with_scope.h"
+#include "tools/gn/visual_studio_utils.h"
+
+namespace {
+
+class VisualStudioWriterTest : public testing::Test {
+ protected:
+ TestWithScope setup_;
+};
+
+std::string MakeTestPath(const std::string& path) {
+#if defined(OS_WIN)
+ return "C:" + path;
+#else
+ return path;
+#endif
+}
+
+} // namespace
+
+TEST_F(VisualStudioWriterTest, ResolveSolutionFolders) {
+ VisualStudioWriter writer(setup_.build_settings(), "Win32",
+ VisualStudioWriter::Version::Vs2015);
+
+ std::string path =
+ MakeTestPath("/foo/chromium/src/out/Debug/obj/base/base.vcxproj");
+ writer.projects_.push_back(new VisualStudioWriter::SolutionProject(
+ "base", path, MakeGuid(path, "project"),
+ MakeTestPath("/foo/chromium/src/base"), "Win32"));
+
+ path = MakeTestPath("/foo/chromium/src/out/Debug/obj/tools/gn/gn.vcxproj");
+ writer.projects_.push_back(new VisualStudioWriter::SolutionProject(
+ "gn", path, MakeGuid(path, "project"),
+ MakeTestPath("/foo/chromium/src/tools/gn"), "Win32"));
+
+ path = MakeTestPath("/foo/chromium/src/out/Debug/obj/chrome/chrome.vcxproj");
+ writer.projects_.push_back(new VisualStudioWriter::SolutionProject(
+ "chrome", path, MakeGuid(path, "project"),
+ MakeTestPath("/foo/chromium/src/chrome"), "Win32"));
+
+ path = MakeTestPath("/foo/chromium/src/out/Debug/obj/base/bar.vcxproj");
+ writer.projects_.push_back(new VisualStudioWriter::SolutionProject(
+ "bar", path, MakeGuid(path, "project"),
+ MakeTestPath("/foo/chromium/src/base"), "Win32"));
+
+ writer.ResolveSolutionFolders();
+
+ ASSERT_EQ(MakeTestPath("/foo/chromium/src"), writer.root_folder_path_);
+
+ ASSERT_EQ(4u, writer.folders_.size());
+
+ ASSERT_EQ("base", writer.folders_[0]->name);
+ ASSERT_EQ(MakeTestPath("/foo/chromium/src/base"), writer.folders_[0]->path);
+ ASSERT_EQ(nullptr, writer.folders_[0]->parent_folder);
+
+ ASSERT_EQ("chrome", writer.folders_[1]->name);
+ ASSERT_EQ(MakeTestPath("/foo/chromium/src/chrome"), writer.folders_[1]->path);
+ ASSERT_EQ(nullptr, writer.folders_[1]->parent_folder);
+
+ ASSERT_EQ("tools", writer.folders_[2]->name);
+ ASSERT_EQ(MakeTestPath("/foo/chromium/src/tools"), writer.folders_[2]->path);
+ ASSERT_EQ(nullptr, writer.folders_[2]->parent_folder);
+
+ ASSERT_EQ("gn", writer.folders_[3]->name);
+ ASSERT_EQ(MakeTestPath("/foo/chromium/src/tools/gn"),
+ writer.folders_[3]->path);
+ ASSERT_EQ(writer.folders_[2], writer.folders_[3]->parent_folder);
+
+ ASSERT_EQ(writer.folders_[0], writer.projects_[0]->parent_folder);
+ ASSERT_EQ(writer.folders_[3], writer.projects_[1]->parent_folder);
+ ASSERT_EQ(writer.folders_[1], writer.projects_[2]->parent_folder);
+ ASSERT_EQ(writer.folders_[0], writer.projects_[3]->parent_folder);
+}
+
+TEST_F(VisualStudioWriterTest, ResolveSolutionFolders_AbsPath) {
+ VisualStudioWriter writer(setup_.build_settings(), "Win32",
+ VisualStudioWriter::Version::Vs2015);
+
+ std::string path =
+ MakeTestPath("/foo/chromium/src/out/Debug/obj/base/base.vcxproj");
+ writer.projects_.push_back(new VisualStudioWriter::SolutionProject(
+ "base", path, MakeGuid(path, "project"),
+ MakeTestPath("/foo/chromium/src/base"), "Win32"));
+
+ path = MakeTestPath("/foo/chromium/src/out/Debug/obj/tools/gn/gn.vcxproj");
+ writer.projects_.push_back(new VisualStudioWriter::SolutionProject(
+ "gn", path, MakeGuid(path, "project"),
+ MakeTestPath("/foo/chromium/src/tools/gn"), "Win32"));
+
+ path = MakeTestPath(
+ "/foo/chromium/src/out/Debug/obj/ABS_PATH/C/foo/bar/bar.vcxproj");
+ writer.projects_.push_back(new VisualStudioWriter::SolutionProject(
+ "bar", path, MakeGuid(path, "project"), MakeTestPath("/foo/bar"),
+ "Win32"));
+
+ path = MakeTestPath(
+ "/foo/chromium/src/out/Debug/obj/ABS_PATH/C/foo/bar/baz/baz.vcxproj");
+ writer.projects_.push_back(new VisualStudioWriter::SolutionProject(
+ "baz", path, MakeGuid(path, "project"), MakeTestPath("/foo/bar/baz"),
+ "Win32"));
+
+ writer.ResolveSolutionFolders();
+
+ ASSERT_EQ(MakeTestPath("/foo"), writer.root_folder_path_);
+
+ ASSERT_EQ(7u, writer.folders_.size());
+
+ ASSERT_EQ("bar", writer.folders_[0]->name);
+ ASSERT_EQ(MakeTestPath("/foo/bar"), writer.folders_[0]->path);
+ ASSERT_EQ(nullptr, writer.folders_[0]->parent_folder);
+
+ ASSERT_EQ("baz", writer.folders_[1]->name);
+ ASSERT_EQ(MakeTestPath("/foo/bar/baz"), writer.folders_[1]->path);
+ ASSERT_EQ(writer.folders_[0], writer.folders_[1]->parent_folder);
+
+ ASSERT_EQ("chromium", writer.folders_[2]->name);
+ ASSERT_EQ(MakeTestPath("/foo/chromium"), writer.folders_[2]->path);
+ ASSERT_EQ(nullptr, writer.folders_[2]->parent_folder);
+
+ ASSERT_EQ("src", writer.folders_[3]->name);
+ ASSERT_EQ(MakeTestPath("/foo/chromium/src"), writer.folders_[3]->path);
+ ASSERT_EQ(writer.folders_[2], writer.folders_[3]->parent_folder);
+
+ ASSERT_EQ("base", writer.folders_[4]->name);
+ ASSERT_EQ(MakeTestPath("/foo/chromium/src/base"), writer.folders_[4]->path);
+ ASSERT_EQ(writer.folders_[3], writer.folders_[4]->parent_folder);
+
+ ASSERT_EQ("tools", writer.folders_[5]->name);
+ ASSERT_EQ(MakeTestPath("/foo/chromium/src/tools"), writer.folders_[5]->path);
+ ASSERT_EQ(writer.folders_[3], writer.folders_[5]->parent_folder);
+
+ ASSERT_EQ("gn", writer.folders_[6]->name);
+ ASSERT_EQ(MakeTestPath("/foo/chromium/src/tools/gn"),
+ writer.folders_[6]->path);
+ ASSERT_EQ(writer.folders_[5], writer.folders_[6]->parent_folder);
+
+ ASSERT_EQ(writer.folders_[4], writer.projects_[0]->parent_folder);
+ ASSERT_EQ(writer.folders_[6], writer.projects_[1]->parent_folder);
+ ASSERT_EQ(writer.folders_[0], writer.projects_[2]->parent_folder);
+ ASSERT_EQ(writer.folders_[1], writer.projects_[3]->parent_folder);
+}
diff --git a/chromium/tools/gn/xml_element_writer.cc b/chromium/tools/gn/xml_element_writer.cc
new file mode 100644
index 00000000000..fcf34b283cc
--- /dev/null
+++ b/chromium/tools/gn/xml_element_writer.cc
@@ -0,0 +1,83 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/xml_element_writer.h"
+
+#include "base/memory/ptr_util.h"
+
+XmlAttributes::XmlAttributes() {}
+
+XmlAttributes::XmlAttributes(const base::StringPiece& attr_key,
+ const base::StringPiece& attr_value) {
+ add(attr_key, attr_value);
+}
+
+XmlAttributes& XmlAttributes::add(const base::StringPiece& attr_key,
+ const base::StringPiece& attr_value) {
+ push_back(std::make_pair(attr_key, attr_value));
+ return *this;
+}
+
+XmlElementWriter::XmlElementWriter(std::ostream& out,
+ const std::string& tag,
+ const XmlAttributes& attributes)
+ : XmlElementWriter(out, tag, attributes, 0) {}
+
+XmlElementWriter::XmlElementWriter(std::ostream& out,
+ const std::string& tag,
+ const XmlAttributes& attributes,
+ int indent)
+ : out_(out),
+ tag_(tag),
+ indent_(indent),
+ opening_tag_finished_(false),
+ one_line_(true) {
+ out << std::string(indent, ' ') << '<' << tag;
+ for (auto attribute : attributes)
+ out << ' ' << attribute.first << "=\"" << attribute.second << '"';
+}
+
+XmlElementWriter::~XmlElementWriter() {
+ if (!opening_tag_finished_) {
+ // The XML spec does not require a space before the closing slash. However,
+ // Eclipse is unable to parse XML settings files if there is no space.
+ out_ << " />" << std::endl;
+ } else {
+ if (!one_line_)
+ out_ << std::string(indent_, ' ');
+ out_ << "</" << tag_ << '>' << std::endl;
+ }
+}
+
+void XmlElementWriter::Text(const base::StringPiece& content) {
+ StartContent(false);
+ out_ << content;
+}
+
+std::unique_ptr<XmlElementWriter> XmlElementWriter::SubElement(
+ const std::string& tag) {
+ return SubElement(tag, XmlAttributes());
+}
+
+std::unique_ptr<XmlElementWriter> XmlElementWriter::SubElement(
+ const std::string& tag,
+ const XmlAttributes& attributes) {
+ StartContent(true);
+ return base::WrapUnique(
+ new XmlElementWriter(out_, tag, attributes, indent_ + 2));
+}
+
+std::ostream& XmlElementWriter::StartContent(bool start_new_line) {
+ if (!opening_tag_finished_) {
+ out_ << '>';
+ opening_tag_finished_ = true;
+
+ if (start_new_line && one_line_) {
+ out_ << std::endl;
+ one_line_ = false;
+ }
+ }
+
+ return out_;
+}
diff --git a/chromium/tools/gn/xml_element_writer.h b/chromium/tools/gn/xml_element_writer.h
new file mode 100644
index 00000000000..8a83df0a9a5
--- /dev/null
+++ b/chromium/tools/gn/xml_element_writer.h
@@ -0,0 +1,123 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_GN_XML_ELEMENT_WRITER_H_
+#define TOOLS_GN_XML_ELEMENT_WRITER_H_
+
+#include <iosfwd>
+#include <memory>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "base/macros.h"
+#include "base/memory/ptr_util.h"
+#include "base/strings/string_piece.h"
+
+// Vector of XML attribute key-value pairs.
+class XmlAttributes
+ : public std::vector<std::pair<base::StringPiece, base::StringPiece>> {
+ public:
+ XmlAttributes();
+ XmlAttributes(const base::StringPiece& attr_key,
+ const base::StringPiece& attr_value);
+
+ XmlAttributes& add(const base::StringPiece& attr_key,
+ const base::StringPiece& attr_value);
+};
+
+// Helper class for writing XML elements. New XML element is started in
+// XmlElementWriter constructor and ended in its destructor. XmlElementWriter
+// handles XML file formatting in order to produce human-readable document.
+class XmlElementWriter {
+ public:
+ // Starts new XML element. This constructor adds no indentation and is
+ // designed for XML root element.
+ XmlElementWriter(std::ostream& out,
+ const std::string& tag,
+ const XmlAttributes& attributes);
+ // Starts new XML element with specified indentation.
+ XmlElementWriter(std::ostream& out,
+ const std::string& tag,
+ const XmlAttributes& attributes,
+ int indent);
+ // Starts new XML element with specified indentation. Specialized constructor
+ // that allows writting XML element with single attribute without copying
+ // attribute value.
+ template <class Writer>
+ XmlElementWriter(std::ostream& out,
+ const std::string& tag,
+ const std::string& attribute_name,
+ const Writer& attribute_value_writer,
+ int indent);
+ // Ends XML element. All sub-elements should be ended at this point.
+ ~XmlElementWriter();
+
+ // Writes arbitrary XML element text.
+ void Text(const base::StringPiece& content);
+
+ // Starts new XML sub-element. Caller must ensure that parent element outlives
+ // its children.
+ std::unique_ptr<XmlElementWriter> SubElement(const std::string& tag);
+ std::unique_ptr<XmlElementWriter> SubElement(const std::string& tag,
+ const XmlAttributes& attributes);
+ template <class Writer>
+ std::unique_ptr<XmlElementWriter> SubElement(
+ const std::string& tag,
+ const std::string& attribute_name,
+ const Writer& attribute_value_writer);
+
+ // Finishes opening tag if it isn't finished yet and optionally starts new
+ // document line. Returns the stream where XML element content can be written.
+ // This is an alternative to Text() and SubElement() methods.
+ std::ostream& StartContent(bool start_new_line);
+
+ private:
+ // Output stream. XmlElementWriter objects for XML element and its
+ // sub-elements share the same output stream.
+ std::ostream& out_;
+
+ // XML element tag name.
+ std::string tag_;
+
+ // XML element indentation in the document.
+ int indent_;
+
+ // Flag indicating if opening tag is finished with '>' character already.
+ bool opening_tag_finished_;
+
+ // Flag indicating if XML element should be written in one document line.
+ bool one_line_;
+
+ DISALLOW_COPY_AND_ASSIGN(XmlElementWriter);
+};
+
+template <class Writer>
+XmlElementWriter::XmlElementWriter(std::ostream& out,
+ const std::string& tag,
+ const std::string& attribute_name,
+ const Writer& attribute_value_writer,
+ int indent)
+ : out_(out),
+ tag_(tag),
+ indent_(indent),
+ opening_tag_finished_(false),
+ one_line_(true) {
+ out << std::string(indent, ' ') << '<' << tag;
+ out << ' ' << attribute_name << "=\"";
+ attribute_value_writer(out);
+ out << '\"';
+}
+
+template <class Writer>
+std::unique_ptr<XmlElementWriter> XmlElementWriter::SubElement(
+ const std::string& tag,
+ const std::string& attribute_name,
+ const Writer& attribute_value_writer) {
+ StartContent(true);
+ return base::WrapUnique(new XmlElementWriter(
+ out_, tag, attribute_name, attribute_value_writer, indent_ + 2));
+}
+
+#endif // TOOLS_GN_XML_ELEMENT_WRITER_H_
diff --git a/chromium/tools/gn/xml_element_writer_unittest.cc b/chromium/tools/gn/xml_element_writer_unittest.cc
new file mode 100644
index 00000000000..93dfd475851
--- /dev/null
+++ b/chromium/tools/gn/xml_element_writer_unittest.cc
@@ -0,0 +1,86 @@
+// Copyright 2016 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/gn/xml_element_writer.h"
+
+#include <sstream>
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace {
+
+class MockValueWriter {
+ public:
+ explicit MockValueWriter(const std::string& value) : value_(value) {}
+ void operator()(std::ostream& out) const { out << value_; }
+
+ private:
+ std::string value_;
+};
+
+} // namespace
+
+TEST(XmlElementWriter, EmptyElement) {
+ std::ostringstream out;
+ { XmlElementWriter writer(out, "foo", XmlAttributes()); }
+ EXPECT_EQ("<foo />\n", out.str());
+
+ std::ostringstream out_attr;
+ {
+ XmlElementWriter writer(out_attr, "foo",
+ XmlAttributes("bar", "abc").add("baz", "123"));
+ }
+ EXPECT_EQ("<foo bar=\"abc\" baz=\"123\" />\n", out_attr.str());
+
+ std::ostringstream out_indent;
+ {
+ XmlElementWriter writer(out_indent, "foo", XmlAttributes("bar", "baz"), 2);
+ }
+ EXPECT_EQ(" <foo bar=\"baz\" />\n", out_indent.str());
+
+ std::ostringstream out_writer;
+ {
+ XmlElementWriter writer(out_writer, "foo", "bar", MockValueWriter("baz"),
+ 2);
+ }
+ EXPECT_EQ(" <foo bar=\"baz\" />\n", out_writer.str());
+}
+
+TEST(XmlElementWriter, ElementWithText) {
+ std::ostringstream out;
+ {
+ XmlElementWriter writer(out, "foo", XmlAttributes("bar", "baz"));
+ writer.Text("Hello world!");
+ }
+ EXPECT_EQ("<foo bar=\"baz\">Hello world!</foo>\n", out.str());
+}
+
+TEST(XmlElementWriter, SubElements) {
+ std::ostringstream out;
+ {
+ XmlElementWriter writer(out, "root", XmlAttributes("aaa", "000"));
+ writer.SubElement("foo", XmlAttributes());
+ writer.SubElement("bar", XmlAttributes("bbb", "111"))->Text("hello");
+ writer.SubElement("baz", "ccc", MockValueWriter("222"))
+ ->SubElement("grandchild");
+ }
+ std::string expected =
+ "<root aaa=\"000\">\n"
+ " <foo />\n"
+ " <bar bbb=\"111\">hello</bar>\n"
+ " <baz ccc=\"222\">\n"
+ " <grandchild />\n"
+ " </baz>\n"
+ "</root>\n";
+ EXPECT_EQ(expected, out.str());
+}
+
+TEST(XmlElementWriter, StartContent) {
+ std::ostringstream out;
+ {
+ XmlElementWriter writer(out, "foo", XmlAttributes("bar", "baz"));
+ writer.StartContent(false) << "Hello world!";
+ }
+ EXPECT_EQ("<foo bar=\"baz\">Hello world!</foo>\n", out.str());
+}
diff --git a/chromium/tools/grit/OWNERS b/chromium/tools/grit/OWNERS
index 765485d2e99..5a9431a7735 100644
--- a/chromium/tools/grit/OWNERS
+++ b/chromium/tools/grit/OWNERS
@@ -1,5 +1,4 @@
flackr@chromium.org
-mnissler@chromium.org
newt@chromium.org
thakis@chromium.org
thestig@chromium.org
diff --git a/chromium/tools/grit/grit_rule.gni b/chromium/tools/grit/grit_rule.gni
index f60d8fa1076..90c0b00abbf 100644
--- a/chromium/tools/grit/grit_rule.gni
+++ b/chromium/tools/grit/grit_rule.gni
@@ -159,6 +159,13 @@ if (use_nss_certs) {
]
}
+if (use_nss_verifier) {
+ grit_defines += [
+ "-D",
+ "use_nss_verifier",
+ ]
+}
+
if (use_ozone) {
grit_defines += [
"-D",
@@ -175,8 +182,6 @@ if (enable_image_loader_extension) {
if (is_android) {
grit_defines += [
- "-t",
- "android",
"-E",
"ANDROID_JAVA_TAGGED_ONLY=true",
]
@@ -189,11 +194,38 @@ if (is_mac || is_ios) {
]
}
-if (is_ios) {
- grit_defines += [
- "-t",
- "ios",
- ]
+# When cross-compiling, explicitly pass the target system to grit.
+if (current_toolchain != host_toolchain) {
+ if (is_android) {
+ grit_defines += [
+ "-t",
+ "android",
+ ]
+ }
+ if (is_ios) {
+ grit_defines += [
+ "-t",
+ "ios",
+ ]
+ }
+ if (is_linux) {
+ grit_defines += [
+ "-t",
+ "linux2",
+ ]
+ }
+ if (is_mac) {
+ grit_defines += [
+ "-t",
+ "darwin",
+ ]
+ }
+ if (is_win) {
+ grit_defines += [
+ "-t",
+ "win32",
+ ]
+ }
}
if (enable_extensions) {
diff --git a/chromium/tools/gritsettings/resource_ids b/chromium/tools/gritsettings/resource_ids
index c1f41e024e9..b790c724ce5 100644
--- a/chromium/tools/gritsettings/resource_ids
+++ b/chromium/tools/gritsettings/resource_ids
@@ -77,8 +77,7 @@
"messages": [9000],
},
"chrome/app/resources/locale_settings.grd": {
- "includes": [9500],
- "messages": [10000],
+ "messages": [9500],
},
# These each start with the same resource id because we only use one
# file for each build (chromiumos, google_chromeos, linux, mac, or win).
@@ -134,166 +133,164 @@
"structures": [20400],
"messages": [20410],
},
- "chrome/browser/resources/signin_internals_resources.grd": {
- "includes": [21400],
- },
"chrome/browser/resources/invalidations_resources.grd": {
- "includes": [21600],
+ "includes": [21400],
},
# This file is generated during the build.
"<(SHARED_INTERMEDIATE_DIR)/devtools/devtools_resources.grd": {
- "includes": [21650],
+ "includes": [21450],
},
"devtools_resources.grd": {
- "includes": [21650],
+ "includes": [21450],
},
"chrome/browser/resources/options_resources.grd": {
- "includes": [22650],
- "structures": [22850],
+ "includes": [22450],
+ "structures": [22650],
},
"chrome/browser/resources/options_test_resources.grd": {
- "includes": [23050],
- "structures": [23090],
+ "includes": [22850],
+ "structures": [22890],
},
"chrome/test/data/webui_test_resources.grd": {
- "includes": [23130],
- "structures": [23140],
+ "includes": [22930],
+ "structures": [22940],
},
"cloud_print/virtual_driver/win/install/virtual_driver_setup_resources.grd": {
- "messages": [23150],
- "includes": [23200],
+ "messages": [22950],
+ "includes": [23000],
},
"cloud_print/service/win/service_resources.grd": {
- "messages": [23250],
- "includes": [23350],
- "structures": [23400],
+ "messages": [23050],
+ "includes": [23150],
+ "structures": [23200],
},
"cloud_print/gcp20/prototype/gcp20_device.grd": {
- "messages": [23450],
- "includes": [23480],
- "structures": [23490],
+ "messages": [23250],
+ "includes": [23280],
+ "structures": [23290],
},
"chrome/browser/resources/quota_internals_resources.grd": {
- "includes": [23650],
+ "includes": [23250],
},
"content/content_resources.grd": {
- "includes": [24150],
+ "includes": [23950],
},
"content/shell/shell_resources.grd": {
- "includes": [24650],
+ "includes": [24450],
},
# This file is generated during the build.
"<(SHARED_INTERMEDIATE_DIR)/content/browser/tracing/tracing_resources.grd": {
- "includes": [24900],
+ "includes": [24600],
},
# iOS resources overlap with android_webview, ash, chromeos and extensions_api,
# as these are not used on iOS.
- "ios/chrome/app/strings/ios_locale_settings.grd": {
- "messages": [25150],
- },
"ios/chrome/app/strings/ios_strings.grd": {
- "messages": [25155],
+ "messages": [24850],
},
# Chromium strings and Google Chrome strings must start at the same id.
# We only use one file depending on whether we're building Chromium or
# Google Chrome.
"ios/chrome/app/strings/ios_chromium_strings.grd": {
- "messages": [26055],
+ "messages": [25755],
},
"ios/chrome/app/strings/ios_google_chrome_strings.grd": {
- "messages": [26055],
+ "messages": [25755],
},
"ios/chrome/app/resources/ios_resources.grd": {
- "structures": [26150],
- "includes": [26160],
+ "structures": [25950],
+ "includes": [26060],
},
"ios/chrome/app/theme/ios_theme_resources.grd": {
- "structures": [26170],
+ "structures": [25870],
+ },
+ "ios/chrome/share_extension/strings/ios_share_extension_strings.grd": {
+ "messages": [26325],
},
"ios/chrome/today_extension/strings/ios_today_extension_strings.grd": {
- "messages": [26635],
+ "messages": [26335],
},
"ash/ash_strings.grd": {
- "messages": [25150],
+ "messages": [24850],
},
"android_webview/ui/aw_resources.grd": {
- "includes": [25150],
+ "includes": [24850],
},
"android_webview/ui/aw_strings.grd": {
- "messages": [25250],
+ "messages": [24950],
},
"ui/chromeos/resources/ui_chromeos_resources.grd": {
- "structures": [25350],
+ "structures": [25150],
},
"ui/chromeos/ui_chromeos_strings.grd": {
- "messages": [25450],
+ "messages": [25250],
},
"chrome/common/extensions_api_resources.grd": {
- "includes": [25550],
+ "includes": [25350],
},
"extensions/extensions_resources.grd": {
- "includes": [25750],
+ "includes": [25550],
},
"extensions/browser/resources/extensions_browser_resources.grd": {
- "structures": [25950],
+ "structures": [25750],
},
"extensions/renderer/resources/extensions_renderer_resources.grd": {
- "includes": [26000],
- "structures": [26100],
+ "includes": [25800],
+ "structures": [25900],
},
"extensions/extensions_strings.grd": {
- "messages": [26150],
+ "messages": [25950],
},
"extensions/shell/app_shell_resources.grd": {
- "includes": [26550],
- },
- "chrome/browser/resources/memory_internals_resources.grd": {
- "includes": [26650],
+ "includes": [26350],
},
"chrome/browser/resources/password_manager_internals_resources.grd": {
- "includes": [26850],
+ "includes": [26650],
},
"device/bluetooth/bluetooth_strings.grd": {
- "messages": [27150],
+ "messages": [26950],
},
"ui/file_manager/file_manager_resources.grd": {
- "includes": [27260],
+ "includes": [27060],
},
"components/chrome_apps/chrome_apps_resources.grd": {
- "includes": [27440],
+ "includes": [27240],
},
"ui/login/login_resources.grd": {
- "includes": [27460],
+ "includes": [27260],
},
"chrome/browser/resources/translate_internals_resources.grd": {
- "includes": [27660],
+ "includes": [27270],
},
"chrome/browser/resources/sync_file_system_internals_resources.grd": {
- "includes": [28160],
+ "includes": [27280],
},
"chrome/app/address_input_strings.grd": {
- "messages": [28260],
+ "messages": [27380],
},
"remoting/resources/remoting_strings.grd": {
- "messages": [28710],
+ "messages": [27830],
+ },
+ "components/components_locale_settings.grd": {
+ "includes": [28280],
+ "messages": [28285],
},
"components/components_strings.grd": {
- "messages": [29160],
+ "messages": [28290],
},
# Chromium strings and Google Chrome strings must start at the same id.
# We only use one file depending on whether we're building Chromium or
# Google Chrome.
"components/components_chromium_strings.grd": {
- "messages": [30150],
+ "messages": [29550],
},
"components/components_google_chrome_strings.grd": {
- "messages": [30150],
+ "messages": [29550],
},
"components/resources/components_resources.grd": {
- "includes": [30175],
+ "includes": [29600],
},
"components/resources/components_scaled_resources.grd": {
- "structures": [30345],
+ "structures": [30150],
},
"third_party/WebKit/public/blink_resources.grd": {
"includes": [30370],
@@ -305,6 +302,9 @@
"chrome/browser/resources/settings/settings_resources.grd": {
"structures": [30920],
},
+ "headless/lib/resources/headless_lib_resources.grd": {
+ "includes": [30940],
+ },
# Resource ids starting at 31000 are reserved for projects built on Chromium.
}
diff --git a/chromium/tools/gritsettings/translation_expectations.pyl b/chromium/tools/gritsettings/translation_expectations.pyl
index bb37b0c74b4..45ee3cc7350 100644
--- a/chromium/tools/gritsettings/translation_expectations.pyl
+++ b/chromium/tools/gritsettings/translation_expectations.pyl
@@ -31,6 +31,7 @@
"ios/chrome/app/strings/ios_chromium_strings.grd",
"ios/chrome/app/strings/ios_google_chrome_strings.grd",
"ios/chrome/app/strings/ios_strings.grd",
+ "ios/chrome/share_extension/strings/ios_share_extension_strings.grd",
"ios/chrome/today_extension/strings/ios_today_extension_strings.grd",
"remoting/resources/remoting_strings.grd",
"ui/accessibility/extensions/strings/accessibility_extensions_strings.grd",
@@ -47,7 +48,7 @@
],
"files": [
"android_webview/java/strings/android_webview_strings.grd",
- "blimp/client/android/java/strings/android_blimp_strings.grd",
+ "blimp/client/app/android/java/strings/android_blimp_strings.grd",
"chrome/android/java/strings/android_chrome_strings.grd",
"content/public/android/java/strings/android_content_strings.grd",
"ui/android/java/strings/android_ui_strings.grd",
@@ -67,9 +68,7 @@
"chrome/app/settings_chromium_strings.grd": "Work in progress; to be localized later in development (late 2015)",
"chrome/app/settings_google_chrome_strings.grd": "Work in progress; to be localized later in development (late 2015)",
"chromecast/app/resources/chromecast_settings.grd": "Not UI strings; localized separately",
- "cloud_print/service/win/service_resources.grd": "Separate release process",
- "cloud_print/virtual_driver/win/install/virtual_driver_setup_resources.grd": "Separate release process",
- "ios/chrome/app/strings/ios_locale_settings.grd": "Not UI strings; localized separately",
+ "components/components_locale_settings.grd": "Not UI strings; localized separately",
"tools/grit/grit/testdata/buildinfo.grd": "Test data",
"tools/grit/grit/testdata/chrome/app/generated_resources.grd": "Test data",
"tools/grit/grit/testdata/substitute.grd": "Test data",
diff --git a/chromium/tools/gyp/AUTHORS b/chromium/tools/gyp/AUTHORS
index fecf84a1c4d..727df6d30fb 100644
--- a/chromium/tools/gyp/AUTHORS
+++ b/chromium/tools/gyp/AUTHORS
@@ -10,3 +10,4 @@ Ryan Norton <rnorton10@gmail.com>
David J. Sankel <david@sankelsoftware.com>
Eric N. Vander Weele <ericvw@gmail.com>
Tom Freudenberg <th.freudenberg@gmail.com>
+Julien Brianceau <jbriance@cisco.com>
diff --git a/chromium/tools/gyp/pylib/gyp/MSVSUtil.py b/chromium/tools/gyp/pylib/gyp/MSVSUtil.py
index 0b32e911807..f5e0c1d8b76 100644
--- a/chromium/tools/gyp/pylib/gyp/MSVSUtil.py
+++ b/chromium/tools/gyp/pylib/gyp/MSVSUtil.py
@@ -110,7 +110,7 @@ def ShardTargets(target_list, target_dicts):
else:
new_target_dicts[t] = target_dicts[t]
# Shard dependencies.
- for t in new_target_dicts:
+ for t in sorted(new_target_dicts):
for deptype in ('dependencies', 'dependencies_original'):
dependencies = copy.copy(new_target_dicts[t].get(deptype, []))
new_dependencies = []
diff --git a/chromium/tools/gyp/pylib/gyp/MSVSVersion.py b/chromium/tools/gyp/pylib/gyp/MSVSVersion.py
index d9bfa684fa3..edaf6eed001 100644
--- a/chromium/tools/gyp/pylib/gyp/MSVSVersion.py
+++ b/chromium/tools/gyp/pylib/gyp/MSVSVersion.py
@@ -68,17 +68,19 @@ class VisualStudioVersion(object):
of a user override."""
return self.default_toolset
- def SetupScript(self, target_arch):
+ def _SetupScriptInternal(self, target_arch):
"""Returns a command (with arguments) to be used to set up the
environment."""
- # Check if we are running in the SDK command line environment and use
- # the setup script from the SDK if so. |target_arch| should be either
- # 'x86' or 'x64'.
+ # If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the
+ # depot_tools build tools and should run SetEnv.Cmd to set up the
+ # environment. The check for WindowsSDKDir alone is not sufficient because
+ # this is set by running vcvarsall.bat.
assert target_arch in ('x86', 'x64')
sdk_dir = os.environ.get('WindowsSDKDir')
- if self.sdk_based and sdk_dir:
- return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
- '/' + target_arch]
+ if sdk_dir:
+ setup_path = os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd'))
+ if self.sdk_based and sdk_dir and os.path.exists(setup_path):
+ return [setup_path, '/' + target_arch]
else:
# We don't use VC/vcvarsall.bat for x86 because vcvarsall calls
# vcvars32, which it can only find if VS??COMNTOOLS is set, which it
@@ -106,6 +108,14 @@ class VisualStudioVersion(object):
return [os.path.normpath(
os.path.join(self.path, 'VC/vcvarsall.bat')), arg]
+ def SetupScript(self, target_arch):
+ script_data = self._SetupScriptInternal(target_arch)
+ script_path = script_data[0]
+ if not os.path.exists(script_path):
+ raise Exception('%s is missing - make sure VC++ tools are installed.' %
+ script_path)
+ return script_data
+
def _RegistryQueryBase(sysdir, key, value):
"""Use reg.exe to read a particular key.
diff --git a/chromium/tools/gyp/pylib/gyp/common.py b/chromium/tools/gyp/pylib/gyp/common.py
index 256e3f3a6b2..a1e1db5f123 100644
--- a/chromium/tools/gyp/pylib/gyp/common.py
+++ b/chromium/tools/gyp/pylib/gyp/common.py
@@ -433,7 +433,7 @@ def GetFlavor(params):
return 'linux'
-def CopyTool(flavor, out_path):
+def CopyTool(flavor, out_path, generator_flags={}):
"""Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
to |out_path|."""
# aix and solaris just need flock emulation. mac and win use more complicated
@@ -453,11 +453,18 @@ def CopyTool(flavor, out_path):
with open(source_path) as source_file:
source = source_file.readlines()
+ # Set custom header flags.
+ header = '# Generated by gyp. Do not edit.\n'
+ mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None)
+ if flavor == 'mac' and mac_toolchain_dir:
+ header += "import os;\nos.environ['DEVELOPER_DIR']='%s'\n" \
+ % mac_toolchain_dir
+
# Add header and write it out.
tool_path = os.path.join(out_path, 'gyp-%s-tool' % prefix)
with open(tool_path, 'w') as tool_file:
tool_file.write(
- ''.join([source[0], '# Generated by gyp. Do not edit.\n'] + source[1:]))
+ ''.join([source[0], header] + source[1:]))
# Make file executable.
os.chmod(tool_path, 0755)
diff --git a/chromium/tools/gyp/pylib/gyp/generator/cmake.py b/chromium/tools/gyp/pylib/gyp/generator/cmake.py
index 17f5e6396c6..a2b96291aa5 100644
--- a/chromium/tools/gyp/pylib/gyp/generator/cmake.py
+++ b/chromium/tools/gyp/pylib/gyp/generator/cmake.py
@@ -34,6 +34,7 @@ import signal
import string
import subprocess
import gyp.common
+import gyp.xcode_emulation
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
@@ -608,8 +609,8 @@ class CMakeNamer(object):
def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
- options, generator_flags, all_qualified_targets, output):
-
+ options, generator_flags, all_qualified_targets, flavor,
+ output):
# The make generator does this always.
# TODO: It would be nice to be able to tell CMake all dependencies.
circular_libs = generator_flags.get('circular', True)
@@ -633,6 +634,10 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
spec = target_dicts.get(qualified_target, {})
config = spec.get('configurations', {}).get(config_to_use, {})
+ xcode_settings = None
+ if flavor == 'mac':
+ xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+
target_name = spec.get('target_name', '<missing target name>')
target_type = spec.get('type', '<missing target type>')
target_toolset = spec.get('toolset')
@@ -904,10 +909,10 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
defines = config.get('defines')
if defines is not None:
SetTargetProperty(output,
- cmake_target_name,
- 'COMPILE_DEFINITIONS',
- defines,
- ';')
+ cmake_target_name,
+ 'COMPILE_DEFINITIONS',
+ defines,
+ ';')
# Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
# CMake currently does not have target C and CXX flags.
@@ -927,6 +932,13 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
cflags = config.get('cflags', [])
cflags_c = config.get('cflags_c', [])
cflags_cxx = config.get('cflags_cc', [])
+ if xcode_settings:
+ cflags = xcode_settings.GetCflags(config_to_use)
+ cflags_c = xcode_settings.GetCflagsC(config_to_use)
+ cflags_cxx = xcode_settings.GetCflagsCC(config_to_use)
+ #cflags_objc = xcode_settings.GetCflagsObjC(config_to_use)
+ #cflags_objcc = xcode_settings.GetCflagsObjCC(config_to_use)
+
if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources):
SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ')
@@ -965,6 +977,13 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
if ldflags is not None:
SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
+ # XCode settings
+ xcode_settings = config.get('xcode_settings', {})
+ for xcode_setting, xcode_value in xcode_settings.viewitems():
+ SetTargetProperty(output, cmake_target_name,
+ "XCODE_ATTRIBUTE_%s" % xcode_setting, xcode_value,
+ '' if isinstance(xcode_value, str) else ' ')
+
# Note on Dependencies and Libraries:
# CMake wants to handle link order, resolving the link line up front.
# Gyp does not retain or enforce specifying enough information to do so.
@@ -1029,7 +1048,7 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
output.write(cmake_target_name)
output.write('\n')
if static_deps:
- write_group = circular_libs and len(static_deps) > 1
+ write_group = circular_libs and len(static_deps) > 1 and flavor != 'mac'
if write_group:
output.write('-Wl,--start-group\n')
for dep in gyp.common.uniquer(static_deps):
@@ -1045,9 +1064,9 @@ def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
output.write('\n')
if external_libs:
for lib in gyp.common.uniquer(external_libs):
- output.write(' ')
- output.write(lib)
- output.write('\n')
+ output.write(' "')
+ output.write(RemovePrefix(lib, "$(SDKROOT)"))
+ output.write('"\n')
output.write(')\n')
@@ -1059,6 +1078,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
params, config_to_use):
options = params['options']
generator_flags = params['generator_flags']
+ flavor = gyp.common.GetFlavor(params)
# generator_dir: relative path from pwd to where make puts build files.
# Makes migrating from make to cmake easier, cmake doesn't put anything here.
@@ -1141,7 +1161,9 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
# Force ninja to use rsp files. Otherwise link and ar lines can get too long,
# resulting in 'Argument list too long' errors.
- output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
+ # However, rsp files don't work correctly on Mac.
+ if flavor != 'mac':
+ output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
output.write('\n')
namer = CMakeNamer(target_list)
@@ -1156,8 +1178,13 @@ def GenerateOutputForConfig(target_list, target_dicts, data,
all_qualified_targets.add(qualified_target)
for qualified_target in target_list:
+ if flavor == 'mac':
+ gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
+ spec = target_dicts[qualified_target]
+ gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(data[gyp_file], spec)
+
WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
- options, generator_flags, all_qualified_targets, output)
+ options, generator_flags, all_qualified_targets, flavor, output)
output.close()
diff --git a/chromium/tools/gyp/pylib/gyp/generator/ninja.py b/chromium/tools/gyp/pylib/gyp/generator/ninja.py
index d8a45c7d240..edac48dfca4 100644
--- a/chromium/tools/gyp/pylib/gyp/generator/ninja.py
+++ b/chromium/tools/gyp/pylib/gyp/generator/ninja.py
@@ -379,10 +379,16 @@ class NinjaWriter(object):
# should be used for linking.
self.uses_cpp = False
+ self.target_rpath = generator_flags.get('target_rpath', r'\$$ORIGIN/lib/')
+
self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
self.xcode_settings = self.msvs_settings = None
if self.flavor == 'mac':
self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
+ mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None)
+ if mac_toolchain_dir:
+ self.xcode_settings.mac_toolchain_dir = mac_toolchain_dir
+
if self.flavor == 'win':
self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
generator_flags)
@@ -559,6 +565,9 @@ class NinjaWriter(object):
if 'sources' in spec and self.flavor == 'win':
outputs += self.WriteWinIdlFiles(spec, prebuild)
+ if self.xcode_settings and self.xcode_settings.IsIosFramework():
+ self.WriteiOSFrameworkHeaders(spec, outputs, prebuild)
+
stamp = self.WriteCollapsedDependencies('actions_rules_copies', outputs)
if self.is_mac_bundle:
@@ -656,6 +665,7 @@ class NinjaWriter(object):
for var in special_locals:
if '${%s}' % var in argument:
needed_variables.add(var)
+ needed_variables = sorted(needed_variables)
def cygwin_munge(path):
# pylint: disable=cell-var-from-loop
@@ -729,6 +739,7 @@ class NinjaWriter(object):
# WriteNewNinjaRule uses unique_name for creating an rsp file on win.
extra_bindings.append(('unique_name',
hashlib.md5(outputs[0]).hexdigest()))
+
self.ninja.build(outputs, rule_name, self.GypPathToNinja(source),
implicit=inputs,
order_only=prebuild,
@@ -740,7 +751,11 @@ class NinjaWriter(object):
def WriteCopies(self, copies, prebuild, mac_bundle_depends):
outputs = []
- env = self.GetToolchainEnv()
+ if self.xcode_settings:
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetToolchainEnv(additional_settings=extra_env)
+ else:
+ env = self.GetToolchainEnv()
for copy in copies:
for path in copy['files']:
# Normalize the path so trailing slashes don't confuse us.
@@ -762,18 +777,38 @@ class NinjaWriter(object):
return outputs
+ def WriteiOSFrameworkHeaders(self, spec, outputs, prebuild):
+ """Prebuild steps to generate hmap files and copy headers to destination."""
+ framework = self.ComputeMacBundleOutput()
+ all_sources = spec['sources']
+ copy_headers = spec['mac_framework_headers']
+ output = self.GypPathToUniqueOutput('headers.hmap')
+ self.xcode_settings.header_map_path = output
+ all_headers = map(self.GypPathToNinja,
+ filter(lambda x:x.endswith(('.h')), all_sources))
+ variables = [('framework', framework),
+ ('copy_headers', map(self.GypPathToNinja, copy_headers))]
+ outputs.extend(self.ninja.build(
+ output, 'compile_ios_framework_headers', all_headers,
+ variables=variables, order_only=prebuild))
+
def WriteMacBundleResources(self, resources, bundle_depends):
"""Writes ninja edges for 'mac_bundle_resources'."""
xcassets = []
+
+ extra_env = self.xcode_settings.GetPerTargetSettings()
+ env = self.GetSortedXcodeEnv(additional_settings=extra_env)
+ env = self.ComputeExportEnvString(env)
+ isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
+
for output, res in gyp.xcode_emulation.GetMacBundleResources(
generator_default_variables['PRODUCT_DIR'],
self.xcode_settings, map(self.GypPathToNinja, resources)):
output = self.ExpandSpecial(output)
if os.path.splitext(output)[-1] != '.xcassets':
- isBinary = self.xcode_settings.IsBinaryOutputFormat(self.config_name)
self.ninja.build(output, 'mac_tool', res,
variables=[('mactool_cmd', 'copy-bundle-resource'), \
- ('binary', isBinary)])
+ ('env', env), ('binary', isBinary)])
bundle_depends.append(output)
else:
xcassets.append(res)
@@ -1186,7 +1221,9 @@ class NinjaWriter(object):
rpath = 'lib/'
if self.toolset != 'target':
rpath += self.toolset
- ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
+ ldflags.append(r'-Wl,-rpath=\$$ORIGIN/%s' % rpath)
+ else:
+ ldflags.append('-Wl,-rpath=%s' % self.target_rpath)
ldflags.append('-Wl,-rpath-link=%s' % rpath)
self.WriteVariableList(ninja_file, 'ldflags',
map(self.ExpandSpecial, ldflags))
@@ -1260,10 +1297,11 @@ class NinjaWriter(object):
if len(solibs):
- extra_bindings.append(('solibs', gyp.common.EncodePOSIXShellList(solibs)))
+ extra_bindings.append(('solibs',
+ gyp.common.EncodePOSIXShellList(sorted(solibs))))
ninja_file.build(output, command + command_suffix, link_deps,
- implicit=list(implicit_deps),
+ implicit=sorted(implicit_deps),
order_only=list(order_deps),
variables=extra_bindings)
return linked_binary
@@ -1330,9 +1368,12 @@ class NinjaWriter(object):
self.AppendPostbuildVariable(variables, spec, output, self.target.binary,
is_command_start=not package_framework)
if package_framework and not is_empty:
- variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
- self.ninja.build(output, 'package_framework', mac_bundle_depends,
- variables=variables)
+ if spec['type'] == 'shared_library' and self.xcode_settings.isIOS:
+ self.ninja.build(output, 'package_ios_framework', mac_bundle_depends)
+ else:
+ variables.append(('version', self.xcode_settings.GetFrameworkVersion()))
+ self.ninja.build(output, 'package_framework', mac_bundle_depends,
+ variables=variables)
else:
self.ninja.build(output, 'stamp', mac_bundle_depends,
variables=variables)
@@ -1819,7 +1860,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
master_ninja = ninja_syntax.Writer(master_ninja_file, width=120)
# Put build-time support tools in out/{config_name}.
- gyp.common.CopyTool(flavor, toplevel_build)
+ gyp.common.CopyTool(flavor, toplevel_build, generator_flags)
# Grab make settings for CC/CXX.
# The rules are
@@ -1900,6 +1941,10 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
key_prefix = re.sub(r'\.HOST$', '.host', key_prefix)
wrappers[key_prefix] = os.path.join(build_to_root, value)
+ mac_toolchain_dir = generator_flags.get('mac_toolchain_dir', None)
+ if mac_toolchain_dir:
+ wrappers['LINK'] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir
+
if flavor == 'win':
configs = [target_dicts[qualified_target]['configurations'][config_name]
for qualified_target in target_list]
@@ -1910,7 +1955,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
configs, generator_flags)
cl_paths = gyp.msvs_emulation.GenerateEnvironmentFiles(
toplevel_build, generator_flags, shared_system_includes, OpenOutput)
- for arch, path in cl_paths.iteritems():
+ for arch, path in sorted(cl_paths.iteritems()):
if clang_cl:
# If we have selected clang-cl, use that instead.
path = clang_cl
@@ -2234,6 +2279,12 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
description='COMPILE XCASSETS $in',
command='$env ./gyp-mac-tool compile-xcassets $keys $in')
master_ninja.rule(
+ 'compile_ios_framework_headers',
+ description='COMPILE HEADER MAPS AND COPY FRAMEWORK HEADERS $in',
+ command='$env ./gyp-mac-tool compile-ios-framework-header-map $out '
+ '$framework $in && $env ./gyp-mac-tool '
+ 'copy-ios-framework-headers $framework $copy_headers')
+ master_ninja.rule(
'mac_tool',
description='MACTOOL $mactool_cmd $in',
command='$env ./gyp-mac-tool $mactool_cmd $in $out $binary')
@@ -2242,6 +2293,11 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
description='PACKAGE FRAMEWORK $out, POSTBUILDS',
command='./gyp-mac-tool package-framework $out $version$postbuilds '
'&& touch $out')
+ master_ninja.rule(
+ 'package_ios_framework',
+ description='PACKAGE IOS FRAMEWORK $out, POSTBUILDS',
+ command='./gyp-mac-tool package-ios-framework $out $postbuilds '
+ '&& touch $out')
if flavor == 'win':
master_ninja.rule(
'stamp',
@@ -2346,7 +2402,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
# able to run actions and build libraries by their short name.
master_ninja.newline()
master_ninja.comment('Short names for targets.')
- for short_name in target_short_names:
+ for short_name in sorted(target_short_names):
master_ninja.build(short_name, 'phony', [x.FinalOutput() for x in
target_short_names[short_name]])
@@ -2362,7 +2418,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params,
if all_outputs:
master_ninja.newline()
- master_ninja.build('all', 'phony', list(all_outputs))
+ master_ninja.build('all', 'phony', sorted(all_outputs))
master_ninja.default(generator_flags.get('default_target', 'all'))
master_ninja_file.close()
diff --git a/chromium/tools/gyp/pylib/gyp/input.py b/chromium/tools/gyp/pylib/gyp/input.py
index 20178672b23..22eb333d052 100644
--- a/chromium/tools/gyp/pylib/gyp/input.py
+++ b/chromium/tools/gyp/pylib/gyp/input.py
@@ -1539,11 +1539,15 @@ class DependencyGraphNode(object):
# dependents.
flat_list = OrderedSet()
+ def ExtractNodeRef(node):
+ """Extracts the object that the node represents from the given node."""
+ return node.ref
+
# in_degree_zeros is the list of DependencyGraphNodes that have no
# dependencies not in flat_list. Initially, it is a copy of the children
# of this node, because when the graph was built, nodes with no
# dependencies were made implicit dependents of the root node.
- in_degree_zeros = set(self.dependents[:])
+ in_degree_zeros = sorted(self.dependents[:], key=ExtractNodeRef)
while in_degree_zeros:
# Nodes in in_degree_zeros have no dependencies not in flat_list, so they
@@ -1555,12 +1559,13 @@ class DependencyGraphNode(object):
# Look at dependents of the node just added to flat_list. Some of them
# may now belong in in_degree_zeros.
- for node_dependent in node.dependents:
+ for node_dependent in sorted(node.dependents, key=ExtractNodeRef):
is_in_degree_zero = True
# TODO: We want to check through the
# node_dependent.dependencies list but if it's long and we
# always start at the beginning, then we get O(n^2) behaviour.
- for node_dependent_dependency in node_dependent.dependencies:
+ for node_dependent_dependency in (sorted(node_dependent.dependencies,
+ key=ExtractNodeRef)):
if not node_dependent_dependency.ref in flat_list:
# The dependent one or more dependencies not in flat_list. There
# will be more chances to add it to flat_list when examining
@@ -1573,7 +1578,7 @@ class DependencyGraphNode(object):
# All of the dependent's dependencies are already in flat_list. Add
# it to in_degree_zeros where it will be processed in a future
# iteration of the outer loop.
- in_degree_zeros.add(node_dependent)
+ in_degree_zeros += [node_dependent]
return list(flat_list)
diff --git a/chromium/tools/gyp/pylib/gyp/mac_tool.py b/chromium/tools/gyp/pylib/gyp/mac_tool.py
index 82b5f0a70b0..32aba14fe76 100755
--- a/chromium/tools/gyp/pylib/gyp/mac_tool.py
+++ b/chromium/tools/gyp/pylib/gyp/mac_tool.py
@@ -17,6 +17,7 @@ import plistlib
import re
import shutil
import string
+import struct
import subprocess
import sys
import tempfile
@@ -62,10 +63,15 @@ class MacTool(object):
elif extension == '.storyboard':
return self._CopyXIBFile(source, dest)
elif extension == '.strings':
- self._CopyStringsFile(source, dest, convert_to_binary)
+ self._CopyStringsFile(source, dest)
else:
+ if os.path.exists(dest):
+ os.unlink(dest)
shutil.copy(source, dest)
+ if extension in ('.plist', '.strings') and convert_to_binary == 'True':
+ self._ConvertToBinary(dest)
+
def _CopyXIBFile(self, source, dest):
"""Compiles a XIB file with ibtool into a binary plist in the bundle."""
@@ -76,8 +82,26 @@ class MacTool(object):
if os.path.relpath(dest):
dest = os.path.join(base, dest)
- args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices',
- '--output-format', 'human-readable-text', '--compile', dest, source]
+ args = ['xcrun', 'ibtool', '--errors', '--warnings', '--notices']
+
+ if os.environ['XCODE_VERSION_ACTUAL'] > '0700':
+ args.extend(['--auto-activate-custom-fonts'])
+ if 'IPHONEOS_DEPLOYMENT_TARGET' in os.environ:
+ args.extend([
+ '--target-device', 'iphone', '--target-device', 'ipad',
+ '--minimum-deployment-target',
+ os.environ['IPHONEOS_DEPLOYMENT_TARGET'],
+ ])
+ else:
+ args.extend([
+ '--target-device', 'mac',
+ '--minimum-deployment-target',
+ os.environ['MACOSX_DEPLOYMENT_TARGET'],
+ ])
+
+ args.extend(['--output-format', 'human-readable-text', '--compile', dest,
+ source])
+
ibtool_section_re = re.compile(r'/\*.*\*/')
ibtool_re = re.compile(r'.*note:.*is clipping its content')
ibtoolout = subprocess.Popen(args, stdout=subprocess.PIPE)
@@ -96,7 +120,7 @@ class MacTool(object):
subprocess.check_call([
'xcrun', 'plutil', '-convert', 'binary1', '-o', dest, dest])
- def _CopyStringsFile(self, source, dest, convert_to_binary):
+ def _CopyStringsFile(self, source, dest):
"""Copies a .strings file using iconv to reconvert the input into UTF-16."""
input_code = self._DetectInputEncoding(source) or "UTF-8"
@@ -116,9 +140,6 @@ class MacTool(object):
fp.write(s.decode(input_code).encode('UTF-16'))
fp.close()
- if convert_to_binary == 'True':
- self._ConvertToBinary(dest)
-
def _DetectInputEncoding(self, file_name):
"""Reads the first few bytes from file_name and tries to guess the text
encoding. Returns None as a guess if it can't detect it."""
@@ -254,6 +275,23 @@ class MacTool(object):
break
return libtoolout.returncode
+ def ExecPackageIosFramework(self, framework):
+ # Find the name of the binary based on the part before the ".framework".
+ binary = os.path.basename(framework).split('.')[0]
+ module_path = os.path.join(framework, 'Modules');
+ if not os.path.exists(module_path):
+ os.mkdir(module_path)
+ module_template = 'framework module %s {\n' \
+ ' umbrella header "%s.h"\n' \
+ '\n' \
+ ' export *\n' \
+ ' module * { export * }\n' \
+ '}\n' % (binary, binary)
+
+ module_file = open(os.path.join(module_path, 'module.modulemap'), "w")
+ module_file.write(module_template)
+ module_file.close()
+
def ExecPackageFramework(self, framework, version):
"""Takes a path to Something.framework and the Current version of that and
sets up all the symlinks."""
@@ -290,6 +328,23 @@ class MacTool(object):
os.remove(link)
os.symlink(dest, link)
+ def ExecCompileIosFrameworkHeaderMap(self, out, framework, *all_headers):
+ framework_name = os.path.basename(framework).split('.')[0]
+ all_headers = map(os.path.abspath, all_headers)
+ filelist = {}
+ for header in all_headers:
+ filename = os.path.basename(header)
+ filelist[filename] = header
+ filelist[os.path.join(framework_name, filename)] = header
+ WriteHmap(out, filelist)
+
+ def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
+ header_path = os.path.join(framework, 'Headers');
+ if not os.path.exists(header_path):
+ os.makedirs(header_path)
+ for header in copy_headers:
+ shutil.copy(header, os.path.join(header_path, os.path.basename(header)))
+
def ExecCompileXcassets(self, keys, *inputs):
"""Compiles multiple .xcassets files into a single .car file.
@@ -350,50 +405,26 @@ class MacTool(object):
self._MergePlist(merged_plist, plist)
plistlib.writePlist(merged_plist, output)
- def ExecCodeSignBundle(self, key, resource_rules, entitlements, provisioning):
+ def ExecCodeSignBundle(self, key, entitlements, provisioning):
"""Code sign a bundle.
This function tries to code sign an iOS bundle, following the same
algorithm as Xcode:
- 1. copy ResourceRules.plist from the user or the SDK into the bundle,
- 2. pick the provisioning profile that best match the bundle identifier,
+ 1. pick the provisioning profile that best match the bundle identifier,
and copy it into the bundle as embedded.mobileprovision,
- 3. copy Entitlements.plist from user or SDK next to the bundle,
- 4. code sign the bundle.
+ 2. copy Entitlements.plist from user or SDK next to the bundle,
+ 3. code sign the bundle.
"""
- resource_rules_path = self._InstallResourceRules(resource_rules)
substitutions, overrides = self._InstallProvisioningProfile(
provisioning, self._GetCFBundleIdentifier())
entitlements_path = self._InstallEntitlements(
entitlements, substitutions, overrides)
subprocess.check_call([
- 'codesign', '--force', '--sign', key, '--resource-rules',
- resource_rules_path, '--entitlements', entitlements_path,
- os.path.join(
+ 'codesign', '--force', '--sign', key, '--entitlements',
+ entitlements_path, '--timestamp=none', os.path.join(
os.environ['TARGET_BUILD_DIR'],
os.environ['FULL_PRODUCT_NAME'])])
- def _InstallResourceRules(self, resource_rules):
- """Installs ResourceRules.plist from user or SDK into the bundle.
-
- Args:
- resource_rules: string, optional, path to the ResourceRules.plist file
- to use, default to "${SDKROOT}/ResourceRules.plist"
-
- Returns:
- Path to the copy of ResourceRules.plist into the bundle.
- """
- source_path = resource_rules
- target_path = os.path.join(
- os.environ['BUILT_PRODUCTS_DIR'],
- os.environ['CONTENTS_FOLDER_PATH'],
- 'ResourceRules.plist')
- if not source_path:
- source_path = os.path.join(
- os.environ['SDKROOT'], 'ResourceRules.plist')
- shutil.copy2(source_path, target_path)
- return target_path
-
def _InstallProvisioningProfile(self, profile, bundle_identifier):
"""Installs embedded.mobileprovision into the bundle.
@@ -607,5 +638,71 @@ class MacTool(object):
return {k: self._ExpandVariables(data[k], substitutions) for k in data}
return data
+def NextGreaterPowerOf2(x):
+ return 2**(x-1).bit_length()
+
+def WriteHmap(output_name, filelist):
+ """Generates a header map based on |filelist|.
+
+ Per Mark Mentovai:
+ A header map is structured essentially as a hash table, keyed by names used
+ in #includes, and providing pathnames to the actual files.
+
+ The implementation below and the comment above comes from inspecting:
+ http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
+ while also looking at the implementation in clang in:
+ https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
+ """
+ magic = 1751998832
+ version = 1
+ _reserved = 0
+ count = len(filelist)
+ capacity = NextGreaterPowerOf2(count)
+ strings_offset = 24 + (12 * capacity)
+ max_value_length = len(max(filelist.items(), key=lambda (k,v):len(v))[1])
+
+ out = open(output_name, "wb")
+ out.write(struct.pack('<LHHLLLL', magic, version, _reserved, strings_offset,
+ count, capacity, max_value_length))
+
+ # Create empty hashmap buckets.
+ buckets = [None] * capacity
+ for file, path in filelist.items():
+ key = 0
+ for c in file:
+ key += ord(c.lower()) * 13
+
+ # Fill next empty bucket.
+ while buckets[key & capacity - 1] is not None:
+ key = key + 1
+ buckets[key & capacity - 1] = (file, path)
+
+ next_offset = 1
+ for bucket in buckets:
+ if bucket is None:
+ out.write(struct.pack('<LLL', 0, 0, 0))
+ else:
+ (file, path) = bucket
+ key_offset = next_offset
+ prefix_offset = key_offset + len(file) + 1
+ suffix_offset = prefix_offset + len(os.path.dirname(path) + os.sep) + 1
+ next_offset = suffix_offset + len(os.path.basename(path)) + 1
+ out.write(struct.pack('<LLL', key_offset, prefix_offset, suffix_offset))
+
+ # Pad byte since next offset starts at 1.
+ out.write(struct.pack('<x'))
+
+ for bucket in buckets:
+ if bucket is not None:
+ (file, path) = bucket
+ out.write(struct.pack('<%ds' % len(file), file))
+ out.write(struct.pack('<s', '\0'))
+ base = os.path.dirname(path) + os.sep
+ out.write(struct.pack('<%ds' % len(base), base))
+ out.write(struct.pack('<s', '\0'))
+ path = os.path.basename(path)
+ out.write(struct.pack('<%ds' % len(path), path))
+ out.write(struct.pack('<s', '\0'))
+
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
diff --git a/chromium/tools/gyp/pylib/gyp/msvs_emulation.py b/chromium/tools/gyp/pylib/gyp/msvs_emulation.py
index 74bd4ed3b89..e4a85a96e6c 100644
--- a/chromium/tools/gyp/pylib/gyp/msvs_emulation.py
+++ b/chromium/tools/gyp/pylib/gyp/msvs_emulation.py
@@ -485,8 +485,9 @@ class MsvsSettings(object):
if self.msvs_precompiled_header[config]:
source_ext = os.path.splitext(self.msvs_precompiled_source[config])[1]
if _LanguageMatchesForPch(source_ext, extension):
- pch = os.path.split(self.msvs_precompiled_header[config])[1]
- return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pch + '.pch']
+ pch = self.msvs_precompiled_header[config]
+ pchbase = os.path.split(pch)[1]
+ return ['/Yu' + pch, '/FI' + pch, '/Fp${pchprefix}.' + pchbase + '.pch']
return []
def GetCflagsC(self, config):
@@ -888,7 +889,7 @@ class PrecompiledHeader(object):
def _PchHeader(self):
"""Get the header that will appear in an #include line for all source
files."""
- return os.path.split(self.settings.msvs_precompiled_header[self.config])[1]
+ return self.settings.msvs_precompiled_header[self.config]
def GetObjDependencies(self, sources, objs, arch):
"""Given a list of sources files and the corresponding object files,
diff --git a/chromium/tools/gyp/pylib/gyp/xcode_emulation.py b/chromium/tools/gyp/pylib/gyp/xcode_emulation.py
index a475cd2588b..2dec19e5d6f 100644
--- a/chromium/tools/gyp/pylib/gyp/xcode_emulation.py
+++ b/chromium/tools/gyp/pylib/gyp/xcode_emulation.py
@@ -162,6 +162,8 @@ class XcodeSettings(object):
self.spec = spec
self.isIOS = False
+ self.mac_toolchain_dir = None
+ self.header_map_path = None
# Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
# This means self.xcode_settings[config] always contains all settings
@@ -222,6 +224,10 @@ class XcodeSettings(object):
default)
return format == "binary"
+ def IsIosFramework(self):
+ return self.spec['type'] == 'shared_library' and self._IsBundle() and \
+ self.isIOS
+
def _IsBundle(self):
return int(self.spec.get('mac_bundle', 0)) != 0 or self._IsXCTest()
@@ -489,6 +495,9 @@ class XcodeSettings(object):
if 'SDKROOT' in self._Settings() and sdk_root:
cflags.append('-isysroot %s' % sdk_root)
+ if self.header_map_path:
+ cflags.append('-I%s' % self.header_map_path)
+
if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
cflags.append('-Wconstant-conversion')
@@ -858,13 +867,13 @@ class XcodeSettings(object):
# extensions and provide loader and main function.
# These flags reflect the compilation options used by xcode to compile
# extensions.
- ldflags.append('-lpkstart')
if XcodeVersion() < '0900':
+ ldflags.append('-lpkstart')
ldflags.append(sdk_root +
'/System/Library/PrivateFrameworks/PlugInKit.framework/PlugInKit')
+ else:
+ ldflags.append('-e _NSExtensionMain')
ldflags.append('-fapplication-extension')
- ldflags.append('-Xlinker -rpath '
- '-Xlinker @executable_path/../../Frameworks')
self._Appendf(ldflags, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
@@ -938,7 +947,8 @@ class XcodeSettings(object):
self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
default_strip_style = 'debugging'
- if self.spec['type'] == 'loadable_module' and self._IsBundle():
+ if ((self.spec['type'] == 'loadable_module' or self._IsIosAppExtension())
+ and self._IsBundle()):
default_strip_style = 'non-global'
elif self.spec['type'] == 'executable':
default_strip_style = 'all'
@@ -994,7 +1004,8 @@ class XcodeSettings(object):
be deployed to a device. This should be run as the very last step of the
build."""
if not (self.isIOS and
- (self.spec['type'] == 'executable' or self._IsXCTest())):
+ (self.spec['type'] == 'executable' or self._IsXCTest()) or
+ self.IsIosFramework()):
return []
settings = self.xcode_settings[configname]
@@ -1009,9 +1020,8 @@ class XcodeSettings(object):
print 'Warning: Some codesign keys not implemented, ignoring: %s' % (
', '.join(sorted(unimpl)))
- return ['%s code-sign-bundle "%s" "%s" "%s" "%s"' % (
+ return ['%s code-sign-bundle "%s" "%s" "%s"' % (
os.path.join('${TARGET_BUILD_DIR}', 'gyp-mac-tool'), key,
- settings.get('CODE_SIGN_RESOURCE_RULES_PATH', ''),
settings.get('CODE_SIGN_ENTITLEMENTS', ''),
settings.get('PROVISIONING_PROFILE', ''))
]
@@ -1096,25 +1106,37 @@ class XcodeSettings(object):
xcode, xcode_build = XcodeVersion()
cache['DTXcode'] = xcode
cache['DTXcodeBuild'] = xcode_build
+ compiler = self.xcode_settings[configname].get('GCC_VERSION')
+ if compiler is not None:
+ cache['DTCompiler'] = compiler
sdk_root = self._SdkRoot(configname)
if not sdk_root:
sdk_root = self._DefaultSdkRoot()
- cache['DTSDKName'] = sdk_root
- if xcode >= '0430':
+ sdk_version = self._GetSdkVersionInfoItem(sdk_root, '--show-sdk-version')
+ cache['DTSDKName'] = sdk_root + (sdk_version or '')
+ if xcode >= '0720':
cache['DTSDKBuild'] = self._GetSdkVersionInfoItem(
- sdk_root, '--show-sdk-version')
+ sdk_root, '--show-sdk-build-version')
+ elif xcode >= '0430':
+ cache['DTSDKBuild'] = sdk_version
else:
cache['DTSDKBuild'] = cache['BuildMachineOSBuild']
if self.isIOS:
- cache['DTPlatformName'] = cache['DTSDKName']
+ cache['MinimumOSVersion'] = self.xcode_settings[configname].get(
+ 'IPHONEOS_DEPLOYMENT_TARGET')
+ cache['DTPlatformName'] = sdk_root
+ cache['DTPlatformVersion'] = sdk_version
+
if configname.endswith("iphoneos"):
- cache['DTPlatformVersion'] = self._GetSdkVersionInfoItem(
- sdk_root, '--show-sdk-version')
cache['CFBundleSupportedPlatforms'] = ['iPhoneOS']
+ cache['DTPlatformBuild'] = cache['DTSDKBuild']
else:
cache['CFBundleSupportedPlatforms'] = ['iPhoneSimulator']
+ # This is weird, but Xcode sets DTPlatformBuild to an empty field
+ # for simulator builds.
+ cache['DTPlatformBuild'] = ""
XcodeSettings._plist_cache[configname] = cache
# Include extra plist items that are per-target, not per global
@@ -1487,12 +1509,16 @@ def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
# written for bundles:
'TARGET_BUILD_DIR' : built_products_dir,
'TEMP_DIR' : '${TMPDIR}',
+ 'XCODE_VERSION_ACTUAL' : XcodeVersion()[0],
}
if xcode_settings.GetPerConfigSetting('SDKROOT', configuration):
env['SDKROOT'] = xcode_settings._SdkPath(configuration)
else:
env['SDKROOT'] = ''
+ if xcode_settings.mac_toolchain_dir:
+ env['DEVELOPER_DIR'] = xcode_settings.mac_toolchain_dir
+
if spec['type'] in (
'executable', 'static_library', 'shared_library', 'loadable_module'):
env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()
diff --git a/chromium/tools/gyp/pylib/gyp/xcode_ninja.py b/chromium/tools/gyp/pylib/gyp/xcode_ninja.py
index 0886d99e771..bc76ffff4e9 100644
--- a/chromium/tools/gyp/pylib/gyp/xcode_ninja.py
+++ b/chromium/tools/gyp/pylib/gyp/xcode_ninja.py
@@ -233,13 +233,26 @@ def CreateWrapper(target_list, target_dicts, data, params):
# Tell Xcode to look everywhere for headers.
sources_target['configurations'] = {'Default': { 'include_dirs': [ depth ] } }
+ # Put excluded files into the sources target so they can be opened in Xcode.
+ skip_excluded_files = \
+ not generator_flags.get('xcode_ninja_list_excluded_files', True)
+
sources = []
for target, target_dict in target_dicts.iteritems():
base = os.path.dirname(target)
files = target_dict.get('sources', []) + \
target_dict.get('mac_bundle_resources', [])
+
+ if not skip_excluded_files:
+ files.extend(target_dict.get('sources_excluded', []) +
+ target_dict.get('mac_bundle_resources_excluded', []))
+
for action in target_dict.get('actions', []):
files.extend(action.get('inputs', []))
+
+ if not skip_excluded_files:
+ files.extend(action.get('inputs_excluded', []))
+
# Remove files starting with $. These are mostly intermediate files for the
# build system.
files = [ file for file in files if not file.startswith('$')]
diff --git a/chromium/tools/gyp/test/determinism/determinism.gyp b/chromium/tools/gyp/test/determinism/determinism.gyp
new file mode 100644
index 00000000000..81346748a13
--- /dev/null
+++ b/chromium/tools/gyp/test/determinism/determinism.gyp
@@ -0,0 +1,59 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'determinism',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'depfile_action',
+ 'inputs': [
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output.txt',
+ ],
+ 'depfile': 'depfile.d',
+ 'action': [ ]
+ },
+ ],
+ },
+ {
+ 'target_name': 'determinism2',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'depfile_action',
+ 'inputs': [
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output.txt',
+ ],
+ 'depfile': 'depfile.d',
+ 'action': [ ]
+ },
+ ],
+ },
+ {
+ 'target_name': 'determinism3',
+ 'type': 'none',
+ 'actions': [
+ {
+ 'action_name': 'depfile_action',
+ 'inputs': [
+ 'input.txt',
+ ],
+ 'outputs': [
+ 'output.txt',
+ ],
+ 'depfile': 'depfile.d',
+ 'action': [ ]
+ },
+ ],
+ },
+ ],
+}
diff --git a/chromium/tools/gyp/test/determinism/empty-targets.gyp b/chromium/tools/gyp/test/determinism/empty-targets.gyp
new file mode 100644
index 00000000000..a4ccdd703c9
--- /dev/null
+++ b/chromium/tools/gyp/test/determinism/empty-targets.gyp
@@ -0,0 +1,32 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'empty_target1',
+ 'type': 'none',
+ },
+ {
+ 'target_name': 'empty_target2',
+ 'type': 'none',
+ },
+ {
+ 'target_name': 'empty_target3',
+ 'type': 'none',
+ },
+ {
+ 'target_name': 'empty_target4',
+ 'type': 'none',
+ },
+ {
+ 'target_name': 'empty_target5',
+ 'type': 'none',
+ },
+ {
+ 'target_name': 'empty_target6',
+ 'type': 'none',
+ },
+ ],
+}
diff --git a/chromium/tools/gyp/test/determinism/needed-variables.gyp b/chromium/tools/gyp/test/determinism/needed-variables.gyp
new file mode 100644
index 00000000000..022165bebd4
--- /dev/null
+++ b/chromium/tools/gyp/test/determinism/needed-variables.gyp
@@ -0,0 +1,33 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'test',
+ 'type': 'executable',
+ 'sources': ['rule.ext'],
+ 'rules': [{
+ 'rule_name': 'rule',
+ 'extension': 'ext',
+ 'inputs': [ 'rule.py', ],
+ 'action': [
+ 'python',
+ 'rule.py',
+ '<(RULE_INPUT_ROOT)',
+ '<(RULE_INPUT_EXT)',
+ '<(RULE_INPUT_DIRNAME)',
+ '<(RULE_INPUT_NAME)',
+ '<(RULE_INPUT_PATH)',
+ ],
+ 'outputs': [ 'hello_world.txt' ],
+ 'sources': ['rule.ext'],
+ 'message': 'Processing <(RULE_INPUT_PATH)',
+ 'process_outputs_as_sources': 1,
+ # Allows the test to run without hermetic cygwin on windows.
+ 'msvs_cygwin_shell': 0,
+ }],
+ },
+ ],
+}
diff --git a/chromium/tools/gyp/test/determinism/solibs.gyp b/chromium/tools/gyp/test/determinism/solibs.gyp
new file mode 100644
index 00000000000..9ae3246d638
--- /dev/null
+++ b/chromium/tools/gyp/test/determinism/solibs.gyp
@@ -0,0 +1,32 @@
+# Copyright (c) 2015 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This test both tests solibs and implicit_deps.
+{
+ 'targets': [
+ {
+ 'target_name': 'a',
+ 'type': 'shared_library',
+ 'sources': [ 'solib.cc' ],
+ },
+ {
+ 'target_name': 'b',
+ 'type': 'shared_library',
+ 'sources': [ 'solib.cc' ],
+ },
+ {
+ 'target_name': 'c',
+ 'type': 'executable',
+ 'sources': [ 'main.cc' ],
+ 'dependencies': [ 'a', 'b' ],
+ },
+ ],
+ 'conditions': [
+ ['OS=="linux"', {
+ 'target_defaults': {
+ 'cflags': ['-fPIC'],
+ },
+ }],
+ ],
+}
diff --git a/chromium/tools/gyp/test/ios/app-bundle/test-archs.gyp b/chromium/tools/gyp/test/ios/app-bundle/test-archs.gyp
index b1558c94bec..79cfcd67383 100644
--- a/chromium/tools/gyp/test/ios/app-bundle/test-archs.gyp
+++ b/chromium/tools/gyp/test/ios/app-bundle/test-archs.gyp
@@ -6,7 +6,6 @@
['CC', '/usr/bin/clang'],
],
'target_defaults': {
- 'product_extension': 'bundle',
'mac_bundle_resources': [
'TestApp/English.lproj/InfoPlist.strings',
'TestApp/English.lproj/MainMenu.xib',
diff --git a/chromium/tools/gyp/test/ios/app-bundle/test-device.gyp b/chromium/tools/gyp/test/ios/app-bundle/test-device.gyp
index 28cdbb3af58..a0cfff7cdb8 100644
--- a/chromium/tools/gyp/test/ios/app-bundle/test-device.gyp
+++ b/chromium/tools/gyp/test/ios/app-bundle/test-device.gyp
@@ -2,15 +2,41 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
- 'make_global_settings': [
- ['CC', '/usr/bin/clang'],
+ 'conditions': [
+ ['"<(GENERATOR)"=="xcode"', {
+ 'target_defaults': {
+ 'configurations': {
+ 'Default': {
+ 'xcode_settings': {
+ 'SDKROOT': 'iphonesimulator',
+ 'CONFIGURATION_BUILD_DIR':'build/Default',
+ }
+ },
+ 'Default-iphoneos': {
+ 'xcode_settings': {
+ 'SDKROOT': 'iphoneos',
+ 'CONFIGURATION_BUILD_DIR':'build/Default-iphoneos',
+ }
+ },
+ },
+ },
+ }, {
+ 'target_defaults': {
+ 'configurations': {
+ 'Default': {
+ 'xcode_settings': {
+ 'SDKROOT': 'iphonesimulator',
+ }
+ },
+ },
+ },
+ }],
],
'targets': [
{
'target_name': 'test_app',
'product_name': 'Test App Gyp',
'type': 'executable',
- 'product_extension': 'bundle',
'mac_bundle': 1,
'sources': [
'TestApp/main.m',
@@ -31,16 +57,19 @@
],
'SDKROOT': 'iphonesimulator', # -isysroot
'TARGETED_DEVICE_FAMILY': '1,2',
+ 'INFOPLIST_OUTPUT_FORMAT':'xml',
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
- 'IPHONEOS_DEPLOYMENT_TARGET': '4.2',
- 'CONFIGURATION_BUILD_DIR':'build/Default',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '8.0',
+ 'CODE_SIGNING_REQUIRED': 'NO',
+ 'CODE_SIGN_IDENTITY[sdk=iphoneos*]': '',
+
},
},
{
'target_name': 'sig_test',
- 'product_name': 'sig_test',
+ 'product_name': 'sigtest',
'type': 'executable',
- 'product_extension': 'bundle',
'mac_bundle': 1,
'sources': [
'TestApp/main.m',
@@ -70,8 +99,9 @@
],
'SDKROOT': 'iphonesimulator', # -isysroot
'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
+ 'INFOPLIST_OUTPUT_FORMAT':'xml',
'INFOPLIST_FILE': 'TestApp/TestApp-Info.plist',
- 'IPHONEOS_DEPLOYMENT_TARGET': '4.2',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '8.0',
'CONFIGURATION_BUILD_DIR':'buildsig/Default',
},
},
diff --git a/chromium/tools/gyp/test/ios/app-bundle/test.gyp b/chromium/tools/gyp/test/ios/app-bundle/test.gyp
index b60474df8a3..544c589f607 100644
--- a/chromium/tools/gyp/test/ios/app-bundle/test.gyp
+++ b/chromium/tools/gyp/test/ios/app-bundle/test.gyp
@@ -21,6 +21,7 @@
],
'mac_bundle_resources': [
'TestApp/English.lproj/InfoPlist.strings',
+ 'TestApp/English.lproj/LanguageMap.plist',
'TestApp/English.lproj/MainMenu.xib',
'TestApp/English.lproj/Main_iPhone.storyboard',
],
diff --git a/chromium/tools/gyp/test/ios/extension/extension.gyp b/chromium/tools/gyp/test/ios/extension/extension.gyp
index cf17d82dc4e..91c068413d7 100644
--- a/chromium/tools/gyp/test/ios/extension/extension.gyp
+++ b/chromium/tools/gyp/test/ios/extension/extension.gyp
@@ -48,7 +48,10 @@
'ARCHS': [ 'armv7' ],
'SDKROOT': 'iphoneos',
'IPHONEOS_DEPLOYMENT_TARGET': '7.0',
- 'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
+ 'CODE_SIGNING_REQUIRED': 'NO',
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ 'CONFIGURATION_BUILD_DIR':'build/Default',
},
},
{
@@ -77,7 +80,10 @@
'ARCHS': [ 'armv7' ],
'SDKROOT': 'iphoneos',
'IPHONEOS_DEPLOYMENT_TARGET': '7.0',
- 'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
+ 'CODE_SIGNING_REQUIRED': 'NO',
+ 'DEPLOYMENT_POSTPROCESSING': 'YES',
+ 'STRIP_INSTALLED_PRODUCT': 'YES',
+ 'CONFIGURATION_BUILD_DIR':'build/Default',
},
},
],
diff --git a/chromium/tools/gyp/test/ios/framework/framework.gyp b/chromium/tools/gyp/test/ios/framework/framework.gyp
new file mode 100644
index 00000000000..a99079226d4
--- /dev/null
+++ b/chromium/tools/gyp/test/ios/framework/framework.gyp
@@ -0,0 +1,39 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'iOSFramework',
+ 'type': 'shared_library',
+ 'mac_bundle': 1,
+ 'sources': [
+ 'iOSFramework/iOSFramework.h',
+ ],
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/UIKit.framework',
+ ],
+ },
+ 'mac_framework_headers': [
+ 'iOSFramework/iOSFramework.h',
+ ],
+ 'mac_framework_dirs': [
+ '$(SDKROOT)/../../Library/Frameworks',
+ ],
+ 'xcode_settings': {
+ 'OTHER_CFLAGS': [
+ '-fobjc-abi-version=2',
+ ],
+ 'INFOPLIST_FILE': 'iOSFramework/Info.plist',
+ 'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+ 'SDKROOT': 'iphoneos',
+ 'IPHONEOS_DEPLOYMENT_TARGET': '8.0',
+ 'CONFIGURATION_BUILD_DIR':'build/Default',
+ 'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
+ },
+ },
+ ],
+}
diff --git a/chromium/tools/gyp/test/linux/target-rpath/test.gyp b/chromium/tools/gyp/test/linux/target-rpath/test.gyp
new file mode 100644
index 00000000000..b546106986e
--- /dev/null
+++ b/chromium/tools/gyp/test/linux/target-rpath/test.gyp
@@ -0,0 +1,47 @@
+# Copyright (c) 2013 Google Inc. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+ 'targets': [
+ {
+ 'target_name': 'shared',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c' ],
+ },
+ {
+ 'target_name': 'shared_no_so_suffix',
+ 'product_extension': 'so.0.1',
+ 'type': 'shared_library',
+ 'sources': [ 'file.c' ],
+ },
+ {
+ 'target_name': 'static',
+ 'type': 'static_library',
+ 'sources': [ 'file.c' ],
+ },
+ {
+ 'target_name': 'shared_executable',
+ 'type': 'executable',
+ 'sources': [ 'main.c' ],
+ 'dependencies': [
+ 'shared',
+ ]
+ },
+ {
+ 'target_name': 'shared_executable_no_so_suffix',
+ 'type': 'executable',
+ 'sources': [ 'main.c' ],
+ 'dependencies': [
+ 'shared_no_so_suffix',
+ ]
+ },
+ {
+ 'target_name': 'static_executable',
+ 'type': 'executable',
+ 'sources': [ 'main.c' ],
+ 'dependencies': [
+ 'static',
+ ]
+ },
+ ],
+}
diff --git a/chromium/tools/gyp/test/xcode-ninja/list_excluded/hello_exclude.gyp b/chromium/tools/gyp/test/xcode-ninja/list_excluded/hello_exclude.gyp
new file mode 100644
index 00000000000..f5f0e8eafdc
--- /dev/null
+++ b/chromium/tools/gyp/test/xcode-ninja/list_excluded/hello_exclude.gyp
@@ -0,0 +1,19 @@
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'hello',
+ 'type': 'executable',
+ 'sources': [
+ 'hello.cpp',
+ 'hello_excluded.cpp',
+ ],
+ 'sources!': [
+ 'hello_excluded.cpp',
+ ],
+ },
+ ],
+}
diff --git a/chromium/tools/gyp/tools/pretty_gyp.py b/chromium/tools/gyp/tools/pretty_gyp.py
index c51d35872cc..d5736bbd4a6 100755
--- a/chromium/tools/gyp/tools/pretty_gyp.py
+++ b/chromium/tools/gyp/tools/pretty_gyp.py
@@ -118,24 +118,23 @@ def prettyprint_input(lines):
basic_offset = 2
last_line = ""
for line in lines:
- if COMMENT_RE.match(line):
- print line
- else:
- line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
- if len(line) > 0:
+ line = line.strip('\r\n\t ') # Otherwise doesn't strip \r on Unix.
+ if len(line) > 0:
+ brace_diff = 0
+ if not COMMENT_RE.match(line):
(brace_diff, after) = count_braces(line)
- if brace_diff != 0:
- if after:
- print " " * (basic_offset * indent) + line
- indent += brace_diff
- else:
- indent += brace_diff
- print " " * (basic_offset * indent) + line
+ if brace_diff != 0:
+ if after:
+ print " " * (basic_offset * indent) + line
+ indent += brace_diff
else:
+ indent += brace_diff
print " " * (basic_offset * indent) + line
else:
- print ""
- last_line = line
+ print " " * (basic_offset * indent) + line
+ else:
+ print ""
+ last_line = line
def main():
diff --git a/chromium/tools/imagediff/BUILD.gn b/chromium/tools/imagediff/BUILD.gn
new file mode 100644
index 00000000000..7688cfea54b
--- /dev/null
+++ b/chromium/tools/imagediff/BUILD.gn
@@ -0,0 +1,21 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+executable("imagediff") {
+ output_name = "image_diff" # Different than dir nam for historical reasons.
+ sources = [
+ "image_diff.cc",
+ "image_diff_png.cc",
+ "image_diff_png.h",
+ ]
+
+ configs += [ "//build/config/compiler:wexit_time_destructors" ]
+
+ deps = [
+ "//base",
+ "//build/config/sanitizers:deps",
+ "//third_party/libpng",
+ "//third_party/zlib",
+ ]
+}
diff --git a/chromium/tools/imagediff/DEPS b/chromium/tools/imagediff/DEPS
new file mode 100644
index 00000000000..170e24fb81f
--- /dev/null
+++ b/chromium/tools/imagediff/DEPS
@@ -0,0 +1,4 @@
+include_rules = [
+ "+third_party/libpng/png.h",
+ "+third_party/zlib/zlib.h",
+]
diff --git a/chromium/tools/imagediff/image_diff.cc b/chromium/tools/imagediff/image_diff.cc
new file mode 100644
index 00000000000..56d1be9c077
--- /dev/null
+++ b/chromium/tools/imagediff/image_diff.cc
@@ -0,0 +1,453 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file input format is based loosely on
+// Tools/DumpRenderTree/ImageDiff.m
+
+// The exact format of this tool's output to stdout is important, to match
+// what the run-webkit-tests script expects.
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <algorithm>
+#include <iostream>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "base/command_line.h"
+#include "base/containers/hash_tables.h"
+#include "base/files/file_path.h"
+#include "base/files/file_util.h"
+#include "base/logging.h"
+#include "base/numerics/safe_conversions.h"
+#include "base/process/memory.h"
+#include "base/strings/string_util.h"
+#include "base/strings/utf_string_conversions.h"
+#include "build/build_config.h"
+#include "tools/imagediff/image_diff_png.h"
+
+#if defined(OS_WIN)
+#include "windows.h"
+#endif
+
+// Causes the app to remain open, waiting for pairs of filenames on stdin.
+// The caller is then responsible for terminating this app.
+static const char kOptionPollStdin[] = "use-stdin";
+// Causes the app to additionally calculate a diff of the color histograms
+// (which is resistant to shifts in layout).
+static const char kOptionCompareHistograms[] = "histogram";
+// Causes the app to output an image that visualizes the difference.
+static const char kOptionGenerateDiff[] = "diff";
+
+// Return codes used by this utility.
+static const int kStatusSame = 0;
+static const int kStatusDifferent = 1;
+static const int kStatusError = 2;
+
+// Color codes.
+static const uint32_t RGBA_RED = 0x000000ff;
+static const uint32_t RGBA_ALPHA = 0xff000000;
+
+class Image {
+ public:
+ Image() : w_(0), h_(0) {
+ }
+
+ Image(const Image& image)
+ : w_(image.w_),
+ h_(image.h_),
+ data_(image.data_) {
+ }
+
+ bool has_image() const {
+ return w_ > 0 && h_ > 0;
+ }
+
+ int w() const {
+ return w_;
+ }
+
+ int h() const {
+ return h_;
+ }
+
+ const unsigned char* data() const {
+ return &data_.front();
+ }
+
+ // Creates the image from stdin with the given data length. On success, it
+ // will return true. On failure, no other methods should be accessed.
+ bool CreateFromStdin(size_t byte_length) {
+ if (byte_length == 0)
+ return false;
+
+ std::unique_ptr<unsigned char[]> source(new unsigned char[byte_length]);
+ if (fread(source.get(), 1, byte_length, stdin) != byte_length)
+ return false;
+
+ if (!image_diff_png::DecodePNG(source.get(), byte_length,
+ &data_, &w_, &h_)) {
+ Clear();
+ return false;
+ }
+ return true;
+ }
+
+ // Creates the image from the given filename on disk, and returns true on
+ // success.
+ bool CreateFromFilename(const base::FilePath& path) {
+ FILE* f = base::OpenFile(path, "rb");
+ if (!f)
+ return false;
+
+ std::vector<unsigned char> compressed;
+ const int buf_size = 1024;
+ unsigned char buf[buf_size];
+ size_t num_read = 0;
+ while ((num_read = fread(buf, 1, buf_size, f)) > 0) {
+ compressed.insert(compressed.end(), buf, buf + num_read);
+ }
+
+ base::CloseFile(f);
+
+ if (!image_diff_png::DecodePNG(&compressed[0], compressed.size(),
+ &data_, &w_, &h_)) {
+ Clear();
+ return false;
+ }
+ return true;
+ }
+
+ void Clear() {
+ w_ = h_ = 0;
+ data_.clear();
+ }
+
+ // Returns the RGBA value of the pixel at the given location
+ uint32_t pixel_at(int x, int y) const {
+ DCHECK(x >= 0 && x < w_);
+ DCHECK(y >= 0 && y < h_);
+ return *reinterpret_cast<const uint32_t*>(&(data_[(y * w_ + x) * 4]));
+ }
+
+ void set_pixel_at(int x, int y, uint32_t color) const {
+ DCHECK(x >= 0 && x < w_);
+ DCHECK(y >= 0 && y < h_);
+ void* addr = &const_cast<unsigned char*>(&data_.front())[(y * w_ + x) * 4];
+ *reinterpret_cast<uint32_t*>(addr) = color;
+ }
+
+ private:
+ // pixel dimensions of the image
+ int w_, h_;
+
+ std::vector<unsigned char> data_;
+};
+
+float PercentageDifferent(const Image& baseline, const Image& actual) {
+ int w = std::min(baseline.w(), actual.w());
+ int h = std::min(baseline.h(), actual.h());
+
+ // Compute pixels different in the overlap.
+ int pixels_different = 0;
+ for (int y = 0; y < h; y++) {
+ for (int x = 0; x < w; x++) {
+ if (baseline.pixel_at(x, y) != actual.pixel_at(x, y))
+ pixels_different++;
+ }
+ }
+
+ // Count pixels that are a difference in size as also being different.
+ int max_w = std::max(baseline.w(), actual.w());
+ int max_h = std::max(baseline.h(), actual.h());
+ // These pixels are off the right side, not including the lower right corner.
+ pixels_different += (max_w - w) * h;
+ // These pixels are along the bottom, including the lower right corner.
+ pixels_different += (max_h - h) * max_w;
+
+ // Like the WebKit ImageDiff tool, we define percentage different in terms
+ // of the size of the 'actual' bitmap.
+ float total_pixels = static_cast<float>(actual.w()) *
+ static_cast<float>(actual.h());
+ if (total_pixels == 0) {
+ // When the bitmap is empty, they are 100% different.
+ return 100.0f;
+ }
+ return 100.0f * pixels_different / total_pixels;
+}
+
+typedef base::hash_map<uint32_t, int32_t> RgbaToCountMap;
+
+float HistogramPercentageDifferent(const Image& baseline, const Image& actual) {
+ // TODO(johnme): Consider using a joint histogram instead, as described in
+ // "Comparing Images Using Joint Histograms" by Pass & Zabih
+ // http://www.cs.cornell.edu/~rdz/papers/pz-jms99.pdf
+
+ int w = std::min(baseline.w(), actual.w());
+ int h = std::min(baseline.h(), actual.h());
+
+ // Count occurences of each RGBA pixel value of baseline in the overlap.
+ RgbaToCountMap baseline_histogram;
+ for (int y = 0; y < h; y++) {
+ for (int x = 0; x < w; x++) {
+ // hash_map operator[] inserts a 0 (default constructor) if key not found.
+ baseline_histogram[baseline.pixel_at(x, y)]++;
+ }
+ }
+
+ // Compute pixels different in the histogram of the overlap.
+ int pixels_different = 0;
+ for (int y = 0; y < h; y++) {
+ for (int x = 0; x < w; x++) {
+ uint32_t actual_rgba = actual.pixel_at(x, y);
+ RgbaToCountMap::iterator it = baseline_histogram.find(actual_rgba);
+ if (it != baseline_histogram.end() && it->second > 0)
+ it->second--;
+ else
+ pixels_different++;
+ }
+ }
+
+ // Count pixels that are a difference in size as also being different.
+ int max_w = std::max(baseline.w(), actual.w());
+ int max_h = std::max(baseline.h(), actual.h());
+ // These pixels are off the right side, not including the lower right corner.
+ pixels_different += (max_w - w) * h;
+ // These pixels are along the bottom, including the lower right corner.
+ pixels_different += (max_h - h) * max_w;
+
+ // Like the WebKit ImageDiff tool, we define percentage different in terms
+ // of the size of the 'actual' bitmap.
+ float total_pixels = static_cast<float>(actual.w()) *
+ static_cast<float>(actual.h());
+ if (total_pixels == 0) {
+ // When the bitmap is empty, they are 100% different.
+ return 100.0f;
+ }
+ return 100.0f * pixels_different / total_pixels;
+}
+
+void PrintHelp() {
+ fprintf(stderr,
+ "Usage:\n"
+ " image_diff [--histogram] <compare file> <reference file>\n"
+ " Compares two files on disk, returning 0 when they are the same;\n"
+ " passing \"--histogram\" additionally calculates a diff of the\n"
+ " RGBA value histograms (which is resistant to shifts in layout)\n"
+ " image_diff --use-stdin\n"
+ " Stays open reading pairs of filenames from stdin, comparing them,\n"
+ " and sending 0 to stdout when they are the same\n"
+ " image_diff --diff <compare file> <reference file> <output file>\n"
+ " Compares two files on disk, outputs an image that visualizes the\n"
+ " difference to <output file>\n");
+ /* For unfinished webkit-like-mode (see below)
+ "\n"
+ " image_diff -s\n"
+ " Reads stream input from stdin, should be EXACTLY of the format\n"
+ " \"Content-length: <byte length> <data>Content-length: ...\n"
+ " it will take as many file pairs as given, and will compare them as\n"
+ " (cmp_file, reference_file) pairs\n");
+ */
+}
+
+int CompareImages(const base::FilePath& file1,
+ const base::FilePath& file2,
+ bool compare_histograms) {
+ Image actual_image;
+ Image baseline_image;
+
+ if (!actual_image.CreateFromFilename(file1)) {
+ fprintf(stderr, "image_diff: Unable to open file \"%" PRFilePath "\"\n",
+ file1.value().c_str());
+ return kStatusError;
+ }
+ if (!baseline_image.CreateFromFilename(file2)) {
+ fprintf(stderr, "image_diff: Unable to open file \"%" PRFilePath "\"\n",
+ file2.value().c_str());
+ return kStatusError;
+ }
+
+ if (compare_histograms) {
+ float percent = HistogramPercentageDifferent(actual_image, baseline_image);
+ const char* passed = percent > 0.0 ? "failed" : "passed";
+ printf("histogram diff: %01.2f%% %s\n", percent, passed);
+ }
+
+ const char* diff_name = compare_histograms ? "exact diff" : "diff";
+ float percent = PercentageDifferent(actual_image, baseline_image);
+ const char* passed = percent > 0.0 ? "failed" : "passed";
+ printf("%s: %01.2f%% %s\n", diff_name, percent, passed);
+ if (percent > 0.0) {
+ // failure: The WebKit version also writes the difference image to
+ // stdout, which seems excessive for our needs.
+ return kStatusDifferent;
+ }
+ // success
+ return kStatusSame;
+
+/* Untested mode that acts like WebKit's image comparator. I wrote this but
+ decided it's too complicated. We may use it in the future if it looks useful
+
+ char buffer[2048];
+ while (fgets(buffer, sizeof(buffer), stdin)) {
+
+ if (strncmp("Content-length: ", buffer, 16) == 0) {
+ char* context;
+ strtok_s(buffer, " ", &context);
+ int image_size = strtol(strtok_s(NULL, " ", &context), NULL, 10);
+
+ bool success = false;
+ if (image_size > 0 && actual_image.has_image() == 0) {
+ if (!actual_image.CreateFromStdin(image_size)) {
+ fputs("Error, input image can't be decoded.\n", stderr);
+ return 1;
+ }
+ } else if (image_size > 0 && baseline_image.has_image() == 0) {
+ if (!baseline_image.CreateFromStdin(image_size)) {
+ fputs("Error, baseline image can't be decoded.\n", stderr);
+ return 1;
+ }
+ } else {
+ fputs("Error, image size must be specified.\n", stderr);
+ return 1;
+ }
+ }
+
+ if (actual_image.has_image() && baseline_image.has_image()) {
+ float percent = PercentageDifferent(actual_image, baseline_image);
+ if (percent > 0.0) {
+ // failure: The WebKit version also writes the difference image to
+ // stdout, which seems excessive for our needs.
+ printf("diff: %01.2f%% failed\n", percent);
+ } else {
+ // success
+ printf("diff: %01.2f%% passed\n", percent);
+ }
+ actual_image.Clear();
+ baseline_image.Clear();
+ }
+
+ fflush(stdout);
+ }
+*/
+}
+
+bool CreateImageDiff(const Image& image1, const Image& image2, Image* out) {
+ int w = std::min(image1.w(), image2.w());
+ int h = std::min(image1.h(), image2.h());
+ *out = Image(image1);
+ bool same = (image1.w() == image2.w()) && (image1.h() == image2.h());
+
+ // TODO(estade): do something with the extra pixels if the image sizes
+ // are different.
+ for (int y = 0; y < h; y++) {
+ for (int x = 0; x < w; x++) {
+ uint32_t base_pixel = image1.pixel_at(x, y);
+ if (base_pixel != image2.pixel_at(x, y)) {
+ // Set differing pixels red.
+ out->set_pixel_at(x, y, RGBA_RED | RGBA_ALPHA);
+ same = false;
+ } else {
+ // Set same pixels as faded.
+ uint32_t alpha = base_pixel & RGBA_ALPHA;
+ uint32_t new_pixel = base_pixel - ((alpha / 2) & RGBA_ALPHA);
+ out->set_pixel_at(x, y, new_pixel);
+ }
+ }
+ }
+
+ return same;
+}
+
+int DiffImages(const base::FilePath& file1, const base::FilePath& file2,
+ const base::FilePath& out_file) {
+ Image actual_image;
+ Image baseline_image;
+
+ if (!actual_image.CreateFromFilename(file1)) {
+ fprintf(stderr, "image_diff: Unable to open file \"%" PRFilePath "\"\n",
+ file1.value().c_str());
+ return kStatusError;
+ }
+ if (!baseline_image.CreateFromFilename(file2)) {
+ fprintf(stderr, "image_diff: Unable to open file \"%" PRFilePath "\"\n",
+ file2.value().c_str());
+ return kStatusError;
+ }
+
+ Image diff_image;
+ bool same = CreateImageDiff(baseline_image, actual_image, &diff_image);
+ if (same)
+ return kStatusSame;
+
+ std::vector<unsigned char> png_encoding;
+ image_diff_png::EncodeRGBAPNG(
+ diff_image.data(), diff_image.w(), diff_image.h(),
+ diff_image.w() * 4, &png_encoding);
+ if (base::WriteFile(out_file,
+ reinterpret_cast<char*>(&png_encoding.front()),
+ base::checked_cast<int>(png_encoding.size())) < 0)
+ return kStatusError;
+
+ return kStatusDifferent;
+}
+
+// It isn't strictly correct to only support ASCII paths, but this
+// program reads paths on stdin and the program that spawns it outputs
+// paths as non-wide strings anyway.
+base::FilePath FilePathFromASCII(const std::string& str) {
+#if defined(OS_WIN)
+ return base::FilePath(base::ASCIIToUTF16(str));
+#else
+ return base::FilePath(str);
+#endif
+}
+
+int main(int argc, const char* argv[]) {
+ base::EnableTerminationOnHeapCorruption();
+ base::CommandLine::Init(argc, argv);
+ const base::CommandLine& parsed_command_line =
+ *base::CommandLine::ForCurrentProcess();
+ bool histograms = parsed_command_line.HasSwitch(kOptionCompareHistograms);
+ if (parsed_command_line.HasSwitch(kOptionPollStdin)) {
+ // Watch stdin for filenames.
+ std::string stdin_buffer;
+ base::FilePath filename1;
+ while (std::getline(std::cin, stdin_buffer)) {
+ if (stdin_buffer.empty())
+ continue;
+
+ if (!filename1.empty()) {
+ // CompareImages writes results to stdout unless an error occurred.
+ base::FilePath filename2 = FilePathFromASCII(stdin_buffer);
+ if (CompareImages(filename1, filename2, histograms) == kStatusError)
+ printf("error\n");
+ fflush(stdout);
+ filename1 = base::FilePath();
+ } else {
+ // Save the first filename in another buffer and wait for the second
+ // filename to arrive via stdin.
+ filename1 = FilePathFromASCII(stdin_buffer);
+ }
+ }
+ return 0;
+ }
+
+ const base::CommandLine::StringVector& args = parsed_command_line.GetArgs();
+ if (parsed_command_line.HasSwitch(kOptionGenerateDiff)) {
+ if (args.size() == 3) {
+ return DiffImages(base::FilePath(args[0]),
+ base::FilePath(args[1]),
+ base::FilePath(args[2]));
+ }
+ } else if (args.size() == 2) {
+ return CompareImages(
+ base::FilePath(args[0]), base::FilePath(args[1]), histograms);
+ }
+
+ PrintHelp();
+ return kStatusError;
+}
diff --git a/chromium/tools/imagediff/image_diff_png.cc b/chromium/tools/imagediff/image_diff_png.cc
new file mode 100644
index 00000000000..7524026c192
--- /dev/null
+++ b/chromium/tools/imagediff/image_diff_png.cc
@@ -0,0 +1,643 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/imagediff/image_diff_png.h"
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "base/logging.h"
+#include "build/build_config.h"
+#include "third_party/libpng/png.h"
+#include "third_party/zlib/zlib.h"
+
+namespace image_diff_png {
+
+// This is a duplicate of ui/gfx/codec/png_codec.cc, after removing code related
+// to Skia, that we can use when running layout tests with minimal dependencies.
+namespace {
+
+enum ColorFormat {
+ // 3 bytes per pixel (packed), in RGB order regardless of endianness.
+ // This is the native JPEG format.
+ FORMAT_RGB,
+
+ // 4 bytes per pixel, in RGBA order in memory regardless of endianness.
+ FORMAT_RGBA,
+
+ // 4 bytes per pixel, in BGRA order in memory regardless of endianness.
+ // This is the default Windows DIB order.
+ FORMAT_BGRA,
+
+ // 4 bytes per pixel, in pre-multiplied kARGB_8888_Config format. For use
+ // with directly writing to a skia bitmap.
+ FORMAT_SkBitmap
+};
+
+// Represents a comment in the tEXt ancillary chunk of the png.
+struct Comment {
+ std::string key;
+ std::string text;
+};
+
+// Converts BGRA->RGBA and RGBA->BGRA.
+void ConvertBetweenBGRAandRGBA(const unsigned char* input, int pixel_width,
+ unsigned char* output, bool* is_opaque) {
+ for (int x = 0; x < pixel_width; x++) {
+ const unsigned char* pixel_in = &input[x * 4];
+ unsigned char* pixel_out = &output[x * 4];
+ pixel_out[0] = pixel_in[2];
+ pixel_out[1] = pixel_in[1];
+ pixel_out[2] = pixel_in[0];
+ pixel_out[3] = pixel_in[3];
+ }
+}
+
+void ConvertRGBAtoRGB(const unsigned char* rgba, int pixel_width,
+ unsigned char* rgb, bool* is_opaque) {
+ for (int x = 0; x < pixel_width; x++) {
+ const unsigned char* pixel_in = &rgba[x * 4];
+ unsigned char* pixel_out = &rgb[x * 3];
+ pixel_out[0] = pixel_in[0];
+ pixel_out[1] = pixel_in[1];
+ pixel_out[2] = pixel_in[2];
+ }
+}
+
+} // namespace
+
+// Decoder --------------------------------------------------------------------
+//
+// This code is based on WebKit libpng interface (PNGImageDecoder), which is
+// in turn based on the Mozilla png decoder.
+
+namespace {
+
+// Gamma constants: We assume we're on Windows which uses a gamma of 2.2.
+const double kMaxGamma = 21474.83; // Maximum gamma accepted by png library.
+const double kDefaultGamma = 2.2;
+const double kInverseGamma = 1.0 / kDefaultGamma;
+
+class PngDecoderState {
+ public:
+ // Output is a vector<unsigned char>.
+ PngDecoderState(ColorFormat ofmt, std::vector<unsigned char>* o)
+ : output_format(ofmt),
+ output_channels(0),
+ is_opaque(true),
+ output(o),
+ row_converter(NULL),
+ width(0),
+ height(0),
+ done(false) {
+ }
+
+ ColorFormat output_format;
+ int output_channels;
+
+ // Used during the reading of an SkBitmap. Defaults to true until we see a
+ // pixel with anything other than an alpha of 255.
+ bool is_opaque;
+
+ // An intermediary buffer for decode output.
+ std::vector<unsigned char>* output;
+
+ // Called to convert a row from the library to the correct output format.
+ // When NULL, no conversion is necessary.
+ void (*row_converter)(const unsigned char* in, int w, unsigned char* out,
+ bool* is_opaque);
+
+ // Size of the image, set in the info callback.
+ int width;
+ int height;
+
+ // Set to true when we've found the end of the data.
+ bool done;
+};
+
+void ConvertRGBtoRGBA(const unsigned char* rgb, int pixel_width,
+ unsigned char* rgba, bool* is_opaque) {
+ for (int x = 0; x < pixel_width; x++) {
+ const unsigned char* pixel_in = &rgb[x * 3];
+ unsigned char* pixel_out = &rgba[x * 4];
+ pixel_out[0] = pixel_in[0];
+ pixel_out[1] = pixel_in[1];
+ pixel_out[2] = pixel_in[2];
+ pixel_out[3] = 0xff;
+ }
+}
+
+void ConvertRGBtoBGRA(const unsigned char* rgb, int pixel_width,
+ unsigned char* bgra, bool* is_opaque) {
+ for (int x = 0; x < pixel_width; x++) {
+ const unsigned char* pixel_in = &rgb[x * 3];
+ unsigned char* pixel_out = &bgra[x * 4];
+ pixel_out[0] = pixel_in[2];
+ pixel_out[1] = pixel_in[1];
+ pixel_out[2] = pixel_in[0];
+ pixel_out[3] = 0xff;
+ }
+}
+
+// Called when the png header has been read. This code is based on the WebKit
+// PNGImageDecoder
+void DecodeInfoCallback(png_struct* png_ptr, png_info* info_ptr) {
+ PngDecoderState* state = static_cast<PngDecoderState*>(
+ png_get_progressive_ptr(png_ptr));
+
+ int bit_depth, color_type, interlace_type, compression_type;
+ int filter_type, channels;
+ png_uint_32 w, h;
+ png_get_IHDR(png_ptr, info_ptr, &w, &h, &bit_depth, &color_type,
+ &interlace_type, &compression_type, &filter_type);
+
+ // Bounds check. When the image is unreasonably big, we'll error out and
+ // end up back at the setjmp call when we set up decoding. "Unreasonably big"
+ // means "big enough that w * h * 32bpp might overflow an int"; we choose this
+ // threshold to match WebKit and because a number of places in code assume
+ // that an image's size (in bytes) fits in a (signed) int.
+ unsigned long long total_size =
+ static_cast<unsigned long long>(w) * static_cast<unsigned long long>(h);
+ if (total_size > ((1 << 29) - 1))
+ longjmp(png_jmpbuf(png_ptr), 1);
+ state->width = static_cast<int>(w);
+ state->height = static_cast<int>(h);
+
+ // Expand to ensure we use 24-bit for RGB and 32-bit for RGBA.
+ if (color_type == PNG_COLOR_TYPE_PALETTE ||
+ (color_type == PNG_COLOR_TYPE_GRAY && bit_depth < 8))
+ png_set_expand(png_ptr);
+
+ // Transparency for paletted images.
+ if (png_get_valid(png_ptr, info_ptr, PNG_INFO_tRNS))
+ png_set_expand(png_ptr);
+
+ // Convert 16-bit to 8-bit.
+ if (bit_depth == 16)
+ png_set_strip_16(png_ptr);
+
+ // Expand grayscale to RGB.
+ if (color_type == PNG_COLOR_TYPE_GRAY ||
+ color_type == PNG_COLOR_TYPE_GRAY_ALPHA)
+ png_set_gray_to_rgb(png_ptr);
+
+ // Deal with gamma and keep it under our control.
+ double gamma;
+ if (png_get_gAMA(png_ptr, info_ptr, &gamma)) {
+ if (gamma <= 0.0 || gamma > kMaxGamma) {
+ gamma = kInverseGamma;
+ png_set_gAMA(png_ptr, info_ptr, gamma);
+ }
+ png_set_gamma(png_ptr, kDefaultGamma, gamma);
+ } else {
+ png_set_gamma(png_ptr, kDefaultGamma, kInverseGamma);
+ }
+
+ // Tell libpng to send us rows for interlaced pngs.
+ if (interlace_type == PNG_INTERLACE_ADAM7)
+ png_set_interlace_handling(png_ptr);
+
+ // Update our info now
+ png_read_update_info(png_ptr, info_ptr);
+ channels = png_get_channels(png_ptr, info_ptr);
+
+ // Pick our row format converter necessary for this data.
+ if (channels == 3) {
+ switch (state->output_format) {
+ case FORMAT_RGB:
+ state->row_converter = NULL; // no conversion necessary
+ state->output_channels = 3;
+ break;
+ case FORMAT_RGBA:
+ state->row_converter = &ConvertRGBtoRGBA;
+ state->output_channels = 4;
+ break;
+ case FORMAT_BGRA:
+ state->row_converter = &ConvertRGBtoBGRA;
+ state->output_channels = 4;
+ break;
+ default:
+ NOTREACHED() << "Unknown output format";
+ break;
+ }
+ } else if (channels == 4) {
+ switch (state->output_format) {
+ case FORMAT_RGB:
+ state->row_converter = &ConvertRGBAtoRGB;
+ state->output_channels = 3;
+ break;
+ case FORMAT_RGBA:
+ state->row_converter = NULL; // no conversion necessary
+ state->output_channels = 4;
+ break;
+ case FORMAT_BGRA:
+ state->row_converter = &ConvertBetweenBGRAandRGBA;
+ state->output_channels = 4;
+ break;
+ default:
+ NOTREACHED() << "Unknown output format";
+ break;
+ }
+ } else {
+ NOTREACHED() << "Unknown input channels";
+ longjmp(png_jmpbuf(png_ptr), 1);
+ }
+
+ state->output->resize(
+ state->width * state->output_channels * state->height);
+}
+
+void DecodeRowCallback(png_struct* png_ptr, png_byte* new_row,
+ png_uint_32 row_num, int pass) {
+ PngDecoderState* state = static_cast<PngDecoderState*>(
+ png_get_progressive_ptr(png_ptr));
+
+ DCHECK(pass == 0);
+ if (static_cast<int>(row_num) > state->height) {
+ NOTREACHED() << "Invalid row";
+ return;
+ }
+
+ unsigned char* base = NULL;
+ base = &state->output->front();
+
+ unsigned char* dest = &base[state->width * state->output_channels * row_num];
+ if (state->row_converter)
+ state->row_converter(new_row, state->width, dest, &state->is_opaque);
+ else
+ memcpy(dest, new_row, state->width * state->output_channels);
+}
+
+void DecodeEndCallback(png_struct* png_ptr, png_info* info) {
+ PngDecoderState* state = static_cast<PngDecoderState*>(
+ png_get_progressive_ptr(png_ptr));
+
+ // Mark the image as complete, this will tell the Decode function that we
+ // have successfully found the end of the data.
+ state->done = true;
+}
+
+// Automatically destroys the given read structs on destruction to make
+// cleanup and error handling code cleaner.
+class PngReadStructDestroyer {
+ public:
+ PngReadStructDestroyer(png_struct** ps, png_info** pi) : ps_(ps), pi_(pi) {
+ }
+ ~PngReadStructDestroyer() {
+ png_destroy_read_struct(ps_, pi_, NULL);
+ }
+ private:
+ png_struct** ps_;
+ png_info** pi_;
+};
+
+bool BuildPNGStruct(const unsigned char* input, size_t input_size,
+ png_struct** png_ptr, png_info** info_ptr) {
+ if (input_size < 8)
+ return false; // Input data too small to be a png
+
+ // Have libpng check the signature, it likes the first 8 bytes.
+ if (png_sig_cmp(const_cast<unsigned char*>(input), 0, 8) != 0)
+ return false;
+
+ *png_ptr = png_create_read_struct(PNG_LIBPNG_VER_STRING, NULL, NULL, NULL);
+ if (!*png_ptr)
+ return false;
+
+ *info_ptr = png_create_info_struct(*png_ptr);
+ if (!*info_ptr) {
+ png_destroy_read_struct(png_ptr, NULL, NULL);
+ return false;
+ }
+
+ return true;
+}
+
+} // namespace
+
+// static
+bool Decode(const unsigned char* input, size_t input_size,
+ ColorFormat format, std::vector<unsigned char>* output,
+ int* w, int* h) {
+ png_struct* png_ptr = NULL;
+ png_info* info_ptr = NULL;
+ if (!BuildPNGStruct(input, input_size, &png_ptr, &info_ptr))
+ return false;
+
+ PngReadStructDestroyer destroyer(&png_ptr, &info_ptr);
+ if (setjmp(png_jmpbuf(png_ptr))) {
+ // The destroyer will ensure that the structures are cleaned up in this
+ // case, even though we may get here as a jump from random parts of the
+ // PNG library called below.
+ return false;
+ }
+
+ PngDecoderState state(format, output);
+
+ png_set_progressive_read_fn(png_ptr, &state, &DecodeInfoCallback,
+ &DecodeRowCallback, &DecodeEndCallback);
+ png_process_data(png_ptr,
+ info_ptr,
+ const_cast<unsigned char*>(input),
+ input_size);
+
+ if (!state.done) {
+ // Fed it all the data but the library didn't think we got all the data, so
+ // this file must be truncated.
+ output->clear();
+ return false;
+ }
+
+ *w = state.width;
+ *h = state.height;
+ return true;
+}
+
+// Encoder --------------------------------------------------------------------
+//
+// This section of the code is based on nsPNGEncoder.cpp in Mozilla
+// (Copyright 2005 Google Inc.)
+
+namespace {
+
+// Passed around as the io_ptr in the png structs so our callbacks know where
+// to write data.
+struct PngEncoderState {
+ explicit PngEncoderState(std::vector<unsigned char>* o) : out(o) {}
+ std::vector<unsigned char>* out;
+};
+
+// Called by libpng to flush its internal buffer to ours.
+void EncoderWriteCallback(png_structp png, png_bytep data, png_size_t size) {
+ PngEncoderState* state = static_cast<PngEncoderState*>(png_get_io_ptr(png));
+ DCHECK(state->out);
+
+ size_t old_size = state->out->size();
+ state->out->resize(old_size + size);
+ memcpy(&(*state->out)[old_size], data, size);
+}
+
+void FakeFlushCallback(png_structp png) {
+ // We don't need to perform any flushing since we aren't doing real IO, but
+ // we're required to provide this function by libpng.
+}
+
+void ConvertBGRAtoRGB(const unsigned char* bgra, int pixel_width,
+ unsigned char* rgb, bool* is_opaque) {
+ for (int x = 0; x < pixel_width; x++) {
+ const unsigned char* pixel_in = &bgra[x * 4];
+ unsigned char* pixel_out = &rgb[x * 3];
+ pixel_out[0] = pixel_in[2];
+ pixel_out[1] = pixel_in[1];
+ pixel_out[2] = pixel_in[0];
+ }
+}
+
+#ifdef PNG_TEXT_SUPPORTED
+
+inline char* strdup(const char* str) {
+#if defined(OS_WIN)
+ return _strdup(str);
+#else
+ return ::strdup(str);
+#endif
+}
+
+class CommentWriter {
+ public:
+ explicit CommentWriter(const std::vector<Comment>& comments)
+ : comments_(comments),
+ png_text_(new png_text[comments.size()]) {
+ for (size_t i = 0; i < comments.size(); ++i)
+ AddComment(i, comments[i]);
+ }
+
+ ~CommentWriter() {
+ for (size_t i = 0; i < comments_.size(); ++i) {
+ free(png_text_[i].key);
+ free(png_text_[i].text);
+ }
+ delete [] png_text_;
+ }
+
+ bool HasComments() {
+ return !comments_.empty();
+ }
+
+ png_text* get_png_text() {
+ return png_text_;
+ }
+
+ int size() {
+ return static_cast<int>(comments_.size());
+ }
+
+ private:
+ void AddComment(size_t pos, const Comment& comment) {
+ png_text_[pos].compression = PNG_TEXT_COMPRESSION_NONE;
+ // A PNG comment's key can only be 79 characters long.
+ DCHECK(comment.key.length() < 79);
+ png_text_[pos].key = strdup(comment.key.substr(0, 78).c_str());
+ png_text_[pos].text = strdup(comment.text.c_str());
+ png_text_[pos].text_length = comment.text.length();
+#ifdef PNG_iTXt_SUPPORTED
+ png_text_[pos].itxt_length = 0;
+ png_text_[pos].lang = 0;
+ png_text_[pos].lang_key = 0;
+#endif
+ }
+
+ const std::vector<Comment> comments_;
+ png_text* png_text_;
+};
+#endif // PNG_TEXT_SUPPORTED
+
+// The type of functions usable for converting between pixel formats.
+typedef void (*FormatConverter)(const unsigned char* in, int w,
+ unsigned char* out, bool* is_opaque);
+
+// libpng uses a wacky setjmp-based API, which makes the compiler nervous.
+// We constrain all of the calls we make to libpng where the setjmp() is in
+// place to this function.
+// Returns true on success.
+bool DoLibpngWrite(png_struct* png_ptr, png_info* info_ptr,
+ PngEncoderState* state,
+ int width, int height, int row_byte_width,
+ const unsigned char* input, int compression_level,
+ int png_output_color_type, int output_color_components,
+ FormatConverter converter,
+ const std::vector<Comment>& comments) {
+#ifdef PNG_TEXT_SUPPORTED
+ CommentWriter comment_writer(comments);
+#endif
+ unsigned char* row_buffer = NULL;
+
+ // Make sure to not declare any locals here -- locals in the presence
+ // of setjmp() in C++ code makes gcc complain.
+
+ if (setjmp(png_jmpbuf(png_ptr))) {
+ delete[] row_buffer;
+ return false;
+ }
+
+ png_set_compression_level(png_ptr, compression_level);
+
+ // Set our callback for libpng to give us the data.
+ png_set_write_fn(png_ptr, state, EncoderWriteCallback, FakeFlushCallback);
+
+ png_set_IHDR(png_ptr, info_ptr, width, height, 8, png_output_color_type,
+ PNG_INTERLACE_NONE, PNG_COMPRESSION_TYPE_DEFAULT,
+ PNG_FILTER_TYPE_DEFAULT);
+
+#ifdef PNG_TEXT_SUPPORTED
+ if (comment_writer.HasComments()) {
+ png_set_text(png_ptr, info_ptr, comment_writer.get_png_text(),
+ comment_writer.size());
+ }
+#endif
+
+ png_write_info(png_ptr, info_ptr);
+
+ if (!converter) {
+ // No conversion needed, give the data directly to libpng.
+ for (int y = 0; y < height; y ++) {
+ png_write_row(png_ptr,
+ const_cast<unsigned char*>(&input[y * row_byte_width]));
+ }
+ } else {
+ // Needs conversion using a separate buffer.
+ row_buffer = new unsigned char[width * output_color_components];
+ for (int y = 0; y < height; y ++) {
+ converter(&input[y * row_byte_width], width, row_buffer, NULL);
+ png_write_row(png_ptr, row_buffer);
+ }
+ delete[] row_buffer;
+ }
+
+ png_write_end(png_ptr, info_ptr);
+ return true;
+}
+
+} // namespace
+
+// static
+bool EncodeWithCompressionLevel(const unsigned char* input, ColorFormat format,
+ const int width, const int height,
+ int row_byte_width,
+ bool discard_transparency,
+ const std::vector<Comment>& comments,
+ int compression_level,
+ std::vector<unsigned char>* output) {
+ // Run to convert an input row into the output row format, NULL means no
+ // conversion is necessary.
+ FormatConverter converter = NULL;
+
+ int input_color_components, output_color_components;
+ int png_output_color_type;
+ switch (format) {
+ case FORMAT_RGB:
+ input_color_components = 3;
+ output_color_components = 3;
+ png_output_color_type = PNG_COLOR_TYPE_RGB;
+ discard_transparency = false;
+ break;
+
+ case FORMAT_RGBA:
+ input_color_components = 4;
+ if (discard_transparency) {
+ output_color_components = 3;
+ png_output_color_type = PNG_COLOR_TYPE_RGB;
+ converter = ConvertRGBAtoRGB;
+ } else {
+ output_color_components = 4;
+ png_output_color_type = PNG_COLOR_TYPE_RGB_ALPHA;
+ converter = NULL;
+ }
+ break;
+
+ case FORMAT_BGRA:
+ input_color_components = 4;
+ if (discard_transparency) {
+ output_color_components = 3;
+ png_output_color_type = PNG_COLOR_TYPE_RGB;
+ converter = ConvertBGRAtoRGB;
+ } else {
+ output_color_components = 4;
+ png_output_color_type = PNG_COLOR_TYPE_RGB_ALPHA;
+ converter = ConvertBetweenBGRAandRGBA;
+ }
+ break;
+
+ default:
+ NOTREACHED() << "Unknown pixel format";
+ return false;
+ }
+
+ // Row stride should be at least as long as the length of the data.
+ DCHECK(input_color_components * width <= row_byte_width);
+
+ png_struct* png_ptr = png_create_write_struct(PNG_LIBPNG_VER_STRING,
+ NULL, NULL, NULL);
+ if (!png_ptr)
+ return false;
+ png_info* info_ptr = png_create_info_struct(png_ptr);
+ if (!info_ptr) {
+ png_destroy_write_struct(&png_ptr, NULL);
+ return false;
+ }
+
+ PngEncoderState state(output);
+ bool success = DoLibpngWrite(png_ptr, info_ptr, &state,
+ width, height, row_byte_width,
+ input, compression_level, png_output_color_type,
+ output_color_components, converter, comments);
+ png_destroy_write_struct(&png_ptr, &info_ptr);
+
+ return success;
+}
+
+// static
+bool Encode(const unsigned char* input, ColorFormat format,
+ const int width, const int height, int row_byte_width,
+ bool discard_transparency,
+ const std::vector<Comment>& comments,
+ std::vector<unsigned char>* output) {
+ return EncodeWithCompressionLevel(input, format, width, height,
+ row_byte_width,
+ discard_transparency,
+ comments, Z_DEFAULT_COMPRESSION,
+ output);
+}
+
+// Decode a PNG into an RGBA pixel array.
+bool DecodePNG(const unsigned char* input, size_t input_size,
+ std::vector<unsigned char>* output,
+ int* width, int* height) {
+ return Decode(input, input_size, FORMAT_RGBA, output, width, height);
+}
+
+// Encode an RGBA pixel array into a PNG.
+bool EncodeRGBAPNG(const unsigned char* input,
+ int width,
+ int height,
+ int row_byte_width,
+ std::vector<unsigned char>* output) {
+ return Encode(input, FORMAT_RGBA,
+ width, height, row_byte_width, false,
+ std::vector<Comment>(), output);
+}
+
+// Encode an BGRA pixel array into a PNG.
+bool EncodeBGRAPNG(const unsigned char* input,
+ int width,
+ int height,
+ int row_byte_width,
+ bool discard_transparency,
+ std::vector<unsigned char>* output) {
+ return Encode(input, FORMAT_BGRA,
+ width, height, row_byte_width, discard_transparency,
+ std::vector<Comment>(), output);
+}
+
+} // image_diff_png
diff --git a/chromium/tools/imagediff/image_diff_png.h b/chromium/tools/imagediff/image_diff_png.h
new file mode 100644
index 00000000000..defa4acf7e5
--- /dev/null
+++ b/chromium/tools/imagediff/image_diff_png.h
@@ -0,0 +1,37 @@
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef TOOLS_IMAGEDIFF_IMAGE_DIFF_PNG_H_
#define TOOLS_IMAGEDIFF_IMAGE_DIFF_PNG_H_

#include <stddef.h>

#include <string>
#include <vector>

// Minimal PNG encode/decode helpers used by the image_diff tool.
namespace image_diff_png {

// Decode a PNG into an RGBA pixel array.  Returns true on success; on
// success |width| and |height| receive the image dimensions.
bool DecodePNG(const unsigned char* input, size_t input_size,
               std::vector<unsigned char>* output,
               int* width, int* height);

// Encode an RGBA pixel array into a PNG.  |row_byte_width| is the stride
// of |input| in bytes.
bool EncodeRGBAPNG(const unsigned char* input,
                   int width,
                   int height,
                   int row_byte_width,
                   std::vector<unsigned char>* output);

// Encode a BGRA pixel array into a PNG.  |discard_transparency| presumably
// drops the alpha channel from the output -- see the implementation.
bool EncodeBGRAPNG(const unsigned char* input,
                   int width,
                   int height,
                   int row_byte_width,
                   bool discard_transparency,
                   std::vector<unsigned char>* output);

} // namespace image_diff_png

#endif // TOOLS_IMAGEDIFF_IMAGE_DIFF_PNG_H_
diff --git a/chromium/tools/include_tracer.py b/chromium/tools/include_tracer.py
new file mode 100755
index 00000000000..567a79765f8
--- /dev/null
+++ b/chromium/tools/include_tracer.py
@@ -0,0 +1,202 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# based on an almost identical script by: jyrki@google.com (Jyrki Alakuijala)
+
+"""Prints out include dependencies in chrome.
+
+Since it ignores defines, it gives just a rough estimation of file size.
+
+Usage:
+ tools/include_tracer.py chrome/browser/ui/browser.h
+"""
+
+import os
+import sys
+
# Created by copying the command line for prerender_browsertest.cc, replacing
# spaces with newlines, and dropping everything except -F and -I switches.
# TODO(port): Add windows, linux directories.
# Paths are relative to the Chromium source root; the empty string makes
# bare include names resolve against the current directory.
INCLUDE_PATHS = [
  '',
  'gpu',
  'skia/config',
  'skia/ext',
  'testing/gmock/include',
  'testing/gtest/include',
  'third_party/WebKit/Source',
  'third_party/WebKit/Source/core',
  'third_party/WebKit/Source/core/accessibility',
  'third_party/WebKit/Source/core/accessibility/chromium',
  'third_party/WebKit/Source/core/bindings',
  'third_party/WebKit/Source/core/bindings/generic',
  'third_party/WebKit/Source/core/bindings/v8',
  'third_party/WebKit/Source/core/bindings/v8/custom',
  'third_party/WebKit/Source/core/bindings/v8/specialization',
  'third_party/WebKit/Source/core/bridge',
  'third_party/WebKit/Source/core/bridge/jni',
  'third_party/WebKit/Source/core/bridge/jni/v8',
  'third_party/WebKit/Source/core/css',
  'third_party/WebKit/Source/core/dom',
  'third_party/WebKit/Source/core/dom/default',
  'third_party/WebKit/Source/core/editing',
  'third_party/WebKit/Source/core/fileapi',
  'third_party/WebKit/Source/core/history',
  'third_party/WebKit/Source/core/html',
  'third_party/WebKit/Source/core/html/canvas',
  'third_party/WebKit/Source/core/html/parser',
  'third_party/WebKit/Source/core/html/shadow',
  'third_party/WebKit/Source/core/inspector',
  'third_party/WebKit/Source/core/loader',
  'third_party/WebKit/Source/core/loader/appcache',
  'third_party/WebKit/Source/core/loader/archive',
  'third_party/WebKit/Source/core/loader/cache',
  'third_party/WebKit/Source/core/loader/icon',
  'third_party/WebKit/Source/core/mathml',
  'third_party/WebKit/Source/core/notifications',
  'third_party/WebKit/Source/core/page',
  'third_party/WebKit/Source/core/page/animation',
  'third_party/WebKit/Source/core/page/chromium',
  'third_party/WebKit/Source/core/platform',
  'third_party/WebKit/Source/core/platform/animation',
  'third_party/WebKit/Source/core/platform/audio',
  'third_party/WebKit/Source/core/platform/audio/chromium',
  'third_party/WebKit/Source/core/platform/audio/mac',
  'third_party/WebKit/Source/core/platform/chromium',
  'third_party/WebKit/Source/core/platform/cocoa',
  'third_party/WebKit/Source/core/platform/graphics',
  'third_party/WebKit/Source/core/platform/graphics/cg',
  'third_party/WebKit/Source/core/platform/graphics/chromium',
  'third_party/WebKit/Source/core/platform/graphics/cocoa',
  'third_party/WebKit/Source/core/platform/graphics/filters',
  'third_party/WebKit/Source/core/platform/graphics/gpu',
  'third_party/WebKit/Source/core/platform/graphics/mac',
  'third_party/WebKit/Source/core/platform/graphics/opentype',
  'third_party/WebKit/Source/core/platform/graphics/skia',
  'third_party/WebKit/Source/core/platform/graphics/transforms',
  'third_party/WebKit/Source/core/platform/image-decoders',
  'third_party/WebKit/Source/core/platform/image-decoders/bmp',
  'third_party/WebKit/Source/core/platform/image-decoders/gif',
  'third_party/WebKit/Source/core/platform/image-decoders/ico',
  'third_party/WebKit/Source/core/platform/image-decoders/jpeg',
  'third_party/WebKit/Source/core/platform/image-decoders/png',
  'third_party/WebKit/Source/core/platform/image-decoders/skia',
  'third_party/WebKit/Source/core/platform/image-decoders/webp',
  'third_party/WebKit/Source/core/platform/image-decoders/xbm',
  'third_party/WebKit/Source/core/platform/image-encoders/skia',
  'third_party/WebKit/Source/core/platform/mac',
  'third_party/WebKit/Source/core/platform/mock',
  'third_party/WebKit/Source/core/platform/network',
  'third_party/WebKit/Source/core/platform/network/chromium',
  'third_party/WebKit/Source/core/platform/sql',
  'third_party/WebKit/Source/core/platform/text',
  'third_party/WebKit/Source/core/platform/text/mac',
  'third_party/WebKit/Source/core/platform/text/transcoder',
  'third_party/WebKit/Source/core/plugins',
  'third_party/WebKit/Source/core/plugins/chromium',
  'third_party/WebKit/Source/core/rendering',
  'third_party/WebKit/Source/core/rendering/style',
  'third_party/WebKit/Source/core/rendering/svg',
  'third_party/WebKit/Source/core/storage',
  'third_party/WebKit/Source/core/storage/chromium',
  'third_party/WebKit/Source/core/svg',
  'third_party/WebKit/Source/core/svg/animation',
  'third_party/WebKit/Source/core/svg/graphics',
  'third_party/WebKit/Source/core/svg/graphics/filters',
  'third_party/WebKit/Source/core/svg/properties',
  'third_party/WebKit/Source/core/webaudio',
  'third_party/WebKit/Source/core/websockets',
  'third_party/WebKit/Source/core/workers',
  'third_party/WebKit/Source/core/xml',
  'third_party/WebKit/Source/public',
  'third_party/WebKit/Source/web',
  'third_party/WebKit/Source/wtf',
  'third_party/cld',
  'third_party/google_toolbox_for_mac/src',
  'third_party/icu/public/common',
  'third_party/icu/public/i18n',
  'third_party/npapi',
  'third_party/npapi/bindings',
  'third_party/protobuf',
  'third_party/protobuf/src',
  'third_party/skia/gpu/include',
  'third_party/skia/include/config',
  'third_party/skia/include/core',
  'third_party/skia/include/effects',
  'third_party/skia/include/gpu',
  'third_party/skia/include/pdf',
  'third_party/skia/include/ports',
  'v8/include',
  'xcodebuild/Debug/include',
  'xcodebuild/DerivedSources/Debug/chrome',
  'xcodebuild/DerivedSources/Debug/policy',
  'xcodebuild/DerivedSources/Debug/protoc_out',
  'xcodebuild/DerivedSources/Debug/webkit',
  'xcodebuild/DerivedSources/Debug/webkit/bindings',
]
+
+
def Walk(seen, filename, parent, indent):
  """Returns the size of |filename| plus the size of all files included by
  |filename| and prints the include tree of |filename| to stdout.

  Every file is visited and counted at most once.

  Args:
    seen: set of filenames already visited; mutated in place.
    filename: include name to process ('<...>' for system headers).
    parent: path of the including file ('' at the root).
    indent: current indentation width in spaces.

  Returns:
    Total size in bytes of all newly visited files.
  """
  total_bytes = 0

  # Translate generated protobuf headers back to their .proto sources.
  if filename.endswith('.pb.h'):
    basename = filename[:-5]
    if os.path.exists(basename + '.proto'):
      filename = basename + '.proto'
    else:
      # Single-argument print() is valid in both Python 2 and 3.
      print('could not find ' + filename)

  # Show and count files only once.
  if filename in seen:
    return total_bytes
  seen.add(filename)

  # Display the paths.
  print(' ' * indent + filename)

  # Skip system includes; they are displayed but never counted.
  if filename[0] == '<':
    return total_bytes

  # Find file in all include paths; first match wins, falling back to the
  # directory of the including file.
  resolved_filename = filename
  for root in INCLUDE_PATHS + [os.path.dirname(parent)]:
    candidate = os.path.join(root, filename)
    if os.path.exists(candidate):
      resolved_filename = candidate
      break

  # Read the file, closing the handle promptly (the original leaked it).
  if os.path.exists(resolved_filename):
    with open(resolved_filename) as f:
      lines = f.readlines()
  else:
    print(' ' * (indent + 2) + '-- not found')
    lines = []

  # Recurse into quoted includes, angle-bracket includes and .proto imports.
  # Guards added so '#include MACRO' or quote-less 'import' lines do not
  # raise IndexError.
  for line in lines:
    line = line.strip()
    if line.startswith('#include "'):
      total_bytes += Walk(
          seen, line.split('"')[1], resolved_filename, indent + 2)
    elif line.startswith('#include ') and '<' in line and '>' in line:
      include = '<' + line.split('<')[1].split('>')[0] + '>'
      total_bytes += Walk(
          seen, include, resolved_filename, indent + 2)
    elif line.startswith('import ') and '"' in line:
      total_bytes += Walk(
          seen, line.split('"')[1], resolved_filename, indent + 2)
  return total_bytes + len("".join(lines))
+
+
def main():
  """Traces the include graph of the file named on the command line.

  Returns:
    1 on usage error, None (exit status 0) otherwise.
  """
  # Guard against a missing argument; the original raised IndexError.
  if len(sys.argv) < 2:
    print('usage: %s <path/to/header.h>' % sys.argv[0])
    return 1
  # Renamed from 'bytes' to avoid shadowing the builtin; single-argument
  # print() calls work identically under Python 2 and 3.
  total_bytes = Walk(set(), sys.argv[1], '', 0)
  print('')
  print('%s megabytes of chrome source' % (float(total_bytes) / (1 << 20)))


if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/ipc_fuzzer/DEPS b/chromium/tools/ipc_fuzzer/DEPS
new file mode 100644
index 00000000000..1af06669a1b
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/DEPS
@@ -0,0 +1,4 @@
+include_rules = [
+ "+third_party/re2",
+ "+tools/ipc_fuzzer/message_lib",
+]
diff --git a/chromium/tools/ipc_fuzzer/OWNERS b/chromium/tools/ipc_fuzzer/OWNERS
new file mode 100644
index 00000000000..2cca4f2232c
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/OWNERS
@@ -0,0 +1,3 @@
+inferno@chromium.org
+mbarbella@chromium.org
+tsepez@chromium.org
diff --git a/chromium/tools/ipc_fuzzer/fuzzer/DEPS b/chromium/tools/ipc_fuzzer/fuzzer/DEPS
new file mode 100644
index 00000000000..64488680f12
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/fuzzer/DEPS
@@ -0,0 +1,3 @@
+include_rules = [
+ "+third_party/mt19937ar",
+]
diff --git a/chromium/tools/ipc_fuzzer/fuzzer/fuzzer.cc b/chromium/tools/ipc_fuzzer/fuzzer/fuzzer.cc
new file mode 100644
index 00000000000..c1fb9d2e446
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/fuzzer/fuzzer.cc
@@ -0,0 +1,2051 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <iostream>
+#include <set>
+#include <string>
+#include <utility>
+#include <vector>
+
+#include "base/macros.h"
+#include "base/memory/shared_memory_handle.h"
+#include "base/strings/string_util.h"
+#include "build/build_config.h"
+#include "ipc/ipc_message.h"
+#include "ipc/ipc_message_utils.h"
+#include "ipc/ipc_switches.h"
+#include "ipc/ipc_sync_channel.h"
+#include "ipc/ipc_sync_message.h"
+#include "tools/ipc_fuzzer/fuzzer/fuzzer.h"
+#include "tools/ipc_fuzzer/fuzzer/rand_util.h"
+#include "tools/ipc_fuzzer/message_lib/message_cracker.h"
+#include "tools/ipc_fuzzer/message_lib/message_file.h"
+
+#if defined(OS_POSIX)
+#include <unistd.h>
+#endif
+
+// First include of all message files to provide basic types.
+#include "tools/ipc_fuzzer/message_lib/all_messages.h"
+#include "tools/ipc_fuzzer/message_lib/all_message_null_macros.h"
+
+#if defined(COMPILER_GCC)
+#define PRETTY_FUNCTION __PRETTY_FUNCTION__
+#elif defined(COMPILER_MSVC)
+#define PRETTY_FUNCTION __FUNCSIG__
+#else
+#define PRETTY_FUNCTION __FUNCTION__
+#endif
+
namespace IPC {
class Message;  // Forward declaration only; the full type is not needed here.
} // namespace IPC

namespace {
// For breaking deep recursion: shared depth counter used by the container
// and structured-value specializations below (generation is cut off once
// the "> 3" checks trip).
int g_depth = 0;
} // namespace

namespace ipc_fuzzer {

// Registry of per-message fuzzing functions; populated elsewhere (not
// visible in this file section).
FuzzerFunctionVector g_function_vector;

// Default policy: do not generate values from scratch (i.e. mutate
// existing ones).  Presumably overridden by generating fuzzers -- confirm.
bool Fuzzer::ShouldGenerate() {
  return false;
}
+
// Partially-specialized class that knows how to handle a given type.
// This primary template is the catch-all: it only reports that the type is
// unsupported and fails.
template <class P>
struct FuzzTraits {
  static bool Fuzz(P* p, Fuzzer *fuzzer) {
    // This is the catch-all for types we don't have enough information
    // to generate.
    std::cerr << "Can't handle " << PRETTY_FUNCTION << "\n";
    return false;
  }
};

// Template function to invoke partially-specialized class method.
template <class P>
static bool FuzzParam(P* p, Fuzzer* fuzzer) {
  return FuzzTraits<P>::Fuzz(p, fuzzer);
}

// Fuzzes |length| consecutive elements starting at |p|, stopping at the
// first element that fails.
template <class P>
static bool FuzzParamArray(P* p, size_t length, Fuzzer* fuzzer) {
  for (size_t i = 0; i < length; i++, p++) {
    if (!FuzzTraits<P>::Fuzz(p, fuzzer))
      return false;
  }
  return true;
}
+
// Specializations to generate primitive types.  Each one forwards to the
// corresponding Fuzzer::Fuzz* primitive and always succeeds.
template <>
struct FuzzTraits<bool> {
  static bool Fuzz(bool* p, Fuzzer* fuzzer) {
    fuzzer->FuzzBool(p);
    return true;
  }
};

template <>
struct FuzzTraits<int> {
  static bool Fuzz(int* p, Fuzzer* fuzzer) {
    fuzzer->FuzzInt(p);
    return true;
  }
};

// NOTE(review): the unsigned/differently-signed variants below reuse the
// signed fuzzers through reinterpret_cast, which assumes matching size and
// representation of the two types on all target platforms.
template <>
struct FuzzTraits<unsigned int> {
  static bool Fuzz(unsigned int* p, Fuzzer* fuzzer) {
    fuzzer->FuzzInt(reinterpret_cast<int*>(p));
    return true;
  }
};

template <>
struct FuzzTraits<long> {
  static bool Fuzz(long* p, Fuzzer* fuzzer) {
    fuzzer->FuzzLong(p);
    return true;
  }
};

template <>
struct FuzzTraits<unsigned long> {
  static bool Fuzz(unsigned long* p, Fuzzer* fuzzer) {
    fuzzer->FuzzLong(reinterpret_cast<long*>(p));
    return true;
  }
};

template <>
struct FuzzTraits<long long> {
  static bool Fuzz(long long* p, Fuzzer* fuzzer) {
    fuzzer->FuzzInt64(reinterpret_cast<int64_t*>(p));
    return true;
  }
};

template <>
struct FuzzTraits<unsigned long long> {
  static bool Fuzz(unsigned long long* p, Fuzzer* fuzzer) {
    fuzzer->FuzzInt64(reinterpret_cast<int64_t*>(p));
    return true;
  }
};

template <>
struct FuzzTraits<short> {
  static bool Fuzz(short* p, Fuzzer* fuzzer) {
    fuzzer->FuzzUInt16(reinterpret_cast<uint16_t*>(p));
    return true;
  }
};

template <>
struct FuzzTraits<unsigned short> {
  static bool Fuzz(unsigned short* p, Fuzzer* fuzzer) {
    fuzzer->FuzzUInt16(reinterpret_cast<uint16_t*>(p));
    return true;
  }
};

template <>
struct FuzzTraits<signed char> {
  static bool Fuzz(signed char* p, Fuzzer* fuzzer) {
    fuzzer->FuzzUChar(reinterpret_cast<unsigned char*>(p));
    return true;
  }
};

template <>
struct FuzzTraits<unsigned char> {
  static bool Fuzz(unsigned char* p, Fuzzer* fuzzer) {
    fuzzer->FuzzUChar(p);
    return true;
  }
};

template <>
struct FuzzTraits<wchar_t> {
  static bool Fuzz(wchar_t* p, Fuzzer* fuzzer) {
    fuzzer->FuzzWChar(p);
    return true;
  }
};

template <>
struct FuzzTraits<float> {
  static bool Fuzz(float* p, Fuzzer* fuzzer) {
    fuzzer->FuzzFloat(p);
    return true;
  }
};

template <>
struct FuzzTraits<double> {
  static bool Fuzz(double* p, Fuzzer* fuzzer) {
    fuzzer->FuzzDouble(p);
    return true;
  }
};

template <>
struct FuzzTraits<std::string> {
  static bool Fuzz(std::string* p, Fuzzer* fuzzer) {
    fuzzer->FuzzString(p);
    return true;
  }
};

template <>
struct FuzzTraits<base::string16> {
  static bool Fuzz(base::string16* p, Fuzzer* fuzzer) {
    fuzzer->FuzzString16(p);
    return true;
  }
};
+
// Specializations for tuples.  Elements are fuzzed in order; the first
// failing element aborts the whole tuple.  The empty tuple trivially
// succeeds.
template <>
struct FuzzTraits<base::Tuple<>> {
  static bool Fuzz(base::Tuple<>* p, Fuzzer* fuzzer) {
    return true;
  }
};

template <class A>
struct FuzzTraits<base::Tuple<A>> {
  static bool Fuzz(base::Tuple<A>* p, Fuzzer* fuzzer) {
    return FuzzParam(&base::get<0>(*p), fuzzer);
  }
};

template <class A, class B>
struct FuzzTraits<base::Tuple<A, B>> {
  static bool Fuzz(base::Tuple<A, B>* p, Fuzzer* fuzzer) {
    return
      FuzzParam(&base::get<0>(*p), fuzzer) &&
      FuzzParam(&base::get<1>(*p), fuzzer);
  }
};

template <class A, class B, class C>
struct FuzzTraits<base::Tuple<A, B, C>> {
  static bool Fuzz(base::Tuple<A, B, C>* p, Fuzzer* fuzzer) {
    return
      FuzzParam(&base::get<0>(*p), fuzzer) &&
      FuzzParam(&base::get<1>(*p), fuzzer) &&
      FuzzParam(&base::get<2>(*p), fuzzer);
  }
};

template <class A, class B, class C, class D>
struct FuzzTraits<base::Tuple<A, B, C, D>> {
  static bool Fuzz(base::Tuple<A, B, C, D>* p, Fuzzer* fuzzer) {
    return
      FuzzParam(&base::get<0>(*p), fuzzer) &&
      FuzzParam(&base::get<1>(*p), fuzzer) &&
      FuzzParam(&base::get<2>(*p), fuzzer) &&
      FuzzParam(&base::get<3>(*p), fuzzer);
  }
};

template <class A, class B, class C, class D, class E>
struct FuzzTraits<base::Tuple<A, B, C, D, E>> {
  static bool Fuzz(base::Tuple<A, B, C, D, E>* p, Fuzzer* fuzzer) {
    return
      FuzzParam(&base::get<0>(*p), fuzzer) &&
      FuzzParam(&base::get<1>(*p), fuzzer) &&
      FuzzParam(&base::get<2>(*p), fuzzer) &&
      FuzzParam(&base::get<3>(*p), fuzzer) &&
      FuzzParam(&base::get<4>(*p), fuzzer);
  }
};
+
// Specializations for containers.
template <class A>
struct FuzzTraits<std::vector<A> > {
  static bool Fuzz(std::vector<A>* p, Fuzzer* fuzzer) {
    // Uses the shared file-level g_depth counter: generated vectors become
    // empty once the nesting depth exceeds 3.
    ++g_depth;
    size_t count = p->size();
    if (fuzzer->ShouldGenerate()) {
      count = g_depth > 3 ? 0 : RandElementCount();
      p->resize(count);
    }
    for (size_t i = 0; i < count; ++i) {
      if (!FuzzParam(&p->at(i), fuzzer)) {
        --g_depth;
        return false;
      }
    }
    --g_depth;
    return true;
  }
};

template <class A>
struct FuzzTraits<std::set<A> > {
  static bool Fuzz(std::set<A>* p, Fuzzer* fuzzer) {
    // Mutation path: set elements are immutable in place, so fuzz copies
    // and rebuild the set from them.
    if (!fuzzer->ShouldGenerate()) {
      std::set<A> result;
      typename std::set<A>::iterator it;
      for (it = p->begin(); it != p->end(); ++it) {
        A item = *it;
        if (!FuzzParam(&item, fuzzer))
          return false;
        result.insert(item);
      }
      *p = result;
      return true;
    }

    // NOTE(review): this function-local static shadows the file-level
    // g_depth, giving each instantiation its own persistent depth counter
    // instead of sharing the global one used by the vector specialization
    // above -- confirm whether that is intentional.
    static int g_depth = 0;
    size_t count = ++g_depth > 3 ? 0 : RandElementCount();
    A a;
    for (size_t i = 0; i < count; ++i) {
      if (!FuzzParam(&a, fuzzer)) {
        --g_depth;
        return false;
      }
      p->insert(a);
    }
    --g_depth;
    return true;
  }
};

template <class A, class B>
struct FuzzTraits<std::map<A, B> > {
  static bool Fuzz(std::map<A, B>* p, Fuzzer* fuzzer) {
    // Mutation path: only mapped values are fuzzed; keys stay untouched.
    if (!fuzzer->ShouldGenerate()) {
      typename std::map<A, B>::iterator it;
      for (it = p->begin(); it != p->end(); ++it) {
        if (!FuzzParam(&it->second, fuzzer))
          return false;
      }
      return true;
    }

    // NOTE(review): shadows the file-level g_depth (see std::set above).
    static int g_depth = 0;
    size_t count = ++g_depth > 3 ? 0 : RandElementCount();
    std::pair<A, B> place_holder;
    for (size_t i = 0; i < count; ++i) {
      if (!FuzzParam(&place_holder, fuzzer)) {
        --g_depth;
        return false;
      }
      p->insert(place_holder);
    }
    --g_depth;
    return true;
  }
};

// Same as above but for maps with custom comparator/allocator parameters.
template <class A, class B, class C, class D>
struct FuzzTraits<std::map<A, B, C, D>> {
  static bool Fuzz(std::map<A, B, C, D>* p, Fuzzer* fuzzer) {
    if (!fuzzer->ShouldGenerate()) {
      typename std::map<A, B, C, D>::iterator it;
      for (it = p->begin(); it != p->end(); ++it) {
        if (!FuzzParam(&it->second, fuzzer))
          return false;
      }
      return true;
    }

    // NOTE(review): shadows the file-level g_depth (see std::set above).
    static int g_depth = 0;
    size_t count = ++g_depth > 3 ? 0 : RandElementCount();
    std::pair<A, B> place_holder;
    for (size_t i = 0; i < count; ++i) {
      if (!FuzzParam(&place_holder, fuzzer)) {
        --g_depth;
        return false;
      }
      p->insert(place_holder);
    }
    --g_depth;
    return true;
  }
};

// Fuzzes both members; fails if either member fails.
template <class A, class B>
struct FuzzTraits<std::pair<A, B> > {
  static bool Fuzz(std::pair<A, B>* p, Fuzzer* fuzzer) {
    return
      FuzzParam(&p->first, fuzzer) &&
      FuzzParam(&p->second, fuzzer);
  }
};
+
// Specializations for hand-coded types.

template <>
struct FuzzTraits<base::FilePath> {
  static bool Fuzz(base::FilePath* p, Fuzzer* fuzzer) {
    // Mutation path: fuzz the underlying string and rebuild the path.
    if (!fuzzer->ShouldGenerate()) {
      base::FilePath::StringType path = p->value();
      if(!FuzzParam(&path, fuzzer))
        return false;
      *p = base::FilePath(path);
      return true;
    }

    // Generation path: up to 59 characters drawn from a set chosen to hit
    // interesting path syntax (separator, dot, home dir, drive colon).
    // sizeof - 1 excludes the trailing NUL from the candidate pool.
    const char path_chars[] = "ACz0/.~:";
    size_t count = RandInRange(60);
    base::FilePath::StringType random_path;
    for (size_t i = 0; i < count; ++i)
      random_path += path_chars[RandInRange(sizeof(path_chars) - 1)];
    *p = base::FilePath(random_path);
    return true;
  }
};

// Fuzzes the error as its underlying int and casts back; the result may
// deliberately fall outside the defined enumerator range.
template <>
struct FuzzTraits<base::File::Error> {
  static bool Fuzz(base::File::Error* p, Fuzzer* fuzzer) {
    int value = static_cast<int>(*p);
    if (!FuzzParam(&value, fuzzer))
      return false;
    *p = static_cast<base::File::Error>(value);
    return true;
  }
};

template <>
struct FuzzTraits<base::File::Info> {
  // Timestamps are round-tripped through double seconds-since-epoch so
  // they can be fuzzed as plain doubles.
  static bool Fuzz(base::File::Info* p, Fuzzer* fuzzer) {
    double last_modified = p->last_modified.ToDoubleT();
    double last_accessed = p->last_accessed.ToDoubleT();
    double creation_time = p->creation_time.ToDoubleT();
    if (!FuzzParam(&p->size, fuzzer))
      return false;
    if (!FuzzParam(&p->is_directory, fuzzer))
      return false;
    if (!FuzzParam(&last_modified, fuzzer))
      return false;
    if (!FuzzParam(&last_accessed, fuzzer))
      return false;
    if (!FuzzParam(&creation_time, fuzzer))
      return false;
    p->last_modified = base::Time::FromDoubleT(last_modified);
    p->last_accessed = base::Time::FromDoubleT(last_accessed);
    p->creation_time = base::Time::FromDoubleT(creation_time);
    return true;
  }
};

// Fuzzes the string and the null flag independently, then reassembles.
template <>
struct FuzzTraits<base::NullableString16> {
  static bool Fuzz(base::NullableString16* p, Fuzzer* fuzzer) {
    base::string16 string = p->string();
    bool is_null = p->is_null();
    if (!FuzzParam(&string, fuzzer))
      return false;
    if (!FuzzParam(&is_null, fuzzer))
      return false;
    *p = base::NullableString16(string, is_null);
    return true;
  }
};
+
#if defined(OS_WIN) || defined(OS_MACOSX)
template <>
struct FuzzTraits<base::SharedMemoryHandle> {
  static bool Fuzz(base::SharedMemoryHandle* p, Fuzzer* fuzzer) {
    // This generates an invalid SharedMemoryHandle. Generating a valid
    // SharedMemoryHandle requires setting/knowing state in both the sending and
    // receiving process, which is not currently possible.
    return true;
  }
};
#endif // defined(OS_WIN) || defined(OS_MACOSX)

// The three time types below are fuzzed through their raw internal integer
// representation and reconstructed with FromInternalValue().
template <>
struct FuzzTraits<base::Time> {
  static bool Fuzz(base::Time* p, Fuzzer* fuzzer) {
    int64_t internal_value = p->ToInternalValue();
    if (!FuzzParam(&internal_value, fuzzer))
      return false;
    *p = base::Time::FromInternalValue(internal_value);
    return true;
  }
};

template <>
struct FuzzTraits<base::TimeDelta> {
  static bool Fuzz(base::TimeDelta* p, Fuzzer* fuzzer) {
    int64_t internal_value = p->ToInternalValue();
    if (!FuzzParam(&internal_value, fuzzer))
      return false;
    *p = base::TimeDelta::FromInternalValue(internal_value);
    return true;
  }
};

template <>
struct FuzzTraits<base::TimeTicks> {
  static bool Fuzz(base::TimeTicks* p, Fuzzer* fuzzer) {
    int64_t internal_value = p->ToInternalValue();
    if (!FuzzParam(&internal_value, fuzzer))
      return false;
    *p = base::TimeTicks::FromInternalValue(internal_value);
    return true;
  }
};
+
template <>
struct FuzzTraits<base::ListValue> {
  // Generates a list of random length containing randomly typed elements.
  static bool Fuzz(base::ListValue* p, Fuzzer* fuzzer) {
    // TODO(mbarbella): Support mutation.
    if (!fuzzer->ShouldGenerate())
      return true;

    ++g_depth;
    size_t list_length = p->GetSize();
    // NOTE(review): ShouldGenerate() was already checked above, so this
    // condition is always true assuming its result is stable per object.
    if (fuzzer->ShouldGenerate())
      list_length = g_depth > 3 ? 0 : RandInRange(8);
    for (size_t index = 0; index < list_length; ++index) {
      // NOTE(review): relies on base::Value::TYPE_* enumerators covering
      // exactly the values 0..7 produced by RandInRange(8) -- confirm.
      switch (RandInRange(8)) {
        case base::Value::TYPE_BOOLEAN: {
          bool tmp;
          p->GetBoolean(index, &tmp);
          fuzzer->FuzzBool(&tmp);
          p->Set(index, new base::FundamentalValue(tmp));
          break;
        }
        case base::Value::TYPE_INTEGER: {
          int tmp;
          p->GetInteger(index, &tmp);
          fuzzer->FuzzInt(&tmp);
          p->Set(index, new base::FundamentalValue(tmp));
          break;
        }
        case base::Value::TYPE_DOUBLE: {
          double tmp;
          p->GetDouble(index, &tmp);
          fuzzer->FuzzDouble(&tmp);
          p->Set(index, new base::FundamentalValue(tmp));
          break;
        }
        case base::Value::TYPE_STRING: {
          std::string tmp;
          p->GetString(index, &tmp);
          fuzzer->FuzzString(&tmp);
          p->Set(index, new base::StringValue(tmp));
          break;
        }
        case base::Value::TYPE_BINARY: {
          char tmp[200];
          size_t bin_length = RandInRange(sizeof(tmp));
          fuzzer->FuzzData(tmp, bin_length);
          p->Set(index,
                 base::BinaryValue::CreateWithCopiedBuffer(tmp, bin_length));
          break;
        }
        case base::Value::TYPE_DICTIONARY: {
          // Recurses; depth is bounded by the g_depth check above.
          base::DictionaryValue* tmp = new base::DictionaryValue();
          p->GetDictionary(index, &tmp);
          FuzzParam(tmp, fuzzer);
          p->Set(index, tmp);
          break;
        }
        case base::Value::TYPE_LIST: {
          base::ListValue* tmp = new base::ListValue();
          p->GetList(index, &tmp);
          FuzzParam(tmp, fuzzer);
          p->Set(index, tmp);
          break;
        }
        case base::Value::TYPE_NULL:
        default:
          break;
      }
    }
    --g_depth;
    return true;
  }
};
+
template <>
struct FuzzTraits<base::DictionaryValue> {
  // Generates a dictionary with fuzzed string keys and randomly typed
  // values; nesting depth is bounded by the shared g_depth counter.
  static bool Fuzz(base::DictionaryValue* p, Fuzzer* fuzzer) {
    // TODO(mbarbella): Support mutation.
    if (!fuzzer->ShouldGenerate())
      return true;

    ++g_depth;
    size_t dict_length = g_depth > 3 ? 0 : RandInRange(8);
    for (size_t index = 0; index < dict_length; ++index) {
      std::string property;
      fuzzer->FuzzString(&property);
      // NOTE(review): relies on base::Value::TYPE_* enumerators covering
      // exactly the values 0..7 produced by RandInRange(8) -- confirm.
      switch (RandInRange(8)) {
        case base::Value::TYPE_BOOLEAN: {
          bool tmp;
          fuzzer->FuzzBool(&tmp);
          p->SetWithoutPathExpansion(property, new base::FundamentalValue(tmp));
          break;
        }
        case base::Value::TYPE_INTEGER: {
          int tmp;
          fuzzer->FuzzInt(&tmp);
          p->SetWithoutPathExpansion(property, new base::FundamentalValue(tmp));
          break;
        }
        case base::Value::TYPE_DOUBLE: {
          double tmp;
          fuzzer->FuzzDouble(&tmp);
          p->SetWithoutPathExpansion(property, new base::FundamentalValue(tmp));
          break;
        }
        case base::Value::TYPE_STRING: {
          std::string tmp;
          fuzzer->FuzzString(&tmp);
          p->SetWithoutPathExpansion(property, new base::StringValue(tmp));
          break;
        }
        case base::Value::TYPE_BINARY: {
          char tmp[200];
          size_t bin_length = RandInRange(sizeof(tmp));
          fuzzer->FuzzData(tmp, bin_length);
          p->SetWithoutPathExpansion(
              property,
              base::BinaryValue::CreateWithCopiedBuffer(tmp, bin_length));
          break;
        }
        case base::Value::TYPE_DICTIONARY: {
          base::DictionaryValue* tmp = new base::DictionaryValue();
          FuzzParam(tmp, fuzzer);
          p->SetWithoutPathExpansion(property, tmp);
          break;
        }
        case base::Value::TYPE_LIST: {
          base::ListValue* tmp = new base::ListValue();
          FuzzParam(tmp, fuzzer);
          p->SetWithoutPathExpansion(property, tmp);
          break;
        }
        case base::Value::TYPE_NULL:
        default:
          break;
      }
    }
    --g_depth;
    return true;
  }
};
+
template <>
struct FuzzTraits<blink::WebGamepad> {
  static bool Fuzz(blink::WebGamepad* p, Fuzzer* fuzzer) {
    if (!FuzzParam(&p->connected, fuzzer))
      return false;
    if (!FuzzParam(&p->timestamp, fuzzer))
      return false;
    // Note: the id and mapping lengths below are local only, while
    // axesLength and buttonsLength are written back into the struct.
    unsigned idLength = static_cast<unsigned>(
        RandInRange(blink::WebGamepad::idLengthCap + 1));
    if (!FuzzParamArray(&p->id[0], idLength, fuzzer))
      return false;
    p->axesLength = static_cast<unsigned>(
        RandInRange(blink::WebGamepad::axesLengthCap + 1));
    if (!FuzzParamArray(&p->axes[0], p->axesLength, fuzzer))
      return false;
    p->buttonsLength = static_cast<unsigned>(
        RandInRange(blink::WebGamepad::buttonsLengthCap + 1));
    if (!FuzzParamArray(&p->buttons[0], p->buttonsLength, fuzzer))
      return false;
    unsigned mappingsLength = static_cast<unsigned>(
      RandInRange(blink::WebGamepad::mappingLengthCap + 1));
    if (!FuzzParamArray(&p->mapping[0], mappingsLength, fuzzer))
      return false;
    return true;
  }
};

// Fuzzes the button's pressed flag and analog value.
template <>
struct FuzzTraits<blink::WebGamepadButton> {
  static bool Fuzz(blink::WebGamepadButton* p, Fuzzer* fuzzer) {
    if (!FuzzParam(&p->pressed, fuzzer))
      return false;
    if (!FuzzParam(&p->value, fuzzer))
      return false;
    return true;
  }
};
+
template <>
struct FuzzTraits<cc::CompositorFrame> {
  // Generates a frame with either delegated data, GL data, or no data.
  static bool Fuzz(cc::CompositorFrame* p, Fuzzer* fuzzer) {
    // TODO(mbarbella): Support mutation.
    if (!fuzzer->ShouldGenerate())
      return true;

    if (!FuzzParam(&p->metadata, fuzzer))
      return false;

    switch (RandInRange(3)) {
      case 0: {
        p->delegated_frame_data.reset(new cc::DelegatedFrameData());
        if (!FuzzParam(p->delegated_frame_data.get(), fuzzer))
          return false;
        return true;
      }
      case 1: {
        p->gl_frame_data.reset(new cc::GLFrameData());
        if (!FuzzParam(p->gl_frame_data.get(), fuzzer))
          return false;
        return true;
      }
      default:
        // Fuzz nothing to handle the no frame case.
        return true;
    }
  }
};

template <>
struct FuzzTraits<cc::CompositorFrameAck> {
  static bool Fuzz(cc::CompositorFrameAck* p, Fuzzer* fuzzer) {
    if (!FuzzParam(&p->resources, fuzzer))
      return false;

    // Always ensures gl_frame_data exists before fuzzing it.
    if (!p->gl_frame_data)
      p->gl_frame_data.reset(new cc::GLFrameData);
    if (!FuzzParam(p->gl_frame_data.get(), fuzzer))
      return false;
    return true;
  }
};

// Fuzzes each member of the delegated frame in turn.
template <>
struct FuzzTraits<cc::DelegatedFrameData> {
  static bool Fuzz(cc::DelegatedFrameData* p, Fuzzer* fuzzer) {
    if (!FuzzParam(&p->device_scale_factor, fuzzer))
      return false;
    if (!FuzzParam(&p->resource_list, fuzzer))
      return false;
    if (!FuzzParam(&p->render_pass_list, fuzzer))
      return false;
    return true;
  }
};
+
// Intentionally inert placeholder: list containers are accepted but left
// unmodified for now.
template <class A>
struct FuzzTraits<cc::ListContainer<A>> {
  static bool Fuzz(cc::ListContainer<A>* p, Fuzzer* fuzzer) {
    // TODO(mbarbella): This should actually do something.
    return true;
  }
};

// Intentionally inert placeholder, same as ListContainer above.
template <>
struct FuzzTraits<cc::QuadList> {
  static bool Fuzz(cc::QuadList* p, Fuzzer* fuzzer) {
    // TODO(mbarbella): This should actually do something.
    return true;
  }
};

template <>
struct FuzzTraits<cc::RenderPass> {
  static bool Fuzz(cc::RenderPass* p, Fuzzer* fuzzer) {
    if (!FuzzParam(&p->id, fuzzer))
      return false;
    if (!FuzzParam(&p->output_rect, fuzzer))
      return false;
    if (!FuzzParam(&p->damage_rect, fuzzer))
      return false;
    if (!FuzzParam(&p->transform_to_root_target, fuzzer))
      return false;
    if (!FuzzParam(&p->has_transparent_background, fuzzer))
      return false;
    if (!FuzzParam(&p->quad_list, fuzzer))
      return false;
    if (!FuzzParam(&p->shared_quad_state_list, fuzzer))
      return false;
    // Omitting |copy_requests| as it is not sent over IPC.
    return true;
  }
};

template <>
struct FuzzTraits<cc::RenderPassList> {
  static bool Fuzz(cc::RenderPassList* p, Fuzzer* fuzzer) {
    // Mutation path: fuzz the passes already in the list in place.
    if (!fuzzer->ShouldGenerate()) {
      for (size_t i = 0; i < p->size(); ++i) {
        if (!FuzzParam(p->at(i).get(), fuzzer))
          return false;
      }
      return true;
    }

    // Generation path: append a random number of freshly created passes.
    size_t count = RandElementCount();
    for (size_t i = 0; i < count; ++i) {
      std::unique_ptr<cc::RenderPass> render_pass = cc::RenderPass::Create();
      if (!FuzzParam(render_pass.get(), fuzzer))
        return false;
      p->push_back(std::move(render_pass));
    }
    return true;
  }
};
+
+template <>
+struct FuzzTraits<content::IndexedDBKey> {
+ static bool Fuzz(content::IndexedDBKey* p, Fuzzer* fuzzer) {
+ // TODO(mbarbella): Support mutation.
+ if (!fuzzer->ShouldGenerate())
+ return true;
+
+ ++g_depth;
+ blink::WebIDBKeyType web_type =
+ static_cast<blink::WebIDBKeyType>(RandInRange(7));
+ switch (web_type) {
+ case blink::WebIDBKeyTypeArray: {
+ size_t length = g_depth > 3 ? 0 : RandInRange(4);
+ std::vector<content::IndexedDBKey> array;
+ array.resize(length);
+ for (size_t i = 0; i < length; ++i) {
+ if (!FuzzParam(&array[i], fuzzer)) {
+ --g_depth;
+ return false;
+ }
+ }
+ *p = content::IndexedDBKey(array);
+ return true;
+ }
+ case blink::WebIDBKeyTypeBinary: {
+ std::string binary;
+ if (!FuzzParam(&binary, fuzzer)) {
+ --g_depth;
+ return false;
+ }
+ *p = content::IndexedDBKey(binary);
+ return true;
+ }
+ case blink::WebIDBKeyTypeString: {
+ base::string16 string;
+ if (!FuzzParam(&string, fuzzer))
+ return false;
+ *p = content::IndexedDBKey(string);
+ return true;
+ }
+ case blink::WebIDBKeyTypeDate:
+ case blink::WebIDBKeyTypeNumber: {
+ double number;
+ if (!FuzzParam(&number, fuzzer)) {
+ --g_depth;
+ return false;
+ }
+ *p = content::IndexedDBKey(number, web_type);
+ return true;
+ }
+ case blink::WebIDBKeyTypeInvalid:
+ case blink::WebIDBKeyTypeNull: {
+ *p = content::IndexedDBKey(web_type);
+ return true;
+ }
+ default: {
+ NOTREACHED();
+ --g_depth;
+ return false;
+ }
+ }
+ }
+};
+
template <>
struct FuzzTraits<content::IndexedDBKeyRange> {
  // Fuzzes the four components independently and reassembles the range.
  static bool Fuzz(content::IndexedDBKeyRange* p, Fuzzer* fuzzer) {
    content::IndexedDBKey lower = p->lower();
    content::IndexedDBKey upper = p->upper();
    bool lower_open = p->lower_open();
    bool upper_open = p->upper_open();
    if (!FuzzParam(&lower, fuzzer))
      return false;
    if (!FuzzParam(&upper, fuzzer))
      return false;
    if (!FuzzParam(&lower_open, fuzzer))
      return false;
    if (!FuzzParam(&upper_open, fuzzer))
      return false;
    *p = content::IndexedDBKeyRange(lower, upper, lower_open, upper_open);
    return true;
  }
};

template <>
struct FuzzTraits<content::IndexedDBKeyPath> {
  // Generates one of the three key-path variants: string array, single
  // string, or empty path.
  static bool Fuzz(content::IndexedDBKeyPath* p, Fuzzer* fuzzer) {
    // TODO(mbarbella): Support mutation.
    if (!fuzzer->ShouldGenerate())
      return true;

    switch (RandInRange(3)) {
      case 0: {
        std::vector<base::string16> array;
        if (!FuzzParam(&array, fuzzer))
          return false;
        *p = content::IndexedDBKeyPath(array);
        break;
      }
      case 1: {
        base::string16 string;
        if (!FuzzParam(&string, fuzzer))
          return false;
        *p = content::IndexedDBKeyPath(string);
        break;
      }
      case 2: {
        *p = content::IndexedDBKeyPath();
        break;
      }
    }
    return true;
  }
};

// Round-trips the state through its encoded string form so it can be
// fuzzed as a plain std::string.
template <>
struct FuzzTraits<content::PageState> {
  static bool Fuzz(content::PageState* p, Fuzzer* fuzzer) {
    std::string data = p->ToEncodedData();
    if (!FuzzParam(&data, fuzzer))
      return false;
    *p = content::PageState::CreateFromEncodedData(data);
    return true;
  }
};
+
+template <>
+struct FuzzTraits<content::SyntheticGesturePacket> {
+ static bool Fuzz(content::SyntheticGesturePacket* p,
+ Fuzzer* fuzzer) {
+ // TODO(mbarbella): Support mutation.
+ if (!fuzzer->ShouldGenerate())
+ return true;
+
+ std::unique_ptr<content::SyntheticGestureParams> gesture_params;
+ switch (RandInRange(
+ content::SyntheticGestureParams::SYNTHETIC_GESTURE_TYPE_MAX + 1)) {
+ case content::SyntheticGestureParams::GestureType::
+ SMOOTH_SCROLL_GESTURE: {
+ content::SyntheticSmoothScrollGestureParams* params =
+ new content::SyntheticSmoothScrollGestureParams();
+ if (!FuzzParam(&params->anchor, fuzzer))
+ return false;
+ if (!FuzzParam(&params->distances, fuzzer))
+ return false;
+ if (!FuzzParam(&params->prevent_fling, fuzzer))
+ return false;
+ if (!FuzzParam(&params->speed_in_pixels_s, fuzzer))
+ return false;
+ gesture_params.reset(params);
+ break;
+ }
+ case content::SyntheticGestureParams::GestureType::SMOOTH_DRAG_GESTURE: {
+ content::SyntheticSmoothDragGestureParams* params =
+ new content::SyntheticSmoothDragGestureParams();
+ if (!FuzzParam(&params->start_point, fuzzer))
+ return false;
+ if (!FuzzParam(&params->distances, fuzzer))
+ return false;
+ if (!FuzzParam(&params->speed_in_pixels_s, fuzzer))
+ return false;
+ gesture_params.reset(params);
+ break;
+ }
+ case content::SyntheticGestureParams::GestureType::PINCH_GESTURE: {
+ content::SyntheticPinchGestureParams* params =
+ new content::SyntheticPinchGestureParams();
+ if (!FuzzParam(&params->scale_factor, fuzzer))
+ return false;
+ if (!FuzzParam(&params->anchor, fuzzer))
+ return false;
+ if (!FuzzParam(&params->relative_pointer_speed_in_pixels_s,
+ fuzzer))
+ return false;
+ gesture_params.reset(params);
+ break;
+ }
+ case content::SyntheticGestureParams::GestureType::TAP_GESTURE: {
+ content::SyntheticTapGestureParams* params =
+ new content::SyntheticTapGestureParams();
+ if (!FuzzParam(&params->position, fuzzer))
+ return false;
+ if (!FuzzParam(&params->duration_ms, fuzzer))
+ return false;
+ gesture_params.reset(params);
+ break;
+ }
+ }
+ p->set_gesture_params(std::move(gesture_params));
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<content::WebCursor> {
+ static bool Fuzz(content::WebCursor* p, Fuzzer* fuzzer) {
+ content::WebCursor::CursorInfo info;
+ p->GetCursorInfo(&info);
+
+ // |type| enum is not validated on de-serialization, so pick random value.
+ if (!FuzzParam(reinterpret_cast<int*>(&info.type), fuzzer))
+ return false;
+ if (!FuzzParam(&info.hotspot, fuzzer))
+ return false;
+ if (!FuzzParam(&info.image_scale_factor, fuzzer))
+ return false;
+ if (!FuzzParam(&info.custom_image, fuzzer))
+ return false;
+ // Omitting |externalHandle| since it is not serialized.
+
+ // Scale factor is expected to be greater than 0, otherwise we hit
+ // a check failure.
+ info.image_scale_factor = fabs(info.image_scale_factor);
+ if (!(info.image_scale_factor > 0.0))
+ info.image_scale_factor = 1;
+
+ *p = content::WebCursor();
+ p->InitFromCursorInfo(info);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<ContentSettingsPattern> {
+ static bool Fuzz(ContentSettingsPattern* p, Fuzzer* fuzzer) {
+ // TODO(mbarbella): This can crash if a pattern is generated from a random
+ // string. We could carefully generate a pattern or fix pattern generation.
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<ExtensionMsg_PermissionSetStruct> {
+ static bool Fuzz(ExtensionMsg_PermissionSetStruct* p,
+ Fuzzer* fuzzer) {
+ // TODO(mbarbella): This should actually do something.
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<extensions::URLPatternSet> {
+ static bool Fuzz(extensions::URLPatternSet* p, Fuzzer* fuzzer) {
+ std::set<URLPattern> patterns = p->patterns();
+ if (!FuzzParam(&patterns, fuzzer))
+ return false;
+ *p = extensions::URLPatternSet(patterns);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<gfx::Point> {
+ static bool Fuzz(gfx::Point* p, Fuzzer* fuzzer) {
+ int x = p->x();
+ int y = p->y();
+ if (!FuzzParam(&x, fuzzer))
+ return false;
+ if (!FuzzParam(&y, fuzzer))
+ return false;
+ p->SetPoint(x, y);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<gfx::PointF> {
+ static bool Fuzz(gfx::PointF* p, Fuzzer* fuzzer) {
+ float x = p->x();
+ float y = p->y();
+ if (!FuzzParam(&x, fuzzer))
+ return false;
+ if (!FuzzParam(&y, fuzzer))
+ return false;
+ p->SetPoint(x, y);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<gfx::Rect> {
+ static bool Fuzz(gfx::Rect* p, Fuzzer* fuzzer) {
+ gfx::Point origin = p->origin();
+ gfx::Size size = p->size();
+ if (!FuzzParam(&origin, fuzzer))
+ return false;
+ if (!FuzzParam(&size, fuzzer))
+ return false;
+ p->set_origin(origin);
+ p->set_size(size);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<gfx::RectF> {
+ static bool Fuzz(gfx::RectF* p, Fuzzer* fuzzer) {
+ gfx::PointF origin = p->origin();
+ gfx::SizeF size = p->size();
+ if (!FuzzParam(&origin, fuzzer))
+ return false;
+ if (!FuzzParam(&size, fuzzer))
+ return false;
+ p->set_origin(origin);
+ p->set_size(size);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<gfx::Range> {
+ static bool Fuzz(gfx::Range* p, Fuzzer* fuzzer) {
+ size_t start = p->start();
+ size_t end = p->end();
+ if (!FuzzParam(&start, fuzzer))
+ return false;
+ if (!FuzzParam(&end, fuzzer))
+ return false;
+ *p = gfx::Range(start, end);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<gfx::Size> {
+ static bool Fuzz(gfx::Size* p, Fuzzer* fuzzer) {
+ int width = p->width();
+ int height = p->height();
+ if (!FuzzParam(&width, fuzzer))
+ return false;
+ if (!FuzzParam(&height, fuzzer))
+ return false;
+ p->SetSize(width, height);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<gfx::SizeF> {
+ static bool Fuzz(gfx::SizeF* p, Fuzzer* fuzzer) {
+ float w;
+ float h;
+ if (!FuzzParam(&w, fuzzer))
+ return false;
+ if (!FuzzParam(&h, fuzzer))
+ return false;
+ p->SetSize(w, h);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<gfx::Transform> {
+ static bool Fuzz(gfx::Transform* p, Fuzzer* fuzzer) {
+ SkMScalar matrix[16];
+ for (size_t i = 0; i < arraysize(matrix); i++) {
+ matrix[i] = p->matrix().get(i / 4, i % 4);
+ }
+ if (!FuzzParamArray(&matrix[0], arraysize(matrix), fuzzer))
+ return false;
+ *p = gfx::Transform(matrix[0], matrix[1], matrix[2], matrix[3], matrix[4],
+ matrix[5], matrix[6], matrix[7], matrix[8], matrix[9],
+ matrix[10], matrix[11], matrix[12], matrix[13],
+ matrix[14], matrix[15]);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<gfx::Vector2d> {
+ static bool Fuzz(gfx::Vector2d* p, Fuzzer* fuzzer) {
+ int x = p->x();
+ int y = p->y();
+ if (!FuzzParam(&x, fuzzer))
+ return false;
+ if (!FuzzParam(&y, fuzzer))
+ return false;
+ *p = gfx::Vector2d(x, y);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<gfx::Vector2dF> {
+ static bool Fuzz(gfx::Vector2dF* p, Fuzzer* fuzzer) {
+ float x = p->x();
+ float y = p->y();
+ if (!FuzzParam(&x, fuzzer))
+ return false;
+ if (!FuzzParam(&y, fuzzer))
+ return false;
+ *p = gfx::Vector2dF(x, y);
+ return true;
+ }
+};
+
+template <typename TypeMarker, typename WrappedType, WrappedType kInvalidValue>
+struct FuzzTraits<gpu::IdType<TypeMarker, WrappedType, kInvalidValue>> {
+ using param_type = gpu::IdType<TypeMarker, WrappedType, kInvalidValue>;
+ static bool Fuzz(param_type* id, Fuzzer* fuzzer) {
+ WrappedType raw_value = id->GetUnsafeValue();
+ if (!FuzzParam(&raw_value, fuzzer))
+ return false;
+ *id = param_type::FromUnsafeValue(raw_value);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<gpu::Mailbox> {
+ static bool Fuzz(gpu::Mailbox* p, Fuzzer* fuzzer) {
+ fuzzer->FuzzBytes(p->name, sizeof(p->name));
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<gpu::SyncToken> {
+ static bool Fuzz(gpu::SyncToken* p, Fuzzer* fuzzer) {
+ bool verified_flush = false;
+ gpu::CommandBufferNamespace namespace_id =
+ gpu::CommandBufferNamespace::INVALID;
+ int32_t extra_data_field = 0;
+ gpu::CommandBufferId command_buffer_id;
+ uint64_t release_count = 0;
+
+ if (!FuzzParam(&verified_flush, fuzzer))
+ return false;
+ if (!FuzzParam(&namespace_id, fuzzer))
+ return false;
+ if (!FuzzParam(&extra_data_field, fuzzer))
+ return false;
+ if (!FuzzParam(&command_buffer_id, fuzzer))
+ return false;
+ if (!FuzzParam(&release_count, fuzzer))
+ return false;
+
+ p->Clear();
+ p->Set(namespace_id, extra_data_field, command_buffer_id, release_count);
+ if (verified_flush)
+ p->SetVerifyFlush();
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<gpu::MailboxHolder> {
+ static bool Fuzz(gpu::MailboxHolder* p, Fuzzer* fuzzer) {
+ if (!FuzzParam(&p->mailbox, fuzzer))
+ return false;
+ if (!FuzzParam(&p->sync_token, fuzzer))
+ return false;
+ if (!FuzzParam(&p->texture_target, fuzzer))
+ return false;
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<gpu::ValueState> {
+ static bool Fuzz(gpu::ValueState* p, Fuzzer* fuzzer) {
+ if (!FuzzParamArray(&p->float_value[0], 4, fuzzer))
+ return false;
+ if (!FuzzParamArray(&p->int_value[0], 4, fuzzer))
+ return false;
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<GURL> {
+ static bool Fuzz(GURL* p, Fuzzer* fuzzer) {
+ if (!fuzzer->ShouldGenerate()) {
+ std::string spec = p->possibly_invalid_spec();
+ if (!FuzzParam(&spec, fuzzer))
+ return false;
+ if (spec != p->possibly_invalid_spec())
+ *p = GURL(spec);
+ return true;
+ }
+
+ const char url_chars[] = "Ahtp0:/.?+\\%&#";
+ size_t count = RandInRange(100);
+ std::string random_url;
+ for (size_t i = 0; i < count; ++i)
+ random_url += url_chars[RandInRange(sizeof(url_chars) - 1)];
+ int selector = RandInRange(10);
+ if (selector == 0)
+ random_url = std::string("http://") + random_url;
+ else if (selector == 1)
+ random_url = std::string("file://") + random_url;
+ else if (selector == 2)
+ random_url = std::string("javascript:") + random_url;
+ else if (selector == 2)
+ random_url = std::string("data:") + random_url;
+ *p = GURL(random_url);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<HostID> {
+ static bool Fuzz(HostID* p, Fuzzer* fuzzer) {
+ HostID::HostType type = p->type();
+ std::string id = p->id();
+ if (!FuzzParam(&type, fuzzer))
+ return false;
+ if (!FuzzParam(&id, fuzzer))
+ return false;
+ *p = HostID(type, id);
+ return true;
+ }
+};
+
+#if defined(OS_WIN)
+template <>
+struct FuzzTraits<HWND> {
+ static bool Fuzz(HWND* p, Fuzzer* fuzzer) {
+ // TODO(aarya): This should actually do something.
+ return true;
+ }
+};
+#endif
+
+template <>
+struct FuzzTraits<IPC::Message> {
+ static bool Fuzz(IPC::Message* p, Fuzzer* fuzzer) {
+ // TODO(mbarbella): Support mutation.
+ if (!fuzzer->ShouldGenerate())
+ return true;
+
+ if (g_function_vector.empty())
+ return false;
+ size_t index = RandInRange(g_function_vector.size());
+ IPC::Message* ipc_message = (*g_function_vector[index])(NULL, fuzzer);
+ if (!ipc_message)
+ return false;
+ p = ipc_message;
+ return true;
+ }
+};
+
+#if !defined(OS_WIN)
+// PlatformFileForTransit is just SharedMemoryHandle on Windows, which already
+// has a trait, see ipc/ipc_platform_file.h
+template <>
+struct FuzzTraits<IPC::PlatformFileForTransit> {
+ static bool Fuzz(IPC::PlatformFileForTransit* p, Fuzzer* fuzzer) {
+ // TODO(inferno): I don't think we can generate real ones due to check on
+ // construct.
+ return true;
+ }
+};
+#endif
+
+template <>
+struct FuzzTraits<IPC::ChannelHandle> {
+ static bool Fuzz(IPC::ChannelHandle* p, Fuzzer* fuzzer) {
+ // TODO(mbarbella): Support mutation.
+ if (!fuzzer->ShouldGenerate())
+ return true;
+
+ // TODO(inferno): Add way to generate real channel handles.
+#if defined(OS_WIN)
+ HANDLE fake_handle = (HANDLE)(RandU64());
+ p->pipe = IPC::ChannelHandle::PipeHandle(fake_handle);
+ return true;
+#elif defined(OS_POSIX)
+ return
+ FuzzParam(&p->name, fuzzer) &&
+ FuzzParam(&p->socket, fuzzer);
+#endif
+ }
+};
+
+#if defined(OS_WIN)
+template <>
+struct FuzzTraits<LOGFONT> {
+ static bool Fuzz(LOGFONT* p, Fuzzer* fuzzer) {
+ // TODO(aarya): This should actually do something.
+ return true;
+ }
+};
+#endif
+
+template <>
+struct FuzzTraits<media::AudioParameters> {
+ static bool Fuzz(media::AudioParameters* p, Fuzzer* fuzzer) {
+ int channel_layout = p->channel_layout();
+ int format = p->format();
+ int sample_rate = p->sample_rate();
+ int bits_per_sample = p->bits_per_sample();
+ int frames_per_buffer = p->frames_per_buffer();
+ int channels = p->channels();
+ int effects = p->effects();
+ // TODO(mbarbella): Support ChannelLayout mutation and invalid values.
+ if (fuzzer->ShouldGenerate()) {
+ channel_layout =
+ RandInRange(media::ChannelLayout::CHANNEL_LAYOUT_MAX + 1);
+ }
+ if (!FuzzParam(&format, fuzzer))
+ return false;
+ if (!FuzzParam(&sample_rate, fuzzer))
+ return false;
+ if (!FuzzParam(&bits_per_sample, fuzzer))
+ return false;
+ if (!FuzzParam(&frames_per_buffer, fuzzer))
+ return false;
+ if (!FuzzParam(&channels, fuzzer))
+ return false;
+ if (!FuzzParam(&effects, fuzzer))
+ return false;
+ media::AudioParameters params(
+ static_cast<media::AudioParameters::Format>(format),
+ static_cast<media::ChannelLayout>(channel_layout), sample_rate,
+ bits_per_sample, frames_per_buffer);
+ params.set_channels_for_discrete(channels);
+ params.set_effects(effects);
+ *p = params;
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<media::cast::RtpTimeTicks> {
+ static bool Fuzz(media::cast::RtpTimeTicks* p, Fuzzer* fuzzer) {
+ base::TimeDelta delta;
+ int base;
+ if (!FuzzParam(&delta, fuzzer))
+ return false;
+ if (!FuzzParam(&base, fuzzer))
+ return false;
+ *p = media::cast::RtpTimeTicks::FromTimeDelta(delta, base);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<media::VideoCaptureFormat> {
+ static bool Fuzz(media::VideoCaptureFormat* p, Fuzzer* fuzzer) {
+ if (!FuzzParam(&p->frame_size, fuzzer))
+ return false;
+ if (!FuzzParam(&p->frame_rate, fuzzer))
+ return false;
+ if (!FuzzParam(reinterpret_cast<int*>(&p->pixel_format), fuzzer))
+ return false;
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<net::LoadTimingInfo> {
+ static bool Fuzz(net::LoadTimingInfo* p, Fuzzer* fuzzer) {
+ return FuzzParam(&p->socket_log_id, fuzzer) &&
+ FuzzParam(&p->socket_reused, fuzzer) &&
+ FuzzParam(&p->request_start_time, fuzzer) &&
+ FuzzParam(&p->request_start, fuzzer) &&
+ FuzzParam(&p->proxy_resolve_start, fuzzer) &&
+ FuzzParam(&p->proxy_resolve_end, fuzzer) &&
+ FuzzParam(&p->connect_timing.dns_start, fuzzer) &&
+ FuzzParam(&p->connect_timing.dns_end, fuzzer) &&
+ FuzzParam(&p->connect_timing.connect_start, fuzzer) &&
+ FuzzParam(&p->connect_timing.connect_end, fuzzer) &&
+ FuzzParam(&p->connect_timing.ssl_start, fuzzer) &&
+ FuzzParam(&p->connect_timing.ssl_end, fuzzer) &&
+ FuzzParam(&p->send_start, fuzzer) &&
+ FuzzParam(&p->send_end, fuzzer) &&
+ FuzzParam(&p->receive_headers_end, fuzzer);
+ }
+};
+
+template <>
+struct FuzzTraits<net::HostPortPair> {
+ static bool Fuzz(net::HostPortPair* p, Fuzzer* fuzzer) {
+ std::string host = p->host();
+ uint16_t port = p->port();
+ if (!FuzzParam(&host, fuzzer))
+ return false;
+ if (!FuzzParam(&port, fuzzer))
+ return false;
+ p->set_host(host);
+ p->set_port(port);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<net::IPEndPoint> {
+ static bool Fuzz(net::IPEndPoint* p, Fuzzer* fuzzer) {
+ net::IPAddressNumber address_number = p->address().bytes();
+ int port = p->port();
+ if (!FuzzParam(&address_number, fuzzer))
+ return false;
+ if (!FuzzParam(&port, fuzzer))
+ return false;
+ net::IPEndPoint ip_endpoint(address_number, port);
+ *p = ip_endpoint;
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<network_hints::LookupRequest> {
+ static bool Fuzz(network_hints::LookupRequest* p, Fuzzer* fuzzer) {
+ if (!FuzzParam(&p->hostname_list, fuzzer))
+ return false;
+ return true;
+ }
+};
+
+// PP_ traits.
+template <>
+struct FuzzTraits<PP_Bool> {
+ static bool Fuzz(PP_Bool* p, Fuzzer* fuzzer) {
+ bool tmp = PP_ToBool(*p);
+ if (!FuzzParam(&tmp, fuzzer))
+ return false;
+ *p = PP_FromBool(tmp);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<PP_KeyInformation> {
+ static bool Fuzz(PP_KeyInformation* p, Fuzzer* fuzzer) {
+ // TODO(mbarbella): This should actually do something.
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<PP_NetAddress_Private> {
+ static bool Fuzz(PP_NetAddress_Private* p, Fuzzer* fuzzer) {
+ p->size = RandInRange(sizeof(p->data) + 1);
+ fuzzer->FuzzBytes(&p->data, p->size);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<ppapi::PPB_X509Certificate_Fields> {
+ static bool Fuzz(ppapi::PPB_X509Certificate_Fields* p,
+ Fuzzer* fuzzer) {
+ // TODO(mbarbella): This should actually do something.
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<ppapi::proxy::PPBFlash_DrawGlyphs_Params> {
+ static bool Fuzz(ppapi::proxy::PPBFlash_DrawGlyphs_Params* p,
+ Fuzzer* fuzzer) {
+ // TODO(mbarbella): This should actually do something.
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<ppapi::proxy::ResourceMessageCallParams> {
+ static bool Fuzz(
+ ppapi::proxy::ResourceMessageCallParams* p, Fuzzer* fuzzer) {
+ // TODO(mbarbella): Support mutation.
+ if (!fuzzer->ShouldGenerate())
+ return true;
+
+ PP_Resource resource;
+ int32_t sequence;
+ bool has_callback;
+ if (!FuzzParam(&resource, fuzzer))
+ return false;
+ if (!FuzzParam(&sequence, fuzzer))
+ return false;
+ if (!FuzzParam(&has_callback, fuzzer))
+ return false;
+ *p = ppapi::proxy::ResourceMessageCallParams(resource, sequence);
+ if (has_callback)
+ p->set_has_callback();
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<ppapi::proxy::ResourceMessageReplyParams> {
+ static bool Fuzz(
+ ppapi::proxy::ResourceMessageReplyParams* p, Fuzzer* fuzzer) {
+ // TODO(mbarbella): Support mutation.
+ if (!fuzzer->ShouldGenerate())
+ return true;
+
+ PP_Resource resource;
+ int32_t sequence;
+ int32_t result;
+ if (!FuzzParam(&resource, fuzzer))
+ return false;
+ if (!FuzzParam(&sequence, fuzzer))
+ return false;
+ if (!FuzzParam(&result, fuzzer))
+ return false;
+ *p = ppapi::proxy::ResourceMessageReplyParams(resource, sequence);
+ p->set_result(result);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<ppapi::proxy::SerializedHandle> {
+ static bool Fuzz(ppapi::proxy::SerializedHandle* p,
+ Fuzzer* fuzzer) {
+ // TODO(mbarbella): This should actually do something.
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<ppapi::proxy::SerializedFontDescription> {
+ static bool Fuzz(ppapi::proxy::SerializedFontDescription* p,
+ Fuzzer* fuzzer) {
+ // TODO(mbarbella): This should actually do something.
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<ppapi::proxy::SerializedTrueTypeFontDesc> {
+ static bool Fuzz(ppapi::proxy::SerializedTrueTypeFontDesc* p,
+ Fuzzer* fuzzer) {
+ // TODO(mbarbella): This should actually do something.
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<ppapi::proxy::SerializedVar> {
+ static bool Fuzz(ppapi::proxy::SerializedVar* p, Fuzzer* fuzzer) {
+ // TODO(mbarbella): This should actually do something.
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<ppapi::HostResource> {
+ static bool Fuzz(ppapi::HostResource* p, Fuzzer* fuzzer) {
+ // TODO(mbarbella): Support mutation.
+ if (!fuzzer->ShouldGenerate())
+ return true;
+
+ PP_Instance instance;
+ PP_Resource resource;
+ if (!FuzzParam(&instance, fuzzer))
+ return false;
+ if (!FuzzParam(&resource, fuzzer))
+ return false;
+ p->SetHostResource(instance, resource);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<ppapi::PepperFilePath> {
+ static bool Fuzz(ppapi::PepperFilePath *p, Fuzzer* fuzzer) {
+ // TODO(mbarbella): Support mutation.
+ if (!fuzzer->ShouldGenerate())
+ return true;
+
+ unsigned domain = RandInRange(ppapi::PepperFilePath::DOMAIN_MAX_VALID+1);
+ base::FilePath path;
+ if (!FuzzParam(&path, fuzzer))
+ return false;
+ *p = ppapi::PepperFilePath(
+ static_cast<ppapi::PepperFilePath::Domain>(domain), path);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<ppapi::PpapiPermissions> {
+ static bool Fuzz(ppapi::PpapiPermissions* p, Fuzzer* fuzzer) {
+ uint32_t bits = p->GetBits();
+ if (!FuzzParam(&bits, fuzzer))
+ return false;
+ *p = ppapi::PpapiPermissions(bits);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<ppapi::SocketOptionData> {
+ static bool Fuzz(ppapi::SocketOptionData* p, Fuzzer* fuzzer) {
+ // TODO(mbarbella): This can be improved.
+ int32_t tmp;
+ p->GetInt32(&tmp);
+ if (!FuzzParam(&tmp, fuzzer))
+ return false;
+ p->SetInt32(tmp);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<printing::PdfRenderSettings> {
+ static bool Fuzz(printing::PdfRenderSettings* p, Fuzzer* fuzzer) {
+ gfx::Rect area = p->area();
+ int dpi = p->dpi();
+ bool autorotate = p->autorotate();
+ if (!FuzzParam(&area, fuzzer))
+ return false;
+ if (!FuzzParam(&dpi, fuzzer))
+ return false;
+ if (!FuzzParam(&autorotate, fuzzer))
+ return false;
+ *p = printing::PdfRenderSettings(area, dpi, autorotate);
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<SkBitmap> {
+ static bool Fuzz(SkBitmap* p, Fuzzer* fuzzer) {
+ // TODO(mbarbella): This should actually do something.
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<storage::DataElement> {
+ static bool Fuzz(storage::DataElement* p, Fuzzer* fuzzer) {
+ // TODO(mbarbella): Support mutation.
+ if (!fuzzer->ShouldGenerate())
+ return true;
+
+ switch (RandInRange(4)) {
+ case storage::DataElement::Type::TYPE_BYTES: {
+ if (RandEvent(2)) {
+ p->SetToEmptyBytes();
+ } else {
+ char data[256];
+ int data_len = RandInRange(sizeof(data));
+ fuzzer->FuzzBytes(&data[0], data_len);
+ p->SetToBytes(&data[0], data_len);
+ }
+ return true;
+ }
+ case storage::DataElement::Type::TYPE_FILE: {
+ base::FilePath path;
+ uint64_t offset;
+ uint64_t length;
+ base::Time modification_time;
+ if (!FuzzParam(&path, fuzzer))
+ return false;
+ if (!FuzzParam(&offset, fuzzer))
+ return false;
+ if (!FuzzParam(&length, fuzzer))
+ return false;
+ if (!FuzzParam(&modification_time, fuzzer))
+ return false;
+ p->SetToFilePathRange(path, offset, length, modification_time);
+ return true;
+ }
+ case storage::DataElement::Type::TYPE_BLOB: {
+ std::string uuid;
+ uint64_t offset;
+ uint64_t length;
+ if (!FuzzParam(&uuid, fuzzer))
+ return false;
+ if (!FuzzParam(&offset, fuzzer))
+ return false;
+ if (!FuzzParam(&length, fuzzer))
+ return false;
+ p->SetToBlobRange(uuid, offset, length);
+ return true;
+ }
+ case storage::DataElement::Type::TYPE_FILE_FILESYSTEM: {
+ GURL url;
+ uint64_t offset;
+ uint64_t length;
+ base::Time modification_time;
+ if (!FuzzParam(&url, fuzzer))
+ return false;
+ if (!FuzzParam(&offset, fuzzer))
+ return false;
+ if (!FuzzParam(&length, fuzzer))
+ return false;
+ if (!FuzzParam(&modification_time, fuzzer))
+ return false;
+ p->SetToFileSystemUrlRange(url, offset, length, modification_time);
+ return true;
+ }
+ default: {
+ NOTREACHED();
+ return false;
+ }
+ }
+ }
+};
+
+template <>
+struct FuzzTraits<ui::LatencyInfo> {
+ static bool Fuzz(ui::LatencyInfo* p, Fuzzer* fuzzer) {
+ // TODO(inferno): Add param traits for |latency_components|.
+ int64_t trace_id = p->trace_id();
+ bool terminated = p->terminated();
+ uint32_t input_coordinates_size = static_cast<uint32_t>(
+ RandInRange(ui::LatencyInfo::kMaxInputCoordinates + 1));
+ ui::LatencyInfo::InputCoordinate
+ input_coordinates[ui::LatencyInfo::kMaxInputCoordinates];
+ if (!FuzzParamArray(
+ input_coordinates, input_coordinates_size, fuzzer))
+ return false;
+ if (!FuzzParam(&trace_id, fuzzer))
+ return false;
+ if (!FuzzParam(&terminated, fuzzer))
+ return false;
+
+ ui::LatencyInfo latency(trace_id, terminated);
+ for (size_t i = 0; i < input_coordinates_size; i++) {
+ latency.AddInputCoordinate(input_coordinates[i]);
+ }
+ *p = latency;
+
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<ui::LatencyInfo::InputCoordinate> {
+ static bool Fuzz(
+ ui::LatencyInfo::InputCoordinate* p, Fuzzer* fuzzer) {
+ if (!FuzzParam(&p->x, fuzzer))
+ return false;
+ if (!FuzzParam(&p->y, fuzzer))
+ return false;
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<url::Origin> {
+ static bool Fuzz(url::Origin* p, Fuzzer* fuzzer) {
+ std::string scheme = p->scheme();
+ std::string host = p->host();
+ uint16_t port = p->port();
+ if (!FuzzParam(&scheme, fuzzer))
+ return false;
+ if (!FuzzParam(&host, fuzzer))
+ return false;
+ if (!FuzzParam(&port, fuzzer))
+ return false;
+ *p = url::Origin::UnsafelyCreateOriginWithoutNormalization(scheme, host,
+ port);
+
+ // Force a unique origin 1% of the time:
+ if (RandInRange(100) == 1)
+ *p = url::Origin();
+ return true;
+ }
+};
+
+template <>
+struct FuzzTraits<URLPattern> {
+ static bool Fuzz(URLPattern* p, Fuzzer* fuzzer) {
+ int valid_schemes = p->valid_schemes();
+ std::string host = p->host();
+ std::string port = p->port();
+ std::string path = p->path();
+ if (!FuzzParam(&valid_schemes, fuzzer))
+ return false;
+ if (!FuzzParam(&host, fuzzer))
+ return false;
+ if (!FuzzParam(&port, fuzzer))
+ return false;
+ if (!FuzzParam(&path, fuzzer))
+ return false;
+ *p = URLPattern(valid_schemes);
+ p->SetHost(host);
+ p->SetPort(port);
+ p->SetPath(path);
+ return true;
+ }
+};
+
+// Redefine macros to generate fuzzing code from traits declarations.
+// STRUCT declarations cause corresponding STRUCT_TRAITS declarations to occur.
+#undef IPC_STRUCT_BEGIN
+#undef IPC_STRUCT_BEGIN_WITH_PARENT
+#undef IPC_STRUCT_MEMBER
+#undef IPC_STRUCT_END
+#define IPC_STRUCT_BEGIN_WITH_PARENT(struct_name, parent) \
+ IPC_STRUCT_BEGIN(struct_name)
+#define IPC_STRUCT_BEGIN(struct_name) IPC_STRUCT_TRAITS_BEGIN(struct_name)
+#define IPC_STRUCT_MEMBER(type, name, ...) IPC_STRUCT_TRAITS_MEMBER(name)
+#define IPC_STRUCT_END() IPC_STRUCT_TRAITS_END()
+
+// Set up so next include will generate trait classes.
+#undef IPC_STRUCT_TRAITS_BEGIN
+#undef IPC_STRUCT_TRAITS_MEMBER
+#undef IPC_STRUCT_TRAITS_PARENT
+#undef IPC_STRUCT_TRAITS_END
+#define IPC_STRUCT_TRAITS_BEGIN(struct_name) \
+ template <> \
+ struct FuzzTraits<struct_name> { \
+ static bool Fuzz(struct_name *p, Fuzzer* fuzzer) {
+
+#define IPC_STRUCT_TRAITS_MEMBER(name) \
+ if (!FuzzParam(&p->name, fuzzer)) \
+ return false;
+
+#define IPC_STRUCT_TRAITS_PARENT(type) \
+ if (!FuzzParam(static_cast<type*>(p), fuzzer)) \
+ return false;
+
+#define IPC_STRUCT_TRAITS_END() \
+ return true; \
+ } \
+ };
+
+// If |condition| isn't met, the message will fail to serialize. Try
+// increasingly smaller ranges until we find one that happens to meet
+// the condition, or fail trying.
+// TODO(mbarbella): Attempt to validate even in the mutation case.
+#undef IPC_ENUM_TRAITS_VALIDATE
+#define IPC_ENUM_TRAITS_VALIDATE(enum_name, condition) \
+ template <> \
+ struct FuzzTraits<enum_name> { \
+ static bool Fuzz(enum_name* p, Fuzzer* fuzzer) { \
+ if (!fuzzer->ShouldGenerate()) { \
+ return FuzzParam(reinterpret_cast<int*>(p), fuzzer); \
+ } \
+ for (int shift = 30; shift; --shift) { \
+ for (int tries = 0; tries < 2; ++tries) { \
+ int value = RandInRange(1 << shift); \
+ if (condition) { \
+ *reinterpret_cast<int*>(p) = value; \
+ return true; \
+ } \
+ } \
+ } \
+ std::cerr << "failed to satisfy " << #condition << "\n"; \
+ return false; \
+ } \
+ };
+
+// Bring them into existence.
+#include "tools/ipc_fuzzer/message_lib/all_messages.h"
+#include "tools/ipc_fuzzer/message_lib/all_message_null_macros.h"
+
+#define MAX_FAKE_ROUTING_ID 15
+
+// MessageFactory abstracts away constructing control/routed messages by
+// providing an additional random routing ID argument when necessary.
+template <typename Message, IPC::MessageKind>
+class MessageFactory;
+
+template <typename Message>
+class MessageFactory<Message, IPC::MessageKind::CONTROL> {
+ public:
+ template <typename... Args>
+ static Message* New(const Args&... args) {
+ return new Message(args...);
+ }
+};
+
+template <typename Message>
+class MessageFactory<Message, IPC::MessageKind::ROUTED> {
+ public:
+ template <typename... Args>
+ static Message* New(const Args&... args) {
+ return new Message(RandInRange(MAX_FAKE_ROUTING_ID), args...);
+ }
+};
+
+template <typename Message>
+class FuzzerHelper;
+
+template <typename Meta, typename... Ins>
+class FuzzerHelper<IPC::MessageT<Meta, base::Tuple<Ins...>, void>> {
+ public:
+ using Message = IPC::MessageT<Meta, base::Tuple<Ins...>, void>;
+
+ static IPC::Message* Fuzz(IPC::Message* msg, Fuzzer* fuzzer) {
+ return FuzzImpl(msg, fuzzer, base::MakeIndexSequence<sizeof...(Ins)>());
+ }
+
+ private:
+ template <size_t... Ns>
+ static IPC::Message* FuzzImpl(IPC::Message* msg,
+ Fuzzer* fuzzer,
+ base::IndexSequence<Ns...>) {
+ typename Message::Param p;
+ if (msg) {
+ Message::Read(static_cast<Message*>(msg), &p);
+ }
+ if (FuzzParam(&p, fuzzer)) {
+ return MessageFactory<Message, Meta::kKind>::New(base::get<Ns>(p)...);
+ }
+ std::cerr << "Don't know how to handle " << Meta::kName << "\n";
+ return nullptr;
+ }
+};
+
+template <typename Meta, typename... Ins, typename... Outs>
+class FuzzerHelper<
+ IPC::MessageT<Meta, base::Tuple<Ins...>, base::Tuple<Outs...>>> {
+ public:
+ using Message =
+ IPC::MessageT<Meta, base::Tuple<Ins...>, base::Tuple<Outs...>>;
+
+ static IPC::Message* Fuzz(IPC::Message* msg, Fuzzer* fuzzer) {
+ return FuzzImpl(msg, fuzzer, base::MakeIndexSequence<sizeof...(Ins)>());
+ }
+
+ private:
+ template <size_t... Ns>
+ static IPC::Message* FuzzImpl(IPC::Message* msg,
+ Fuzzer* fuzzer,
+ base::IndexSequence<Ns...>) {
+ typename Message::SendParam p;
+ Message* real_msg = static_cast<Message*>(msg);
+ Message* new_msg = nullptr;
+ if (real_msg) {
+ Message::ReadSendParam(real_msg, &p);
+ }
+ if (FuzzParam(&p, fuzzer)) {
+ new_msg = MessageFactory<Message, Meta::kKind>::New(
+ base::get<Ns>(p)..., static_cast<Outs*>(nullptr)...);
+ }
+ if (real_msg && new_msg) {
+ MessageCracker::CopyMessageID(new_msg, real_msg);
+ } else if (!new_msg) {
+ std::cerr << "Don't know how to handle " << Meta::kName << "\n";
+ }
+ return new_msg;
+ }
+};
+
+#include "tools/ipc_fuzzer/message_lib/all_message_null_macros.h"
+
+void PopulateFuzzerFunctionVector(
+ FuzzerFunctionVector* function_vector) {
+#undef IPC_MESSAGE_DECL
+#define IPC_MESSAGE_DECL(name, ...) \
+ function_vector->push_back(FuzzerHelper<name>::Fuzz);
+#include "tools/ipc_fuzzer/message_lib/all_messages.h"
+}
+
+// Redefine macros to register fuzzing functions into map.
+#include "tools/ipc_fuzzer/message_lib/all_message_null_macros.h"
+#undef IPC_MESSAGE_DECL
+#define IPC_MESSAGE_DECL(name, ...) \
+ (*map)[static_cast<uint32_t>(name::ID)] = FuzzerHelper<name>::Fuzz;
+
+void PopulateFuzzerFunctionMap(FuzzerFunctionMap* map) {
+#include "tools/ipc_fuzzer/message_lib/all_messages.h"
+}
+
+} // namespace ipc_fuzzer
diff --git a/chromium/tools/ipc_fuzzer/fuzzer/fuzzer.h b/chromium/tools/ipc_fuzzer/fuzzer/fuzzer.h
new file mode 100644
index 00000000000..f406a02953c
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/fuzzer/fuzzer.h
@@ -0,0 +1,88 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_IPC_FUZZER_MUTATE_FUZZER_H_
+#define TOOLS_IPC_FUZZER_MUTATE_FUZZER_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <string>
+#include <vector>
+
+#include "base/strings/string_util.h"
+#include "ipc/ipc_message.h"
+
+namespace ipc_fuzzer {
+
+// Interface implemented by those who generate basic types. The types all
+// correspond to the types which a pickle from base/pickle.h can pickle,
+// plus the floating point types.
+class Fuzzer {
+ public:
+ // Functions for various data types.
+ virtual void FuzzBool(bool* value) = 0;
+ virtual void FuzzInt(int* value) = 0;
+ virtual void FuzzLong(long* value) = 0;
+ virtual void FuzzSize(size_t* value) = 0;
+ virtual void FuzzUChar(unsigned char* value) = 0;
+ virtual void FuzzWChar(wchar_t* value) = 0;
+ virtual void FuzzUInt16(uint16_t* value) = 0;
+ virtual void FuzzUInt32(uint32_t* value) = 0;
+ virtual void FuzzInt64(int64_t* value) = 0;
+ virtual void FuzzUInt64(uint64_t* value) = 0;
+ virtual void FuzzFloat(float* value) = 0;
+ virtual void FuzzDouble(double *value) = 0;
+ virtual void FuzzString(std::string* value) = 0;
+ virtual void FuzzString16(base::string16* value) = 0;
+ virtual void FuzzData(char* data, int length) = 0;
+ virtual void FuzzBytes(void* data, int data_len) = 0;
+
+ // Used to determine if a completely new value should be generated for
+ // certain types instead of attempting to modify the existing one.
+ virtual bool ShouldGenerate();
+};
+
// Fuzzer whose overrides are all empty: every value is left exactly as it
// was. Useful as a pass-through baseline for the fuzzing pipeline.
class NoOpFuzzer : public Fuzzer {
 public:
  NoOpFuzzer() {}
  virtual ~NoOpFuzzer() {}

  void FuzzBool(bool* value) override {}
  void FuzzInt(int* value) override {}
  void FuzzLong(long* value) override {}
  void FuzzSize(size_t* value) override {}
  void FuzzUChar(unsigned char* value) override {}
  void FuzzWChar(wchar_t* value) override {}
  void FuzzUInt16(uint16_t* value) override {}
  void FuzzUInt32(uint32_t* value) override {}
  void FuzzInt64(int64_t* value) override {}
  void FuzzUInt64(uint64_t* value) override {}
  void FuzzFloat(float* value) override {}
  void FuzzDouble(double* value) override {}
  void FuzzString(std::string* value) override {}
  void FuzzString16(base::string16* value) override {}
  void FuzzData(char* data, int length) override {}
  void FuzzBytes(void* data, int data_len) override {}
};
+
// Signature shared by all per-message fuzzing functions: takes the original
// message (null when generating a message from scratch) and the fuzzer to
// apply, and returns the resulting message or null on failure.
typedef IPC::Message* (*FuzzerFunction)(IPC::Message*, Fuzzer*);

// Used for mutating messages. Once populated, the map associates a message ID
// with a FuzzerFunction used for mutation of that message type.
// NOTE(review): base::hash_map is not included by this header directly;
// presumably it arrives transitively -- confirm before reusing elsewhere.
typedef base::hash_map<uint32_t, FuzzerFunction> FuzzerFunctionMap;
void PopulateFuzzerFunctionMap(FuzzerFunctionMap* map);

// Used for generating new messages. Once populated, the vector contains
// FuzzerFunctions for all message types that we know how to generate.
typedef std::vector<FuzzerFunction> FuzzerFunctionVector;
void PopulateFuzzerFunctionVector(FuzzerFunctionVector* function_vector);

// Since IPC::Message can be serialized, we also track a global function vector
// to handle generation of new messages while fuzzing.
extern FuzzerFunctionVector g_function_vector;
+
+} // namespace ipc_fuzzer
+
+#endif // TOOLS_IPC_FUZZER_MUTATE_FUZZER_H_
diff --git a/chromium/tools/ipc_fuzzer/fuzzer/fuzzer_main.cc b/chromium/tools/ipc_fuzzer/fuzzer/fuzzer_main.cc
new file mode 100644
index 00000000000..04174af9a00
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/fuzzer/fuzzer_main.cc
@@ -0,0 +1,248 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <iostream>
+#include <set>
+#include <vector>
+
+#include "base/command_line.h"
+#include "base/strings/string_split.h"
+#include "ipc/ipc_message_macros.h"
+#include "tools/ipc_fuzzer/fuzzer/fuzzer.h"
+#include "tools/ipc_fuzzer/fuzzer/generator.h"
+#include "tools/ipc_fuzzer/fuzzer/mutator.h"
+#include "tools/ipc_fuzzer/fuzzer/rand_util.h"
+#include "tools/ipc_fuzzer/message_lib/message_file.h"
+
+namespace ipc_fuzzer {
+
namespace {

// TODO(mbarbella): Check to see if this value is actually reasonable.
const int kFrequency = 23;

const char kCountSwitch[] = "count";
const char kCountSwitchHelp[] =
    "Number of messages to generate (generator).";

const char kFrequencySwitch[] = "frequency";
const char kFrequencySwitchHelp[] =
    "Probability of mutation; tweak every 1/|q| times (mutator).";

const char kFuzzerNameSwitch[] = "fuzzer-name";
const char kFuzzerNameSwitchHelp[] =
    "Select from generate, mutate, or no-op. Default: generate";

const char kHelpSwitch[] = "help";
const char kHelpSwitchHelp[] =
    "Show this message.";

const char kPermuteSwitch[] = "permute";
const char kPermuteSwitchHelp[] =
    "Randomly shuffle the order of all messages (mutator).";

const char kTypeListSwitch[] = "type-list";
const char kTypeListSwitchHelp[] =
    "Explicit list of the only message-ids to mutate (mutator).";

// Prints usage information for this tool to stderr.
void usage() {
  // Fixed typo: "exiting" -> "existing".
  std::cerr << "Mutate messages from an existing message file.\n";

  std::cerr << "Usage:\n"
            << "  ipc_fuzzer"
            << " [--" << kCountSwitch << "=c]"
            << " [--" << kFrequencySwitch << "=q]"
            << " [--" << kFuzzerNameSwitch << "=f]"
            << " [--" << kHelpSwitch << "]"
            << " [--" << kTypeListSwitch << "=x,y,z...]"
            << " [--" << kPermuteSwitch << "]"
            << " [infile (mutation only)] outfile\n";

  std::cerr
      << " --" << kCountSwitch << "    - " << kCountSwitchHelp << "\n"
      << " --" << kFrequencySwitch << "    - " << kFrequencySwitchHelp << "\n"
      << " --" << kFuzzerNameSwitch << "  - " << kFuzzerNameSwitchHelp << "\n"
      << " --" << kHelpSwitch << "     - " << kHelpSwitchHelp << "\n"
      << " --" << kTypeListSwitch << "    - " << kTypeListSwitchHelp << "\n"
      << " --" << kPermuteSwitch << "      - " << kPermuteSwitchHelp << "\n";
}

}  // namespace
+
+class FuzzerFactory {
+ public:
+ static Fuzzer *Create(const std::string& name, int frequency) {
+ if (name == "default")
+ return new Generator();
+
+ if (name == "generate")
+ return new Generator();
+
+ if (name == "mutate")
+ return new Mutator(frequency);
+
+ if (name == "no-op")
+ return new NoOpFuzzer();
+
+ std::cerr << "No such fuzzer: " << name << "\n";
+ return 0;
+ }
+};
+
+static IPC::Message* RewriteMessage(
+ IPC::Message* message,
+ Fuzzer* fuzzer,
+ FuzzerFunctionMap* map) {
+ FuzzerFunctionMap::iterator it = map->find(message->type());
+ if (it == map->end()) {
+ // This usually indicates a missing message file in all_messages.h, or
+ // that the message dump file is taken from a different revision of
+ // chromium from this executable.
+ std::cerr << "Unknown message type: ["
+ << IPC_MESSAGE_ID_CLASS(message->type()) << ", "
+ << IPC_MESSAGE_ID_LINE(message->type()) << "].\n";
+ return 0;
+ }
+
+ return (*it->second)(message, fuzzer);
+}
+
+int Generate(base::CommandLine* cmd, Fuzzer* fuzzer) {
+ base::CommandLine::StringVector args = cmd->GetArgs();
+ if (args.size() != 1) {
+ usage();
+ return EXIT_FAILURE;
+ }
+ base::FilePath::StringType output_file_name = args[0];
+
+ int message_count = 1000;
+ if (cmd->HasSwitch(kCountSwitch))
+ message_count = atoi(cmd->GetSwitchValueASCII(kCountSwitch).c_str());
+
+ MessageVector message_vector;
+ int bad_count = 0;
+ if (message_count < 0) {
+ // Enumerate them all.
+ for (size_t i = 0; i < g_function_vector.size(); ++i) {
+ if (IPC::Message* new_message = (*g_function_vector[i])(NULL, fuzzer))
+ message_vector.push_back(new_message);
+ else
+ bad_count += 1;
+ }
+ } else {
+ // Fuzz a random batch.
+ for (int i = 0; i < message_count; ++i) {
+ size_t index = RandInRange(g_function_vector.size());
+ if (IPC::Message* new_message = (*g_function_vector[index])(NULL, fuzzer))
+ message_vector.push_back(new_message);
+ else
+ bad_count += 1;
+ }
+ }
+
+ std::cerr << "Failed to generate " << bad_count << " messages.\n";
+ if (!MessageFile::Write(base::FilePath(output_file_name), message_vector))
+ return EXIT_FAILURE;
+ return EXIT_SUCCESS;
+}
+
+int Mutate(base::CommandLine* cmd, Fuzzer* fuzzer) {
+ base::CommandLine::StringVector args = cmd->GetArgs();
+ if (args.size() != 2) {
+ usage();
+ return EXIT_FAILURE;
+ }
+ base::FilePath::StringType input_file_name = args[0];
+ base::FilePath::StringType output_file_name = args[1];
+
+ bool permute = cmd->HasSwitch(kPermuteSwitch);
+
+ std::string type_string_list = cmd->GetSwitchValueASCII(kTypeListSwitch);
+ std::vector<std::string> type_string_vector = base::SplitString(
+ type_string_list, ",", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
+ std::set<uint32_t> type_set;
+ for (size_t i = 0; i < type_string_vector.size(); ++i) {
+ type_set.insert(atoi(type_string_vector[i].c_str()));
+ }
+
+ FuzzerFunctionMap fuzz_function_map;
+ PopulateFuzzerFunctionMap(&fuzz_function_map);
+
+ MessageVector message_vector;
+ if (!MessageFile::Read(base::FilePath(input_file_name), &message_vector))
+ return EXIT_FAILURE;
+
+ for (size_t i = 0; i < message_vector.size(); ++i) {
+ IPC::Message* msg = message_vector[i];
+ // If an explicit type set is specified, make sure we should be mutating
+ // this message type on this run.
+ if (!type_set.empty() && type_set.end() == std::find(
+ type_set.begin(), type_set.end(), msg->type())) {
+ continue;
+ }
+ IPC::Message* new_message = RewriteMessage(msg, fuzzer, &fuzz_function_map);
+ if (new_message) {
+ IPC::Message* old_message = message_vector[i];
+ delete old_message;
+ message_vector[i] = new_message;
+ }
+ }
+
+ if (permute) {
+ std::random_shuffle(message_vector.begin(), message_vector.end(),
+ RandInRange);
+ }
+
+ if (!MessageFile::Write(base::FilePath(output_file_name), message_vector))
+ return EXIT_FAILURE;
+ return EXIT_SUCCESS;
+}
+
+int FuzzerMain(int argc, char** argv) {
+ base::CommandLine::Init(argc, argv);
+ base::CommandLine* cmd = base::CommandLine::ForCurrentProcess();
+ base::CommandLine::StringVector args = cmd->GetArgs();
+
+ if (args.size() == 0 || args.size() > 2 || cmd->HasSwitch(kHelpSwitch)) {
+ usage();
+ return EXIT_FAILURE;
+ }
+
+ InitRand();
+
+ PopulateFuzzerFunctionVector(&g_function_vector);
+ std::cerr << "Counted " << g_function_vector.size()
+ << " distinct messages present in chrome.\n";
+
+ std::string fuzzer_name = "default";
+ if (cmd->HasSwitch(kFuzzerNameSwitch))
+ fuzzer_name = cmd->GetSwitchValueASCII(kFuzzerNameSwitch);
+
+ int frequency = kFrequency;
+ if (cmd->HasSwitch(kFrequencySwitch))
+ frequency = atoi(cmd->GetSwitchValueASCII(kFrequencySwitch).c_str());
+
+ Fuzzer* fuzzer = FuzzerFactory::Create(fuzzer_name, frequency);
+ if (!fuzzer)
+ return EXIT_FAILURE;
+
+ int result;
+ base::FilePath::StringType output_file_name;
+ if (fuzzer_name == "default" || fuzzer_name == "generate") {
+ result = Generate(cmd, fuzzer);
+ } else {
+ result = Mutate(cmd, fuzzer);
+ }
+
+ return result;
+}
+
+} // namespace ipc_fuzzer
+
// Thin entry point; all work happens in ipc_fuzzer::FuzzerMain.
int main(int argc, char** argv) {
  return ipc_fuzzer::FuzzerMain(argc, argv);
}
diff --git a/chromium/tools/ipc_fuzzer/fuzzer/generator.cc b/chromium/tools/ipc_fuzzer/fuzzer/generator.cc
new file mode 100644
index 00000000000..bcf827c7af5
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/fuzzer/generator.cc
@@ -0,0 +1,119 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <string>
+
+#include "base/strings/string_util.h"
+#include "tools/ipc_fuzzer/fuzzer/generator.h"
+#include "tools/ipc_fuzzer/fuzzer/rand_util.h"
+
+namespace ipc_fuzzer {
+
+template <typename T>
+void GenerateIntegralType(T* value) {
+ switch (RandInRange(16)) {
+ case 0:
+ *value = static_cast<T>(0);
+ break;
+ case 1:
+ *value = static_cast<T>(1);
+ break;
+ case 2:
+ *value = static_cast<T>(-1);
+ break;
+ case 3:
+ *value = static_cast<T>(2);
+ break;
+ default:
+ *value = static_cast<T>(RandU64());
+ break;
+ }
+}
+
// Overwrites |value| with a random double from RandDouble(); T may be
// float or double (implicit narrowing for float).
template <typename T>
void GenerateFloatingType(T* value) {
  *value = RandDouble();
}
+
+template <typename T>
+void GenerateStringType(T* value) {
+ T temp_string;
+ size_t length = RandInRange(300);
+ for (size_t i = 0; i < length; ++i)
+ temp_string += RandInRange(256);
+ *value = temp_string;
+}
+
+void Generator::FuzzBool(bool* value) {
+ *value = RandInRange(2) ? true: false;
+}
+
// The overrides below all discard the incoming value and produce a brand
// new one via the Generate*Type helpers above.

void Generator::FuzzInt(int* value) {
  GenerateIntegralType<int>(value);
}

void Generator::FuzzLong(long* value) {
  GenerateIntegralType<long>(value);
}

void Generator::FuzzSize(size_t* value) {
  GenerateIntegralType<size_t>(value);
}

void Generator::FuzzUChar(unsigned char* value) {
  GenerateIntegralType<unsigned char>(value);
}

void Generator::FuzzWChar(wchar_t* value) {
  GenerateIntegralType<wchar_t>(value);
}

void Generator::FuzzUInt16(uint16_t* value) {
  GenerateIntegralType<uint16_t>(value);
}

void Generator::FuzzUInt32(uint32_t* value) {
  GenerateIntegralType<uint32_t>(value);
}

void Generator::FuzzInt64(int64_t* value) {
  GenerateIntegralType<int64_t>(value);
}

void Generator::FuzzUInt64(uint64_t* value) {
  GenerateIntegralType<uint64_t>(value);
}

void Generator::FuzzFloat(float* value) {
  GenerateFloatingType<float>(value);
}

void Generator::FuzzDouble(double* value) {
  GenerateFloatingType<double>(value);
}

void Generator::FuzzString(std::string* value) {
  GenerateStringType<std::string>(value);
}

void Generator::FuzzString16(base::string16* value) {
  GenerateStringType<base::string16>(value);
}

// Overwrites each byte of |data| independently.
void Generator::FuzzData(char* data, int length) {
  for (int i = 0; i < length; ++i) {
    GenerateIntegralType<char>(&data[i]);
  }
}

void Generator::FuzzBytes(void* data, int data_len) {
  FuzzData(static_cast<char*>(data), data_len);
}

bool Generator::ShouldGenerate() {
  // The generator fuzzer should always generate new values.
  return true;
}
+
+} // namespace ipc_fuzzer
diff --git a/chromium/tools/ipc_fuzzer/fuzzer/generator.h b/chromium/tools/ipc_fuzzer/fuzzer/generator.h
new file mode 100644
index 00000000000..2e1e6a0e556
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/fuzzer/generator.h
@@ -0,0 +1,43 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_IPC_FUZZER_MUTATE_GENERATOR_H_
+#define TOOLS_IPC_FUZZER_MUTATE_GENERATOR_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <string>
+
+#include "tools/ipc_fuzzer/fuzzer/fuzzer.h"
+
+namespace ipc_fuzzer {
+
// Fuzzer implementation that ignores any existing value and produces a
// completely new one for every field (see generator.cc).
class Generator : public Fuzzer {
 public:
  Generator() {}
  virtual ~Generator() {}

  void FuzzBool(bool* value) override;
  void FuzzInt(int* value) override;
  void FuzzLong(long* value) override;
  void FuzzSize(size_t* value) override;
  void FuzzUChar(unsigned char* value) override;
  void FuzzWChar(wchar_t* value) override;
  void FuzzUInt16(uint16_t* value) override;
  void FuzzUInt32(uint32_t* value) override;
  void FuzzInt64(int64_t* value) override;
  void FuzzUInt64(uint64_t* value) override;
  void FuzzFloat(float* value) override;
  void FuzzDouble(double* value) override;
  void FuzzString(std::string* value) override;
  void FuzzString16(base::string16* value) override;
  void FuzzData(char* data, int length) override;
  void FuzzBytes(void* data, int data_len) override;
  // Always returns true: generation never reuses an existing value.
  bool ShouldGenerate() override;
};
+
+} // namespace ipc_fuzzer
+
+#endif // TOOLS_IPC_FUZZER_MUTATE_GENERATOR_H_
diff --git a/chromium/tools/ipc_fuzzer/fuzzer/mutator.cc b/chromium/tools/ipc_fuzzer/fuzzer/mutator.cc
new file mode 100644
index 00000000000..25cc58781b2
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/fuzzer/mutator.cc
@@ -0,0 +1,120 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <algorithm>
+#include <string>
+
+#include "base/strings/string_util.h"
+#include "base/strings/utf_string_conversions.h"
+#include "tools/ipc_fuzzer/fuzzer/mutator.h"
+#include "tools/ipc_fuzzer/fuzzer/rand_util.h"
+
+namespace ipc_fuzzer {
+
+template <typename T>
+void FuzzIntegralType(T* value, unsigned int frequency) {
+ if (RandEvent(frequency)) {
+ switch (RandInRange(4)) {
+ case 0: (*value) = 0; break;
+ case 1: (*value)--; break;
+ case 2: (*value)++; break;
+ case 3: (*value) = RandU64(); break;
+ }
+ }
+}
+
// With probability 1/|frequency|, mutates string |value|. Cases 4 and 3
// intentionally fall through into case 2, so the string is doubled three,
// two, or one times (8x / 4x / 2x growth); case 1 appends |literal1|; case
// 0 replaces the whole value with |literal2|.
template <typename T>
void FuzzStringType(T* value, unsigned int frequency,
                    const T& literal1, const T& literal2) {
  if (RandEvent(frequency)) {
    switch (RandInRange(5)) {
      case 4: (*value) = (*value) + (*value);  // FALLTHROUGH
      case 3: (*value) = (*value) + (*value);  // FALLTHROUGH
      case 2: (*value) = (*value) + (*value); break;
      case 1: (*value) += literal1; break;
      case 0: (*value) = literal2; break;
    }
  }
}
+
+void Mutator::FuzzBool(bool* value) {
+ if (RandEvent(frequency_))
+ (*value) = !(*value);
+}
+
// The numeric overrides below delegate to FuzzIntegralType, which applies
// a small in-place perturbation with probability 1/frequency_.

void Mutator::FuzzInt(int* value) {
  FuzzIntegralType<int>(value, frequency_);
}

void Mutator::FuzzLong(long* value) {
  FuzzIntegralType<long>(value, frequency_);
}

void Mutator::FuzzSize(size_t* value) {
  FuzzIntegralType<size_t>(value, frequency_);
}

void Mutator::FuzzUChar(unsigned char* value) {
  FuzzIntegralType<unsigned char>(value, frequency_);
}

void Mutator::FuzzWChar(wchar_t* value) {
  FuzzIntegralType<wchar_t>(value, frequency_);
}

void Mutator::FuzzUInt16(uint16_t* value) {
  FuzzIntegralType<uint16_t>(value, frequency_);
}

void Mutator::FuzzUInt32(uint32_t* value) {
  FuzzIntegralType<uint32_t>(value, frequency_);
}

void Mutator::FuzzInt64(int64_t* value) {
  FuzzIntegralType<int64_t>(value, frequency_);
}

void Mutator::FuzzUInt64(uint64_t* value) {
  FuzzIntegralType<uint64_t>(value, frequency_);
}

// Floats are replaced wholesale (random bit pattern) rather than tweaked.
void Mutator::FuzzFloat(float* value) {
  if (RandEvent(frequency_))
    *value = RandDouble();
}

void Mutator::FuzzDouble(double* value) {
  if (RandEvent(frequency_))
    *value = RandDouble();
}
+
+void Mutator:: FuzzString(std::string* value) {
+ FuzzStringType<std::string>(value, frequency_, "BORKED", std::string());
+}
+
+void Mutator::FuzzString16(base::string16* value) {
+ FuzzStringType<base::string16>(value, frequency_,
+ base::WideToUTF16(L"BORKED"),
+ base::WideToUTF16(L""));
+}
+
// Note: mutation here is doubly gated -- the per-buffer RandEvent below
// plus another RandEvent inside FuzzIntegralType for each byte -- so an
// individual byte changes with probability roughly 1/frequency_^2.
void Mutator::FuzzData(char* data, int length) {
  if (RandEvent(frequency_)) {
    for (int i = 0; i < length; ++i) {
      FuzzIntegralType<char>(&data[i], frequency_);
    }
  }
}

void Mutator::FuzzBytes(void* data, int data_len) {
  FuzzData(static_cast<char*>(data), data_len);
}

bool Mutator::ShouldGenerate() {
  // TODO(mbarbella): With a low probability, allow something to be fully
  // rewritten while mutating instead of always changing the existing value.
  return false;
}
+
+} // namespace ipc_fuzzer
diff --git a/chromium/tools/ipc_fuzzer/fuzzer/mutator.h b/chromium/tools/ipc_fuzzer/fuzzer/mutator.h
new file mode 100644
index 00000000000..993b25482dd
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/fuzzer/mutator.h
@@ -0,0 +1,47 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_IPC_FUZZER_MUTATE_MUTATOR_H_
+#define TOOLS_IPC_FUZZER_MUTATE_MUTATOR_H_
+
+#include <stddef.h>
+#include <stdint.h>
+
+#include <string>
+
+#include "tools/ipc_fuzzer/fuzzer/fuzzer.h"
+
+namespace ipc_fuzzer {
+
// Fuzzer implementation that perturbs existing values in place. Each value
// is changed with probability 1/|frequency| (see mutator.cc).
class Mutator : public Fuzzer {
 public:
  explicit Mutator(unsigned int frequency) : frequency_(frequency) {}
  virtual ~Mutator() {}

  void FuzzBool(bool* value) override;
  void FuzzInt(int* value) override;
  void FuzzLong(long* value) override;
  void FuzzSize(size_t* value) override;
  void FuzzUChar(unsigned char* value) override;
  void FuzzWChar(wchar_t* value) override;
  void FuzzUInt16(uint16_t* value) override;
  void FuzzUInt32(uint32_t* value) override;
  void FuzzInt64(int64_t* value) override;
  void FuzzUInt64(uint64_t* value) override;
  void FuzzFloat(float* value) override;
  void FuzzDouble(double* value) override;
  void FuzzString(std::string* value) override;
  void FuzzString16(base::string16* value) override;
  void FuzzData(char* data, int length) override;
  void FuzzBytes(void* data, int data_len) override;
  // Always returns false: mutation modifies rather than regenerates.
  bool ShouldGenerate() override;

 private:
  // TODO(mbarbella): Use double frequencies.
  // Mutation happens on average once every frequency_ values.
  unsigned int frequency_;
};
+
+} // namespace ipc_fuzzer
+
+#endif // TOOLS_IPC_FUZZER_MUTATE_MUTATOR_H_
diff --git a/chromium/tools/ipc_fuzzer/fuzzer/rand_util.cc b/chromium/tools/ipc_fuzzer/fuzzer/rand_util.cc
new file mode 100644
index 00000000000..5483c3362fb
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/fuzzer/rand_util.cc
@@ -0,0 +1,18 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/rand_util.h"
+#include "tools/ipc_fuzzer/fuzzer/rand_util.h"
+
+namespace ipc_fuzzer {
+
// Shared Mersenne Twister instance backing all Rand* helpers in
// rand_util.h; seeded once by InitRand() and not freed here.
MersenneTwister* g_mersenne_twister = NULL;

// Must be called before any Rand* helper is used. Seeds the twister from
// the OS-provided random source.
void InitRand() {
  // TODO(aedla): convert to C++11 std::mt19937 in the future
  g_mersenne_twister = new MersenneTwister();
  g_mersenne_twister->init_genrand(static_cast<uint32_t>(base::RandUint64()));
}
+
+} // namespace ipc_fuzzer
diff --git a/chromium/tools/ipc_fuzzer/fuzzer/rand_util.h b/chromium/tools/ipc_fuzzer/fuzzer/rand_util.h
new file mode 100644
index 00000000000..d1afa086cde
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/fuzzer/rand_util.h
@@ -0,0 +1,46 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_IPC_FUZZER_MUTATE_RAND_UTIL_H_
+#define TOOLS_IPC_FUZZER_MUTATE_RAND_UTIL_H_
+
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#include "third_party/mt19937ar/mt19937ar.h"
+
+namespace ipc_fuzzer {
+
+extern MersenneTwister* g_mersenne_twister;
+
+void InitRand();
+
+inline uint32_t RandU32() {
+ return g_mersenne_twister->genrand_int32();
+}
+
+inline uint64_t RandU64() {
+ return (static_cast<uint64_t>(RandU32()) << 32) | RandU32();
+}
+
+inline double RandDouble() {
+ uint64_t rand_u64 = RandU64();
+ return *reinterpret_cast<double*>(&rand_u64);
+}
+
+inline uint32_t RandInRange(uint32_t range) {
+ return RandU32() % range;
+}
+
+inline bool RandEvent(uint32_t frequency) {
+ return RandInRange(frequency) == 0;
+}
+
+inline size_t RandElementCount() {
+ return RandU32() % 10;
+}
+
+} // namespace ipc_fuzzer
+
+#endif // TOOLS_IPC_FUZZER_MUTATE_RAND_UTIL_H_
diff --git a/chromium/tools/ipc_fuzzer/message_dump/message_dump.cc b/chromium/tools/ipc_fuzzer/message_dump/message_dump.cc
new file mode 100644
index 00000000000..2c81545a823
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_dump/message_dump.cc
@@ -0,0 +1,65 @@
+// Copyright (c) 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/files/file_path.h"
+#include "base/process/process.h"
+#include "base/strings/string_number_conversions.h"
+#include "build/build_config.h"
+#include "ipc/ipc_channel_proxy.h"
+#include "tools/ipc_fuzzer/message_lib/message_file.h"
+
+#if defined(OS_WIN)
+#define PidToStringType base::UintToString16
+#define MESSAGE_DUMP_EXPORT __declspec(dllexport)
+#else
+#define PidToStringType base::IntToString
+#define MESSAGE_DUMP_EXPORT __attribute__((visibility("default")))
+#endif
+
+namespace ipc_fuzzer {
+
// OutgoingMessageFilter that records a copy of every message passing
// through Rewrite() and, on destruction, writes the collected messages to
// <dump_directory>/<pid>.ipcdump.
class IPCDump : public IPC::ChannelProxy::OutgoingMessageFilter {
 public:
  ~IPCDump() {
    base::FilePath::StringType pid_string =
        PidToStringType(base::Process::Current().Pid());
    base::FilePath output_file_path =
        dump_directory().Append(pid_string + FILE_PATH_LITERAL(".ipcdump"));

    MessageFile::Write(output_file_path, messages_);
  }

  // Records a copy of |message| and forwards the original unmodified.
  IPC::Message* Rewrite(IPC::Message* message) override {
    messages_.push_back(new IPC::Message(*message));
    return message;
  }

  base::FilePath dump_directory() const { return dump_directory_; }

  void set_dump_directory(const base::FilePath& dump_directory) {
    dump_directory_ = dump_directory;
  }

 private:
  // Copies of all messages seen so far (owned by the vector).
  MessageVector messages_;
  // Directory the .ipcdump file is written into at destruction.
  base::FilePath dump_directory_;
};
+
// Single process-wide dump filter; its destructor (run at process
// shutdown) flushes the recorded messages to disk.
IPCDump g_ipcdump;
+
+} // namespace ipc_fuzzer
+
// Entry point avoiding mangled names.
extern "C" {
MESSAGE_DUMP_EXPORT IPC::ChannelProxy::OutgoingMessageFilter* GetFilter(void);
MESSAGE_DUMP_EXPORT void SetDumpDirectory(const base::FilePath& dump_directory);
}

// Returns the process-wide dump filter instance.
IPC::ChannelProxy::OutgoingMessageFilter* GetFilter(void) {
  return &ipc_fuzzer::g_ipcdump;
}

// Sets the directory the per-process .ipcdump file will be written into.
void SetDumpDirectory(const base::FilePath& dump_directory) {
  ipc_fuzzer::g_ipcdump.set_dump_directory(dump_directory);
}
diff --git a/chromium/tools/ipc_fuzzer/message_lib/DEPS b/chromium/tools/ipc_fuzzer/message_lib/DEPS
new file mode 100644
index 00000000000..b2aa99957dd
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_lib/DEPS
@@ -0,0 +1,17 @@
+include_rules = [
+ "+chrome/common",
+ "+components/autofill/content/common",
+ "+components/content_settings/content/common",
+ "+components/dns_prefetch/common",
+ "+components/nacl/common",
+ "+components/network_hints/common",
+ "+components/password_manager/content/common",
+ "+components/pdf/common",
+ "+components/tracing",
+ "+components/translate",
+ "+components/visitedlink/common",
+ "+content/child",
+ "+content/common",
+ "+extensions/common",
+ "+remoting/host",
+]
diff --git a/chromium/tools/ipc_fuzzer/message_lib/all_message_null_macros.h b/chromium/tools/ipc_fuzzer/message_lib/all_message_null_macros.h
new file mode 100644
index 00000000000..2f56371ca3a
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_lib/all_message_null_macros.h
@@ -0,0 +1,9 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// No include guard, may be included multiple times.
+
+// Include all null macros.
+#include "chrome/common/safe_browsing/ipc_protobuf_message_null_macros.h"
+#include "ipc/ipc_message_null_macros.h"
diff --git a/chromium/tools/ipc_fuzzer/message_lib/all_messages.h b/chromium/tools/ipc_fuzzer/message_lib/all_messages.h
new file mode 100644
index 00000000000..cc601787f88
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_lib/all_messages.h
@@ -0,0 +1,28 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Multiply-included file, hence no include guard.
+// Inclusion of all message files recognized by message_lib. All messages
+// received by RenderProcessHost should be included here for the IPC fuzzer.
+
+// Force all multi-include optional files to be included again.
+#undef CHROME_COMMON_COMMON_PARAM_TRAITS_MACROS_H_
+#undef COMPONENTS_AUTOFILL_CONTENT_COMMON_AUTOFILL_PARAM_TRAITS_MACROS_H_
+#undef COMPONENTS_NACL_COMMON_NACL_TYPES_PARAM_TRAITS_H_
+#undef CONTENT_COMMON_CONTENT_PARAM_TRAITS_MACROS_H_
+#undef CONTENT_COMMON_FRAME_PARAM_MACROS_H_
+#undef CONTENT_PUBLIC_COMMON_COMMON_PARAM_TRAITS_MACROS_H_
+
+#include "chrome/common/all_messages.h"
+#include "components/autofill/content/common/autofill_message_generator.h"
+#include "components/content_settings/content/common/content_settings_message_generator.h"
+#include "components/nacl/common/nacl_host_messages.h"
+#include "components/network_hints/common/network_hints_message_generator.h"
+#include "components/password_manager/content/common/credential_manager_message_generator.h"
+#include "components/pdf/common/pdf_message_generator.h"
+#include "components/tracing/tracing_messages.h"
+#include "components/translate/content/common/translate_messages.h"
+#include "components/visitedlink/common/visitedlink_message_generator.h"
+#include "content/common/all_messages.h"
+#include "extensions/common/extension_message_generator.h"
diff --git a/chromium/tools/ipc_fuzzer/message_lib/message_cracker.h b/chromium/tools/ipc_fuzzer/message_lib/message_cracker.h
new file mode 100644
index 00000000000..fcfc25755ff
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_lib/message_cracker.h
@@ -0,0 +1,34 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_IPC_FUZZER_MESSAGE_LIB_MESSAGE_CRACKER_H_
+#define TOOLS_IPC_FUZZER_MESSAGE_LIB_MESSAGE_CRACKER_H_
+
+#include <stdint.h>
+#include <string.h>
+#include "base/macros.h"
+#include "ipc/ipc_message.h"
+
+// Means for updating protected message fields.
// Means for updating protected message fields.
class MessageCracker : public IPC::Message {
 public:
  // Copies the message ID from |src| into |dst|.
  // NOTE(review): assumes the ID occupies the first sizeof(int) bytes of
  // the payload -- confirm against IPC::Message's wire layout.
  static void CopyMessageID(IPC::Message* dst, IPC::Message* src) {
    memcpy(ToCracker(dst)->mutable_payload(),
           ToCracker(src)->payload(),
           sizeof(int));
  }

  // Overwrites the type field in the message header.
  static void SetMessageType(IPC::Message* message, uint32_t type) {
    ToCracker(message)->header()->type = type;
  }

 private:
  // Reinterprets |message| as a MessageCracker to reach IPC::Message's
  // protected accessors.
  static MessageCracker* ToCracker(IPC::Message* message) {
    return reinterpret_cast<MessageCracker*>(message);
  }

  DISALLOW_COPY_AND_ASSIGN(MessageCracker);
};
+
+#endif // TOOLS_IPC_FUZZER_MESSAGE_LIB_MESSAGE_CRACKER_H_
diff --git a/chromium/tools/ipc_fuzzer/message_lib/message_file.h b/chromium/tools/ipc_fuzzer/message_lib/message_file.h
new file mode 100644
index 00000000000..70ab55c7498
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_lib/message_file.h
@@ -0,0 +1,28 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_IPC_FUZZER_MESSAGE_LIB_MESSAGE_FILE_H_
+#define TOOLS_IPC_FUZZER_MESSAGE_LIB_MESSAGE_FILE_H_
+
+#include "base/files/file_path.h"
+#include "base/macros.h"
+#include "base/memory/scoped_vector.h"
+#include "ipc/ipc_message.h"
+
+namespace ipc_fuzzer {
+
+typedef ScopedVector<IPC::Message> MessageVector;
+
// Static-only helpers for (de)serializing a MessageVector to the on-disk
// dump format described in message_file_format.h. Both return false on
// failure.
class MessageFile {
 public:
  static bool Read(const base::FilePath& path, MessageVector* messages);
  static bool Write(const base::FilePath& path, const MessageVector& messages);

 private:
  DISALLOW_COPY_AND_ASSIGN(MessageFile);
};
+
+} // namespace ipc_fuzzer
+
+#endif // TOOLS_IPC_FUZZER_MESSAGE_LIB_MESSAGE_FILE_H_
diff --git a/chromium/tools/ipc_fuzzer/message_lib/message_file_format.h b/chromium/tools/ipc_fuzzer/message_lib/message_file_format.h
new file mode 100644
index 00000000000..fb39d05f12f
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_lib/message_file_format.h
@@ -0,0 +1,63 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_IPC_FUZZER_MESSAGE_LIB_MESSAGE_FILE_FORMAT_H_
+#define TOOLS_IPC_FUZZER_MESSAGE_LIB_MESSAGE_FILE_FORMAT_H_
+
+#include <stdint.h>
+
+// Message file contains IPC messages and message names. Each message type
+// has a NameTableEntry mapping the type to a name.
+//
+// |========================|
+// | FileHeader |
+// |========================|
+// | Message |
+// |------------------------|
+// | Message |
+// |------------------------|
+// | ... |
+// |========================|
+// | NameTableEntry |
+// |------------------------|
+// | NameTableEntry |
+// |------------------------|
+// | ... |
+// |------------------------|
+// | type = 0x0002070f |
+// | string_table_offset = ----+
+// |------------------------| |
+// | ... | |
+// |========================| |
+// | message name | |
+// |------------------------| |
+// | message name | |
+// |------------------------| |
+// | ... | |
+// |------------------------| |
+// | "FrameHostMsg_OpenURL" <--+
+// |------------------------|
+// | ... |
+// |========================|
+
+namespace ipc_fuzzer {
+
// Fixed-size header at the start of every message dump file.
struct FileHeader {
  static const uint32_t kMagicValue = 0x1bcf11ee;
  static const uint32_t kCurrentVersion = 1;

  uint32_t magic;          // Must equal kMagicValue.
  uint32_t version;        // Format version (kCurrentVersion).
  uint32_t message_count;  // Number of serialized messages that follow.
  uint32_t name_count;     // Number of NameTableEntry records.
};
+
// Maps a message type to its name in the trailing string table (see the
// file-layout diagram above).
struct NameTableEntry {
  uint32_t type;                 // IPC message type ID.
  uint32_t string_table_offset;  // Byte offset of the name in the table.
};
+
+} // namespace ipc_fuzzer
+
+#endif // TOOLS_IPC_FUZZER_MESSAGE_LIB_MESSAGE_FILE_FORMAT_H_
diff --git a/chromium/tools/ipc_fuzzer/message_lib/message_file_reader.cc b/chromium/tools/ipc_fuzzer/message_lib/message_file_reader.cc
new file mode 100644
index 00000000000..7fe5aba4dd4
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_lib/message_file_reader.cc
@@ -0,0 +1,235 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <limits.h>
+#include <stddef.h>
+#include <stdint.h>
+
+#include "base/files/file_path.h"
+#include "base/files/memory_mapped_file.h"
+#include "base/logging.h"
+#include "base/macros.h"
+#include "base/strings/string_piece.h"
+#include "ipc/ipc_message.h"
+#include "tools/ipc_fuzzer/message_lib/message_cracker.h"
+#include "tools/ipc_fuzzer/message_lib/message_file.h"
+#include "tools/ipc_fuzzer/message_lib/message_file_format.h"
+#include "tools/ipc_fuzzer/message_lib/message_names.h"
+
+namespace ipc_fuzzer {
+
+namespace {
+
+// Helper class to read IPC message file into a MessageVector and
+// fix message types.
+class Reader {
+ public:
+ Reader(const base::FilePath& path);
+ bool Read(MessageVector* messages);
+
+ private:
+ template <typename T>
+ bool CutObject(const T** object);
+
+ // Reads the header, checks magic and version.
+ bool ReadHeader();
+
+ bool MapFile();
+ bool ReadMessages();
+
+ // Last part of the file is a string table for message names.
+ bool ReadStringTable();
+
+ // Reads type <-> name mapping into name_map_. References string table.
+ bool ReadNameTable();
+
+ // Removes obsolete messages from the vector.
+ bool RemoveUnknownMessages();
+
+ // Does type -> name -> correct_type fixup.
+ void FixMessageTypes();
+
+ // Raw data.
+ base::FilePath path_;
+ base::MemoryMappedFile mapped_file_;
+ base::StringPiece file_data_;
+ base::StringPiece string_table_;
+
+ // Parsed data.
+ const FileHeader* header_;
+ MessageVector* messages_;
+ MessageNames name_map_;
+
+ DISALLOW_COPY_AND_ASSIGN(Reader);
+};
+
+Reader::Reader(const base::FilePath& path)
+ : path_(path),
+ header_(NULL),
+ messages_(NULL) {
+}
+
+template <typename T>
+bool Reader::CutObject(const T** object) {
+ if (file_data_.size() < sizeof(T)) {
+ LOG(ERROR) << "Unexpected EOF.";
+ return false;
+ }
+ *object = reinterpret_cast<const T*>(file_data_.data());
+ file_data_.remove_prefix(sizeof(T));
+ return true;
+}
+
+bool Reader::ReadHeader() {
+ if (!CutObject<FileHeader>(&header_))
+ return false;
+ if (header_->magic != FileHeader::kMagicValue) {
+ LOG(ERROR) << path_.value() << " is not an IPC message file.";
+ return false;
+ }
+ if (header_->version != FileHeader::kCurrentVersion) {
+ LOG(ERROR) << "Wrong version for message file " << path_.value() << ". "
+ << "File version is " << header_->version << ", "
+ << "current version is " << FileHeader::kCurrentVersion << ".";
+ return false;
+ }
+ return true;
+}
+
+bool Reader::MapFile() {
+ if (!mapped_file_.Initialize(path_)) {
+ LOG(ERROR) << "Failed to map testcase: " << path_.value();
+ return false;
+ }
+ const char* data = reinterpret_cast<const char*>(mapped_file_.data());
+ file_data_.set(data, mapped_file_.length());
+ return true;
+}
+
+bool Reader::ReadMessages() {
+ for (size_t i = 0; i < header_->message_count; ++i) {
+ const char* begin = file_data_.begin();
+ const char* end = file_data_.end();
+ IPC::Message::NextMessageInfo info;
+ IPC::Message::FindNext(begin, end, &info);
+ if (!info.message_found) {
+ LOG(ERROR) << "Failed to parse message.";
+ return false;
+ }
+
+ CHECK_EQ(info.message_end, info.pickle_end);
+ size_t msglen = info.message_end - begin;
+ if (msglen > INT_MAX) {
+ LOG(ERROR) << "Message too large.";
+ return false;
+ }
+
+ // Copy is necessary to fix message type later.
+ IPC::Message const_message(begin, msglen);
+ IPC::Message* message = new IPC::Message(const_message);
+ messages_->push_back(message);
+ file_data_.remove_prefix(msglen);
+ }
+ return true;
+}
+
+bool Reader::ReadStringTable() {
+ size_t name_count = header_->name_count;
+ if (!name_count)
+ return true;
+ if (name_count > file_data_.size() / sizeof(NameTableEntry)) {
+ LOG(ERROR) << "Invalid name table size: " << name_count;
+ return false;
+ }
+
+ size_t string_table_offset = name_count * sizeof(NameTableEntry);
+ string_table_ = file_data_.substr(string_table_offset);
+ if (string_table_.empty()) {
+ LOG(ERROR) << "Missing string table.";
+ return false;
+ }
+ if (string_table_.end()[-1] != '\0') {
+ LOG(ERROR) << "String table doesn't end with NUL.";
+ return false;
+ }
+ return true;
+}
+
+bool Reader::ReadNameTable() {
+ for (size_t i = 0; i < header_->name_count; ++i) {
+ const NameTableEntry* entry;
+ if (!CutObject<NameTableEntry>(&entry))
+ return false;
+ size_t offset = entry->string_table_offset;
+ if (offset >= string_table_.size()) {
+ LOG(ERROR) << "Invalid string table offset: " << offset;
+ return false;
+ }
+ name_map_.Add(entry->type, std::string(string_table_.data() + offset));
+ }
+ return true;
+}
+
+bool Reader::RemoveUnknownMessages() {
+ MessageVector::iterator it = messages_->begin();
+ while (it != messages_->end()) {
+ uint32_t type = (*it)->type();
+ if (!name_map_.TypeExists(type)) {
+ LOG(ERROR) << "Missing name table entry for type " << type;
+ return false;
+ }
+ const std::string& name = name_map_.TypeToName(type);
+ if (!MessageNames::GetInstance()->NameExists(name)) {
+ LOG(WARNING) << "Unknown message " << name;
+ it = messages_->erase(it);
+ } else {
+ ++it;
+ }
+ }
+ return true;
+}
+
+// Message types are based on line numbers, so a minor edit of *_messages.h
+// changes the types of messages in that file. The types are fixed here to
+// increase the lifetime of message files. This is only a partial fix because
+// message arguments and structure layouts can change as well.
+void Reader::FixMessageTypes() {
+ for (MessageVector::iterator it = messages_->begin();
+ it != messages_->end(); ++it) {
+ uint32_t type = (*it)->type();
+ const std::string& name = name_map_.TypeToName(type);
+ uint32_t correct_type = MessageNames::GetInstance()->NameToType(name);
+ if (type != correct_type)
+ MessageCracker::SetMessageType(*it, correct_type);
+ }
+}
+
+bool Reader::Read(MessageVector* messages) {
+ messages_ = messages;
+
+ if (!MapFile())
+ return false;
+ if (!ReadHeader())
+ return false;
+ if (!ReadMessages())
+ return false;
+ if (!ReadStringTable())
+ return false;
+ if (!ReadNameTable())
+ return false;
+ if (!RemoveUnknownMessages())
+ return false;
+ FixMessageTypes();
+
+ return true;
+}
+
+} // namespace
+
+bool MessageFile::Read(const base::FilePath& path, MessageVector* messages) {
+ Reader reader(path);
+ return reader.Read(messages);
+}
+
+} // namespace ipc_fuzzer
diff --git a/chromium/tools/ipc_fuzzer/message_lib/message_file_writer.cc b/chromium/tools/ipc_fuzzer/message_lib/message_file_writer.cc
new file mode 100644
index 00000000000..6511c279e6a
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_lib/message_file_writer.cc
@@ -0,0 +1,168 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <limits.h>
+#include <stddef.h>
+#include <stdint.h>
+#include <set>
+
+#include "base/files/file.h"
+#include "base/logging.h"
+#include "base/macros.h"
+#include "tools/ipc_fuzzer/message_lib/message_file.h"
+#include "tools/ipc_fuzzer/message_lib/message_file_format.h"
+#include "tools/ipc_fuzzer/message_lib/message_names.h"
+
+namespace ipc_fuzzer {
+
+namespace {
+
+// Helper class to write a MessageVector + message names to a file.
+class Writer {
+ public:
+ Writer(const base::FilePath& path);
+ ~Writer() {}
+ bool Write(const MessageVector& messages);
+
+ private:
+ bool OpenFile();
+
+ // Helper to append data to file_.
+ bool WriteBlob(const void *buffer, size_t size);
+
+ // Collects a set of MessageVector message types. Corresponding message
+ // names need to be included in the file.
+ bool CollectMessageTypes();
+
+ bool WriteHeader();
+ bool WriteMessages();
+
+ // Each name table entry is a message type + string table offset.
+ bool WriteNameTable();
+
+ // String table contains the actual message names.
+ bool WriteStringTable();
+
+ typedef std::set<uint32_t> TypesSet;
+ base::FilePath path_;
+ base::File file_;
+ const MessageVector* messages_;
+ TypesSet types_;
+
+ DISALLOW_COPY_AND_ASSIGN(Writer);
+};
+
+Writer::Writer(const base::FilePath& path) : path_(path), messages_(NULL) {
+}
+
+bool Writer::OpenFile() {
+ file_.Initialize(path_,
+ base::File::FLAG_CREATE_ALWAYS | base::File::FLAG_WRITE);
+ if (!file_.IsValid()) {
+ LOG(ERROR) << "Failed to create IPC message file: " << path_.value();
+ return false;
+ }
+ return true;
+}
+
+bool Writer::WriteBlob(const void *buffer, size_t size) {
+ if (size > INT_MAX)
+ return false;
+ const char* char_buffer = static_cast<const char*>(buffer);
+ int ret = file_.WriteAtCurrentPos(char_buffer, size);
+ if (ret != static_cast<int>(size)) {
+ LOG(ERROR) << "Failed to write " << size << " bytes.";
+ return false;
+ }
+ return true;
+}
+
+bool Writer::CollectMessageTypes() {
+ for (size_t i = 0; i < messages_->size(); ++i) {
+ uint32_t type = (*messages_)[i]->type();
+ if (!MessageNames::GetInstance()->TypeExists(type)) {
+ LOG(ERROR) << "Unknown message type: " << type;
+ return false;
+ }
+ types_.insert(type);
+ }
+ return true;
+}
+
+bool Writer::WriteHeader() {
+ FileHeader header;
+ if (messages_->size() > UINT_MAX)
+ return false;
+ header.magic = FileHeader::kMagicValue;
+ header.version = FileHeader::kCurrentVersion;
+ header.message_count = messages_->size();
+ header.name_count = types_.size();
+ if (!WriteBlob(&header, sizeof(FileHeader)))
+ return false;
+ return true;
+}
+
+bool Writer::WriteMessages() {
+ for (size_t i = 0; i < messages_->size(); ++i) {
+ IPC::Message* message = (*messages_)[i];
+ if (!WriteBlob(message->data(), message->size()))
+ return false;
+ }
+ return true;
+}
+
+bool Writer::WriteNameTable() {
+ size_t string_table_offset = 0;
+ NameTableEntry entry;
+
+ for (TypesSet::iterator it = types_.begin(); it != types_.end(); ++it) {
+ if (string_table_offset > UINT_MAX)
+ return false;
+ entry.type = *it;
+ entry.string_table_offset = string_table_offset;
+ if (!WriteBlob(&entry, sizeof(NameTableEntry)))
+ return false;
+ const std::string& name = MessageNames::GetInstance()->TypeToName(*it);
+ string_table_offset += name.length() + 1;
+ }
+ return true;
+}
+
+bool Writer::WriteStringTable() {
+ for (TypesSet::iterator it = types_.begin(); it != types_.end(); ++it) {
+ const std::string& name = MessageNames::GetInstance()->TypeToName(*it);
+ if (!WriteBlob(name.c_str(), name.length() + 1))
+ return false;
+ }
+ return true;
+}
+
+bool Writer::Write(const MessageVector& messages) {
+ messages_ = &messages;
+
+ if (!OpenFile())
+ return false;
+ if (!CollectMessageTypes())
+ return false;
+ if (!WriteHeader())
+ return false;
+ if (!WriteMessages())
+ return false;
+ if (!WriteNameTable())
+ return false;
+ if (!WriteStringTable())
+ return false;
+
+ return true;
+}
+
+} // namespace
+
+bool MessageFile::Write(const base::FilePath& path,
+ const MessageVector& messages) {
+ Writer writer(path);
+ return writer.Write(messages);
+}
+
+} // namespace ipc_fuzzer
diff --git a/chromium/tools/ipc_fuzzer/message_lib/message_lib.gyp b/chromium/tools/ipc_fuzzer/message_lib/message_lib.gyp
index d0b7ae5ca73..56ccce07f3b 100644
--- a/chromium/tools/ipc_fuzzer/message_lib/message_lib.gyp
+++ b/chromium/tools/ipc_fuzzer/message_lib/message_lib.gyp
@@ -22,6 +22,7 @@
'../../../third_party/libjingle/libjingle.gyp:libjingle',
'../../../third_party/mt19937ar/mt19937ar.gyp:mt19937ar',
'../../../third_party/WebKit/public/blink.gyp:blink',
+ '../../../third_party/WebKit/public/blink_headers.gyp:blink_headers',
'../../../ui/accessibility/accessibility.gyp:ax_gen',
],
},
diff --git a/chromium/tools/ipc_fuzzer/message_lib/message_names.cc b/chromium/tools/ipc_fuzzer/message_lib/message_names.cc
new file mode 100644
index 00000000000..da11f237f18
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_lib/message_names.cc
@@ -0,0 +1,37 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/ipc_fuzzer/message_lib/message_names.h"
+#include "tools/ipc_fuzzer/message_lib/all_messages.h"
+
+#include "tools/ipc_fuzzer/message_lib/all_message_null_macros.h"
+#undef IPC_MESSAGE_DECL
+#define IPC_MESSAGE_DECL(name, ...) \
+ names.Add(static_cast<uint32_t>(name::ID), #name);
+
+void PopulateIpcMessageNames(ipc_fuzzer::MessageNames& names) {
+#include "tools/ipc_fuzzer/message_lib/all_messages.h"
+}
+
+namespace ipc_fuzzer {
+
+// static
+MessageNames* MessageNames::all_names_ = NULL;
+
+MessageNames::MessageNames() {
+}
+
+MessageNames::~MessageNames() {
+}
+
+// static
+MessageNames* MessageNames::GetInstance() {
+ if (!all_names_) {
+ all_names_ = new MessageNames();
+ PopulateIpcMessageNames(*all_names_);
+ }
+ return all_names_;
+}
+
+} // namespace ipc_fuzzer
diff --git a/chromium/tools/ipc_fuzzer/message_lib/message_names.h b/chromium/tools/ipc_fuzzer/message_lib/message_names.h
new file mode 100644
index 00000000000..5a9e4427d5e
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_lib/message_names.h
@@ -0,0 +1,61 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_IPC_FUZZER_MESSAGE_LIB_MESSAGE_NAMES_H_
+#define TOOLS_IPC_FUZZER_MESSAGE_LIB_MESSAGE_NAMES_H_
+
+#include <stdint.h>
+
+#include <map>
+#include <string>
+#include "base/logging.h"
+#include "base/macros.h"
+
+namespace ipc_fuzzer {
+
+class MessageNames {
+ public:
+ MessageNames();
+ ~MessageNames();
+ static MessageNames* GetInstance();
+
+ void Add(uint32_t type, const std::string& name) {
+ name_map_[type] = name;
+ type_map_[name] = type;
+ }
+
+ bool TypeExists(uint32_t type) {
+ return name_map_.find(type) != name_map_.end();
+ }
+
+ bool NameExists(const std::string& name) {
+ return type_map_.find(name) != type_map_.end();
+ }
+
+ const std::string& TypeToName(uint32_t type) {
+ TypeToNameMap::iterator it = name_map_.find(type);
+ CHECK(it != name_map_.end());
+ return it->second;
+ }
+
+ uint32_t NameToType(const std::string& name) {
+ NameToTypeMap::iterator it = type_map_.find(name);
+ CHECK(it != type_map_.end());
+ return it->second;
+ }
+
+ private:
+ typedef std::map<uint32_t, std::string> TypeToNameMap;
+ typedef std::map<std::string, uint32_t> NameToTypeMap;
+ TypeToNameMap name_map_;
+ NameToTypeMap type_map_;
+
+ static MessageNames* all_names_;
+
+ DISALLOW_COPY_AND_ASSIGN(MessageNames);
+};
+
+} // namespace ipc_fuzzer
+
+#endif // TOOLS_IPC_FUZZER_MESSAGE_LIB_MESSAGE_NAMES_H_
diff --git a/chromium/tools/ipc_fuzzer/message_replay/DEPS b/chromium/tools/ipc_fuzzer/message_replay/DEPS
new file mode 100644
index 00000000000..7826d21ef9c
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_replay/DEPS
@@ -0,0 +1,5 @@
+include_rules = [
+ "+chrome/common",
+ "+content/public/common",
+ "+mojo/edk/embedder"
+]
diff --git a/chromium/tools/ipc_fuzzer/message_replay/replay.cc b/chromium/tools/ipc_fuzzer/message_replay/replay.cc
new file mode 100644
index 00000000000..4f3999d2e9a
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_replay/replay.cc
@@ -0,0 +1,23 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdlib.h>
+
+#include "base/at_exit.h"
+#include "tools/ipc_fuzzer/message_replay/replay_process.h"
+
+int main(int argc, const char** argv) {
+ base::AtExitManager exit_manager;
+ ipc_fuzzer::ReplayProcess replay;
+ if (!replay.Initialize(argc, argv))
+ return EXIT_FAILURE;
+
+ replay.OpenChannel();
+
+ if (!replay.OpenTestcase())
+ return EXIT_FAILURE;
+
+ replay.Run();
+ return EXIT_SUCCESS;
+}
diff --git a/chromium/tools/ipc_fuzzer/message_replay/replay_process.cc b/chromium/tools/ipc_fuzzer/message_replay/replay_process.cc
new file mode 100644
index 00000000000..54b56de8f10
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_replay/replay_process.cc
@@ -0,0 +1,172 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "tools/ipc_fuzzer/message_replay/replay_process.h"
+
+#include <limits.h>
+#include <string>
+
+#include "base/bind.h"
+#include "base/command_line.h"
+#include "base/files/file_path.h"
+#include "base/logging.h"
+#include "base/posix/global_descriptors.h"
+#include "build/build_config.h"
+#include "chrome/common/chrome_switches.h"
+#include "content/public/common/content_switches.h"
+#include "content/public/common/mojo_channel_switches.h"
+#include "ipc/ipc_descriptors.h"
+#include "ipc/ipc_switches.h"
+#include "ipc/mojo/ipc_channel_mojo.h"
+#include "mojo/edk/embedder/embedder.h"
+#include "mojo/edk/embedder/platform_channel_pair.h"
+
+#if defined(OS_POSIX)
+#include "content/public/common/content_descriptors.h"
+#endif
+
+namespace ipc_fuzzer {
+
+void InitializeMojo() {
+ mojo::edk::SetMaxMessageSize(64 * 1024 * 1024);
+ mojo::edk::Init();
+}
+
+void InitializeMojoIPCChannel() {
+ mojo::edk::ScopedPlatformHandle platform_channel;
+#if defined(OS_WIN)
+ platform_channel =
+ mojo::edk::PlatformChannelPair::PassClientHandleFromParentProcess(
+ *base::CommandLine::ForCurrentProcess());
+#elif defined(OS_POSIX)
+ platform_channel.reset(mojo::edk::PlatformHandle(
+ base::GlobalDescriptors::GetInstance()->Get(kMojoIPCChannel)));
+#endif
+ CHECK(platform_channel.is_valid());
+ mojo::edk::SetParentPipeHandle(std::move(platform_channel));
+}
+
+ReplayProcess::ReplayProcess()
+ : io_thread_("Chrome_ChildIOThread"),
+ shutdown_event_(true, false),
+ message_index_(0) {
+}
+
+ReplayProcess::~ReplayProcess() {
+ channel_.reset();
+
+ // Signal this event before shutting down the service process. That way all
+ // background threads can cleanup.
+ shutdown_event_.Signal();
+ io_thread_.Stop();
+}
+
+bool ReplayProcess::Initialize(int argc, const char** argv) {
+ base::CommandLine::Init(argc, argv);
+
+ if (!base::CommandLine::ForCurrentProcess()->HasSwitch(
+ switches::kIpcFuzzerTestcase)) {
+ LOG(ERROR) << "This binary shouldn't be executed directly, "
+ << "please use tools/ipc_fuzzer/scripts/play_testcase.py";
+ return false;
+ }
+
+ // Log to both stderr and file destinations.
+ logging::SetMinLogLevel(logging::LOG_ERROR);
+ logging::LoggingSettings settings;
+ settings.logging_dest = logging::LOG_TO_ALL;
+ settings.log_file = FILE_PATH_LITERAL("ipc_replay.log");
+ logging::InitLogging(settings);
+
+ // Make sure to initialize Mojo before starting the IO thread.
+ InitializeMojo();
+
+ io_thread_.StartWithOptions(
+ base::Thread::Options(base::MessageLoop::TYPE_IO, 0));
+
+#if defined(OS_POSIX)
+ base::GlobalDescriptors* g_fds = base::GlobalDescriptors::GetInstance();
+ g_fds->Set(kPrimaryIPCChannel,
+ kPrimaryIPCChannel + base::GlobalDescriptors::kBaseDescriptor);
+ g_fds->Set(kMojoIPCChannel,
+ kMojoIPCChannel + base::GlobalDescriptors::kBaseDescriptor);
+#endif
+
+ mojo_ipc_support_.reset(new IPC::ScopedIPCSupport(io_thread_.task_runner()));
+ InitializeMojoIPCChannel();
+
+ return true;
+}
+
+void ReplayProcess::OpenChannel() {
+ // TODO(morrita): As the adoption of ChannelMojo spreads, this
+ // criteria has to be updated.
+ std::string process_type =
+ base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(
+ switches::kProcessType);
+ bool should_use_mojo = process_type == switches::kRendererProcess &&
+ content::ShouldUseMojoChannel();
+ if (should_use_mojo) {
+ std::string token =
+ base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(
+ switches::kMojoChannelToken);
+ channel_ = IPC::ChannelProxy::Create(
+ IPC::ChannelMojo::CreateClientFactory(mojo::edk::CreateChildMessagePipe(
+ base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(
+ switches::kMojoChannelToken))),
+ this, io_thread_.task_runner());
+ } else {
+ std::string channel_name =
+ base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(
+ switches::kProcessChannelID);
+ channel_ =
+ IPC::ChannelProxy::Create(channel_name, IPC::Channel::MODE_CLIENT, this,
+ io_thread_.task_runner());
+ }
+}
+
+bool ReplayProcess::OpenTestcase() {
+ base::FilePath path =
+ base::CommandLine::ForCurrentProcess()->GetSwitchValuePath(
+ switches::kIpcFuzzerTestcase);
+ return MessageFile::Read(path, &messages_);
+}
+
+void ReplayProcess::SendNextMessage() {
+ if (message_index_ >= messages_.size()) {
+ base::MessageLoop::current()->QuitWhenIdle();
+ return;
+ }
+
+ // Take next message and release it from vector.
+ IPC::Message* message = messages_[message_index_];
+ messages_[message_index_++] = NULL;
+
+ if (!channel_->Send(message)) {
+ LOG(ERROR) << "ChannelProxy::Send() failed after "
+ << message_index_ << " messages";
+ base::MessageLoop::current()->QuitWhenIdle();
+ }
+}
+
+void ReplayProcess::Run() {
+ timer_.reset(new base::Timer(false, true));
+ timer_->Start(FROM_HERE,
+ base::TimeDelta::FromMilliseconds(1),
+ base::Bind(&ReplayProcess::SendNextMessage,
+ base::Unretained(this)));
+ base::MessageLoop::current()->Run();
+}
+
+bool ReplayProcess::OnMessageReceived(const IPC::Message& msg) {
+ return true;
+}
+
+void ReplayProcess::OnChannelError() {
+ LOG(ERROR) << "Channel error, quitting after "
+ << message_index_ << " messages";
+ base::MessageLoop::current()->QuitWhenIdle();
+}
+
+} // namespace ipc_fuzzer
diff --git a/chromium/tools/ipc_fuzzer/message_replay/replay_process.h b/chromium/tools/ipc_fuzzer/message_replay/replay_process.h
new file mode 100644
index 00000000000..9929b72031e
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_replay/replay_process.h
@@ -0,0 +1,65 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TOOLS_IPC_FUZZER_REPLAY_REPLAY_PROCESS_H_
+#define TOOLS_IPC_FUZZER_REPLAY_REPLAY_PROCESS_H_
+
+#include <stddef.h>
+
+#include <memory>
+
+#include "base/macros.h"
+#include "base/message_loop/message_loop.h"
+#include "base/synchronization/waitable_event.h"
+#include "base/threading/thread.h"
+#include "base/timer/timer.h"
+#include "ipc/ipc_channel_proxy.h"
+#include "ipc/ipc_listener.h"
+#include "ipc/ipc_message.h"
+#include "ipc/mojo/scoped_ipc_support.h"
+#include "tools/ipc_fuzzer/message_lib/message_file.h"
+
+namespace ipc_fuzzer {
+
+class ReplayProcess : public IPC::Listener {
+ public:
+ ReplayProcess();
+ ~ReplayProcess() override;
+
+ // Set up command line, logging, IO thread. Returns true on success, false
+ // otherwise.
+ bool Initialize(int argc, const char** argv);
+
+ // Open a channel to the browser process. It will think we are a renderer.
+ void OpenChannel();
+
+ // Extract messages from a file specified by --ipc-fuzzer-testcase=
+ // Returns true on success, false otherwise.
+ bool OpenTestcase();
+
+ // Send messages to the browser.
+ void Run();
+
+ // IPC::Listener implementation.
+ bool OnMessageReceived(const IPC::Message& message) override;
+ void OnChannelError() override;
+
+ private:
+ void SendNextMessage();
+
+ std::unique_ptr<IPC::ScopedIPCSupport> mojo_ipc_support_;
+ std::unique_ptr<IPC::ChannelProxy> channel_;
+ base::MessageLoop main_loop_;
+ base::Thread io_thread_;
+ base::WaitableEvent shutdown_event_;
+ std::unique_ptr<base::Timer> timer_;
+ MessageVector messages_;
+ size_t message_index_;
+
+ DISALLOW_COPY_AND_ASSIGN(ReplayProcess);
+};
+
+} // namespace ipc_fuzzer
+
+#endif // TOOLS_IPC_FUZZER_REPLAY_REPLAY_PROCESS_H_
diff --git a/chromium/tools/ipc_fuzzer/message_tools/DEPS b/chromium/tools/ipc_fuzzer/message_tools/DEPS
new file mode 100644
index 00000000000..12bafed9e88
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_tools/DEPS
@@ -0,0 +1,5 @@
+include_rules = [
+ "+chrome/common/all_messages.h",
+ "+chrome/common/safe_browsing/ipc_protobuf_message_null_macros.h",
+ "+content/common/all_messages.h",
+]
diff --git a/chromium/tools/ipc_fuzzer/message_tools/message_list.cc b/chromium/tools/ipc_fuzzer/message_tools/message_list.cc
new file mode 100644
index 00000000000..8b4d10c2bbe
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_tools/message_list.cc
@@ -0,0 +1,183 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stddef.h>
+
+#include <algorithm>
+#include <iostream>
+#include <string>
+#include <vector>
+
+#include "build/build_config.h"
+
+// Include once to get the type definitions
+#include "tools/ipc_fuzzer/message_lib/all_messages.h"
+
+struct msginfo {
+ const char* name;
+ const char* file;
+ int id;
+
+ bool operator< (const msginfo& other) const {
+ return id < other.id;
+ }
+};
+
+// Redefine macros to generate table
+#include "tools/ipc_fuzzer/message_lib/all_message_null_macros.h"
+#undef IPC_MESSAGE_DECL
+#define IPC_MESSAGE_DECL(name, ...) {#name, __FILE__, IPC_MESSAGE_ID()},
+
+static msginfo msgtable[] = {
+#include "tools/ipc_fuzzer/message_lib/all_messages.h"
+};
+#define MSGTABLE_SIZE (sizeof(msgtable)/sizeof(msgtable[0]))
+static_assert(MSGTABLE_SIZE, "check your headers for an extra semicolon");
+
+static bool check_msgtable() {
+ bool result = true;
+ int previous_class_id = 0;
+ int highest_class_id = 0;
+ const char* file_name = "NONE";
+ const char* previous_file_name = "NONE";
+ std::vector<int> exemptions;
+
+ // Exclude test and other non-browser files from consideration. Do not
+ // include message files used inside the actual chrome browser in this list.
+ exemptions.push_back(TestMsgStart);
+ exemptions.push_back(FirefoxImporterUnittestMsgStart);
+ exemptions.push_back(ShellMsgStart);
+ exemptions.push_back(LayoutTestMsgStart);
+ exemptions.push_back(MetroViewerMsgStart);
+ exemptions.push_back(CCMsgStart); // Nothing but param traits.
+ exemptions.push_back(CldDataProviderMsgStart); // Conditional build.
+
+#if defined(DISABLE_NACL)
+ exemptions.push_back(NaClMsgStart);
+#endif // defined(DISABLE_NACL)
+
+#if !defined(OS_ANDROID)
+ exemptions.push_back(JavaBridgeMsgStart);
+ exemptions.push_back(MediaPlayerMsgStart);
+ exemptions.push_back(EncryptedMediaMsgStart);
+ exemptions.push_back(GinJavaBridgeMsgStart);
+ exemptions.push_back(AndroidWebViewMsgStart);
+#endif // !defined(OS_ANDROID)
+
+#if !defined(OS_POSIX)
+ exemptions.push_back(CastMediaMsgStart); // FIXME: Add support for types.
+#endif // !defined(OS_POSIX)
+
+#if !defined(USE_OZONE)
+ exemptions.push_back(OzoneGpuMsgStart);
+#endif // !defined(USE_OZONE)
+
+ for (size_t i = 0; i < MSGTABLE_SIZE; ++i) {
+ int class_id = IPC_MESSAGE_ID_CLASS(msgtable[i].id);
+ file_name = msgtable[i].file;
+ if (class_id >= LastIPCMsgStart) {
+ std::cout << "Invalid LastIPCMsgStart setting\n";
+ result = false;
+ }
+ if (class_id == previous_class_id &&
+ strcmp(file_name, previous_file_name) != 0) {
+ std::cerr << "enum used in multiple files: "
+ << file_name << " vs "
+ << previous_file_name << "\n";
+ result = false;
+ }
+ while (class_id > previous_class_id + 1) {
+ std::vector<int>::iterator iter;
+ iter = find(exemptions.begin(), exemptions.end(), previous_class_id + 1);
+ if (iter == exemptions.end()) {
+ std::cout << "Missing message file for enum "
+ << class_id - (previous_class_id + 1)
+ << " before enum used by " << file_name << "\n";
+ result = false;
+ }
+ ++previous_class_id;
+ }
+ previous_class_id = class_id;
+ previous_file_name = file_name;
+ if (class_id > highest_class_id)
+ highest_class_id = class_id;
+ }
+
+ while (LastIPCMsgStart > highest_class_id + 1) {
+ std::vector<int>::iterator iter;
+ iter = find(exemptions.begin(), exemptions.end(), highest_class_id+1);
+ if (iter == exemptions.end()) {
+ std::cout << "Missing message file for enum "
+ << LastIPCMsgStart - (highest_class_id + 1)
+ << " before enum LastIPCMsgStart\n";
+ break;
+ }
+ ++highest_class_id;
+ }
+
+ if (!result)
+ std::cout << "Please check tools/ipc_fuzzer/message_lib/all_messages.h\n";
+
+ return result;
+}
+
+static void dump_msgtable(bool show_args, bool show_ids,
+ bool show_comma, const char *prefix) {
+ bool first = true;
+ for (size_t i = 0; i < MSGTABLE_SIZE; ++i) {
+ if ((!prefix) || strstr(msgtable[i].name, prefix) == msgtable[i].name) {
+ if (show_comma) {
+ if (!first)
+ std::cout << ",";
+ first = false;
+ std::cout << msgtable[i].id;
+ } else {
+ if (show_ids)
+ std::cout << msgtable[i].id << " " <<
+ IPC_MESSAGE_ID_CLASS(msgtable[i].id) << "," <<
+ IPC_MESSAGE_ID_LINE(msgtable[i].id) << " ";
+ std::cout << msgtable[i].name << "\n";
+ }
+ }
+ }
+ if (show_comma)
+ std::cout << "\n";
+}
+
+int main(int argc, char **argv) {
+ bool show_args = false;
+ bool show_ids = false;
+ bool skip_check = false;
+ bool show_comma = false;
+ const char *filter = NULL;
+
+ while (--argc > 0) {
+ ++argv;
+ if (std::string("--args") == *argv) {
+ show_args = true;
+ } else if (std::string("--comma") == *argv) {
+ show_comma = true;
+ } else if (std::string("--filter") == *argv) {
+ filter = *(++argv);
+ --argc;
+ } else if (std::string("--ids") == *argv) {
+ show_ids = true;
+ } else if (std::string("--no-check") == *argv) {
+ skip_check = true;
+ } else {
+ std::cout <<
+ "usage: ipc_message_list [--args] [--ids] [--no-check] "
+ "[--filter prefix] [--comma]\n";
+ return 1;
+ }
+ }
+
+ std::sort(msgtable, msgtable + MSGTABLE_SIZE);
+
+ if (!skip_check && check_msgtable() == false)
+ return 1;
+
+ dump_msgtable(show_args, show_ids, show_comma, filter);
+ return 0;
+}
diff --git a/chromium/tools/ipc_fuzzer/message_tools/message_util.cc b/chromium/tools/ipc_fuzzer/message_tools/message_util.cc
new file mode 100644
index 00000000000..1c74892f673
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/message_tools/message_util.cc
@@ -0,0 +1,179 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <limits.h>
+#include <stddef.h>
+#include <stdlib.h>
+#include <iostream>
+#include <string>
+#include <vector>
+
+#include "base/command_line.h"
+#include "base/strings/string_split.h"
+#include "third_party/re2/src/re2/re2.h"
+#include "tools/ipc_fuzzer/message_lib/message_file.h"
+#include "tools/ipc_fuzzer/message_lib/message_names.h"
+
+namespace {
+
+const char kDumpSwitch[] = "dump";
+const char kDumpSwitchHelp[] =
+ "dump human-readable form to stdout instead of copying.";
+
+const char kEndSwitch[] = "end";
+const char kEndSwitchHelp[] =
+ "output messages before |m|th message in file (exclusive).";
+
+const char kHelpSwitch[] = "help";
+const char kHelpSwitchHelp[] =
+ "display this message.";
+
+const char kInSwitch[] = "in";
+const char kInSwitchHelp[] =
+ "output only the messages at the specified positions in the file.";
+
+const char kInvertSwitch[] = "invert";
+const char kInvertSwitchHelp[] =
+ "output messages NOT meeting above criteria.";
+
+const char kRegexpSwitch[] = "regexp";
+const char kRegexpSwitchHelp[] =
+ "output messages matching regular expression |x|.";
+
+const char kStartSwitch[] = "start";
+const char kStartSwitchHelp[] =
+ "output messages after |n|th message in file (inclusive).";
+
+void usage() {
+ std::cerr << "ipc_message_util: Concatenate all |infile| message files and "
+ << "copy a subset of the result to |outfile|.\n";
+
+ std::cerr << "Usage:\n"
+ << " ipc_message_util"
+ << " [--" << kStartSwitch << "=n]"
+ << " [--" << kEndSwitch << "=m]"
+ << " [--" << kInSwitch << "=i[,j,...]]"
+ << " [--" << kRegexpSwitch << "=x]"
+ << " [--" << kInvertSwitch << "]"
+ << " [--" << kDumpSwitch << "]"
+ << " [--" << kHelpSwitch << "]"
+ << " infile,infile,... [outfile]\n";
+
+ std::cerr << " --" << kStartSwitch << " - " << kStartSwitchHelp << "\n"
+ << " --" << kEndSwitch << " - " << kEndSwitchHelp << "\n"
+ << " --" << kInSwitch << " - " << kInSwitchHelp << "\n"
+ << " --" << kRegexpSwitch << " - " << kRegexpSwitchHelp << "\n"
+ << " --" << kInvertSwitch << " - " << kInvertSwitchHelp << "\n"
+ << " --" << kDumpSwitch << " - " << kDumpSwitchHelp << "\n"
+ << " --" << kHelpSwitch << " - " << kHelpSwitchHelp << "\n";
+}
+
+std::string MessageName(const IPC::Message* msg) {
+ return ipc_fuzzer::MessageNames::GetInstance()->TypeToName(msg->type());
+}
+
+bool MessageMatches(const IPC::Message* msg, const RE2& pattern) {
+ return RE2::FullMatch(MessageName(msg), pattern);
+}
+
+} // namespace
+
+int main(int argc, char** argv) {
+ base::CommandLine::Init(argc, argv);
+ base::CommandLine* cmd = base::CommandLine::ForCurrentProcess();
+ base::CommandLine::StringVector args = cmd->GetArgs();
+
+ if (args.size() < 1 || args.size() > 2 || cmd->HasSwitch(kHelpSwitch)) {
+ usage();
+ return EXIT_FAILURE;
+ }
+
+ size_t start_index = 0;
+ if (cmd->HasSwitch(kStartSwitch)) {
+ int temp = atoi(cmd->GetSwitchValueASCII(kStartSwitch).c_str());
+ if (temp > 0)
+ start_index = static_cast<size_t>(temp);
+ }
+
+ size_t end_index = INT_MAX;
+ if (cmd->HasSwitch(kEndSwitch)) {
+ int temp = atoi(cmd->GetSwitchValueASCII(kEndSwitch).c_str());
+ if (temp > 0)
+ end_index = static_cast<size_t>(temp);
+ }
+
+ bool has_regexp = cmd->HasSwitch(kRegexpSwitch);
+ RE2 filter_pattern(cmd->GetSwitchValueASCII(kRegexpSwitch));
+
+ bool invert = cmd->HasSwitch(kInvertSwitch);
+ bool perform_dump = cmd->HasSwitch(kDumpSwitch);
+
+ base::FilePath::StringType output_file_name;
+
+ if (!perform_dump) {
+ if (args.size() < 2) {
+ usage();
+ return EXIT_FAILURE;
+ }
+ output_file_name = args[1];
+ }
+
+ ipc_fuzzer::MessageVector input_message_vector;
+ for (const base::FilePath::StringType& name : base::SplitString(
+ args[0], base::FilePath::StringType(1, ','),
+ base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL)) {
+ ipc_fuzzer::MessageVector message_vector;
+ if (!ipc_fuzzer::MessageFile::Read(base::FilePath(name), &message_vector))
+ return EXIT_FAILURE;
+ input_message_vector.insert(input_message_vector.end(),
+ message_vector.begin(), message_vector.end());
+ message_vector.weak_clear();
+ }
+
+ bool has_indices = cmd->HasSwitch(kInSwitch);
+ std::vector<bool> indices;
+
+ if (has_indices) {
+ indices.resize(input_message_vector.size(), false);
+ for (const std::string& cur : base::SplitString(
+ cmd->GetSwitchValueASCII(kInSwitch), ",", base::TRIM_WHITESPACE,
+ base::SPLIT_WANT_ALL)) {
+ int index = atoi(cur.c_str());
+ if (index >= 0 && static_cast<size_t>(index) < indices.size())
+ indices[index] = true;
+ }
+ }
+
+ ipc_fuzzer::MessageVector output_message_vector;
+ std::vector<size_t> remap_vector;
+
+ for (size_t i = 0; i < input_message_vector.size(); ++i) {
+ bool valid = (i >= start_index && i < end_index);
+ if (valid && has_regexp) {
+ valid = MessageMatches(input_message_vector[i], filter_pattern);
+ }
+ if (valid && has_indices) {
+ valid = indices[i];
+ }
+ if (valid != invert) {
+ output_message_vector.push_back(input_message_vector[i]);
+ remap_vector.push_back(i);
+ input_message_vector[i] = NULL;
+ }
+ }
+
+ if (perform_dump) {
+ for (size_t i = 0; i < output_message_vector.size(); ++i) {
+ std::cout << remap_vector[i] << ". "
+ << MessageName(output_message_vector[i]) << "\n";
+ }
+ } else {
+ if (!ipc_fuzzer::MessageFile::Write(
+ base::FilePath(output_file_name), output_message_vector)) {
+ return EXIT_FAILURE;
+ }
+ }
+
+ return EXIT_SUCCESS;
+}
diff --git a/chromium/tools/ipc_fuzzer/scripts/cf_package_builder.py b/chromium/tools/ipc_fuzzer/scripts/cf_package_builder.py
new file mode 100755
index 00000000000..1fc98d18693
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/scripts/cf_package_builder.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates IPC fuzzer packages suitable for uploading to ClusterFuzz. Stores
+the packages into chrome build directory. See fuzzer_list below for the list of
+fuzzers.
+"""
+
+import argparse
+import distutils.archive_util
+import os
+import shutil
+import sys
+import tempfile
+
+FUZZER_LIST = [
+ 'ipc_fuzzer_mut',
+ 'ipc_fuzzer_gen',
+]
+
+class CFPackageBuilder:
+ def __init__(self):
+ self.fuzzer_list = FUZZER_LIST
+
+ def parse_arguments(self):
+ desc = 'Builder of IPC fuzzer packages for ClusterFuzz'
+ parser = argparse.ArgumentParser(description=desc)
+ parser.add_argument('--out-dir', dest='out_dir', default='out',
+ help='output directory under src/ directory')
+ parser.add_argument('--build-type', dest='build_type', default='Release',
+ help='Debug vs. Release build')
+ self.args = parser.parse_args()
+
+ def set_application_paths(self):
+ script_path = os.path.realpath(__file__)
+ self.mutate_dir = os.path.dirname(script_path)
+ src_dir = os.path.join(self.mutate_dir, os.pardir, os.pardir, os.pardir)
+ src_dir = os.path.abspath(src_dir)
+ out_dir = os.path.join(src_dir, self.args.out_dir)
+ self.build_dir = os.path.join(out_dir, self.args.build_type)
+
+ def switch_to_temp_work_directory(self):
+ self.old_cwd = os.getcwd()
+ self.work_dir = tempfile.mkdtemp()
+ os.chdir(self.work_dir)
+
+ def remove_temp_work_directory(self):
+ os.chdir(self.old_cwd)
+ shutil.rmtree(self.work_dir)
+
+ def build_package(self, fuzzer):
+ os.makedirs(fuzzer)
+ fuzzer_src_path = os.path.join(self.mutate_dir, fuzzer + '.py')
+ fuzzer_dst_path = os.path.join(fuzzer, 'run.py')
+ shutil.copyfile(fuzzer_src_path, fuzzer_dst_path)
+ utils_src_path = os.path.join(self.mutate_dir, 'utils.py')
+ utils_dst_path = os.path.join(fuzzer, 'utils.py')
+ shutil.copyfile(utils_src_path, utils_dst_path)
+ distutils.archive_util.make_zipfile(fuzzer, fuzzer)
+ package_name = fuzzer + '.zip'
+ shutil.copy(package_name, self.build_dir)
+ final_package_path = os.path.join(self.build_dir, package_name)
+ print 'Built %s.' % final_package_path
+
+ def main(self):
+ self.parse_arguments()
+ self.set_application_paths()
+ self.switch_to_temp_work_directory()
+ for fuzzer in self.fuzzer_list:
+ self.build_package(fuzzer)
+ self.remove_temp_work_directory()
+
+ return 0
+
+if __name__ == "__main__":
+ builder = CFPackageBuilder()
+ sys.exit(builder.main())
diff --git a/chromium/tools/ipc_fuzzer/scripts/ipc_fuzzer_gen.py b/chromium/tools/ipc_fuzzer/scripts/ipc_fuzzer_gen.py
new file mode 100755
index 00000000000..dc7d61edb3e
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/scripts/ipc_fuzzer_gen.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generational ClusterFuzz fuzzer. It generates IPC messages using
+GenerateTraits. Support of GenerateTraits for different types will be gradually
+added.
+"""
+
+import os
+import random
+import subprocess
+import sys
+import utils
+
+FUZZER_NAME_OPTION = '--fuzzer-name=generate'
+MAX_IPC_MESSAGES_PER_TESTCASE = 1500
+
+
+class GenerationalFuzzer:
+ def parse_arguments(self):
+ self.args = utils.parse_arguments()
+
+ def set_application_paths(self):
+ chrome_application_path = utils.get_application_path()
+ chrome_application_directory = os.path.dirname(chrome_application_path)
+ self.ipc_fuzzer_binary = utils.get_fuzzer_application_name()
+ self.ipc_replay_binary = utils.get_replay_application_name()
+ self.ipc_fuzzer_binary_path = os.path.join(
+ chrome_application_directory, self.ipc_fuzzer_binary)
+ self.ipc_replay_binary_path = os.path.join(
+ chrome_application_directory, self.ipc_replay_binary)
+
+ def generate_ipcdump_testcase(self):
+ ipcdump_testcase_path = (
+ utils.random_ipcdump_testcase_path(self.args.output_dir))
+ num_ipc_messages = random.randint(1, MAX_IPC_MESSAGES_PER_TESTCASE)
+ count_option = '--count=%d' % num_ipc_messages
+
+ cmd = [
+ self.ipc_fuzzer_binary_path,
+ FUZZER_NAME_OPTION,
+ count_option,
+ ipcdump_testcase_path,
+ ]
+
+ if subprocess.call(cmd):
+ sys.exit('%s failed.' % self.ipc_fuzzer_binary)
+
+ utils.create_flags_file(ipcdump_testcase_path, self.ipc_replay_binary_path)
+
+ def main(self):
+ self.parse_arguments()
+ self.set_application_paths()
+ for _ in xrange(self.args.no_of_files):
+ self.generate_ipcdump_testcase()
+
+ return 0
+
+if __name__ == "__main__":
+ fuzzer = GenerationalFuzzer()
+ sys.exit(fuzzer.main())
diff --git a/chromium/tools/ipc_fuzzer/scripts/ipc_fuzzer_mut.py b/chromium/tools/ipc_fuzzer/scripts/ipc_fuzzer_mut.py
new file mode 100755
index 00000000000..28d5239f1aa
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/scripts/ipc_fuzzer_mut.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Mutational ClusterFuzz fuzzer. A pre-built corpus of ipcdump files has
+to be uploaded to ClusterFuzz along with this script. As chrome is being
+developed, the corpus will become out-of-date and needs to be updated.
+
+This fuzzer will pick some ipcdumps from the corpus, concatenate them with
+ipc_message_util and mutate the result with ipc_fuzzer_mutate.
+"""
+
+import os
+import random
+import subprocess
+import sys
+import utils
+
+FUZZER_NAME_OPTION = '--fuzzer-name=mutate'
+IPC_MESSAGE_UTIL_APPLICATION = 'ipc_message_util'
+IPCDUMP_MERGE_LIMIT = 50
+
+class MutationalFuzzer:
+ def parse_arguments(self):
+ self.args = utils.parse_arguments()
+
+ def set_application_paths(self):
+ chrome_application_path = utils.get_application_path()
+ chrome_application_directory = os.path.dirname(chrome_application_path)
+
+ self.ipc_message_util_binary = utils.application_name_for_platform(
+ IPC_MESSAGE_UTIL_APPLICATION)
+ self.ipc_fuzzer_binary = utils.get_fuzzer_application_name()
+ self.ipc_replay_binary = utils.get_replay_application_name()
+ self.ipc_message_util_binary_path = os.path.join(
+ chrome_application_directory, self.ipc_message_util_binary)
+ self.ipc_fuzzer_binary_path = os.path.join(
+ chrome_application_directory, self.ipc_fuzzer_binary)
+ self.ipc_replay_binary_path = os.path.join(
+ chrome_application_directory, self.ipc_replay_binary)
+
+ def set_corpus(self):
+ # Corpus should be set per job as a fuzzer-specific environment variable.
+ corpus = os.getenv('IPC_CORPUS_DIR', 'default')
+ corpus_directory = os.path.join(self.args.input_dir, corpus)
+ if not os.path.exists(corpus_directory):
+ sys.exit('Corpus directory "%s" not found.' % corpus_directory)
+
+ entries = os.listdir(corpus_directory)
+ entries = [i for i in entries if i.endswith(utils.IPCDUMP_EXTENSION)]
+ self.corpus = [os.path.join(corpus_directory, entry) for entry in entries]
+
+ def create_mutated_ipcdump_testcase(self):
+ ipcdumps = ','.join(random.sample(self.corpus, IPCDUMP_MERGE_LIMIT))
+ tmp_ipcdump_testcase = utils.create_temp_file()
+ mutated_ipcdump_testcase = (
+ utils.random_ipcdump_testcase_path(self.args.output_dir))
+
+ # Concatenate ipcdumps -> tmp_ipcdump.
+ cmd = [
+ self.ipc_message_util_binary_path,
+ ipcdumps,
+ tmp_ipcdump_testcase,
+ ]
+ if subprocess.call(cmd):
+ sys.exit('%s failed.' % self.ipc_message_util_binary)
+
+ # Mutate tmp_ipcdump -> mutated_ipcdump.
+ cmd = [
+ self.ipc_fuzzer_binary_path,
+ FUZZER_NAME_OPTION,
+ tmp_ipcdump_testcase,
+ mutated_ipcdump_testcase,
+ ]
+ if subprocess.call(cmd):
+ sys.exit('%s failed.' % self.ipc_fuzzer_binary)
+
+ utils.create_flags_file(
+ mutated_ipcdump_testcase, self.ipc_replay_binary_path)
+ os.remove(tmp_ipcdump_testcase)
+
+ def main(self):
+ self.parse_arguments()
+ self.set_application_paths()
+ self.set_corpus()
+ for _ in xrange(self.args.no_of_files):
+ self.create_mutated_ipcdump_testcase()
+
+ return 0
+
+if __name__ == "__main__":
+ fuzzer = MutationalFuzzer()
+ sys.exit(fuzzer.main())
diff --git a/chromium/tools/ipc_fuzzer/scripts/play_testcase.py b/chromium/tools/ipc_fuzzer/scripts/play_testcase.py
new file mode 100755
index 00000000000..69354479dee
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/scripts/play_testcase.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wrapper around chrome.
+
+Replaces all the child processes (renderer, GPU, plugins and utility) with the
+IPC fuzzer. The fuzzer will then play back a specified testcase.
+
+Depends on ipc_fuzzer being available on the same directory as chrome.
+"""
+
+import argparse
+import os
+import platform
+import subprocess
+import sys
+
+CHROME_BINARY_FOR_PLATFORM_DICT = {
+ 'LINUX': 'chrome',
+ 'MAC': 'Chromium.app/Contents/MacOS/Chromium',
+ 'WINDOWS': 'chrome.exe',
+}
+
+def GetPlatform():
+ platform = None
+ if sys.platform.startswith('win'):
+ platform = 'WINDOWS'
+ elif sys.platform.startswith('linux'):
+ platform = 'LINUX'
+ elif sys.platform == 'darwin':
+ platform = 'MAC'
+
+ assert platform is not None
+ return platform
+
+def main():
+ desc = 'Wrapper to run chrome with child processes replaced by IPC fuzzers'
+ parser = argparse.ArgumentParser(description=desc)
+ parser.add_argument('--out-dir', dest='out_dir', default='out',
+ help='output directory under src/ directory')
+ parser.add_argument('--build-type', dest='build_type', default='Release',
+ help='Debug vs. Release build')
+ parser.add_argument('--gdb-browser', dest='gdb_browser', default=False,
+ action='store_true',
+ help='run browser process inside gdb')
+ parser.add_argument('testcase',
+ help='IPC file to be replayed')
+ parser.add_argument('chrome_args',
+ nargs=argparse.REMAINDER,
+ help='any additional arguments are passed to chrome')
+ args = parser.parse_args()
+
+ platform = GetPlatform()
+ chrome_binary = CHROME_BINARY_FOR_PLATFORM_DICT[platform]
+ fuzzer_binary = 'ipc_fuzzer_replay'
+ if platform == 'WINDOWS':
+ fuzzer_binary += '.exe'
+
+ script_path = os.path.realpath(__file__)
+ ipc_fuzzer_dir = os.path.join(os.path.dirname(script_path), os.pardir)
+ src_dir = os.path.abspath(os.path.join(ipc_fuzzer_dir, os.pardir, os.pardir))
+ out_dir = os.path.join(src_dir, args.out_dir)
+ build_dir = os.path.join(out_dir, args.build_type)
+
+ chrome_path = os.path.join(build_dir, chrome_binary)
+ if not os.path.exists(chrome_path):
+ print 'chrome executable not found at ', chrome_path
+ return 1
+
+ fuzzer_path = os.path.join(build_dir, fuzzer_binary)
+ if not os.path.exists(fuzzer_path):
+ print 'fuzzer executable not found at ', fuzzer_path
+ print ('ensure GYP_DEFINES="enable_ipc_fuzzer=1" and build target ' +
+ fuzzer_binary + '.')
+ return 1
+
+ prefixes = {
+ '--renderer-cmd-prefix',
+ '--gpu-launcher',
+ '--plugin-launcher',
+ '--ppapi-plugin-launcher',
+ '--utility-cmd-prefix',
+ }
+
+ chrome_command = [
+ chrome_path,
+ '--ipc-fuzzer-testcase=' + args.testcase,
+ '--no-sandbox',
+ '--disable-kill-after-bad-ipc',
+ '--disable-mojo-channel',
+ ]
+
+ if args.gdb_browser:
+ chrome_command = ['gdb', '--args'] + chrome_command
+
+ launchers = {}
+ for prefix in prefixes:
+ launchers[prefix] = fuzzer_path
+
+ for arg in args.chrome_args:
+ if arg.find('=') != -1:
+ switch, value = arg.split('=', 1)
+ if switch in prefixes:
+ launchers[switch] = value + ' ' + launchers[switch]
+ continue
+ chrome_command.append(arg)
+
+ for switch, value in launchers.items():
+ chrome_command.append(switch + '=' + value)
+
+ command_line = ' '.join(['\'' + arg + '\'' for arg in chrome_command])
+ print 'Executing: ' + command_line
+
+ return subprocess.call(chrome_command)
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/chromium/tools/ipc_fuzzer/scripts/remove_close_messages.py b/chromium/tools/ipc_fuzzer/scripts/remove_close_messages.py
new file mode 100755
index 00000000000..8e42e391fb9
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/scripts/remove_close_messages.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Removes ViewHostMsg_Close and alike from testcases. These messages are an
+annoyance for corpus distillation. They cause the browser to exit, so no
+further messages are processed. On the other hand, ViewHostMsg_Close is useful
+for fuzzing - many found bugs are related to a renderer disappearing. So the
+fuzzer should be crafting random ViewHostMsg_Close messages.
+"""
+
+import argparse
+import os
+import platform
+import shutil
+import subprocess
+import sys
+import tempfile
+
+def create_temp_file():
+ temp_file = tempfile.NamedTemporaryFile(delete=False)
+ temp_file.close()
+ return temp_file.name
+
+def main():
+ desc = 'Remove ViewHostMsg_Close and alike from the testcases.'
+ parser = argparse.ArgumentParser(description=desc)
+ parser.add_argument('--out-dir', dest='out_dir', default='out',
+ help='output directory under src/ directory')
+ parser.add_argument('--build-type', dest='build_type', default='Release',
+ help='Debug vs. Release build')
+ parser.add_argument('testcase_dir',
+ help='Directory containing testcases')
+ parsed = parser.parse_args()
+
+ message_util_binary = 'ipc_message_util'
+
+ script_path = os.path.realpath(__file__)
+ ipc_fuzzer_dir = os.path.join(os.path.dirname(script_path), os.pardir)
+ src_dir = os.path.abspath(os.path.join(ipc_fuzzer_dir, os.pardir, os.pardir))
+ out_dir = os.path.join(src_dir, parsed.out_dir);
+ build_dir = os.path.join(out_dir, parsed.build_type)
+
+ message_util_path = os.path.join(build_dir, message_util_binary)
+ if not os.path.exists(message_util_path):
+ print 'ipc_message_util executable not found at ', message_util_path
+ return 1
+
+ filter_command = [
+ message_util_path,
+ '--invert',
+ '--regexp=ViewHostMsg_Close|ViewHostMsg_ClosePage_ACK',
+ 'input',
+ 'output',
+ ]
+
+ testcase_list = os.listdir(parsed.testcase_dir)
+ testcase_count = len(testcase_list)
+ index = 0
+ for testcase in testcase_list:
+ index += 1
+ print '[%d/%d] Processing %s' % (index, testcase_count, testcase)
+ testcase_path = os.path.join(parsed.testcase_dir, testcase)
+ filtered_path = create_temp_file()
+ filter_command[-2] = testcase_path
+ filter_command[-1] = filtered_path
+ subprocess.call(filter_command)
+ shutil.move(filtered_path, testcase_path)
+
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/chromium/tools/ipc_fuzzer/scripts/utils.py b/chromium/tools/ipc_fuzzer/scripts/utils.py
new file mode 100755
index 00000000000..3c7e1498f51
--- /dev/null
+++ b/chromium/tools/ipc_fuzzer/scripts/utils.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility functions used by Generational and Mutational ClusterFuzz
+fuzzers."""
+
+import argparse
+import os
+import random
+import string
+import sys
+import tempfile
+
+APP_PATH_KEY = 'APP_PATH'
+FLAGS_PREFIX = 'flags-'
+FUZZ_PREFIX = 'fuzz-'
+IPC_FUZZER_APPLICATION = 'ipc_fuzzer'
+IPC_REPLAY_APPLICATION = 'ipc_fuzzer_replay'
+IPCDUMP_EXTENSION = '.ipcdump'
+LAUNCH_PREFIXES = [
+ '--gpu-launcher',
+ '--plugin-launcher',
+ '--ppapi-plugin-launcher',
+ '--renderer-cmd-prefix',
+ '--utility-cmd-prefix',
+]
+
+def application_name_for_platform(application_name):
+ """Return application name for current platform."""
+ if platform() == 'WINDOWS':
+ return application_name + '.exe'
+ return application_name
+
+def create_flags_file(ipcdump_testcase_path, ipc_replay_application_path):
+ """Create a flags file to add launch prefix to application command line."""
+ random_launch_prefix = random.choice(LAUNCH_PREFIXES)
+ file_content = '%s=%s' % (random_launch_prefix, ipc_replay_application_path)
+
+ flags_file_path = ipcdump_testcase_path.replace(FUZZ_PREFIX, FLAGS_PREFIX)
+ file_handle = open(flags_file_path, 'w')
+ file_handle.write(file_content)
+ file_handle.close()
+
+def create_temp_file():
+ """Create a temporary file."""
+ temp_file = tempfile.NamedTemporaryFile(delete=False)
+ temp_file.close()
+ return temp_file.name
+
+def get_fuzzer_application_name():
+ """Get the application name for the fuzzer binary."""
+ return application_name_for_platform(IPC_FUZZER_APPLICATION)
+
+def get_replay_application_name():
+ """Get the application name for the replay binary."""
+ return application_name_for_platform(IPC_REPLAY_APPLICATION)
+
+def parse_arguments():
+ """Parse fuzzer arguments."""
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--input_dir')
+ parser.add_argument('--output_dir')
+ parser.add_argument('--no_of_files', type=int)
+ args = parser.parse_args();
+ if (not args.input_dir or
+ not args.output_dir or
+ not args.no_of_files):
+ parser.print_help()
+ sys.exit(1)
+
+ return args
+
+def random_id(size=16, chars=string.ascii_lowercase):
+ """Return a random id string, default 16 characters long."""
+ return ''.join(random.choice(chars) for _ in range(size))
+
+def random_ipcdump_testcase_path(ipcdump_directory):
+ """Return a random ipc testcase path."""
+ return os.path.join(
+ ipcdump_directory,
+ '%s%s%s' % (FUZZ_PREFIX, random_id(), IPCDUMP_EXTENSION))
+
+def platform():
+ """Return running platform."""
+ if sys.platform.startswith('win'):
+ return 'WINDOWS'
+ if sys.platform.startswith('linux'):
+ return 'LINUX'
+ if sys.platform == 'darwin':
+ return 'MAC'
+
+ assert False, 'Unknown platform'
+
+def get_application_path():
+ """Return chrome application path."""
+ if APP_PATH_KEY not in os.environ:
+ sys.exit(
+ 'Environment variable %s should be set to chrome path.' % APP_PATH_KEY)
+
+ return os.environ[APP_PATH_KEY]
diff --git a/chromium/tools/ipc_messages_log.py b/chromium/tools/ipc_messages_log.py
new file mode 100755
index 00000000000..26284d1cd60
--- /dev/null
+++ b/chromium/tools/ipc_messages_log.py
@@ -0,0 +1,168 @@
+#!/usr/bin/python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Processes a log file and resolves IPC message identifiers.
+
+Resolves IPC messages of the form [unknown type NNNNNN] to named IPC messages.
+
+e.g. logfile containing
+
+I/stderr ( 3915): ipc 3915.3.1370207904 2147483647 S [unknown type 66372]
+
+will be transformed to:
+
+I/stderr ( 3915): ipc 3915.3.1370207904 2147483647 S ViewMsg_SetCSSColors
+
+In order to find the message header files efficiently, it requires that
+Chromium is checked out using git.
+"""
+
+import optparse
+import os
+import re
+import subprocess
+import sys
+
+
+def _SourceDir():
+ """Get chromium's source directory."""
+ return os.path.join(sys.path[0], '..')
+
+
+def _ReadLines(f):
+ """Read from file f and generate right-stripped lines."""
+ for line in f:
+ yield line.rstrip()
+
+
+def _GetMsgStartTable():
+ """Read MsgStart enumeration from ipc/ipc_message_utils.h.
+
+ Determines the message type identifiers by reading.
+ header file ipc/ipc_message_utils.h and looking for
+ enum IPCMessageStart. Assumes following code format in header file:
+ enum IPCMessageStart {
+ Type1MsgStart ...,
+ Type2MsgStart,
+ };
+
+ Returns:
+ A dictionary mapping StartName to enumeration value.
+ """
+ ipc_message_file = _SourceDir() + '/ipc/ipc_message_utils.h'
+ ipc_message_lines = _ReadLines(open(ipc_message_file))
+ is_msg_start = False
+ count = 0
+ msg_start_table = dict()
+ for line in ipc_message_lines:
+ if is_msg_start:
+ if line.strip() == '};':
+ break
+ msgstart_index = line.find('MsgStart')
+ msg_type = line[:msgstart_index] + 'MsgStart'
+ msg_start_table[msg_type.strip()] = count
+ count+=1
+ elif line.strip() == 'enum IPCMessageStart {':
+ is_msg_start = True
+
+ return msg_start_table
+
+
+def _FindMessageHeaderFiles():
+ """Look through the source directory for *_messages.h."""
+ os.chdir(_SourceDir())
+ pipe = subprocess.Popen(['git', 'ls-files', '--', '*_messages.h'],
+ stdout=subprocess.PIPE)
+ return _ReadLines(pipe.stdout)
+
+
+def _GetMsgId(msg_start, line_number, msg_start_table):
+ """Construct the message id given the msg_start and the line number."""
+ hex_str = '%x%04x' % (msg_start_table[msg_start], line_number)
+ return int(hex_str, 16)
+
+
+def _ReadHeaderFile(f, msg_start_table, msg_map):
+ """Read a header file and construct a map from message_id to message name."""
+ msg_def_re = re.compile(
+ '^IPC_(?:SYNC_)?MESSAGE_[A-Z0-9_]+\(([A-Za-z0-9_]+).*')
+ msg_start_re = re.compile(
+ '^\s*#define\s+IPC_MESSAGE_START\s+([a-zA-Z0-9_]+MsgStart).*')
+ msg_start = None
+ msg_name = None
+ line_number = 0
+
+ for line in f:
+ line_number+=1
+ match = re.match(msg_start_re, line)
+ if match:
+ msg_start = match.group(1)
+ # print "msg_start = " + msg_start
+ match = re.match(msg_def_re, line)
+ if match:
+ msg_name = match.group(1)
+ # print "msg_name = " + msg_name
+ if msg_start and msg_name:
+ msg_id = _GetMsgId(msg_start, line_number, msg_start_table)
+ msg_map[msg_id] = msg_name
+ return msg_map
+
+
+def _ResolveMsg(msg_type, msg_map):
+ """Fully resolve a message type to a name."""
+ if msg_type in msg_map:
+ return msg_map[msg_type]
+ else:
+ return '[Unknown message %d (0x%x)]x' % (msg_type, msg_type)
+
+
+def _ProcessLog(f, msg_map):
+ """Read lines from f and resolve the IPC messages according to msg_map."""
+ unknown_msg_re = re.compile('\[unknown type (\d+)\]')
+ for line in f:
+ line = line.rstrip()
+ match = re.search(unknown_msg_re, line)
+ if match:
+ line = re.sub(unknown_msg_re,
+ _ResolveMsg(int(match.group(1)), msg_map),
+ line)
+ print line
+
+
+def _GetMsgMap():
+ """Returns a dictionary mapping from message number to message name."""
+ msg_start_table = _GetMsgStartTable()
+ msg_map = dict()
+ for header_file in _FindMessageHeaderFiles():
+ _ReadHeaderFile(open(header_file),
+ msg_start_table,
+ msg_map)
+ return msg_map
+
+
+def main():
+ """Processes one or more log files with IPC logging messages.
+
+ Replaces '[unknown type NNNNNN]' with resolved
+ IPC messages.
+
+ Reads from standard input if no log files specified on the
+ command line.
+ """
+ parser = optparse.OptionParser('usage: %prog [LOGFILE...]')
+ (_, args) = parser.parse_args()
+
+ msg_map = _GetMsgMap()
+ log_files = args
+
+ if log_files:
+ for log_file in log_files:
+ _ProcessLog(open(log_file), msg_map)
+ else:
+ _ProcessLog(sys.stdin, msg_map)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/chromium/tools/isolate_driver.py b/chromium/tools/isolate_driver.py
new file mode 100755
index 00000000000..f75b4bdf719
--- /dev/null
+++ b/chromium/tools/isolate_driver.py
@@ -0,0 +1,313 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Adaptor script called through build/isolate.gypi.
+
+Creates a wrapping .isolate which 'includes' the original one, that can be
+consumed by tools/swarming_client/isolate.py. Path variables are determined
+based on the current working directory. The relative_cwd in the .isolated file
+is determined based on the .isolate file that declare the 'command' variable to
+be used so the wrapping .isolate doesn't affect this value.
+
+This script loads build.ninja and processes it to determine all the executables
+referenced by the isolated target. It adds them in the wrapping .isolate file.
+
+WARNING: The target to use for build.ninja analysis is the base name of the
+.isolate file plus '_run'. For example, 'foo_test.isolate' would have the target
+'foo_test_run' analysed.
+"""
+
+import glob
+import json
+import logging
+import os
+import posixpath
+import StringIO
+import subprocess
+import sys
+import time
+
+TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
+SWARMING_CLIENT_DIR = os.path.join(TOOLS_DIR, 'swarming_client')
+SRC_DIR = os.path.dirname(TOOLS_DIR)
+
+sys.path.insert(0, SWARMING_CLIENT_DIR)
+
+import isolate_format
+
+
def load_ninja_recursively(build_dir, ninja_path, build_steps):
  """Crudely extracts all the subninja and build referenced in ninja_path.

  In particular, it ignores rule and variable declarations. The goal is to be
  performant (well, as much as python can be performant) which is currently in
  the <200ms range for a complete chromium tree. As such the code is laid out
  for performance instead of readability.

  Args:
    build_dir: Directory containing the root build.ninja; all ninja paths are
        resolved relative to it.
    ninja_path: Path, relative to build_dir, of the .ninja file to parse.
    build_steps: Dict updated in place; maps each build target name to the
        raw (unparsed) dependency portion of its 'build' statement.

  Returns:
    Total number of .ninja files parsed: this one plus every file reached
    through 'subninja' statements.

  Raises:
    IOError: If this file or any referenced subninja file cannot be opened.
  """
  logging.debug('Loading %s', ninja_path)
  try:
    with open(os.path.join(build_dir, ninja_path), 'rb') as f:
      line = None
      merge_line = ''
      subninja = []
      for line in f:
        line = line.rstrip()
        if not line:
          continue

        if line[-1] == '$':
          # The next line needs to be merged in.
          merge_line += line[:-1]
          continue

        if merge_line:
          line = merge_line + line
          merge_line = ''

        # First whitespace-delimited token is the ninja keyword
        # ('build', 'subninja', 'rule', ...).
        statement = line[:line.find(' ')]
        if statement == 'build':
          # Save the dependency list as a raw string. Only the lines needed will
          # be processed with raw_build_to_deps(). This saves a good 70ms of
          # processing time.
          # len('build ') == 6.
          build_target, dependencies = line[6:].split(': ', 1)
          # Interestingly, trying to be smart and only saving the build steps
          # with the intended extensions ('', '.stamp', '.so') slows down
          # parsing even if 90% of the build rules can be skipped.
          # On Windows, a single step may generate two target, so split items
          # accordingly. It has only been seen for .exe/.exe.pdb combos.
          for i in build_target.strip().split():
            build_steps[i] = dependencies
        elif statement == 'subninja':
          # len('subninja ') == 9.
          subninja.append(line[9:])
  except IOError:
    print >> sys.stderr, 'Failed to open %s' % ninja_path
    raise

  total = 1
  for rel_path in subninja:
    try:
      # Load each of the files referenced.
      # TODO(maruel): Skip the files known to not be needed. It saves an aweful
      # lot of processing time.
      total += load_ninja_recursively(build_dir, rel_path, build_steps)
    except IOError:
      print >> sys.stderr, '... as referenced by %s' % ninja_path
      raise
  return total
+
+
def load_ninja(build_dir):
  """Loads the tree of .ninja files in build_dir."""
  steps = {}
  file_count = load_ninja_recursively(build_dir, 'build.ninja', steps)
  logging.info(
      'Loaded %d ninja files, %d build steps', file_count, len(steps))
  return steps
+
+
def using_blacklist(item):
  """Returns True if an item should be analyzed.

  Ignores many rules that are assumed to not depend on a dynamic library. If
  the assumption doesn't hold true anymore for a file format, remove it from
  this list. This is simply an optimization.
  """
  # *.json is normally ignored below, but *.isolated.gen.json is an exception:
  # it is produced by isolate_driver.py in 'test_isolation_mode==prepare'.
  if item.endswith('.isolated.gen.json'):
    return True
  ignored_extensions = frozenset((
      '.a', '.cc', '.css', '.dat', '.def', '.frag', '.h', '.html', '.isolate',
      '.js', '.json', '.manifest', '.o', '.obj', '.pak', '.png', '.pdb', '.py',
      '.strings', '.test', '.txt', '.vert',
  ))
  # ninja files use native path format.
  extension = os.path.splitext(item)[1]
  if extension in ignored_extensions:
    return False
  # Special case Windows: keep .dll.lib but discard a plain .lib.
  if item.endswith('.dll.lib'):
    return True
  if extension == '.lib':
    return False
  # Drop empty tokens and ninja's order-only / implicit separators.
  return item not in ('', '|', '||')
+
+
def raw_build_to_deps(item):
  """Converts a raw ninja build statement into the list of interesting
  dependencies.
  """
  # TODO(maruel): Use a whitelist instead? .stamp, .so.TOC, .dylib.TOC,
  # .dll.lib, .exe and empty.
  # The first item is the build rule, e.g. 'link', 'cxx', 'phony', etc.
  tokens = item.split(' ')[1:]
  return [dep for dep in tokens if using_blacklist(dep)]
+
+
def collect_deps(target, build_steps, dependencies_added, rules_seen):
  """Recursively adds all the interesting dependencies for |target|
  into |dependencies_added|.
  """
  if rules_seen is None:
    rules_seen = set()
  if target in rules_seen:
    # TODO(maruel): Figure out how it happens.
    logging.warning('Circular dependency for %s!', target)
    return
  rules_seen.add(target)
  raw_step = build_steps.get(target)
  if raw_step is None:
    logging.info('Failed to find a build step to generate: %s', target)
    return
  dependencies = raw_build_to_deps(raw_step)
  logging.debug('collect_deps(%s) -> %s', target, dependencies)
  for dependency in dependencies:
    dependencies_added.add(dependency)
    collect_deps(dependency, build_steps, dependencies_added, rules_seen)
+
+
def post_process_deps(build_dir, dependencies):
  """Processes the dependency list with OS specific rules."""
  def strip_wrapper_suffix(dep):
    # '.so.TOC' / '.dylib.TOC' are tables of contents and '.dll.lib' is the
    # import library; in every case keep the actual binary by dropping the
    # last four characters ('.TOC' or '.lib').
    for suffix in ('.so.TOC', '.dylib.TOC', '.dll.lib'):
      if dep.endswith(suffix):
        return dep[:-4]
    return dep

  def is_executable(dep):
    # This script is only for adding new binaries that are created as part of
    # the component build.
    extension = os.path.splitext(dep)[1]
    # On POSIX, executables have no extension.
    if extension not in ('', '.dll', '.dylib', '.exe', '.nexe', '.so'):
      return False
    if os.path.isabs(dep):
      # In some rare case, there's dependency set explicitly on files outside
      # the checkout.
      return False

    # Check for execute access and strip directories. This gets rid of all the
    # phony rules.
    full_path = os.path.join(build_dir, dep)
    return os.access(full_path, os.X_OK) and not os.path.isdir(full_path)

  stripped = [strip_wrapper_suffix(dep) for dep in dependencies]
  return [dep for dep in stripped if is_executable(dep)]
+
+
def create_wrapper(args, isolate_index, isolated_index):
  """Creates a wrapper .isolate that add dynamic libs.

  The original .isolate is not modified.

  Args:
    args: Full argument list destined for isolate.py; args[isolate_index] is
        rewritten in place to point at the generated wrapper file.
    isolate_index: Index in |args| of the .isolate file path.
    isolated_index: Index in |args| of the .isolated file path; its directory
        is taken as the build output directory.
  """
  cwd = os.getcwd()
  isolate = args[isolate_index]
  # The code assumes the .isolate file is always specified path-less in cwd. Fix
  # if this assumption doesn't hold true.
  assert os.path.basename(isolate) == isolate, isolate

  # This will look like ../out/Debug. This is based against cwd. Note that this
  # must equal the value provided as PRODUCT_DIR.
  build_dir = os.path.dirname(args[isolated_index])

  # This will look like chrome/unit_tests.isolate. It is based against SRC_DIR.
  # It's used to calculate temp_isolate.
  src_isolate = os.path.relpath(os.path.join(cwd, isolate), SRC_DIR)

  # The wrapping .isolate. This will look like
  # ../out/Debug/gen/chrome/unit_tests.isolate.
  temp_isolate = os.path.join(build_dir, 'gen', src_isolate)
  temp_isolate_dir = os.path.dirname(temp_isolate)

  # Relative path between the new and old .isolate file.
  isolate_relpath = os.path.relpath(
      '.', temp_isolate_dir).replace(os.path.sep, '/')

  # It's a big assumption here that the name of the isolate file matches the
  # primary target '_run'. Fix accordingly if this doesn't hold true, e.g.
  # complain to maruel@.
  target = isolate[:-len('.isolate')] + '_run'
  build_steps = load_ninja(build_dir)
  binary_deps = set()
  collect_deps(target, build_steps, binary_deps, None)
  binary_deps = post_process_deps(build_dir, binary_deps)
  logging.debug(
      'Binary dependencies:%s', ''.join('\n  ' + i for i in binary_deps))

  # Now do actual wrapping .isolate.
  isolate_dict = {
    'includes': [
      posixpath.join(isolate_relpath, isolate),
    ],
    'variables': {
      # Will look like ['<(PRODUCT_DIR)/lib/flibuser_prefs.so'].
      'files': sorted(
          '<(PRODUCT_DIR)/%s' % i.replace(os.path.sep, '/')
          for i in binary_deps),
    },
  }
  if not os.path.isdir(temp_isolate_dir):
    os.makedirs(temp_isolate_dir)
  comment = (
      '# Warning: this file was AUTOGENERATED.\n'
      '# DO NO EDIT.\n')
  # Render the wrapper through isolate_format so it matches what
  # tools/swarming_client expects to parse.
  out = StringIO.StringIO()
  isolate_format.print_all(comment, isolate_dict, out)
  isolate_content = out.getvalue()
  with open(temp_isolate, 'wb') as f:
    f.write(isolate_content)
  logging.info('Added %d dynamic libs', len(binary_deps))
  logging.debug('%s', isolate_content)
  # Point the caller's command line at the wrapper instead of the original.
  args[isolate_index] = temp_isolate
+
+
def prepare_isolate_call(args, output):
  """Gathers all information required to run isolate.py later.

  Dumps it as JSON to |output| file.
  """
  payload = {
      'args': args,
      'dir': os.getcwd(),
      'version': 1,
  }
  with open(output, 'wb') as f:
    json.dump(payload, f, indent=2, sort_keys=True)
+
+
def main():
  """Entry point: wraps the .isolate, then runs or defers isolate.py."""
  logging.basicConfig(level=logging.ERROR, format='%(levelname)7s %(message)s')
  args = sys.argv[1:]
  mode = args[0] if args else None
  isolate = None
  isolated = None
  for index, arg in enumerate(args):
    if arg == '--isolate':
      isolate = index + 1
    if arg == '--isolated':
      isolated = index + 1
  if isolate is None or isolated is None or not mode:
    sys.stderr.write('Internal failure\n')
    return 1

  create_wrapper(args, isolate, isolated)

  # In 'prepare' mode just collect all required information for postponed
  # isolated.py invocation later, store it in *.isolated.gen.json file.
  if mode == 'prepare':
    prepare_isolate_call(args[1:], args[isolated] + '.gen.json')
    return 0

  swarming_client = os.path.join(SRC_DIR, 'tools', 'swarming_client')
  sys.stdout.flush()
  return subprocess.call(
      [sys.executable, os.path.join(swarming_client, 'isolate.py')] + args)


if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/json_schema_compiler/cc_generator.py b/chromium/tools/json_schema_compiler/cc_generator.py
index ac1679bee35..0904e39b366 100644
--- a/chromium/tools/json_schema_compiler/cc_generator.py
+++ b/chromium/tools/json_schema_compiler/cc_generator.py
@@ -126,7 +126,15 @@ class _Generator(object):
(c.Append('%s::%s()' % (classname_in_namespace, classname))
.Cblock(self._GenerateInitializersAndBody(type_))
.Append('%s::~%s() {}' % (classname_in_namespace, classname))
- .Append()
+ )
+ # Note: we use 'rhs' because some API objects have a member 'other'.
+ (c.Append('%s::%s(%s&& rhs)' %
+ (classname_in_namespace, classname, classname))
+ .Cblock(self._GenerateMoveCtor(type_))
+ .Append('%s& %s::operator=(%s&& rhs)' %
+ (classname_in_namespace, classname_in_namespace,
+ classname))
+ .Cblock(self._GenerateMoveAssignOperator(type_))
)
if type_.origin.from_json:
c.Cblock(self._GenerateTypePopulate(classname_in_namespace, type_))
@@ -178,12 +186,92 @@ class _Generator(object):
raise TypeError(t)
if items:
- s = ': %s' % (', '.join(items))
+ s = ': %s' % (',\n'.join(items))
else:
s = ''
s = s + ' {}'
return Code().Append(s)
  def _GetMoveProps(self, type_, copy_str, move_str):
    """Returns a tuple of (props, dicts) for the type.

    |props| is a list of all the copyable or movable properties generated using
    the copy_str and move_str, and |dicts| is a list of all the dictionary
    properties by name.

    Properties:
    - |type_| the Type to get the properties from
    - |copy_str| the string to use when copying a value; should have two
      placeholders to take the property name.
    - |move_str| the string to use when moving a value; should have two
      placeholders to take the property name.
    """
    props = []
    dicts = []
    for prop in type_.properties.values():
      t = prop.type_

      # Resolve $refs first so a reference to an enum is treated like the
      # enum itself (enums are copied by value, never moved).
      real_t = self._type_helper.FollowRef(t)
      if (real_t.property_type != PropertyType.ENUM and
          (prop.optional or
           t.property_type == PropertyType.ANY or
           t.property_type == PropertyType.ARRAY or
           t.property_type == PropertyType.BINARY or
           t.property_type == PropertyType.CHOICES or
           t.property_type == PropertyType.OBJECT or
           t.property_type == PropertyType.REF or
           t.property_type == PropertyType.STRING)):
        props.append(move_str % (prop.unix_name, prop.unix_name))
      elif t.property_type == PropertyType.FUNCTION:
        # Function properties are exchanged via Swap() by the generated move
        # code, so track them separately from copy/move props.
        dicts.append(prop.unix_name)
      elif (real_t.property_type == PropertyType.ENUM or
            t.property_type == PropertyType.INTEGER or
            t.property_type == PropertyType.DOUBLE or
            t.property_type == PropertyType.BOOLEAN):
        # Plain value types are cheap to copy.
        props.append(copy_str % (prop.unix_name, prop.unix_name))
      else:
        raise TypeError(t)

    if type_.property_type == PropertyType.CHOICES:
      # Choices are modelled as one optional 'as_<choice>' member per choice.
      for choice in type_.choices:
        prop_name = 'as_%s' % choice.unix_name
        props.append(move_str % (prop_name, prop_name))

    if (type_.property_type == PropertyType.OBJECT and
        type_.additional_properties is not None):
      if type_.additional_properties.property_type == PropertyType.ANY:
        # ANY additional properties live in 'additional_properties', which is
        # exchanged via Swap() like function properties.
        dicts.append('additional_properties')
      else:
        props.append(move_str % ('additional_properties',
                                 'additional_properties'))

    return (props, dicts)
+
  def _GenerateMoveCtor(self, type_):
    """Generates the body of the move constructor for |type_|.

    Movable members are moved and plain value members copied in the member
    initializer list; dictionary-backed members are exchanged via Swap() in
    the constructor body.
    """
    props, dicts = self._GetMoveProps(type_, '%s(rhs.%s)',
                                      '%s(std::move(rhs.%s))')
    s = ''
    if props:
      # Member initializer list, e.g. ': a(std::move(rhs.a)),\nb(rhs.b)'.
      s = s + ': %s' % (',\n'.join(props))
    s = s + '{'
    for item in dicts:
      s = s + ('\n%s.Swap(&rhs.%s);' % (item, item))
    s = s + '\n}'

    return Code().Append(s)
+
  def _GenerateMoveAssignOperator(self, type_):
    """Generates the body of the move-assignment operator for |type_|.

    Mirrors the move constructor: movable members are move-assigned, plain
    value members copied, dictionary-backed members exchanged via Swap(),
    and *this is returned.
    """
    props, dicts = self._GetMoveProps(type_, '%s = rhs.%s;',
                                      '%s = std::move(rhs.%s);')
    s = '{\n'
    if props:
      s = s + '\n'.join(props)
    for item in dicts:
      s = s + ('%s.Swap(&rhs.%s);' % (item, item))
    s = s + '\nreturn *this;\n}'

    return Code().Append(s)
+
def _GenerateTypePopulate(self, cpp_namespace, type_):
"""Generates the function for populating a type given a pointer to it.
@@ -323,15 +411,15 @@ class _Generator(object):
classname = cpp_util.Classname(schema_util.StripNamespace(type_.name))
c = Code()
(c.Append('// static')
- .Append('scoped_ptr<%s> %s::FromValue(%s) {' % (classname,
+ .Append('std::unique_ptr<%s> %s::FromValue(%s) {' % (classname,
cpp_namespace, self._GenerateParams(('const base::Value& value',))))
)
if self._generate_error_messages:
c.Append('DCHECK(error);')
- (c.Append(' scoped_ptr<%s> out(new %s());' % (classname, classname))
+ (c.Append(' std::unique_ptr<%s> out(new %s());' % (classname, classname))
.Append(' if (!Populate(%s))' % self._GenerateArgs(
('value', 'out.get()')))
- .Append(' return scoped_ptr<%s>();' % classname)
+ .Append(' return nullptr;')
.Append(' return out;')
.Append('}')
)
@@ -353,9 +441,9 @@ class _Generator(object):
into a base::DictionaryValue.
"""
c = Code()
- (c.Sblock('scoped_ptr<base::DictionaryValue> %s::ToValue() const {' %
+ (c.Sblock('std::unique_ptr<base::DictionaryValue> %s::ToValue() const {' %
cpp_namespace)
- .Append('scoped_ptr<base::DictionaryValue> value('
+ .Append('std::unique_ptr<base::DictionaryValue> value('
'new base::DictionaryValue());')
.Append()
)
@@ -394,16 +482,12 @@ class _Generator(object):
if type_.additional_properties.property_type == PropertyType.ANY:
c.Append('value->MergeDictionary(&additional_properties);')
else:
- # Non-copyable types will be wrapped in a linked_ptr for inclusion in
- # maps, so we need to unwrap them.
- needs_unwrap = (
- not self._type_helper.IsCopyable(type_.additional_properties))
(c.Sblock('for (const auto& it : additional_properties) {')
.Cblock(self._CreateValueFromType(
'value->SetWithoutPathExpansion(it.first, %s);',
type_.additional_properties.name,
type_.additional_properties,
- '%sit.second' % ('*' if needs_unwrap else '')))
+ 'it.second'))
.Eblock('}')
)
@@ -416,8 +500,9 @@ class _Generator(object):
into a base::Value.
"""
c = Code()
- c.Sblock('scoped_ptr<base::Value> %s::ToValue() const {' % cpp_namespace)
- c.Append('scoped_ptr<base::Value> result;')
+ c.Sblock('std::unique_ptr<base::Value> %s::ToValue() const {' %
+ cpp_namespace)
+ c.Append('std::unique_ptr<base::Value> result;')
for choice in type_.choices:
choice_var = 'as_%s' % choice.unix_name
# Enums cannot be wrapped with scoped_ptr, but the XXX_NONE enum value
@@ -595,7 +680,7 @@ class _Generator(object):
(c.Concat(self._GenerateError(
'"expected %%(total)d arguments, got " '
'+ base::IntToString(%%(var)s.GetSize())'))
- .Append('return scoped_ptr<Params>();')
+ .Append('return nullptr;')
.Eblock('}')
.Substitute({
'var': var,
@@ -612,13 +697,13 @@ class _Generator(object):
"""
c = Code()
(c.Append('// static')
- .Sblock('scoped_ptr<Params> Params::Create(%s) {' % self._GenerateParams(
- ['const base::ListValue& args']))
+ .Sblock('std::unique_ptr<Params> Params::Create(%s) {' %
+ self._GenerateParams(['const base::ListValue& args']))
)
if self._generate_error_messages:
c.Append('DCHECK(error);')
(c.Concat(self._GenerateParamsCheck(function, 'args'))
- .Append('scoped_ptr<Params> params(new Params());')
+ .Append('std::unique_ptr<Params> params(new Params());')
)
for param in function.params:
@@ -629,7 +714,7 @@ class _Generator(object):
# incorrect or missing, those following it are not processed. Note that
# for optional arguments, we allow missing arguments and proceed because
# there may be other arguments following it.
- failure_value = 'scoped_ptr<Params>()'
+ failure_value = 'std::unique_ptr<Params>()'
c.Append()
value_var = param.unix_name + '_value'
(c.Append('const base::Value* %(value_var)s = NULL;')
@@ -728,7 +813,7 @@ class _Generator(object):
c.Append('return %(failure_value)s;')
(c.Eblock('}')
.Sblock('else {')
- .Append('scoped_ptr<%(cpp_type)s> temp(new %(cpp_type)s());')
+ .Append('std::unique_ptr<%(cpp_type)s> temp(new %(cpp_type)s());')
.Append('if (!%%(cpp_type)s::Populate(%s)) {' % self._GenerateArgs(
('*dictionary', 'temp.get()')))
.Append(' return %(failure_value)s;')
@@ -792,7 +877,7 @@ class _Generator(object):
c.Eblock('}')
elif underlying_type.property_type == PropertyType.CHOICES:
if is_ptr:
- (c.Append('scoped_ptr<%(cpp_type)s> temp(new %(cpp_type)s());')
+ (c.Append('std::unique_ptr<%(cpp_type)s> temp(new %(cpp_type)s());')
.Append('if (!%%(cpp_type)s::Populate(%s))' % self._GenerateArgs(
('*%(src_var)s', 'temp.get()')))
.Append(' return %(failure_value)s;')
@@ -1001,9 +1086,9 @@ class _Generator(object):
params = callback.params
c.Concat(self._GeneratePropertyFunctions(function_scope, params))
- (c.Sblock('scoped_ptr<base::ListValue> %(function_scope)s'
+ (c.Sblock('std::unique_ptr<base::ListValue> %(function_scope)s'
'Create(%(declaration_list)s) {')
- .Append('scoped_ptr<base::ListValue> create_results('
+ .Append('std::unique_ptr<base::ListValue> create_results('
'new base::ListValue());')
)
declaration_list = []
diff --git a/chromium/tools/json_schema_compiler/code.py b/chromium/tools/json_schema_compiler/code.py
index d637026d6d0..ef41cf88407 100644
--- a/chromium/tools/json_schema_compiler/code.py
+++ b/chromium/tools/json_schema_compiler/code.py
@@ -131,7 +131,7 @@ class Code(object):
else:
line = comment[0:max_len]
comment = comment[max_len:]
- return line, comment
+ return line, comment.lstrip()
# First line has the full maximum length.
if not new_line and self._code:
@@ -174,7 +174,7 @@ class Code(object):
return self
def TrimTrailingNewlines(self):
- """Trims any trailing newlines.
+ """Removes any trailing empty Line objects.
"""
while self._code:
if self._code[-1].value != '':
diff --git a/chromium/tools/json_schema_compiler/compiler.py b/chromium/tools/json_schema_compiler/compiler.py
index feb354dedfd..c626f2f96c2 100755
--- a/chromium/tools/json_schema_compiler/compiler.py
+++ b/chromium/tools/json_schema_compiler/compiler.py
@@ -59,6 +59,10 @@ def GenerateSchema(generator_name,
# If compiling the C++ model code, delete 'nocompile' nodes.
if generator_name == 'cpp':
api_def = json_schema.DeleteNodes(api_def, 'nocompile')
+
+ # Delete all 'nodefine' nodes. They are only for documentation.
+ api_def = json_schema.DeleteNodes(api_def, 'nodefine')
+
api_defs.extend(api_def)
api_model = Model(allow_inline_enums=False)
diff --git a/chromium/tools/json_schema_compiler/cpp_type_generator.py b/chromium/tools/json_schema_compiler/cpp_type_generator.py
index 3d307a9f0be..269a99572c1 100644
--- a/chromium/tools/json_schema_compiler/cpp_type_generator.py
+++ b/chromium/tools/json_schema_compiler/cpp_type_generator.py
@@ -120,10 +120,12 @@ class CppTypeGenerator(object):
# never needs to be wrapped in pointer shenanigans.
# TODO(kalman): change this - but it's an exceedingly far-reaching change.
if not self.FollowRef(type_).property_type == PropertyType.ENUM:
- if is_in_container and (is_ptr or not self.IsCopyable(type_)):
- cpp_type = 'linked_ptr<%s>' % cpp_util.PadForGenerics(cpp_type)
- elif is_ptr:
- cpp_type = 'scoped_ptr<%s>' % cpp_util.PadForGenerics(cpp_type)
+ is_base_value = (cpp_type == 'base::Value' or
+ cpp_type == 'base::DictionaryValue')
+ # Wrap ptrs and base::Values in containers (which aren't movable) in
+ # scoped_ptrs.
+ if is_ptr or (is_in_container and is_base_value):
+ cpp_type = 'std::unique_ptr<%s>' % cpp_util.PadForGenerics(cpp_type)
return cpp_type
@@ -230,12 +232,9 @@ class CppTypeGenerator(object):
if type_.property_type == PropertyType.REF:
deps.add(_TypeDependency(self._FindType(type_.ref_type), hard=hard))
elif type_.property_type == PropertyType.ARRAY:
- # Non-copyable types are not hard because they are wrapped in linked_ptrs
- # when generated. Otherwise they're typedefs, so they're hard (though we
- # could generate those typedefs in every dependent namespace, but that
- # seems weird).
- deps = self._TypeDependencies(type_.item_type,
- hard=self.IsCopyable(type_.item_type))
+ # Types in containers are hard dependencies because they are stored
+ # directly and use move semantics.
+ deps = self._TypeDependencies(type_.item_type, hard=hard)
elif type_.property_type == PropertyType.CHOICES:
for type_ in type_.choices:
deps |= self._TypeDependencies(type_, hard=self.IsCopyable(type_))
diff --git a/chromium/tools/json_schema_compiler/cpp_type_generator_test.py b/chromium/tools/json_schema_compiler/cpp_type_generator_test.py
index 51fcfe94e36..db4e8cff1d1 100755
--- a/chromium/tools/json_schema_compiler/cpp_type_generator_test.py
+++ b/chromium/tools/json_schema_compiler/cpp_type_generator_test.py
@@ -6,6 +6,7 @@
from cpp_namespace_environment import CppNamespaceEnvironment
from cpp_type_generator import CppTypeGenerator
from json_schema import CachedLoad
+import idl_schema
import model
import unittest
@@ -49,6 +50,10 @@ class CppTypeGeneratorTest(unittest.TestCase):
self.content_settings_json = CachedLoad('test/content_settings.json')
self.content_settings = self.models['content_settings'].AddNamespace(
self.content_settings_json[0], 'path/to/content_settings.json')
+ self.objects_movable_idl = idl_schema.Load('test/objects_movable.idl')
+ self.objects_movable = self.models['objects_movable'].AddNamespace(
+ self.objects_movable_idl[0], 'path/to/objects_movable.idl',
+ include_compiler_options=True)
def testGenerateIncludesAndForwardDeclarations(self):
m = model.Model()
@@ -143,7 +148,7 @@ class CppTypeGeneratorTest(unittest.TestCase):
manager = CppTypeGenerator(self.models.get('windows'),
_FakeSchemaLoader(None))
self.assertEquals(
- 'std::vector<linked_ptr<Window> >',
+ 'std::vector<Window>',
manager.GetCppType(
self.windows.functions['getAll'].callback.params[0].type_))
manager = CppTypeGenerator(self.models.get('permissions'),
@@ -153,6 +158,14 @@ class CppTypeGeneratorTest(unittest.TestCase):
manager.GetCppType(
self.permissions.types['Permissions'].properties['origins'].type_))
+ manager = CppTypeGenerator(self.models.get('objects_movable'),
+ _FakeSchemaLoader(None))
+ self.assertEquals(
+ 'std::vector<MovablePod>',
+ manager.GetCppType(
+ self.objects_movable.types['MovableParent'].
+ properties['pods'].type_))
+
def testGetCppTypeLocalRef(self):
manager = CppTypeGenerator(self.models.get('tabs'), _FakeSchemaLoader(None))
self.assertEquals(
@@ -169,7 +182,7 @@ class CppTypeGeneratorTest(unittest.TestCase):
environment=CppNamespaceEnvironment('%(namespace)s'))
manager = CppTypeGenerator(m, _FakeSchemaLoader(m))
self.assertEquals(
- 'std::vector<linked_ptr<tabs::Tab> >',
+ 'std::vector<tabs::Tab>',
manager.GetCppType(
self.windows.types['Window'].properties['tabs'].type_))
@@ -180,7 +193,7 @@ class CppTypeGeneratorTest(unittest.TestCase):
manager.GetCppType(
self.permissions.types['Permissions'].properties['origins'].type_,
is_in_container=False))
- self.assertEquals('linked_ptr<std::vector<std::string> >',
+ self.assertEquals('std::vector<std::string>',
manager.GetCppType(
self.permissions.types['Permissions'].properties['origins'].type_,
is_in_container=True))
diff --git a/chromium/tools/json_schema_compiler/h_generator.py b/chromium/tools/json_schema_compiler/h_generator.py
index 173e549e187..c64241cd62d 100644
--- a/chromium/tools/json_schema_compiler/h_generator.py
+++ b/chromium/tools/json_schema_compiler/h_generator.py
@@ -53,12 +53,11 @@ class _Generator(object):
.Append('#include <stdint.h>')
.Append()
.Append('#include <map>')
+ .Append('#include <memory>')
.Append('#include <string>')
.Append('#include <vector>')
.Append()
.Append('#include "base/logging.h"')
- .Append('#include "base/memory/linked_ptr.h"')
- .Append('#include "base/memory/scoped_ptr.h"')
.Append('#include "base/values.h"')
.Cblock(self._type_helper.GenerateIncludes(include_soft=include_soft))
.Append()
@@ -228,6 +227,9 @@ class _Generator(object):
.Append('%(classname)s();')
.Append('~%(classname)s();')
)
+ (c.Append('%(classname)s(%(classname)s&& rhs);')
+ .Append('%(classname)s& operator=(%(classname)s&& rhs);')
+ )
if type_.origin.from_json:
(c.Append()
.Comment('Populates a %s object from a base::Value. Returns'
@@ -239,7 +241,7 @@ class _Generator(object):
(c.Append()
.Comment('Creates a %s object from a base::Value, or NULL on '
'failure.' % classname)
- .Append('static scoped_ptr<%s> FromValue(%s);' % (
+ .Append('static std::unique_ptr<%s> FromValue(%s);' % (
classname, self._GenerateParams(('const base::Value& value',))))
)
if type_.origin.from_client:
@@ -249,7 +251,7 @@ class _Generator(object):
(c.Append()
.Comment('Returns a new %s representing the serialized form of this '
'%s object.' % (value_type, classname))
- .Append('scoped_ptr<%s> ToValue() const;' % value_type)
+ .Append('std::unique_ptr<%s> ToValue() const;' % value_type)
)
if type_.property_type == PropertyType.CHOICES:
# Choices are modelled with optional fields for each choice. Exactly one
@@ -326,8 +328,8 @@ class _Generator(object):
c = Code()
(c.Sblock('struct Params {')
- .Append('static scoped_ptr<Params> Create(%s);' % self._GenerateParams(
- ('const base::ListValue& args',)))
+ .Append('static std::unique_ptr<Params> Create(%s);' %
+ self._GenerateParams(('const base::ListValue& args',)))
.Append('~Params();')
.Append()
.Cblock(self._GenerateTypes(p.type_ for p in function.params))
@@ -366,7 +368,7 @@ class _Generator(object):
c.Comment(param.description)
declaration_list.append(cpp_util.GetParameterDeclaration(
param, self._type_helper.GetCppType(param.type_)))
- c.Append('scoped_ptr<base::ListValue> Create(%s);' %
+ c.Append('std::unique_ptr<base::ListValue> Create(%s);' %
', '.join(declaration_list))
return c
diff --git a/chromium/tools/json_schema_compiler/idl_schema.py b/chromium/tools/json_schema_compiler/idl_schema.py
index 65d7baeb485..58efe28598f 100755
--- a/chromium/tools/json_schema_compiler/idl_schema.py
+++ b/chromium/tools/json_schema_compiler/idl_schema.py
@@ -149,6 +149,8 @@ class Dictionary(object):
result = {'id': self.node.GetName(),
'properties': properties,
'type': 'object'}
+ if self.node.GetProperty('nodefine'):
+ result['nodefine'] = True
if self.node.GetProperty('nodoc'):
result['nodoc'] = True
elif self.node.GetProperty('inline_doc'):
@@ -175,7 +177,8 @@ class Member(object):
properties['deprecated'] = self.node.GetProperty('deprecated')
if self.node.GetProperty('allowAmbiguousOptionalArguments'):
properties['allowAmbiguousOptionalArguments'] = True
- for property_name in ('OPTIONAL', 'nodoc', 'nocompile', 'nodart'):
+ for property_name in ('OPTIONAL', 'nodoc', 'nocompile', 'nodart',
+ 'nodefine'):
if self.node.GetProperty(property_name):
properties[property_name.lower()] = True
for option_name, sanitizer in [
@@ -357,8 +360,8 @@ class Enum(object):
'description': self.description,
'type': 'string',
'enum': enum}
- for property_name in (
- 'inline_doc', 'noinline_doc', 'nodoc', 'cpp_enum_prefix_override',):
+ for property_name in ('cpp_enum_prefix_override', 'inline_doc',
+ 'noinline_doc', 'nodefine', 'nodoc',):
if self.node.GetProperty(property_name):
result[property_name] = self.node.GetProperty(property_name)
if self.node.GetProperty('deprecated'):
diff --git a/chromium/tools/json_schema_compiler/idl_schema_test.py b/chromium/tools/json_schema_compiler/idl_schema_test.py
index 03960c3b029..3e0d3cc482a 100755
--- a/chromium/tools/json_schema_compiler/idl_schema_test.py
+++ b/chromium/tools/json_schema_compiler/idl_schema_test.py
@@ -126,6 +126,12 @@ class IdlSchemaTest(unittest.TestCase):
self.assertTrue(func is not None)
self.assertTrue(func['nocompile'])
+ def testNoDefine(self):
+ schema = self.idl_basics
+ func = getFunction(schema, 'function31')
+ self.assertTrue(func is not None)
+ self.assertTrue(func['nodefine'])
+
def testNoDocOnEnum(self):
schema = self.idl_basics
enum_with_nodoc = getType(schema, 'EnumTypeWithNoDoc')
diff --git a/chromium/tools/json_schema_compiler/js_externs_generator.py b/chromium/tools/json_schema_compiler/js_externs_generator.py
index e1dfcf2869d..065e4d3aaba 100644
--- a/chromium/tools/json_schema_compiler/js_externs_generator.py
+++ b/chromium/tools/json_schema_compiler/js_externs_generator.py
@@ -20,7 +20,7 @@ import re
NOTE = """// NOTE: The format of types has changed. 'FooType' is now
// 'chrome.%s.FooType'.
// Please run the closure compiler before committing changes.
-// See https://code.google.com/p/chromium/wiki/ClosureCompilation.
+// See https://chromium.googlesource.com/chromium/src/+/master/docs/closure_compilation.md
"""
class JsExternsGenerator(object):
@@ -39,16 +39,16 @@ class _Generator(object):
(c.Append(self._GetHeader(sys.argv[0], self._namespace.name))
.Append())
- c.Cblock(self._GenerateNamespaceObject())
+ self._AppendNamespaceObject(c)
for js_type in self._namespace.types.values():
- c.Cblock(self._GenerateType(js_type))
+ self._AppendType(c, js_type)
for function in self._namespace.functions.values():
- c.Cblock(self._GenerateFunction(function))
+ self._AppendFunction(c, function)
for event in self._namespace.events.values():
- c.Cblock(self._GenerateEvent(event))
+ self._AppendEvent(c, event)
c.TrimTrailingNewlines()
@@ -62,26 +62,22 @@ class _Generator(object):
('/** @fileoverview Externs generated from namespace: %s */' %
namespace))
-
- def _GenerateType(self, js_type):
- """Given a Type object, returns the Code for this type's definition.
+ def _AppendType(self, c, js_type):
+ """Given a Type object, generates the Code for this type's definition.
"""
- c = Code()
if js_type.property_type is PropertyType.ENUM:
- c.Concat(self._GenerateEnumJsDoc(js_type))
+ self._AppendEnumJsDoc(c, js_type)
else:
- c.Concat(self._GenerateTypeJsDoc(js_type))
+ self._AppendTypeJsDoc(c, js_type)
+ c.Append()
- return c
-
- def _GenerateEnumJsDoc(self, js_type):
- """ Given an Enum Type object, returns the Code for the enum's definition.
+ def _AppendEnumJsDoc(self, c, js_type):
+ """ Given an Enum Type object, generates the Code for the enum's definition.
"""
- c = Code()
(c.Sblock(line='/**', line_prefix=' * ')
.Append('@enum {string}')
- .Append(self._js_util.GenerateSeeLink(self._namespace.name, 'type',
- js_type.simple_name))
+ .Append(self._js_util.GetSeeLink(self._namespace.name, 'type',
+ js_type.simple_name))
.Eblock(' */'))
c.Append('chrome.%s.%s = {' % (self._namespace.name, js_type.name))
@@ -101,7 +97,6 @@ class _Generator(object):
[" %s: '%s'," % (get_property_name(v.name), v.name)
for v in js_type.enum_values]))
c.Append('};')
- return c
def _IsTypeConstructor(self, js_type):
"""Returns true if the given type should be a @constructor. If this returns
@@ -110,12 +105,9 @@ class _Generator(object):
return any(prop.type_.property_type is PropertyType.FUNCTION
for prop in js_type.properties.values())
- def _GenerateTypeJsDoc(self, js_type):
- """Generates the documentation for a type as a Code.
-
- Returns an empty code object if the object has no documentation.
+ def _AppendTypeJsDoc(self, c, js_type):
+ """Appends the documentation for a type as a Code.
"""
- c = Code()
c.Sblock(line='/**', line_prefix=' * ')
if js_type.description:
@@ -126,10 +118,10 @@ class _Generator(object):
if is_constructor:
c.Comment('@constructor', comment_prefix = ' * ', wrap_indent=4)
else:
- c.Concat(self._GenerateTypedef(js_type.properties))
+ self._AppendTypedef(c, js_type.properties)
- c.Append(self._js_util.GenerateSeeLink(self._namespace.name, 'type',
- js_type.simple_name))
+ c.Append(self._js_util.GetSeeLink(self._namespace.name, 'type',
+ js_type.simple_name))
c.Eblock(' */')
var = 'chrome.%s.%s' % (js_type.namespace.name, js_type.simple_name)
@@ -137,23 +129,18 @@ class _Generator(object):
var += ';'
c.Append(var)
- return c
-
- def _GenerateTypedef(self, properties):
- """Given an OrderedDict of properties, returns a Code containing a @typedef.
+ def _AppendTypedef(self, c, properties):
+ """Given an OrderedDict of properties, Appends code containing a @typedef.
"""
- if not properties: return Code()
+ if not properties: return
- c = Code()
c.Append('@typedef {')
- c.Concat(self._js_util.GenerateObjectDefinition(self._namespace.name,
- properties),
- new_line=False)
+ self._js_util.AppendObjectDefinition(c, self._namespace.name, properties,
+ new_line=False)
c.Append('}', new_line=False)
- return c
- def _GenerateFunction(self, function):
- """Generates the code representing a function, including its documentation.
+ def _AppendFunction(self, c, function):
+ """Appends the code representing a function, including its documentation.
For example:
/**
@@ -161,36 +148,31 @@ class _Generator(object):
*/
chrome.window.setTitle = function(title) {};
"""
- c = Code()
- params = self._GenerateFunctionParams(function)
- (c.Concat(self._js_util.GenerateFunctionJsDoc(self._namespace.name,
- function))
- .Append('chrome.%s.%s = function(%s) {};' % (self._namespace.name,
- function.name,
- params))
- )
- return c
-
- def _GenerateEvent(self, event):
- """Generates the code representing an event.
+ self._js_util.AppendFunctionJsDoc(c, self._namespace.name, function)
+ params = self._GetFunctionParams(function)
+ c.Append('chrome.%s.%s = function(%s) {};' % (self._namespace.name,
+ function.name, params))
+ c.Append()
+
+ def _AppendEvent(self, c, event):
+ """Appends the code representing an event.
For example:
/** @type {!ChromeEvent} */
chrome.bookmarks.onChildrenReordered;
"""
- c = Code()
c.Sblock(line='/**', line_prefix=' * ')
if (event.description):
c.Comment(event.description, comment_prefix='')
c.Append('@type {!ChromeEvent}')
- c.Append(self._js_util.GenerateSeeLink(self._namespace.name, 'event',
- event.name))
+ c.Append(self._js_util.GetSeeLink(self._namespace.name, 'event',
+ event.name))
c.Eblock(' */')
c.Append('chrome.%s.%s;' % (self._namespace.name, event.name))
- return c
+ c.Append()
- def _GenerateNamespaceObject(self):
- """Generates the code creating namespace object.
+ def _AppendNamespaceObject(self, c):
+ """Appends the code creating namespace object.
For example:
/**
@@ -198,14 +180,15 @@ class _Generator(object):
*/
chrome.bookmarks = {};
"""
- c = Code()
- (c.Append("""/**
+ c.Append("""/**
* @const
*/""")
- .Append('chrome.%s = {};' % self._namespace.name))
- return c
+ c.Append('chrome.%s = {};' % self._namespace.name)
+ c.Append()
- def _GenerateFunctionParams(self, function):
+ def _GetFunctionParams(self, function):
+ """Returns the function params string for function.
+ """
params = function.params[:]
if function.callback:
params.append(function.callback)
diff --git a/chromium/tools/json_schema_compiler/js_externs_generator_test.py b/chromium/tools/json_schema_compiler/js_externs_generator_test.py
index c3376598610..917ce6379fb 100755
--- a/chromium/tools/json_schema_compiler/js_externs_generator_test.py
+++ b/chromium/tools/json_schema_compiler/js_externs_generator_test.py
@@ -44,6 +44,7 @@ namespace fakeApi {
long? maybe;
(DOMString or Greek or long[]) choice;
object plainObj;
+ ArrayBuffer arrayBuff;
};
callback VoidCallback = void();
@@ -81,7 +82,7 @@ expected_output = ("""// Copyright %s The Chromium Authors. All rights reserved.
// NOTE: The format of types has changed. 'FooType' is now
// 'chrome.fakeApi.FooType'.
// Please run the closure compiler before committing changes.
-// See https://code.google.com/p/chromium/wiki/ClosureCompilation.
+// See https://chromium.googlesource.com/chromium/src/+/master/docs/closure_compilation.md
/** @fileoverview Externs generated from namespace: fakeApi */
@@ -123,7 +124,8 @@ chrome.fakeApi.Bar;
* obj: !chrome.fakeApi.Bar,
* maybe: (number|undefined),
* choice: (string|!chrome.fakeApi.Greek|!Array<number>),
- * plainObj: Object
+ * plainObj: Object,
+ * arrayBuff: ArrayBuffer
* }}
* @see https://developer.chrome.com/extensions/fakeApi#type-Baz
*/
@@ -202,6 +204,10 @@ fake_json = """// Copyright 2014 The Chromium Authors. All rights reserved.
"type": "integer"
}
}
+ },
+ "quu": {
+ "type": "binary",
+ "description": "The array buffer"
}
}
},
@@ -240,7 +246,7 @@ json_expected = ("""// Copyright %s The Chromium Authors. All rights reserved.
// NOTE: The format of types has changed. 'FooType' is now
// 'chrome.fakeJson.FooType'.
// Please run the closure compiler before committing changes.
-// See https://code.google.com/p/chromium/wiki/ClosureCompilation.
+// See https://chromium.googlesource.com/chromium/src/+/master/docs/closure_compilation.md
/** @fileoverview Externs generated from namespace: fakeJson */
@@ -266,7 +272,8 @@ chrome.fakeJson.CrazyEnum = {
* bar: number,
* baz: {
* depth: number
- * }
+ * },
+ * quu: ArrayBuffer
* }} inlineObj Evil inline object! With a super duper duper long string
* description that causes problems!
* @param {function({
@@ -279,7 +286,7 @@ chrome.fakeJson.CrazyEnum = {
* @see https://developer.chrome.com/extensions/fakeJson#method-funcWithInlineObj
*/
chrome.fakeJson.funcWithInlineObj = function(inlineObj, callback) {};""" %
- (datetime.now().year, sys.argv[0]))
+ (datetime.now().year, sys.argv[0]))
class JsExternGeneratorTest(unittest.TestCase):
diff --git a/chromium/tools/json_schema_compiler/js_interface_generator.py b/chromium/tools/json_schema_compiler/js_interface_generator.py
index d8ea94d48cb..e6c892a8674 100644
--- a/chromium/tools/json_schema_compiler/js_interface_generator.py
+++ b/chromium/tools/json_schema_compiler/js_interface_generator.py
@@ -38,20 +38,23 @@ class _Generator(object):
(c.Append(self._GetHeader(sys.argv[0], self._namespace.name))
.Append())
- c.Cblock(self._GenerateInterfaceObject())
+ self._AppendInterfaceObject(c)
+ c.Append()
c.Sblock('%s.prototype = {' % self._interface)
for function in self._namespace.functions.values():
- c.Cblock(self._GenerateFunction(function))
+ self._AppendFunction(c, function)
+
+ c.TrimTrailingNewlines()
+ c.Eblock('};')
+ c.Append()
for event in self._namespace.events.values():
- c.Cblock(self._GenerateEvent(event))
+ self._AppendEvent(c, event)
c.TrimTrailingNewlines()
- c.Eblock('};')
-
return c
def _GetHeader(self, tool, namespace):
@@ -63,40 +66,36 @@ class _Generator(object):
namespace) + '\n' +
ASSERT);
- def _GenerateInterfaceObject(self):
- """Generates the code creating the interface object.
+ def _AppendInterfaceObject(self, c):
+ """Appends the code creating the interface object.
For example:
/** @interface */
function SettingsPrivate() {}
"""
- c = Code()
(c.Append('/** @interface */')
.Append('function %s() {}' % self._interface))
- return c
- def _GenerateFunction(self, function):
- """Generates the inteface for a function, including a JSDoc comment.
+ def _AppendFunction(self, c, function):
+ """Appends the inteface for a function, including a JSDoc comment.
"""
- c = Code()
if function.deprecated:
- return c
-
- (c.Concat(self._js_util.GenerateFunctionJsDoc(self._namespace.name,
- function))
- .Append('%s: assertNotReached,' % (function.name)))
+ return
- return c
+ self._js_util.AppendFunctionJsDoc(c, self._namespace.name, function)
+ c.Append('%s: assertNotReached,' % (function.name))
+ c.Append()
- def _GenerateEvent(self, event):
- """Generates the interface for an event.
+ def _AppendEvent(self, c, event):
+ """Appends the interface for an event.
"""
- c = Code()
c.Sblock(line='/**', line_prefix=' * ')
if (event.description):
c.Comment(event.description, comment_prefix='')
c.Append('@type {!ChromeEvent}')
- c.Append(self._js_util.GenerateSeeLink(self._namespace.name, 'event',
- event.name))
+ c.Append(self._js_util.GetSeeLink(self._namespace.name, 'event',
+ event.name))
c.Eblock(' */')
- c.Append('%s: new ChromeEvent(),' % (event.name))
- return c
+
+ c.Append('%s.prototype.%s;' % (self._interface, event.name))
+
+ c.Append()
diff --git a/chromium/tools/json_schema_compiler/js_interface_generator_test.py b/chromium/tools/json_schema_compiler/js_interface_generator_test.py
index 2a5e492d906..90837d7a982 100755
--- a/chromium/tools/json_schema_compiler/js_interface_generator_test.py
+++ b/chromium/tools/json_schema_compiler/js_interface_generator_test.py
@@ -19,7 +19,7 @@ fake_idl = """
// A totally fake API.
namespace fakeApi {
- enum Grk {
+ enum Greek {
ALPHA,
BETA,
GAMMA,
@@ -34,21 +34,21 @@ namespace fakeApi {
DOMString str;
long num;
boolean b;
- Grk letter;
- Grk? optionalLetter;
+ Greek letter;
+ Greek? optionalLetter;
long[] arr;
Bar[]? optionalObjArr;
- Grk[] enumArr;
+ Greek[] enumArr;
any[] anythingGoes;
Bar obj;
long? maybe;
- (DOMString or Grk or long[]) choice;
+ (DOMString or Greek or long[]) choice;
object plainObj;
};
callback VoidCallback = void();
- callback BazGrkCallback = void(Baz baz, Grk greek);
+ callback BazGreekCallback = void(Baz baz, Greek greek);
interface Functions {
// Does something exciting! And what's more, this is a multiline function
@@ -59,7 +59,7 @@ namespace fakeApi {
// |callback| : The callback which will most assuredly in all cases be
// called; that is, of course, iff such a callback was provided and is
// not at all null.
- static void bazGrk(optional BazGrkCallback callback);
+ static void bazGreek(optional BazGreekCallback callback);
[deprecated="Use a new method."] static DOMString returnString();
};
@@ -97,20 +97,21 @@ FakeApi.prototype = {
doSomething: assertNotReached,
/**
- * @param {function(!chrome.fakeApi.Baz, !chrome.fakeApi.Grk):void=} callback
- * The callback which will most assuredly in all cases be called; that is,
- * of course, iff such a callback was provided and is not at all null.
- * @see https://developer.chrome.com/extensions/fakeApi#method-bazGrk
+ * @param {function(!chrome.fakeApi.Baz, !chrome.fakeApi.Greek):void=}
+ * callback The callback which will most assuredly in all cases be called;
+ * that is, of course, iff such a callback was provided and is not at all
+ * null.
+ * @see https://developer.chrome.com/extensions/fakeApi#method-bazGreek
*/
- bazGrk: assertNotReached,
+ bazGreek: assertNotReached,
+};
- /**
- * Fired when we realize it's a trap!
- * @type {!ChromeEvent}
- * @see https://developer.chrome.com/extensions/fakeApi#event-onTrapDetected
- */
- onTrapDetected: new ChromeEvent(),
-};""" % (datetime.now().year, sys.argv[0]))
+/**
+ * Fired when we realize it's a trap!
+ * @type {!ChromeEvent}
+ * @see https://developer.chrome.com/extensions/fakeApi#event-onTrapDetected
+ */
+FakeApi.prototype.onTrapDetected;""" % (datetime.now().year, sys.argv[0]))
class JsExternGeneratorTest(unittest.TestCase):
def _GetNamespace(self, fake_content, filename):
diff --git a/chromium/tools/json_schema_compiler/js_util.py b/chromium/tools/json_schema_compiler/js_util.py
index f304b98b153..2549aefa2fa 100644
--- a/chromium/tools/json_schema_compiler/js_util.py
+++ b/chromium/tools/json_schema_compiler/js_util.py
@@ -30,14 +30,14 @@ class JsUtil(object):
"""
return (INFO % tool)
- def GenerateObjectDefinition(self, namespace_name, properties):
+ def AppendObjectDefinition(self, c, namespace_name, properties,
+ new_line=True):
"""Given an OrderedDict of properties, returns a Code containing the
description of an object.
"""
- if not properties: return Code()
+ if not properties: return
- c = Code()
- c.Sblock('{')
+ c.Sblock('{', new_line=new_line)
first = True
for field, prop in properties.items():
# Avoid trailing comma.
@@ -48,23 +48,19 @@ class JsUtil(object):
first = False
js_type = self._TypeToJsType(namespace_name, prop.type_)
if prop.optional:
- js_type = (Code().
- Append('(').
- Concat(js_type, new_line=False).
- Append('|undefined)', new_line=False))
+ js_type = (Code().Append('(')
+ .Concat(js_type, new_line=False)
+ .Append('|undefined)', new_line=False))
c.Append('%s: ' % field, strip_right=False)
c.Concat(js_type, new_line=False)
c.Eblock('}')
- return c
-
- def GenerateFunctionJsDoc(self, namespace_name, function):
- """Generates the documentation for a function as a Code.
+ def AppendFunctionJsDoc(self, c, namespace_name, function):
+ """Appends the documentation for a function as a Code.
Returns an empty code object if the object has no documentation.
"""
- c = Code()
c.Sblock(line='/**', line_prefix=' * ')
if function.description:
@@ -75,13 +71,15 @@ class JsUtil(object):
c.Concat(js_type, new_line=False)
if optional:
c.Append('=', new_line=False)
- c.Append('} %s' % name, new_line=False)
+ c.Append('}', new_line=False)
+ c.Comment(' %s' % name, comment_prefix='', wrap_indent=4, new_line=False)
if description:
c.Comment(' %s' % description, comment_prefix='',
wrap_indent=4, new_line=False)
for param in function.params:
- append_field(c, 'param', self._TypeToJsType(namespace_name, param.type_),
+ append_field(c, 'param',
+ self._TypeToJsType(namespace_name, param.type_),
param.name, param.optional, param.description)
if function.callback:
@@ -99,10 +97,9 @@ class JsUtil(object):
if function.deprecated:
c.Append('@deprecated %s' % function.deprecated)
- c.Append(self.GenerateSeeLink(namespace_name, 'method', function.name))
+ c.Append(self.GetSeeLink(namespace_name, 'method', function.name))
c.Eblock(' */')
- return c
def _FunctionToJsFunction(self, namespace_name, function):
"""Converts a model.Function to a JS type (i.e., function([params])...)"""
@@ -128,8 +125,9 @@ class JsUtil(object):
return Code().Append('number')
if js_type.property_type is PropertyType.OBJECT:
if js_type.properties:
- return self.GenerateObjectDefinition(namespace_name,
- js_type.properties)
+ c = Code()
+ self.AppendObjectDefinition(c, namespace_name, js_type.properties)
+ return c
return Code().Append('Object')
if js_type.property_type is PropertyType.ARRAY:
return (Code().Append('!Array<').
@@ -150,14 +148,16 @@ class JsUtil(object):
return c
if js_type.property_type is PropertyType.FUNCTION:
return self._FunctionToJsFunction(namespace_name, js_type.function)
+ if js_type.property_type is PropertyType.BINARY:
+ return Code().Append('ArrayBuffer')
if js_type.property_type is PropertyType.ANY:
return Code().Append('*')
if js_type.property_type.is_fundamental:
return Code().Append(js_type.property_type.name)
return Code().Append('?') # TODO(tbreisacher): Make this more specific.
- def GenerateSeeLink(self, namespace_name, object_type, object_name):
- """Generates a @see link for a given API 'object' (type, method, or event).
+ def GetSeeLink(self, namespace_name, object_type, object_name):
+ """Returns a @see link for a given API 'object' (type, method, or event).
"""
# NOTE(devlin): This is kind of a hack. Some APIs will be hosted on
diff --git a/chromium/tools/json_schema_compiler/json_schema.py b/chromium/tools/json_schema_compiler/json_schema.py
index bb4e9c4bc5d..e00c11d7b35 100644
--- a/chromium/tools/json_schema_compiler/json_schema.py
+++ b/chromium/tools/json_schema_compiler/json_schema.py
@@ -37,9 +37,13 @@ def DeleteNodes(item, delete_key=None, matcher=None):
def Load(filename):
- with open(filename, 'r') as handle:
- schemas = json_parse.Parse(handle.read())
- return schemas
+ try:
+ with open(filename, 'r') as handle:
+ schemas = json_parse.Parse(handle.read())
+ return schemas
+ except:
+ print('FAILED: Exception encountered while loading "%s"' % filename)
+ raise
# A dictionary mapping |filename| to the object resulting from loading the JSON
diff --git a/chromium/tools/json_schema_compiler/model.py b/chromium/tools/json_schema_compiler/model.py
index 642e818492b..e0147b5b488 100644
--- a/chromium/tools/json_schema_compiler/model.py
+++ b/chromium/tools/json_schema_compiler/model.py
@@ -303,6 +303,7 @@ class Function(object):
self.optional = json.get('optional', False)
self.parent = parent
self.nocompile = json.get('nocompile')
+ self.nodefine = json.get('nodefine')
options = json.get('options', {})
self.conditions = options.get('conditions', [])
self.actions = options.get('actions', [])
diff --git a/chromium/tools/json_schema_compiler/test/json_schema_compiler_tests.gyp b/chromium/tools/json_schema_compiler/test/json_schema_compiler_tests.gyp
index 404e9b09b56..b9ed611af6f 100644
--- a/chromium/tools/json_schema_compiler/test/json_schema_compiler_tests.gyp
+++ b/chromium/tools/json_schema_compiler/test/json_schema_compiler_tests.gyp
@@ -25,6 +25,8 @@
'idl_other_namespace_sub_namespace.idl',
'idl_object_types.idl',
'objects.json',
+ 'objects_movable.idl',
+ 'objects_movable_json.json',
'simple_api.json',
'error_generation.json'
],
diff --git a/chromium/tools/json_schema_compiler/util.cc b/chromium/tools/json_schema_compiler/util.cc
index e870c78ba4c..dcb44ecb132 100644
--- a/chromium/tools/json_schema_compiler/util.cc
+++ b/chromium/tools/json_schema_compiler/util.cc
@@ -100,29 +100,29 @@ bool PopulateItem(const base::Value& from,
return true;
}
-bool PopulateItem(const base::Value& from, linked_ptr<base::Value>* out) {
- *out = make_linked_ptr(from.DeepCopy());
+bool PopulateItem(const base::Value& from, std::unique_ptr<base::Value>* out) {
+ *out = from.CreateDeepCopy();
return true;
}
bool PopulateItem(const base::Value& from,
- linked_ptr<base::Value>* out,
+ std::unique_ptr<base::Value>* out,
base::string16* error) {
- *out = make_linked_ptr(from.DeepCopy());
+ *out = from.CreateDeepCopy();
return true;
}
bool PopulateItem(const base::Value& from,
- linked_ptr<base::DictionaryValue>* out) {
+ std::unique_ptr<base::DictionaryValue>* out) {
const base::DictionaryValue* dict = nullptr;
if (!from.GetAsDictionary(&dict))
return false;
- *out = make_linked_ptr(dict->DeepCopy());
+ *out = dict->CreateDeepCopy();
return true;
}
bool PopulateItem(const base::Value& from,
- linked_ptr<base::DictionaryValue>* out,
+ std::unique_ptr<base::DictionaryValue>* out,
base::string16* error) {
const base::DictionaryValue* dict = nullptr;
if (!from.GetAsDictionary(&dict)) {
@@ -133,7 +133,7 @@ bool PopulateItem(const base::Value& from,
ValueTypeToString(from.GetType())));
return false;
}
- *out = make_linked_ptr(dict->DeepCopy());
+ *out = dict->CreateDeepCopy();
return true;
}
@@ -158,11 +158,12 @@ void AddItemToList(const std::vector<char>& from, base::ListValue* out) {
base::BinaryValue::CreateWithCopiedBuffer(from.data(), from.size()));
}
-void AddItemToList(const linked_ptr<base::Value>& from, base::ListValue* out) {
+void AddItemToList(const std::unique_ptr<base::Value>& from,
+ base::ListValue* out) {
out->Append(from->DeepCopy());
}
-void AddItemToList(const linked_ptr<base::DictionaryValue>& from,
+void AddItemToList(const std::unique_ptr<base::DictionaryValue>& from,
base::ListValue* out) {
out->Append(static_cast<base::Value*>(from->DeepCopy()));
}
diff --git a/chromium/tools/json_schema_compiler/util.h b/chromium/tools/json_schema_compiler/util.h
index 59d65d01031..220ba80c77d 100644
--- a/chromium/tools/json_schema_compiler/util.h
+++ b/chromium/tools/json_schema_compiler/util.h
@@ -2,14 +2,14 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
-#ifndef TOOLS_JSON_SCHEMA_COMPILER_UTIL_H__
-#define TOOLS_JSON_SCHEMA_COMPILER_UTIL_H__
+#ifndef TOOLS_JSON_SCHEMA_COMPILER_UTIL_H_
+#define TOOLS_JSON_SCHEMA_COMPILER_UTIL_H_
+#include <memory>
#include <string>
+#include <utility>
#include <vector>
-#include "base/memory/linked_ptr.h"
-#include "base/memory/scoped_ptr.h"
#include "base/values.h"
namespace json_schema_compiler {
@@ -18,7 +18,7 @@ namespace util {
// Populates the item |out| from the value |from|. These are used by template
// specializations of |Get(Optional)ArrayFromList|.
-bool PopulateItem(const base::Value& from, linked_ptr<base::Value>* out);
+bool PopulateItem(const base::Value& from, std::unique_ptr<base::Value>* out);
bool PopulateItem(const base::Value& from, int* out);
bool PopulateItem(const base::Value& from, int* out, base::string16* error);
@@ -35,25 +35,33 @@ bool PopulateItem(const base::Value& from,
std::vector<char>* out,
base::string16* error);
bool PopulateItem(const base::Value& from,
- linked_ptr<base::Value>* out,
- base::string16* error);
-bool PopulateItem(const base::Value& from, linked_ptr<base::Value>* out);
-bool PopulateItem(const base::Value& from,
- linked_ptr<base::DictionaryValue>* out);
-bool PopulateItem(const base::Value& from,
- linked_ptr<base::DictionaryValue>* out,
+ std::unique_ptr<base::Value>* out,
base::string16* error);
+bool PopulateItem(const base::Value& from, std::unique_ptr<base::Value>* out);
// This template is used for types generated by tools/json_schema_compiler.
template <class T>
-bool PopulateItem(const base::Value& from, linked_ptr<T>* out) {
+bool PopulateItem(const base::Value& from, std::unique_ptr<T>* out) {
const base::DictionaryValue* dict = nullptr;
if (!from.GetAsDictionary(&dict))
return false;
- scoped_ptr<T> obj(new T());
+ std::unique_ptr<T> obj(new T());
if (!T::Populate(*dict, obj.get()))
return false;
- *out = linked_ptr<T>(obj.release());
+ *out = std::move(obj);
+ return true;
+}
+
+// This template is used for types generated by tools/json_schema_compiler.
+template <class T>
+bool PopulateItem(const base::Value& from, T* out) {
+ const base::DictionaryValue* dict = nullptr;
+ if (!from.GetAsDictionary(&dict))
+ return false;
+ T obj;
+ if (!T::Populate(*dict, &obj))
+ return false;
+ *out = std::move(obj);
return true;
}
@@ -61,15 +69,29 @@ bool PopulateItem(const base::Value& from, linked_ptr<T>* out) {
// error generation enabled.
template <class T>
bool PopulateItem(const base::Value& from,
- linked_ptr<T>* out,
+ std::unique_ptr<T>* out,
base::string16* error) {
const base::DictionaryValue* dict = nullptr;
if (!from.GetAsDictionary(&dict))
return false;
- scoped_ptr<T> obj(new T());
+ std::unique_ptr<T> obj(new T());
if (!T::Populate(*dict, obj.get(), error))
return false;
- *out = linked_ptr<T>(obj.release());
+ *out = std::move(obj);
+ return true;
+}
+
+// This template is used for types generated by tools/json_schema_compiler with
+// error generation enabled.
+template <class T>
+bool PopulateItem(const base::Value& from, T* out, base::string16* error) {
+ const base::DictionaryValue* dict = nullptr;
+ if (!from.GetAsDictionary(&dict))
+ return false;
+ T obj;
+ if (!T::Populate(*dict, &obj, error))
+ return false;
+ *out = std::move(obj);
return true;
}
@@ -82,7 +104,9 @@ bool PopulateArrayFromList(const base::ListValue& list, std::vector<T>* out) {
for (const base::Value* value : list) {
if (!PopulateItem(*value, &item))
return false;
- out->push_back(item);
+ // T might not be movable, but in that case it should be copyable, and this
+ // will still work.
+ out->push_back(std::move(item));
}
return true;
@@ -99,7 +123,7 @@ bool PopulateArrayFromList(const base::ListValue& list,
for (const base::Value* value : list) {
if (!PopulateItem(*value, &item, error))
return false;
- out->push_back(item);
+ out->push_back(std::move(item));
}
return true;
@@ -110,7 +134,7 @@ bool PopulateArrayFromList(const base::ListValue& list,
// if anything other than a list of |T| is at the specified key.
template <class T>
bool PopulateOptionalArrayFromList(const base::ListValue& list,
- scoped_ptr<std::vector<T>>* out) {
+ std::unique_ptr<std::vector<T>>* out) {
out->reset(new std::vector<T>());
if (!PopulateArrayFromList(list, out->get())) {
out->reset();
@@ -121,7 +145,7 @@ bool PopulateOptionalArrayFromList(const base::ListValue& list,
template <class T>
bool PopulateOptionalArrayFromList(const base::ListValue& list,
- scoped_ptr<std::vector<T>>* out,
+ std::unique_ptr<std::vector<T>>* out,
base::string16* error) {
out->reset(new std::vector<T>());
if (!PopulateArrayFromList(list, out->get(), error)) {
@@ -138,14 +162,21 @@ void AddItemToList(const bool from, base::ListValue* out);
void AddItemToList(const double from, base::ListValue* out);
void AddItemToList(const std::string& from, base::ListValue* out);
void AddItemToList(const std::vector<char>& from, base::ListValue* out);
-void AddItemToList(const linked_ptr<base::Value>& from, base::ListValue* out);
-void AddItemToList(const linked_ptr<base::DictionaryValue>& from,
+void AddItemToList(const std::unique_ptr<base::Value>& from,
+ base::ListValue* out);
+void AddItemToList(const std::unique_ptr<base::DictionaryValue>& from,
base::ListValue* out);
// This template is used for types generated by tools/json_schema_compiler.
template <class T>
-void AddItemToList(const linked_ptr<T>& from, base::ListValue* out) {
- out->Append(from->ToValue().release());
+void AddItemToList(const std::unique_ptr<T>& from, base::ListValue* out) {
+ out->Append(from->ToValue());
+}
+
+// This template is used for types generated by tools/json_schema_compiler.
+template <class T>
+void AddItemToList(const T& from, base::ListValue* out) {
+ out->Append(from.ToValue());
}
// Set |out| to the the contents of |from|. Requires PopulateItem to be
@@ -153,32 +184,32 @@ void AddItemToList(const linked_ptr<T>& from, base::ListValue* out) {
template <class T>
void PopulateListFromArray(const std::vector<T>& from, base::ListValue* out) {
out->Clear();
- for (const auto& item : from)
+ for (const T& item : from)
AddItemToList(item, out);
}
// Set |out| to the the contents of |from| if |from| is not null. Requires
// PopulateItem to be implemented for |T|.
template <class T>
-void PopulateListFromOptionalArray(const scoped_ptr<std::vector<T>>& from,
+void PopulateListFromOptionalArray(const std::unique_ptr<std::vector<T>>& from,
base::ListValue* out) {
- if (from.get())
+ if (from)
PopulateListFromArray(*from, out);
}
template <class T>
-scoped_ptr<base::Value> CreateValueFromArray(const std::vector<T>& from) {
- base::ListValue* list = new base::ListValue();
- PopulateListFromArray(from, list);
- return scoped_ptr<base::Value>(list);
+std::unique_ptr<base::Value> CreateValueFromArray(const std::vector<T>& from) {
+ std::unique_ptr<base::ListValue> list(new base::ListValue());
+ PopulateListFromArray(from, list.get());
+ return std::move(list);
}
template <class T>
-scoped_ptr<base::Value> CreateValueFromOptionalArray(
- const scoped_ptr<std::vector<T>>& from) {
- if (from.get())
+std::unique_ptr<base::Value> CreateValueFromOptionalArray(
+ const std::unique_ptr<std::vector<T>>& from) {
+ if (from)
return CreateValueFromArray(*from);
- return scoped_ptr<base::Value>();
+ return nullptr;
}
std::string ValueTypeToString(base::Value::Type type);
@@ -186,4 +217,4 @@ std::string ValueTypeToString(base::Value::Type type);
} // namespace util
} // namespace json_schema_compiler
-#endif // TOOLS_JSON_SCHEMA_COMPILER_UTIL_H__
+#endif // TOOLS_JSON_SCHEMA_COMPILER_UTIL_H_
diff --git a/chromium/tools/json_schema_compiler/util_cc_helper.py b/chromium/tools/json_schema_compiler/util_cc_helper.py
index d69122dfc38..e81c4e35943 100644
--- a/chromium/tools/json_schema_compiler/util_cc_helper.py
+++ b/chromium/tools/json_schema_compiler/util_cc_helper.py
@@ -22,7 +22,7 @@ class UtilCCHelper(object):
def CreateValueFromArray(self, src, optional):
"""Generates code to create a scoped_pt<Value> from the array at src.
- |src| The variable to convert, either a vector or scoped_ptr<vector>.
+ |src| The variable to convert, either a vector or std::unique_ptr<vector>.
|optional| Whether |type_| was optional. Optional types are pointers so
must be treated differently.
"""
diff --git a/chromium/tools/json_to_struct/PRESUBMIT.py b/chromium/tools/json_to_struct/PRESUBMIT.py
new file mode 100644
index 00000000000..32809021954
--- /dev/null
+++ b/chromium/tools/json_to_struct/PRESUBMIT.py
@@ -0,0 +1,20 @@
+# Copyright 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Presubmit script for changes affecting tools/json_to_struct/
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details about the presubmit API built into depot_tools.
+"""
+
+WHITELIST = [ r'.+_test.py$' ]
+
+def CheckChangeOnUpload(input_api, output_api):
+ return input_api.canned_checks.RunUnitTestsInDirectory(
+ input_api, output_api, '.', whitelist=WHITELIST)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ return input_api.canned_checks.RunUnitTestsInDirectory(
+ input_api, output_api, '.', whitelist=WHITELIST)
diff --git a/chromium/tools/json_to_struct/element_generator.py b/chromium/tools/json_to_struct/element_generator.py
new file mode 100644
index 00000000000..e34753294e3
--- /dev/null
+++ b/chromium/tools/json_to_struct/element_generator.py
@@ -0,0 +1,158 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import struct_generator
+
+def _JSONToCString16(json_string_literal):
+ """Converts a JSON string literal to a C++ UTF-16 string literal. This is
+ done by converting \\u#### to \\x####.
+ """
+ c_string_literal = json_string_literal
+ escape_index = c_string_literal.find('\\')
+ while escape_index > 0:
+ if c_string_literal[escape_index + 1] == 'u':
+ # We close the C string literal after the 4 hex digits and reopen it right
+ # after, otherwise the Windows compiler will sometimes try to get more
+ # than 4 characters in the hex string.
+ c_string_literal = (c_string_literal[0:escape_index + 1] + 'x' +
+ c_string_literal[escape_index + 2:escape_index + 6] + '" L"' +
+ c_string_literal[escape_index + 6:])
+ escape_index = c_string_literal.find('\\', escape_index + 6)
+ return c_string_literal
+
+def _GenerateString(content, lines, indent=' '):
+  """Generates a UTF-8 string to be included in a static structure initializer.
+ If content is not specified, uses NULL.
+ """
+ if content is None:
+ lines.append(indent + 'NULL,')
+ else:
+ # json.dumps quotes the string and escape characters as required.
+ lines.append(indent + '%s,' % json.dumps(content))
+
+def _GenerateString16(content, lines, indent=' '):
+  """Generates a UTF-16 string to be included in a static structure
+ initializer. If content is not specified, uses NULL.
+ """
+ if content is None:
+ lines.append(indent + 'NULL,')
+ else:
+ # json.dumps quotes the string and escape characters as required.
+ lines.append(indent + 'L%s,' % _JSONToCString16(json.dumps(content)))
+
+def _GenerateArrayVariableName(element_name, field_name, field_name_count):
+ # Generates a unique variable name for an array variable.
+ var = 'array_%s_%s' % (element_name, field_name)
+ if var not in field_name_count:
+ field_name_count[var] = 0
+ return var
+ new_var = '%s_%d' % (var, field_name_count[var])
+ field_name_count[var] += 1
+ return new_var
+
+def _GenerateArray(element_name, field_info, content, lines, indent,
+ field_name_count):
+ """Generates an array to be included in a static structure initializer. If
+ content is not specified, uses NULL. The array is assigned to a temporary
+ variable which is initialized before the structure.
+ """
+ if content is None:
+ lines.append(indent + 'NULL,')
+ lines.append(indent + '0,') # Size of the array.
+ return
+
+ # Create a new array variable and use it in the structure initializer.
+ # This prohibits nested arrays. Add a clash detection and renaming mechanism
+ # to solve the problem.
+ var = _GenerateArrayVariableName(element_name, field_info['field'],
+ field_name_count)
+ lines.append(indent + '%s,' % var)
+ lines.append(indent + '%s,' % len(content)) # Size of the array.
+ # Generate the array content.
+ array_lines = []
+ field_info['contents']['field'] = var;
+ array_lines.append(struct_generator.GenerateField(
+ field_info['contents']) + '[] = {')
+ for subcontent in content:
+ GenerateFieldContent(element_name, field_info['contents'], subcontent,
+ array_lines, indent, field_name_count)
+ array_lines.append('};')
+ # Prepend the generated array so it is initialized before the structure.
+ lines.reverse()
+ array_lines.reverse()
+ lines.extend(array_lines)
+ lines.reverse()
+
+def _GenerateStruct(element_name, field_info, content, lines, indent,
+ field_name_count):
+ """Generates a struct to be included in a static structure initializer. If
+ content is not specified, uses {0}.
+ """
+ if content is None:
+ lines.append(indent + '{0},')
+ return
+
+ fields = field_info['fields']
+ lines.append(indent + '{')
+ for field in fields:
+ subcontent = content.get(field['field'])
+ GenerateFieldContent(element_name, field, subcontent, lines, ' ' + indent,
+ field_name_count)
+ lines.append(indent + '},')
+
+def GenerateFieldContent(element_name, field_info, content, lines, indent,
+ field_name_count):
+ """Generate the content of a field to be included in the static structure
+ initializer. If the field's content is not specified, uses the default value
+ if one exists.
+ """
+ if content is None:
+ content = field_info.get('default', None)
+ type = field_info['type']
+ if type == 'int' or type == 'enum':
+ lines.append('%s%s,' % (indent, content))
+ elif type == 'string':
+ _GenerateString(content, lines, indent)
+ elif type == 'string16':
+ _GenerateString16(content, lines, indent)
+ elif type == 'array':
+ _GenerateArray(element_name, field_info, content, lines, indent,
+ field_name_count)
+ elif type == 'struct':
+ _GenerateStruct(element_name, field_info, content, lines, indent,
+ field_name_count)
+ else:
+ raise RuntimeError('Unknown field type "%s"' % type)
+
+def GenerateElement(type_name, schema, element_name, element, field_name_count):
+ """Generate the static structure initializer for one element.
+ """
+ lines = [];
+ lines.append('const %s %s = {' % (type_name, element_name));
+ for field_info in schema:
+ content = element.get(field_info['field'], None)
+ if (content == None and not field_info.get('optional', False)):
+ raise RuntimeError('Mandatory field "%s" omitted in element "%s".' %
+ (field_info['field'], element_name))
+ GenerateFieldContent(element_name, field_info, content, lines, ' ',
+ field_name_count)
+ lines.append('};')
+ return '\n'.join(lines)
+
+def GenerateElements(type_name, schema, description, field_name_count={}):
+ """Generate the static structure initializer for all the elements in the
+ description['elements'] dictionary, as well as for any variables in
+ description['int_variables'].
+ """
+ result = [];
+ for var_name, value in description.get('int_variables', {}).items():
+ result.append('const int %s = %s;' % (var_name, value))
+ result.append('')
+
+ for element_name, element in description.get('elements', {}).items():
+ result.append(GenerateElement(type_name, schema, element_name, element,
+ field_name_count))
+ result.append('')
+ return '\n'.join(result)
diff --git a/chromium/tools/json_to_struct/element_generator_test.py b/chromium/tools/json_to_struct/element_generator_test.py
new file mode 100755
index 00000000000..373338ebed1
--- /dev/null
+++ b/chromium/tools/json_to_struct/element_generator_test.py
@@ -0,0 +1,238 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from element_generator import GenerateFieldContent
+from element_generator import GenerateElements
+import unittest
+
+class ElementGeneratorTest(unittest.TestCase):
+ def testGenerateIntFieldContent(self):
+ lines = [];
+ GenerateFieldContent('', {'type': 'int', 'default': 5}, None, lines, ' ',
+ {})
+ self.assertEquals([' 5,'], lines)
+ lines = [];
+ GenerateFieldContent('', {'type': 'int', 'default': 5}, 12, lines, ' ', {})
+ self.assertEquals([' 12,'], lines)
+ lines = [];
+ GenerateFieldContent('', {'type': 'int'}, -3, lines, ' ', {})
+ self.assertEquals([' -3,'], lines)
+
+ def testGenerateStringFieldContent(self):
+ lines = [];
+ GenerateFieldContent('', {'type': 'string', 'default': 'foo_bar'}, None,
+ lines, ' ', {})
+ self.assertEquals([' "foo_bar",'], lines)
+ lines = [];
+ GenerateFieldContent('', {'type': 'string', 'default': 'foo'}, 'bar\n',
+ lines, ' ', {})
+ self.assertEquals([' "bar\\n",'], lines)
+ lines = [];
+ GenerateFieldContent('', {'type': 'string'}, None, lines, ' ', {})
+ self.assertEquals([' NULL,'], lines)
+ lines = [];
+ GenerateFieldContent('', {'type': 'string'}, 'foo', lines, ' ', {})
+ self.assertEquals([' "foo",'], lines)
+
+ def testGenerateString16FieldContent(self):
+ lines = [];
+ GenerateFieldContent('', {'type': 'string16',
+ 'default': u'f\u00d8\u00d81a'},
+ None, lines, ' ', {})
+ self.assertEquals([' L"f\\x00d8" L"\\x00d8" L"1a",'], lines)
+ lines = [];
+ GenerateFieldContent('', {'type': 'string16', 'default': 'foo'},
+ u'b\uc3a5r', lines, ' ', {})
+ self.assertEquals([' L"b\\xc3a5" L"r",'], lines)
+ lines = [];
+ GenerateFieldContent('', {'type': 'string16'}, None, lines, ' ', {})
+ self.assertEquals([' NULL,'], lines)
+ lines = [];
+ GenerateFieldContent('', {'type': 'string16'}, u'foo\\u1234', lines, ' ',
+ {})
+ self.assertEquals([' L"foo\\\\u1234",'], lines)
+
+ def testGenerateEnumFieldContent(self):
+ lines = [];
+ GenerateFieldContent('', {'type': 'enum', 'default': 'RED'}, None, lines,
+ ' ', {})
+ self.assertEquals([' RED,'], lines)
+ lines = [];
+ GenerateFieldContent('', {'type': 'enum', 'default': 'RED'}, 'BLACK', lines,
+ ' ', {})
+ self.assertEquals([' BLACK,'], lines)
+ lines = [];
+ GenerateFieldContent('', {'type': 'enum'}, 'BLUE', lines, ' ', {})
+ self.assertEquals([' BLUE,'], lines)
+
+ def testGenerateArrayFieldContent(self):
+ lines = ['STRUCT BEGINS'];
+ GenerateFieldContent('test', {'type': 'array', 'contents': {'type': 'int'}},
+ None, lines, ' ', {})
+ self.assertEquals(['STRUCT BEGINS', ' NULL,', ' 0,'], lines)
+ lines = ['STRUCT BEGINS'];
+ GenerateFieldContent('test', {'field': 'my_array', 'type': 'array',
+ 'contents': {'type': 'int'}},
+ [3, 4], lines, ' ', {})
+ self.assertEquals('const int array_test_my_array[] = {\n' +
+ ' 3,\n' +
+ ' 4,\n' +
+ '};\n' +
+ 'STRUCT BEGINS\n' +
+ ' array_test_my_array,\n' +
+ ' 2,', '\n'.join(lines))
+ lines = ['STRUCT BEGINS'];
+ GenerateFieldContent('test', {'field': 'my_array', 'type': 'array',
+ 'contents': {'type': 'int'}},
+ [3, 4], lines, ' ', {'array_test_my_array': 1})
+ self.assertEquals('const int array_test_my_array_1[] = {\n' +
+ ' 3,\n' +
+ ' 4,\n' +
+ '};\n' +
+ 'STRUCT BEGINS\n' +
+ ' array_test_my_array_1,\n' +
+ ' 2,', '\n'.join(lines))
+
+ def testGenerateElements(self):
+ schema = [
+ {'field': 'f0', 'type': 'int', 'default': 1000, 'optional': True},
+ {'field': 'f1', 'type': 'string'},
+ {'field': 'f2', 'type': 'enum', 'ctype': 'QuasiBool', 'default': 'MAYBE',
+ 'optional': True},
+ {'field': 'f3', 'type': 'array', 'contents': {'type': 'string16'},
+ 'optional': True},
+ {
+ 'field': 'f4',
+ 'type': 'struct',
+ 'type_name': 'InnerType',
+ 'fields': [
+ {'field': 'g0', 'type': 'string'}
+ ],
+ 'optional': True
+ },
+ {
+ 'field': 'f5',
+ 'type': 'array',
+ 'contents': {
+ 'type': 'struct',
+ 'type_name': 'InnerType',
+ 'fields': [
+ {'field': 'a0', 'type': 'string'},
+ {'field': 'a1', 'type': 'string'}
+ ]
+ },
+ 'optional': True
+ }
+ ]
+ description = {
+ 'int_variables': {'a': -5, 'b': 5},
+ 'elements': {
+ 'elem0': {'f0': 5, 'f1': 'foo', 'f2': 'SURE'},
+ 'elem1': {'f2': 'NOWAY', 'f0': -2, 'f1': 'bar'},
+ 'elem2': {'f1': 'foo_bar', 'f3': [u'bar', u'foo']},
+ 'elem3': {'f1': 'foo', 'f4': {'g0': 'test'}},
+ 'elem4': {'f1': 'foo', 'f5': [{'a0': 'test0', 'a1': 'test1'}]},
+ }
+ }
+
+    # Build the expected result stream based on the unpredictable order the
+    # dictionary elements are listed in.
+ int_variable_expected = {
+ 'a': 'const int a = -5;\n',
+ 'b': 'const int b = 5;\n',
+ }
+ elements_expected = {
+ 'elem0': 'const MyType elem0 = {\n'
+ ' 5,\n'
+ ' "foo",\n'
+ ' SURE,\n'
+ ' NULL,\n'
+ ' 0,\n'
+ ' {0},\n'
+ ' NULL,\n'
+ ' 0,\n'
+ '};\n',
+ 'elem1': 'const MyType elem1 = {\n'
+ ' -2,\n'
+ ' "bar",\n'
+ ' NOWAY,\n'
+ ' NULL,\n'
+ ' 0,\n'
+ ' {0},\n'
+ ' NULL,\n'
+ ' 0,\n'
+ '};\n',
+ 'elem2': 'const wchar_t* const array_elem2_f3[] = {\n'
+ ' L"bar",\n'
+ ' L"foo",\n'
+ '};\n'
+ 'const MyType elem2 = {\n'
+ ' 1000,\n'
+ ' "foo_bar",\n'
+ ' MAYBE,\n'
+ ' array_elem2_f3,\n'
+ ' 2,\n'
+ ' {0},\n'
+ ' NULL,\n'
+ ' 0,\n'
+ '};\n',
+ 'elem3': 'const MyType elem3 = {\n'
+ ' 1000,\n'
+ ' "foo",\n'
+ ' MAYBE,\n'
+ ' NULL,\n'
+ ' 0,\n'
+ ' {\n'
+ ' "test",\n'
+ ' },\n'
+ ' NULL,\n'
+ ' 0,\n'
+ '};\n',
+ 'elem4': 'const InnerType array_elem4_f5[] = {\n'
+ ' {\n'
+ ' "test0",\n'
+ ' "test1",\n'
+ ' },\n'
+ '};\n'
+ 'const MyType elem4 = {\n'
+ ' 1000,\n'
+ ' "foo",\n'
+ ' MAYBE,\n'
+ ' NULL,\n'
+ ' 0,\n'
+ ' {0},\n'
+ ' array_elem4_f5,\n'
+ ' 1,\n'
+ '};\n'
+ }
+ expected = ''
+ for key, value in description['int_variables'].items():
+ expected += int_variable_expected[key]
+ expected += '\n'
+ elements = []
+ for key, value in description['elements'].items():
+ elements.append(elements_expected[key])
+ expected += '\n'.join(elements)
+
+ result = GenerateElements('MyType', schema, description)
+ self.assertEquals(expected, result)
+
+ def testGenerateElementsMissingMandatoryField(self):
+ schema = [
+ {'field': 'f0', 'type': 'int'},
+ {'field': 'f1', 'type': 'string'},
+ ]
+ description = {
+ 'int_variables': {'a': -5, 'b': 5},
+ 'elements': {
+ 'elem0': {'f0': 5},
+ }
+ }
+
+ self.assertRaises(RuntimeError,
+ lambda: GenerateElements('MyType', schema, description))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/json_to_struct/json_to_struct.gni b/chromium/tools/json_to_struct/json_to_struct.gni
new file mode 100644
index 00000000000..814392258b5
--- /dev/null
+++ b/chromium/tools/json_to_struct/json_to_struct.gni
@@ -0,0 +1,64 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Converts a .json file to a C++ struct.
+#
+# Variables:
+#
+# source (required)
+# Single file name of source .json file.
+#
+# schema_file (required)
+# Single file name of the .json file that defines the schema.
+#
+# namespace (required)
+# Namespace name to put result in.
+#
+# visibility (optional)
+# Normal meaning.
+template("json_to_struct") {
+ assert(defined(invoker.source), "source required in $target_name")
+ assert(defined(invoker.schema_file), "schema_file required in $target_name")
+ assert(defined(invoker.namespace), "namespace required in $target_name")
+
+ action_name = target_name + "_action"
+ source_set_name = target_name
+
+ action(action_name) {
+ visibility = [ ":$source_set_name" ]
+ script = "//tools/json_to_struct/json_to_struct.py"
+
+ inputs = [
+ "//tools/json_to_struct/element_generator.py",
+ "//tools/json_to_struct/struct_generator.py",
+ invoker.source,
+ ]
+
+ out_dir = get_path_info(invoker.source, "gen_dir")
+ out_name = get_path_info(invoker.source, "name")
+ outputs = [
+ "$out_dir/$out_name.cc",
+ "$out_dir/$out_name.h",
+ ]
+
+ args = [
+ rebase_path(invoker.source, root_build_dir),
+ "--destbase=" + rebase_path(out_dir, root_build_dir),
+ "--namespace=" + invoker.namespace,
+ "--schema=" + rebase_path(invoker.schema_file, root_build_dir),
+ ]
+ }
+
+ source_set(source_set_name) {
+ if (defined(invoker.visibility)) {
+ visibility = invoker.visibility
+ }
+
+ sources = get_target_outputs(":$action_name")
+
+ deps = [
+ ":$action_name",
+ ]
+ }
+}
diff --git a/chromium/tools/json_to_struct/json_to_struct.py b/chromium/tools/json_to_struct/json_to_struct.py
new file mode 100755
index 00000000000..46cfd4a9397
--- /dev/null
+++ b/chromium/tools/json_to_struct/json_to_struct.py
@@ -0,0 +1,243 @@
+#!/usr/bin/env python
+# Copyright 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Format for the JSON schema file:
+# {
+# "type_name": "DesiredCStructName",
+# "headers": [ // Optional list of headers to be included by the .h.
+# "path/to/header.h"
+# ],
+# "schema": [ // Fields of the generated structure.
+# {
+# "field": "my_enum_field",
+# "type": "enum", // Either: int, string, string16, enum, array, struct.
+# "default": "RED", // Optional. Cannot be used for array.
+# "ctype": "Color" // Only for enum, specify the C type.
+# },
+# {
+# "field": "my_int_array_field", // my_int_array_field_size will also
+# "type": "array", // be generated.
+# "contents": {
+# "type": "int" // Either: int, string, string16, enum, array.
+# }
+# },
+# {
+# "field": "my_struct_field",
+#       "type_name": "PointStruct",
+# "type": "struct",
+# "fields": [
+# {"field": "x", "type": "int"},
+# {"field": "y", "type": "int"}
+# ]
+# },
+# ...
+# ]
+# }
+#
+# Format for the JSON description file:
+# {
+# "int_variables": { // An optional list of constant int variables.
+# "kDesiredConstantName": 45
+# },
+# "elements": { // All the elements for which to create static
+# // initialization code in the .cc file.
+# "my_const_variable": {
+# "my_int_field": 10,
+# "my_string_field": "foo bar",
+# "my_enum_field": "BLACK",
+# "my_int_array_field": [ 1, 2, 3, 5, 7 ],
+# "my_struct_field": {"x": 1, "y": 2}
+# },
+# "my_other_const_variable": {
+# ...
+# }
+# }
+# }
+
+import json
+from datetime import datetime
+import os.path
+import sys
+import optparse
+import re
+_script_path = os.path.realpath(__file__)
+
+sys.path.insert(0, os.path.normpath(_script_path + "/../../json_comment_eater"))
+try:
+ import json_comment_eater
+finally:
+ sys.path.pop(0)
+
+import struct_generator
+import element_generator
+
+HEAD = """// Copyright %d The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// GENERATED FROM THE SCHEMA DEFINITION AND DESCRIPTION IN
+// %s
+// %s
+// DO NOT EDIT.
+
+"""
+
+def _GenerateHeaderGuard(h_filename):
+ """Generates the string used in #ifndef guarding the header file.
+ """
+ result = re.sub('[%s\\\\.]' % os.sep, '_', h_filename.upper())
+ return re.sub('^_*', '', result) + '_' # Remove leading underscores.
+
+def _GenerateH(basepath, fileroot, head, namespace, schema, description):
+ """Generates the .h file containing the definition of the structure specified
+ by the schema.
+
+ Args:
+ basepath: The base directory in which files are generated.
+ fileroot: The filename and path, relative to basepath, of the file to
+ create, without an extension.
+ head: The string to output as the header of the .h file.
+ namespace: A string corresponding to the C++ namespace to use.
+ schema: A dict containing the schema. See comment at the top of this file.
+ description: A dict containing the description. See comment at the top of
+ this file.
+ """
+
+ h_filename = fileroot + '.h'
+ with open(os.path.join(basepath, h_filename), 'w') as f:
+ f.write(head)
+
+ header_guard = _GenerateHeaderGuard(h_filename)
+ f.write('#ifndef %s\n' % header_guard)
+ f.write('#define %s\n' % header_guard)
+ f.write('\n')
+
+ f.write('#include <cstddef>\n')
+ f.write('\n')
+
+ for header in schema.get('headers', []):
+ f.write('#include "%s"\n' % header)
+ f.write('\n')
+
+ if namespace:
+ f.write('namespace %s {\n' % namespace)
+ f.write('\n')
+
+ f.write(struct_generator.GenerateStruct(
+ schema['type_name'], schema['schema']))
+ f.write('\n')
+
+ for var_name, value in description.get('int_variables', {}).items():
+ f.write('extern const int %s;\n' % var_name)
+ f.write('\n')
+
+ for element_name, element in description['elements'].items():
+ f.write('extern const %s %s;\n' % (schema['type_name'], element_name))
+
+ if namespace:
+ f.write('\n')
+ f.write('} // namespace %s\n' % namespace)
+
+ f.write('\n')
+ f.write( '#endif // %s\n' % header_guard)
+
+def _GenerateCC(basepath, fileroot, head, namespace, schema, description):
+ """Generates the .cc file containing the static initializers for the
+  elements specified in the description.
+
+ Args:
+ basepath: The base directory in which files are generated.
+ fileroot: The filename and path, relative to basepath, of the file to
+ create, without an extension.
+ head: The string to output as the header of the .cc file.
+ namespace: A string corresponding to the C++ namespace to use.
+ schema: A dict containing the schema. See comment at the top of this file.
+ description: A dict containing the description. See comment at the top of
+ this file.
+ """
+
+ with open(os.path.join(basepath, fileroot + '.cc'), 'w') as f:
+ f.write(head)
+
+ f.write('#include "%s"\n' % (fileroot + '.h'))
+ f.write('\n')
+
+ if namespace:
+ f.write('namespace %s {\n' % namespace)
+ f.write('\n')
+
+ f.write(element_generator.GenerateElements(schema['type_name'],
+ schema['schema'], description))
+
+ if namespace:
+ f.write('\n')
+ f.write('} // namespace %s\n' % namespace)
+
+def _Load(filename):
+  """Loads a JSON file into a Python object and returns this object.
+ """
+ # TODO(beaudoin): When moving to Python 2.7 use object_pairs_hook=OrderedDict.
+ with open(filename, 'r') as handle:
+ result = json.loads(json_comment_eater.Nom(handle.read()))
+ return result
+
+def GenerateStruct(basepath, output_root, namespace, schema, description,
+ description_filename, schema_filename, year=None):
+ """Generates a C++ struct from a JSON description.
+
+ Args:
+ basepath: The base directory in which files are generated.
+ output_root: The filename and path, relative to basepath, of the file to
+ create, without an extension.
+ namespace: A string corresponding to the C++ namespace to use.
+ schema: A dict containing the schema. See comment at the top of this file.
+ description: A dict containing the description. See comment at the top of
+ this file.
+ description_filename: The description filename. This is added to the
+ header of the outputted files.
+ schema_filename: The schema filename. This is added to the header of the
+ outputted files.
+ year: Year to display next to the copy-right in the header.
+ """
+ year = int(year) if year else datetime.now().year
+ head = HEAD % (year, schema_filename, description_filename)
+ _GenerateH(basepath, output_root, head, namespace, schema, description)
+ _GenerateCC(basepath, output_root, head, namespace, schema, description)
+
+if __name__ == '__main__':
+ parser = optparse.OptionParser(
+ description='Generates an C++ array of struct from a JSON description.',
+ usage='usage: %prog [option] -s schema description')
+ parser.add_option('-b', '--destbase',
+ help='base directory of generated files.')
+ parser.add_option('-d', '--destdir',
+ help='directory to output generated files, relative to destbase.')
+ parser.add_option('-n', '--namespace',
+ help='C++ namespace for generated files. e.g search_providers.')
+ parser.add_option('-s', '--schema', help='path to the schema file, '
+ 'mandatory.')
+ parser.add_option('-o', '--output', help='output filename, ')
+ (opts, args) = parser.parse_args()
+
+ if not opts.schema:
+ parser.error('You must specify a --schema.')
+
+ description_filename = os.path.normpath(args[0])
+ root, ext = os.path.splitext(description_filename)
+ shortroot = opts.output if opts.output else os.path.split(root)[1]
+ if opts.destdir:
+ output_root = os.path.join(os.path.normpath(opts.destdir), shortroot)
+ else:
+ output_root = shortroot
+
+ if opts.destbase:
+ basepath = os.path.normpath(opts.destbase)
+ else:
+ basepath = ''
+
+ schema = _Load(opts.schema)
+ description = _Load(description_filename)
+ GenerateStruct(basepath, output_root, opts.namespace, schema, description,
+ description_filename, opts.schema)
diff --git a/chromium/tools/json_to_struct/struct_generator.py b/chromium/tools/json_to_struct/struct_generator.py
new file mode 100644
index 00000000000..5849d82b451
--- /dev/null
+++ b/chromium/tools/json_to_struct/struct_generator.py
@@ -0,0 +1,53 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+def _GenerateArrayField(field_info):
+ """Generate a string defining an array field in a C structure.
+ """
+ contents = field_info['contents']
+ contents['field'] = '* ' + field_info['field']
+ if contents['type'] == 'array':
+ raise RuntimeError('Nested arrays are not supported.')
+ return (GenerateField(contents) + ';\n' +
+ ' const size_t %s_size') % field_info['field'];
+
+def GenerateField(field_info):
+ """Generate a string defining a field of the type specified by
+ field_info['type'] in a C structure.
+ """
+ field = field_info['field']
+ type = field_info['type']
+ if type == 'int':
+ return 'const int %s' % field
+ elif type == 'string':
+ return 'const char* const %s' % field
+ elif type == 'string16':
+ return 'const wchar_t* const %s' % field
+ elif type == 'enum':
+ return 'const %s %s' % (field_info['ctype'], field)
+ elif type == 'array':
+ return _GenerateArrayField(field_info)
+ elif type == 'struct':
+ return 'const %s %s' % (field_info['type_name'], field)
+ else:
+ raise RuntimeError('Unknown field type "%s"' % type)
+
+def GenerateStruct(type_name, schema):
+ """Generate a string defining a structure containing the fields specified in
+ the schema list.
+ """
+ lines = [];
+ lines.append('struct %s {' % type_name)
+ for field_info in schema:
+ if field_info['type'] == 'struct':
+ lines.insert(0, GenerateStruct(field_info['type_name'],
+ field_info['fields']))
+ elif (field_info['type'] == 'array'
+ and field_info['contents']['type'] == 'struct'):
+ contents = field_info['contents']
+ lines.insert(0, GenerateStruct(contents['type_name'],
+ contents['fields']))
+ lines.append(' ' + GenerateField(field_info) + ';')
+ lines.append('};');
+ return '\n'.join(lines) + '\n';
diff --git a/chromium/tools/json_to_struct/struct_generator_test.py b/chromium/tools/json_to_struct/struct_generator_test.py
new file mode 100755
index 00000000000..bff5db73af8
--- /dev/null
+++ b/chromium/tools/json_to_struct/struct_generator_test.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from struct_generator import GenerateField
+from struct_generator import GenerateStruct
+import unittest
+
+class StructGeneratorTest(unittest.TestCase):
+ def testGenerateIntField(self):
+ self.assertEquals('const int foo_bar',
+ GenerateField({'type': 'int', 'field': 'foo_bar'}))
+
+ def testGenerateStringField(self):
+ self.assertEquals('const char* const bar_foo',
+ GenerateField({'type': 'string', 'field': 'bar_foo'}))
+
+ def testGenerateString16Field(self):
+ self.assertEquals('const wchar_t* const foo_bar',
+ GenerateField({'type': 'string16', 'field': 'foo_bar'}))
+
+ def testGenerateEnumField(self):
+ self.assertEquals('const MyEnumType foo_foo',
+ GenerateField({'type': 'enum',
+ 'field': 'foo_foo',
+ 'ctype': 'MyEnumType'}))
+
+ def testGenerateArrayField(self):
+ self.assertEquals('const int * bar_bar;\n'
+ ' const size_t bar_bar_size',
+ GenerateField({'type': 'array',
+ 'field': 'bar_bar',
+ 'contents': {'type': 'int'}}))
+
+ def testGenerateStruct(self):
+ schema = [
+ {'type': 'int', 'field': 'foo_bar'},
+ {'type': 'string', 'field': 'bar_foo', 'default': 'dummy'},
+ {
+ 'type': 'array',
+ 'field': 'bar_bar',
+ 'contents': {
+ 'type': 'enum',
+ 'ctype': 'MyEnumType'
+ }
+ }
+ ]
+ struct = ('struct MyTypeName {\n'
+ ' const int foo_bar;\n'
+ ' const char* const bar_foo;\n'
+ ' const MyEnumType * bar_bar;\n'
+ ' const size_t bar_bar_size;\n'
+ '};\n')
+ self.assertEquals(struct, GenerateStruct('MyTypeName', schema))
+
+ def testGenerateArrayOfStruct(self):
+ schema = [
+ {
+ 'type': 'array',
+ 'field': 'bar_bar',
+ 'contents': {
+ 'type': 'struct',
+ 'type_name': 'InnerTypeName',
+ 'fields': [
+ {'type': 'string', 'field': 'key'},
+ {'type': 'string', 'field': 'value'},
+ ]
+ }
+ }
+ ]
+ struct = (
+ 'struct InnerTypeName {\n'
+ ' const char* const key;\n'
+ ' const char* const value;\n'
+ '};\n'
+ '\n'
+ 'struct MyTypeName {\n'
+ ' const InnerTypeName * bar_bar;\n'
+ ' const size_t bar_bar_size;\n'
+ '};\n')
+ self.assertEquals(struct, GenerateStruct('MyTypeName', schema))
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/licenses.py b/chromium/tools/licenses.py
new file mode 100755
index 00000000000..9a6602b855a
--- /dev/null
+++ b/chromium/tools/licenses.py
@@ -0,0 +1,576 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility for checking and processing licensing information in third_party
+directories.
+
+Usage: licenses.py <command>
+
+Commands:
+ scan scan third_party directories, verifying that we have licensing info
+ credits generate about:credits on stdout
+
+(You can also import this as a module.)
+"""
+
+import argparse
+import cgi
+import os
+import sys
+
+# Paths from the root of the tree to directories to skip.
+PRUNE_PATHS = set([
+ # Same module occurs in crypto/third_party/nss and net/third_party/nss, so
+ # skip this one.
+ os.path.join('third_party','nss'),
+
+ # Placeholder directory only, not third-party code.
+ os.path.join('third_party','adobe'),
+
+ # Apache 2.0 license. See crbug.com/140478
+ os.path.join('third_party','bidichecker'),
+
+ # Build files only, not third-party code.
+ os.path.join('third_party','widevine'),
+
+ # Only binaries, used during development.
+ os.path.join('third_party','valgrind'),
+
+ # Used for development and test, not in the shipping product.
+ os.path.join('build','secondary'),
+ os.path.join('third_party','bison'),
+ os.path.join('third_party','blanketjs'),
+ os.path.join('third_party','cygwin'),
+ os.path.join('third_party','gles2_conform'),
+ os.path.join('third_party','gnu_binutils'),
+ os.path.join('third_party','gold'),
+ os.path.join('third_party','gperf'),
+ os.path.join('third_party','kasko'),
+ os.path.join('third_party','lighttpd'),
+ os.path.join('third_party','llvm'),
+ os.path.join('third_party','llvm-build'),
+ os.path.join('third_party','mingw-w64'),
+ os.path.join('third_party','nacl_sdk_binaries'),
+ os.path.join('third_party','pefile'),
+ os.path.join('third_party','perl'),
+ os.path.join('third_party','psyco_win32'),
+ os.path.join('third_party','pylib'),
+ os.path.join('third_party','pywebsocket'),
+ os.path.join('third_party','qunit'),
+ os.path.join('third_party','sinonjs'),
+ os.path.join('third_party','syzygy'),
+
+ # Chromium code in third_party.
+ os.path.join('third_party','fuzzymatch'),
+ os.path.join('tools', 'swarming_client'),
+
+ # Stuff pulled in from chrome-internal for official builds/tools.
+ os.path.join('third_party', 'clear_cache'),
+ os.path.join('third_party', 'gnu'),
+ os.path.join('third_party', 'googlemac'),
+ os.path.join('third_party', 'pcre'),
+ os.path.join('third_party', 'psutils'),
+ os.path.join('third_party', 'sawbuck'),
+ # See crbug.com/350472
+ os.path.join('chrome', 'browser', 'resources', 'chromeos', 'quickoffice'),
+ # Chrome for Android proprietary code.
+ os.path.join('clank'),
+
+ # Redistribution does not require attribution in documentation.
+ os.path.join('third_party','directxsdk'),
+ os.path.join('third_party','platformsdk_win2008_6_1'),
+ os.path.join('third_party','platformsdk_win7'),
+
+ # For testing only, presents on some bots.
+ os.path.join('isolate_deps_dir'),
+
+ # Overrides some WebRTC files, same license. Skip this one.
+ os.path.join('third_party', 'webrtc_overrides'),
+])
+
+# Directories we don't scan through.
+VCS_METADATA_DIRS = ('.svn', '.git')
+PRUNE_DIRS = (VCS_METADATA_DIRS +
+ ('out', 'Debug', 'Release', # build files
+ 'layout_tests')) # lots of subdirs
+
+ADDITIONAL_PATHS = (
+ os.path.join('breakpad'),
+ os.path.join('chrome', 'common', 'extensions', 'docs', 'examples'),
+ os.path.join('chrome', 'test', 'chromeos', 'autotest'),
+ os.path.join('chrome', 'test', 'data'),
+ os.path.join('native_client'),
+ os.path.join('net', 'tools', 'spdyshark'),
+ os.path.join('sdch', 'open-vcdiff'),
+ os.path.join('testing', 'gmock'),
+ os.path.join('testing', 'gtest'),
+ os.path.join('tools', 'gyp'),
+ os.path.join('tools', 'page_cycler', 'acid3'),
+ os.path.join('url', 'third_party', 'mozilla'),
+ os.path.join('v8'),
+ # Fake directories to include the strongtalk and fdlibm licenses.
+ os.path.join('v8', 'strongtalk'),
+ os.path.join('v8', 'fdlibm'),
+)
+
+
+# Directories where we check out directly from upstream, and therefore
+# can't provide a README.chromium. Please prefer a README.chromium
+# wherever possible.
+SPECIAL_CASES = {
+ os.path.join('native_client'): {
+ "Name": "native client",
+ "URL": "http://code.google.com/p/nativeclient",
+ "License": "BSD",
+ },
+ os.path.join('sdch', 'open-vcdiff'): {
+ "Name": "open-vcdiff",
+ "URL": "https://github.com/google/open-vcdiff",
+ "License": "Apache 2.0, MIT, GPL v2 and custom licenses",
+ "License Android Compatible": "yes",
+ },
+ os.path.join('testing', 'gmock'): {
+ "Name": "gmock",
+ "URL": "http://code.google.com/p/googlemock",
+ "License": "BSD",
+ "License File": "NOT_SHIPPED",
+ },
+ os.path.join('testing', 'gtest'): {
+ "Name": "gtest",
+ "URL": "http://code.google.com/p/googletest",
+ "License": "BSD",
+ "License File": "NOT_SHIPPED",
+ },
+ os.path.join('third_party', 'angle'): {
+ "Name": "Almost Native Graphics Layer Engine",
+ "URL": "http://code.google.com/p/angleproject/",
+ "License": "BSD",
+ },
+ os.path.join('third_party', 'cros_system_api'): {
+ "Name": "Chromium OS system API",
+ "URL": "http://www.chromium.org/chromium-os",
+ "License": "BSD",
+ # Absolute path here is resolved as relative to the source root.
+ "License File": "/LICENSE.chromium_os",
+ },
+ os.path.join('third_party', 'lss'): {
+ "Name": "linux-syscall-support",
+ "URL": "http://code.google.com/p/linux-syscall-support/",
+ "License": "BSD",
+ "License File": "/LICENSE",
+ },
+ os.path.join('third_party', 'pdfium'): {
+ "Name": "PDFium",
+ "URL": "http://code.google.com/p/pdfium/",
+ "License": "BSD",
+ },
+ os.path.join('third_party', 'pdfsqueeze'): {
+ "Name": "pdfsqueeze",
+ "URL": "http://code.google.com/p/pdfsqueeze/",
+ "License": "Apache 2.0",
+ "License File": "COPYING",
+ },
+ os.path.join('third_party', 'ppapi'): {
+ "Name": "ppapi",
+ "URL": "http://code.google.com/p/ppapi/",
+ },
+ os.path.join('third_party', 'scons-2.0.1'): {
+ "Name": "scons-2.0.1",
+ "URL": "http://www.scons.org",
+ "License": "MIT",
+ "License File": "NOT_SHIPPED",
+ },
+ os.path.join('third_party', 'catapult'): {
+ "Name": "catapult",
+ "URL": "https://github.com/catapult-project/catapult",
+ "License": "BSD",
+ "License File": "NOT_SHIPPED",
+ },
+ os.path.join('third_party', 'v8-i18n'): {
+ "Name": "Internationalization Library for v8",
+ "URL": "http://code.google.com/p/v8-i18n/",
+ "License": "Apache 2.0",
+ },
+ os.path.join('third_party', 'WebKit'): {
+ "Name": "WebKit",
+ "URL": "http://webkit.org/",
+ "License": "BSD and GPL v2",
+ # Absolute path here is resolved as relative to the source root.
+ "License File": "/third_party/WebKit/LICENSE_FOR_ABOUT_CREDITS",
+ },
+ os.path.join('third_party', 'webpagereplay'): {
+ "Name": "webpagereplay",
+ "URL": "http://code.google.com/p/web-page-replay",
+ "License": "Apache 2.0",
+ "License File": "NOT_SHIPPED",
+ },
+ os.path.join('tools', 'gyp'): {
+ "Name": "gyp",
+ "URL": "http://code.google.com/p/gyp",
+ "License": "BSD",
+ "License File": "NOT_SHIPPED",
+ },
+ os.path.join('v8'): {
+ "Name": "V8 JavaScript Engine",
+ "URL": "http://code.google.com/p/v8",
+ "License": "BSD",
+ },
+ os.path.join('v8', 'strongtalk'): {
+ "Name": "Strongtalk",
+ "URL": "http://www.strongtalk.org/",
+ "License": "BSD",
+ # Absolute path here is resolved as relative to the source root.
+ "License File": "/v8/LICENSE.strongtalk",
+ },
+ os.path.join('v8', 'fdlibm'): {
+ "Name": "fdlibm",
+ "URL": "http://www.netlib.org/fdlibm/",
+ "License": "Freely Distributable",
+ # Absolute path here is resolved as relative to the source root.
+ "License File" : "/v8/src/third_party/fdlibm/LICENSE",
+ "License Android Compatible" : "yes",
+ },
+ os.path.join('third_party', 'khronos_glcts'): {
+ # These sources are not shipped, are not public, and it isn't
+ # clear why they're tripping the license check.
+ "Name": "khronos_glcts",
+ "URL": "http://no-public-url",
+ "License": "Khronos",
+ "License File": "NOT_SHIPPED",
+ },
+ os.path.join('tools', 'telemetry', 'third_party', 'gsutil'): {
+ "Name": "gsutil",
+ "URL": "https://cloud.google.com/storage/docs/gsutil",
+ "License": "Apache 2.0",
+ "License File": "NOT_SHIPPED",
+ },
+}
+
+# Special value for 'License File' field used to indicate that the license file
+# should not be used in about:credits.
+NOT_SHIPPED = "NOT_SHIPPED"
+
+# Paths for libraries that we have checked are not shipped on iOS. These are
+# left out of the licenses file primarily because we don't want to cause a
+# firedrill due to someone thinking that Chrome for iOS is using LGPL code
+# when it isn't.
+# This is a temporary hack; the real solution is crbug.com/178215
+KNOWN_NON_IOS_LIBRARIES = set([
+ os.path.join('base', 'third_party', 'symbolize'),
+ os.path.join('base', 'third_party', 'xdg_mime'),
+ os.path.join('base', 'third_party', 'xdg_user_dirs'),
+ os.path.join('chrome', 'installer', 'mac', 'third_party', 'bsdiff'),
+ os.path.join('chrome', 'installer', 'mac', 'third_party', 'xz'),
+ os.path.join('chrome', 'test', 'data', 'third_party', 'kraken'),
+ os.path.join('chrome', 'test', 'data', 'third_party', 'spaceport'),
+ os.path.join('chrome', 'third_party', 'mock4js'),
+ os.path.join('chrome', 'third_party', 'mozilla_security_manager'),
+ os.path.join('third_party', 'WebKit'),
+ os.path.join('third_party', 'angle'),
+ os.path.join('third_party', 'apple_apsl'),
+ os.path.join('third_party', 'apple_sample_code'),
+ os.path.join('third_party', 'ashmem'),
+ os.path.join('third_party', 'bspatch'),
+ os.path.join('third_party', 'cacheinvalidation'),
+ os.path.join('third_party', 'cld'),
+ os.path.join('third_party', 'codesighs'),
+ os.path.join('third_party', 'flot'),
+ os.path.join('third_party', 'gtk+'),
+ os.path.join('third_party', 'iaccessible2'),
+ os.path.join('third_party', 'iccjpeg'),
+ os.path.join('third_party', 'isimpledom'),
+ os.path.join('third_party', 'jsoncpp'),
+ os.path.join('third_party', 'khronos'),
+ os.path.join('third_party', 'libXNVCtrl'),
+ os.path.join('third_party', 'libevent'),
+ os.path.join('third_party', 'libjpeg'),
+ os.path.join('third_party', 'libusb'),
+ os.path.join('third_party', 'libva'),
+ os.path.join('third_party', 'libxslt'),
+ os.path.join('third_party', 'lss'),
+ os.path.join('third_party', 'lzma_sdk'),
+ os.path.join('third_party', 'mesa'),
+ os.path.join('third_party', 'molokocacao'),
+ os.path.join('third_party', 'motemplate'),
+ os.path.join('third_party', 'mozc'),
+ os.path.join('third_party', 'mozilla'),
+ os.path.join('third_party', 'npapi'),
+ os.path.join('third_party', 'ots'),
+ os.path.join('third_party', 'pdfsqueeze'),
+ os.path.join('third_party', 'ppapi'),
+ os.path.join('third_party', 'qcms'),
+ os.path.join('third_party', 're2'),
+ os.path.join('third_party', 'safe_browsing'),
+ os.path.join('third_party', 'sfntly'),
+ os.path.join('third_party', 'smhasher'),
+ os.path.join('third_party', 'sudden_motion_sensor'),
+ os.path.join('third_party', 'swiftshader'),
+ os.path.join('third_party', 'swig'),
+ os.path.join('third_party', 'talloc'),
+ os.path.join('third_party', 'tcmalloc'),
+ os.path.join('third_party', 'usb_ids'),
+ os.path.join('third_party', 'v8-i18n'),
+ os.path.join('third_party', 'wtl'),
+ os.path.join('third_party', 'yasm'),
+ os.path.join('v8', 'strongtalk'),
+])
+
+
class LicenseError(Exception):
  """Raised when a third_party directory's licensing information is missing
  or incompletely filled out."""
+
def AbsolutePath(path, filename, root):
  """Resolve a license-file name from README.chromium to an absolute path.

  A |filename| beginning with '/' is interpreted relative to the source
  |root| (the directory we're run from); otherwise it is relative to the
  component directory |path|. Returns the absolute path if the file exists
  on disk, or None.
  """
  if filename.startswith('/'):
    candidate = os.path.join(root, filename[1:])
  else:
    candidate = os.path.join(root, path, filename)
  return candidate if os.path.exists(candidate) else None
+
def ParseDir(path, root, require_license_file=True, optional_keys=None):
  """Examine a third_party/foo component and extract its metadata.

  Args:
    path: component directory, relative to |root|.
    root: absolute path of the source tree root.
    require_license_file: when True, a missing license file is an error.
    optional_keys: extra README.chromium field names to capture if present.

  Returns:
    A dict with at least "Name", "URL", "License" and "License File"
    (the latter resolved to an absolute path, or NOT_SHIPPED).

  Raises:
    LicenseError: if required metadata or the license file is missing.
  """
  # Parse metadata fields out of README.chromium.
  # We examine "LICENSE" for the license file by default.
  metadata = {
      "License File": "LICENSE",  # Relative path to license text.
      "Name": None,               # Short name (for header on about:credits).
      "URL": None,                # Project home page.
      "License": None,            # Software license.
      }

  if optional_keys is None:
    optional_keys = []

  if path in SPECIAL_CASES:
    metadata.update(SPECIAL_CASES[path])
  else:
    # Try to find README.chromium.
    readme_path = os.path.join(root, path, 'README.chromium')
    if not os.path.exists(readme_path):
      raise LicenseError("missing README.chromium or licenses.py "
                         "SPECIAL_CASES entry")

    for line in open(readme_path):
      line = line.strip()
      # Fields must appear in the leading header block; stop at the first
      # blank line.
      if not line:
        break
      for key in metadata.keys() + optional_keys:
        field = key + ": "
        if line.startswith(field):
          metadata[key] = line[len(field):]

  # Check that all expected metadata is present.
  errors = []
  for key, value in metadata.iteritems():
    if not value:
      # Fixed typo: this file is licenses.py, not "licences.py".
      errors.append("couldn't find '" + key + "' line "
                    "in README.chromium or licenses.py "
                    "SPECIAL_CASES")

  # Special-case modules that aren't in the shipping product, so don't need
  # their license in about:credits.
  if metadata["License File"] != NOT_SHIPPED:
    # Check that the license file exists: try the declared name first, then
    # fall back to the conventional "COPYING".
    for filename in (metadata["License File"], "COPYING"):
      license_path = AbsolutePath(path, filename, root)
      if license_path is not None:
        break

    if require_license_file and not license_path:
      errors.append("License file not found. "
                    "Either add a file named LICENSE, "
                    "import upstream's COPYING if available, "
                    "or add a 'License File:' line to "
                    "README.chromium with the appropriate path.")
    metadata["License File"] = license_path

  if errors:
    raise LicenseError(";\n".join(errors))
  return metadata
+
+
def ContainsFiles(path, root):
  """Return True if the tree rooted at |root|/|path| contains any files.

  The search is recursive, but version-control metadata directories are not
  descended into.
  """
  for _, subdirs, filenames in os.walk(os.path.join(root, path)):
    if filenames:
      return True
    # Prune VCS bookkeeping directories in place so os.walk skips them.
    subdirs[:] = [d for d in subdirs if d not in VCS_METADATA_DIRS]
  return False
+
+
def FilterDirsWithFiles(dirs_list, root):
  """Drop directories that contain no files at all.

  A file-less directory is assumed to be a DEPS checkout location for a
  project not used by our current configuration.
  """
  kept = []
  for candidate in dirs_list:
    if ContainsFiles(candidate, root):
      kept.append(candidate)
  return kept
+
+
def FindThirdPartyDirs(prune_paths, root):
  """Find all third_party directories underneath the source root.

  Returns a set of root-relative paths: every non-pruned child of any
  'third_party' directory, plus the entries of ADDITIONAL_PATHS.
  """
  third_party_dirs = set()
  for dirpath, subdirs, _ in os.walk(root):
    rel = dirpath[len(root) + 1:]  # Pretty up the path: make it root-relative.

    if rel in prune_paths:
      subdirs[:] = []
      continue

    # Prune out directories we want to skip. Rebuilding the list in place
    # avoids mutating it while iterating and keeps os.walk from descending.
    subdirs[:] = [d for d in subdirs if d not in PRUNE_DIRS]

    if os.path.basename(rel) == 'third_party':
      # Add all subdirectories that are not marked for skipping.
      for child in subdirs:
        childpath = os.path.join(rel, child)
        if childpath not in prune_paths:
          third_party_dirs.add(childpath)

      # Don't recurse into any subdirs from here.
      subdirs[:] = []
      continue

    # Don't recurse into paths in ADDITIONAL_PATHS, like we do with regular
    # third_party/foo paths.
    if rel in ADDITIONAL_PATHS:
      subdirs[:] = []

  for extra in ADDITIONAL_PATHS:
    if extra not in prune_paths:
      third_party_dirs.add(extra)

  return third_party_dirs
+
+
def FindThirdPartyDirsWithFiles(root):
  """Find third_party directories under |root|, skipping file-less ones."""
  candidates = FindThirdPartyDirs(PRUNE_PATHS, root)
  return FilterDirsWithFiles(candidates, root)
+
+
+def ScanThirdPartyDirs(root=None):
+ """Scan a list of directories and report on any problems we find."""
+ if root is None:
+ root = os.getcwd()
+ third_party_dirs = FindThirdPartyDirsWithFiles(root)
+
+ errors = []
+ for path in sorted(third_party_dirs):
+ try:
+ metadata = ParseDir(path, root)
+ except LicenseError, e:
+ errors.append((path, e.args[0]))
+ continue
+
+ for path, error in sorted(errors):
+ print path + ": " + error
+
+ return len(errors) == 0
+
+
def GenerateCredits(
    file_template_file, entry_template_file, output_file, target_os):
  """Generate about:credits.

  Expands one entry template per shipped third_party component into the page
  template, writing the result to |output_file| (or stdout when falsy).
  Always returns True; components with bad metadata are silently skipped.
  """

  def EvaluateTemplate(template, env, escape=True):
    """Expand a template with variables like {{foo}} using a
    dictionary of expansions."""
    for key, val in env.items():
      if escape:
        val = cgi.escape(val)
      template = template.replace('{{%s}}' % key, val)
    return template

  # This script lives in tools/, so the source root is one level up.
  root = os.path.join(os.path.dirname(__file__), '..')
  third_party_dirs = FindThirdPartyDirs(PRUNE_PATHS, root)

  # Fall back to the checked-in default templates when none are supplied.
  if not file_template_file:
    file_template_file = os.path.join(root, 'components', 'about_ui',
                                      'resources', 'about_credits.tmpl')
  if not entry_template_file:
    entry_template_file = os.path.join(root, 'components', 'about_ui',
                                       'resources',
                                       'about_credits_entry.tmpl')

  entry_template = open(entry_template_file).read()
  entries = []
  for path in third_party_dirs:
    try:
      metadata = ParseDir(path, root)
    except LicenseError:
      # TODO(phajdan.jr): Convert to fatal error (http://crbug.com/39240).
      continue
    if metadata['License File'] == NOT_SHIPPED:
      continue
    if target_os == 'ios':
      # Skip over files that are known not to be used on iOS.
      if path in KNOWN_NON_IOS_LIBRARIES:
        continue
    env = {
        'name': metadata['Name'],
        'url': metadata['URL'],
        'license': open(metadata['License File'], 'rb').read(),
    }
    entry = {
        'name': metadata['Name'],
        'content': EvaluateTemplate(entry_template, env),
    }
    entries.append(entry)

  # Sort by name (then content) so the generated page is stable across runs.
  entries.sort(key=lambda entry: (entry['name'], entry['content']))
  entries_contents = '\n'.join([entry['content'] for entry in entries])
  file_template = open(file_template_file).read()
  template_contents = "<!-- Generated by licenses.py; do not edit. -->"
  template_contents += EvaluateTemplate(file_template,
                                        {'entries': entries_contents},
                                        escape=False)

  if output_file:
    with open(output_file, 'w') as output:
      output.write(template_contents)
  else:
    print template_contents

  return True
+
+
def main():
  """Command-line entry point; returns a nonzero exit code on failure."""
  parser = argparse.ArgumentParser()
  parser.add_argument('--file-template',
                      help='Template HTML to use for the license page.')
  parser.add_argument('--entry-template',
                      help='Template HTML to use for each license.')
  parser.add_argument('--target-os',
                      help='OS that this build is targeting.')
  parser.add_argument('command', choices=['help', 'scan', 'credits'])
  parser.add_argument('output_file', nargs='?')
  args = parser.parse_args()

  if args.command == 'scan':
    if not ScanThirdPartyDirs():
      return 1
  elif args.command == 'credits':
    if not GenerateCredits(args.file_template, args.entry_template,
                           args.output_file, args.target_os):
      return 1
  else:
    # 'help': print the module docstring's usage text.
    print __doc__
    return 1
  # On success this falls through returning None; sys.exit(None) exits 0.


if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/linux/OWNERS b/chromium/tools/linux/OWNERS
new file mode 100644
index 00000000000..2e285e40af2
--- /dev/null
+++ b/chromium/tools/linux/OWNERS
@@ -0,0 +1,2 @@
+thakis@chromium.org
+thestig@chromium.org
diff --git a/chromium/tools/linux/PRESUBMIT.py b/chromium/tools/linux/PRESUBMIT.py
new file mode 100644
index 00000000000..b29a8c47778
--- /dev/null
+++ b/chromium/tools/linux/PRESUBMIT.py
@@ -0,0 +1,45 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Top-level presubmit script for linux.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details on the presubmit API built into depot_tools.
+"""
+
+
def CommonChecks(input_api, output_api):
  """Checks shared by the upload and commit presubmit hooks."""
  import sys
  def join(*parts):
    return input_api.os_path.join(input_api.PresubmitLocalPath(), *parts)

  results = []
  saved_sys_path = sys.path
  try:
    # Make the tools/linux modules importable while pylint runs.
    sys.path = [join('..', 'linux')] + sys.path
    results.extend(input_api.canned_checks.RunPylint(input_api, output_api))
  finally:
    sys.path = saved_sys_path

  results.extend(
      input_api.canned_checks.RunUnitTestsInDirectory(
          input_api, output_api,
          input_api.os_path.join(input_api.PresubmitLocalPath(), 'tests'),
          whitelist=[r'.+_tests\.py$']))

  if input_api.is_committing:
    results.extend(input_api.canned_checks.PanProjectChecks(
        input_api, output_api, owners_check=False))
  return results
+
+
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook run when a change is uploaded for review."""
  return CommonChecks(input_api, output_api)
+
+
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook run when a change is committed."""
  return CommonChecks(input_api, output_api)
diff --git a/chromium/tools/linux/dump-static-initializers.py b/chromium/tools/linux/dump-static-initializers.py
new file mode 100755
index 00000000000..b71d0627482
--- /dev/null
+++ b/chromium/tools/linux/dump-static-initializers.py
@@ -0,0 +1,240 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Dump functions called by static intializers in a Linux Release binary.
+
+Usage example:
+ tools/linux/dump-static-intializers.py out/Release/chrome
+
+A brief overview of static initialization:
+1) the compiler writes out, per object file, a function that contains
+ the static intializers for that file.
+2) the compiler also writes out a pointer to that function in a special
+ section.
+3) at link time, the linker concatenates the function pointer sections
+ into a single list of all initializers.
+4) at run time, on startup the binary runs all function pointers.
+
+The functions in (1) all have mangled names of the form
+ _GLOBAL__I_foobar.cc
+using objdump, we can disassemble those functions and dump all symbols that
+they reference.
+"""
+
+import optparse
+import re
+import subprocess
+import sys
+
+# A map of symbol => informative text about it.
+NOTES = {
+ '__cxa_atexit@plt': 'registers a dtor to run at exit',
+ 'std::__ioinit': '#includes <iostream>, use <ostream> instead',
+}
+
+# Determine whether this is a git checkout (as opposed to e.g. svn).
+IS_GIT_WORKSPACE = (subprocess.Popen(
+ ['git', 'rev-parse'], stderr=subprocess.PIPE).wait() == 0)
+
class Demangler(object):
  """A wrapper around c++filt to provide a function to demangle symbols."""
  def __init__(self, toolchain):
    # Keep one long-lived c++filt subprocess; one symbol is piped through it
    # per Demangle() call.
    self.cppfilt = subprocess.Popen([toolchain + 'c++filt'],
                                    stdin=subprocess.PIPE,
                                    stdout=subprocess.PIPE)

  def Demangle(self, sym):
    """Given mangled symbol |sym|, return its demangled form."""
    # c++filt echoes back exactly one demangled line per input line.
    self.cppfilt.stdin.write(sym + '\n')
    return self.cppfilt.stdout.readline().strip()
+
# Matches for example: "cert_logger.pb.cc", capturing "cert_logger".
protobuf_filename_re = re.compile(r'(.*)\.pb\.cc$')
def QualifyFilenameAsProto(filename):
  """Attempt to qualify a bare |filename| with a src-relative path, assuming it
  is a protoc-generated file. If a single match is found, it is returned.
  Otherwise the original filename is returned."""
  if not IS_GIT_WORKSPACE:
    return filename
  match = protobuf_filename_re.match(filename)
  if not match:
    return filename
  # Use group(1), the captured basename. The old match.groups(0) returned the
  # whole tuple of groups and only worked by accident, because %-formatting
  # unpacks a 1-tuple.
  basename = match.group(1)
  gitlsfiles = subprocess.Popen(
      ['git', 'ls-files', '--', '*/%s.proto' % basename],
      stdout=subprocess.PIPE)
  candidate = filename
  for line in gitlsfiles.stdout:
    if candidate != filename:
      return filename  # Multiple hits, can't help.
    candidate = line.strip()
  return candidate
+
# Regex matching the substring of a symbol's demangled text representation most
# likely to appear in a source file.
# Example: "v8::internal::Builtins::InitBuiltinFunctionTable()" becomes
# "InitBuiltinFunctionTable", since the first (optional & non-capturing) group
# picks up any ::-qualification and the last fragment picks up a suffix that
# starts with an opener.
symbol_code_name_re = re.compile(r'^(?:[^(<[]*::)?([^:(<[]*).*?$')
def QualifyFilename(filename, symbol):
  """Given a bare filename and a symbol that occurs in it, attempt to qualify
  it with a src-relative path. If more than one file matches, return the
  original filename."""
  if not IS_GIT_WORKSPACE:
    return filename
  name_match = symbol_code_name_re.match(symbol)
  if not name_match:
    return filename
  code_name = name_match.group(1)
  grep_proc = subprocess.Popen(
      ['git', 'grep', '-l', code_name, '--', '*/%s' % filename],
      stdout=subprocess.PIPE)
  qualified = filename
  for grep_line in grep_proc.stdout:
    if qualified != filename:
      # More than one candidate; give up and return the bare filename.
      return filename
    qualified = grep_line.strip()
  return qualified
+
# Regex matching nm output for the symbols we're interested in.
# See test_ParseNmLine for examples.
nm_re = re.compile(r'(\S+) (\S+) t (?:_ZN12)?_GLOBAL__(?:sub_)?I_(.*)')
def ParseNmLine(line):
  """Parse one line of nm output.

  Returns a (file, start, size) tuple describing a static initializer, or
  None when the line does not describe one.
  """
  match = nm_re.match(line)
  if not match:
    return None
  addr, size, filename = match.groups()
  # nm prints the address and size columns in hexadecimal.
  return (filename, int(addr, 16), int(size, 16))
+
+
def test_ParseNmLine():
  """Verify the nm_re regex matches some sample lines."""
  expected = ('safe_browsing_service.cc', 26319136, 8)
  parse = ParseNmLine(
      '0000000001919920 0000000000000008 t '
      '_ZN12_GLOBAL__I_safe_browsing_service.cc')
  assert parse == expected, parse

  expected = ('extension_specifics.pb.cc', 40607408, 36)
  parse = ParseNmLine(
      '00000000026b9eb0 0000000000000024 t '
      '_GLOBAL__sub_I_extension_specifics.pb.cc')
  assert parse == expected, parse

# Just always run the test; it is fast enough.
test_ParseNmLine()
+
+
def ParseNm(toolchain, binary):
  """Yield (file, start, size) tuples for static initializers in |binary|.

  Runs the toolchain's nm with -S so symbol sizes are reported.
  """
  nm_proc = subprocess.Popen([toolchain + 'nm', '-S', binary],
                             stdout=subprocess.PIPE)
  for nm_line in nm_proc.stdout:
    parsed = ParseNmLine(nm_line)
    if parsed is not None:
      yield parsed
+
# Regex matching objdump output for the symbols we're interested in.
# Example line:
#     12354ab:  (disassembly, including <FunctionReference>)
disassembly_re = re.compile(r'^\s+[0-9a-f]+:.*<(\S+)>')
def ExtractSymbolReferences(toolchain, binary, start, end):
  """Given a span of addresses, returns symbol references from disassembly."""
  # Disassemble only the initializer's own address range.
  cmd = [toolchain + 'objdump', binary, '--disassemble',
         '--start-address=0x%x' % start, '--stop-address=0x%x' % end]
  objdump = subprocess.Popen(cmd, stdout=subprocess.PIPE)

  refs = set()
  for line in objdump.stdout:
    # Debug builds route ctors through this helper; the tool only makes
    # sense on Release binaries, so fail loudly.
    if '__static_initialization_and_destruction' in line:
      raise RuntimeError, ('code mentions '
                           '__static_initialization_and_destruction; '
                           'did you accidentally run this on a Debug binary?')
    match = disassembly_re.search(line)
    if match:
      (ref,) = match.groups()
      if ref.startswith('.LC') or ref.startswith('_DYNAMIC'):
        # Ignore these, they are uninformative.
        continue
      if ref.startswith('_GLOBAL__I_'):
        # Probably a relative jump within this function.
        continue
      refs.add(ref)

  # Sorted for deterministic, diff-friendly output.
  return sorted(refs)
+
def main():
  """Command-line entry point: dump the static initializers of a binary."""
  parser = optparse.OptionParser(usage='%prog [option] filename')
  parser.add_option('-d', '--diffable', dest='diffable',
                    action='store_true', default=False,
                    help='Prints the filename on each line, for more easily '
                         'diff-able output. (Used by sizes.py)')
  parser.add_option('-t', '--toolchain-prefix', dest='toolchain',
                    action='store', default='',
                    help='Toolchain prefix to append to all tool invocations '
                         '(nm, objdump).')
  opts, args = parser.parse_args()
  if len(args) != 1:
    parser.error('missing filename argument')
    return 1
  binary = args[0]

  demangler = Demangler(opts.toolchain)
  file_count = 0
  initializer_count = 0

  files = ParseNm(opts.toolchain, binary)
  if opts.diffable:
    # Sort so the output is stable across runs and can be diffed.
    files = sorted(files)
  for filename, addr, size in files:
    file_count += 1
    ref_output = []

    qualified_filename = QualifyFilenameAsProto(filename)

    if size == 2:
      # gcc generates a two-byte 'repz retq' initializer when there is a
      # ctor even when the ctor is empty. This is fixed in gcc 4.6, but
      # Android uses gcc 4.4.
      ref_output.append('[empty ctor, but it still has cost on gcc <4.6]')
    else:
      for ref in ExtractSymbolReferences(opts.toolchain, binary, addr,
                                         addr+size):
        initializer_count += 1

        ref = demangler.Demangle(ref)
        if qualified_filename == filename:
          # Bare filename so far; try to qualify it via the referenced symbol.
          qualified_filename = QualifyFilename(filename, ref)

        note = ''
        if ref in NOTES:
          note = NOTES[ref]
        elif ref.endswith('_2eproto()'):
          note = 'protocol compiler bug: crbug.com/105626'

        if note:
          ref_output.append('%s [%s]' % (ref, note))
        else:
          ref_output.append(ref)

    if opts.diffable:
      if ref_output:
        print '\n'.join('# ' + qualified_filename + ' ' + r for r in ref_output)
      else:
        print '# %s: (empty initializer list)' % qualified_filename
    else:
      print '%s (initializer offset 0x%x size 0x%x)' % (qualified_filename,
                                                        addr, size)
      print ''.join('  %s\n' % r for r in ref_output)

  if opts.diffable:
    # Emit the summary as a comment line so diffable output stays diffable.
    print '#',
  print 'Found %d static initializers in %d files.' % (initializer_count,
                                                       file_count)

  return 0

if '__main__' == __name__:
  sys.exit(main())
diff --git a/chromium/tools/linux/procfs.py b/chromium/tools/linux/procfs.py
new file mode 100755
index 00000000000..ef19b25ea57
--- /dev/null
+++ b/chromium/tools/linux/procfs.py
@@ -0,0 +1,747 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# A Python library to read and store procfs (/proc) information on Linux.
+#
+# Each information storage class in this file stores data as close to the
+# original as reasonably possible. Translation is done only when requested,
+# so that the original data can always be probed.
+
+
+import collections
+import logging
+import os
+import re
+import struct
+import sys
+
+
+class _NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+
+_LOGGER = logging.getLogger('procfs')
+_LOGGER.addHandler(_NullHandler())
+
+
+class ProcStat(object):
+ """Reads and stores information in /proc/pid/stat."""
+ _PATTERN = re.compile(r'^'
+ '(?P<PID>-?[0-9]+) '
+ '\((?P<COMM>.+)\) '
+ '(?P<STATE>[RSDZTW]) '
+ '(?P<PPID>-?[0-9]+) '
+ '(?P<PGRP>-?[0-9]+) '
+ '(?P<SESSION>-?[0-9]+) '
+ '(?P<TTY_NR>-?[0-9]+) '
+ '(?P<TPGID>-?[0-9]+) '
+ '(?P<FLAGS>[0-9]+) '
+ '(?P<MINFIT>[0-9]+) '
+ '(?P<CMINFIT>[0-9]+) '
+ '(?P<MAJFIT>[0-9]+) '
+ '(?P<CMAJFIT>[0-9]+) '
+ '(?P<UTIME>[0-9]+) '
+ '(?P<STIME>[0-9]+) '
+ '(?P<CUTIME>[0-9]+) '
+ '(?P<CSTIME>[0-9]+) '
+ '(?P<PRIORITY>[0-9]+) '
+ '(?P<NICE>[0-9]+) '
+ '(?P<NUM_THREADS>[0-9]+) '
+ '(?P<ITREALVALUE>[0-9]+) '
+ '(?P<STARTTIME>[0-9]+) '
+ '(?P<VSIZE>[0-9]+) '
+ '(?P<RSS>[0-9]+) '
+ '(?P<RSSLIM>[0-9]+) '
+ '(?P<STARTCODE>[0-9]+) '
+ '(?P<ENDCODE>[0-9]+) '
+ '(?P<STARTSTACK>[0-9]+) '
+ '(?P<KSTKESP>[0-9]+) '
+ '(?P<KSTKEIP>[0-9]+) '
+ '(?P<SIGNAL>[0-9]+) '
+ '(?P<BLOCKED>[0-9]+) '
+ '(?P<SIGIGNORE>[0-9]+) '
+ '(?P<SIGCATCH>[0-9]+) '
+ '(?P<WCHAN>[0-9]+) '
+ '(?P<NSWAP>[0-9]+) '
+ '(?P<CNSWAP>[0-9]+) '
+ '(?P<EXIT_SIGNAL>[0-9]+) '
+ '(?P<PROCESSOR>[0-9]+) '
+ '(?P<RT_PRIORITY>[0-9]+) '
+ '(?P<POLICY>[0-9]+) '
+ '(?P<DELAYACCT_BLKIO_TICKS>[0-9]+) '
+ '(?P<GUEST_TIME>[0-9]+) '
+ '(?P<CGUEST_TIME>[0-9]+)', re.IGNORECASE)
+
+ def __init__(self, raw, pid, vsize, rss):
+ self._raw = raw
+ self._pid = pid
+ self._vsize = vsize
+ self._rss = rss
+
+ @staticmethod
+ def load_file(stat_f):
+ raw = stat_f.readlines()
+ stat = ProcStat._PATTERN.match(raw[0])
+ return ProcStat(raw,
+ stat.groupdict().get('PID'),
+ stat.groupdict().get('VSIZE'),
+ stat.groupdict().get('RSS'))
+
+ @staticmethod
+ def load(pid):
+ try:
+ with open(os.path.join('/proc', str(pid), 'stat'), 'r') as stat_f:
+ return ProcStat.load_file(stat_f)
+ except IOError:
+ return None
+
+ @property
+ def raw(self):
+ return self._raw
+
+ @property
+ def pid(self):
+ return int(self._pid)
+
+ @property
+ def vsize(self):
+ return int(self._vsize)
+
+ @property
+ def rss(self):
+ return int(self._rss)
+
+
+class ProcStatm(object):
+ """Reads and stores information in /proc/pid/statm."""
+ _PATTERN = re.compile(r'^'
+ '(?P<SIZE>[0-9]+) '
+ '(?P<RESIDENT>[0-9]+) '
+ '(?P<SHARE>[0-9]+) '
+ '(?P<TEXT>[0-9]+) '
+ '(?P<LIB>[0-9]+) '
+ '(?P<DATA>[0-9]+) '
+ '(?P<DT>[0-9]+)', re.IGNORECASE)
+
+ def __init__(self, raw, size, resident, share, text, lib, data, dt):
+ self._raw = raw
+ self._size = size
+ self._resident = resident
+ self._share = share
+ self._text = text
+ self._lib = lib
+ self._data = data
+ self._dt = dt
+
+ @staticmethod
+ def load_file(statm_f):
+ try:
+ raw = statm_f.readlines()
+ except (IOError, OSError):
+ return None
+ statm = ProcStatm._PATTERN.match(raw[0])
+ return ProcStatm(raw,
+ statm.groupdict().get('SIZE'),
+ statm.groupdict().get('RESIDENT'),
+ statm.groupdict().get('SHARE'),
+ statm.groupdict().get('TEXT'),
+ statm.groupdict().get('LIB'),
+ statm.groupdict().get('DATA'),
+ statm.groupdict().get('DT'))
+
+ @staticmethod
+ def load(pid):
+ try:
+ with open(os.path.join('/proc', str(pid), 'statm'), 'r') as statm_f:
+ return ProcStatm.load_file(statm_f)
+ except (IOError, OSError):
+ return None
+
+ @property
+ def raw(self):
+ return self._raw
+
+ @property
+ def size(self):
+ return int(self._size)
+
+ @property
+ def resident(self):
+ return int(self._resident)
+
+ @property
+ def share(self):
+ return int(self._share)
+
+ @property
+ def text(self):
+ return int(self._text)
+
+ @property
+ def lib(self):
+ return int(self._lib)
+
+ @property
+ def data(self):
+ return int(self._data)
+
+ @property
+ def dt(self):
+ return int(self._dt)
+
+
+class ProcStatus(object):
+ """Reads and stores information in /proc/pid/status."""
+ _PATTERN = re.compile(r'^(?P<NAME>[A-Za-z0-9_]+):\s+(?P<VALUE>.*)')
+
+ def __init__(self, raw, dct):
+ self._raw = raw
+ self._pid = dct.get('Pid')
+ self._name = dct.get('Name')
+ self._vm_peak = dct.get('VmPeak')
+ self._vm_size = dct.get('VmSize')
+ self._vm_lck = dct.get('VmLck')
+ self._vm_pin = dct.get('VmPin')
+ self._vm_hwm = dct.get('VmHWM')
+ self._vm_rss = dct.get('VmRSS')
+ self._vm_data = dct.get('VmData')
+ self._vm_stack = dct.get('VmStk')
+ self._vm_exe = dct.get('VmExe')
+ self._vm_lib = dct.get('VmLib')
+ self._vm_pte = dct.get('VmPTE')
+ self._vm_swap = dct.get('VmSwap')
+
+ @staticmethod
+ def load_file(status_f):
+ raw = status_f.readlines()
+ dct = {}
+ for line in raw:
+ status_match = ProcStatus._PATTERN.match(line)
+ if status_match:
+ match_dict = status_match.groupdict()
+ dct[match_dict['NAME']] = match_dict['VALUE']
+ else:
+ raise SyntaxError('Unknown /proc/pid/status format.')
+ return ProcStatus(raw, dct)
+
+ @staticmethod
+ def load(pid):
+ with open(os.path.join('/proc', str(pid), 'status'), 'r') as status_f:
+ return ProcStatus.load_file(status_f)
+
+ @property
+ def raw(self):
+ return self._raw
+
+ @property
+ def pid(self):
+ return int(self._pid)
+
+ @property
+ def vm_peak(self):
+    """Returns a high-water (peak) virtual memory size in kilobytes."""
+ if self._vm_peak.endswith('kB'):
+ return int(self._vm_peak.split()[0])
+ raise ValueError('VmPeak is not in kB.')
+
+ @property
+ def vm_size(self):
+    """Returns a virtual memory size in kilobytes."""
+ if self._vm_size.endswith('kB'):
+ return int(self._vm_size.split()[0])
+ raise ValueError('VmSize is not in kB.')
+
+ @property
+ def vm_hwm(self):
+    """Returns a high-water (peak) resident set size (RSS) in kilobytes."""
+ if self._vm_hwm.endswith('kB'):
+ return int(self._vm_hwm.split()[0])
+ raise ValueError('VmHWM is not in kB.')
+
+ @property
+ def vm_rss(self):
+    """Returns a resident set size (RSS) in kilobytes."""
+ if self._vm_rss.endswith('kB'):
+ return int(self._vm_rss.split()[0])
+ raise ValueError('VmRSS is not in kB.')
+
+
+class ProcMapsEntry(object):
+ """A class representing one line in /proc/pid/maps."""
+
+ def __init__(
+ self, begin, end, readable, writable, executable, private, offset,
+ major, minor, inode, name):
+ self.begin = begin
+ self.end = end
+ self.readable = readable
+ self.writable = writable
+ self.executable = executable
+ self.private = private
+ self.offset = offset
+ self.major = major
+ self.minor = minor
+ self.inode = inode
+ self.name = name
+
+ def as_dict(self):
+ return {
+ 'begin': self.begin,
+ 'end': self.end,
+ 'readable': self.readable,
+ 'writable': self.writable,
+ 'executable': self.executable,
+ 'private': self.private,
+ 'offset': self.offset,
+ 'major': self.major,
+ 'minor': self.minor,
+ 'inode': self.inode,
+ 'name': self.name,
+ }
+
+
+class ProcMaps(object):
+ """Reads and stores information in /proc/pid/maps."""
+
+ MAPS_PATTERN = re.compile(
+ r'^([a-f0-9]+)-([a-f0-9]+)\s+(.)(.)(.)(.)\s+([a-f0-9]+)\s+(\S+):(\S+)\s+'
+ r'(\d+)\s*(.*)$', re.IGNORECASE)
+
+ EXECUTABLE_PATTERN = re.compile(
+ r'\S+\.(so|dll|dylib|bundle)((\.\d+)+\w*(\.\d+){0,3})?')
+
+ def __init__(self):
+ self._sorted_indexes = []
+ self._dictionary = {}
+ self._sorted = True
+
+ def iter(self, condition):
+ if not self._sorted:
+ self._sorted_indexes.sort()
+ self._sorted = True
+ for index in self._sorted_indexes:
+ if not condition or condition(self._dictionary[index]):
+ yield self._dictionary[index]
+
+ def __iter__(self):
+ if not self._sorted:
+ self._sorted_indexes.sort()
+ self._sorted = True
+ for index in self._sorted_indexes:
+ yield self._dictionary[index]
+
+ @staticmethod
+ def load_file(maps_f):
+ table = ProcMaps()
+ for line in maps_f:
+ table.append_line(line)
+ return table
+
+ @staticmethod
+ def load(pid):
+ try:
+ with open(os.path.join('/proc', str(pid), 'maps'), 'r') as maps_f:
+ return ProcMaps.load_file(maps_f)
+ except (IOError, OSError):
+ return None
+
+ def append_line(self, line):
+ entry = self.parse_line(line)
+ if entry:
+ self._append_entry(entry)
+ return entry
+
+ @staticmethod
+ def parse_line(line):
+ matched = ProcMaps.MAPS_PATTERN.match(line)
+ if matched:
+ return ProcMapsEntry( # pylint: disable=W0212
+ int(matched.group(1), 16), # begin
+ int(matched.group(2), 16), # end
+ matched.group(3), # readable
+ matched.group(4), # writable
+ matched.group(5), # executable
+ matched.group(6), # private
+ int(matched.group(7), 16), # offset
+ matched.group(8), # major
+ matched.group(9), # minor
+ int(matched.group(10), 10), # inode
+ matched.group(11) # name
+ )
+ else:
+ return None
+
+ @staticmethod
+ def constants(entry):
+ return entry.writable == '-' and entry.executable == '-'
+
+ @staticmethod
+ def executable(entry):
+ return entry.executable == 'x'
+
+ @staticmethod
+ def executable_and_constants(entry):
+ return ((entry.writable == '-' and entry.executable == '-') or
+ entry.executable == 'x')
+
+ def _append_entry(self, entry):
+ if self._sorted_indexes and self._sorted_indexes[-1] > entry.begin:
+ self._sorted = False
+ self._sorted_indexes.append(entry.begin)
+ self._dictionary[entry.begin] = entry
+
+
+class ProcSmaps(object):
+ """Reads and stores information in /proc/pid/smaps."""
+ _SMAPS_PATTERN = re.compile(r'^(?P<NAME>[A-Za-z0-9_]+):\s+(?P<VALUE>.*)')
+
+ class VMA(object):
+ def __init__(self):
+ self._size = 0
+ self._rss = 0
+ self._pss = 0
+
+ def append(self, name, value):
+ dct = {
+ 'Size': '_size',
+ 'Rss': '_rss',
+ 'Pss': '_pss',
+ 'Referenced': '_referenced',
+ 'Private_Clean': '_private_clean',
+ 'Shared_Clean': '_shared_clean',
+ 'KernelPageSize': '_kernel_page_size',
+ 'MMUPageSize': '_mmu_page_size',
+ }
+ if name in dct:
+ self.__setattr__(dct[name], value)
+
+ @property
+ def size(self):
+ if self._size.endswith('kB'):
+ return int(self._size.split()[0])
+ return int(self._size)
+
+ @property
+ def rss(self):
+ if self._rss.endswith('kB'):
+ return int(self._rss.split()[0])
+ return int(self._rss)
+
+ @property
+ def pss(self):
+ if self._pss.endswith('kB'):
+ return int(self._pss.split()[0])
+ return int(self._pss)
+
+ def __init__(self, raw, total_dct, maps, vma_internals):
+ self._raw = raw
+ self._size = total_dct['Size']
+ self._rss = total_dct['Rss']
+ self._pss = total_dct['Pss']
+ self._referenced = total_dct['Referenced']
+ self._shared_clean = total_dct['Shared_Clean']
+ self._private_clean = total_dct['Private_Clean']
+ self._kernel_page_size = total_dct['KernelPageSize']
+ self._mmu_page_size = total_dct['MMUPageSize']
+ self._maps = maps
+ self._vma_internals = vma_internals
+
+ @staticmethod
+ def load(pid):
+ with open(os.path.join('/proc', str(pid), 'smaps'), 'r') as smaps_f:
+ raw = smaps_f.readlines()
+
+ vma = None
+ vma_internals = collections.OrderedDict()
+ total_dct = collections.defaultdict(int)
+ maps = ProcMaps()
+ for line in raw:
+ maps_match = ProcMaps.MAPS_PATTERN.match(line)
+ if maps_match:
+ vma = maps.append_line(line.strip())
+ vma_internals[vma] = ProcSmaps.VMA()
+ else:
+ smaps_match = ProcSmaps._SMAPS_PATTERN.match(line)
+ if smaps_match:
+ match_dict = smaps_match.groupdict()
+ vma_internals[vma].append(match_dict['NAME'], match_dict['VALUE'])
+ total_dct[match_dict['NAME']] += int(match_dict['VALUE'].split()[0])
+
+ return ProcSmaps(raw, total_dct, maps, vma_internals)
+
+ @property
+ def size(self):
+ return self._size
+
+ @property
+ def rss(self):
+ return self._rss
+
+ @property
+ def referenced(self):
+ return self._referenced
+
+ @property
+ def pss(self):
+ return self._pss
+
+ @property
+ def private_clean(self):
+ return self._private_clean
+
+ @property
+ def shared_clean(self):
+ return self._shared_clean
+
+ @property
+ def kernel_page_size(self):
+ return self._kernel_page_size
+
+ @property
+ def mmu_page_size(self):
+ return self._mmu_page_size
+
+ @property
+ def vma_internals(self):
+ return self._vma_internals
+
+
+class ProcPagemap(object):
+ """Reads and stores partial information in /proc/pid/pagemap.
+
+ It picks up virtual addresses to read based on ProcMaps (/proc/pid/maps).
+ See https://www.kernel.org/doc/Documentation/vm/pagemap.txt for details.
+ """
+ _BYTES_PER_PAGEMAP_VALUE = 8
+ _BYTES_PER_OS_PAGE = 4096
+ _VIRTUAL_TO_PAGEMAP_OFFSET = _BYTES_PER_OS_PAGE / _BYTES_PER_PAGEMAP_VALUE
+
+ _MASK_PRESENT = 1 << 63
+ _MASK_SWAPPED = 1 << 62
+ _MASK_FILEPAGE_OR_SHAREDANON = 1 << 61
+ _MASK_SOFTDIRTY = 1 << 55
+ _MASK_PFN = (1 << 55) - 1
+
+ class VMA(object):
+ def __init__(self, vsize, present, swapped, pageframes):
+ self._vsize = vsize
+ self._present = present
+ self._swapped = swapped
+ self._pageframes = pageframes
+
+ @property
+ def vsize(self):
+ return int(self._vsize)
+
+ @property
+ def present(self):
+ return int(self._present)
+
+ @property
+ def swapped(self):
+ return int(self._swapped)
+
+ @property
+ def pageframes(self):
+ return self._pageframes
+
+ def __init__(self, vsize, present, swapped, vma_internals, in_process_dup):
+ self._vsize = vsize
+ self._present = present
+ self._swapped = swapped
+ self._vma_internals = vma_internals
+ self._in_process_dup = in_process_dup
+
+ @staticmethod
+ def load(pid, maps):
+ total_present = 0
+ total_swapped = 0
+ total_vsize = 0
+ in_process_dup = 0
+ vma_internals = collections.OrderedDict()
+ process_pageframe_set = set()
+
+ try:
+ pagemap_fd = os.open(
+ os.path.join('/proc', str(pid), 'pagemap'), os.O_RDONLY)
+ except (IOError, OSError):
+ return None
+ for vma in maps:
+ present = 0
+ swapped = 0
+ vsize = 0
+ pageframes = collections.defaultdict(int)
+ begin_offset = ProcPagemap._offset(vma.begin)
+ chunk_size = ProcPagemap._offset(vma.end) - begin_offset
+ try:
+ os.lseek(pagemap_fd, begin_offset, os.SEEK_SET)
+ buf = os.read(pagemap_fd, chunk_size)
+ except (IOError, OSError):
+ return None
+ if len(buf) < chunk_size:
+ _LOGGER.warn('Failed to read pagemap at 0x%x in %d.' % (vma.begin, pid))
+ pagemap_values = struct.unpack(
+ '=%dQ' % (len(buf) / ProcPagemap._BYTES_PER_PAGEMAP_VALUE), buf)
+ for pagemap_value in pagemap_values:
+ vsize += ProcPagemap._BYTES_PER_OS_PAGE
+ if pagemap_value & ProcPagemap._MASK_PRESENT:
+ if (pagemap_value & ProcPagemap._MASK_PFN) in process_pageframe_set:
+ in_process_dup += ProcPagemap._BYTES_PER_OS_PAGE
+ else:
+ process_pageframe_set.add(pagemap_value & ProcPagemap._MASK_PFN)
+ if (pagemap_value & ProcPagemap._MASK_PFN) not in pageframes:
+ present += ProcPagemap._BYTES_PER_OS_PAGE
+ pageframes[pagemap_value & ProcPagemap._MASK_PFN] += 1
+ if pagemap_value & ProcPagemap._MASK_SWAPPED:
+ swapped += ProcPagemap._BYTES_PER_OS_PAGE
+ vma_internals[vma] = ProcPagemap.VMA(vsize, present, swapped, pageframes)
+ total_present += present
+ total_swapped += swapped
+ total_vsize += vsize
+ try:
+ os.close(pagemap_fd)
+ except OSError:
+ return None
+
+ return ProcPagemap(total_vsize, total_present, total_swapped,
+ vma_internals, in_process_dup)
+
+ @staticmethod
+ def _offset(virtual_address):
+ return virtual_address / ProcPagemap._VIRTUAL_TO_PAGEMAP_OFFSET
+
+ @property
+ def vsize(self):
+ return int(self._vsize)
+
+ @property
+ def present(self):
+ return int(self._present)
+
+ @property
+ def swapped(self):
+ return int(self._swapped)
+
+ @property
+ def vma_internals(self):
+ return self._vma_internals
+
+
+class _ProcessMemory(object):
+ """Aggregates process memory information from /proc for manual testing."""
+ def __init__(self, pid):
+ self._pid = pid
+ self._maps = None
+ self._pagemap = None
+ self._stat = None
+ self._status = None
+ self._statm = None
+ self._smaps = []
+
+ def _read(self, proc_file):
+ lines = []
+ with open(os.path.join('/proc', str(self._pid), proc_file), 'r') as proc_f:
+ lines = proc_f.readlines()
+ return lines
+
+ def read_all(self):
+ self.read_stat()
+ self.read_statm()
+ self.read_status()
+ self.read_smaps()
+ self.read_maps()
+ self.read_pagemap(self._maps)
+
+ def read_maps(self):
+ self._maps = ProcMaps.load(self._pid)
+
+ def read_pagemap(self, maps):
+ self._pagemap = ProcPagemap.load(self._pid, maps)
+
+ def read_smaps(self):
+ self._smaps = ProcSmaps.load(self._pid)
+
+ def read_stat(self):
+ self._stat = ProcStat.load(self._pid)
+
+ def read_statm(self):
+ self._statm = ProcStatm.load(self._pid)
+
+ def read_status(self):
+ self._status = ProcStatus.load(self._pid)
+
+ @property
+ def pid(self):
+ return self._pid
+
+ @property
+ def maps(self):
+ return self._maps
+
+ @property
+ def pagemap(self):
+ return self._pagemap
+
+ @property
+ def smaps(self):
+ return self._smaps
+
+ @property
+ def stat(self):
+ return self._stat
+
+ @property
+ def statm(self):
+ return self._statm
+
+ @property
+ def status(self):
+ return self._status
+
+
+def main(argv):
+ """The main function for manual testing."""
+ _LOGGER.setLevel(logging.WARNING)
+ handler = logging.StreamHandler()
+ handler.setLevel(logging.WARNING)
+ handler.setFormatter(logging.Formatter(
+ '%(asctime)s:%(name)s:%(levelname)s:%(message)s'))
+ _LOGGER.addHandler(handler)
+
+ pids = []
+ for arg in argv[1:]:
+ try:
+ pid = int(arg)
+ except ValueError:
+ raise SyntaxError("%s is not an integer." % arg)
+ else:
+ pids.append(pid)
+
+ procs = {}
+ for pid in pids:
+ procs[pid] = _ProcessMemory(pid)
+ procs[pid].read_all()
+
+ print '=== PID: %d ===' % pid
+
+ print ' stat: %d' % procs[pid].stat.vsize
+ print ' statm: %d' % (procs[pid].statm.size * 4096)
+ print ' status: %d (Peak:%d)' % (procs[pid].status.vm_size * 1024,
+ procs[pid].status.vm_peak * 1024)
+ print ' smaps: %d' % (procs[pid].smaps.size * 1024)
+ print 'pagemap: %d' % procs[pid].pagemap.vsize
+ print ' stat: %d' % (procs[pid].stat.rss * 4096)
+ print ' statm: %d' % (procs[pid].statm.resident * 4096)
+ print ' status: %d (Peak:%d)' % (procs[pid].status.vm_rss * 1024,
+ procs[pid].status.vm_hwm * 1024)
+ print ' smaps: %d' % (procs[pid].smaps.rss * 1024)
+ print 'pagemap: %d' % procs[pid].pagemap.present
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/chromium/tools/luci-go/OWNERS b/chromium/tools/luci-go/OWNERS
new file mode 100644
index 00000000000..8b03e48bcd8
--- /dev/null
+++ b/chromium/tools/luci-go/OWNERS
@@ -0,0 +1,3 @@
+maruel@chromium.org
+tandrii@chromium.org
+vadimsh@chromium.org
diff --git a/chromium/tools/luci-go/README.md b/chromium/tools/luci-go/README.md
new file mode 100644
index 00000000000..a87c34fa4a8
--- /dev/null
+++ b/chromium/tools/luci-go/README.md
@@ -0,0 +1,10 @@
+# luci-go
+
+Contains executable built out of
+https://github.com/luci/luci-go/tree/master/client/cmd/.
+
+The binaries are retrieved from the following builders:
+
+- mac64: http://build.chromium.org/p/chromium.infra/builders/infra-continuous-mac-10.10-64/
+- linux64: http://build.chromium.org/p/chromium.infra/builders/infra-continuous-precise-64/
+- win64: http://build.chromium.org/p/chromium.infra/builders/infra-continuous-win-64/
diff --git a/chromium/tools/luci-go/linux64/isolate.sha1 b/chromium/tools/luci-go/linux64/isolate.sha1
new file mode 100644
index 00000000000..41d0add7969
--- /dev/null
+++ b/chromium/tools/luci-go/linux64/isolate.sha1
@@ -0,0 +1 @@
+cf7c1fac12790056ac393774827a5720c7590bac
diff --git a/chromium/tools/luci-go/mac64/isolate.sha1 b/chromium/tools/luci-go/mac64/isolate.sha1
new file mode 100644
index 00000000000..15744d663a3
--- /dev/null
+++ b/chromium/tools/luci-go/mac64/isolate.sha1
@@ -0,0 +1 @@
+4678a9332ef5a7b90b184763afee1c100981f710
diff --git a/chromium/tools/luci-go/win64/isolate.exe.sha1 b/chromium/tools/luci-go/win64/isolate.exe.sha1
new file mode 100644
index 00000000000..7c5b7ebf6e2
--- /dev/null
+++ b/chromium/tools/luci-go/win64/isolate.exe.sha1
@@ -0,0 +1 @@
+98457ff4fc79d05661fea53d2b3aff70fac90022
diff --git a/chromium/tools/mac/OWNERS b/chromium/tools/mac/OWNERS
new file mode 100644
index 00000000000..2e285e40af2
--- /dev/null
+++ b/chromium/tools/mac/OWNERS
@@ -0,0 +1,2 @@
+thakis@chromium.org
+thestig@chromium.org
diff --git a/chromium/tools/mac/dump-static-initializers.py b/chromium/tools/mac/dump-static-initializers.py
new file mode 100755
index 00000000000..3a2c125062d
--- /dev/null
+++ b/chromium/tools/mac/dump-static-initializers.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Dumps a list of files with static initializers. Use with release builds.
+
+Usage:
+ tools/mac/dump-static-initializers.py out/Release/Chromium\ Framework.framework.dSYM/Contents/Resources/DWARF/Chromium\ Framework
+
+Do NOT use mac_strip_release=0 or component=shared_library if you want to use
+this script.
+"""
+
+import optparse
+import re
+import subprocess
+import sys
+
+# Matches for example:
+# [ 1] 000001ca 64 (N_SO ) 00 0000 0000000000000000 'test.cc'
+dsymutil_file_re = re.compile("N_SO.*'([^']*)'")
+
+# Matches for example:
+# [ 2] 000001d2 66 (N_OSO ) 00 0001 000000004ed856a0 '/Volumes/MacintoshHD2/src/chrome-git/src/test.o'
+dsymutil_o_file_re = re.compile("N_OSO.*'([^']*)'")
+
+# Matches for example:
+# [ 8] 00000233 24 (N_FUN ) 01 0000 0000000000001b40 '__GLOBAL__I_s'
+# [185989] 00dc69ef 26 (N_STSYM ) 02 0000 00000000022e2290 '__GLOBAL__I_a'
+dsymutil_re = re.compile(r"(?:N_FUN|N_STSYM).*\s[0-9a-f]*\s'__GLOBAL__I_")
+
+def ParseDsymutil(binary):
+ """Given a binary, prints source and object filenames for files with
+ static initializers.
+ """
+
+ child = subprocess.Popen(['dsymutil', '-s', binary], stdout=subprocess.PIPE)
+ for line in child.stdout:
+ file_match = dsymutil_file_re.search(line)
+ if file_match:
+ current_filename = file_match.group(1)
+ else:
+ o_file_match = dsymutil_o_file_re.search(line)
+ if o_file_match:
+ current_o_filename = o_file_match.group(1)
+ else:
+ match = dsymutil_re.search(line)
+ if match:
+ print current_filename
+ print current_o_filename
+ print
+
+
+def main():
+ parser = optparse.OptionParser(usage='%prog filename')
+ opts, args = parser.parse_args()
+ if len(args) != 1:
+ parser.error('missing filename argument')
+ return 1
+ binary = args[0]
+
+ ParseDsymutil(binary)
+ return 0
+
+
+if '__main__' == __name__:
+ sys.exit(main())
diff --git a/chromium/tools/mac/symbolicate_crash.py b/chromium/tools/mac/symbolicate_crash.py
new file mode 100755
index 00000000000..731cc85f8dc
--- /dev/null
+++ b/chromium/tools/mac/symbolicate_crash.py
@@ -0,0 +1,504 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script can take an Apple-style CrashReporter log and symbolicate it. This
+is useful for when a user's reports aren't being uploaded, for example.
+
+Only versions 6, 7, 8, and 9 reports are supported. For more information on the
+file format, reference this document:
+ TN2123 <http://developer.apple.com/library/mac/#technotes/tn2004/tn2123.html>
+
+Information on symbolication was gleaned from:
+ <http://developer.apple.com/tools/xcode/symbolizingcrashdumps.html>
+"""
+
+import optparse
+import os.path
+import re
+import subprocess
+import sys
+
+# Maps binary image identifiers to binary names (minus the .dSYM portion) found
+# in the archive. These are the only objects that will be looked up.
+SYMBOL_IMAGE_MAP = {
+ 'com.google.Chrome': 'Google Chrome.app',
+ 'com.google.Chrome.framework': 'Google Chrome Framework.framework',
+ 'com.google.Chrome.helper': 'Google Chrome Helper.app'
+}
+
+class CrashReport(object):
+ """A parsed representation of an Apple CrashReport text file."""
+ def __init__(self, file_name):
+ super(CrashReport, self).__init__()
+ self.report_info = {}
+ self.threads = []
+ self._binary_images = {}
+
+ fd = open(file_name, 'r')
+ self._ParseHeader(fd)
+
+ # Try and get the report version. If it's not a version we handle, abort.
+ self.report_version = int(self.report_info['Report Version'])
+ # Version 6: 10.5 and 10.6 crash report
+ # Version 7: 10.6 spindump report
+ # Version 8: 10.7 spindump report
+ # Version 9: 10.7 crash report
+ valid_versions = (6, 7, 8, 9)
+ if self.report_version not in valid_versions:
+ raise Exception("Only crash reports of versions %s are accepted." %
+ str(valid_versions))
+
+ # If this is a spindump (version 7 or 8 report), use a special parser. The
+ # format is undocumented, but is similar to version 6. However, the spindump
+ # report contains user and kernel stacks for every process on the system.
+ if self.report_version == 7 or self.report_version == 8:
+ self._ParseSpindumpStack(fd)
+ else:
+ self._ParseStack(fd)
+
+ self._ParseBinaryImages(fd)
+ fd.close()
+
+ def Symbolicate(self, symbol_path):
+ """Symbolicates a crash report stack trace."""
+ # In order to be efficient, collect all the offsets that will be passed to
+ # atos by the image name.
+ offsets_by_image = self._CollectAddressesForImages(SYMBOL_IMAGE_MAP.keys())
+
+ # For each image, run atos with the list of addresses.
+ for image_name, addresses in offsets_by_image.items():
+ # If this image was not loaded or is in no stacks, skip.
+ if image_name not in self._binary_images or not len(addresses):
+ continue
+
+ # Combine the |image_name| and |symbol_path| into the path of the dSYM.
+ dsym_file = self._GetDSymPath(symbol_path, image_name)
+
+ # From the list of 2-Tuples of (frame, address), create a list of just
+ # addresses.
+ address_list = map(lambda x: x[1], addresses)
+
+ # Look up the load address of the image.
+ binary_base = self._binary_images[image_name][0]
+
+ # This returns a list of just symbols. The indices will match up with the
+ # list of |addresses|.
+ symbol_names = self._RunAtos(binary_base, dsym_file, address_list)
+ if not symbol_names:
+ print 'Error loading symbols for ' + image_name
+ continue
+
+ # Attaches a list of symbol names to stack frames. This assumes that the
+ # order of |addresses| has stayed the same as |symbol_names|.
+ self._AddSymbolsToFrames(symbol_names, addresses)
+
+ def _ParseHeader(self, fd):
+ """Parses the header section of a crash report, which contains the OS and
+ application version information."""
+ # The header is made up of different sections, depending on the type of
+ # report and the report version. Almost all have a format of a key and
+ # value separated by a colon. Accumulate all of these artifacts into a
+ # dictionary until the first thread stack is reached.
+ thread_re = re.compile('^[ \t]*Thread ([a-f0-9]+)')
+ line = ''
+ while not thread_re.match(line):
+ # Skip blank lines. There are typically three or four sections separated
+ # by newlines in the header.
+ line = line.strip()
+ if line:
+ parts = line.split(':', 1)
+ # Certain lines in different report versions don't follow the key-value
+ # format, so skip them.
+ if len(parts) == 2:
+ # There's a varying amount of space padding after the ':' to align all
+ # the values; strip that.
+ self.report_info[parts[0]] = parts[1].lstrip()
+ line = fd.readline()
+
+ # When this loop exits, the header has been read in full. However, the first
+ # thread stack heading has been read past. Seek backwards from the current
+ # position by the length of the line so that it is re-read when
+ # _ParseStack() is entered.
+ fd.seek(-len(line), os.SEEK_CUR)
+
+ def _ParseStack(self, fd):
+ """Parses the stack dump of a crash report and creates a list of threads
+ and their stack traces."""
+ # Compile a regex that matches the start of a thread stack. Note that this
+ # must be specific to not include the thread state section, which comes
+ # right after all the stack traces.
+ line_re = re.compile('^Thread ([0-9]+)( Crashed)?:(.*)')
+
+ # On entry into this function, the fd has been walked up to the "Thread 0"
+ # line.
+ line = fd.readline().rstrip()
+ in_stack = False
+ thread = None
+ while line_re.match(line) or in_stack:
+ # Check for start of the thread stack.
+ matches = line_re.match(line)
+
+ if not line.strip():
+ # A blank line indicates a break in the thread stack.
+ in_stack = False
+ elif matches:
+ # If this is the start of a thread stack, create the CrashThread.
+ in_stack = True
+ thread = CrashThread(matches.group(1))
+ thread.name = matches.group(3)
+ thread.did_crash = matches.group(2) != None
+ self.threads.append(thread)
+ else:
+ # All other lines are stack frames.
+ thread.stack.append(self._ParseStackFrame(line))
+ # Read the next line.
+ line = fd.readline()
+
+ def _ParseStackFrame(self, line):
+ """Takes in a single line of text and transforms it into a StackFrame."""
+ frame = StackFrame(line)
+
+ # A stack frame is in the format of:
+ # |<frame-number> <binary-image> 0x<address> <symbol> <offset>|.
+ regex = '^([0-9]+) +(.+)[ \t]+(0x[0-9a-f]+) (.*) \+ ([0-9]+)$'
+ matches = re.match(regex, line)
+ if matches is None:
+ return frame
+
+ # Create a stack frame with the information extracted from the regex.
+ frame.frame_id = matches.group(1)
+ frame.image = matches.group(2)
+ frame.address = int(matches.group(3), 0) # Convert HEX to an int.
+ frame.original_symbol = matches.group(4)
+ frame.offset = matches.group(5)
+ frame.line = None
+ return frame
+
+ def _ParseSpindumpStack(self, fd):
+ """Parses a spindump stack report. In this format, each thread stack has
+ both a user and kernel trace. Only the user traces are symbolicated."""
+
+ # The stack trace begins with the thread header, which is identified by a
+ # HEX number. The thread names appear to be incorrect in spindumps.
+ user_thread_re = re.compile('^ Thread ([0-9a-fx]+)')
+
+ # When this method is called, the fd has been walked right up to the first
+ # line.
+ line = fd.readline()
+ in_user_stack = False
+ in_kernel_stack = False
+ thread = None
+ frame_id = 0
+ while user_thread_re.match(line) or in_user_stack or in_kernel_stack:
+ # Check for the start of a thread.
+ matches = user_thread_re.match(line)
+
+ if not line.strip():
+ # A blank line indicates the start of a new thread. The blank line comes
+ # after the kernel stack before a new thread header.
+ in_kernel_stack = False
+ elif matches:
+ # This is the start of a thread header. The next line is the heading for
+ # the user stack, followed by the actual trace.
+ thread = CrashThread(matches.group(1))
+ frame_id = 0
+ self.threads.append(thread)
+ in_user_stack = True
+ line = fd.readline() # Read past the 'User stack:' header.
+ elif line.startswith(' Kernel stack:'):
+ # The kernel stack header comes immediately after the last frame (really
+ # the top frame) in the user stack, without a blank line.
+ in_user_stack = False
+ in_kernel_stack = True
+ elif in_user_stack:
+ # If this is a line while in the user stack, parse it as a stack frame.
+ thread.stack.append(self._ParseSpindumpStackFrame(line))
+ # Loop with the next line.
+ line = fd.readline()
+
+ # When the loop exits, the file has been read through the 'Binary images:'
+ # header. Seek backwards so that _ParseBinaryImages() does the right thing.
+ fd.seek(-len(line), os.SEEK_CUR)
+
+ def _ParseSpindumpStackFrame(self, line):
+ """Parses a spindump-style stackframe."""
+ frame = StackFrame(line)
+
+ # The format of the frame is either:
+ # A: |<space><steps> <symbol> + <offset> (in <image-name>) [<address>]|
+ # B: |<space><steps> ??? (in <image-name> + <offset>) [<address>]|
+ regex_a = '^([ ]+[0-9]+) (.*) \+ ([0-9]+) \(in (.*)\) \[(0x[0-9a-f]+)\]'
+ regex_b = '^([ ]+[0-9]+) \?\?\?( \(in (.*) \+ ([0-9]+)\))? \[(0x[0-9a-f]+)\]'
+
+ # Create the stack frame with the information extracted from the regex.
+ matches = re.match(regex_a, line)
+ if matches:
+ frame.frame_id = matches.group(1)[4:] # Remove some leading spaces.
+ frame.original_symbol = matches.group(2)
+ frame.offset = matches.group(3)
+ frame.image = matches.group(4)
+ frame.address = int(matches.group(5), 0)
+ frame.line = None
+ return frame
+
+ # If pattern A didn't match (which it will most of the time), try B.
+ matches = re.match(regex_b, line)
+ if matches:
+ frame.frame_id = matches.group(1)[4:] # Remove some leading spaces.
+ frame.image = matches.group(3)
+ frame.offset = matches.group(4)
+ frame.address = int(matches.group(5), 0)
+ frame.line = None
+ return frame
+
+ # Otherwise, this frame could not be matched and just use the raw input.
+ frame.line = frame.line.strip()
+ return frame
+
+ def _ParseBinaryImages(self, fd):
+ """Parses out the binary images section in order to get the load offset."""
+ # The parser skips some sections, so advance until the "Binary Images"
+ # header is reached.
+ while not fd.readline().lstrip().startswith("Binary Images:"): pass
+
+ # Create a regex to match the lines of format:
+ # |0x<start> - 0x<end> <binary-image> <version> (<version>) <<UUID>> <path>|
+ image_re = re.compile(
+ '[ ]*(0x[0-9a-f]+) -[ \t]+(0x[0-9a-f]+) [+ ]([a-zA-Z0-9._\-]+)')
+
+ # This section is in this format:
+ # |<start address> - <end address> <image name>|.
+ while True:
+ line = fd.readline()
+ if not line.strip():
+ # End when a blank line is hit.
+ return
+ # Match the line to the regex.
+ match = image_re.match(line)
+ if match:
+ # Store the offsets by image name so it can be referenced during
+ # symbolication. These are hex numbers with leading '0x', so int() can
+ # convert them to decimal if base=0.
+ address_range = (int(match.group(1), 0), int(match.group(2), 0))
+ self._binary_images[match.group(3)] = address_range
+
+ def _CollectAddressesForImages(self, images):
+ """Iterates all the threads and stack frames and all the stack frames that
+ are in a list of binary |images|. The result is a dictionary, keyed by the
+ image name that maps to a list of tuples. Each is a 2-Tuple of
+ (stack_frame, address)"""
+ # Create the collection and initialize it with empty lists for each image.
+ collection = {}
+ for image in images:
+ collection[image] = []
+
+ # Perform the iteration.
+ for thread in self.threads:
+ for frame in thread.stack:
+ image_name = self._ImageForAddress(frame.address)
+ if image_name in images:
+ # Replace the image name in the frame in case it was elided.
+ frame.image = image_name
+ collection[frame.image].append((frame, frame.address))
+
+ # Return the result.
+ return collection
+
+ def _ImageForAddress(self, address):
+ """Given a PC address, returns the bundle identifier of the image in which
+ the address resides."""
+ for image_name, address_range in self._binary_images.items():
+ if address >= address_range[0] and address <= address_range[1]:
+ return image_name
+ return None
+
+ def _GetDSymPath(self, base_path, image_name):
+ """Takes a base path for the symbols and an image name. It looks the name up
+ in SYMBOL_IMAGE_MAP and creates a full path to the dSYM in the bundle."""
+ image_file = SYMBOL_IMAGE_MAP[image_name]
+ return os.path.join(base_path, image_file + '.dSYM', 'Contents',
+ 'Resources', 'DWARF',
+ os.path.splitext(image_file)[0]) # Chop off the extension.
+
+ def _RunAtos(self, load_address, dsym_file, addresses):
+ """Runs the atos with the provided arguments. |addresses| is used as stdin.
+ Returns a list of symbol information in the same order as |addresses|."""
+ args = ['atos', '-l', str(load_address), '-o', dsym_file]
+
+ # Get the arch type. This is of the format |X86 (Native)|.
+ if 'Code Type' in self.report_info:
+ arch = self.report_info['Code Type'].lower().split(' ')
+ if len(arch) == 2:
+ arch = arch[0]
+ if arch == 'x86':
+ # The crash report refers to i386 as x86, but atos doesn't know what
+ # that is.
+ arch = 'i386'
+ args.extend(['-arch', arch])
+
+ proc = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ addresses = map(hex, addresses)
+ (stdout, stderr) = proc.communicate(' '.join(addresses))
+ if proc.returncode:
+ return None
+ return stdout.rstrip().split('\n')
+
+ def _AddSymbolsToFrames(self, symbols, address_tuples):
+ """Takes a single value (the list) from _CollectAddressesForImages and does
+ a smart-zip with the data returned by atos in |symbols|. Note that the
+ indices must match for this to succeed."""
+ if len(symbols) != len(address_tuples):
+ print 'symbols do not match'
+
+ # Each line of output from atos is in this format:
+ # |<symbol> (in <image>) (<file>:<line>)|.
+ line_regex = re.compile('(.+) \(in (.+)\) (\((.+):([0-9]+)\))?')
+
+ # Zip the two data sets together.
+ for i in range(len(symbols)):
+ symbol_parts = line_regex.match(symbols[i])
+ if not symbol_parts:
+ continue # Error.
+ frame = address_tuples[i][0]
+ frame.symbol = symbol_parts.group(1)
+ frame.image = symbol_parts.group(2)
+ frame.file_name = symbol_parts.group(4)
+ frame.line_number = symbol_parts.group(5)
+
+
class CrashThread(object):
  """Holds the stack trace of a single thread from the crash report."""

  def __init__(self, thread_id):
    super(CrashThread, self).__init__()
    self.thread_id = thread_id  # Thread identifier parsed from the report.
    self.name = None            # Optional thread name, if the report had one.
    self.did_crash = False      # True when this is the crashing thread.
    self.stack = []             # StackFrame objects for this thread.

  def __repr__(self):
    name_suffix = (': ' + self.name) if self.name else ''
    frames = '\n'.join(str(frame) for frame in self.stack)
    return 'Thread ' + self.thread_id + name_suffix + '\n' + frames
+
+
class StackFrame(object):
  """A single frame within a CrashThread's stack."""

  def __init__(self, line):
    super(StackFrame, self).__init__()
    # The original report line. Set to None once the frame has been
    # successfully symbolicated.
    self.line = line

    self.frame_id = 0
    self.image = None
    self.address = 0x0
    self.original_symbol = None
    self.offset = 0x0
    # The following members are set after symbolication.
    self.symbol = None
    self.file_name = None
    self.line_number = 0

  def __repr__(self):
    # An unsymbolicated frame just echoes the raw report line.
    if self.line:
      return ' %s' % self.line

    # Prefer file:line information when symbolication produced it; fall back
    # to the raw offset otherwise.
    if self.file_name:
      location = ' - %s:%s' % (self.file_name, self.line_number)
    else:
      location = ' + %s' % self.offset

    # Likewise prefer the symbolicated name over the one from the report.
    display_symbol = self.symbol if self.symbol else self.original_symbol

    return ' %s\t0x%x\t[%s\t%s]\t%s' % (self.frame_id, self.address,
                                        self.image, location, display_symbol)
+
+
+def PrettyPrintReport(report):
+ """Takes a crash report and prints it like the crash server would."""
+ print 'Process : ' + report.report_info['Process']
+ print 'Version : ' + report.report_info['Version']
+ print 'Date : ' + report.report_info['Date/Time']
+ print 'OS Version : ' + report.report_info['OS Version']
+ print
+ if 'Crashed Thread' in report.report_info:
+ print 'Crashed Thread : ' + report.report_info['Crashed Thread']
+ print
+ if 'Event' in report.report_info:
+ print 'Event : ' + report.report_info['Event']
+ print
+
+ for thread in report.threads:
+ print
+ if thread.did_crash:
+ exc_type = report.report_info['Exception Type'].split(' ')[0]
+ exc_code = report.report_info['Exception Codes'].replace('at', '@')
+ print '*CRASHED* ( ' + exc_type + ' / ' + exc_code + ' )'
+ # Version 7 reports have spindump-style output (with a stepped stack trace),
+ # so remove the first tab to get better alignment.
+ if report.report_version == 7:
+ for line in repr(thread).split('\n'):
+ print line.replace('\t', ' ', 1)
+ else:
+ print thread
+
+
def Main(args):
  """Program main.

  |args| is sys.argv: the positionals are the symbol path (or, with -s, the
  symbol root directory) followed by the crash report file. Returns a process
  exit code (0 on success).
  """
  parser = optparse.OptionParser(
      usage='%prog [options] symbol_path crash_report',
      description='This will parse and symbolicate an Apple CrashReporter v6-9 '
                  'file.')
  parser.add_option('-s', '--std-path', action='store_true', dest='std_path',
                    help='With this flag, the symbol_path is a containing '
                         'directory, in which dSYM files are stored in a '
                         'directory named by the version. Example: '
                         '[symbolicate_crash.py -s ./symbols/ report.crash] '
                         'will look for dSYMs in ./symbols/15.0.666.0/ if the '
                         'report is from that version.')
  (options, args) = parser.parse_args(args[1:])

  # Check that we have something to symbolicate.
  if len(args) != 2:
    parser.print_usage()
    return 1

  report = CrashReport(args[1])
  symbol_path = None

  # If not using the standard layout, this is a full path to the symbols.
  if not options.std_path:
    symbol_path = args[0]
  # Otherwise, use the report version to locate symbols in a directory.
  else:
    # This is in the format of |M.N.B.P (B.P)|. Get just the part before the
    # space.
    chrome_version = report.report_info['Version'].split(' ')[0]
    symbol_path = os.path.join(args[0], chrome_version)

  # Check that the symbols exist.
  if not os.path.isdir(symbol_path):
    print >>sys.stderr, 'Symbol path %s is not a directory' % symbol_path
    return 2

  print >>sys.stderr, 'Using symbols from ' + symbol_path
  print >>sys.stderr, '=' * 80

  report.Symbolicate(symbol_path)
  PrettyPrintReport(report)
  return 0
+
+
# Allow direct invocation as a script; exit with Main's return code.
if __name__ == '__main__':
  sys.exit(Main(sys.argv))
diff --git a/chromium/tools/mb/OWNERS b/chromium/tools/mb/OWNERS
new file mode 100644
index 00000000000..2140fa57afc
--- /dev/null
+++ b/chromium/tools/mb/OWNERS
@@ -0,0 +1,4 @@
+brettw@chromium.org
+phajdan.jr@chromium.org
+dpranke@chromium.org
+scottmg@chromium.org
diff --git a/chromium/tools/mb/PRESUBMIT.py b/chromium/tools/mb/PRESUBMIT.py
new file mode 100644
index 00000000000..8a8d9b704c8
--- /dev/null
+++ b/chromium/tools/mb/PRESUBMIT.py
@@ -0,0 +1,40 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
def _CommonChecks(input_api, output_api):
  """Checks shared by the upload and commit presubmit hooks."""
  results = []

  # Run Pylint over the files in the directory.
  results.extend(input_api.RunTests(
      input_api.canned_checks.GetPylint(input_api, output_api)))

  # Run the MB unittests.
  results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
      input_api, output_api, '.', [ r'^.+_unittest\.py$']))

  # Validate the format of the mb_config.pyl file by running mb.py itself.
  validate_command = input_api.Command(
      name='mb_validate',
      cmd=[input_api.python_executable, 'mb.py', 'validate'],
      kwargs={'cwd': input_api.PresubmitLocalPath()},
      message=output_api.PresubmitError)
  results.extend(input_api.RunTests([validate_command]))

  # Enforce the 80-column limit on mb_config.pyl.
  results.extend(input_api.canned_checks.CheckLongLines(
      input_api,
      output_api,
      maxlen=80,
      source_file_filter=lambda x: 'mb_config.pyl' in x.LocalPath()))

  return results
+
+
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook run when the change is uploaded for review."""
  return _CommonChecks(input_api, output_api)
+
+
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook run when the change is committed."""
  return _CommonChecks(input_api, output_api)
diff --git a/chromium/tools/mb/README.md b/chromium/tools/mb/README.md
new file mode 100644
index 00000000000..4e73a8e9fc7
--- /dev/null
+++ b/chromium/tools/mb/README.md
@@ -0,0 +1,22 @@
+# MB - The Meta-Build wrapper
+
+MB is a simple wrapper intended to provide a uniform interface to either
+GYP or GN, such that users and bots can call one script and not need to
+worry about whether a given bot is meant to use GN or GYP.
+
+It supports two main functions:
+
+1. "gen" - the main `gyp_chromium` / `gn gen` invocation that generates the
+ Ninja files needed for the build.
+
+2. "analyze" - the step that takes a list of modified files and a list of
+ desired targets and reports which targets will need to be rebuilt.
+
+We also use MB as a forcing function to collect all of the different
+build configurations that we actually support for Chromium builds into
+one place, in `//tools/mb/mb_config.pyl`.
+
+For more information, see:
+
+* [The User Guide](docs/user_guide.md)
+* [The Design Spec](docs/design_spec.md)
diff --git a/chromium/tools/mb/docs/README.md b/chromium/tools/mb/docs/README.md
new file mode 100644
index 00000000000..f29007d9ede
--- /dev/null
+++ b/chromium/tools/mb/docs/README.md
@@ -0,0 +1,4 @@
+# The MB (Meta-Build wrapper) documentation
+
+* The [User Guide](user_guide.md)
+* The [Design Spec](design_spec.md)
diff --git a/chromium/tools/mb/docs/design_spec.md b/chromium/tools/mb/docs/design_spec.md
new file mode 100644
index 00000000000..f49b9c8c88c
--- /dev/null
+++ b/chromium/tools/mb/docs/design_spec.md
@@ -0,0 +1,439 @@
+# The MB (Meta-Build wrapper) design spec
+
+[TOC]
+
+## Intro
+
+MB is intended to address two major aspects of the GYP -> GN transition
+for Chromium:
+
+1. "bot toggling" - make it so that we can easily flip a given bot
+ back and forth between GN and GYP.
+
+2. "bot configuration" - provide a single source of truth for all of
+ the different configurations (os/arch/`gyp_define` combinations) of
+ Chromium that are supported.
+
+MB must handle at least the `gen` and `analyze` steps on the bots, i.e.,
+we need to wrap both the `gyp_chromium` invocation to generate the
+Ninja files, and the `analyze` step that takes a list of modified files
+and a list of targets to build and returns which targets are affected by
+the files.
+
+For more information on how to actually use MB, see
+[the user guide](user_guide.md).
+
+## Design
+
+MB is intended to be as simple as possible, and to defer as much work as
+possible to GN or GYP. It should live as a very simple Python wrapper
+that offers little in the way of surprises.
+
+### Command line
+
+It is structured as a single binary that supports a list of subcommands:
+
+* `mb gen -c linux_rel_bot //out/Release`
+* `mb analyze -m tryserver.chromium.linux -b linux_rel /tmp/input.json /tmp/output.json`
+
+### Configurations
+
+`mb` will first look for a bot config file in a set of different locations
+(initially just in //ios/build/bots). Bot config files are JSON files that
+contain keys for 'GYP_DEFINES' (a list of strings that will be joined together
+with spaces and passed to GYP, or a dict that will be similarly converted),
+'gn_args' (a list of strings that will be joined together), and an
+'mb_type' field that says whether to use GN or GYP. Bot config files
+require the full list of settings to be given explicitly.
+
+If no matching bot config file is found, `mb` looks in the
+`//tools/mb/mb_config.pyl` config file to determine whether to use GYP or GN
+for a particular build directory, and what set of flags (`GYP_DEFINES` or `gn
+args`) to use.
+
+A config can either be specified directly (useful for testing) or by specifying
+the master name and builder name (useful on the bots so that they do not need
+to specify a config directly and can be hidden from the details).
+
+See the [user guide](user_guide.md#mb_config.pyl) for details.
+
+### Handling the analyze step
+
+The interface to `mb analyze` is described in the
+[user\_guide](user_guide.md#mb_analyze).
+
+The way analyze works can be subtle and complicated (see below).
+
+Since the interface basically mirrors the way the "analyze" step on the bots
+invokes `gyp_chromium` today, when the config is found to be a gyp config,
+the arguments are passed straight through.
+
+It implements the equivalent functionality in GN by calling `gn refs
+[list of files] --type=executable --all --as=output` and filtering the
+output to match the list of targets.
+
+## Analyze
+
+The goal of the `analyze` step is to speed up the cycle time of the try servers
+by only building and running the tests affected by the files in a patch, rather
+than everything that might be out of date. Doing this ends up being tricky.
+
+We start with the following requirements and observations:
+
+* In an ideal (un-resource-constrained) world, we would build and test
+ everything that a patch affected on every patch. This does not
+ necessarily mean that we would build 'all' on every patch (see below).
+
+* In the real world, however, we do not have an infinite number of machines,
+ and try jobs are not infinitely fast, so we need to balance the desire
+ to get maximum test coverage against the desire to have reasonable cycle
+ times, given the number of machines we have.
+
+* Also, since we run most try jobs against tip-of-tree Chromium, by
+ the time one job completes on the bot, new patches have probably landed,
+ rendering the build out of date.
+
+* This means that the next try job may have to do a build that is out of
+ date due to a combination of files affected by a given patch, and files
+ affected for unrelated reasons. We want to rebuild and test only the
+ targets affected by the patch, so that we don't blame or punish the
+ patch author for unrelated changes.
+
+So:
+
+1. We need a way to indicate which changed files we care about and which
+ we don't (the affected files of a patch).
+
+2. We need to know which tests we might potentially want to run, and how
+ those are mapped onto build targets. For some kinds of tests (like
+ GTest-based tests), the mapping is 1:1 - if you want to run base_unittests,
+ you need to build base_unittests. For others (like the telemetry and
+ layout tests), you might need to build several executables in order to
+ run the tests, and that mapping might best be captured by a *meta*
+ target (a GN group or a GYP 'none' target like `webkit_tests`) that
+ depends on the right list of files. Because the GN and GYP files know
+ nothing about test steps, we have to have some way of mapping back
+ and forth between test steps and build targets. That mapping
+ is *not* currently available to MB (or GN or GYP), and so we have to
+ enough information to make it possible for the caller to do the mapping.
+
+3. We might also want to know when test targets are affected by data files
+ that aren't compiled (python scripts, or the layout tests themselves).
+ There's no good way to do this in GYP, but GN supports this.
+
+4. We also want to ensure that particular targets still compile even if they
+ are not actually tested; consider testing the installers themselves, or
+ targets that don't yet have good test coverage. We might want to use meta
+ targets for this purpose as well.
+
+5. However, for some meta targets, we don't necessarily want to rebuild the
+ meta target itself, perhaps just the dependencies of the meta target that
+ are affected by the patch. For example, if you have a meta target like
+ `blink_tests` that might depend on ten different test binaries. If a patch
+ only affects one of them (say `wtf_unittests`), you don't want to
+ build `blink_tests`, because that might actually also build the other nine
+ targets. In other words, some meta targets are *prunable*.
+
+6. As noted above, in the ideal case we actually have enough resources and
+ things are fast enough that we can afford to build everything affected by a
+ patch, but listing every possible target explicitly would be painful. The
+ GYP and GN Ninja generators provide an 'all' target that captures (nearly,
+ see [crbug.com/503241](crbug.com/503241)) everything, but unfortunately
+ neither GN nor GYP actually represents 'all' as a meta target in the build
+ graph, so we will need to write code to handle that specially.
+
+7. In some cases, we will not be able to correctly analyze the build graph to
+   determine the impact of a patch, and need to bail out (e.g., if you change a
+ build file itself, it may not be easy to tell how that affects the graph).
+ In that case we should simply build and run everything.
+
+The interaction between 2) and 5) means that we need to treat meta targets
+two different ways, and so we need to know which targets should be
+pruned in the sense of 5) and which targets should be returned unchanged
+so that we can map them back to the appropriate tests.
+
+So, we need three things as input:
+
+* `files`: the list of files in the patch
+* `test_targets`: the list of ninja targets which, if affected by a patch,
+ should be reported back so that we can map them back to the appropriate
+ tests to run. Any meta targets in this list should *not* be pruned.
+* `additional_compile_targets`: the list of ninja targets we wish to compile
+ *in addition to* the list in `test_targets`. Any meta targets
+ present in this list should be pruned (we don't need to return the
+ meta targets because they aren't mapped back to tests, and we don't want
+ to build them because we might build too much).
+
+We can then return two lists as output:
+
+* `compile_targets`, which is a list of pruned targets to be
+ passed to Ninja to build. It is acceptable to replace a list of
+ pruned targets by a meta target if it turns out that all of the
+  dependencies of the target are affected by the patch (i.e.,
+ all ten binaries that blink_tests depends on), but doing so is
+ not required.
+* `test_targets`, which is a list of unpruned targets to be mapped
+ back to determine which tests to run.
+
+There may be substantial overlap between the two lists, but there is
+no guarantee that one is a subset of the other and the two cannot be
+used interchangeably or merged together without losing information and
+causing the wrong thing to happen.
+
+The implementation is responsible for recognizing 'all' as a magic string
+and mapping it onto the list of all root nodes in the build graph.
+
+There may be files listed in the input that don't actually exist in the build
+graph: this could be either the result of an error (the file should be in the
+build graph, but isn't), or perfectly fine (the file doesn't affect the build
+graph at all). We can't tell these two apart, so we should ignore missing
+files.
+
+There may be targets listed in the input that don't exist in the build
+graph; unlike missing files, this can only indicate a configuration error,
+and so we should return which targets are missing so the caller can
+treat this as an error, if so desired.
+
+Any of the three inputs may be an empty list:
+
+* It normally doesn't make sense to call analyze at all if no files
+ were modified, but in rare cases we can hit a race where we try to
+ test a patch after it has already been committed, in which case
+ the list of modified files is empty. We should return 'no dependency'
+ in that case.
+
+* Passing an empty list for one or the other of test_targets and
+ additional_compile_targets is perfectly sensible: in the former case,
+ it can indicate that you don't want to run any tests, and in the latter,
+  it can indicate that you don't want to build anything else in
+ addition to the test targets.
+
+* It doesn't make sense to call analyze if you don't want to compile
+ anything at all, so passing [] for both test_targets and
+ additional_compile_targets should probably return an error.
+
+In the output case, an empty list indicates that there was nothing to
+build, or that there were no affected test targets as appropriate.
+
+Note that passing no arguments to Ninja is equivalent to passing
+`all` to Ninja (at least given how GN and GYP work); however, we
+don't want to take advantage of this in most cases because we don't
+actually want to build every out of date target, only the targets
+potentially affected by the files. One could try to indicate
+to analyze that we wanted to use no arguments instead of an empty
+list, but using the existing fields for this seems fragile and/or
+confusing, and adding a new field for this seems unwarranted at this time.
+
+There is an "error" field in case something goes wrong (like the
+empty file list case, above, or an internal error in MB/GYP/GN). The
+analyze code should also return an error code to the shell if appropriate
+to indicate that the command failed.
+
+In the case where build files themselves are modified and analyze may
+not be able to determine a correct answer (point 7 above, where we return
+"Found dependency (all)"), we should also return the `test_targets` unmodified
+and return the union of `test_targets` and `additional_compile_targets` for
+`compile_targets`, to avoid confusion.
+
+### Examples
+
+Continuing the example given above, suppose we have the following build
+graph:
+
+* `blink_tests` is a meta target that depends on `webkit_unit_tests`,
+ `wtf_unittests`, and `webkit_tests` and represents all of the targets
+ needed to fully test Blink. Each of those is a separate test step.
+* `webkit_tests` is also a meta target; it depends on `content_shell`
+ and `image_diff`.
+* `base_unittests` is a separate test binary.
+* `wtf_unittests` depends on `Assertions.cpp` and `AssertionsTest.cpp`.
+* `webkit_unit_tests` depends on `WebNode.cpp` and `WebNodeTest.cpp`.
+* `content_shell` depends on `WebNode.cpp` and `Assertions.cpp`.
+* `base_unittests` depends on `logging.cc` and `logging_unittest.cc`.
+
+#### Example 1
+
+We wish to run 'wtf_unittests' and 'webkit_tests' on a bot, but not
+compile any additional targets.
+
+If a patch touches WebNode.cpp, then analyze gets as input:
+
+ {
+ "files": ["WebNode.cpp"],
+ "test_targets": ["wtf_unittests", "webkit_tests"],
+ "additional_compile_targets": []
+ }
+
+and should return as output:
+
+ {
+ "status": "Found dependency",
+ "compile_targets": ["webkit_unit_tests"],
+ "test_targets": ["webkit_tests"]
+ }
+
+Note how `webkit_tests` was pruned in compile_targets but not in test_targets.
+
+#### Example 2
+
+Using the same patch as Example 1, assume we wish to run only `wtf_unittests`,
+but additionally build everything needed to test Blink (`blink_tests`):
+
+We pass as input:
+
+ {
+ "files": ["WebNode.cpp"],
+ "test_targets": ["wtf_unittests"],
+ "additional_compile_targets": ["blink_tests"]
+ }
+
+And should get as output:
+
+ {
+ "status": "Found dependency",
+ "compile_targets": ["webkit_unit_tests"],
+ "test_targets": []
+ }
+
+Here `blink_tests` was pruned in the output compile_targets, and
+test_targets was empty, since blink_tests was not listed in the input
+test_targets.
+
+#### Example 3
+
+Build everything, but do not run any tests.
+
+Input:
+
+ {
+ "files": ["WebNode.cpp"],
+ "test_targets": [],
+ "additional_compile_targets": ["all"]
+ }
+
+Output:
+
+ {
+ "status": "Found dependency",
+ "compile_targets": ["webkit_unit_tests", "content_shell"],
+ "test_targets": []
+ }
+
+#### Example 4
+
+Same as Example 2, but a build file was modified instead of a source file.
+
+Input:
+
+ {
+ "files": ["BUILD.gn"],
+ "test_targets": ["wtf_unittests"],
+ "additional_compile_targets": ["blink_tests"]
+ }
+
+Output:
+
+ {
+ "status": "Found dependency (all)",
+ "compile_targets": ["webkit_unit_tests", "wtf_unittests"],
+ "test_targets": ["wtf_unittests"]
+ }
+
+test_targets was returned unchanged, compile_targets was pruned.
+
+## Random Requirements and Rationale
+
+This section is collection of semi-organized notes on why MB is the way
+it is ...
+
+### in-tree or out-of-tree
+
+The first issue is whether or not this should exist as a script in
+Chromium at all; an alternative would be to simply change the bot
+configurations to know whether to use GYP or GN, and which flags to
+pass.
+
+That would certainly work, but experience over the past two years
+suggests a few things:
+
+ * we should push as much logic as we can into the source repositories
+ so that they can be versioned and changed atomically with changes to
+ the product code; having to coordinate changes between src/ and
+ build/ is at best annoying and can lead to weird errors.
+ * the infra team would really like to move to providing
+ product-independent services (i.e., not have to do one thing for
+ Chromium, another for NaCl, a third for V8, etc.).
+ * we found that during the SVN->GIT migration the ability to flip bot
+ configurations between the two via changes to a file in chromium
+ was very useful.
+
+All of this suggests that the interface between bots and Chromium should
+be a simple one, hiding as much of the chromium logic as possible.
+
+### Why not have MB be smarter about de-duping flags?
+
+This just adds complexity to the MB implementation, and duplicates logic
+that GYP and GN already have to support anyway; in particular, it might
+require MB to know how to parse GYP and GN values. The belief is that
+if MB does *not* do this, it will lead to fewer surprises.
+
+It will not be hard to change this if need be.
+
+### Integration w/ gclient runhooks
+
+On the bots, we will disable `gyp_chromium` as part of runhooks (using
+`GYP_CHROMIUM_NO_ACTION=1`), so that mb shows up as a separate step.
+
+At the moment, we expect most developers to either continue to use
+`gyp_chromium` in runhooks or to disable it as above if they have no
+use for GYP at all. We may revisit how this works once we encourage more
+people to use GN full-time (i.e., we might take `gyp_chromium` out of
+runhooks altogether).
+
+### Config per flag set or config per (os/arch/flag set)?
+
+Currently, mb_config.pyl does not specify the host_os, target_os, host_cpu, or
+target_cpu values for every config that Chromium runs on, it only specifies
+them for when the values need to be explicitly set on the command line.
+
+Instead, we have one config per unique combination of flags only.
+
+In other words, rather than having `linux_rel_bot`, `win_rel_bot`, and
+`mac_rel_bot`, we just have `rel_bot`.
+
+This design allows us to determine easily all of the different sets
+of flags that we need to support, but *not* which flags are used on which
+host/target combinations.
+
+It may be that we should really track the latter. Doing so is just a
+config file change, however.
+
+### Non-goals
+
+* MB is not intended to replace direct invocation of GN or GYP for
+ complicated build scenarios (aka ChromeOS), where multiple flags need
+ to be set to user-defined paths for specific toolchains (e.g., where
+ ChromeOS needs to specify specific board types and compilers).
+
+* MB is not intended at this time to be something developers use frequently,
+ or to add a lot of features to. We hope to be able to get rid of it once
+ the GYP->GN migration is done, and so we should not add things for
+ developers that can't easily be added to GN itself.
+
+* MB is not intended to replace the
+ [CR tool](https://code.google.com/p/chromium/wiki/CRUserManual). Not
+ only is it only intended to replace the gyp\_chromium part of `'gclient
+ runhooks'`, it is not really meant as a developer-facing tool.
+
+### Open issues
+
+* Some common flags (goma\_dir being the obvious one) may need to be
+ specified via the user, and it's unclear how to integrate this with
+ the concept of build\_configs.
+
+ Right now, MB has hard-coded support for a few flags (i.e., you can
+ pass the --goma-dir flag, and it will know to expand "${goma\_dir}" in
+ the string before calling out to the tool. We may want to generalize
+ this to a common key/value approach (perhaps then meeting the
+ ChromeOS non-goal, above), or we may want to keep this very strictly
+ limited for simplicity.
diff --git a/chromium/tools/mb/docs/user_guide.md b/chromium/tools/mb/docs/user_guide.md
new file mode 100644
index 00000000000..01ac5b7a111
--- /dev/null
+++ b/chromium/tools/mb/docs/user_guide.md
@@ -0,0 +1,288 @@
+# The MB (Meta-Build wrapper) user guide
+
+[TOC]
+
+## Introduction
+
+`mb` is a simple python wrapper around the GYP and GN meta-build tools to
+be used as part of the GYP->GN migration.
+
+It is intended to be used by bots to make it easier to manage the configuration
+each bot builds (i.e., the configurations can be changed from chromium
+commits), and to consolidate the list of all of the various configurations
+that Chromium is built in.
+
+Ideally this tool will no longer be needed after the migration is complete.
+
+For more discussion of MB, see also [the design spec](design_spec.md).
+
+## MB subcommands
+
+### `mb analyze`
+
+`mb analyze` is responsible for determining what targets are affected by
+a list of files (e.g., the list of files in a patch on a trybot):
+
+```
+mb analyze -c chromium_linux_rel //out/Release input.json output.json
+```
+
+Either the `-c/--config` flag or the `-m/--master` and `-b/--builder` flags
+must be specified so that `mb` can figure out which config to use.
+
+The first positional argument must be a GN-style "source-absolute" path
+to the build directory.
+
+The second positional argument is a (normal) path to a JSON file containing
+a single object with the following fields:
+
+ * `files`: an array of the modified filenames to check (as paths relative to
+ the checkout root).
+ * `test_targets`: an array of (ninja) build targets that are needed to run the
+ tests we wish to run. An empty array will be treated as if there are
+ no tests that will be run.
+ * `additional_compile_targets`: an array of (ninja) build targets that
+ reflect the stuff we might want to build *in addition to* the list
+ passed in `test_targets`. Targets in this list will be treated
+ specially, in the following way: if a given target is a "meta"
+ (GN: group, GYP: none) target like 'blink_tests' or
+ 'chromium_builder_tests', or even the ninja-specific 'all' target,
+ then only the *dependencies* of the target that are affected by
+ the modified files will be rebuilt (not the target itself, which
+ might also cause unaffected dependencies to be rebuilt). An empty
+ list will be treated as if there are no additional targets to build.
+ Empty lists for both `test_targets` and `additional_compile_targets`
+ would cause no work to be done, so will result in an error.
+ * `targets`: a legacy field that resembles a union of `compile_targets`
+ and `test_targets`. Support for this field will be removed once the
+ bots have been updated to use compile_targets and test_targets instead.
+
+The third positional argument is a (normal) path to where mb will write
+the result, also as a JSON object. This object may contain the following
+fields:
+
+ * `error`: this should only be present if something failed.
+ * `compile_targets`: the list of ninja targets that should be passed
+ directly to the corresponding ninja / compile.py invocation. This
+ list may contain entries that are *not* listed in the input (see
+ the description of `additional_compile_targets` above and
+ [the design spec](design_spec.md) for how this works).
+ * `invalid_targets`: a list of any targets that were passed in
+ either of the input lists that weren't actually found in the graph.
+ * `test_targets`: the subset of the input `test_targets` that are
+ potentially out of date, indicating that the matching test steps
+ should be re-run.
+ * `targets`: a legacy field that indicates the subset of the input `targets`
+ that depend on the input `files`.
+ * `build_targets`: a legacy field that indicates the minimal subset of
+ targets needed to build all of `targets` that were affected.
+ * `status`: a field containing one of three strings:
+
+ * `"Found dependency"` (build the `compile_targets`)
+ * `"No dependency"` (i.e., no build needed)
+ * `"Found dependency (all)"` (`test_targets` is returned as-is;
+ `compile_targets` should contain the union of `test_targets` and
+ `additional_compile_targets`. In this case the targets do not
+ need to be pruned).
+
+See [the design spec](design_spec.md) for more details and examples; the
+differences can be subtle. We won't even go into how the `targets` and
+`build_targets` differ from each other or from `compile_targets` and
+`test_targets`.
+
+The `-b/--builder`, `-c/--config`, `-f/--config-file`, `-m/--master`,
+`-q/--quiet`, and `-v/--verbose` flags work as documented for `mb gen`.
+
+### `mb audit`
+
+`mb audit` is used to track the progress of the GYP->GN migration. You can
+use it to check a single master, or all the masters we care about. See
+`mb help audit` for more details (most people are not expected to care about
+this).
+
+### `mb gen`
+
+`mb gen` is responsible for generating the Ninja files by invoking either GYP
+or GN as appropriate. It takes arguments to specify a build config and
+a directory, then runs GYP or GN as appropriate:
+
+```
+% mb gen -m tryserver.chromium.linux -b linux_rel //out/Release
+% mb gen -c linux_rel_trybot //out/Release
+```
+
+Either the `-c/--config` flag or the `-m/--master` and `-b/--builder` flags
+must be specified so that `mb` can figure out which config to use.
+
+By default, MB will look for a bot config file under `//ios/build/bots` (see
+[the design spec](design_spec.md) for details of how the bot config files
+work). If no matching one is found, MB will then look in
+`//tools/mb/mb_config.pyl` to look up the config information, but you can
+specify a custom config file using the `-f/--config-file` flag.
+
+The path must be a GN-style "source-absolute" path (as above).
+
+You can pass the `-n/--dryrun` flag to mb gen to see what will happen without
+actually writing anything.
+
+You can pass the `-q/--quiet` flag to get mb to be silent unless there is an
+error, and pass the `-v/--verbose` flag to get mb to log all of the files
+that are read and written, and all the commands that are run.
+
+If the build config will use the Goma distributed-build system, you can pass
+the path to your Goma client in the `-g/--goma-dir` flag, and it will be
+incorporated into the appropriate flags for GYP or GN as needed.
+
+If gen ends up using GYP, the path must have a valid GYP configuration as the
+last component of the path (i.e., specify `//out/Release_x64`, not `//out`).
+
+### `mb help`
+
+Produces help output on the other subcommands.
+
+### `mb lookup`
+
+Prints what command will be run by `mb gen` (like `mb gen -n` but does
+not require you to specify a path).
+
+The `-b/--builder`, `-c/--config`, `-f/--config-file`, `-m/--master`,
+`-q/--quiet`, and `-v/--verbose` flags work as documented for `mb gen`.
+
+### `mb validate`
+
+Does internal checking to make sure the config file is syntactically
+valid and that all of the entries are used properly. It does not validate
+that the flags make sense, or that the builder names are legal or
+comprehensive, but it does complain about configs and mixins that aren't
+used.
+
+The `-f/--config-file` and `-q/--quiet` flags work as documented for
+`mb gen`.
+
+This is mostly useful as a presubmit check and for verifying changes to
+the config file.
+
+## Isolates and Swarming
+
+`mb gen` is also responsible for generating the `.isolate` and
+`.isolated.gen.json` files needed to run test executables through swarming
+in a GN build (in a GYP build, this is done as part of the compile step).
+
+If you wish to generate the isolate files, pass `mb gen` the
+`--swarming-targets-file` command line argument; that arg should be a path
+to a file containing a list of ninja build targets to compute the runtime
+dependencies for (on Windows, use the ninja target name, not the file, so
+`base_unittests`, not `base_unittests.exe`).
+
+MB will take this file, translate each build target to the matching GN
+label (e.g., `base_unittests` -> `//base:base_unittests`, write that list
+to a file called `runtime_deps` in the build directory, and pass that to
+`gn gen $BUILD ... --runtime-deps-list-file=$BUILD/runtime_deps`.
+
+Once GN has computed the lists of runtime dependencies, MB will then
+look up the command line for each target (currently this is hard-coded
+in [mb.py](https://code.google.com/p/chromium/codesearch?q=mb.py#chromium/src/tools/mb/mb.py&q=mb.py%20GetIsolateCommand&sq=package:chromium&type=cs)), and write out the
+matching `.isolate` and `.isolated.gen.json` files.
+
+## The `mb_config.pyl` config file
+
+The `mb_config.pyl` config file is intended to enumerate all of the
+supported build configurations for Chromium. Generally speaking, you
+should never need to (or want to) build a configuration that isn't
+listed here, and so by using the configs in this file you can avoid
+having to juggle long lists of GYP_DEFINES and gn args by hand.
+
+`mb_config.pyl` is structured as a file containing a single PYthon Literal
+expression: a dictionary with three main keys, `masters`, `configs` and
+`mixins`.
+
+The `masters` key contains a nested series of dicts containing mappings
+of master -> builder -> config. This allows us to isolate the buildbot
+recipes from the actual details of the configs.
+
+The `configs` key points to a dictionary of named build
+configurations.
+
+There should be a key in this dict for every supported configuration
+of Chromium, meaning every configuration we have a bot for, and every
+configuration commonly used by developers but that we may not have a bot
+for.
+
+The value of each key is a list of "mixins" that will define what that
+build_config does. Each item in the list must be an entry in the dictionary
+value of the `mixins` key.
+
+Each mixin value is itself a dictionary that contains one or more of the
+following keys:
+
+ * `gyp_crosscompile`: a boolean; if true, GYP_CROSSCOMPILE=1 is set in
+ the environment and passed to GYP.
+ * `gyp_defines`: a string containing a list of GYP_DEFINES.
+ * `gn_args`: a string containing a list of values passed to gn --args.
+ * `mixins`: a list of other mixins that should be included.
+ * `type`: a string with either the value `gyp` or `gn`;
+ setting this indicates which meta-build tool to use.
+
+When `mb gen` or `mb analyze` executes, it takes a config name, looks it
+up in the 'configs' dict, and then does a left-to-right expansion of the
+mixins; gyp_defines and gn_args values are concatenated, and the type values
+override each other.
+
+For example, if you had:
+
+```
+{
+ 'configs': {
+ 'linux_release_trybot': ['gyp_release', 'bot', 'trybot'],
+ 'gn_shared_debug': ['gn', 'shared', 'debug'],
+ },
+ 'mixins': {
+ 'bot': {
+ 'gyp_defines': 'use_goma=1 dcheck_always_on=0',
+ 'gn_args': 'use_goma=true dcheck_always_on=false',
+ },
+ 'debug': {
+ 'gn_args': 'is_debug=true',
+ },
+ 'gn': {'type': 'gn'},
+ 'gyp_release': {
+ 'mixins': ['release'],
+ 'type': 'gyp',
+ },
+ 'release': {
+ 'gn_args': 'is_debug=false',
+ },
+ 'shared': {
+ 'gn_args': 'is_component_build=true',
+ 'gyp_defines': 'component=shared_library',
+ },
+ 'trybot': {
+ 'gyp_defines': 'dcheck_always_on=1',
+ 'gn_args': 'dcheck_always_on=true',
+ }
+ }
+}
+```
+
+and you ran `mb gen -c linux_release_trybot //out/Release`, it would
+translate into a call to `gyp_chromium -G Release` with `GYP_DEFINES` set to
+`"use_goma=1 dcheck_always_on=0 dcheck_always_on=1"`.
+
+(From that you can see that mb is intentionally dumb and does not
+attempt to de-dup the flags, it lets gyp do that).
+
+## Debugging MB
+
+By design, MB should be simple enough that very little can go wrong.
+
+The most obvious issue is that you might see different commands being
+run than you expect; running `'mb -v'` will print what it's doing and
+run the commands; `'mb -n'` will print what it will do but *not* run
+the commands.
+
+If you hit weirder things than that, add some print statements to the
+python script, send a question to gn-dev@chromium.org, or
+[file a bug](https://crbug.com/new) with the label
+'mb' and cc: dpranke@chromium.org.
+
+
diff --git a/chromium/tools/mb/mb b/chromium/tools/mb/mb
new file mode 100755
index 00000000000..d3a0cdf019c
--- /dev/null
+++ b/chromium/tools/mb/mb
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+base_dir=$(dirname "$0")
+
+PYTHONDONTWRITEBYTECODE=1 exec python "$base_dir/mb.py" "$@"
diff --git a/chromium/tools/mb/mb.bat b/chromium/tools/mb/mb.bat
new file mode 100755
index 00000000000..a82770e714a
--- /dev/null
+++ b/chromium/tools/mb/mb.bat
@@ -0,0 +1,6 @@
+@echo off
+setlocal
+:: This is required with cygwin only.
+PATH=%~dp0;%PATH%
+set PYTHONDONTWRITEBYTECODE=1
+call python "%~dp0mb.py" %*
diff --git a/chromium/tools/mb/mb.py b/chromium/tools/mb/mb.py
new file mode 100755
index 00000000000..39e810c1a67
--- /dev/null
+++ b/chromium/tools/mb/mb.py
@@ -0,0 +1,1360 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""MB - the Meta-Build wrapper around GYP and GN
+
+MB is a wrapper script for GYP and GN that can be used to generate build files
+for sets of canned configurations and analyze them.
+"""
+
+from __future__ import print_function
+
+import argparse
+import ast
+import errno
+import json
+import os
+import pipes
+import pprint
+import re
+import shutil
+import sys
+import subprocess
+import tempfile
+import urllib2
+
+from collections import OrderedDict
+
+def main(args):
+ mbw = MetaBuildWrapper()
+ return mbw.Main(args)
+
+
+class MetaBuildWrapper(object):
+ def __init__(self):
+ p = os.path
+ d = os.path.dirname
+ self.chromium_src_dir = p.normpath(d(d(d(p.abspath(__file__)))))
+ self.default_config = p.join(self.chromium_src_dir, 'tools', 'mb',
+ 'mb_config.pyl')
+ self.executable = sys.executable
+ self.platform = sys.platform
+ self.sep = os.sep
+ self.args = argparse.Namespace()
+ self.configs = {}
+ self.masters = {}
+ self.mixins = {}
+
+ def Main(self, args):
+ self.ParseArgs(args)
+ try:
+ ret = self.args.func()
+ if ret:
+ self.DumpInputFiles()
+ return ret
+ except KeyboardInterrupt:
+ self.Print('interrupted, exiting', stream=sys.stderr)
+ return 130
+ except Exception as e:
+ self.DumpInputFiles()
+ self.Print(str(e))
+ return 1
+
+ def ParseArgs(self, argv):
+ def AddCommonOptions(subp):
+ subp.add_argument('-b', '--builder',
+ help='builder name to look up config from')
+ subp.add_argument('-m', '--master',
+ help='master name to look up config from')
+ subp.add_argument('-c', '--config',
+ help='configuration to analyze')
+ subp.add_argument('-f', '--config-file', metavar='PATH',
+ default=self.default_config,
+ help='path to config file '
+ '(default is //tools/mb/mb_config.pyl)')
+ subp.add_argument('-g', '--goma-dir', default=self.ExpandUser('~/goma'),
+ help='path to goma directory (default is %(default)s).')
+ subp.add_argument('-n', '--dryrun', action='store_true',
+ help='Do a dry run (i.e., do nothing, just print '
+ 'the commands that will run)')
+ subp.add_argument('-v', '--verbose', action='store_true',
+ help='verbose logging')
+
+ parser = argparse.ArgumentParser(prog='mb')
+ subps = parser.add_subparsers()
+
+ subp = subps.add_parser('analyze',
+ help='analyze whether changes to a set of files '
+ 'will cause a set of binaries to be rebuilt.')
+ AddCommonOptions(subp)
+ subp.add_argument('path', nargs=1,
+ help='path build was generated into.')
+ subp.add_argument('input_path', nargs=1,
+ help='path to a file containing the input arguments '
+ 'as a JSON object.')
+ subp.add_argument('output_path', nargs=1,
+ help='path to a file containing the output arguments '
+ 'as a JSON object.')
+ subp.set_defaults(func=self.CmdAnalyze)
+
+ subp = subps.add_parser('gen',
+ help='generate a new set of build files')
+ AddCommonOptions(subp)
+ subp.add_argument('--swarming-targets-file',
+ help='save runtime dependencies for targets listed '
+ 'in file.')
+ subp.add_argument('path', nargs=1,
+ help='path to generate build into')
+ subp.set_defaults(func=self.CmdGen)
+
+ subp = subps.add_parser('isolate',
+ help='generate the .isolate files for a given'
+ 'binary')
+ AddCommonOptions(subp)
+ subp.add_argument('path', nargs=1,
+ help='path build was generated into')
+ subp.add_argument('target', nargs=1,
+ help='ninja target to generate the isolate for')
+ subp.set_defaults(func=self.CmdIsolate)
+
+ subp = subps.add_parser('lookup',
+ help='look up the command for a given config or '
+ 'builder')
+ AddCommonOptions(subp)
+ subp.set_defaults(func=self.CmdLookup)
+
+ subp = subps.add_parser(
+ 'run',
+ help='build and run the isolated version of a '
+ 'binary',
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+ subp.description = (
+ 'Build, isolate, and run the given binary with the command line\n'
+ 'listed in the isolate. You may pass extra arguments after the\n'
+ 'target; use "--" if the extra arguments need to include switches.\n'
+ '\n'
+ 'Examples:\n'
+ '\n'
+ ' % tools/mb/mb.py run -m chromium.linux -b "Linux Builder" \\\n'
+ ' //out/Default content_browsertests\n'
+ '\n'
+ ' % tools/mb/mb.py run out/Default content_browsertests\n'
+ '\n'
+ ' % tools/mb/mb.py run out/Default content_browsertests -- \\\n'
+ ' --test-launcher-retry-limit=0'
+ '\n'
+ )
+
+ AddCommonOptions(subp)
+ subp.add_argument('-j', '--jobs', dest='jobs', type=int,
+ help='Number of jobs to pass to ninja')
+ subp.add_argument('--no-build', dest='build', default=True,
+ action='store_false',
+ help='Do not build, just isolate and run')
+ subp.add_argument('path', nargs=1,
+ help=('path to generate build into (or use).'
+ ' This can be either a regular path or a '
+ 'GN-style source-relative path like '
+ '//out/Default.'))
+ subp.add_argument('target', nargs=1,
+ help='ninja target to build and run')
+ subp.add_argument('extra_args', nargs='*',
+ help=('extra args to pass to the isolate to run. Use '
+ '"--" as the first arg if you need to pass '
+ 'switches'))
+ subp.set_defaults(func=self.CmdRun)
+
+ subp = subps.add_parser('validate',
+ help='validate the config file')
+ subp.add_argument('-f', '--config-file', metavar='PATH',
+ default=self.default_config,
+ help='path to config file '
+ '(default is //tools/mb/mb_config.pyl)')
+ subp.set_defaults(func=self.CmdValidate)
+
+ subp = subps.add_parser('audit',
+ help='Audit the config file to track progress')
+ subp.add_argument('-f', '--config-file', metavar='PATH',
+ default=self.default_config,
+ help='path to config file '
+ '(default is //tools/mb/mb_config.pyl)')
+ subp.add_argument('-i', '--internal', action='store_true',
+ help='check internal masters also')
+ subp.add_argument('-m', '--master', action='append',
+ help='master to audit (default is all non-internal '
+ 'masters in file)')
+ subp.add_argument('-u', '--url-template', action='store',
+ default='https://build.chromium.org/p/'
+ '{master}/json/builders',
+ help='URL scheme for JSON APIs to buildbot '
+ '(default: %(default)s) ')
+ subp.add_argument('-c', '--check-compile', action='store_true',
+ help='check whether tbd and master-only bots actually'
+ ' do compiles')
+ subp.set_defaults(func=self.CmdAudit)
+
+ subp = subps.add_parser('help',
+ help='Get help on a subcommand.')
+ subp.add_argument(nargs='?', action='store', dest='subcommand',
+ help='The command to get help for.')
+ subp.set_defaults(func=self.CmdHelp)
+
+ self.args = parser.parse_args(argv)
+
+ def DumpInputFiles(self):
+
+ def DumpContentsOfFilePassedTo(arg_name, path):
+ if path and self.Exists(path):
+ self.Print("\n# To recreate the file passed to %s:" % arg_name)
+ self.Print("%% cat > %s <<EOF)" % path)
+ contents = self.ReadFile(path)
+ self.Print(contents)
+ self.Print("EOF\n%\n")
+
+ if getattr(self.args, 'input_path', None):
+ DumpContentsOfFilePassedTo(
+ 'argv[0] (input_path)', self.args.input_path[0])
+ if getattr(self.args, 'swarming_targets_file', None):
+ DumpContentsOfFilePassedTo(
+ '--swarming-targets-file', self.args.swarming_targets_file)
+
+ def CmdAnalyze(self):
+ vals = self.Lookup()
+ if vals['type'] == 'gn':
+ return self.RunGNAnalyze(vals)
+ else:
+ return self.RunGYPAnalyze(vals)
+
+ def CmdGen(self):
+ vals = self.Lookup()
+ self.ClobberIfNeeded(vals)
+
+ if vals['type'] == 'gn':
+ return self.RunGNGen(vals)
+ else:
+ return self.RunGYPGen(vals)
+
+ def CmdHelp(self):
+ if self.args.subcommand:
+ self.ParseArgs([self.args.subcommand, '--help'])
+ else:
+ self.ParseArgs(['--help'])
+
+ def CmdIsolate(self):
+ vals = self.GetConfig()
+ if not vals:
+ return 1
+
+ if vals['type'] == 'gn':
+ return self.RunGNIsolate(vals)
+ else:
+ return self.Build('%s_run' % self.args.target[0])
+
+ def CmdLookup(self):
+ vals = self.Lookup()
+ if vals['type'] == 'gn':
+ cmd = self.GNCmd('gen', '_path_', vals['gn_args'])
+ env = None
+ else:
+ cmd, env = self.GYPCmd('_path_', vals)
+
+ self.PrintCmd(cmd, env)
+ return 0
+
+ def CmdRun(self):
+ vals = self.GetConfig()
+ if not vals:
+ return 1
+
+ build_dir = self.args.path[0]
+ target = self.args.target[0]
+
+ if vals['type'] == 'gn':
+ if self.args.build:
+ ret = self.Build(target)
+ if ret:
+ return ret
+ ret = self.RunGNIsolate(vals)
+ if ret:
+ return ret
+ else:
+ ret = self.Build('%s_run' % target)
+ if ret:
+ return ret
+
+ cmd = [
+ self.executable,
+ self.PathJoin('tools', 'swarming_client', 'isolate.py'),
+ 'run',
+ '-s',
+ self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target)),
+ ]
+ if self.args.extra_args:
+ cmd += ['--'] + self.args.extra_args
+
+ ret, _, _ = self.Run(cmd, force_verbose=False, buffer_output=False)
+
+ return ret
+
+ def CmdValidate(self, print_ok=True):
+ errs = []
+
+ # Read the file to make sure it parses.
+ self.ReadConfigFile()
+
+ # Build a list of all of the configs referenced by builders.
+ all_configs = {}
+ for master in self.masters:
+ for config in self.masters[master].values():
+ all_configs[config] = master
+
+ # Check that every referenced config actually exists.
+ for config, loc in all_configs.items():
+ if not config in self.configs:
+ errs.append('Unknown config "%s" referenced from "%s".' %
+ (config, loc))
+
+ # Check that every actual config is actually referenced.
+ for config in self.configs:
+ if not config in all_configs:
+ errs.append('Unused config "%s".' % config)
+
+ # Figure out the whole list of mixins, and check that every mixin
+ # listed by a config or another mixin actually exists.
+ referenced_mixins = set()
+ for config, mixins in self.configs.items():
+ for mixin in mixins:
+ if not mixin in self.mixins:
+ errs.append('Unknown mixin "%s" referenced by config "%s".' %
+ (mixin, config))
+ referenced_mixins.add(mixin)
+
+ for mixin in self.mixins:
+ for sub_mixin in self.mixins[mixin].get('mixins', []):
+ if not sub_mixin in self.mixins:
+ errs.append('Unknown mixin "%s" referenced by mixin "%s".' %
+ (sub_mixin, mixin))
+ referenced_mixins.add(sub_mixin)
+
+ # Check that every mixin defined is actually referenced somewhere.
+ for mixin in self.mixins:
+ if not mixin in referenced_mixins:
+ errs.append('Unreferenced mixin "%s".' % mixin)
+
+ # If we're checking the Chromium config, check that the 'chromium' bots
+ # which build public artifacts do not include the chrome_with_codecs mixin.
+ if self.args.config_file == self.default_config:
+ if 'chromium' in self.masters:
+ for builder in self.masters['chromium']:
+ config = self.masters['chromium'][builder]
+ def RecurseMixins(current_mixin):
+ if current_mixin == 'chrome_with_codecs':
+ errs.append('Public artifact builder "%s" can not contain the '
+ '"chrome_with_codecs" mixin.' % builder)
+ return
+ if not 'mixins' in self.mixins[current_mixin]:
+ return
+ for mixin in self.mixins[current_mixin]['mixins']:
+ RecurseMixins(mixin)
+
+ for mixin in self.configs[config]:
+ RecurseMixins(mixin)
+ else:
+ errs.append('Missing "chromium" master. Please update this '
+ 'proprietary codecs check with the name of the master '
+ 'responsible for public build artifacts.')
+
+ if errs:
+ raise MBErr(('mb config file %s has problems:' % self.args.config_file) +
+ '\n ' + '\n '.join(errs))
+
+ if print_ok:
+ self.Print('mb config file %s looks ok.' % self.args.config_file)
+ return 0
+
+ def CmdAudit(self):
+ """Track the progress of the GYP->GN migration on the bots."""
+
+ # First, make sure the config file is okay, but don't print anything
+ # if it is (it will throw an error if it isn't).
+ self.CmdValidate(print_ok=False)
+
+ stats = OrderedDict()
+ STAT_MASTER_ONLY = 'Master only'
+ STAT_CONFIG_ONLY = 'Config only'
+ STAT_TBD = 'Still TBD'
+ STAT_GYP = 'Still GYP'
+ STAT_DONE = 'Done (on GN)'
+ stats[STAT_MASTER_ONLY] = 0
+ stats[STAT_CONFIG_ONLY] = 0
+ stats[STAT_TBD] = 0
+ stats[STAT_GYP] = 0
+ stats[STAT_DONE] = 0
+
+ def PrintBuilders(heading, builders, notes):
+ stats.setdefault(heading, 0)
+ stats[heading] += len(builders)
+ if builders:
+ self.Print(' %s:' % heading)
+ for builder in sorted(builders):
+ self.Print(' %s%s' % (builder, notes[builder]))
+
+ self.ReadConfigFile()
+
+ masters = self.args.master or self.masters
+ for master in sorted(masters):
+ url = self.args.url_template.replace('{master}', master)
+
+ self.Print('Auditing %s' % master)
+
+ MASTERS_TO_SKIP = (
+ 'client.skia',
+ 'client.v8.fyi',
+ 'tryserver.v8',
+ )
+ if master in MASTERS_TO_SKIP:
+ # Skip these bots because converting them is the responsibility of
+ # those teams and out of scope for the Chromium migration to GN.
+ self.Print(' Skipped (out of scope)')
+ self.Print('')
+ continue
+
+ INTERNAL_MASTERS = ('official.desktop', 'official.desktop.continuous')
+ if master in INTERNAL_MASTERS and not self.args.internal:
+ # Skip these because the servers aren't accessible by default ...
+ self.Print(' Skipped (internal)')
+ self.Print('')
+ continue
+
+ try:
+ # Fetch the /builders contents from the buildbot master. The
+ # keys of the dict are the builder names themselves.
+ json_contents = self.Fetch(url)
+ d = json.loads(json_contents)
+ except Exception as e:
+ self.Print(str(e))
+ return 1
+
+ config_builders = set(self.masters[master])
+ master_builders = set(d.keys())
+ both = master_builders & config_builders
+ master_only = master_builders - config_builders
+ config_only = config_builders - master_builders
+ tbd = set()
+ gyp = set()
+ done = set()
+ notes = {builder: '' for builder in config_builders | master_builders}
+
+ for builder in both:
+ config = self.masters[master][builder]
+ if config == 'tbd':
+ tbd.add(builder)
+ else:
+ # TODO(dpranke): Check if MB is actually running?
+ vals = self.FlattenConfig(config)
+ if vals['type'] == 'gyp':
+ gyp.add(builder)
+ else:
+ done.add(builder)
+
+ if self.args.check_compile and (tbd or master_only):
+ either = tbd | master_only
+ for builder in either:
+ notes[builder] = ' (' + self.CheckCompile(master, builder) +')'
+
+ if master_only or config_only or tbd or gyp:
+ PrintBuilders(STAT_MASTER_ONLY, master_only, notes)
+ PrintBuilders(STAT_CONFIG_ONLY, config_only, notes)
+ PrintBuilders(STAT_TBD, tbd, notes)
+ PrintBuilders(STAT_GYP, gyp, notes)
+ else:
+ self.Print(' ... done')
+
+ stats[STAT_DONE] += len(done)
+
+ self.Print('')
+
+ fmt = '{:<27} {:>4}'
+ self.Print(fmt.format('Totals', str(sum(int(v) for v in stats.values()))))
+ self.Print(fmt.format('-' * 27, '----'))
+ for stat, count in stats.items():
+ self.Print(fmt.format(stat, str(count)))
+
+ return 0
+
+ def GetConfig(self):
+ build_dir = self.args.path[0]
+
+ vals = {}
+ if self.args.builder or self.args.master or self.args.config:
+ vals = self.Lookup()
+ if vals['type'] == 'gn':
+ # Re-run gn gen in order to ensure the config is consistent with the
+ # build dir.
+ self.RunGNGen(vals)
+ return vals
+
+ # TODO: We can only get the config for GN build dirs, not GYP build dirs.
+ # GN stores the args that were used in args.gn in the build dir,
+ # but GYP doesn't store them anywhere. We should consider modifying
+ # gyp_chromium to record the arguments it runs with in a similar
+ # manner.
+
+ mb_type_path = self.PathJoin(self.ToAbsPath(build_dir), 'mb_type')
+ if not self.Exists(mb_type_path):
+ gn_args_path = self.PathJoin(self.ToAbsPath(build_dir), 'args.gn')
+ if not self.Exists(gn_args_path):
+ self.Print('Must either specify a path to an existing GN build dir '
+ 'or pass in a -m/-b pair or a -c flag to specify the '
+ 'configuration')
+ return {}
+ else:
+ mb_type = 'gn'
+ else:
+ mb_type = self.ReadFile(mb_type_path).strip()
+
+ if mb_type == 'gn':
+ vals = self.GNValsFromDir(build_dir)
+ else:
+ vals = {}
+ vals['type'] = mb_type
+
+ return vals
+
+ def GNValsFromDir(self, build_dir):
+ args_contents = self.ReadFile(
+ self.PathJoin(self.ToAbsPath(build_dir), 'args.gn'))
+ gn_args = []
+ for l in args_contents.splitlines():
+ fields = l.split(' ')
+ name = fields[0]
+ val = ' '.join(fields[2:])
+ gn_args.append('%s=%s' % (name, val))
+
+ return {
+ 'gn_args': ' '.join(gn_args),
+ 'type': 'gn',
+ }
+
+ def Lookup(self):
+ vals = self.ReadBotConfig()
+ if not vals:
+ self.ReadConfigFile()
+ config = self.ConfigFromArgs()
+ if not config in self.configs:
+ raise MBErr('Config "%s" not found in %s' %
+ (config, self.args.config_file))
+
+ vals = self.FlattenConfig(config)
+
+ # Do some basic sanity checking on the config so that we
+ # don't have to do this in every caller.
+ assert 'type' in vals, 'No meta-build type specified in the config'
+ assert vals['type'] in ('gn', 'gyp'), (
+ 'Unknown meta-build type "%s"' % vals['gn_args'])
+
+ return vals
+
+ def ReadBotConfig(self):
+ if not self.args.master or not self.args.builder:
+ return {}
+ path = self.PathJoin(self.chromium_src_dir, 'ios', 'build', 'bots',
+ self.args.master, self.args.builder + '.json')
+ if not self.Exists(path):
+ return {}
+
+ contents = json.loads(self.ReadFile(path))
+ gyp_vals = contents.get('GYP_DEFINES', {})
+ if isinstance(gyp_vals, dict):
+ gyp_defines = ' '.join('%s=%s' % (k, v) for k, v in gyp_vals.items())
+ else:
+ gyp_defines = ' '.join(gyp_vals)
+ gn_args = ' '.join(contents.get('gn_args', []))
+
+ return {
+ 'type': contents.get('mb_type', ''),
+ 'gn_args': gn_args,
+ 'gyp_defines': gyp_defines,
+ 'gyp_crosscompile': False,
+ }
+
+ def ReadConfigFile(self):
+ if not self.Exists(self.args.config_file):
+ raise MBErr('config file not found at %s' % self.args.config_file)
+
+ try:
+ contents = ast.literal_eval(self.ReadFile(self.args.config_file))
+ except SyntaxError as e:
+ raise MBErr('Failed to parse config file "%s": %s' %
+ (self.args.config_file, e))
+
+ self.configs = contents['configs']
+ self.masters = contents['masters']
+ self.mixins = contents['mixins']
+
+ def ConfigFromArgs(self):
+ if self.args.config:
+ if self.args.master or self.args.builder:
+ raise MBErr('Can not specific both -c/--config and -m/--master or '
+ '-b/--builder')
+
+ return self.args.config
+
+ if not self.args.master or not self.args.builder:
+ raise MBErr('Must specify either -c/--config or '
+ '(-m/--master and -b/--builder)')
+
+ if not self.args.master in self.masters:
+ raise MBErr('Master name "%s" not found in "%s"' %
+ (self.args.master, self.args.config_file))
+
+ if not self.args.builder in self.masters[self.args.master]:
+ raise MBErr('Builder name "%s" not found under masters[%s] in "%s"' %
+ (self.args.builder, self.args.master, self.args.config_file))
+
+ return self.masters[self.args.master][self.args.builder]
+
+ def FlattenConfig(self, config):
+ mixins = self.configs[config]
+ vals = {
+ 'type': None,
+ 'gn_args': [],
+ 'gyp_defines': '',
+ 'gyp_crosscompile': False,
+ }
+
+ visited = []
+ self.FlattenMixins(mixins, vals, visited)
+ return vals
+
+ def FlattenMixins(self, mixins, vals, visited):
+ for m in mixins:
+ if m not in self.mixins:
+ raise MBErr('Unknown mixin "%s"' % m)
+
+ # TODO: check for cycles in mixins.
+
+ visited.append(m)
+
+ mixin_vals = self.mixins[m]
+ if 'type' in mixin_vals:
+ vals['type'] = mixin_vals['type']
+ if 'gn_args' in mixin_vals:
+ if vals['gn_args']:
+ vals['gn_args'] += ' ' + mixin_vals['gn_args']
+ else:
+ vals['gn_args'] = mixin_vals['gn_args']
+ if 'gyp_crosscompile' in mixin_vals:
+ vals['gyp_crosscompile'] = mixin_vals['gyp_crosscompile']
+ if 'gyp_defines' in mixin_vals:
+ if vals['gyp_defines']:
+ vals['gyp_defines'] += ' ' + mixin_vals['gyp_defines']
+ else:
+ vals['gyp_defines'] = mixin_vals['gyp_defines']
+ if 'mixins' in mixin_vals:
+ self.FlattenMixins(mixin_vals['mixins'], vals, visited)
+ return vals
+
+ def ClobberIfNeeded(self, vals):
+ path = self.args.path[0]
+ build_dir = self.ToAbsPath(path)
+ mb_type_path = self.PathJoin(build_dir, 'mb_type')
+ needs_clobber = False
+ new_mb_type = vals['type']
+ if self.Exists(build_dir):
+ if self.Exists(mb_type_path):
+ old_mb_type = self.ReadFile(mb_type_path)
+ if old_mb_type != new_mb_type:
+ self.Print("Build type mismatch: was %s, will be %s, clobbering %s" %
+ (old_mb_type, new_mb_type, path))
+ needs_clobber = True
+ else:
+ # There is no 'mb_type' file in the build directory, so this probably
+ # means that the prior build(s) were not done through mb, and we
+ # have no idea if this was a GYP build or a GN build. Clobber it
+ # to be safe.
+ self.Print("%s/mb_type missing, clobbering to be safe" % path)
+ needs_clobber = True
+
+ if self.args.dryrun:
+ return
+
+ if needs_clobber:
+ self.RemoveDirectory(build_dir)
+
+ self.MaybeMakeDirectory(build_dir)
+ self.WriteFile(mb_type_path, new_mb_type)
+
+ def RunGNGen(self, vals):
+ build_dir = self.args.path[0]
+
+ cmd = self.GNCmd('gen', build_dir, vals['gn_args'], extra_args=['--check'])
+
+ swarming_targets = []
+ if getattr(self.args, 'swarming_targets_file', None):
+ # We need GN to generate the list of runtime dependencies for
+ # the compile targets listed (one per line) in the file so
+ # we can run them via swarming. We use ninja_to_gn.pyl to convert
+ # the compile targets to the matching GN labels.
+ path = self.args.swarming_targets_file
+ if not self.Exists(path):
+ self.WriteFailureAndRaise('"%s" does not exist' % path,
+ output_path=None)
+ contents = self.ReadFile(path)
+ swarming_targets = set(contents.splitlines())
+ gn_isolate_map = ast.literal_eval(self.ReadFile(self.PathJoin(
+ self.chromium_src_dir, 'testing', 'buildbot', 'gn_isolate_map.pyl')))
+ gn_labels = []
+ err = ''
+ for target in swarming_targets:
+ target_name = self.GNTargetName(target)
+ if not target_name in gn_isolate_map:
+ err += ('test target "%s" not found\n' % target_name)
+ elif gn_isolate_map[target_name]['type'] == 'unknown':
+ err += ('test target "%s" type is unknown\n' % target_name)
+ else:
+ gn_labels.append(gn_isolate_map[target_name]['label'])
+
+ if err:
+ raise MBErr('Error: Failed to match swarming targets to %s:\n%s' %
+ ('//testing/buildbot/gn_isolate_map.pyl', err))
+
+ gn_runtime_deps_path = self.ToAbsPath(build_dir, 'runtime_deps')
+
+ # Since GN hasn't run yet, the build directory may not even exist.
+ self.MaybeMakeDirectory(self.ToAbsPath(build_dir))
+
+ self.WriteFile(gn_runtime_deps_path, '\n'.join(gn_labels) + '\n')
+ cmd.append('--runtime-deps-list-file=%s' % gn_runtime_deps_path)
+
+ ret, _, _ = self.Run(cmd)
+ if ret:
+ # If `gn gen` failed, we should exit early rather than trying to
+ # generate isolates. Run() will have already logged any error output.
+ self.Print('GN gen failed: %d' % ret)
+ return ret
+
+ for target in swarming_targets:
+ if target.endswith('_apk'):
+ # "_apk" targets may be either android_apk or executable. The former
+ # will result in runtime_deps associated with the stamp file, while the
+ # latter will result in runtime_deps associated with the executable.
+ target_name = self.GNTargetName(target)
+ label = gn_isolate_map[target_name]['label']
+ runtime_deps_targets = [
+ target_name + '.runtime_deps',
+ 'obj/%s.stamp.runtime_deps' % label.replace(':', '/')]
+ elif gn_isolate_map[target]['type'] == 'gpu_browser_test':
+ if self.platform == 'win32':
+ runtime_deps_targets = ['browser_tests.exe.runtime_deps']
+ else:
+ runtime_deps_targets = ['browser_tests.runtime_deps']
+ elif (gn_isolate_map[target]['type'] == 'script' or
+ gn_isolate_map[target].get('label_type') == 'group'):
+ # For script targets, the build target is usually a group,
+ # for which gn generates the runtime_deps next to the stamp file
+ # for the label, which lives under the obj/ directory.
+ label = gn_isolate_map[target]['label']
+ runtime_deps_targets = [
+ 'obj/%s.stamp.runtime_deps' % label.replace(':', '/')]
+ elif self.platform == 'win32':
+ runtime_deps_targets = [target + '.exe.runtime_deps']
+ else:
+ runtime_deps_targets = [target + '.runtime_deps']
+
+ for r in runtime_deps_targets:
+ runtime_deps_path = self.ToAbsPath(build_dir, r)
+ if self.Exists(runtime_deps_path):
+ break
+ else:
+ raise MBErr('did not generate any of %s' %
+ ', '.join(runtime_deps_targets))
+
+ command, extra_files = self.GetIsolateCommand(target, vals,
+ gn_isolate_map)
+
+ runtime_deps = self.ReadFile(runtime_deps_path).splitlines()
+
+ self.WriteIsolateFiles(build_dir, command, target, runtime_deps,
+ extra_files)
+
+ return 0
+
+ def RunGNIsolate(self, vals):
+ gn_isolate_map = ast.literal_eval(self.ReadFile(self.PathJoin(
+ self.chromium_src_dir, 'testing', 'buildbot', 'gn_isolate_map.pyl')))
+
+ build_dir = self.args.path[0]
+ target = self.args.target[0]
+ target_name = self.GNTargetName(target)
+ command, extra_files = self.GetIsolateCommand(target, vals, gn_isolate_map)
+
+ label = gn_isolate_map[target_name]['label']
+ cmd = self.GNCmd('desc', build_dir, extra_args=[label, 'runtime_deps'])
+ ret, out, _ = self.Call(cmd)
+ if ret:
+ if out:
+ self.Print(out)
+ return ret
+
+ runtime_deps = out.splitlines()
+
+ self.WriteIsolateFiles(build_dir, command, target, runtime_deps,
+ extra_files)
+
+ ret, _, _ = self.Run([
+ self.executable,
+ self.PathJoin('tools', 'swarming_client', 'isolate.py'),
+ 'check',
+ '-i',
+ self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)),
+ '-s',
+ self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target))],
+ buffer_output=False)
+
+ return ret
+
+ def WriteIsolateFiles(self, build_dir, command, target, runtime_deps,
+ extra_files):
+ isolate_path = self.ToAbsPath(build_dir, target + '.isolate')
+ self.WriteFile(isolate_path,
+ pprint.pformat({
+ 'variables': {
+ 'command': command,
+ 'files': sorted(runtime_deps + extra_files),
+ }
+ }) + '\n')
+
+ self.WriteJSON(
+ {
+ 'args': [
+ '--isolated',
+ self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target)),
+ '--isolate',
+ self.ToSrcRelPath('%s/%s.isolate' % (build_dir, target)),
+ ],
+ 'dir': self.chromium_src_dir,
+ 'version': 1,
+ },
+ isolate_path + 'd.gen.json',
+ )
+
+ def GNCmd(self, subcommand, path, gn_args='', extra_args=None):
+ if self.platform == 'linux2':
+ subdir, exe = 'linux64', 'gn'
+ elif self.platform == 'darwin':
+ subdir, exe = 'mac', 'gn'
+ else:
+ subdir, exe = 'win', 'gn.exe'
+ gn_path = self.PathJoin(self.chromium_src_dir, 'buildtools', subdir, exe)
+
+ cmd = [gn_path, subcommand, path]
+ gn_args = gn_args.replace("$(goma_dir)", self.args.goma_dir)
+ if gn_args:
+ cmd.append('--args=%s' % gn_args)
+ if extra_args:
+ cmd.extend(extra_args)
+ return cmd
+
+ def RunGYPGen(self, vals):
+ path = self.args.path[0]
+
+ output_dir = self.ParseGYPConfigPath(path)
+ cmd, env = self.GYPCmd(output_dir, vals)
+ ret, _, _ = self.Run(cmd, env=env)
+ return ret
+
+ def RunGYPAnalyze(self, vals):
+ output_dir = self.ParseGYPConfigPath(self.args.path[0])
+ if self.args.verbose:
+ inp = self.ReadInputJSON(['files', 'test_targets',
+ 'additional_compile_targets'])
+ self.Print()
+ self.Print('analyze input:')
+ self.PrintJSON(inp)
+ self.Print()
+
+ cmd, env = self.GYPCmd(output_dir, vals)
+ cmd.extend(['-f', 'analyzer',
+ '-G', 'config_path=%s' % self.args.input_path[0],
+ '-G', 'analyzer_output_path=%s' % self.args.output_path[0]])
+ ret, _, _ = self.Run(cmd, env=env)
+ if not ret and self.args.verbose:
+ outp = json.loads(self.ReadFile(self.args.output_path[0]))
+ self.Print()
+ self.Print('analyze output:')
+ self.PrintJSON(outp)
+ self.Print()
+
+ return ret
+
+ def GetIsolateCommand(self, target, vals, gn_isolate_map):
+ android = 'target_os="android"' in vals['gn_args']
+
+ # This needs to mirror the settings in //build/config/ui.gni:
+ # use_x11 = is_linux && !use_ozone.
+ # TODO(dpranke): Figure out how to keep this in sync better.
+ use_x11 = (self.platform == 'linux2' and
+ not android and
+ not 'use_ozone=true' in vals['gn_args'])
+
+ asan = 'is_asan=true' in vals['gn_args']
+ msan = 'is_msan=true' in vals['gn_args']
+ tsan = 'is_tsan=true' in vals['gn_args']
+
+ target_name = self.GNTargetName(target)
+ test_type = gn_isolate_map[target_name]['type']
+
+ executable = gn_isolate_map[target_name].get('executable', target_name)
+ executable_suffix = '.exe' if self.platform == 'win32' else ''
+
+ cmdline = []
+ extra_files = []
+
+ if android:
+ # TODO(jbudorick): This won't work with instrumentation test targets.
+ # Revisit this logic when those are added to gn_isolate_map.pyl.
+ cmdline = [self.PathJoin('bin', 'run_%s' % target_name)]
+ elif use_x11 and test_type == 'windowed_test_launcher':
+ extra_files = [
+ 'xdisplaycheck',
+ '../../testing/test_env.py',
+ '../../testing/xvfb.py',
+ ]
+ cmdline = [
+ '../../testing/xvfb.py',
+ '.',
+ './' + str(executable) + executable_suffix,
+ '--brave-new-test-launcher',
+ '--test-launcher-bot-mode',
+ '--asan=%d' % asan,
+ '--msan=%d' % msan,
+ '--tsan=%d' % tsan,
+ ]
+ elif test_type in ('windowed_test_launcher', 'console_test_launcher'):
+ extra_files = [
+ '../../testing/test_env.py'
+ ]
+ cmdline = [
+ '../../testing/test_env.py',
+ './' + str(executable) + executable_suffix,
+ '--brave-new-test-launcher',
+ '--test-launcher-bot-mode',
+ '--asan=%d' % asan,
+ '--msan=%d' % msan,
+ '--tsan=%d' % tsan,
+ ]
+ elif test_type == 'gpu_browser_test':
+ extra_files = [
+ '../../testing/test_env.py'
+ ]
+ gtest_filter = gn_isolate_map[target]['gtest_filter']
+ cmdline = [
+ '../../testing/test_env.py',
+ './browser_tests' + executable_suffix,
+ '--test-launcher-bot-mode',
+ '--enable-gpu',
+ '--test-launcher-jobs=1',
+ '--gtest_filter=%s' % gtest_filter,
+ ]
+ elif test_type == 'script':
+ extra_files = [
+ '../../testing/test_env.py'
+ ]
+ cmdline = [
+ '../../testing/test_env.py',
+ '../../' + self.ToSrcRelPath(gn_isolate_map[target]['script'])
+ ]
+ elif test_type in ('raw'):
+ extra_files = []
+ cmdline = [
+ './' + str(target) + executable_suffix,
+ ]
+
+ else:
+ self.WriteFailureAndRaise('No command line for %s found (test type %s).'
+ % (target, test_type), output_path=None)
+
+ cmdline += gn_isolate_map[target_name].get('args', [])
+
+ return cmdline, extra_files
+
+ def ToAbsPath(self, build_path, *comps):
+ return self.PathJoin(self.chromium_src_dir,
+ self.ToSrcRelPath(build_path),
+ *comps)
+
+ def ToSrcRelPath(self, path):
+ """Returns a relative path from the top of the repo."""
+ if path.startswith('//'):
+ return path[2:].replace('/', self.sep)
+ return self.RelPath(path, self.chromium_src_dir)
+
+ def ParseGYPConfigPath(self, path):
+ rpath = self.ToSrcRelPath(path)
+ output_dir, _, _ = rpath.rpartition(self.sep)
+ return output_dir
+
+ def GYPCmd(self, output_dir, vals):
+ gyp_defines = vals['gyp_defines']
+ goma_dir = self.args.goma_dir
+
+ # GYP uses shlex.split() to split the gyp defines into separate arguments,
+    # so we can support backslashes and spaces in arguments by quoting
+ # them, even on Windows, where this normally wouldn't work.
+ if '\\' in goma_dir or ' ' in goma_dir:
+ goma_dir = "'%s'" % goma_dir
+ gyp_defines = gyp_defines.replace("$(goma_dir)", goma_dir)
+
+ cmd = [
+ self.executable,
+ self.PathJoin('build', 'gyp_chromium'),
+ '-G',
+ 'output_dir=' + output_dir,
+ ]
+
+ # Ensure that we have an environment that only contains
+ # the exact values of the GYP variables we need.
+ env = os.environ.copy()
+ env['GYP_GENERATORS'] = 'ninja'
+ if 'GYP_CHROMIUM_NO_ACTION' in env:
+ del env['GYP_CHROMIUM_NO_ACTION']
+ if 'GYP_CROSSCOMPILE' in env:
+ del env['GYP_CROSSCOMPILE']
+ env['GYP_DEFINES'] = gyp_defines
+ if vals['gyp_crosscompile']:
+ env['GYP_CROSSCOMPILE'] = '1'
+ return cmd, env
+
+ def RunGNAnalyze(self, vals):
+ # analyze runs before 'gn gen' now, so we need to run gn gen
+ # in order to ensure that we have a build directory.
+ ret = self.RunGNGen(vals)
+ if ret:
+ return ret
+
+ inp = self.ReadInputJSON(['files', 'test_targets',
+ 'additional_compile_targets'])
+ if self.args.verbose:
+ self.Print()
+ self.Print('analyze input:')
+ self.PrintJSON(inp)
+ self.Print()
+
+ # TODO(crbug.com/555273) - currently GN treats targets and
+ # additional_compile_targets identically since we can't tell the
+ # difference between a target that is a group in GN and one that isn't.
+ # We should eventually fix this and treat the two types differently.
+ targets = (set(inp['test_targets']) |
+ set(inp['additional_compile_targets']))
+
+ output_path = self.args.output_path[0]
+
+ # Bail out early if a GN file was modified, since 'gn refs' won't know
+ # what to do about it. Also, bail out early if 'all' was asked for,
+ # since we can't deal with it yet.
+ if (any(f.endswith('.gn') or f.endswith('.gni') for f in inp['files']) or
+ 'all' in targets):
+ self.WriteJSON({
+ 'status': 'Found dependency (all)',
+ 'compile_targets': sorted(targets),
+ 'test_targets': sorted(targets & set(inp['test_targets'])),
+ }, output_path)
+ return 0
+
+ # This shouldn't normally happen, but could due to unusual race conditions,
+ # like a try job that gets scheduled before a patch lands but runs after
+ # the patch has landed.
+ if not inp['files']:
+ self.Print('Warning: No files modified in patch, bailing out early.')
+ self.WriteJSON({
+ 'status': 'No dependency',
+ 'compile_targets': [],
+ 'test_targets': [],
+ }, output_path)
+ return 0
+
+ ret = 0
+ response_file = self.TempFile()
+ response_file.write('\n'.join(inp['files']) + '\n')
+ response_file.close()
+
+ matching_targets = set()
+ try:
+ cmd = self.GNCmd('refs', self.args.path[0]) + [
+ '@%s' % response_file.name, '--all', '--as=output']
+ ret, out, _ = self.Run(cmd, force_verbose=False)
+ if ret and not 'The input matches no targets' in out:
+ self.WriteFailureAndRaise('gn refs returned %d: %s' % (ret, out),
+ output_path)
+ build_dir = self.ToSrcRelPath(self.args.path[0]) + self.sep
+ for output in out.splitlines():
+ build_output = output.replace(build_dir, '')
+ if build_output in targets:
+ matching_targets.add(build_output)
+
+ cmd = self.GNCmd('refs', self.args.path[0]) + [
+ '@%s' % response_file.name, '--all']
+ ret, out, _ = self.Run(cmd, force_verbose=False)
+ if ret and not 'The input matches no targets' in out:
+ self.WriteFailureAndRaise('gn refs returned %d: %s' % (ret, out),
+ output_path)
+ for label in out.splitlines():
+ build_target = label[2:]
+ # We want to accept 'chrome/android:chrome_public_apk' and
+ # just 'chrome_public_apk'. This may result in too many targets
+ # getting built, but we can adjust that later if need be.
+ for input_target in targets:
+ if (input_target == build_target or
+ build_target.endswith(':' + input_target)):
+ matching_targets.add(input_target)
+ finally:
+ self.RemoveFile(response_file.name)
+
+ if matching_targets:
+ self.WriteJSON({
+ 'status': 'Found dependency',
+ 'compile_targets': sorted(matching_targets),
+ 'test_targets': sorted(matching_targets &
+ set(inp['test_targets'])),
+ }, output_path)
+ else:
+ self.WriteJSON({
+ 'status': 'No dependency',
+ 'compile_targets': [],
+ 'test_targets': [],
+ }, output_path)
+
+ if self.args.verbose:
+ outp = json.loads(self.ReadFile(output_path))
+ self.Print()
+ self.Print('analyze output:')
+ self.PrintJSON(outp)
+ self.Print()
+
+ return 0
+
+ def ReadInputJSON(self, required_keys):
+ path = self.args.input_path[0]
+ output_path = self.args.output_path[0]
+ if not self.Exists(path):
+ self.WriteFailureAndRaise('"%s" does not exist' % path, output_path)
+
+ try:
+ inp = json.loads(self.ReadFile(path))
+ except Exception as e:
+ self.WriteFailureAndRaise('Failed to read JSON input from "%s": %s' %
+ (path, e), output_path)
+
+ for k in required_keys:
+ if not k in inp:
+ self.WriteFailureAndRaise('input file is missing a "%s" key' % k,
+ output_path)
+
+ return inp
+
+ def WriteFailureAndRaise(self, msg, output_path):
+ if output_path:
+ self.WriteJSON({'error': msg}, output_path, force_verbose=True)
+ raise MBErr(msg)
+
+ def WriteJSON(self, obj, path, force_verbose=False):
+ try:
+ self.WriteFile(path, json.dumps(obj, indent=2, sort_keys=True) + '\n',
+ force_verbose=force_verbose)
+ except Exception as e:
+ raise MBErr('Error %s writing to the output path "%s"' %
+ (e, path))
+
+ def CheckCompile(self, master, builder):
+ url_template = self.args.url_template + '/{builder}/builds/_all?as_text=1'
+ url = urllib2.quote(url_template.format(master=master, builder=builder),
+ safe=':/()?=')
+ try:
+ builds = json.loads(self.Fetch(url))
+ except Exception as e:
+ return str(e)
+ successes = sorted(
+ [int(x) for x in builds.keys() if "text" in builds[x] and
+ cmp(builds[x]["text"][:2], ["build", "successful"]) == 0],
+ reverse=True)
+ if not successes:
+ return "no successful builds"
+ build = builds[str(successes[0])]
+ step_names = set([step["name"] for step in build["steps"]])
+ compile_indicators = set(["compile", "compile (with patch)", "analyze"])
+ if compile_indicators & step_names:
+ return "compiles"
+ return "does not compile"
+
+ def PrintCmd(self, cmd, env):
+ if self.platform == 'win32':
+ env_prefix = 'set '
+ env_quoter = QuoteForSet
+ shell_quoter = QuoteForCmd
+ else:
+ env_prefix = ''
+ env_quoter = pipes.quote
+ shell_quoter = pipes.quote
+
+ def print_env(var):
+ if env and var in env:
+ self.Print('%s%s=%s' % (env_prefix, var, env_quoter(env[var])))
+
+ print_env('GYP_CROSSCOMPILE')
+ print_env('GYP_DEFINES')
+
+ if cmd[0] == self.executable:
+ cmd = ['python'] + cmd[1:]
+ self.Print(*[shell_quoter(arg) for arg in cmd])
+
+ def PrintJSON(self, obj):
+ self.Print(json.dumps(obj, indent=2, sort_keys=True))
+
+ def GNTargetName(self, target):
+ return target[:-len('_apk')] if target.endswith('_apk') else target
+
+ def Build(self, target):
+ build_dir = self.ToSrcRelPath(self.args.path[0])
+ ninja_cmd = ['ninja', '-C', build_dir]
+ if self.args.jobs:
+ ninja_cmd.extend(['-j', '%d' % self.args.jobs])
+ ninja_cmd.append(target)
+ ret, _, _ = self.Run(ninja_cmd, force_verbose=False, buffer_output=False)
+ return ret
+
+ def Run(self, cmd, env=None, force_verbose=True, buffer_output=True):
+ # This function largely exists so it can be overridden for testing.
+ if self.args.dryrun or self.args.verbose or force_verbose:
+ self.PrintCmd(cmd, env)
+ if self.args.dryrun:
+ return 0, '', ''
+
+ ret, out, err = self.Call(cmd, env=env, buffer_output=buffer_output)
+ if self.args.verbose or force_verbose:
+ if ret:
+ self.Print(' -> returned %d' % ret)
+ if out:
+ self.Print(out, end='')
+ if err:
+ self.Print(err, end='', file=sys.stderr)
+ return ret, out, err
+
+ def Call(self, cmd, env=None, buffer_output=True):
+ if buffer_output:
+ p = subprocess.Popen(cmd, shell=False, cwd=self.chromium_src_dir,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+ env=env)
+ out, err = p.communicate()
+ else:
+ p = subprocess.Popen(cmd, shell=False, cwd=self.chromium_src_dir,
+ env=env)
+ p.wait()
+ out = err = ''
+ return p.returncode, out, err
+
+ def ExpandUser(self, path):
+ # This function largely exists so it can be overridden for testing.
+ return os.path.expanduser(path)
+
+ def Exists(self, path):
+ # This function largely exists so it can be overridden for testing.
+ return os.path.exists(path)
+
+ def Fetch(self, url):
+ # This function largely exists so it can be overridden for testing.
+ f = urllib2.urlopen(url)
+ contents = f.read()
+ f.close()
+ return contents
+
+ def MaybeMakeDirectory(self, path):
+ try:
+ os.makedirs(path)
+ except OSError, e:
+ if e.errno != errno.EEXIST:
+ raise
+
+ def PathJoin(self, *comps):
+    # This function largely exists so it can be overridden for testing.
+ return os.path.join(*comps)
+
+ def Print(self, *args, **kwargs):
+ # This function largely exists so it can be overridden for testing.
+ print(*args, **kwargs)
+ if kwargs.get('stream', sys.stdout) == sys.stdout:
+ sys.stdout.flush()
+
+ def ReadFile(self, path):
+    # This function largely exists so it can be overridden for testing.
+ with open(path) as fp:
+ return fp.read()
+
+ def RelPath(self, path, start='.'):
+    # This function largely exists so it can be overridden for testing.
+ return os.path.relpath(path, start)
+
+ def RemoveFile(self, path):
+    # This function largely exists so it can be overridden for testing.
+ os.remove(path)
+
+ def RemoveDirectory(self, abs_path):
+ if self.platform == 'win32':
+ # In other places in chromium, we often have to retry this command
+ # because we're worried about other processes still holding on to
+ # file handles, but when MB is invoked, it will be early enough in the
+      # build that there should be no other processes to interfere. We
+ # can change this if need be.
+ self.Run(['cmd.exe', '/c', 'rmdir', '/q', '/s', abs_path])
+ else:
+ shutil.rmtree(abs_path, ignore_errors=True)
+
+ def TempFile(self, mode='w'):
+    # This function largely exists so it can be overridden for testing.
+ return tempfile.NamedTemporaryFile(mode=mode, delete=False)
+
+ def WriteFile(self, path, contents, force_verbose=False):
+    # This function largely exists so it can be overridden for testing.
+ if self.args.dryrun or self.args.verbose or force_verbose:
+ self.Print('\nWriting """\\\n%s""" to %s.\n' % (contents, path))
+ with open(path, 'w') as fp:
+ return fp.write(contents)
+
+
+class MBErr(Exception):
+ pass
+
+
+# See http://goo.gl/l5NPDW and http://goo.gl/4Diozm for the painful
+# details of this next section, which handles escaping command lines
+# so that they can be copied and pasted into a cmd window.
+UNSAFE_FOR_SET = set('^<>&|')
+UNSAFE_FOR_CMD = UNSAFE_FOR_SET.union(set('()%'))
+ALL_META_CHARS = UNSAFE_FOR_CMD.union(set('"'))
+
+
+def QuoteForSet(arg):
+ if any(a in UNSAFE_FOR_SET for a in arg):
+ arg = ''.join('^' + a if a in UNSAFE_FOR_SET else a for a in arg)
+ return arg
+
+
+def QuoteForCmd(arg):
+ # First, escape the arg so that CommandLineToArgvW will parse it properly.
+ # From //tools/gyp/pylib/gyp/msvs_emulation.py:23.
+ if arg == '' or ' ' in arg or '"' in arg:
+ quote_re = re.compile(r'(\\*)"')
+ arg = '"%s"' % (quote_re.sub(lambda mo: 2 * mo.group(1) + '\\"', arg))
+
+ # Then check to see if the arg contains any metacharacters other than
+ # double quotes; if it does, quote everything (including the double
+ # quotes) for safety.
+ if any(a in UNSAFE_FOR_CMD for a in arg):
+ arg = ''.join('^' + a if a in ALL_META_CHARS else a for a in arg)
+ return arg
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/chromium/tools/mb/mb_config.pyl b/chromium/tools/mb/mb_config.pyl
new file mode 100644
index 00000000000..e2cfe335e1f
--- /dev/null
+++ b/chromium/tools/mb/mb_config.pyl
@@ -0,0 +1,1842 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ # This is a map of buildbot master names -> buildbot builder names ->
+ # config names (where each config name is a key in the 'configs' dict,
+ # below). MB uses this dict to look up which config to use for a given bot.
+ 'masters': {
+ # Take care when changing any of these builders to ensure that you do not
+ # include a configuration with 'chrome_with_codecs' since these builders
+    # generate publicly advertised non-Official builds which are not allowed
+ # to have proprietary codecs enabled.
+ 'chromium': {
+ 'Android': 'android_without_codecs_gyp_release_bot_minimal_symbols',
+ 'Linux x64': 'noswarming_gn_release_bot',
+ 'Mac': 'noswarming_gyp_release_bot_mac_strip',
+ 'Win': 'noswarming_gyp_release_bot_minimal_symbols_x86',
+ },
+
+ 'chromium.android': {
+ 'Android Cronet ARM64 Builder':
+ 'android_cronet_gn_release_bot_minimal_symbols_arm64',
+ 'Android Cronet ARM64 Builder (dbg)':
+ 'android_cronet_gn_debug_static_bot_arm64',
+ 'Android Cronet ARMv6 Builder':
+ 'android_cronet_gn_release_bot_minimal_symbols_armv6',
+ 'Android Cronet Builder':
+ 'android_cronet_gn_release_bot_minimal_symbols',
+ 'Android Cronet Builder (dbg)': 'android_cronet_gn_debug_static_bot',
+ 'Android Cronet Data Reduction Proxy Builder':
+ 'android_cronet_data_reduction_proxy_gn_release_bot_minimal_symbols',
+ 'Android Cronet MIPS Builder':
+ 'android_cronet_gn_release_bot_minimal_symbols_mipsel',
+ 'Android Cronet x86 Builder':
+ 'android_cronet_gn_release_bot_minimal_symbols_x86',
+ 'Android Cronet x86 Builder (dbg)':
+ 'android_cronet_gn_debug_static_bot_x86',
+
+ 'Android GN Builder (dbg)': 'android_gn_debug_bot_minimal_symbols',
+ 'Android MIPS Builder (dbg)': 'android_gn_debug_static_bot_mipsel',
+ 'Android Swarm Builder':
+ 'swarming_android_gn_release_bot_minimal_symbols',
+ 'Android WebView CTS L-MR1 (dbg)': 'none',
+ 'Android Webview L (dbg)': 'none',
+ 'Android Webview M (dbg)': 'none',
+ 'Android arm Builder (dbg)': 'android_gn_debug_static_bot',
+ 'Android arm64 Builder (dbg)': 'android_gn_debug_static_bot_arm64',
+ 'Android x64 Builder (dbg)': 'android_gn_debug_static_bot_x64',
+ 'Android x86 Builder (dbg)': 'android_gn_debug_static_bot_x86',
+ 'Jelly Bean Tester': 'android_gn_debug_static_bot',
+ 'KitKat Tablet Tester': 'android_gn_debug_static_bot',
+ 'Lollipop Consumer Tester': 'android_gn_debug_static_bot_arm64',
+ 'Lollipop Low-end Tester': 'android_gn_debug_static_bot',
+ 'Lollipop Phone Tester': 'android_gn_debug_static_bot',
+ 'Lollipop Tablet Tester': 'android_gn_debug_static_bot',
+ 'Marshmallow 64 bit Tester': 'android_gn_debug_static_bot_arm64',
+ 'Marshmallow Tablet Tester': 'android_gn_debug_static_bot',
+ },
+
+ 'chromium.chrome': {
+ 'Google Chrome ChromeOS': 'gyp_official_goma_chromeos',
+ 'Google Chrome Linux x64': 'gn_official_goma',
+ 'Google Chrome Mac': 'gyp_official_goma',
+ 'Google Chrome Win': 'gyp_official_goma_minimal_symbols_x86',
+ },
+
+ 'chromium.chromiumos': {
+ 'ChromiumOS amd64-generic Compile': 'tbd',
+ 'ChromiumOS daisy Compile': 'tbd',
+ 'ChromiumOS x86-generic Compile': 'tbd',
+ 'Linux ChromiumOS Builder (dbg)': 'tbd',
+ 'Linux ChromiumOS Full': 'tbd',
+ 'Linux ChromiumOS Ozone Builder': 'tbd',
+
+ 'Linux ChromiumOS Builder': 'swarming_chromeos_gyp_release_bot',
+ 'Linux ChromiumOS GN (dbg)': 'chromeos_gn_debug_bot',
+ 'Linux ChromiumOS GN': 'chromeos_ozone_gn_release_bot',
+ 'Linux ChromiumOS Ozone Tests (1)': 'none',
+ 'Linux ChromiumOS Tests (1)': 'none',
+ 'Linux ChromiumOS Tests (dbg)(1)': 'none',
+ },
+
+ 'chromium.fyi': {
+ 'Blink Linux LSan ASan': 'tbd',
+ 'Browser Side Navigation Linux': 'tbd',
+ 'Chromium Builder': 'tbd',
+ 'Chromium Builder (dbg)': 'tbd',
+ 'Chromium Linux Goma Canary': 'tbd',
+ 'Chromium Linux Goma Canary (clobber)': 'tbd',
+ 'Chromium Linux Precise Goma LinkTest': 'tbd',
+ 'Chromium Linux32 Goma Canary (clobber)': 'tbd',
+ 'Chromium Mac 10.10 MacViews': 'tbd',
+ 'Chromium Mac 10.11': 'tbd',
+ 'Chromium Mac 10.11 Force Mac Toolchain': 'tbd',
+ 'Chromium Mac 10.9 Goma Canary': 'tbd',
+ 'Chromium Mac 10.9 Goma Canary (clobber)': 'tbd',
+ 'Chromium Mac 10.9 Goma Canary (dbg)': 'tbd',
+ 'Chromium Mac 10.9 Goma Canary (dbg)(clobber)': 'tbd',
+ 'Chromium Win 10': 'tbd',
+ 'Chromium Win x64 Clobber': 'tbd',
+ 'ChromiumOS Linux Tests': 'tbd',
+ 'ClangToTAndroidASan':
+ 'android_clang_no_chrome_plugins_asan_gn_debug_bot_minimal_symbols',
+ 'ClangToTLinux': 'tbd',
+ 'ClangToTLinux (dbg)': 'tbd',
+ 'ClangToTLinuxASan': 'tbd',
+ 'ClangToTLinuxUBSanVptr': 'tbd',
+ 'ClangToTMac': 'tbd',
+ 'ClangToTMac (dbg)': 'tbd',
+ 'ClangToTMacASan': 'tbd',
+ 'ClangToTWin': 'tbd',
+ 'ClangToTWin(dbg)': 'tbd',
+ 'ClangToTWin(dll)': 'tbd',
+ 'ClangToTWin64': 'tbd',
+ 'ClangToTWin64(dbg)': 'tbd',
+ 'ClangToTWin64(dll)': 'tbd',
+ 'ClangToTiOS': 'tbd',
+ 'Closure Compilation Linux': 'tbd',
+ 'CrWin7Goma': 'tbd',
+ 'CrWin7Goma(clbr)': 'tbd',
+ 'CrWin7Goma(dbg)': 'tbd',
+ 'CrWin7Goma(dll)': 'tbd',
+ 'CrWinAsan': 'tbd',
+ 'CrWinAsan(dll)': 'tbd',
+ 'CrWinAsanCov': 'tbd',
+ 'CrWinClang(shared)': 'tbd',
+ 'CrWinClang64(dll)': 'tbd',
+ 'CrWinClangGoma': 'tbd',
+ 'CrWinClangLLD': 'tbd',
+ 'CrWinClangLLD64': 'tbd',
+ 'CrWinClngLLD64dbg': 'tbd',
+ 'CrWinClngLLDdbg': 'tbd',
+ 'CrWinGoma': 'tbd',
+ 'CrWinGoma(dll)': 'tbd',
+ 'Linux Trusty': 'tbd',
+ 'Linux Trusty (dbg)': 'tbd',
+ 'Linux V8 API Stability': 'tbd',
+ 'MD Top Chrome ChromeOS material-hybrid': 'tbd',
+ 'MD Top Chrome ChromeOS non-material': 'tbd',
+ 'MD Top Chrome Linux material': 'tbd',
+ 'MD Top Chrome Win material': 'tbd',
+ 'Ozone ECS Linux': 'tbd',
+ 'Windows 8 App Certification': 'tbd',
+ 'Windows Builder (DrMemory)': 'tbd',
+ 'Windows Tests (DrMemory)': 'tbd',
+
+ 'Android ChromeDriver Tests (dbg)': 'none',
+ 'Android Cloud Tests': 'android_gn_debug_static_bot_x86',
+ 'Android Remoting Tests': 'none',
+ 'Android Tests (trial)(dbg)': 'none',
+ 'Android Tests (x86 emulator)': 'android_gn_debug_static_bot_x86',
+ 'Android Asan Builder Tests (dbg)':
+ 'android_asan_gn_debug_bot_minimal_symbols',
+ 'Android Builder (dbg)': 'android_gn_debug_static_bot',
+ 'CFI Linux CF': 'gn_cfi_diag_release_bot',
+ 'CFI Linux ToT': 'gn_cfi_release_bot',
+ 'CFI Linux': 'gn_cfi_release_bot',
+ 'Chromium Win MiniInstaller Tests': 'none',
+ 'Chromium Win PGO Builder': 'gyp_official_winpgo',
+ 'Chromium Win x64 PGO Builder': 'gyp_official_winpgo_x64',
+ 'Chromium Windows Analyze': 'gn_windows_analyze',
+ 'Chromium_iOS_Device': 'ios_gyp',
+ 'Chromium_iOS_Device_(ninja)': 'ios_gyp',
+ 'Chromium_iOS_Simulator_(dbg)': 'ios_gyp',
+ 'CrWinClang':
+ 'swarming_gyp_clang_official_release_bot_minimal_symbols_x86',
+ 'CrWinClang(dbg)':
+ 'swarming_gyp_clang_debug_bot_minimal_symbols_x86',
+ 'CrWinClang64':
+ 'swarming_gyp_clang_official_release_bot_minimal_symbols_x64',
+ 'ClangToTAndroidASan tester': 'none',
+ 'ClangToTLinux tester': 'none',
+ 'ClangToTLinuxASan tester': 'none',
+ 'ClangToTLinuxUBSanVptr tester': 'none',
+ 'ClangToTMac tester': 'none',
+ 'ClangToTMacASan tester': 'none',
+ 'ClangToTWin tester': 'none',
+ 'ClangToTWin(dbg) tester': 'none',
+ 'ClangToTWin(dll) tester': 'none',
+ 'ClangToTWin64 tester': 'none',
+ 'ClangToTWin64(dbg) tester': 'none',
+ 'ClangToTWin64(dll) tester': 'none',
+ 'CrWinAsan tester': 'none',
+ 'CrWinAsan(dll) tester': 'none',
+ 'CrWinAsanCov tester': 'none',
+ 'CrWinClang tester': 'none',
+ 'CrWinClang(dbg) tester': 'none',
+ 'CrWinClang(shared) tester': 'none',
+ 'CrWinClang64 tester': 'none',
+ 'CrWinClang64(dbg) tester': 'none',
+ 'CrWinClang64(dbg)': 'win_clang_debug_bot',
+ 'CrWinClang64(dll) tester': 'none',
+ 'CrWinClangLLD tester': 'none',
+ 'CrWinClangLLD64 tester': 'none',
+ 'CrWinClngLLD64dbg tester': 'none',
+ 'CrWinClngLLDdbg tester': 'none',
+ 'LTO Linux Perf': 'gn_official_goma_lto',
+ 'Libfuzzer Upload Linux ASan': 'gn_release_libfuzzer_asan',
+ 'Libfuzzer Upload Linux MSan': 'gn_release_libfuzzer_msan',
+ 'Libfuzzer Upload Linux UBSan': 'gn_release_libfuzzer_ubsan',
+ 'Linux ARM': 'swarming_gyp_release_bot_arm',
+ 'Site Isolation Linux': 'gn_release_trybot',
+ 'Site Isolation Win': 'gyp_release_trybot_x64',
+ 'Vista Tests (dbg)(1)': 'none',
+ 'Vista Tests (dbg)(2)': 'none',
+ 'Win LKGR (DrM 64)': 'gn_release_drmemory_drfuzz',
+ 'Win LKGR (DrM)': 'gn_release_drmemory_drfuzz_x86',
+ 'Win8 Tests (1)': 'none',
+ 'Win8 Tests (2)': 'none',
+ 'WinClang': 'win_clang_debug_bot',
+ 'Windows Browser (DrMemory light) (1)': 'none',
+ 'Windows Browser (DrMemory light) (2)': 'none',
+ },
+
+ 'chromium.gpu': {
+ 'Android Debug (Nexus 5)': 'android_gyp_debug_static_bot',
+ 'Android Debug (Nexus 6)': 'android_gyp_debug_static_bot',
+ 'Android Debug (Nexus 9)': 'android_gyp_debug_static_bot_arm64',
+ 'GPU Mac Builder': 'tbd',
+ 'GPU Mac Builder (dbg)': 'tbd',
+
+ 'GPU Linux Builder (dbg)': 'swarming_gpu_tests_gn_debug_bot',
+ 'GPU Linux Builder': 'swarming_gpu_tests_gn_release_bot',
+ 'GPU Win Builder':
+ 'swarming_gpu_tests_gyp_release_bot_minimal_symbols_x86',
+ 'GPU Win Builder (dbg)':
+ 'swarming_gpu_tests_gyp_debug_bot_minimal_symbols_x86',
+ 'Linux Debug (NVIDIA)': 'none',
+ 'Linux Release (NVIDIA)': 'none',
+ 'Mac 10.10 Debug (Intel)': 'none',
+ 'Mac 10.10 Release (Intel)': 'none',
+ 'Mac 10.10 Retina Debug (AMD)': 'none',
+ 'Mac 10.10 Retina Release (AMD)': 'none',
+ 'Mac Retina Debug': 'none',
+ 'Mac Retina Release': 'none',
+ 'Win7 Debug (NVIDIA)': 'none',
+ 'Win7 Release (ATI)': 'none',
+ 'Win7 Release (NVIDIA)': 'none',
+ },
+
+ 'chromium.gpu.fyi': {
+ 'GPU Mac Builder': 'tbd',
+ 'GPU Mac Builder (dbg)': 'tbd',
+ 'GPU Win Clang Builder (dbg)': 'tbd',
+ 'Linux Audio': 'tbd',
+ 'Win7 Audio': 'tbd',
+
+ 'GPU Linux Builder (dbg)': 'swarming_gpu_fyi_tests_gn_debug_bot',
+ 'GPU Linux Builder': 'swarming_gpu_fyi_tests_gn_release_bot',
+ 'GPU Win Builder':
+ 'swarming_gpu_tests_deqp_gles_gyp_release_bot_minimal_symbols_x86',
+ 'GPU Win Builder (dbg)':
+ 'swarming_gpu_tests_deqp_gles_gyp_debug_bot_minimal_symbols_x86',
+ 'GPU Win x64 Builder':
+ 'swarming_gpu_tests_deqp_gles_gyp_release_bot_minimal_symbols_x64',
+ 'GPU Win x64 Builder (dbg)':
+ 'swarming_gpu_tests_deqp_gles_gyp_debug_bot_minimal_symbols_x64',
+ 'Linux Debug (NVIDIA)': 'none',
+ 'Linux Debug (New Intel)': 'none',
+ 'Linux Release (ATI)': 'none',
+ 'Linux Release (Intel Graphics Stack)': 'none',
+ 'Linux Release (NVIDIA GeForce 730)': 'none',
+ 'Linux Release (NVIDIA)': 'none',
+ 'Linux Release (New Intel)': 'none',
+ 'Mac 10.10 Debug (ATI)': 'none',
+ 'Mac 10.10 Debug (Intel)': 'none',
+ 'Mac 10.10 Release (ATI)': 'none',
+ 'Mac 10.10 Release (Intel)': 'none',
+ 'Mac 10.10 Retina Debug (AMD)': 'none',
+ 'Mac 10.10 Retina Release (AMD)': 'none',
+ 'Mac Retina Debug': 'none',
+ 'Mac Retina Release': 'none',
+ 'Win7 Debug (ATI)': 'none',
+ 'Win7 Debug (NVIDIA)': 'none',
+ 'Win7 Debug (New Intel)': 'none',
+ 'Win7 Release (ATI)': 'none',
+ 'Win7 Release (Intel)': 'none',
+ 'Win7 Release (NVIDIA GeForce 730)': 'none',
+ 'Win7 Release (NVIDIA)': 'none',
+ 'Win7 Release (New Intel)': 'none',
+ 'Win7 x64 Debug (NVIDIA)': 'none',
+ 'Win7 x64 Release (NVIDIA)': 'none',
+ 'Win8 Debug (NVIDIA)': 'none',
+ 'Win8 Release (NVIDIA)': 'none',
+ },
+
+ 'chromium.linux': {
+ 'Android Arm64 Builder (dbg)':
+ 'swarming_android_gyp_debug_static_bot_arm64',
+ 'Android Builder (dbg)': 'swarming_android_gyp_debug_static_bot',
+ 'Android Builder': 'swarming_android_gyp_release_bot_minimal_symbols',
+ 'Android Clang Builder (dbg)':
+ 'android_clang_asan_findbugs_gyp_debug_bot_minimal_symbols',
+ 'Android GN (dbg)': 'android_gn_debug_bot_minimal_symbols',
+ 'Android GN': 'android_gn_release_bot_minimal_symbols',
+ 'Android Tests (dbg)': 'swarming_android_gyp_debug_static_bot',
+ 'Android Tests': 'swarming_android_gyp_release_bot_minimal_symbols',
+ 'Cast Android (dbg)': 'android_cast_gyp_debug_static_bot',
+ 'Cast Linux': 'cast_gn_release_bot',
+ 'Linux Builder (dbg)': 'swarming_gn_debug_bot',
+ 'Linux Builder (dbg)(32)': 'swarming_gyp_debug_bot_no_symbols_x86',
+ 'Linux Builder': 'swarming_gn_release_bot',
+ 'Linux Tests (dbg)(1)': 'none',
+ 'Linux Tests (dbg)(1)(32)': 'none',
+ 'Linux Tests': 'none',
+ },
+
+ 'chromium.mac': {
+ 'Mac Builder': 'tbd',
+ 'Mac Builder (dbg)': 'tbd',
+
+ 'Mac GN (dbg)': 'gn_debug_static_bot',
+ 'Mac GN': 'gn_release_bot',
+ 'Mac10.10 Tests': 'none',
+ 'Mac10.11 Tests': 'none',
+ 'Mac10.9 Tests (dbg)': 'none',
+ 'Mac10.9 Tests': 'none',
+ 'iOS_Device': 'ios_gyp',
+ 'iOS_Device_(ninja)': 'ios_gyp',
+ 'iOS_Device_GN': 'ios_gn',
+ 'iOS_Simulator_(dbg)': 'ios_gyp',
+ 'iOS_Simulator_GN_(dbg)': 'ios_gn',
+ },
+
+ 'chromium.memory.fyi': {
+ 'Chromium Linux ChromeOS MSan Builder': 'tbd',
+ 'Chromium Linux MSan Builder': 'tbd',
+ 'Chromium Linux TSan Builder': 'tbd',
+
+ 'Chromium Linux Builder (valgrind)': 'gyp_valgrind_release_bot',
+ 'Chromium OS (valgrind)(1)': 'none',
+ 'Chromium OS (valgrind)(2)': 'none',
+ 'Chromium OS (valgrind)(3)': 'none',
+ 'Chromium OS (valgrind)(4)': 'none',
+ 'Chromium OS (valgrind)(5)': 'none',
+ 'Chromium OS (valgrind)(6)': 'none',
+ 'Chromium OS Builder (valgrind)': 'gyp_valgrind_chromeos_release_bot',
+ 'Chromium Windows Builder (DrMemory x64)':
+ 'gyp_drmemory_shared_release_x64',
+ 'Chromium Windows Builder (DrMemory)': 'gyp_drmemory_shared_release_x86',
+ 'Linux ChromeOS MSan Tests': 'none',
+ 'Linux MSan Tests': 'none',
+ 'Linux TSan Tests': 'none',
+ 'Linux Tests (valgrind)(1)': 'none',
+ 'Linux Tests (valgrind)(2)': 'none',
+ 'Linux Tests (valgrind)(3)': 'none',
+ 'Linux Tests (valgrind)(4)': 'none',
+ 'Linux Tests (valgrind)(5)': 'none',
+ 'Windows Browser (DrMemory full) (1)': 'none',
+ 'Windows Browser (DrMemory full) (10)': 'none',
+ 'Windows Browser (DrMemory full) (11)': 'none',
+ 'Windows Browser (DrMemory full) (12)': 'none',
+ 'Windows Browser (DrMemory full) (2)': 'none',
+ 'Windows Browser (DrMemory full) (3)': 'none',
+ 'Windows Browser (DrMemory full) (4)': 'none',
+ 'Windows Browser (DrMemory full) (5)': 'none',
+ 'Windows Browser (DrMemory full) (6)': 'none',
+ 'Windows Browser (DrMemory full) (7)': 'none',
+ 'Windows Browser (DrMemory full) (8)': 'none',
+ 'Windows Browser (DrMemory full) (9)': 'none',
+ 'Windows Content Browser (DrMemory full) (1)': 'none',
+ 'Windows Content Browser (DrMemory full) (2)': 'none',
+ 'Windows Content Browser (DrMemory full) (3)': 'none',
+ 'Windows Content Browser (DrMemory full) (4)': 'none',
+ 'Windows Content Browser (DrMemory full) (5)': 'none',
+ 'Windows Content Browser (DrMemory full) (6)': 'none',
+ 'Windows Content Browser (DrMemory)': 'none',
+ 'Windows Unit (DrMemory full) (1)': 'none',
+ 'Windows Unit (DrMemory full) (2)': 'none',
+ 'Windows Unit (DrMemory full) (3)': 'none',
+ 'Windows Unit (DrMemory full) (4)': 'none',
+ 'Windows Unit (DrMemory full) (5)': 'none',
+ 'Windows Unit (DrMemory x64)': 'none',
+ 'Windows Unit (DrMemory)': 'none',
+ },
+
+ 'chromium.mojo': {
+ 'Chromium Mojo Android': 'android_gn_release_bot',
+ 'Chromium Mojo Linux': 'gn_release_bot',
+ 'Chromium Mojo Windows': 'gn_release_bot_minimal_symbols_x86',
+ },
+
+ 'chromium.perf': {
+ 'Android Builder': 'gn_official_goma_minimal_symbols_android',
+ 'Android Galaxy S5 Perf (1)': 'none',
+ 'Android Galaxy S5 Perf (2)': 'none',
+ 'Android Galaxy S5 Perf (3)': 'none',
+ 'Android Nexus5 Perf (1)': 'none',
+ 'Android Nexus5 Perf (2)': 'none',
+ 'Android Nexus5 Perf (3)': 'none',
+ 'Android Nexus5X Perf (1)': 'none',
+ 'Android Nexus5X Perf (2)': 'none',
+ 'Android Nexus5X Perf (3)': 'none',
+ 'Android Nexus6 Perf (1)': 'none',
+ 'Android Nexus6 Perf (2)': 'none',
+ 'Android Nexus6 Perf (3)': 'none',
+ 'Android Nexus7v2 Perf (1)': 'none',
+ 'Android Nexus7v2 Perf (2)': 'none',
+ 'Android Nexus7v2 Perf (3)': 'none',
+ 'Android Nexus9 Perf (1)': 'none',
+ 'Android Nexus9 Perf (2)': 'none',
+ 'Android Nexus9 Perf (3)': 'none',
+ 'Android One Perf (1)': 'none',
+ 'Android One Perf (2)': 'none',
+ 'Android One Perf (3)': 'none',
+ 'Android arm64 Builder': 'gn_official_goma_minimal_symbols_android_arm64',
+ 'Linux Builder': 'gn_official_goma',
+ 'Linux Perf (1)': 'none',
+ 'Linux Perf (2)': 'none',
+ 'Linux Perf (3)': 'none',
+ 'Linux Perf (4)': 'none',
+ 'Linux Perf (5)': 'none',
+ 'Mac 10.10 Perf (1)': 'none',
+ 'Mac 10.10 Perf (2)': 'none',
+ 'Mac 10.10 Perf (3)': 'none',
+ 'Mac 10.10 Perf (4)': 'none',
+ 'Mac 10.10 Perf (5)': 'none',
+ 'Mac 10.11 Perf (1)': 'none',
+ 'Mac 10.11 Perf (2)': 'none',
+ 'Mac 10.11 Perf (3)': 'none',
+ 'Mac 10.11 Perf (4)': 'none',
+ 'Mac 10.11 Perf (5)': 'none',
+ 'Mac Builder': 'gyp_official_goma',
+ 'Mac HDD Perf (1)': 'none',
+ 'Mac HDD Perf (2)': 'none',
+ 'Mac HDD Perf (3)': 'none',
+ 'Mac HDD Perf (4)': 'none',
+ 'Mac HDD Perf (5)': 'none',
+ 'Mac Retina Perf (1)': 'none',
+ 'Mac Retina Perf (2)': 'none',
+ 'Mac Retina Perf (3)': 'none',
+ 'Mac Retina Perf (4)': 'none',
+ 'Mac Retina Perf (5)': 'none',
+ 'Win 10 Perf (1)': 'none',
+ 'Win 10 Perf (2)': 'none',
+ 'Win 10 Perf (3)': 'none',
+ 'Win 10 Perf (4)': 'none',
+ 'Win 10 Perf (5)': 'none',
+ 'Win 7 ATI GPU Perf (1)': 'none',
+ 'Win 7 ATI GPU Perf (2)': 'none',
+ 'Win 7 ATI GPU Perf (3)': 'none',
+ 'Win 7 ATI GPU Perf (4)': 'none',
+ 'Win 7 ATI GPU Perf (5)': 'none',
+ 'Win 7 Intel GPU Perf (1)': 'none',
+ 'Win 7 Intel GPU Perf (2)': 'none',
+ 'Win 7 Intel GPU Perf (3)': 'none',
+ 'Win 7 Intel GPU Perf (4)': 'none',
+ 'Win 7 Intel GPU Perf (5)': 'none',
+ 'Win 7 Low-End Perf (1)': 'none',
+ 'Win 7 Low-End Perf (2)': 'none',
+ 'Win 7 Nvidia GPU Perf (1)': 'none',
+ 'Win 7 Nvidia GPU Perf (2)': 'none',
+ 'Win 7 Nvidia GPU Perf (3)': 'none',
+ 'Win 7 Nvidia GPU Perf (4)': 'none',
+ 'Win 7 Nvidia GPU Perf (5)': 'none',
+ 'Win 7 Perf (1)': 'none',
+ 'Win 7 Perf (2)': 'none',
+ 'Win 7 Perf (3)': 'none',
+ 'Win 7 Perf (4)': 'none',
+ 'Win 7 Perf (5)': 'none',
+ 'Win 7 x64 Perf (1)': 'none',
+ 'Win 7 x64 Perf (2)': 'none',
+ 'Win 7 x64 Perf (3)': 'none',
+ 'Win 7 x64 Perf (4)': 'none',
+ 'Win 7 x64 Perf (5)': 'none',
+ 'Win 8 Perf (1)': 'none',
+ 'Win 8 Perf (2)': 'none',
+ 'Win 8 Perf (3)': 'none',
+ 'Win 8 Perf (4)': 'none',
+ 'Win 8 Perf (5)': 'none',
+ 'Win Builder': 'gyp_official_goma_minimal_symbols_x86',
+ 'Win Zenbook Perf (1)': 'none',
+ 'Win Zenbook Perf (2)': 'none',
+ 'Win Zenbook Perf (3)': 'none',
+ 'Win Zenbook Perf (4)': 'none',
+ 'Win Zenbook Perf (5)': 'none',
+ 'Win x64 Builder': 'gyp_official_goma_minimal_symbols_x64',
+ },
+
+ 'client.skia': {
+ 'Linux Builder': 'swarming_gn_release_bot',
+ 'Linux Builder-Trybot': 'swarming_gn_release_bot',
+ 'Linux Tests': 'swarming_gn_release_bot',
+ 'Win Builder':
+ 'swarming_gpu_tests_gyp_release_bot_minimal_symbols_x86',
+ 'Win Builder-Trybot':
+ 'swarming_gpu_tests_gyp_release_trybot_minimal_symbols_x86',
+ },
+
+ 'client.v8.fyi': {
+ 'Linux Debug Builder': 'gn_debug_bot',
+ 'V8 Android GN (dbg)': 'android_gn_debug_bot',
+ 'V8 Linux GN': 'gn_release_bot',
+ },
+
+ 'chromium.webkit': {
+ 'Android Builder': 'gyp_release_bot_android',
+ 'WebKit Android (Nexus4)': 'gyp_release_bot_android',
+ 'WebKit Linux (dbg)': 'swarming_gn_debug_bot_x64',
+ 'WebKit Linux ASAN': 'swarming_gn_asan_lsan_release_bot_x64',
+ 'WebKit Linux Leak': 'swarming_gn_release_bot_x64',
+ 'WebKit Linux MSAN': 'swarming_gn_msan_release_bot_x64',
+ 'WebKit Linux Trusty': 'swarming_gn_release_bot_x64',
+ 'WebKit Linux': 'swarming_gn_release_bot_x64',
+ 'WebKit Mac Builder (dbg)': 'swarming_gyp_debug_bot_x64',
+ 'WebKit Mac Builder': 'swarming_gyp_release_bot_x64',
+ 'WebKit Mac10.10': 'none',
+ 'WebKit Mac10.11 (dbg)': 'none',
+ 'WebKit Mac10.11 (retina)': 'swarming_gyp_release_bot_x64',
+ 'WebKit Mac10.11': 'none',
+ 'WebKit Mac10.9': 'none',
+ 'WebKit Win Builder (dbg)': 'swarming_gyp_debug_bot_minimal_symbols_x86',
+ 'WebKit Win Builder': 'swarming_gyp_release_bot_minimal_symbols_x86',
+ 'WebKit Win x64 Builder (dbg)':
+ 'swarming_gyp_debug_bot_minimal_symbols_x64',
+ 'WebKit Win x64 Builder': 'swarming_gyp_release_bot_minimal_symbols_x64',
+ 'WebKit Win10': 'none',
+ 'WebKit Win7 (dbg)': 'none',
+ 'WebKit Win7': 'none',
+ },
+
+ 'chromium.webrtc.fyi': {
+ 'Android Builder (dbg)': 'tbd',
+ 'Android Builder ARM64 (dbg)': 'tbd',
+ 'Mac Builder': 'tbd',
+ 'Win Builder': 'tbd',
+
+ 'Android GN (dbg)': 'android_gn_debug_bot',
+ 'Android GN': 'android_gn_release_bot',
+ 'Android Tests (dbg) (K Nexus5)': 'none',
+ 'Android Tests (dbg) (L Nexus5)': 'none',
+ 'Android Tests (dbg) (L Nexus6)': 'none',
+ 'Android Tests (dbg) (L Nexus7.2)': 'none',
+ 'Android Tests (dbg) (L Nexus9)': 'none',
+ 'Linux Builder': 'gn_release_bot_chrome_with_codecs',
+ 'Linux Tester': 'none',
+ 'Mac GN (dbg)': 'gn_debug_static_bot_chrome_with_codecs',
+ 'Mac GN': 'gn_release_bot_chrome_with_codecs',
+ 'Mac Tester': 'none',
+ 'Win x64 GN (dbg)': 'gn_debug_bot_minimal_symbols_chrome_with_codecs',
+ 'Win x64 GN': 'gn_release_bot_minimal_symbols_chrome_with_codecs',
+ 'Win10 Tester': 'none',
+ 'Win7 Tester': 'none',
+ },
+
+ 'chromium.win': {
+ # Windows bots take too long to link w/ full symbols and time out.
+ 'Win Builder': 'swarming_gyp_release_bot_minimal_symbols_x86',
+ 'Win Builder (dbg)': 'swarming_gyp_debug_bot_minimal_symbols_x86',
+ 'Win x64 Builder': 'swarming_gyp_release_bot_minimal_symbols_x64',
+ 'Win x64 Builder (dbg)': 'swarming_gyp_debug_bot_minimal_symbols_x64',
+ 'Win x64 GN (dbg)': 'gn_debug_bot_minimal_symbols',
+ 'Win x64 GN': 'gn_release_bot_minimal_symbols',
+ 'Win 7 Tests x64 (1)': 'none',
+ 'Win10 Tests x64': 'none',
+ 'Win7 (32) Tests': 'none',
+ 'Win7 Tests (1)': 'none',
+ 'Win7 Tests (dbg)(1)': 'none',
+ 'Win8 Aura': 'gn_release_bot_minimal_symbols_x86',
+ 'Win8 GN (dbg)': 'gn_debug_bot_minimal_symbols_x86',
+ },
+
+ 'official.desktop': {
+ 'blimp-engine': 'gn_blimp_debug',
+ 'mac64': 'gyp_official',
+ 'precise64': 'gn_official',
+
+ # Currently the official bots set mini_installer_official_deps=1
+ # but it's not clear if that's actually used anywhere.
+ 'win': 'gyp_official',
+ 'win-asan': 'gyp_official_syzyasan',
+ 'win-pgo': 'gyp_official_winpgo',
+ 'win64': 'gyp_official_x64',
+ 'win64-pgo': 'gyp_official_winpgo_x64',
+ },
+
+ 'official.desktop.continuous': {
+ 'mac beta': 'gyp_official',
+ 'mac stable': 'gyp_official',
+ 'mac trunk': 'gyp_official',
+ 'precise64 beta': 'gn_official',
+ 'precise64 stable': 'gn_official',
+ 'precise64 trunk': 'gn_official',
+ 'win beta': 'gyp_official',
+ 'win stable': 'gyp_official',
+ 'win trunk': 'gyp_official',
+ },
+
+ 'tryserver.blink': {
+ 'linux_chromium_gn_rel': 'tbd',
+
+ 'blink_presubmit': 'none',
+ 'linux_blink_compile_dbg':
+ 'swarming_gn_debug_bot_minimal_symbols_x64',
+ 'linux_blink_compile_rel':
+ 'swarming_gn_release_trybot_minimal_symbols_x64',
+ 'linux_blink_dbg':
+ 'swarming_gn_debug_bot_minimal_symbols_x64',
+ 'linux_blink_rel':
+ 'swarming_gn_release_trybot_minimal_symbols_x64',
+ 'linux_blink_rel_ng':
+ 'swarming_gn_release_trybot_minimal_symbols_x64',
+ 'mac_blink_compile_dbg':
+ 'swarming_gyp_debug_bot_minimal_symbols_x64',
+ 'mac_blink_compile_rel':
+ 'swarming_gyp_release_bot_minimal_symbols_x64',
+ 'mac_blink_dbg':
+ 'swarming_gyp_debug_bot_minimal_symbols_x64',
+ 'mac_blink_rel': 'swarming_gyp_release_trybot_minimal_symbols_x64',
+ 'win_blink_compile_dbg': 'swarming_gyp_debug_bot_minimal_symbols_x86',
+ 'win_blink_compile_rel':
+ 'swarming_gyp_release_trybot_minimal_symbols_x86',
+ 'win_blink_dbg': 'swarming_gyp_debug_bot_minimal_symbols_x86',
+ 'win_blink_rel': 'swarming_gyp_release_trybot_minimal_symbols_x86',
+ },
+
+ 'tryserver.chromium.android': {
+ 'android_amp': 'tbd',
+ 'android_archive_rel_ng': 'tbd',
+ 'android_coverage': 'tbd',
+ 'android_swarming_rel': 'tbd',
+ 'cast_shell_android': 'tbd',
+
+ 'android_arm64_dbg_recipe': 'swarming_android_gyp_debug_trybot_arm64',
+ 'android_blink_rel': 'swarming_android_gyp_release_trybot',
+ 'android_chromium_gn_compile_dbg': 'android_gn_debug_trybot',
+ 'android_chromium_gn_compile_rel': 'android_gn_release_trybot',
+ 'android_chromium_gn_rel': 'android_gn_release_trybot',
+ 'android_chromium_variable': 'findit',
+ 'android_chromium_variable_archive': 'findit',
+ 'android_chromium_variable_arm64': 'findit',
+ 'android_chromium_variable_cast_shell': 'findit',
+ 'android_chromium_variable_clang': 'findit',
+ 'android_chromium_variable_gn': 'findit',
+ 'android_chromium_variable_nexus4': 'findit',
+ 'android_clang_dbg_recipe': 'android_clang_asan_findbugs_gyp_debug_trybot',
+ 'android_compile_dbg': 'swarming_android_gyp_debug_trybot',
+ 'android_compile_mips_dbg': 'android_gn_debug_trybot_mipsel',
+ 'android_compile_rel': 'swarming_android_gyp_release_trybot',
+ 'android_compile_x64_dbg': 'android_gn_debug_trybot_x64',
+ 'android_compile_x86_dbg': 'android_gn_debug_trybot_x86',
+ # TODO(crbug/597596): Switch this back to debug_trybot when cronet's
+ # shared library loading is fixed.
+ 'android_cronet_tester': 'android_cronet_gn_debug_static_bot',
+ 'linux_android_dbg_ng': 'swarming_android_gyp_debug_trybot',
+ 'linux_android_rel_ng': 'swarming_android_gyp_release_trybot',
+ },
+
+ 'tryserver.chromium.angle': {
+ 'mac_angle_dbg_ng': 'tbd',
+ 'mac_angle_rel_ng': 'tbd',
+
+ 'linux_angle_dbg_ng': 'swarming_gpu_fyi_tests_gn_debug_trybot',
+ 'linux_angle_rel_ng': 'swarming_gpu_fyi_tests_gn_release_trybot',
+ 'win_angle_dbg_ng':
+ 'swarming_gpu_tests_deqp_gles_gyp_debug_bot_minimal_symbols_x86',
+ 'win_angle_rel_ng':
+ 'swarming_gpu_tests_deqp_gles_gyp_release_trybot_minimal_symbols_x86',
+ 'win_angle_x64_dbg_ng':
+ 'swarming_gpu_tests_deqp_gles_gyp_debug_bot_minimal_symbols_x64',
+ 'win_angle_x64_rel_ng':
+ 'swarming_gpu_tests_deqp_gles_gyp_release_trybot_minimal_symbols_x64',
+ },
+
+ 'tryserver.chromium.linux': {
+ 'Chromium Linux Codesearch Builder': 'tbd',
+ 'ChromiumOS Codesearch Builder': 'tbd',
+ 'chromeos_amd64-generic_chromium_compile_only_ng': 'tbd',
+ 'chromeos_daisy_chromium_compile_only_ng': 'tbd',
+ 'chromeos_x86-generic_chromium_compile_only_ng': 'tbd',
+ 'linux_chromium_browser_side_navigation_rel': 'tbd',
+ 'linux_chromium_chromeos_asan_rel_ng': 'tbd',
+ 'linux_chromium_chromeos_asan_variable': 'tbd',
+ 'linux_chromium_chromeos_compile_dbg_ng': 'tbd',
+ 'linux_chromium_chromeos_dbg_ng': 'tbd',
+ 'linux_chromium_chromeos_msan_rel_ng': 'tbd',
+ 'linux_chromium_chromeos_ozone_rel_ng': 'tbd',
+ 'linux_chromium_chromeos_variable': 'tbd',
+ 'linux_chromium_variable': 'tbd',
+
+ 'cast_shell_linux': 'cast_gn_release_trybot',
+ 'chromeos_amd64-generic_variable': 'findit',
+ 'chromeos_daisy_variable': 'findit',
+ 'chromeos_x86-generic_variable': 'findit',
+ 'chromium_presubmit': 'none',
+ 'linux_arm': 'swarming_gyp_release_trybot_arm',
+ 'linux_chromium_archive_rel_ng': 'noswarming_gn_release_bot',
+ 'linux_chromium_asan_rel_ng': 'swarming_asan_lsan_gyp_release_trybot',
+ 'linux_chromium_asan_variable': 'findit',
+ 'linux_chromium_cast_variable': 'findit',
+ 'linux_chromium_cfi_rel_ng': 'gn_cfi_release_trybot',
+ 'linux_chromium_chromeos_compile_rel_ng':
+ 'swarming_chromeos_gyp_release_trybot',
+ 'linux_chromium_chromeos_rel_ng': 'swarming_chromeos_gyp_release_trybot',
+ 'linux_chromium_chromeos_variable_chrome': 'findit',
+ 'linux_chromium_clobber_rel_ng': 'gn_release_trybot',
+ 'linux_chromium_compile_dbg_32_ng': 'swarming_gyp_debug_trybot_x86',
+ 'linux_chromium_compile_dbg_ng': 'swarming_gn_debug_trybot',
+ 'linux_chromium_compile_rel_ng': 'swarming_gn_release_trybot',
+ 'linux_chromium_dbg_32_ng': 'swarming_gyp_debug_trybot_x86',
+ 'linux_chromium_dbg_ng': 'swarming_gn_debug_trybot',
+ 'linux_chromium_gn_chromeos_dbg': 'chromeos_gn_debug_bot',
+ 'linux_chromium_gn_chromeos_rel': 'chromeos_ozone_gn_release_trybot',
+ 'linux_chromium_gn_chromeos_variable': 'findit',
+ 'linux_chromium_gn_upload': 'gn_linux_upload',
+ 'linux_chromium_msan_rel_ng': 'swarming_msan_gyp_release_trybot',
+ 'linux_chromium_practice_rel_ng': 'gyp_release_trybot',
+ 'linux_chromium_rel_ng': 'swarming_gpu_tests_gn_release_trybot',
+ 'linux_chromium_tsan_rel_ng': 'swarming_tsan_gyp_release_trybot',
+ 'linux_chromium_variable_32': 'findit',
+ 'linux_chromium_variable_chrome': 'findit',
+ 'linux_chromium_variable_clobber': 'findit',
+ 'linux_chromium_variable_32_chrome': 'findit',
+ 'linux_chromium_variable_32_clobber': 'findit',
+ 'linux_chromium_webkit_asan_variable': 'findit',
+ 'linux_chromium_webkit_leak_variable': 'findit',
+ 'linux_chromium_webkit_msan_variable': 'findit',
+ 'linux_chromium_webkit_variable': 'findit',
+ 'linux_ecs_ozone': 'embedded_gyp_debug_bot',
+ 'linux_full_bisect_builder': 'swarming_gyp_release_bot',
+ 'linux_nacl_sdk': 'nacl_annotator',
+ 'linux_nacl_sdk_build': 'nacl_annotator',
+ 'linux_optional_gpu_tests_rel':
+ 'swarming_gpu_fyi_tests_gn_release_trybot',
+ 'linux_site_isolation': 'gn_release_trybot',
+ 'linux_upload_clang': 'gn_release_bot',
+ 'linux_valgrind': 'gyp_valgrind_release_bot',
+ },
+
+ 'tryserver.chromium.mac': {
+ 'mac_chromium_10.10_rel_ng': 'tbd',
+ 'mac_chromium_archive_rel_ng': 'tbd',
+ 'mac_chromium_asan_rel_ng': 'tbd',
+ 'mac_chromium_asan_variable': 'tbd',
+ 'mac_chromium_compile_dbg_ng': 'tbd',
+ 'mac_chromium_compile_rel_ng': 'tbd',
+ 'mac_chromium_dbg_ng': 'tbd',
+ 'mac_chromium_rel_ng': 'tbd',
+ 'mac_optional_gpu_tests_rel': 'tbd',
+
+ 'ios_dbg_simulator': 'ios_gyp',
+ 'ios_dbg_simulator_gn': 'ios_gn',
+ 'ios_dbg_simulator_ninja': 'ios_gyp',
+ 'ios_rel_device': 'ios_gyp',
+ 'ios_rel_device_gn': 'ios_gn',
+ 'ios_rel_device_ninja': 'ios_gyp',
+ 'mac_chromium_gn_dbg': 'gn_debug_static_bot',
+ 'mac_chromium_gn_rel': 'gn_release_trybot',
+ 'mac_chromium_gn_upload': 'gn_release_bot',
+ 'mac_chromium_variable': 'findit',
+ 'mac_chromium_variable_10.10': 'findit',
+ 'mac_chromium_variable_10.10_layout': 'findit',
+ 'mac_chromium_variable_archive': 'findit',
+ 'mac_chromium_variable_chrome': 'findit',
+ 'mac_chromium_variable_gn': 'findit',
+ 'mac_chromium_variable_layout': 'findit',
+ 'mac_nacl_sdk': 'nacl_annotator',
+ 'mac_nacl_sdk_build': 'nacl_annotator',
+ 'mac_upload_clang': 'gn_release_bot',
+ },
+
+ 'tryserver.chromium.perf': {
+ 'android_arm64_perf_bisect_builder':
+ 'gn_official_goma_minimal_symbols_android_arm64',
+ 'android_fyi_perf_bisect': 'gn_official_goma_minimal_symbols_android',
+ 'android_nexus5X_perf_bisect':
+ 'gn_official_goma_minimal_symbols_android',
+ 'android_nexus5_perf_bisect': 'gn_official_goma_minimal_symbols_android',
+ 'android_nexus6_perf_bisect': 'gn_official_goma_minimal_symbols_android',
+ 'android_nexus7_perf_bisect': 'gn_official_goma_minimal_symbols_android',
+ 'android_nexus9_perf_bisect':
+ 'gn_official_goma_minimal_symbols_android_arm64',
+ 'android_one_perf_bisect': 'gn_official_goma_minimal_symbols_android',
+ 'android_perf_bisect_builder':
+ 'gn_official_goma_minimal_symbols_android',
+ 'android_s5_perf_bisect': 'gn_official_goma_minimal_symbols_android',
+ 'android_s5_perf_cq': 'gn_official_goma_minimal_symbols_android',
+ 'android_webview_aosp_perf_bisect':
+ 'gn_official_goma_minimal_symbols_android',
+ 'linux_fyi_perf_bisect': 'gn_official_goma',
+ 'linux_perf_bisect': 'gn_official_goma',
+ 'linux_perf_bisect_builder': 'gn_official_goma',
+ 'linux_perf_cq': 'gn_official_goma',
+ 'mac_10_10_perf_bisect': 'gyp_official_goma',
+ 'mac_10_11_perf_bisect': 'gyp_official_goma',
+ 'mac_fyi_perf_bisect': 'gyp_official_goma',
+ 'mac_hdd_perf_bisect': 'gyp_official_goma',
+ 'mac_perf_bisect_builder': 'gyp_official_goma',
+ 'mac_retina_perf_bisect': 'gyp_official_goma',
+ 'mac_retina_perf_cq': 'gyp_official_goma',
+ 'win_8_perf_bisect': 'gyp_official_goma_minimal_symbols_x86',
+ 'win_fyi_perf_bisect': 'gyp_official_goma_minimal_symbols_x86',
+ 'win_perf_bisect': 'gyp_official_goma_minimal_symbols_x86',
+ 'win_perf_bisect_builder': 'gyp_official_goma_minimal_symbols_x86',
+ 'win_x64_perf_bisect': 'gyp_official_goma_minimal_symbols_x64',
+ 'winx64_10_perf_bisect': 'gyp_official_goma_minimal_symbols_x64',
+ 'winx64_10_perf_cq': 'gyp_official_goma_minimal_symbols_x64',
+ 'winx64_bisect_builder': 'gyp_official_goma_minimal_symbols_x64',
+ 'winx64_zen_perf_bisect': 'gyp_official_goma_minimal_symbols_x64',
+ 'winx64ati_perf_bisect': 'gyp_official_goma_minimal_symbols_x64',
+ 'winx64intel_perf_bisect': 'gyp_official_goma_minimal_symbols_x64',
+ 'winx64nvidia_perf_bisect': 'gyp_official_goma_minimal_symbols_x64',
+ },
+
+ 'tryserver.chromium.win': {
+ 'win10_chromium_x64_rel_ng':
+ 'swarming_gyp_release_trybot_minimal_symbols_x64',
+ 'win8_chromium_gn_dbg': 'gn_debug_bot_minimal_symbols_x86',
+ 'win8_chromium_gn_upload': 'gn_release_bot_minimal_symbols_x86',
+ 'win8_chromium_ng': 'gn_release_trybot_x86',
+ 'win_archive': 'noswarming_gyp_release_trybot_minimal_symbols_x86',
+ 'win_chromium_compile_dbg_ng':
+ 'swarming_gyp_debug_bot_minimal_symbols_x86',
+ 'win_chromium_compile_rel_ng':
+ 'swarming_gpu_tests_gyp_release_trybot_minimal_symbols_x86',
+ 'win_chromium_gn_x64_dbg': 'gn_debug_bot_minimal_symbols',
+ 'win_chromium_gn_x64_rel': 'gn_release_trybot',
+ 'win_chromium_dbg_ng': 'swarming_gyp_debug_bot_minimal_symbols_x86',
+ 'win_chromium_rel_ng':
+ 'swarming_gpu_tests_gyp_release_trybot_minimal_symbols_x86',
+ 'win_chromium_syzyasan_rel':
+ 'swarming_gyp_syzyasan_release_trybot_minimal_symbols_x86',
+ 'win_chromium_variable': 'findit',
+ 'win_chromium_variable_archive': 'findit',
+ 'win_chromium_variable_chrome': 'findit',
+ 'win_chromium_variable_gn': 'findit',
+ 'win_chromium_variable_webkit_builder': 'findit',
+ 'win_chromium_variable_webkit_layout': 'findit',
+ 'win_chromium_variable_webkit_win7_builder': 'findit',
+ 'win_chromium_variable_webkit_win7_builder_x64': 'findit',
+ 'win_chromium_x64_rel_ng':
+ 'swarming_gyp_release_trybot_minimal_symbols_x64',
+ 'win_clang': 'win_clang_debug_bot',
+ 'win_clang_dbg': 'swarming_gyp_clang_debug_bot_minimal_symbols_x86',
+ 'win_clang_rel':
+ 'swarming_gyp_clang_official_release_trybot_minimal_symbols_x86',
+ 'win_clang_x64_dbg': 'win_clang_debug_bot',
+ 'win_clang_x64_rel':
+ 'swarming_gyp_clang_official_release_trybot_minimal_symbols_x64',
+ 'win_nacl_sdk': 'nacl_annotator',
+ 'win_nacl_sdk_build': 'nacl_annotator',
+ 'win_optional_gpu_tests_rel':
+ 'swarming_gpu_tests_deqp_gles_gyp_release_trybot_minimal_symbols_x86',
+ 'win_pgo': 'gyp_official_winpgo',
+ 'win_upload_clang': 'gn_release_bot',
+ },
+
+ 'tryserver.v8': {
+ 'v8_android_chromium_gn_dbg': 'android_gn_debug_bot',
+ 'v8_linux_blink_rel': 'swarming_gn_release_trybot_minimal_symbols_x64',
+ 'v8_linux_chromium_gn_rel': 'gn_release_trybot',
+ },
+ },
+
+
+ # This is the list of configs that you can pass to mb; each config
+ # represents a particular combination of GYP_DEFINES/gn args that
+ # we must support. A given config *may* be platform-specific but
+ # is not necessarily so (i.e., we might have mac, win, and linux
+ # bots all using the 'gn_release_bot' config).
+ 'configs': {
+ 'android_asan_gn_debug_bot_minimal_symbols': [
+ 'android', 'asan', 'gn', 'debug_bot_minimal_symbols',
+ ],
+
+ 'android_cast_gyp_debug_static_bot': [
+ 'android', 'cast', 'gyp', 'debug_static_bot',
+ ],
+
+ 'android_clang_no_chrome_plugins_asan_gn_debug_bot_minimal_symbols': [
+ 'android', 'clang_no_chrome_plugins', 'asan', 'gn',
+ 'debug_bot_minimal_symbols',
+ ],
+
+ 'android_clang_asan_findbugs_gyp_debug_bot_minimal_symbols': [
+ 'android', 'clang', 'asan', 'findbugs', 'gyp',
+ 'debug_bot_minimal_symbols',
+ ],
+
+ 'android_clang_asan_findbugs_gyp_debug_trybot': [
+ 'android', 'clang', 'asan', 'findbugs', 'gyp', 'debug_trybot',
+ ],
+
+ 'android_cronet_data_reduction_proxy_gn_release_bot_minimal_symbols': [
+ 'android', 'cronet', 'data_reduction_proxy', 'gn',
+ 'release_bot_minimal_symbols', 'arm'
+ ],
+
+ 'android_cronet_gn_debug_static_bot': [
+ 'android', 'cronet', 'gn', 'debug_static_bot', 'arm'
+ ],
+
+ 'android_cronet_gn_debug_static_bot_arm64': [
+ 'android', 'cronet', 'gn', 'debug_static_bot', 'arm64'
+ ],
+
+ 'android_cronet_gn_debug_static_bot_x86': [
+ 'android', 'cronet', 'gn', 'debug_static_bot', 'x86'
+ ],
+
+ 'android_cronet_gn_release_bot_minimal_symbols': [
+ 'android', 'cronet', 'gn', 'release_bot_minimal_symbols', 'arm'
+ ],
+
+ 'android_cronet_gn_release_bot_minimal_symbols_arm64': [
+ 'android', 'cronet', 'gn', 'release_bot_minimal_symbols', 'arm64'
+ ],
+
+ 'android_cronet_gn_release_bot_minimal_symbols_armv6': [
+ 'android', 'cronet', 'gn', 'release_bot_minimal_symbols', 'armv6'
+ ],
+
+ 'android_cronet_gn_release_bot_minimal_symbols_mipsel': [
+ 'android', 'cronet', 'gn', 'release_bot_minimal_symbols', 'mipsel'
+ ],
+
+ 'android_cronet_gn_release_bot_minimal_symbols_x86': [
+ 'android', 'cronet', 'gn', 'release_bot_minimal_symbols', 'x86'
+ ],
+
+ 'android_gn_debug_bot': [
+ 'android', 'gn', 'debug_bot',
+ ],
+
+ 'android_gn_debug_bot_minimal_symbols': [
+ 'android', 'gn', 'debug_bot_minimal_symbols',
+ ],
+
+ 'android_gn_debug_static_bot': [
+ 'android', 'gn', 'debug_static_bot',
+ ],
+
+ 'android_gn_debug_static_bot_arm64': [
+ 'android', 'gn', 'debug_static_bot', 'arm64',
+ ],
+
+ 'android_gn_debug_static_bot_mipsel': [
+ 'android', 'gn', 'debug_static_bot', 'mipsel',
+ ],
+
+ 'android_gn_debug_static_bot_x64': [
+ 'android', 'gn', 'debug_static_bot', 'x64',
+ ],
+
+ 'android_gn_debug_static_bot_x86': [
+ 'android', 'gn', 'debug_static_bot', 'x86',
+ ],
+
+ 'android_gn_debug_trybot': [
+ 'android', 'gn', 'debug_trybot',
+ ],
+
+ 'android_gn_debug_trybot_mipsel': [
+ 'android', 'gn', 'debug_trybot', 'mipsel',
+ ],
+
+ 'android_gn_debug_trybot_x64': [
+ 'android', 'gn', 'debug_trybot', 'x64',
+ ],
+
+ 'android_gn_debug_trybot_x86': [
+ 'android', 'gn', 'debug_trybot', 'x86',
+ ],
+
+ 'android_gn_release_bot': [
+ 'android', 'gn', 'release_bot',
+ ],
+
+ 'android_gn_release_bot_minimal_symbols': [
+ 'android', 'gn', 'release_bot_minimal_symbols',
+ ],
+
+ 'android_gn_release_trybot': [
+ 'android', 'gn', 'release_trybot',
+ ],
+
+ 'android_gyp_debug_static_bot': [
+ 'android', 'gyp', 'debug_static_bot',
+ ],
+
+ 'android_gyp_debug_static_bot_arm64': [
+ 'android', 'gyp', 'debug_static_bot', 'arm64',
+ ],
+
+ 'android_without_codecs_gyp_release_bot_minimal_symbols': [
+ 'android_without_codecs', 'gyp', 'release_bot_minimal_symbols',
+ ],
+
+ 'cast_gn_release_bot': [
+ 'cast', 'gn', 'release_bot',
+ ],
+
+ 'cast_gn_release_trybot': [
+ 'cast', 'gn', 'release_trybot',
+ ],
+
+ 'chromeos_gn_debug_bot': [
+ 'chromeos', 'gn', 'debug_bot',
+ ],
+
+ 'chromeos_ozone_gn_release_bot': [
+ 'chromeos', 'ozone', 'gn', 'release_bot',
+ ],
+
+ 'chromeos_ozone_gn_release_trybot': [
+ 'chromeos', 'ozone', 'gn', 'release_trybot',
+ ],
+
+ 'embedded_gyp_debug_bot': [
+ 'embedded', 'gyp', 'debug_bot',
+ ],
+
+ # The 'findit' config is used by the *_variable_* bots, which run
+ # the 'FindIt' recipes and code to bisect failures that happen on the
+ # other waterfall bots. The findit recipes actually override and
+ # re-use the configs from the matching waterfall bots, and so it should
+ # be an error if MB ever sees an actual *_variable_* bot name.
+ 'findit': [
+ 'error',
+ ],
+
+ # This is the "deployment" config for the blimp builds. Currently
+ # we want them to be debug, non-optimized builds (and we don't need any
+ # chrome branding), so we don't use the "official" mixin.
+ 'gn_blimp_debug': [
+ 'gn', 'blimp', 'debug',
+ ],
+
+ 'gn_cfi_release_bot': [
+ 'gn', 'cfi', 'release_bot',
+ ],
+
+ 'gn_cfi_release_trybot': [
+ 'gn', 'cfi', 'release_trybot',
+ ],
+
+ 'gn_cfi_diag_release_bot': [
+ 'gn', 'cfi', 'cfi_diag', 'release_bot',
+ ],
+
+ 'gn_debug_bot': [
+ 'gn', 'debug_bot',
+ ],
+
+ 'gn_debug_bot_minimal_symbols': [
+ 'gn', 'debug_bot_minimal_symbols',
+ ],
+
+ 'gn_debug_bot_minimal_symbols_x86': [
+ 'gn', 'debug_bot_minimal_symbols', 'x86',
+ ],
+
+ 'gn_debug_static_bot': [
+ 'gn', 'debug_static_bot',
+ ],
+
+ 'gn_linux_upload': [
+ 'gn_linux_upload', 'official', 'goma',
+ ],
+
+ 'gn_official': [
+ 'gn', 'official',
+ ],
+
+ 'gn_official_goma': [
+ 'gn', 'official', 'goma',
+ ],
+
+ 'gn_official_goma_lto': ['gn', 'official', 'goma', 'lto'],
+
+ 'gn_official_goma_minimal_symbols_android': [
+ 'gn', 'official', 'goma', 'minimal_symbols', 'android',
+ ],
+
+ 'gn_official_goma_minimal_symbols_android_arm64': [
+ 'gn', 'official', 'goma', 'minimal_symbols', 'android', 'arm64',
+ ],
+
+ 'gn_release_bot': [
+ 'gn', 'release_bot',
+ ],
+
+ 'gn_release_bot_minimal_symbols': [
+ 'gn', 'release_bot_minimal_symbols',
+ ],
+
+ 'gn_release_bot_minimal_symbols_x86': [
+ 'gn', 'release_bot_minimal_symbols', 'x86',
+ ],
+
+ 'gn_release_trybot': [
+ 'gn', 'release_trybot',
+ ],
+
+ 'gn_release_trybot_x86': [
+ 'gn', 'release_trybot', 'x86',
+ ],
+
+ 'gyp_official': [
+ 'gyp', 'official',
+ ],
+
+ 'gyp_official_goma': [
+ 'gyp', 'official', 'goma',
+ ],
+
+ 'gyp_official_goma_chromeos': [
+ 'gyp', 'official', 'goma', 'chromeos',
+ ],
+
+ 'gyp_official_goma_minimal_symbols_x64': [
+ 'gyp', 'official', 'goma', 'minimal_symbols', 'x64',
+ ],
+
+ 'gyp_official_goma_minimal_symbols_x86': [
+ 'gyp', 'official', 'goma', 'minimal_symbols', 'x86',
+ ],
+
+ 'gyp_official_syzyasan': [
+ 'gyp', 'official', 'syzyasan',
+ ],
+
+ # TODO(crbug.com/595947) - figure out how to handle PGO, which needs
+ # to invoke GYP/GN twice, with two different sets of flags, apparently.
+ 'gyp_official_winpgo': [
+ 'gyp', 'error',
+ ],
+
+ 'gyp_official_winpgo_x64': [
+ 'gyp', 'error', 'x64',
+ ],
+
+ 'gyp_official_x64': [
+ 'gyp', 'official', 'x64',
+ ],
+
+ 'gyp_release_bot_android': [
+ 'gyp', 'release_bot', 'android',
+ ],
+
+ 'gyp_release_trybot': [
+ 'gyp', 'release_trybot',
+ ],
+
+ 'gyp_release_trybot_x64': [
+ 'gyp', 'release_trybot', 'x64',
+ ],
+
+ 'gn_release_libfuzzer_asan': [
+ 'gn', 'release', 'libfuzzer', 'asan', 'proprietary_codecs', 'pdf_xfa',
+ 'disable_nacl',
+ ],
+
+ 'gn_release_libfuzzer_msan': [
+ 'gn', 'release', 'libfuzzer', 'msan', 'proprietary_codecs', 'pdf_xfa',
+ 'disable_nacl',
+ ],
+ 'gn_release_libfuzzer_ubsan': [
+ 'gn', 'release', 'libfuzzer', 'ubsan_security', 'proprietary_codecs',
+ 'pdf_xfa', 'disable_nacl',
+ ],
+
+ 'gn_release_drmemory_drfuzz_x86': [
+ 'gn', 'release', 'drmemory', 'drfuzz', 'x86', 'proprietary_codecs',
+ ],
+ 'gn_release_drmemory_drfuzz': [
+ 'gn', 'release', 'drmemory', 'drfuzz', 'proprietary_codecs',
+ ],
+
+ 'gn_windows_analyze': [
+ 'gn', 'no_symbols', 'disable_precompiled_headers',
+ 'shared', 'x86', 'win_analyze',
+ ],
+
+ 'gyp_valgrind_release_bot': [
+ 'gyp', 'valgrind', 'release_bot',
+ ],
+
+ 'gyp_valgrind_chromeos_release_bot': [
+ 'gyp', 'chromeos', 'valgrind', 'release_bot',
+ ],
+
+ 'gyp_drmemory_shared_release_x86': [
+ 'gyp', 'drmemory', 'shared', 'release', 'x86',
+ ],
+
+ 'gyp_drmemory_shared_release_x64': [
+ 'gyp', 'drmemory', 'shared', 'release', 'x64',
+ ],
+
+ # The 'ios' configs are just used for auditing. iOS bots
+ # actually use the ios recipes, not the chromium recipe, and look
+ # up their GYP or GN arguments via files checked in under //ios/build/bots.
+ # It is an error to actually use one of these configs to generate the
+ # build files.
+ 'ios_gn': [ 'gn', 'error'],
+
+ 'ios_gyp': ['gyp', 'error'],
+
+ # This is used to indicate that the bot runs the nacl annotator-based
+ # configs and switching them is out of scope for MB.
+ 'nacl_annotator': [ 'error' ],
+
+ # This is used for tracking purposes; any bot that uses this config
+ # should never actually run MB.
+ 'none': [
+ 'error',
+ ],
+
+ 'noswarming_gn_release_bot': [
+ 'noswarming', 'gn', 'release_bot',
+ ],
+
+ 'noswarming_gyp_release_bot_mac_strip': [
+ 'noswarming', 'gyp', 'release_bot', 'mac_strip',
+ ],
+
+ 'noswarming_gyp_release_bot_minimal_symbols_x86': [
+ 'noswarming', 'gyp', 'release_bot', 'minimal_symbols', 'x86',
+ ],
+
+ 'noswarming_gyp_release_trybot_minimal_symbols_x86': [
+ 'noswarming', 'gyp', 'release_trybot', 'minimal_symbols', 'x86',
+ ],
+
+ 'swarming_android_gn_release_bot_minimal_symbols': [
+ 'swarming', 'android', 'gn', 'release_bot_minimal_symbols',
+ ],
+
+ 'swarming_android_gyp_debug_static_bot': [
+ 'swarming', 'android', 'gyp', 'debug_static_bot',
+ ],
+
+ 'swarming_android_gyp_debug_static_bot_arm64': [
+ 'swarming', 'android', 'gyp', 'debug_static_bot', 'arm64',
+ ],
+
+ 'swarming_android_gyp_debug_trybot': [
+ 'swarming', 'android', 'gyp', 'debug_trybot',
+ ],
+
+ 'swarming_android_gyp_debug_trybot_arm64': [
+ 'swarming', 'android', 'gyp', 'debug_trybot', 'arm64',
+ ],
+
+ 'swarming_android_gyp_release_bot_minimal_symbols': [
+ 'swarming', 'android', 'gyp', 'release_bot_minimal_symbols',
+ ],
+
+ 'swarming_android_gyp_release_trybot': [
+ 'swarming', 'android', 'gyp', 'release_trybot',
+ ],
+
+ 'swarming_asan_lsan_gyp_release_trybot': [
+ 'swarming', 'asan', 'lsan', 'release_trybot',
+ ],
+
+ 'swarming_chromeos_gyp_release_bot': [
+ 'swarming', 'chromeos_with_codecs', 'gyp', 'release_bot',
+ ],
+
+ 'swarming_chromeos_gyp_release_trybot': [
+ 'swarming', 'chromeos_with_codecs', 'gyp', 'release_trybot',
+ ],
+
+ 'swarming_msan_gyp_release_trybot': [
+ 'swarming', 'chromeos', 'msan', 'gyp', 'release_trybot',
+ ],
+
+ 'swarming_gn_asan_lsan_release_bot_x64': [
+ 'swarming', 'gn', 'asan', 'lsan', 'release_bot', 'x64',
+ ],
+
+ 'swarming_gn_msan_release_bot_x64': [
+ 'swarming', 'gn', 'msan', 'release_bot', 'x64',
+ ],
+
+ 'swarming_gpu_fyi_tests_gn_debug_bot': [
+ 'swarming', 'gpu_tests', 'internal_gles2_conform_tests', 'gn',
+ 'debug_bot', 'angle_deqp_tests',
+ ],
+
+ 'swarming_gpu_fyi_tests_gn_debug_trybot': [
+ 'swarming', 'gpu_tests', 'internal_gles2_conform_tests', 'gn',
+ 'debug_trybot', 'angle_deqp_tests',
+ ],
+
+ 'swarming_gpu_fyi_tests_gn_release_bot': [
+ 'swarming', 'gpu_tests', 'internal_gles2_conform_tests', 'gn',
+ 'release_bot', 'angle_deqp_tests',
+ ],
+
+ 'swarming_gpu_fyi_tests_gn_release_trybot': [
+ 'swarming', 'gpu_tests', 'internal_gles2_conform_tests', 'gn',
+ 'release_bot', 'angle_deqp_tests',
+ ],
+
+ 'swarming_gpu_tests_deqp_gles_gyp_debug_bot_minimal_symbols_x64': [
+ 'swarming', 'gpu_tests', 'angle_deqp_tests',
+ 'internal_gles2_conform_tests', 'gyp', 'debug_bot',
+ 'minimal_symbols', 'x64',
+ ],
+
+ 'swarming_gpu_tests_deqp_gles_gyp_debug_bot_minimal_symbols_x86': [
+ 'swarming', 'gpu_tests', 'angle_deqp_tests',
+ 'internal_gles2_conform_tests', 'gyp', 'debug_bot',
+ 'minimal_symbols', 'x86',
+ ],
+
+ 'swarming_gpu_tests_deqp_gles_gyp_release_trybot_minimal_symbols_x64': [
+ 'swarming', 'gpu_tests', 'angle_deqp_tests',
+ 'internal_gles2_conform_tests', 'gyp', 'release_trybot',
+ 'minimal_symbols', 'x64',
+ ],
+
+ 'swarming_gpu_tests_deqp_gles_gyp_release_trybot_minimal_symbols_x86': [
+ 'swarming', 'gpu_tests', 'angle_deqp_tests',
+ 'internal_gles2_conform_tests', 'gyp', 'release_trybot',
+ 'minimal_symbols', 'x86',
+ ],
+
+ 'swarming_gpu_tests_deqp_gles_gyp_release_bot_minimal_symbols_x64': [
+ 'swarming', 'gpu_tests', 'angle_deqp_tests',
+ 'internal_gles2_conform_tests', 'gyp', 'release_bot',
+ 'minimal_symbols', 'x64',
+ ],
+
+ 'swarming_gpu_tests_deqp_gles_gyp_release_bot_minimal_symbols_x86': [
+ 'swarming', 'gpu_tests', 'angle_deqp_tests',
+ 'internal_gles2_conform_tests', 'gyp', 'release_bot',
+ 'minimal_symbols', 'x86',
+ ],
+
+ 'swarming_gpu_tests_gn_debug_bot': [
+ 'swarming', 'gpu_tests', 'gn', 'debug_bot',
+ ],
+
+ 'swarming_gpu_tests_gn_release_bot': [
+ 'swarming', 'gpu_tests', 'gn', 'release_bot',
+ ],
+
+ 'swarming_gpu_tests_gn_release_trybot': [
+ 'swarming', 'gpu_tests', 'gn', 'release_trybot',
+ ],
+
+ 'swarming_gpu_tests_gyp_debug_bot_minimal_symbols_x86': [
+ 'swarming', 'gpu_tests', 'gyp', 'debug_bot', 'minimal_symbols', 'x86',
+ ],
+
+ 'swarming_gpu_tests_gyp_release_bot_minimal_symbols_x86': [
+ 'swarming', 'gpu_tests', 'gyp', 'release_bot', 'minimal_symbols', 'x86',
+ ],
+
+ 'swarming_gpu_tests_gyp_release_trybot_minimal_symbols_x86': [
+ 'swarming', 'gpu_tests', 'gyp', 'release_trybot', 'minimal_symbols',
+ 'x86',
+ ],
+
+ 'swarming_gn_debug_bot': [
+ 'swarming', 'gn', 'debug_bot',
+ ],
+
+ 'swarming_gn_debug_bot_minimal_symbols_x64': [
+ 'swarming', 'gn', 'debug_bot_minimal_symbols', 'x64',
+ ],
+
+ 'swarming_gn_debug_bot_x64': [
+ 'swarming', 'gn', 'debug_bot', 'x64',
+ ],
+
+ 'swarming_gn_debug_trybot': [
+ 'swarming', 'gn', 'debug_trybot',
+ ],
+
+ 'swarming_gn_release_bot': [
+ 'swarming', 'gn', 'release_bot',
+ ],
+
+ 'swarming_gn_release_bot_x64': [
+ 'swarming', 'gn', 'release_bot', 'x64',
+ ],
+
+ 'swarming_gn_release_trybot': [
+ 'swarming', 'gn', 'release_bot',
+ ],
+
+ 'swarming_gn_release_trybot_minimal_symbols_x64': [
+ 'swarming', 'gn', 'release_trybot', 'minimal_symbols', 'x64',
+ ],
+
+ 'swarming_gyp_clang_debug_bot_minimal_symbols_x86': [
+ 'swarming', 'gyp', 'clang', 'debug_bot', 'minimal_symbols', 'x86',
+ ],
+
+ 'swarming_gyp_clang_official_release_bot_minimal_symbols_x64': [
+ 'swarming', 'gyp', 'clang', 'official', 'release_bot',
+ 'minimal_symbols', 'x64',
+ ],
+
+ 'swarming_gyp_clang_official_release_bot_minimal_symbols_x86': [
+ 'swarming', 'gyp', 'clang', 'official', 'release_bot',
+ 'minimal_symbols', 'x86',
+ ],
+
+ 'swarming_gyp_clang_official_release_trybot_minimal_symbols_x64': [
+ 'swarming', 'gyp', 'clang', 'official', 'release_trybot',
+ 'minimal_symbols', 'x64',
+ ],
+
+ 'swarming_gyp_clang_official_release_trybot_minimal_symbols_x86': [
+ 'swarming', 'gyp', 'clang', 'official', 'release_trybot',
+ 'minimal_symbols', 'x86',
+ ],
+
+ 'swarming_gyp_debug_bot_minimal_symbols_x64': [
+ 'swarming', 'gyp', 'debug_bot_minimal_symbols', 'x64',
+ ],
+
+ 'swarming_gyp_debug_bot_minimal_symbols_x86': [
+ 'swarming', 'gyp', 'debug_bot_minimal_symbols', 'x86',
+ ],
+
+ 'swarming_gyp_debug_bot_no_symbols_x86': [
+ 'swarming', 'gyp', 'debug_bot', 'no_symbols', 'x86',
+ ],
+
+ 'swarming_gyp_debug_bot_x64': [
+ 'swarming', 'gyp', 'debug_bot', 'x64',
+ ],
+
+ 'swarming_gyp_debug_trybot_x86': [
+ 'swarming', 'gyp', 'debug_trybot', 'x86',
+ ],
+
+ 'swarming_gyp_release_bot': [
+ 'swarming', 'gyp', 'release_bot',
+ ],
+
+ 'swarming_gyp_release_bot_arm': [
+ 'swarming', 'gyp', 'release_bot', 'arm', 'crosscompile',
+ ],
+
+ 'swarming_gyp_release_bot_minimal_symbols_x64': [
+ 'swarming', 'gyp', 'release_bot_minimal_symbols', 'x64',
+ ],
+
+ 'swarming_gyp_release_bot_minimal_symbols_x86': [
+ 'swarming', 'gyp', 'release_bot_minimal_symbols', 'x86',
+ ],
+
+ 'swarming_gyp_release_bot_x64': [
+ 'swarming', 'gyp', 'release_bot', 'x64',
+ ],
+
+ 'swarming_gyp_release_trybot_arm': [
+ 'swarming', 'gyp', 'release_trybot', 'arm', 'crosscompile',
+ ],
+
+ 'swarming_gyp_release_trybot_minimal_symbols_x64': [
+ 'swarming', 'gyp', 'release_trybot', 'minimal_symbols', 'x64',
+ ],
+
+ 'swarming_gyp_release_trybot_minimal_symbols_x86': [
+ 'swarming', 'gyp', 'release_trybot', 'minimal_symbols', 'x86',
+ ],
+
+ 'swarming_gyp_syzyasan_release_trybot_minimal_symbols_x86': [
+ 'swarming', 'gyp', 'syzyasan', 'release_trybot', 'minimal_symbols',
+ 'x86',
+ ],
+
+ 'swarming_tsan_gyp_release_trybot': [
+ 'swarming', 'disable_nacl', 'tsan', 'gyp', 'release_trybot',
+ ],
+
+ 'gn_debug_bot_minimal_symbols_chrome_with_codecs': [
+ 'gn', 'debug_bot_minimal_symbols', 'chrome_with_codecs',
+ ],
+
+ 'gn_debug_static_bot_chrome_with_codecs': [
+ 'gn', 'debug_static_bot', 'chrome_with_codecs',
+ ],
+
+ 'gn_release_bot_chrome_with_codecs': [
+ 'gn', 'release_bot', 'chrome_with_codecs',
+ ],
+
+ 'gn_release_bot_minimal_symbols_chrome_with_codecs': [
+ 'gn', 'release_bot_minimal_symbols', 'chrome_with_codecs',
+ ],
+
+ # This indicates that we haven't yet set up this bot w/ MB. This is
+ # different from 'none' in that a bot set to 'none' should never do
+ # compiles; a bot set to 'tbd' should do compiles but we haven't
+ # added the entries yet.
+ 'tbd': ['error'],
+
+ 'win_clang_debug_bot': [
+ 'gn', 'clang', 'debug_bot_minimal_symbols',
+ ],
+ },
+
+ # This is a dict mapping a given 'mixin' name to a dict of settings that
+ # mb should use. See //tools/mb/docs/user_guide.md for more information.
+ 'mixins': {
+ # We build Android with codecs on most bots to ensure maximum test
+ # coverage, but use 'android_without_codecs' on bots responsible for
+ # building publicly advertised non-Official Android builds --
+ # which are not allowed to have proprietary codecs enabled.
+ 'android': {
+ 'mixins': ['android_without_codecs', 'chrome_with_codecs'],
+ },
+
+ 'android_without_codecs': {
+ 'gn_args': 'target_os="android"',
+ 'gyp_defines': 'OS=android',
+ },
+
+ 'archive_gpu_tests': {
+ # archive_gpu_tests=true is not needed in GN builds.
+ 'gyp_defines': 'archive_gpu_tests=1',
+ },
+
+ 'angle_deqp_tests': {
+ 'gyp_defines': 'build_angle_deqp_tests=1',
+ 'gn_args': 'build_angle_deqp_tests=true',
+ },
+
+ 'arm': {
+ 'gn_args': 'target_cpu="arm"',
+ 'gyp_defines': 'target_arch=arm',
+ },
+
+ 'arm64': {
+ 'gn_args': 'target_cpu="arm64"',
+ 'gyp_defines': 'target_arch=arm64',
+ },
+
+ 'armv6': {
+ 'gn_args': 'arm_version=6',
+ 'gyp_defines': 'arm_version=6',
+ },
+
+ 'asan': {
+ 'gn_args': 'is_asan=true',
+ 'gyp_defines': 'asan=1',
+ },
+
+ # Removes dependencies on X11 and audio libraries for a containerized
+ # build.
+ 'blimp': {
+ 'gn_args': ('use_aura=true use_ozone=true use_alsa=false '
+ 'use_pulseaudio=false use_cups=false use_glib=false '
+ 'use_low_quality_image_interpolation=true'),
+ 'gyp_defines': ('use_aura=1 use_ozone=1 use_alsa=0 '
+ 'use_pulseaudio=0 use_cups=0 use_glib=0'),
+ },
+
+ 'cast': {
+ 'gn_args': 'is_chromecast=true',
+ 'gyp_defines': 'chromecast=1',
+ },
+
+ 'cfi': {
+ 'gn_args': 'is_cfi=true',
+ 'gyp_defines': 'cfi_vptr=1',
+ },
+
+ 'cfi_diag': {
+ 'gn_args': 'use_cfi_diag=true',
+ 'gyp_defines': 'cfi_diag=1',
+ },
+
+ 'chrome_with_codecs': {
+ 'gn_args': 'ffmpeg_branding="Chrome" proprietary_codecs=true',
+ 'gyp_defines': 'ffmpeg_branding=Chrome proprietary_codecs=1',
+ },
+
+ 'chromeos': {
+ 'gn_args': 'target_os="chromeos"',
+ 'gyp_defines': 'chromeos=1',
+ },
+
+ 'chromeos_with_codecs': {
+ 'gn_args': 'ffmpeg_branding="ChromeOS" proprietary_codecs=true',
+ 'gyp_defines': 'ffmpeg_branding=ChromeOS proprietary_codecs=1',
+ 'mixins': ['chromeos'],
+ },
+
+ 'clang_no_chrome_plugins': {
+ 'gn_args': 'clang_use_chrome_plugins=false',
+ 'gyp_defines': 'clang_use_chrome_plugins=0',
+ 'mixins': ['clang'],
+ },
+
+ 'clang': {
+ 'gn_args': 'is_clang=true',
+ 'gyp_defines': 'clang=1',
+ },
+
+ 'cronet': {
+ 'gn_args': ('disable_file_support=true disable_ftp_support=true '
+ 'enable_websockets=false'),
+ 'gyp_defines': ('disable_file_support=1 disable_ftp_support=1 '
+ 'enable_websockets=0'),
+ },
+
+ 'crosscompile': {
+ # This mixin is only needed on GYP bots that are doing cross-compiles
+      # but are *not* targeting Android or iOS (where build/gyp_chromium
+      # will set the crosscompile variable automatically). It is not needed
+      # in GN at all.
+ 'gyp_crosscompile': True,
+ },
+
+ 'data_reduction_proxy': {
+ 'gn_args': 'cronet_enable_data_reduction_proxy_support=true',
+ 'gyp_defines': 'enable_data_reduction_proxy_support=1',
+ },
+
+ 'dcheck_always_on': {
+ 'gn_args': 'dcheck_always_on=true',
+ 'gyp_defines': 'dcheck_always_on=1',
+ },
+
+ 'debug': {
+ 'gn_args': 'is_debug=true',
+ },
+
+ 'debug_bot': {
+ 'mixins': ['debug', 'shared', 'goma'],
+ },
+
+ 'debug_bot_minimal_symbols': {
+ 'mixins': ['debug_bot', 'minimal_symbols'],
+ },
+
+ 'debug_static_bot': {
+ 'mixins': ['debug', 'static', 'minimal_symbols', 'goma'],
+ },
+
+ 'debug_trybot': {
+ 'mixins': ['debug_bot_minimal_symbols'],
+ },
+
+ 'disable_nacl': {
+ 'gn_args': 'enable_nacl=false',
+ 'gyp_defines': 'disable_nacl=1',
+ },
+
+ 'disable_precompiled_headers': {
+ 'gn_args': 'disable_precompiled_headers=true',
+ 'gyp_defines': 'chromium_win_pch=0',
+ },
+
+ 'embedded': {
+ 'gn_args': 'error',
+ 'gyp_defines': 'embedded=1',
+ },
+
+ # This mixin is used to force configs that use it to fail. It
+ # is used in two cases: when we have bots that we haven't looked
+ # at yet and don't know whether they need MB or not, and for bots
+ # that are test-only and should never run MB.
+ 'error': {
+ 'gn_args': 'error',
+ 'gyp_defines': 'target_arch=unknown',
+ },
+
+ 'findbugs': {
+ 'gn_args': 'run_findbugs=true',
+ 'gyp_defines': 'run_findbugs=1',
+ },
+
+ 'gn_linux_upload': {
+ 'type': 'gn',
+
+ # We don't want to require a runtime dependency on glib in the
+ # GN binary; ideally we could just turn glib off, but that doesn't
+ # actually work, so we need to pretend to be doing an ozone build
+ # in order for the flag to actually take effect.
+ 'gn_args': 'use_ozone=true',
+ },
+
+ 'gn': {'type': 'gn'},
+
+ 'goma': {
+ # The MB code will properly escape goma_dir if necessary in the GYP
+ # code path; the GN code path needs no escaping.
+ 'gn_args': 'use_goma=true goma_dir="$(goma_dir)"',
+ 'gyp_defines': 'use_goma=1 gomadir=$(goma_dir)',
+ },
+
+ 'gpu_tests': {
+ 'mixins': ['archive_gpu_tests', 'chrome_with_codecs'],
+ },
+
+ 'gyp': {'type': 'gyp'},
+
+ 'internal_gles2_conform_tests': {
+ 'gn_args': 'internal_gles2_conform_tests=true',
+ 'gyp_defines': 'internal_gles2_conform_tests=1',
+ },
+
+ 'libfuzzer': { 'gn_args': 'use_libfuzzer=true' },
+
+ 'ubsan_security': { 'gn_args': 'is_ubsan_security=true' },
+
+ 'lsan': {
+ 'gn_args': 'is_lsan=true',
+ 'gyp_defines': 'lsan=1',
+ },
+
+ 'lto': {
+ 'gn_args': 'allow_posix_link_time_opt=true',
+ 'gyp_defines': 'use_lto=1',
+ },
+
+ 'mac_strip': {
+ 'gn_args': 'error', # TODO(GYP): Port mac_strip_release
+ 'gyp_defines': 'mac_strip_release=1',
+ },
+
+ 'minimal_symbols': {
+ 'gn_args': 'symbol_level=1',
+ 'gyp_defines': 'fastbuild=1',
+ },
+
+ 'mipsel': {
+ 'gn_args': 'target_cpu="mipsel"',
+ 'gyp_defines': 'target_arch=mipsel',
+ },
+
+ 'msan': {
+ 'gn_args': ('is_msan=true msan_track_origins=2 '
+ 'use_prebuilt_instrumented_libraries=true'),
+ 'gyp_defines': ('msan=1 msan_track_origins=2 '
+ 'use_prebuilt_instrumented_libraries=1'),
+ },
+
+ 'no_symbols': {
+ 'gn_args': 'symbol_level=0',
+ 'gyp_defines': 'fastbuild=2',
+ },
+
+ 'noswarming': {
+ # test_isolation_mode doesn't exist in a GN build (it's always a no-op).
+ 'gn_args': '',
+ 'gyp_defines': 'test_isolation_mode=noop',
+ },
+
+ 'official': {
+ 'gn_args': ('is_chrome_branded=true is_official_build=true '
+ 'is_debug=false'),
+ 'gyp_defines': 'branding=Chrome buildtype=Official',
+ },
+
+ 'ozone': {
+ 'gn_args': 'use_ozone=true',
+ 'gyp_defines': 'use_ozone=1',
+ },
+
+ 'pdf_xfa': {
+ 'gn_args': 'pdf_enable_xfa=true',
+ 'gyp_defines': 'pdf_enable_xfa=1',
+ },
+
+ 'proprietary_codecs': {
+ 'gn_args': 'proprietary_codecs=true',
+ 'gyp_defines': 'proprietary_codecs=1',
+ },
+
+ 'release': {
+ 'gn_args': 'is_debug=false',
+ },
+
+ 'release_bot': {
+ 'mixins': ['release', 'static', 'goma'],
+ },
+
+ 'release_bot_minimal_symbols': {
+ 'mixins': ['release_bot', 'minimal_symbols'],
+ },
+
+ 'release_trybot': {
+ 'mixins': ['release_bot_minimal_symbols', 'dcheck_always_on'],
+ },
+
+ 'shared': {
+ 'gn_args': 'is_component_build=true',
+ 'gyp_defines': 'component=shared_library',
+ },
+
+ 'static': {
+ 'gn_args': 'is_component_build=false',
+ 'gyp_defines': 'component=static_library',
+ },
+
+ 'swarming': {
+ 'gn_args': '',
+ 'gyp_defines': 'test_isolation_mode=prepare',
+ },
+
+ 'syzyasan': {
+ 'gn_args': 'is_syzyasan=true',
+ 'gyp_defines': 'syzyasan=1'
+ },
+
+ 'tsan': {
+ 'gn_args': 'is_tsan=true',
+ 'gyp_defines': 'tsan=1',
+ },
+
+ 'win_analyze': {
+ 'gn_args': 'use_vs_code_analysis=true',
+ 'gyp_defines': 'win_analyze=1',
+ },
+
+ 'x64': {
+ 'gn_args': 'target_cpu="x64"',
+ 'gyp_defines': 'target_arch=x64',
+ },
+
+ 'x86': {
+ 'gn_args': 'target_cpu="x86"',
+ 'gyp_defines': 'target_arch=ia32',
+ },
+
+ 'drfuzz': { 'gn_args': 'use_drfuzz=true' },
+
+ 'drmemory': {
+ 'gn_args': 'is_component_build=true enable_iterator_debugging=false',
+ 'gyp_defines': 'component=shared_library build_for_tool=drmemory',
+ },
+
+ 'valgrind': {
+ # TODO: add gn_args for 'build_for_tool=memcheck'
+ 'gyp_defines': 'build_for_tool=memcheck',
+ }
+ },
+}
diff --git a/chromium/tools/mb/mb_unittest.py b/chromium/tools/mb/mb_unittest.py
new file mode 100755
index 00000000000..dbb093d3898
--- /dev/null
+++ b/chromium/tools/mb/mb_unittest.py
@@ -0,0 +1,450 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for mb.py."""
+
+import json
+import StringIO
+import os
+import sys
+import unittest
+
+import mb
+
+
+class FakeMBW(mb.MetaBuildWrapper):
+ def __init__(self, win32=False):
+ super(FakeMBW, self).__init__()
+
+ # Override vars for test portability.
+ if win32:
+ self.chromium_src_dir = 'c:\\fake_src'
+ self.default_config = 'c:\\fake_src\\tools\\mb\\mb_config.pyl'
+ self.platform = 'win32'
+ self.executable = 'c:\\python\\python.exe'
+ self.sep = '\\'
+ else:
+ self.chromium_src_dir = '/fake_src'
+ self.default_config = '/fake_src/tools/mb/mb_config.pyl'
+ self.executable = '/usr/bin/python'
+ self.platform = 'linux2'
+ self.sep = '/'
+
+ self.files = {}
+ self.calls = []
+ self.cmds = []
+ self.cross_compile = None
+ self.out = ''
+ self.err = ''
+ self.rmdirs = []
+
+ def ExpandUser(self, path):
+ return '$HOME/%s' % path
+
+ def Exists(self, path):
+ return self.files.get(path) is not None
+
+ def MaybeMakeDirectory(self, path):
+ self.files[path] = True
+
+ def PathJoin(self, *comps):
+ return self.sep.join(comps)
+
+ def ReadFile(self, path):
+ return self.files[path]
+
+ def WriteFile(self, path, contents, force_verbose=False):
+ self.files[path] = contents
+
+ def Call(self, cmd, env=None, buffer_output=True):
+ if env:
+ self.cross_compile = env.get('GYP_CROSSCOMPILE')
+ self.calls.append(cmd)
+ if self.cmds:
+ return self.cmds.pop(0)
+ return 0, '', ''
+
+ def Print(self, *args, **kwargs):
+ sep = kwargs.get('sep', ' ')
+ end = kwargs.get('end', '\n')
+ f = kwargs.get('file', sys.stdout)
+ if f == sys.stderr:
+ self.err += sep.join(args) + end
+ else:
+ self.out += sep.join(args) + end
+
+ def TempFile(self, mode='w'):
+ return FakeFile(self.files)
+
+ def RemoveFile(self, path):
+ del self.files[path]
+
+ def RemoveDirectory(self, path):
+ self.rmdirs.append(path)
+ files_to_delete = [f for f in self.files if f.startswith(path)]
+ for f in files_to_delete:
+ self.files[f] = None
+
+
+class FakeFile(object):
+ def __init__(self, files):
+ self.name = '/tmp/file'
+ self.buf = ''
+ self.files = files
+
+ def write(self, contents):
+ self.buf += contents
+
+ def close(self):
+ self.files[self.name] = self.buf
+
+
+TEST_CONFIG = """\
+{
+ 'configs': {
+ 'gyp_rel_bot': ['gyp', 'rel', 'goma'],
+ 'gn_debug_goma': ['gn', 'debug', 'goma'],
+ 'gyp_debug': ['gyp', 'debug', 'fake_feature1'],
+ 'gn_rel_bot': ['gn', 'rel', 'goma'],
+ 'gyp_crosscompile': ['gyp', 'crosscompile'],
+ },
+ 'masters': {
+ 'chromium': {},
+ 'fake_master': {
+ 'fake_builder': 'gyp_rel_bot',
+ 'fake_gn_builder': 'gn_rel_bot',
+ 'fake_gyp_crosscompile_builder': 'gyp_crosscompile',
+ 'fake_gn_debug_builder': 'gn_debug_goma',
+ 'fake_gyp_builder': 'gyp_debug',
+ },
+ },
+ 'mixins': {
+ 'crosscompile': {
+ 'gyp_crosscompile': True,
+ },
+ 'fake_feature1': {
+ 'gn_args': 'enable_doom_melon=true',
+ 'gyp_defines': 'doom_melon=1',
+ },
+ 'gyp': {'type': 'gyp'},
+ 'gn': {'type': 'gn'},
+ 'goma': {
+ 'gn_args': 'use_goma=true goma_dir="$(goma_dir)"',
+ 'gyp_defines': 'goma=1 gomadir=$(goma_dir)',
+ },
+ 'rel': {
+ 'gn_args': 'is_debug=false',
+ },
+ 'debug': {
+ 'gn_args': 'is_debug=true',
+ },
+ },
+}
+"""
+
+
+TEST_BAD_CONFIG = """\
+{
+ 'configs': {
+ 'gn_rel_bot_1': ['gn', 'rel', 'chrome_with_codecs'],
+ 'gn_rel_bot_2': ['gn', 'rel', 'bad_nested_config'],
+ },
+ 'masters': {
+ 'chromium': {
+ 'a': 'gn_rel_bot_1',
+ 'b': 'gn_rel_bot_2',
+ },
+ },
+ 'mixins': {
+ 'gn': {'type': 'gn'},
+ 'chrome_with_codecs': {
+ 'gn_args': 'proprietary_codecs=true',
+ },
+ 'bad_nested_config': {
+ 'mixins': ['chrome_with_codecs'],
+ },
+ 'rel': {
+ 'gn_args': 'is_debug=false',
+ },
+ },
+}
+"""
+
+class UnitTest(unittest.TestCase):
+ def fake_mbw(self, files=None, win32=False):
+ mbw = FakeMBW(win32=win32)
+ mbw.files.setdefault(mbw.default_config, TEST_CONFIG)
+ if files:
+ for path, contents in files.items():
+ mbw.files[path] = contents
+ return mbw
+
+ def check(self, args, mbw=None, files=None, out=None, err=None, ret=None):
+ if not mbw:
+ mbw = self.fake_mbw(files)
+
+ actual_ret = mbw.Main(args)
+
+ self.assertEqual(actual_ret, ret)
+ if out is not None:
+ self.assertEqual(mbw.out, out)
+ if err is not None:
+ self.assertEqual(mbw.err, err)
+ return mbw
+
+ def test_clobber(self):
+ files = {
+ '/fake_src/out/Debug': None,
+ '/fake_src/out/Debug/mb_type': None,
+ }
+ mbw = self.fake_mbw(files)
+
+ # The first time we run this, the build dir doesn't exist, so no clobber.
+ self.check(['gen', '-c', 'gn_debug_goma', '//out/Debug'], mbw=mbw, ret=0)
+ self.assertEqual(mbw.rmdirs, [])
+ self.assertEqual(mbw.files['/fake_src/out/Debug/mb_type'], 'gn')
+
+ # The second time we run this, the build dir exists and matches, so no
+ # clobber.
+ self.check(['gen', '-c', 'gn_debug_goma', '//out/Debug'], mbw=mbw, ret=0)
+ self.assertEqual(mbw.rmdirs, [])
+ self.assertEqual(mbw.files['/fake_src/out/Debug/mb_type'], 'gn')
+
+ # Now we switch build types; this should result in a clobber.
+ self.check(['gen', '-c', 'gyp_debug', '//out/Debug'], mbw=mbw, ret=0)
+ self.assertEqual(mbw.rmdirs, ['/fake_src/out/Debug'])
+ self.assertEqual(mbw.files['/fake_src/out/Debug/mb_type'], 'gyp')
+
+ # Now we delete mb_type; this checks the case where the build dir
+ # exists but wasn't populated by mb; this should also result in a clobber.
+ del mbw.files['/fake_src/out/Debug/mb_type']
+ self.check(['gen', '-c', 'gyp_debug', '//out/Debug'], mbw=mbw, ret=0)
+ self.assertEqual(mbw.rmdirs,
+ ['/fake_src/out/Debug', '/fake_src/out/Debug'])
+ self.assertEqual(mbw.files['/fake_src/out/Debug/mb_type'], 'gyp')
+
+ def test_gn_analyze(self):
+ files = {'/tmp/in.json': """{\
+ "files": ["foo/foo_unittest.cc"],
+ "test_targets": ["foo_unittests", "bar_unittests"],
+ "additional_compile_targets": []
+ }"""}
+
+ mbw = self.fake_mbw(files)
+ mbw.Call = lambda cmd, env=None, buffer_output=True: (
+ 0, 'out/Default/foo_unittests\n', '')
+
+ self.check(['analyze', '-c', 'gn_debug_goma', '//out/Default',
+ '/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=0)
+ out = json.loads(mbw.files['/tmp/out.json'])
+ self.assertEqual(out, {
+ 'status': 'Found dependency',
+ 'compile_targets': ['foo_unittests'],
+ 'test_targets': ['foo_unittests']
+ })
+
+ def test_gn_analyze_fails(self):
+ files = {'/tmp/in.json': """{\
+ "files": ["foo/foo_unittest.cc"],
+ "test_targets": ["foo_unittests", "bar_unittests"],
+ "additional_compile_targets": []
+ }"""}
+
+ mbw = self.fake_mbw(files)
+ mbw.Call = lambda cmd, env=None, buffer_output=True: (1, '', '')
+
+ self.check(['analyze', '-c', 'gn_debug_goma', '//out/Default',
+ '/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=1)
+
+ def test_gn_analyze_all(self):
+ files = {'/tmp/in.json': """{\
+ "files": ["foo/foo_unittest.cc"],
+ "test_targets": ["bar_unittests"],
+ "additional_compile_targets": ["all"]
+ }"""}
+ mbw = self.fake_mbw(files)
+ mbw.Call = lambda cmd, env=None, buffer_output=True: (
+ 0, 'out/Default/foo_unittests\n', '')
+ self.check(['analyze', '-c', 'gn_debug_goma', '//out/Default',
+ '/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=0)
+ out = json.loads(mbw.files['/tmp/out.json'])
+ self.assertEqual(out, {
+ 'status': 'Found dependency (all)',
+ 'compile_targets': ['all', 'bar_unittests'],
+ 'test_targets': ['bar_unittests'],
+ })
+
+ def test_gn_analyze_missing_file(self):
+ files = {'/tmp/in.json': """{\
+ "files": ["foo/foo_unittest.cc"],
+ "test_targets": ["bar_unittests"],
+ "additional_compile_targets": []
+ }"""}
+ mbw = self.fake_mbw(files)
+ mbw.cmds = [
+ (0, '', ''),
+ (1, 'The input matches no targets, configs, or files\n', ''),
+ (1, 'The input matches no targets, configs, or files\n', ''),
+ ]
+
+ self.check(['analyze', '-c', 'gn_debug_goma', '//out/Default',
+ '/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=0)
+ out = json.loads(mbw.files['/tmp/out.json'])
+ self.assertEqual(out, {
+ 'status': 'No dependency',
+ 'compile_targets': [],
+ 'test_targets': [],
+ })
+
+ def test_gn_gen(self):
+ self.check(['gen', '-c', 'gn_debug_goma', '//out/Default', '-g', '/goma'],
+ ret=0,
+ out=('/fake_src/buildtools/linux64/gn gen //out/Default '
+ '\'--args=is_debug=true use_goma=true goma_dir="/goma"\' '
+ '--check\n'))
+
+ mbw = self.fake_mbw(win32=True)
+ self.check(['gen', '-c', 'gn_debug_goma', '-g', 'c:\\goma', '//out/Debug'],
+ mbw=mbw, ret=0,
+ out=('c:\\fake_src\\buildtools\\win\\gn.exe gen //out/Debug '
+ '"--args=is_debug=true use_goma=true goma_dir=\\"'
+ 'c:\\goma\\"" --check\n'))
+
+
+ def test_gn_gen_fails(self):
+ mbw = self.fake_mbw()
+ mbw.Call = lambda cmd, env=None, buffer_output=True: (1, '', '')
+ self.check(['gen', '-c', 'gn_debug_goma', '//out/Default'], mbw=mbw, ret=1)
+
+ def test_gn_gen_swarming(self):
+ files = {
+ '/tmp/swarming_targets': 'base_unittests\n',
+ '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
+ "{'base_unittests': {"
+ " 'label': '//base:base_unittests',"
+ " 'type': 'raw',"
+ " 'args': [],"
+ "}}\n"
+ ),
+ '/fake_src/out/Default/base_unittests.runtime_deps': (
+ "base_unittests\n"
+ ),
+ }
+ mbw = self.fake_mbw(files)
+ self.check(['gen',
+ '-c', 'gn_debug_goma',
+ '--swarming-targets-file', '/tmp/swarming_targets',
+ '//out/Default'], mbw=mbw, ret=0)
+ self.assertIn('/fake_src/out/Default/base_unittests.isolate',
+ mbw.files)
+ self.assertIn('/fake_src/out/Default/base_unittests.isolated.gen.json',
+ mbw.files)
+
+ def test_gn_isolate(self):
+ files = {
+ '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
+ "{'base_unittests': {"
+ " 'label': '//base:base_unittests',"
+ " 'type': 'raw',"
+ " 'args': [],"
+ "}}\n"
+ ),
+ '/fake_src/out/Default/base_unittests.runtime_deps': (
+ "base_unittests\n"
+ ),
+ }
+ self.check(['isolate', '-c', 'gn_debug_goma', '//out/Default',
+ 'base_unittests'], files=files, ret=0)
+
+ # test running isolate on an existing build_dir
+ files['/fake_src/out/Default/args.gn'] = 'is_debug = True\n'
+ self.check(['isolate', '//out/Default', 'base_unittests'],
+ files=files, ret=0)
+
+ files['/fake_src/out/Default/mb_type'] = 'gn\n'
+ self.check(['isolate', '//out/Default', 'base_unittests'],
+ files=files, ret=0)
+
+ def test_gn_run(self):
+ files = {
+ '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
+ "{'base_unittests': {"
+ " 'label': '//base:base_unittests',"
+ " 'type': 'raw',"
+ " 'args': [],"
+ "}}\n"
+ ),
+ '/fake_src/out/Default/base_unittests.runtime_deps': (
+ "base_unittests\n"
+ ),
+ }
+ self.check(['run', '-c', 'gn_debug_goma', '//out/Default',
+ 'base_unittests'], files=files, ret=0)
+
+ def test_gn_lookup(self):
+ self.check(['lookup', '-c', 'gn_debug_goma'], ret=0)
+
+ def test_gn_lookup_goma_dir_expansion(self):
+ self.check(['lookup', '-c', 'gn_rel_bot', '-g', '/foo'], ret=0,
+ out=("/fake_src/buildtools/linux64/gn gen _path_ "
+ "'--args=is_debug=false use_goma=true "
+ "goma_dir=\"/foo\"'\n" ))
+
+ def test_gyp_analyze(self):
+ mbw = self.check(['analyze', '-c', 'gyp_rel_bot', '//out/Release',
+ '/tmp/in.json', '/tmp/out.json'], ret=0)
+ self.assertIn('analyzer', mbw.calls[0])
+
+ def test_gyp_crosscompile(self):
+ mbw = self.fake_mbw()
+ self.check(['gen', '-c', 'gyp_crosscompile', '//out/Release'],
+ mbw=mbw, ret=0)
+ self.assertTrue(mbw.cross_compile)
+
+ def test_gyp_gen(self):
+ self.check(['gen', '-c', 'gyp_rel_bot', '-g', '/goma', '//out/Release'],
+ ret=0,
+ out=("GYP_DEFINES='goma=1 gomadir=/goma'\n"
+ "python build/gyp_chromium -G output_dir=out\n"))
+
+ mbw = self.fake_mbw(win32=True)
+ self.check(['gen', '-c', 'gyp_rel_bot', '-g', 'c:\\goma', '//out/Release'],
+ mbw=mbw, ret=0,
+ out=("set GYP_DEFINES=goma=1 gomadir='c:\\goma'\n"
+ "python build\\gyp_chromium -G output_dir=out\n"))
+
+ def test_gyp_gen_fails(self):
+ mbw = self.fake_mbw()
+ mbw.Call = lambda cmd, env=None, buffer_output=True: (1, '', '')
+ self.check(['gen', '-c', 'gyp_rel_bot', '//out/Release'], mbw=mbw, ret=1)
+
+ def test_gyp_lookup_goma_dir_expansion(self):
+ self.check(['lookup', '-c', 'gyp_rel_bot', '-g', '/foo'], ret=0,
+ out=("GYP_DEFINES='goma=1 gomadir=/foo'\n"
+ "python build/gyp_chromium -G output_dir=_path_\n"))
+
+ def test_help(self):
+ orig_stdout = sys.stdout
+ try:
+ sys.stdout = StringIO.StringIO()
+ self.assertRaises(SystemExit, self.check, ['-h'])
+ self.assertRaises(SystemExit, self.check, ['help'])
+ self.assertRaises(SystemExit, self.check, ['help', 'gen'])
+ finally:
+ sys.stdout = orig_stdout
+
+ def test_validate(self):
+ mbw = self.fake_mbw()
+ mbw.files[mbw.default_config] = TEST_CONFIG
+ self.check(['validate'], mbw=mbw, ret=0)
+
+ def test_bad_validate(self):
+ mbw = self.fake_mbw()
+ mbw.files[mbw.default_config] = TEST_BAD_CONFIG
+ self.check(['validate'], mbw=mbw, ret=1)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/md_browser/OWNERS b/chromium/tools/md_browser/OWNERS
new file mode 100644
index 00000000000..3fc266c9701
--- /dev/null
+++ b/chromium/tools/md_browser/OWNERS
@@ -0,0 +1,2 @@
+dpranke@chromium.org
+nodir@chromium.org
diff --git a/chromium/tools/md_browser/README.md b/chromium/tools/md_browser/README.md
new file mode 100644
index 00000000000..05d6b9d648d
--- /dev/null
+++ b/chromium/tools/md_browser/README.md
@@ -0,0 +1,27 @@
+# md_browser
+
+This is a simple tool to render the markdown docs in a chromium checkout
+locally. It is written in Python and uses the Python 'markdown' package,
+which is checked into src/third_party.
+
+md_browser attempts to emulate the flavor of Markdown implemented by
+[Gitiles](https://gerrit.googlesource.com/gitiles/+/master/Documentation/markdown.md).
+
+Gitiles is the source browser running on https://chromium.googlesource.com,
+and can be run locally, but to do so requires a Java install and a Buck
+install, which can be slightly annoying to set up on Mac or Windows.
+
+This is a lighter-weight solution, which also allows you to preview uncommitted
+changes (i.e., it just serves files out of the filesystem, and is not a
+full Git repo browser like Gitiles is).
+
+To run md_browser:
+
+1. cd to the top of your chromium checkout
+
+2. run `python tools/md_browser/md_browser.py`
+
+3. There is no step three.
+
+This will run a local web server on port 8080 that points to the top
+of the repo. You can specify a different port with the `-p` flag.
diff --git a/chromium/tools/md_browser/__init__.py b/chromium/tools/md_browser/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/chromium/tools/md_browser/__init__.py
diff --git a/chromium/tools/md_browser/doc.css b/chromium/tools/md_browser/doc.css
new file mode 100644
index 00000000000..93ea5fc6d62
--- /dev/null
+++ b/chromium/tools/md_browser/doc.css
@@ -0,0 +1,298 @@
+/**
+ * Copyright 2015 The Chromium Authors. All rights reserved.
+ * Use of this source code is governed by a BSD-style license that can be
+ * found in the LICENSE file.
+ */
+
+/* This file is cloned from
+ * https://gerrit.googlesource.com/gitiles/+/master/gitiles-servlet/src/main/resources/com/google/gitiles/static/doc.css
+ */
+
+html.doc-page, .doc {
+ font-family: arial,sans-serif;
+}
+.doc-page body {
+ margin: 0;
+}
+
+.banner {
+ min-height: 44px;
+ margin: 0;
+ padding: 14px 15px 13px;
+ border-bottom: 1px solid #eee;
+}
+.banner h1, .banner h2 {
+ float: left;
+ font-size: 32px;
+ font-weight: 300;
+ line-height: 1.375;
+ margin: 0;
+}
+.banner img {
+ margin: -1px 10px -4px 0px;
+ vertical-align: middle;
+}
+.banner a, .banner a:hover {
+ text-decoration: none;
+}
+.banner, .banner a:link, .banner a:visited {
+ color: #777;
+}
+.banner h2:before {
+ border-right: 1px solid #eee;
+ content: "";
+ float: left;
+ height: 44px;
+ margin: 0 12px 0 14px;
+}
+
+.nav, .footer-line {
+ color: #333;
+ padding: 0 15px;
+ background: #eee;
+}
+.nav ul {
+ list-style: none;
+ margin: 0;
+ padding: 6px 0;
+}
+.nav li {
+ float: left;
+ font-size: 14px;
+ line-height: 1.43;
+ margin: 0 20px 0 0;
+ padding: 6px 0;
+}
+.nav li a, .footer-line a {
+ color: #7a7af9;
+}
+.nav li a:hover {
+ color: #0000f9;
+}
+.banner:after, .nav ul:after, .cols:after {
+ clear: both;
+ content: "";
+ display: block;
+}
+
+.nav-aux, .doc {
+ max-width: 978px;
+}
+.nav-aux, .doc-page .doc {
+ margin: auto;
+}
+
+.footer-break {
+ clear: both;
+ margin: 120px 0 0 0;
+}
+.footer-line {
+ font-size: 13px;
+ line-height: 30px;
+ height: 30px;
+}
+.footer-line ul {
+ list-style: none;
+ margin: 0;
+ padding: 0;
+}
+.footer-line li {
+ display: inline;
+}
+.footer-line li+li:before {
+ content: "·";
+ padding: 0 5px;
+}
+.footer-line .nav-aux {
+ position: relative;
+}
+.gitiles-att {
+ color: #A0ADCC;
+ position: absolute;
+ top: 0;
+ right: 0;
+}
+.gitiles-att a {
+ font-style: italic;
+}
+
+/* Markdown rendered in /+doc/ or tree view page . */
+
+.doc {
+ color: #444;
+ font-size: 13px;
+ line-height: normal;
+}
+
+.doc h1, .doc h2, .doc h3, .doc h4, .doc h5, .doc h6 {
+ font-family: "open sans",arial,sans-serif;
+ font-weight: bold;
+ color: #444;
+ height: auto;
+ white-space: normal;
+ overflow: visible;
+ margin: 0.67em 0 0.67em 0;
+}
+.doc h1 {
+ font-size: 20px;
+ margin: 0.67em 0 0.67em 0;
+}
+.doc h2 {
+ font-size: 16px;
+ margin: 0.67em 0 0.67em 0;
+}
+.doc h3 {
+ font-size: 14px;
+ margin: 0.67em 0 0.67em 0;
+}
+.doc h4 {
+ font-size: 13px;
+ margin: 1em 0 1em 0;
+}
+.doc h5 {
+ font-size: 13px;
+ margin: 1.3em 0 1.3em 0;
+}
+.doc h6 {
+ font-size: 13px;
+ margin: 1.6em 0 1.6em 0;
+}
+
+.doc a { text-decoration: none; }
+.doc a:link { color: #245dc1; }
+.doc a:visited { color: #7759ae; }
+.doc a:hover { text-decoration: underline; }
+
+.doc ul, .doc ol {
+ margin: 10px 10px 10px 30px;
+ padding: 0;
+}
+
+.doc img {
+ border: 0;
+ max-width: 100%;
+}
+.doc iframe {
+ min-width: 100px;
+ min-height: 30px;
+}
+iframe.noborder {
+ border: 0;
+}
+
+.doc em {
+ font-weight: normal;
+ font-style: italic;
+}
+.doc strong {
+ font-weight: bold;
+ color: inherit;
+}
+
+.doc pre {
+ border: 1px solid silver;
+ background: #fafafa;
+ margin: 0 2em 0 2em;
+ padding: 2px;
+}
+.doc code, .doc .code {
+ color: #060;
+ font: 13px/1.54 "courier new",courier,monospace;
+}
+
+.doc dl dt {
+ margin-top: 1em;
+}
+
+.doc table {
+ border-collapse: collapse;
+ border-spacing: 0;
+}
+.doc th {
+ text-align: center;
+}
+.doc th, .doc td {
+ border: 1px solid #eee;
+ padding: 4px 12px;
+ vertical-align: top;
+}
+.doc th {
+ background-color: #f5f5f5;
+}
+
+.toc {
+ margin-top: 30px;
+}
+.toc-aux {
+ padding: 2px;
+ background: #f9f9f9;
+ border: 1px solid #f2f2f2;
+ border-radius: 4px;
+}
+.toc h2 {
+ margin: 0 0 5px 0;
+}
+.toc ul {
+ margin: 0 0 0 30px;
+}
+.toc ul li {
+ margin-left: 0px;
+ list-style: disc;
+}
+.toc ul ul li {
+ list-style: circle;
+}
+
+.note, .promo, .aside {
+ border: 1px solid;
+ border-radius: 4px;
+ margin: 10px 0;
+ padding: 10px;
+}
+.note {
+ background: #fffbe4;
+ border-color: #f8f6e6;
+}
+.promo {
+ background: #f6f9ff;
+ border-color: #eff2f9;
+}
+.aside {
+ background: #f9f9f9;
+ border-color: #f2f2f2;
+}
+.note :first-child,
+.promo :first-child,
+.aside :first-child {
+ margin-top: 0;
+}
+.note p:last-child,
+.promo p:last-child,
+.aside p:last-child {
+ margin-bottom: 0;
+}
+
+.cols {
+ margin: 0 -1.533%;
+ width: 103.067%;
+}
+.col-1, .col-2, .col-3, .col-4, .col-5, .col-6,
+.col-7, .col-8, .col-9, .col-10, .col-11, .col-12 {
+ float: left;
+ margin: 0 1.488% 20px;
+}
+.col-1 { width: 5.357%; }
+.col-2 { width: 13.690%; }
+.col-3 { width: 22.024%; }
+.col-4 { width: 30.357%; }
+.col-5 { width: 38.690%; }
+.col-6 { width: 47.024%; }
+.col-7 { width: 55.357%; }
+.col-8 { width: 63.690%; }
+.col-9 { width: 72.024%; }
+.col-10 { width: 80.357%; }
+.col-11 { width: 88.690%; }
+.col-12 { width: 97.024%; }
+.cols hr {
+ width: 80%;
+}
diff --git a/chromium/tools/md_browser/footer.html b/chromium/tools/md_browser/footer.html
new file mode 100644
index 00000000000..8aab62434f5
--- /dev/null
+++ b/chromium/tools/md_browser/footer.html
@@ -0,0 +1,8 @@
+<div class="footer-break"></div>
+<div class="footer-line">
+<div class="nav-aux">
+<div class="gitiles-att"></div>
+</div>
+</div>
+</body>
+</html>
diff --git a/chromium/tools/md_browser/gitiles_ext_blocks.py b/chromium/tools/md_browser/gitiles_ext_blocks.py
new file mode 100644
index 00000000000..b1a53795e17
--- /dev/null
+++ b/chromium/tools/md_browser/gitiles_ext_blocks.py
@@ -0,0 +1,84 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Implements Gitiles' notification, aside and promotion blocks.
+
+This extension makes the Markdown parser recognize the Gitiles' extended
+blocks notation. The syntax is explained at:
+
+https://gerrit.googlesource.com/gitiles/+/master/Documentation/markdown.md#Notification_aside_promotion-blocks
+"""
+
+from markdown.blockprocessors import BlockProcessor
+from markdown.extensions import Extension
+from markdown.util import etree
+import re
+
+
+class _GitilesExtBlockProcessor(BlockProcessor):
+ """Process Gitiles' notification, aside and promotion blocks."""
+
+ RE_START = re.compile(r'^\*\*\* (note|aside|promo) *\n')
+ RE_END = re.compile(r'\n\*\*\* *\n?$')
+
+ def __init__(self, *args, **kwargs):
+ self._last_parent = None
+ BlockProcessor.__init__(self, *args, **kwargs)
+
+ def test(self, parent, block):
+ return self.RE_START.search(block) or self.RE_END.search(block)
+
+ def run(self, parent, blocks):
+ raw_block = blocks.pop(0)
+ match_start = self.RE_START.search(raw_block)
+ if match_start:
+ # Opening a new block.
+ rest = raw_block[match_start.end():]
+
+ if self._last_parent:
+ # Inconsistent state (nested starting markers). Ignore the marker
+ # and keep going.
+ blocks.insert(0, rest)
+ return
+
+ div = etree.SubElement(parent, 'div')
+ # Setting the class name is sufficient, because doc.css already has
+ # styles for these classes.
+ div.set('class', match_start.group(1))
+ self._last_parent = parent
+ blocks.insert(0, rest)
+ self.parser.parseBlocks(div, blocks)
+ return
+
+ match_end = self.RE_END.search(raw_block)
+ if match_end:
+ # Ending an existing block.
+
+ # Process the text preceding the ending marker in the current context
+ # (i.e. within the div block).
+ rest = raw_block[:match_end.start()]
+ self.parser.parseBlocks(parent, [rest])
+
+ if not self._last_parent:
+ # Inconsistent state (the ending marker is found but there is no
+ # matching starting marker).
+ # Let's continue as if we did not see the ending marker.
+ return
+
+ last_parent = self._last_parent
+ self._last_parent = None
+ self.parser.parseBlocks(last_parent, blocks)
+ return
+
+
+class _GitilesExtBlockExtension(Extension):
+ """Add Gitiles' extended blocks to Markdown."""
+ def extendMarkdown(self, md, md_globals):
+ md.parser.blockprocessors.add('gitilesextblocks',
+ _GitilesExtBlockProcessor(md.parser),
+ '_begin')
+
+
+def makeExtension(*args, **kwargs):
+ return _GitilesExtBlockExtension(*args, **kwargs)
diff --git a/chromium/tools/md_browser/header.html b/chromium/tools/md_browser/header.html
new file mode 100644
index 00000000000..1493ec3859d
--- /dev/null
+++ b/chromium/tools/md_browser/header.html
@@ -0,0 +1,8 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
+<html class="doc-page">
+<head>
+<meta charset="UTF-8" />
+<link rel="stylesheet" type="text/css" href="/doc.css" />
+</head>
+<body>
+<div class="doc">
diff --git a/chromium/tools/md_browser/md_browser.py b/chromium/tools/md_browser/md_browser.py
new file mode 100644
index 00000000000..2b968525320
--- /dev/null
+++ b/chromium/tools/md_browser/md_browser.py
@@ -0,0 +1,162 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple Markdown browser for a Git checkout."""
+from __future__ import print_function
+
+import SimpleHTTPServer
+import SocketServer
+import argparse
+import codecs
+import os
+import re
+import socket
+import sys
+
+
+THIS_DIR = os.path.abspath(os.path.dirname(__file__))
+SRC_DIR = os.path.dirname(os.path.dirname(THIS_DIR))
+sys.path.append(os.path.join(SRC_DIR, 'third_party', 'Python-Markdown'))
+import markdown
+
+
+def main(argv):
+ parser = argparse.ArgumentParser(prog='md_browser')
+ parser.add_argument('-p', '--port', type=int, default=8080,
+ help='port to run on (default = %(default)s)')
+ args = parser.parse_args(argv)
+
+ try:
+ s = Server(args.port, SRC_DIR)
+ print("Listening on http://localhost:%s/" % args.port)
+ print(" Try loading http://localhost:%s/docs/README.md" % args.port)
+ s.serve_forever()
+ s.shutdown()
+ return 0
+ except KeyboardInterrupt:
+ return 130
+
+
+def _gitiles_slugify(value, _separator):
+ """Convert a string (representing a section title) to URL anchor name.
+
+ This function is passed to "toc" extension as an extension option, so we
+ can emulate the way how Gitiles converts header titles to URL anchors.
+
+ Gitiles' official documentation about the conversion is at:
+
+ https://gerrit.googlesource.com/gitiles/+/master/Documentation/markdown.md#Named-anchors
+
+ Args:
+ value: The name of a section that is to be converted.
+ _separator: Unused. This is actually a configurable string that is used
+ as a replacement character for spaces in the title, typically set to
+ '-'. Since we emulate Gitiles' way of slugification here, it makes
+      little sense to have the separator character configurable.
+ """
+
+ # TODO(yutak): Implement accent removal. This does not seem easy without
+ # some library. For now we just make accented characters turn into
+ # underscores, just like other non-ASCII characters.
+
+ value = value.encode('ascii', 'replace') # Non-ASCII turns into '?'.
+ value = re.sub(r'[^- a-zA-Z0-9]', '_', value) # Non-alphanumerics to '_'.
+ value = value.replace(u' ', u'-')
+ value = re.sub(r'([-_])[-_]+', r'\1', value) # Fold hyphens and underscores.
+ return value
+
+
+class Server(SocketServer.TCPServer):
+ def __init__(self, port, top_level):
+ SocketServer.TCPServer.__init__(self, ('0.0.0.0', port), Handler)
+ self.port = port
+ self.top_level = top_level
+
+ def server_bind(self):
+ self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ self.socket.bind(self.server_address)
+
+
+class Handler(SimpleHTTPServer.SimpleHTTPRequestHandler):
+ def do_GET(self):
+ path = self.path
+
+ # strip off the repo and branch info, if present, for compatibility
+ # with gitiles.
+ if path.startswith('/chromium/src/+/master'):
+ path = path[len('/chromium/src/+/master'):]
+
+ full_path = os.path.abspath(os.path.join(self.server.top_level, path[1:]))
+
+ if not full_path.startswith(SRC_DIR):
+ self._DoUnknown()
+ elif path == '/doc.css':
+ self._DoCSS('doc.css')
+ elif not os.path.exists(full_path):
+ self._DoNotFound()
+ elif path.lower().endswith('.md'):
+ self._DoMD(path)
+ elif os.path.exists(full_path + '/README.md'):
+ self._DoMD(path + '/README.md')
+ else:
+ self._DoUnknown()
+
+ def _DoMD(self, path):
+ extensions = [
+ 'markdown.extensions.def_list',
+ 'markdown.extensions.fenced_code',
+ 'markdown.extensions.tables',
+ 'markdown.extensions.toc',
+ 'gitiles_ext_blocks',
+ ]
+ extension_configs = {
+ 'markdown.extensions.toc': {
+ 'slugify': _gitiles_slugify
+ },
+ }
+
+ contents = self._Read(path[1:])
+ md_fragment = markdown.markdown(contents,
+ extensions=extensions,
+ extension_configs=extension_configs,
+ output_format='html4').encode('utf-8')
+ try:
+ self._WriteHeader('text/html')
+ self._WriteTemplate('header.html')
+ self.wfile.write(md_fragment)
+ self._WriteTemplate('footer.html')
+ except:
+ raise
+
+ def _DoCSS(self, template):
+ self._WriteHeader('text/css')
+ self._WriteTemplate(template)
+
+ def _DoNotFound(self):
+ self._WriteHeader('text/html')
+ self.wfile.write('<html><body>%s not found</body></html>' % self.path)
+
+ def _DoUnknown(self):
+ self._WriteHeader('text/html')
+ self.wfile.write('<html><body>I do not know how to serve %s.</body>'
+ '</html>' % self.path)
+
+ def _Read(self, relpath):
+ assert not relpath.startswith(os.sep)
+ path = os.path.join(self.server.top_level, relpath)
+ with codecs.open(path, encoding='utf-8') as fp:
+ return fp.read()
+
+ def _WriteHeader(self, content_type='text/plain'):
+ self.send_response(200)
+ self.send_header('Content-Type', content_type)
+ self.end_headers()
+
+ def _WriteTemplate(self, template):
+ contents = self._Read(os.path.join('tools', 'md_browser', template))
+ self.wfile.write(contents.encode('utf-8'))
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv[1:]))
diff --git a/chromium/tools/measure_page_load_time/ff_ext/chrome.manifest b/chromium/tools/measure_page_load_time/ff_ext/chrome.manifest
new file mode 100644
index 00000000000..064fae49f1c
--- /dev/null
+++ b/chromium/tools/measure_page_load_time/ff_ext/chrome.manifest
@@ -0,0 +1,2 @@
+content measurepageloadtimeextension content/
+overlay chrome://browser/content/browser.xul chrome://measurepageloadtimeextension/content/firefoxOverlay.xul
diff --git a/chromium/tools/measure_page_load_time/ff_ext/content/firefoxOverlay.xul b/chromium/tools/measure_page_load_time/ff_ext/content/firefoxOverlay.xul
new file mode 100644
index 00000000000..32fbf4904b9
--- /dev/null
+++ b/chromium/tools/measure_page_load_time/ff_ext/content/firefoxOverlay.xul
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<?xml-stylesheet href="chrome://measurepageloadtimeextension/skin/overlay.css" type="text/css"?>
+<!DOCTYPE overlay SYSTEM "chrome://measurepageloadtimeextension/locale/measurepageloadtimeextension.dtd">
+<overlay id="measurepageloadtimeextension-overlay"
+ xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul">
+ <script src="measure_page_load_time.js"/>
+</overlay>
diff --git a/chromium/tools/measure_page_load_time/ff_ext/content/measure_page_load_time.js b/chromium/tools/measure_page_load_time/ff_ext/content/measure_page_load_time.js
new file mode 100644
index 00000000000..44473ccb105
--- /dev/null
+++ b/chromium/tools/measure_page_load_time/ff_ext/content/measure_page_load_time.js
@@ -0,0 +1,209 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+/**
+ * @fileoverview measure_page_load_time.js implements a Firefox extension
+ * for measuring how long a page takes to load. It waits on TCP port
+ * 42492 for connections, then accepts URLs and returns strings of the
+ * form url,time, where "time" is the load time in milliseconds or the
+ * string "timeout" or "error". Load time is measured from the call to
+ * loadURI until the load event fires, or until the status changes to
+ * STATUS_STOP if the load event doesn't fire (there's an error.)
+ * @author jhaas@google.com (Jonathan Haas) */
+
+// Shorthand reference to nsIWebProgress[Listener] interfaces
+var IWP = Components.interfaces.nsIWebProgress;
+var IWPL = Components.interfaces.nsIWebProgressListener;
+
+
+var MPLT = {
+ /**
+ * Constants
+ */
+ PORT_NUMBER : 42492, // port to listen for connections on
+ TIME_OUT : 4 * 60 * 1000, // timeout in 4 minutes
+
+ /**
+ * Incoming URL buffer
+ * @type {string}
+ */
+ textBuffer : '',
+
+ /**
+ * URL we're currently visiting
+ * @type {string}
+ */
+ URL : '',
+
+ /**
+ * Listener to accept incoming connections
+ * @type {nsIServerSocketListener}
+ */
+ acceptListener :
+ {
+ onSocketAccepted : function(serverSocket, transport)
+ {
+ MPLT.streamInput = transport.openInputStream(0,0,0);
+ MPLT.streamOutput = transport.openOutputStream(0,0,0);
+
+ MPLT.scriptStream = Components.classes['@mozilla.org/scriptableinputstream;1']
+ .createInstance(Components.interfaces.nsIScriptableInputStream);
+ MPLT.scriptStream.init(MPLT.streamInput);
+ MPLT.pump = Components.classes['@mozilla.org/network/input-stream-pump;1']
+ .createInstance(Components.interfaces.nsIInputStreamPump);
+ MPLT.pump.init(MPLT.streamInput, -1, -1, 0, 0, false);
+ MPLT.pump.asyncRead(MPLT.dataListener,null);
+ },
+
+ onStopListening : function(){}
+ },
+
+ /**
+ * Listener for network input
+ * @type {nsIStreamListener}
+ */
+ dataListener :
+ {
+ onStartRequest: function(){},
+ onStopRequest: function(){},
+ onDataAvailable: function(request, context, inputStream, offset, count){
+ // Add the received data to the buffer, then process it
+ // Change CRLF to newline while we're at it
+ MPLT.textBuffer += MPLT.scriptStream.read(count).replace('\r\n', '\n');
+
+ MPLT.process();
+ }
+ },
+
+ /**
+ * Process the incoming data buffer
+ */
+ process : function()
+ {
+ // If we're waiting for a page to finish loading, wait
+ if (MPLT.timeLoadStarted)
+ return;
+
+ // Look for a carriage return
+ var firstCR = MPLT.textBuffer.indexOf('\n');
+
+ // If we haven't received a carriage return yet, wait
+ if (firstCR < 0)
+ return;
+
+ // If the first character was a carriage return, we're done!
+ if (firstCR == 0) {
+ MPLT.textBuffer = '';
+ MPLT.streamInput.close();
+ MPLT.streamOutput.close();
+
+ return;
+ }
+
+ // Remove the URL from the buffer
+ MPLT.URL = MPLT.textBuffer.substr(0, firstCR);
+ MPLT.textBuffer = MPLT.textBuffer.substr(firstCR + 1);
+
+ // Remember the current time and navigate to the new URL
+ MPLT.timeLoadStarted = new Date();
+ gBrowser.loadURIWithFlags(MPLT.URL, gBrowser.LOAD_FLAGS_BYPASS_CACHE);
+ setTimeout('MPLT.onTimeOut()', MPLT.TIME_OUT);
+ },
+
+ /**
+ * Page load completion handler
+ */
+ onPageLoad : function(e) {
+ // Ignore loads of non-HTML documents
+ if (!(e.originalTarget instanceof HTMLDocument))
+ return;
+
+ // Also ignore subframe loads
+ if (e.originalTarget.defaultView.frameElement)
+ return;
+
+ clearTimeout();
+ var timeElapsed = new Date() - MPLT.timeLoadStarted;
+
+ MPLT.outputResult(timeElapsed);
+ },
+
+ /**
+ * Timeout handler
+ */
+ onTimeOut : function() {
+ gBrowser.stop();
+
+ MPLT.outputResult('timeout');
+ },
+
+
+ /**
+ * Sends a properly-formatted result to the client
+ * @param {string} result The value to send along with the URL
+ */
+ outputResult : function(result) {
+
+ if (MPLT.URL) {
+ var outputString = MPLT.URL + ',' + result + '\n';
+ MPLT.streamOutput.write(outputString, outputString.length);
+ MPLT.URL = '';
+ }
+
+ MPLT.timeLoadStarted = null;
+ MPLT.process();
+ },
+
+ /**
+ * Time the page load started. If null, we're waiting for the
+ * initial page load, or otherwise don't care about the page
+ * that's currently loading
+ * @type {number}
+ */
+ timeLoadStarted : null,
+
+ /*
+ * TODO(jhaas): add support for nsIWebProgressListener
+ * If the URL being visited died as part of a network error
+ * (host not found, connection reset by peer, etc), the onload
+ * event doesn't fire. The only way to catch it would be in
+ * a web progress listener. However, nsIWebProgress is not
+ * behaving according to documentation. More research is needed.
+ * For now, omitting it means that if any of our URLs are "dirty"
+ * (do not point to real web servers with real responses), we'll log
+ * them as timeouts. This doesn't affect pages where the server
+ * exists but returns an error code.
+ */
+
+ /**
+ * Initialize the plugin, create the socket and listen
+ */
+ initialize: function() {
+ // Register for page load events
+ gBrowser.addEventListener('load', this.onPageLoad, true);
+
+ // Set a timeout to wait for the initial page to load
+ MPLT.timeLoadStarted = new Date();
+ setTimeout('MPLT.onTimeOut()', MPLT.TIME_OUT);
+
+ // Create the listening socket
+ MPLT.serverSocket = Components.classes['@mozilla.org/network/server-socket;1']
+ .createInstance(Components.interfaces.nsIServerSocket);
+
+ MPLT.serverSocket.init(MPLT.PORT_NUMBER, true, 1);
+ MPLT.serverSocket.asyncListen(this.acceptListener);
+ },
+
+ /**
+ * Close the socket(s)
+ */
+ deinitialize: function() {
+ if (MPLT.streamInput) MPLT.streamInput.close();
+ if (MPLT.streamOutput) MPLT.streamOutput.close();
+ if (MPLT.serverSocket) MPLT.serverSocket.close();
+ }
+};
+
+window.addEventListener('load', function(e) { MPLT.initialize(); }, false);
+window.addEventListener('unload', function(e) { MPLT.deinitialize(); }, false);
diff --git a/chromium/tools/measure_page_load_time/ff_ext/install.rdf b/chromium/tools/measure_page_load_time/ff_ext/install.rdf
new file mode 100644
index 00000000000..6085f425e23
--- /dev/null
+++ b/chromium/tools/measure_page_load_time/ff_ext/install.rdf
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<RDF xmlns="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:em="http://www.mozilla.org/2004/em-rdf#">
+ <Description about="urn:mozilla:install-manifest">
+ <em:id>measurepageloadtimeextension@google.com</em:id>
+ <em:name>MeasurePageLoadTime</em:name>
+ <em:version>1.0</em:version>
+ <em:creator>Jonathan Haas</em:creator>
+ <em:targetApplication>
+ <Description>
+ <em:id>{ec8030f7-c20a-464f-9b0e-13a3a9e97384}</em:id> <!-- firefox -->
+ <em:minVersion>1.5</em:minVersion>
+ <em:maxVersion>3.0.*</em:maxVersion>
+ </Description>
+ </em:targetApplication>
+ </Description>
+</RDF>
diff --git a/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.cpp b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.cpp
new file mode 100644
index 00000000000..d3a8d542519
--- /dev/null
+++ b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.cpp
@@ -0,0 +1,72 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// MeasurePageLoadTime.cpp : Implementation of DLL Exports.
+
+#include "stdafx.h"
+#include "resource.h"
+#include "MeasurePageLoadTime.h"
+
+
+class CMeasurePageLoadTimeModule : public CAtlDllModuleT< CMeasurePageLoadTimeModule >
+{
+public :
+ DECLARE_LIBID(LIBID_MeasurePageLoadTimeLib)
+ DECLARE_REGISTRY_APPID_RESOURCEID(IDR_MEASUREPAGELOADTIME, "{56C6D9F9-643C-4F6E-906C-5F7CECB23C24}")
+};
+
+CMeasurePageLoadTimeModule _AtlModule;
+
+
+#ifdef _MANAGED
+#pragma managed(push, off)
+#endif
+
+// DLL Entry Point
+extern "C" BOOL WINAPI DllMain(HINSTANCE hInstance, DWORD dwReason, LPVOID lpReserved)
+{
+ if (dwReason == DLL_PROCESS_ATTACH)
+ {
+ DisableThreadLibraryCalls(hInstance);
+ }
+ return _AtlModule.DllMain(dwReason, lpReserved);
+}
+
+#ifdef _MANAGED
+#pragma managed(pop)
+#endif
+
+
+
+
+// Used to determine whether the DLL can be unloaded by OLE
+STDAPI DllCanUnloadNow(void)
+{
+ return _AtlModule.DllCanUnloadNow();
+}
+
+
+// Returns a class factory to create an object of the requested type
+STDAPI DllGetClassObject(REFCLSID rclsid, REFIID riid, LPVOID* ppv)
+{
+ return _AtlModule.DllGetClassObject(rclsid, riid, ppv);
+}
+
+
+// DllRegisterServer - Adds entries to the system registry
+STDAPI DllRegisterServer(void)
+{
+ // registers object, typelib and all interfaces in typelib
+ HRESULT hr = _AtlModule.DllRegisterServer();
+ return hr;
+}
+
+
+// DllUnregisterServer - Removes entries from the system registry
+STDAPI DllUnregisterServer(void)
+{
+ HRESULT hr = _AtlModule.DllUnregisterServer();
+ return hr;
+}
+
diff --git a/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.def b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.def
new file mode 100644
index 00000000000..5552923379d
--- /dev/null
+++ b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.def
@@ -0,0 +1,9 @@
+; MeasurePageLoadTime.def : Declares the module parameters.
+
+LIBRARY "MeasurePageLoadTime.DLL"
+
+EXPORTS
+ DllCanUnloadNow PRIVATE
+ DllGetClassObject PRIVATE
+ DllRegisterServer PRIVATE
+ DllUnregisterServer PRIVATE
diff --git a/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.idl b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.idl
new file mode 100644
index 00000000000..d2f98da3089
--- /dev/null
+++ b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.idl
@@ -0,0 +1,40 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// MeasurePageLoadTime.idl : IDL source for MeasurePageLoadTime
+//
+
+// This file will be processed by the MIDL tool to
+// produce the type library (MeasurePageLoadTime.tlb) and marshalling code.
+
+import "oaidl.idl";
+import "ocidl.idl";
+
+[
+ object,
+ uuid(019637EB-B865-485B-9A66-419477EE55A0),
+ dual,
+ nonextensible,
+ helpstring("IMeasurePageLoadTimeBHO Interface"),
+ pointer_default(unique)
+]
+interface IMeasurePageLoadTimeBHO : IDispatch{
+};
+[
+ uuid(61AC7AC4-B715-4955-A238-5F9AEA80DF4B),
+ version(1.0),
+ helpstring("MeasurePageLoadTime 1.0 Type Library")
+]
+library MeasurePageLoadTimeLib
+{
+ importlib("stdole2.tlb");
+ [
+ uuid(807E68BC-238F-4163-AE4B-0A3604F3E145),
+ helpstring("MeasurePageLoadTimeBHO Class")
+ ]
+ coclass MeasurePageLoadTimeBHO
+ {
+ [default] interface IMeasurePageLoadTimeBHO;
+ };
+};
diff --git a/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.rc b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.rc
new file mode 100644
index 00000000000..9285a705062
--- /dev/null
+++ b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.rc
@@ -0,0 +1,121 @@
+// Microsoft Visual C++ generated resource script.
+//
+#include "resource.h"
+
+#define APSTUDIO_READONLY_SYMBOLS
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 2 resource.
+//
+#include "winres.h"
+
+/////////////////////////////////////////////////////////////////////////////
+#undef APSTUDIO_READONLY_SYMBOLS
+
+/////////////////////////////////////////////////////////////////////////////
+// English (U.S.) resources
+
+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENU)
+#ifdef _WIN32
+LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_US
+#pragma code_page(1252)
+#endif //_WIN32
+
+#ifdef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// TEXTINCLUDE
+//
+
+1 TEXTINCLUDE
+BEGIN
+ "resource.h\0"
+END
+
+2 TEXTINCLUDE
+BEGIN
+ "#include ""winres.h""\r\n"
+ "\0"
+END
+
+3 TEXTINCLUDE
+BEGIN
+ "1 TYPELIB ""MeasurePageLoadTime.tlb""\r\n"
+ "\0"
+END
+
+#endif // APSTUDIO_INVOKED
+
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// Version
+//
+
+VS_VERSION_INFO VERSIONINFO
+ FILEVERSION 1,0,0,1
+ PRODUCTVERSION 1,0,0,1
+ FILEFLAGSMASK 0x3fL
+#ifdef _DEBUG
+ FILEFLAGS 0x1L
+#else
+ FILEFLAGS 0x0L
+#endif
+ FILEOS 0x4L
+ FILETYPE 0x2L
+ FILESUBTYPE 0x0L
+BEGIN
+ BLOCK "StringFileInfo"
+ BEGIN
+ BLOCK "040904e4"
+ BEGIN
+ VALUE "CompanyName", "Google"
+ VALUE "FileDescription", "Measures page load times"
+ VALUE "FileVersion", "1.0.0.1"
+ VALUE "LegalCopyright", "(c) 2008 Google. All rights reserved."
+ VALUE "InternalName", "MeasurePageLoadTime.dll"
+ VALUE "OriginalFilename", "MeasurePageLoadTime.dll"
+ VALUE "ProductName", "MeasurePageLoadTime"
+ VALUE "ProductVersion", "1.0.0.1"
+ END
+ END
+ BLOCK "VarFileInfo"
+ BEGIN
+ VALUE "Translation", 0x409, 1252
+ END
+END
+
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// REGISTRY
+//
+
+IDR_MEASUREPAGELOADTIME REGISTRY "MeasurePageLoadTime.rgs"
+IDR_MEASUREPAGELOADTIMEBHO REGISTRY "MeasurePageLoadTimeBHO.rgs"
+
+/////////////////////////////////////////////////////////////////////////////
+//
+// String Table
+//
+
+STRINGTABLE
+BEGIN
+ IDS_PROJNAME "MeasurePageLoadTime"
+END
+
+#endif // English (U.S.) resources
+/////////////////////////////////////////////////////////////////////////////
+
+
+
+#ifndef APSTUDIO_INVOKED
+/////////////////////////////////////////////////////////////////////////////
+//
+// Generated from the TEXTINCLUDE 3 resource.
+//
+1 TYPELIB "MeasurePageLoadTime.tlb"
+
+/////////////////////////////////////////////////////////////////////////////
+#endif // not APSTUDIO_INVOKED
+
diff --git a/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.rgs b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.rgs
new file mode 100644
index 00000000000..62f249bb628
--- /dev/null
+++ b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.rgs
@@ -0,0 +1,29 @@
+HKCR
+{
+ NoRemove AppID
+ {
+ '%APPID%' = s 'MeasurePageLoadTime'
+ 'MeasurePageLoadTime.DLL'
+ {
+ val AppID = s '%APPID%'
+ }
+ }
+}
+
+HKLM {
+ NoRemove SOFTWARE {
+ NoRemove Microsoft {
+ NoRemove Windows {
+ NoRemove CurrentVersion {
+ NoRemove Explorer {
+ NoRemove 'Browser Helper Objects' {
+ ForceRemove '{807E68BC-238F-4163-AE4B-0A3604F3E145}' = s 'MeasurePageLoadTimeBHO' {
+ val 'NoExplorer' = d '1'
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.vcproj b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.vcproj
new file mode 100644
index 00000000000..9ed8327bf95
--- /dev/null
+++ b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTime.vcproj
@@ -0,0 +1,320 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+ ProjectType="Visual C++"
+ Version="8.00"
+ Name="MeasurePageLoadTime"
+ ProjectGUID="{151243DF-25BE-4A88-B566-8B7AE8970E86}"
+ RootNamespace="MeasurePageLoadTime"
+ Keyword="AtlProj"
+ >
+ <Platforms>
+ <Platform
+ Name="Win32"
+ />
+ </Platforms>
+ <ToolFiles>
+ </ToolFiles>
+ <Configurations>
+ <Configuration
+ Name="Debug|Win32"
+ OutputDirectory="$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ UseOfATL="2"
+ ATLMinimizesCRunTimeLibraryUsage="false"
+ CharacterSet="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ PreprocessorDefinitions="_DEBUG"
+ MkTypLibCompatible="false"
+ TargetEnvironment="1"
+ GenerateStublessProxies="true"
+ TypeLibraryName="$(IntDir)/MeasurePageLoadTime.tlb"
+ HeaderFileName="MeasurePageLoadTime.h"
+ DLLDataFileName=""
+ InterfaceIdentifierFileName="MeasurePageLoadTime_i.c"
+ ProxyFileName="MeasurePageLoadTime_p.c"
+ ValidateParameters="false"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="0"
+ PreprocessorDefinitions="WIN32;_WINDOWS;_DEBUG;_USRDLL"
+ MinimalRebuild="true"
+ BasicRuntimeChecks="3"
+ RuntimeLibrary="3"
+ UsePrecompiledHeader="2"
+ WarningLevel="3"
+ Detect64BitPortabilityProblems="true"
+ DebugInformationFormat="4"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ PreprocessorDefinitions="_DEBUG"
+ Culture="1033"
+ AdditionalIncludeDirectories="$(IntDir)"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ RegisterOutput="true"
+ IgnoreImportLibrary="true"
+ AdditionalDependencies="ws2_32.lib"
+ LinkIncremental="2"
+ ModuleDefinitionFile=".\MeasurePageLoadTime.def"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCWebDeploymentTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ <Configuration
+ Name="Release|Win32"
+ OutputDirectory="$(ConfigurationName)"
+ IntermediateDirectory="$(ConfigurationName)"
+ ConfigurationType="2"
+ UseOfATL="1"
+ ATLMinimizesCRunTimeLibraryUsage="false"
+ CharacterSet="1"
+ >
+ <Tool
+ Name="VCPreBuildEventTool"
+ />
+ <Tool
+ Name="VCCustomBuildTool"
+ />
+ <Tool
+ Name="VCXMLDataGeneratorTool"
+ />
+ <Tool
+ Name="VCWebServiceProxyGeneratorTool"
+ />
+ <Tool
+ Name="VCMIDLTool"
+ PreprocessorDefinitions="NDEBUG"
+ MkTypLibCompatible="false"
+ TargetEnvironment="1"
+ GenerateStublessProxies="true"
+ TypeLibraryName="$(IntDir)/MeasurePageLoadTime.tlb"
+ HeaderFileName="MeasurePageLoadTime.h"
+ DLLDataFileName=""
+ InterfaceIdentifierFileName="MeasurePageLoadTime_i.c"
+ ProxyFileName="MeasurePageLoadTime_p.c"
+ ValidateParameters="false"
+ />
+ <Tool
+ Name="VCCLCompilerTool"
+ Optimization="2"
+ PreprocessorDefinitions="WIN32;_WINDOWS;NDEBUG;_USRDLL"
+ RuntimeLibrary="0"
+ UsePrecompiledHeader="2"
+ WarningLevel="3"
+ Detect64BitPortabilityProblems="true"
+ DebugInformationFormat="3"
+ />
+ <Tool
+ Name="VCManagedResourceCompilerTool"
+ />
+ <Tool
+ Name="VCResourceCompilerTool"
+ PreprocessorDefinitions="NDEBUG"
+ Culture="1033"
+ AdditionalIncludeDirectories="$(IntDir)"
+ />
+ <Tool
+ Name="VCPreLinkEventTool"
+ />
+ <Tool
+ Name="VCLinkerTool"
+ RegisterOutput="true"
+ IgnoreImportLibrary="true"
+ AdditionalDependencies="ws2_32.lib"
+ LinkIncremental="1"
+ ModuleDefinitionFile=".\MeasurePageLoadTime.def"
+ GenerateDebugInformation="true"
+ SubSystem="2"
+ OptimizeReferences="2"
+ EnableCOMDATFolding="2"
+ TargetMachine="1"
+ />
+ <Tool
+ Name="VCALinkTool"
+ />
+ <Tool
+ Name="VCManifestTool"
+ />
+ <Tool
+ Name="VCXDCMakeTool"
+ />
+ <Tool
+ Name="VCBscMakeTool"
+ />
+ <Tool
+ Name="VCFxCopTool"
+ />
+ <Tool
+ Name="VCAppVerifierTool"
+ />
+ <Tool
+ Name="VCWebDeploymentTool"
+ />
+ <Tool
+ Name="VCPostBuildEventTool"
+ />
+ </Configuration>
+ </Configurations>
+ <References>
+ </References>
+ <Files>
+ <Filter
+ Name="Source Files"
+ Filter="cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx"
+ UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}"
+ >
+ <File
+ RelativePath=".\MeasurePageLoadTime.cpp"
+ >
+ </File>
+ <File
+ RelativePath=".\MeasurePageLoadTime.def"
+ >
+ </File>
+ <File
+ RelativePath=".\MeasurePageLoadTime.idl"
+ >
+ </File>
+ <File
+ RelativePath=".\MeasurePageLoadTimeBHO.cpp"
+ >
+ </File>
+ <File
+ RelativePath=".\stdafx.cpp"
+ >
+ <FileConfiguration
+ Name="Debug|Win32"
+ >
+ <Tool
+ Name="VCCLCompilerTool"
+ UsePrecompiledHeader="1"
+ />
+ </FileConfiguration>
+ <FileConfiguration
+ Name="Release|Win32"
+ >
+ <Tool
+ Name="VCCLCompilerTool"
+ UsePrecompiledHeader="1"
+ />
+ </FileConfiguration>
+ </File>
+ </Filter>
+ <Filter
+ Name="Header Files"
+ Filter="h;hpp;hxx;hm;inl;inc;xsd"
+ UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}"
+ >
+ <File
+ RelativePath=".\MeasurePageLoadTimeBHO.h"
+ >
+ </File>
+ <File
+ RelativePath=".\stdafx.h"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="Resource Files"
+ Filter="rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav"
+ UniqueIdentifier="{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}"
+ >
+ <File
+ RelativePath=".\MeasurePageLoadTime.rc"
+ >
+ </File>
+ <File
+ RelativePath=".\MeasurePageLoadTime.rgs"
+ >
+ </File>
+ <File
+ RelativePath=".\MeasurePageLoadTimeBHO.rgs"
+ >
+ </File>
+ </Filter>
+ <Filter
+ Name="Generated Files"
+ SourceControlFiles="false"
+ >
+ <File
+ RelativePath=".\MeasurePageLoadTime.h"
+ >
+ </File>
+ <File
+ RelativePath=".\MeasurePageLoadTime_i.c"
+ >
+ <FileConfiguration
+ Name="Debug|Win32"
+ >
+ <Tool
+ Name="VCCLCompilerTool"
+ UsePrecompiledHeader="0"
+ />
+ </FileConfiguration>
+ <FileConfiguration
+ Name="Release|Win32"
+ >
+ <Tool
+ Name="VCCLCompilerTool"
+ UsePrecompiledHeader="0"
+ />
+ </FileConfiguration>
+ </File>
+ </Filter>
+ <File
+ RelativePath=".\ReadMe.txt"
+ >
+ </File>
+ </Files>
+ <Globals>
+ </Globals>
+</VisualStudioProject>
diff --git a/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTimeBHO.cpp b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTimeBHO.cpp
new file mode 100644
index 00000000000..3de87f3c803
--- /dev/null
+++ b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTimeBHO.cpp
@@ -0,0 +1,292 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Implements a Browser Helper Object (BHO) which opens a socket
+// and waits to receive URLs over it. Visits those URLs, measuring
+// how long it takes between the start of navigation and the
+// DocumentComplete event, and returns the time in milliseconds as
+// a string to the caller.
+
+#include "stdafx.h"
+#include "MeasurePageLoadTimeBHO.h"
+
+#define MAX_URL 1024 // size of URL buffer
+#define MAX_PAGELOADTIME (4*60*1000) // assume all pages take < 4 minutes
+#define PORT 42492 // port to listen on. Also jhaas's
+ // old MSFT employee number
+
+
+// Static function to serve as thread entry point, takes a "this"
+// pointer as pParam and calls the method in the object
+static DWORD WINAPI ProcessPageTimeRequests(LPVOID pThis) {
+ reinterpret_cast<CMeasurePageLoadTimeBHO*>(pThis)->ProcessPageTimeRequests();
+
+ return 0;
+}
+
+
+STDMETHODIMP CMeasurePageLoadTimeBHO::SetSite(IUnknown* pUnkSite)
+{
+ if (pUnkSite != NULL)
+ {
+ // Cache the pointer to IWebBrowser2.
+ HRESULT hr = pUnkSite->QueryInterface(IID_IWebBrowser2, (void **)&m_spWebBrowser);
+ if (SUCCEEDED(hr))
+ {
+ // Register to sink events from DWebBrowserEvents2.
+ hr = DispEventAdvise(m_spWebBrowser);
+ if (SUCCEEDED(hr))
+ {
+ m_fAdvised = TRUE;
+ }
+
+ // Stash the interface in the global interface table
+ CComGITPtr<IWebBrowser2> git(m_spWebBrowser);
+ m_dwCookie = git.Detach();
+
+ // Create the event to be signaled when navigation completes.
+ // Start it in nonsignaled state, and allow it to be triggered
+ // when the initial page load is done.
+ m_hEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+
+ // Create a thread to wait on the socket
+ HANDLE hThread = CreateThread(NULL, 0, ::ProcessPageTimeRequests, this, 0, NULL);
+ }
+ }
+ else
+ {
+ // Unregister event sink.
+ if (m_fAdvised)
+ {
+ DispEventUnadvise(m_spWebBrowser);
+ m_fAdvised = FALSE;
+ }
+
+ // Release cached pointers and other resources here.
+ m_spWebBrowser.Release();
+ }
+
+ // Call base class implementation.
+ return IObjectWithSiteImpl<CMeasurePageLoadTimeBHO>::SetSite(pUnkSite);
+}
+
+
+void STDMETHODCALLTYPE CMeasurePageLoadTimeBHO::OnDocumentComplete(IDispatch *pDisp, VARIANT *pvarURL)
+{
+ if (pDisp == m_spWebBrowser)
+ {
+ // Fire the event when the page is done loading
+ // to unblock the other thread.
+ SetEvent(m_hEvent);
+ }
+}
+
+
+void CMeasurePageLoadTimeBHO::ProcessPageTimeRequests()
+{
+ CoInitialize(NULL);
+
+ // The event will start in nonsignaled state, meaning that
+ // the initial page load isn't done yet. Wait for that to
+ // finish before doing anything.
+ //
+ // It seems to be the case that the BHO will get loaded
+ // and SetSite called always before the initial page load
+ // even begins, but just to be on the safe side, we won't
+ // wait indefinitely.
+ WaitForSingleObject(m_hEvent, MAX_PAGELOADTIME);
+
+ // Retrieve the web browser interface from the global table
+ CComGITPtr<IWebBrowser2> git(m_dwCookie);
+ IWebBrowser2* browser;
+ git.CopyTo(&browser);
+
+ // Create a listening socket
+ m_sockListen = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
+ if (m_sockListen == SOCKET_ERROR)
+ ErrorExit();
+
+ BOOL on = TRUE;
+ if (setsockopt(m_sockListen, SOL_SOCKET, SO_REUSEADDR,
+ (const char*)&on, sizeof(on)))
+ ErrorExit();
+
+ // Bind the listening socket
+ SOCKADDR_IN addrBind;
+
+ addrBind.sin_family = AF_INET;
+ addrBind.sin_addr.s_addr = htonl(INADDR_LOOPBACK);
+ addrBind.sin_port = htons(PORT);
+
+ if (bind(m_sockListen, (sockaddr*)&addrBind, sizeof(addrBind)))
+ ErrorExit();
+
+ // Listen for incoming connections
+ if (listen(m_sockListen, 1))
+ ErrorExit();
+
+ // Ensure the socket is blocking... it should be by default, but
+ // it can't hurt to make sure
+ unsigned long nNonblocking = 0;
+ if (ioctlsocket(m_sockListen, FIONBIO, &nNonblocking))
+ ErrorExit();
+
+ m_sockTransport = 0;
+
+ // Loop indefinitely waiting for connections
+ while(1)
+ {
+ SOCKADDR_IN addrConnected;
+ int sConnected = sizeof(addrConnected);
+
+ // Wait for a client to connect and send a URL
+ m_sockTransport = accept(
+ m_sockListen, (sockaddr*)&addrConnected, &sConnected);
+
+ if (m_sockTransport == SOCKET_ERROR)
+ ErrorExit();
+
+ char pbBuffer[MAX_URL], strURL[MAX_URL];
+ DWORD cbRead, cbWritten;
+
+ bool fDone = false;
+
+ // Loop until we're done with this client
+ while (!fDone)
+ {
+ *strURL = '\0';
+ bool fReceivedCR = false;
+
+ do
+ {
+ // Only receive up to the first carriage return
+ cbRead = recv(m_sockTransport, pbBuffer, MAX_URL-1, MSG_PEEK);
+
+ // An error on read most likely means that the remote peer
+ // closed the connection. Go back to waiting
+ if (cbRead == 0)
+ {
+ fDone = true;
+ break;
+ }
+
+ // Null terminate the received characters so strchr() is safe
+ pbBuffer[cbRead] = '\0';
+
+ if(char* pchFirstCR = strchr(pbBuffer, '\n'))
+ {
+ cbRead = (DWORD)(pchFirstCR - pbBuffer + 1);
+ fReceivedCR = true;
+ }
+
+ // The below call will not block, since we determined with
+ // MSG_PEEK that at least cbRead bytes are in the TCP receive buffer
+ recv(m_sockTransport, pbBuffer, cbRead, 0);
+ pbBuffer[cbRead] = '\0';
+
+ strcat_s(strURL, sizeof(strURL), pbBuffer);
+ } while (!fReceivedCR);
+
+ // If an error occurred while reading, exit this loop
+ if (fDone)
+ break;
+
+ // Strip the trailing CR and/or LF
+ int i;
+ for (i = (int)strlen(strURL)-1; i >= 0 && isspace(strURL[i]); i--)
+ {
+ strURL[i] = '\0';
+ }
+
+ if (i < 0)
+ {
+ // Sending a carriage return on a line by itself means that
+ // the client is done making requests
+ fDone = true;
+ }
+ else
+ {
+ // Send the browser to the requested URL
+ CComVariant vNavFlags( navNoReadFromCache );
+ CComVariant vTargetFrame("_self");
+ CComVariant vPostData("");
+ CComVariant vHTTPHeaders("");
+
+ ResetEvent(m_hEvent);
+ DWORD dwStartTime = GetTickCount();
+
+ HRESULT hr = browser->Navigate(
+ CComBSTR(strURL),
+ &vNavFlags,
+ &vTargetFrame, // TargetFrameName
+ &vPostData, // PostData
+ &vHTTPHeaders // Headers
+ );
+
+ // The main browser thread will call OnDocumentComplete() when
+ // the page is done loading, which will in turn trigger
+ // m_hEvent. Wait here until then; the event will reset itself
+ // once this thread is released
+ if (WaitForSingleObject(m_hEvent, MAX_PAGELOADTIME) == WAIT_TIMEOUT)
+ {
+ sprintf_s(pbBuffer, sizeof(pbBuffer), "%s,timeout\n", strURL);
+
+ browser->Stop();
+ }
+ else
+ {
+ // Format the elapsed time as a string
+ DWORD dwLoadTime = GetTickCount() - dwStartTime;
+ sprintf_s(
+ pbBuffer, sizeof(pbBuffer), "%s,%d\n", strURL, dwLoadTime);
+ }
+
+ // Send the result. Just in case the TCP buffer can't handle
+ // the whole thing, send in parts if necessary
+ char *chSend = pbBuffer;
+
+ while (*chSend)
+ {
+ cbWritten = send(
+ m_sockTransport, chSend, (int)strlen(chSend), 0);
+
+ // Error on send probably means connection reset by peer
+ if (cbWritten == 0)
+ {
+ fDone = true;
+ break;
+ }
+
+ chSend += cbWritten;
+ }
+ }
+ }
+
+ // Close the transport socket and wait for another connection
+ closesocket(m_sockTransport);
+ m_sockTransport = 0;
+ }
+}
+
+
+void CMeasurePageLoadTimeBHO::ErrorExit()
+{
+ // Unlink from IE, close the sockets, then terminate this
+ // thread
+ SetSite(NULL);
+
+ if (m_sockTransport && m_sockTransport != SOCKET_ERROR)
+ {
+ closesocket(m_sockTransport);
+ m_sockTransport = 0;
+ }
+
+ if (m_sockListen && m_sockListen != SOCKET_ERROR)
+ {
+ closesocket(m_sockListen);
+ m_sockListen = 0;
+ }
+
+ TerminateThread(GetCurrentThread(), -1);
+}
diff --git a/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTimeBHO.h b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTimeBHO.h
new file mode 100644
index 00000000000..cabb02401f5
--- /dev/null
+++ b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTimeBHO.h
@@ -0,0 +1,87 @@
+// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// MeasurePageLoadTimeBHO.h : Declaration of the CMeasurePageLoadTimeBHO
+
+#include "resource.h" // main symbols
+
+#include <shlguid.h> // IID_IWebBrowser2, DIID_DWebBrowserEvents2, et
+#include <exdispid.h> // DISPID_DOCUMENTCOMPLETE, etc.
+
+#include <string>
+
+#include "MeasurePageLoadTime.h"
+
+
+#if defined(_WIN32_WCE) && !defined(_CE_DCOM) && !defined(_CE_ALLOW_SINGLE_THREADED_OBJECTS_IN_MTA)
+#error "Single-threaded COM objects are not properly supported on Windows CE platform, such as the Windows Mobile platforms that do not include full DCOM support. Define _CE_ALLOW_SINGLE_THREADED_OBJECTS_IN_MTA to force ATL to support creating single-thread COM objects and allow use of its single-threaded COM object implementations. The threading model in your rgs file was set to 'Free' as that is the only threading model supported in non DCOM Windows CE platforms."
+#endif
+
+
+
+// CMeasurePageLoadTimeBHO
+
+class ATL_NO_VTABLE CMeasurePageLoadTimeBHO :
+ public CComObjectRootEx<CComSingleThreadModel>,
+ public CComCoClass<CMeasurePageLoadTimeBHO, &CLSID_MeasurePageLoadTimeBHO>,
+ public IObjectWithSiteImpl<CMeasurePageLoadTimeBHO>,
+ public IDispatchImpl<IMeasurePageLoadTimeBHO, &IID_IMeasurePageLoadTimeBHO, &LIBID_MeasurePageLoadTimeLib, /*wMajor =*/ 1, /*wMinor =*/ 0>,
+ public IDispEventImpl<1, CMeasurePageLoadTimeBHO, &DIID_DWebBrowserEvents2, &LIBID_SHDocVw, 1, 1>
+{
+public:
+ CMeasurePageLoadTimeBHO()
+ {
+ }
+
+DECLARE_REGISTRY_RESOURCEID(IDR_MEASUREPAGELOADTIMEBHO)
+
+DECLARE_NOT_AGGREGATABLE(CMeasurePageLoadTimeBHO)
+
+BEGIN_COM_MAP(CMeasurePageLoadTimeBHO)
+ COM_INTERFACE_ENTRY(IMeasurePageLoadTimeBHO)
+ COM_INTERFACE_ENTRY(IDispatch)
+ COM_INTERFACE_ENTRY(IObjectWithSite)
+END_COM_MAP()
+
+BEGIN_SINK_MAP(CMeasurePageLoadTimeBHO)
+ SINK_ENTRY_EX(1, DIID_DWebBrowserEvents2, DISPID_DOCUMENTCOMPLETE, OnDocumentComplete)
+END_SINK_MAP()
+
+ // DWebBrowserEvents2
+ void STDMETHODCALLTYPE OnDocumentComplete(IDispatch *pDisp, VARIANT *pvarURL);
+ STDMETHOD(SetSite)(IUnknown *pUnkSite);
+
+ DECLARE_PROTECT_FINAL_CONSTRUCT()
+
+ HRESULT FinalConstruct()
+ {
+ return S_OK;
+ }
+
+ void FinalRelease()
+ {
+ }
+
+ void ProcessPageTimeRequests(void);
+ void VisitNextURL(void);
+ void ErrorExit(void);
+
+private:
+ CComPtr<IWebBrowser2> m_spWebBrowser;
+ BOOL m_fAdvised;
+
+ // Handle to global interface table
+ DWORD m_dwCookie;
+
+ // Handle to event to signal when navigation completes
+ HANDLE m_hEvent;
+
+ // Socket for accepting incoming connections
+ SOCKET m_sockListen;
+
+ // Socket for communicating with remote peers
+ SOCKET m_sockTransport;
+};
+
+OBJECT_ENTRY_AUTO(__uuidof(MeasurePageLoadTimeBHO), CMeasurePageLoadTimeBHO)
diff --git a/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTimeBHO.rgs b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTimeBHO.rgs
new file mode 100644
index 00000000000..1a00b72c14d
--- /dev/null
+++ b/chromium/tools/measure_page_load_time/ie_bho/MeasurePageLoadTimeBHO.rgs
@@ -0,0 +1,27 @@
+HKCR
+{
+ MeasurePageLoadTime.MeasurePageLoadTi.1 = s 'MeasurePageLoadTimeBHO Class'
+ {
+ CLSID = s '{807E68BC-238F-4163-AE4B-0A3604F3E145}'
+ }
+ MeasurePageLoadTime.MeasurePageLoadTime = s 'MeasurePageLoadTimeBHO Class'
+ {
+ CLSID = s '{807E68BC-238F-4163-AE4B-0A3604F3E145}'
+ CurVer = s 'MeasurePageLoadTime.MeasurePageLoadTi.1'
+ }
+ NoRemove CLSID
+ {
+ ForceRemove {807E68BC-238F-4163-AE4B-0A3604F3E145} = s 'MeasurePageLoadTimeBHO Class'
+ {
+ ProgID = s 'MeasurePageLoadTime.MeasurePageLoadTi.1'
+ VersionIndependentProgID = s 'MeasurePageLoadTime.MeasurePageLoadTime'
+ ForceRemove 'Programmable'
+ InprocServer32 = s '%MODULE%'
+ {
+ val ThreadingModel = s 'Apartment'
+ }
+ val AppID = s '%APPID%'
+ 'TypeLib' = s '{61AC7AC4-B715-4955-A238-5F9AEA80DF4B}'
+ }
+ }
+}
diff --git a/chromium/tools/measure_page_load_time/ie_bho/resource.h b/chromium/tools/measure_page_load_time/ie_bho/resource.h
new file mode 100644
index 00000000000..38dc82565b7
--- /dev/null
+++ b/chromium/tools/measure_page_load_time/ie_bho/resource.h
@@ -0,0 +1,18 @@
+//{{NO_DEPENDENCIES}}
+// Microsoft Visual C++ generated include file.
+// Used by MeasurePageLoadTime.rc
+//
+#define IDS_PROJNAME 100
+#define IDR_MEASUREPAGELOADTIME 101
+#define IDR_MEASUREPAGELOADTIMEBHO 102
+
+// Next default values for new objects
+//
+#ifdef APSTUDIO_INVOKED
+#ifndef APSTUDIO_READONLY_SYMBOLS
+#define _APS_NEXT_RESOURCE_VALUE 201
+#define _APS_NEXT_COMMAND_VALUE 32768
+#define _APS_NEXT_CONTROL_VALUE 201
+#define _APS_NEXT_SYMED_VALUE 103
+#endif
+#endif
diff --git a/chromium/tools/memory/OWNERS b/chromium/tools/memory/OWNERS
index 2351a9ff53e..b17b756cd7e 100644
--- a/chromium/tools/memory/OWNERS
+++ b/chromium/tools/memory/OWNERS
@@ -1,5 +1,4 @@
bruening@chromium.org
glider@chromium.org
thestig@chromium.org
-timurrrr@chromium.org
rnk@chromium.org
diff --git a/chromium/tools/multi_process_rss.py b/chromium/tools/multi_process_rss.py
new file mode 100755
index 00000000000..100d0f759b1
--- /dev/null
+++ b/chromium/tools/multi_process_rss.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Counts a resident set size (RSS) of multiple processes without double-counts.
+# If they share the same page frame, the page frame is counted only once.
+#
+# Usage:
+# ./multi-process-rss.py <pid>|<pid>r [...]
+#
+# If <pid> has 'r' at the end, all descendants of the process are accounted.
+#
+# Example:
+# ./multi-process-rss.py 12345 23456r
+#
+# The command line above counts the RSS of 1) process 12345, 2) process 23456
+# and 3) all descendant processes of process 23456.
+
+
+import collections
+import logging
+import os
+import psutil
+import sys
+
+
+if sys.platform.startswith('linux'):
+ _TOOLS_PATH = os.path.dirname(os.path.abspath(__file__))
+ _TOOLS_LINUX_PATH = os.path.join(_TOOLS_PATH, 'linux')
+ sys.path.append(_TOOLS_LINUX_PATH)
+ import procfs # pylint: disable=F0401
+
+
+class _NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+
+_LOGGER = logging.getLogger('multi-process-rss')
+_LOGGER.addHandler(_NullHandler())
+
+
+def _recursive_get_children(pid):
+ try:
+ children = psutil.Process(pid).get_children()
+ except psutil.error.NoSuchProcess:
+ return []
+ descendant = []
+ for child in children:
+ descendant.append(child.pid)
+ descendant.extend(_recursive_get_children(child.pid))
+ return descendant
+
+
+def list_pids(argv):
+ pids = []
+ for arg in argv[1:]:
+ try:
+ if arg.endswith('r'):
+ recursive = True
+ pid = int(arg[:-1])
+ else:
+ recursive = False
+ pid = int(arg)
+ except ValueError:
+ raise SyntaxError("%s is not an integer." % arg)
+ else:
+ pids.append(pid)
+ if recursive:
+ children = _recursive_get_children(pid)
+ pids.extend(children)
+
+ pids = sorted(set(pids), key=pids.index) # uniq: maybe slow, but simple.
+
+ return pids
+
+
+def count_pageframes(pids):
+ pageframes = collections.defaultdict(int)
+ pagemap_dct = {}
+ for pid in pids:
+ maps = procfs.ProcMaps.load(pid)
+ if not maps:
+ _LOGGER.warning('/proc/%d/maps not found.' % pid)
+ continue
+ pagemap = procfs.ProcPagemap.load(pid, maps)
+ if not pagemap:
+ _LOGGER.warning('/proc/%d/pagemap not found.' % pid)
+ continue
+ pagemap_dct[pid] = pagemap
+
+ for pid, pagemap in pagemap_dct.iteritems():
+ for vma in pagemap.vma_internals.itervalues():
+ for pageframe, number in vma.pageframes.iteritems():
+ pageframes[pageframe] += number
+
+ return pageframes
+
+
+def count_statm(pids):
+ resident = 0
+ shared = 0
+ private = 0
+
+ for pid in pids:
+ statm = procfs.ProcStatm.load(pid)
+ if not statm:
+ _LOGGER.warning('/proc/%d/statm not found.' % pid)
+ continue
+ resident += statm.resident
+ shared += statm.share
+ private += (statm.resident - statm.share)
+
+ return (resident, shared, private)
+
+
+def main(argv):
+ logging_handler = logging.StreamHandler()
+ logging_handler.setLevel(logging.WARNING)
+ logging_handler.setFormatter(logging.Formatter(
+ '%(asctime)s:%(name)s:%(levelname)s:%(message)s'))
+
+ _LOGGER.setLevel(logging.WARNING)
+ _LOGGER.addHandler(logging_handler)
+
+ if sys.platform.startswith('linux'):
+ logging.getLogger('procfs').setLevel(logging.WARNING)
+ logging.getLogger('procfs').addHandler(logging_handler)
+ pids = list_pids(argv)
+ pageframes = count_pageframes(pids)
+ else:
+ _LOGGER.error('%s is not supported.' % sys.platform)
+ return 1
+
+ # TODO(dmikurube): Classify this total RSS.
+ print len(pageframes) * 4096
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/chromium/tools/nocompile_driver.py b/chromium/tools/nocompile_driver.py
new file mode 100755
index 00000000000..b144c900ad0
--- /dev/null
+++ b/chromium/tools/nocompile_driver.py
@@ -0,0 +1,486 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Implements a simple "negative compile" test for C++ on linux.
+
+Sometimes a C++ API needs to ensure that various usages cannot compile. To
+enable unittesting of these assertions, we use this python script to
+invoke gcc on a source file and assert that compilation fails.
+
+For more info, see:
+ http://dev.chromium.org/developers/testing/no-compile-tests
+"""
+
+import StringIO
+import ast
+import locale
+import os
+import re
+import select
+import shlex
+import subprocess
+import sys
+import time
+
+
+# Matches lines that start with #if and have the substring TEST in the
+# conditional. Also extracts the comment. This allows us to search for
+# lines like the following:
+#
+# #ifdef NCTEST_NAME_OF_TEST // [r'expected output']
+# #if defined(NCTEST_NAME_OF_TEST) // [r'expected output']
+# #if NCTEST_NAME_OF_TEST // [r'expected output']
+# #elif NCTEST_NAME_OF_TEST // [r'expected output']
+# #elif DISABLED_NCTEST_NAME_OF_TEST // [r'expected output']
+#
+# inside the unittest file.
+NCTEST_CONFIG_RE = re.compile(r'^#(?:el)?if.*\s+(\S*NCTEST\S*)\s*(//.*)?')
+
+
+# Matches and removes the defined() preprocesor predicate. This is useful
+# for test cases that use the preprocessor if-statement form:
+#
+# #if defined(NCTEST_NAME_OF_TEST)
+#
+# Should be used to post-process the results found by NCTEST_CONFIG_RE.
+STRIP_DEFINED_RE = re.compile(r'defined\((.*)\)')
+
+
+# Used to grab the expectation from comment at the end of an #ifdef. See
+# NCTEST_CONFIG_RE's comment for examples of what the format should look like.
+#
+# The extracted substring should be a python array of regular expressions.
+EXTRACT_EXPECTATION_RE = re.compile(r'//\s*(\[.*\])')
+
+
+# The header for the result file so that it can be compiled.
+RESULT_FILE_HEADER = """
+// This file is generated by the no compile test from:
+// %s
+
+#include "base/logging.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+"""
+
+
+# The GUnit test function to output on a successful test completion.
+SUCCESS_GUNIT_TEMPLATE = """
+TEST(%s, %s) {
+ LOG(INFO) << "Took %f secs. Started at %f, ended at %f";
+}
+"""
+
+# The GUnit test function to output for a disabled test.
+DISABLED_GUNIT_TEMPLATE = """
+TEST(%s, %s) { }
+"""
+
+
+# Timeout constants.
+NCTEST_TERMINATE_TIMEOUT_SEC = 60
+NCTEST_KILL_TIMEOUT_SEC = NCTEST_TERMINATE_TIMEOUT_SEC + 2
+BUSY_LOOP_MAX_TIME_SEC = NCTEST_KILL_TIMEOUT_SEC * 2
+
+
+def ValidateInput(parallelism, sourcefile_path, cflags, resultfile_path):
+ """Make sure the arguments being passed in are sane."""
+ assert parallelism >= 1
+ assert type(sourcefile_path) is str
+ assert type(cflags) is str
+ assert type(resultfile_path) is str
+
+
+def ParseExpectation(expectation_string):
+ """Extracts expectation definition from the trailing comment on the ifdef.
+
+ See the comment on NCTEST_CONFIG_RE for examples of the format we are parsing.
+
+ Args:
+ expectation_string: A string like "// [r'some_regex']"
+
+ Returns:
+ A list of compiled regular expressions indicating all possible valid
+ compiler outputs. If the list is empty, all outputs are considered valid.
+ """
+ assert expectation_string is not None
+
+ match = EXTRACT_EXPECTATION_RE.match(expectation_string)
+ assert match
+
+ raw_expectation = ast.literal_eval(match.group(1))
+ assert type(raw_expectation) is list
+
+ expectation = []
+ for regex_str in raw_expectation:
+ assert type(regex_str) is str
+ expectation.append(re.compile(regex_str))
+ return expectation
+
+
+def ExtractTestConfigs(sourcefile_path, suite_name):
+ """Parses the soruce file for test configurations.
+
+ Each no-compile test in the file is separated by an ifdef macro. We scan
+ the source file with the NCTEST_CONFIG_RE to find all ifdefs that look like
+ they demark one no-compile test and try to extract the test configuration
+ from that.
+
+ Args:
+ sourcefile_path: The path to the source file.
+ suite_name: The name of the test suite.
+
+ Returns:
+ A list of test configurations. Each test configuration is a dictionary of
+ the form:
+
+ { name: 'NCTEST_NAME'
+ suite_name: 'SOURCE_FILE_NAME'
+ expectations: [re.Pattern, re.Pattern] }
+
+ The |suite_name| is used to generate a pretty gtest output on successful
+ completion of the no compile test.
+
+ The compiled regexps in |expectations| define the valid outputs of the
+ compiler. If any one of the listed patterns matches either the stderr or
+ stdout from the compilation, and the compilation failed, then the test is
+ considered to have succeeded. If the list is empty, than we ignore the
+ compiler output and just check for failed compilation. If |expectations|
+ is actually None, then this specifies a compiler sanity check test, which
+ should expect a SUCCESSFUL compilation.
+ """
+ sourcefile = open(sourcefile_path, 'r')
+
+ # Start with at least the compiler sanity test. You need to always have one
+ # sanity test to show that compiler flags and configuration are not just
+ # wrong. Otherwise, having a misconfigured compiler, or an error in the
+ # shared portions of the .nc file would cause all tests to erroneously pass.
+ test_configs = []
+
+ for line in sourcefile:
+ match_result = NCTEST_CONFIG_RE.match(line)
+ if not match_result:
+ continue
+
+ groups = match_result.groups()
+
+ # Grab the name and remove the defined() predicate if there is one.
+ name = groups[0]
+ strip_result = STRIP_DEFINED_RE.match(name)
+ if strip_result:
+ name = strip_result.group(1)
+
+ # Read expectations if there are any.
+ test_configs.append({'name': name,
+ 'suite_name': suite_name,
+ 'expectations': ParseExpectation(groups[1])})
+ sourcefile.close()
+ return test_configs
+
+
+def StartTest(sourcefile_path, cflags, config):
+ """Start one negative compile test.
+
+ Args:
+ sourcefile_path: The path to the source file.
+ cflags: A string with all the CFLAGS to give to gcc. This string will be
+ split by shelex so be careful with escaping.
+ config: A dictionary describing the test. See ExtractTestConfigs
+ for a description of the config format.
+
+ Returns:
+ A dictionary containing all the information about the started test. The
+ fields in the dictionary are as follows:
+ { 'proc': A subprocess object representing the compiler run.
+ 'cmdline': The executed command line.
+ 'name': The name of the test.
+ 'suite_name': The suite name to use when generating the gunit test
+ result.
+ 'terminate_timeout': The timestamp in seconds since the epoch after
+ which the test should be terminated.
+ 'kill_timeout': The timestamp in seconds since the epoch after which
+ the test should be given a hard kill signal.
+ 'started_at': A timestamp in seconds since the epoch for when this test
+ was started.
+ 'aborted_at': A timestamp in seconds since the epoch for when this test
+ was aborted. If the test completed successfully,
+ this value is 0.
+ 'finished_at': A timestamp in seconds since the epoch for when this
+ test was successfully complete. If the test is aborted,
+ or running, this value is 0.
+ 'expectations': A dictionary with the test expectations. See
+ ParseExpectation() for the structure.
+ }
+ """
+ # TODO(ajwong): Get the compiler from gyp.
+ cmdline = [os.path.join(os.path.dirname(os.path.realpath(__file__)),
+ '../third_party/llvm-build/Release+Asserts/bin',
+ 'clang++')]
+ cmdline.extend(shlex.split(cflags))
+ name = config['name']
+ expectations = config['expectations']
+ if expectations is not None:
+ cmdline.append('-D%s' % name)
+ cmdline.extend(['-std=c++11', '-o', '/dev/null', '-c', '-x', 'c++',
+ sourcefile_path])
+
+ process = subprocess.Popen(cmdline, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ now = time.time()
+ return {'proc': process,
+ 'cmdline': ' '.join(cmdline),
+ 'name': name,
+ 'suite_name': config['suite_name'],
+ 'terminate_timeout': now + NCTEST_TERMINATE_TIMEOUT_SEC,
+ 'kill_timeout': now + NCTEST_KILL_TIMEOUT_SEC,
+ 'started_at': now,
+ 'aborted_at': 0,
+ 'finished_at': 0,
+ 'expectations': expectations}
+
+
def PassTest(resultfile, test):
  """Records a passing result for a test from StartTest(), or records a
  disabled test configuration.

  Args:
    resultfile: File object for .cc file that results are written to.
    test: Dictionary returned by StartTest(), or a raw configuration from
        ExtractTestConfigs() for a disabled test.
  """
  # Only tests that actually ran carry a 'started_at' timestamp; disabled
  # configurations never do, and get the disabled template instead.
  if 'started_at' not in test:
    resultfile.write(DISABLED_GUNIT_TEMPLATE % (
        test['suite_name'], test['name']))
    return
  elapsed = test['finished_at'] - test['started_at']
  resultfile.write(SUCCESS_GUNIT_TEMPLATE % (
      test['suite_name'], test['name'], elapsed,
      test['started_at'], test['finished_at']))
+
+
def FailTest(resultfile, test, error, stdout=None, stderr=None):
  """Logs the result of a failed test started by StartTest().

  Args:
    resultfile: File object for .cc file that results are written to.
    test: An instance of the dictionary returned by StartTest().
    error: The printable reason for the failure.
    stdout: The test's output to stdout.
    stderr: The test's output to stderr.
  """
  resultfile.write('#error "%s Failed: %s"\n' % (test['name'], error))
  resultfile.write('#error "compile line: %s"\n' % test['cmdline'])
  if stdout:
    resultfile.write('#error "%s stdout:"\n' % test['name'])
    for line in stdout.split('\n'):
      # The stray trailing colon that used to be appended to every stdout
      # line has been dropped so the format matches the stderr branch.
      resultfile.write('#error "  %s"\n' % line)

  if stderr:
    resultfile.write('#error "%s stderr:"\n' % test['name'])
    for line in stderr.split('\n'):
      resultfile.write('#error "  %s"\n' % line)
  resultfile.write('\n')
+
+
def WriteStats(resultfile, suite_name, timings):
  """Logs the performance timings for each stage of the script as a fake test.

  Args:
    resultfile: File object for .cc file that results are written to.
    suite_name: The name of the GUnit suite this test belongs to.
    timings: Dictionary with timestamps for each stage of the script run.
  """
  started = timings['started']
  extract_done = timings['extract_done']
  compile_done = timings['compile_done']
  results_processed = timings['results_processed']
  stats = ("Started %f, Ended %f, Total %fs, Extract %fs, "
           "Compile %fs, Process %fs") % (
               started, results_processed,
               results_processed - started,
               extract_done - started,
               compile_done - extract_done,
               results_processed - compile_done)
  resultfile.write('TEST(%s, Stats) { LOG(INFO) << "%s"; }\n' % (
      suite_name, stats))
+
+
def ProcessTestResult(resultfile, test):
  """Interprets and logs the result of a test started by StartTest().

  A no-compile test passes when the compile FAILS with output matching one
  of its expectations; a successful compile is therefore a test failure.

  Args:
    resultfile: File object for .cc file that results are written to.
    test: The dictionary from StartTest() to process.
  """
  # Snap a copy of stdout and stderr into the test dictionary immediately
  # cause we can only call this once on the Popen object, and lots of stuff
  # below will want access to it.
  proc = test['proc']
  (stdout, stderr) = proc.communicate()

  # A nonzero 'aborted_at' means CompleteAtLeastOneTest() timed this
  # compile out; report the timeout instead of inspecting the output.
  if test['aborted_at'] != 0:
    FailTest(resultfile, test, "Compile timed out. Started %f ended %f." %
             (test['started_at'], test['aborted_at']))
    return

  if proc.poll() == 0:
    # Handle failure due to successful compile.
    FailTest(resultfile, test,
             'Unexpected successful compilation.',
             stdout, stderr)
    return
  else:
    # Check the output has the right expectations. If there are no
    # expectations, then we just consider the output "matched" by default.
    if len(test['expectations']) == 0:
      PassTest(resultfile, test)
      return

    # Otherwise test against all expectations.
    for regexp in test['expectations']:
      if (regexp.search(stdout) is not None or
          regexp.search(stderr) is not None):
        PassTest(resultfile, test)
        return
    expectation_str = ', '.join(
        ["r'%s'" % regexp.pattern for regexp in test['expectations']])
    FailTest(resultfile, test,
             'Expectations [%s] did not match output.' % expectation_str,
             stdout, stderr)
    return
+
+
def CompleteAtLeastOneTest(resultfile, executing_tests):
  """Blocks until at least one task is removed from executing_tests.

  This function removes completed tests from executing_tests, logging failures
  and output. If no tests can be removed, it will enter a poll-loop until one
  test finishes or times out. On a timeout, this function is responsible for
  terminating the process in the appropriate fashion.

  Args:
    resultfile: File object for .cc file that results are written to.
        (Unused here; failures are logged later by ProcessTestResult().)
    executing_tests: A dict mapping a string containing the test name to the
        test dict return from StartTest().

  Returns:
    A list of tests that have finished.
  """
  finished_tests = []
  busy_loop_timeout = time.time() + BUSY_LOOP_MAX_TIME_SEC
  while len(finished_tests) == 0:
    # If we don't make progress for too long, assume the code is just dead.
    assert busy_loop_timeout > time.time()

    # Select on the output pipes so we sleep until output is available (or
    # the terminate interval elapses) rather than spinning.
    read_set = []
    for test in executing_tests.values():
      read_set.extend([test['proc'].stderr, test['proc'].stdout])
    select.select(read_set, [], read_set, NCTEST_TERMINATE_TIMEOUT_SEC)

    # Now attempt to process results.
    now = time.time()
    for test in executing_tests.values():
      proc = test['proc']
      if proc.poll() is not None:
        test['finished_at'] = now
        finished_tests.append(test)
      elif test['kill_timeout'] < now:
        # Escalate to a hard kill for processes that survived terminate().
        # This branch MUST be checked before the terminate branch:
        # kill_timeout is always later than terminate_timeout, so checking
        # terminate first would make the kill path unreachable.
        proc.kill()
        test['aborted_at'] = now
      elif test['terminate_timeout'] < now:
        proc.terminate()
        test['aborted_at'] = now

  for test in finished_tests:
    del executing_tests[test['name']]
  return finished_tests
+
+
def main():
  """Drives a no-compile test run.

  Usage: <parallelism> <sourcefile> <cflags> <resultfile>. Extracts the
  test configurations from the source file, compiles each one in parallel
  (expecting compilation to fail), and writes gunit-style results as a
  generated .cc file to resultfile.
  """
  if len(sys.argv) != 5:
    print ('Usage: %s <parallelism> <sourcefile> <cflags> <resultfile>' %
           sys.argv[0])
    sys.exit(1)

  # Force us into the "C" locale so the compiler doesn't localize its output.
  # In particular, this stops gcc from using smart quotes when in english UTF-8
  # locales. This makes the expectation writing much easier.
  os.environ['LC_ALL'] = 'C'

  parallelism = int(sys.argv[1])
  sourcefile_path = sys.argv[2]
  cflags = sys.argv[3]
  resultfile_path = sys.argv[4]

  timings = {'started': time.time()}

  ValidateInput(parallelism, sourcefile_path, cflags, resultfile_path)

  # Convert filename from underscores to CamelCase.
  words = os.path.splitext(os.path.basename(sourcefile_path))[0].split('_')
  words = [w.capitalize() for w in words]
  suite_name = 'NoCompile' + ''.join(words)

  test_configs = ExtractTestConfigs(sourcefile_path, suite_name)
  timings['extract_done'] = time.time()

  resultfile = StringIO.StringIO()
  resultfile.write(RESULT_FILE_HEADER % sourcefile_path)

  # Run the no-compile tests, but ensure we do not run more than |parallelism|
  # tests at once.
  timings['header_written'] = time.time()
  executing_tests = {}
  finished_tests = []

  # The sanity test is a plain compile that must SUCCEED; it also produces
  # the dependency file (-MMD/-MF) consumed by the build system.
  test = StartTest(
      sourcefile_path,
      cflags + ' -MMD -MF %s.d -MT %s' % (resultfile_path, resultfile_path),
      { 'name': 'NCTEST_SANITY',
        'suite_name': suite_name,
        'expectations': None,
      })
  executing_tests[test['name']] = test

  for config in test_configs:
    # CompleteAtLeastOneTest blocks until at least one test finishes. Thus, this
    # acts as a semaphore. We cannot use threads + a real semaphore because
    # subprocess forks, which can cause all sorts of hilarity with threads.
    if len(executing_tests) >= parallelism:
      finished_tests.extend(CompleteAtLeastOneTest(resultfile, executing_tests))

    if config['name'].startswith('DISABLED_'):
      PassTest(resultfile, config)
    else:
      test = StartTest(sourcefile_path, cflags, config)
      assert test['name'] not in executing_tests
      executing_tests[test['name']] = test

  # If there are no more test to start, we still need to drain the running
  # ones.
  while len(executing_tests) > 0:
    finished_tests.extend(CompleteAtLeastOneTest(resultfile, executing_tests))
  timings['compile_done'] = time.time()

  # The sanity test's output is consumed right here; all other tests have
  # communicate() called on them inside ProcessTestResult().
  for test in finished_tests:
    if test['name'] == 'NCTEST_SANITY':
      _, stderr = test['proc'].communicate()
      return_code = test['proc'].poll()
      if return_code != 0:
        sys.stderr.write(stderr)
      continue
    ProcessTestResult(resultfile, test)
  timings['results_processed'] = time.time()

  WriteStats(resultfile, suite_name, timings)

  # return_code is the sanity test's exit status; the result file is only
  # written when the sanity compile succeeded.
  if return_code == 0:
    with open(resultfile_path, 'w') as fd:
      fd.write(resultfile.getvalue())

  resultfile.close()
  sys.exit(return_code)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/chromium/tools/omahaproxy.py b/chromium/tools/omahaproxy.py
new file mode 100755
index 00000000000..75bf43ddef3
--- /dev/null
+++ b/chromium/tools/omahaproxy.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Chrome Version Tool
+
+Scrapes Chrome channel information and prints out the requested nugget of
+information.
+"""
+
+import json
+import optparse
+import os
+import string
+import sys
+import urllib
+
+URL = 'https://omahaproxy.appspot.com/json'
+
+
def main():
  """Fetches the omahaproxy channel data and prints the requested field.

  Returns 0 on success; 1 when the data cannot be fetched or when no
  matching os/channel/field combination exists.
  """
  try:
    data = json.load(urllib.urlopen(URL))
  except Exception as e:
    print 'Error: could not load %s\n\n%s' % (URL, str(e))
    return 1

  # Iterate to find out valid values for OS, channel, and field options.
  oses = set()
  channels = set()
  fields = set()

  for os_versions in data:
    oses.add(os_versions['os'])

    for version in os_versions['versions']:
      for field in version:
        if field == 'channel':
          channels.add(version['channel'])
        else:
          fields.add(field)

  oses = sorted(oses)
  channels = sorted(channels)
  fields = sorted(fields)

  # Command line parsing fun begins!
  usage = ('%prog [options]\n'
           'Print out information about a particular Chrome channel.')
  parser = optparse.OptionParser(usage=usage)

  parser.add_option('-o', '--os',
                    choices=oses,
                    default='win',
                    help='The operating system of interest: %s '
                         '[default: %%default]' % ', '.join(oses))
  parser.add_option('-c', '--channel',
                    choices=channels,
                    default='stable',
                    help='The channel of interest: %s '
                         '[default: %%default]' % ', '.join(channels))
  parser.add_option('-f', '--field',
                    choices=fields,
                    default='version',
                    help='The field of interest: %s '
                         '[default: %%default] ' % ', '.join(fields))
  # NOTE(review): positional arguments are accepted but never used.
  (opts, args) = parser.parse_args()

  # Print out requested data if available.
  for os_versions in data:
    if os_versions['os'] != opts.os:
      continue

    for version in os_versions['versions']:
      if version['channel'] != opts.channel:
        continue

      if opts.field not in version:
        continue

      print version[opts.field]
      return 0

  print 'Error: unable to find %s for Chrome %s %s.' % (
      opts.field, opts.os, opts.channel)
  return 1
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/oopif/OWNERS b/chromium/tools/oopif/OWNERS
new file mode 100644
index 00000000000..98a926a7271
--- /dev/null
+++ b/chromium/tools/oopif/OWNERS
@@ -0,0 +1,3 @@
+creis@chromium.org
+kenrb@chromium.org
+nasko@chromium.org
diff --git a/chromium/tools/oopif/iframe_server.py b/chromium/tools/oopif/iframe_server.py
new file mode 100644
index 00000000000..ace58422003
--- /dev/null
+++ b/chromium/tools/oopif/iframe_server.py
@@ -0,0 +1,224 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Test server for generating nested iframes with different sites.
+
+Very simple python server for creating a bunch of iframes. The page generation
+is randomized based on query parameters. See the __init__ function of the
+Params class for a description of the parameters.
+
+This server relies on gevent. On Ubuntu, install it via:
+
+ sudo apt-get install python-gevent
+
+Run the server using
+
+ python iframe_server.py
+
+To use the server, run chrome as follows:
+
+ google-chrome --host-resolver-rules='map *.invalid 127.0.0.1'
+
+Change 127.0.0.1 to be the IP of the machine this server is running on. Then
+in this chrome instance, navigate to any domain in .invalid
+(eg., http://1.invalid:8090) to run this test.
+
+"""
+
+import colorsys
+import copy
+import random
+import urllib
+import urlparse
+
+from gevent import pywsgi # pylint: disable=F0401
+
# Template for each generated page: background color derived from the site,
# a heading that links to the page itself, and the nested iframe HTML.
MAIN_PAGE = """
<html>
  <head>
    <style>
      body {
        background-color: %(color)s;
      }
    </style>
  </head>
  <body>
    <center>
      <h1><a href="%(url)s">%(site)s</a></h1>
      <p><small>%(url)s</small>
    </center>
    <br />
    %(iframe_html)s
  </body>
</html>
"""

# Template for one nested iframe; width/height are percentage strings.
IFRAME_FRAGMENT = """
<iframe src="%(src)s" width="%(width)s" height="%(height)s">
</iframe>
"""
+
class Params(object):
  """Simple object for holding page-generation parameters.

  Values come from a parsed query string, so each one arrives as a
  single-element list.
  """
  def __init__(self, query_dict):
    # Basic params:
    # nframes is how many frames per page.
    # nsites is how many sites to randomly choose out of.
    # depth is how deep to make the frame tree
    # pattern specifies how the sites are laid out per depth. An empty string
    #    uses a random N = [0, nsites] each time to generate a N.invalid URL.
    #    Otherwise specify with single letters like 'ABCA' and frame
    #    A.invalid will embed B.invalid will embed C.invalid will embed A.
    # jitter is the amount of randomness applied to nframes and nsites.
    #    Should be from [0,1]. 0.0 means no jitter.
    # size_jitter is like jitter, but for width and height.
    self.nframes = int(query_dict.get('nframes', [4] )[0])
    self.nsites = int(query_dict.get('nsites', [10] )[0])
    self.depth = int(query_dict.get('depth', [1] )[0])
    self.jitter = float(query_dict.get('jitter', [0] )[0])
    self.size_jitter = float(query_dict.get('size_jitter', [0.5] )[0])
    self.pattern = query_dict.get('pattern', [''] )[0]
    self.pattern_pos = int(query_dict.get('pattern_pos', [0] )[0])

    # Size parameters. Values are percentages.
    self.width = int(query_dict.get('width', [60])[0])
    self.height = int(query_dict.get('height', [50])[0])

    # Pass the random seed so our pages are reproducible.
    self.seed = int(query_dict.get('seed',
                                   [random.randint(0, 2147483647)])[0])
+
+
def get_site(urlpath):
  """Takes a urlparse object and finds its approximate site.

  Site is defined as registered domain name + scheme. The registered
  domain name is approximated by keeping only the last two labels of the
  DNS name; this breaks for registries like co.uk.
  """
  hostname = urlpath.netloc.split(':')[0]
  labels = hostname.split('.')
  return '%s://%s' % (urlpath.scheme, '.'.join(labels[-2:]))
+
+
def generate_host(rand, params):
  """Generates the host to be used as an iframe's source.

  Uses the .invalid TLD to ensure DNS will never resolve to a real
  address.
  """
  if not params.pattern:
    # Random site number in [1, nsites] (jitter may shrink the range).
    host = rand.randint(1, apply_jitter(rand, params.jitter, params.nsites))
  else:
    # Walk the fixed pattern, advancing (and wrapping) the position.
    host = params.pattern[params.pattern_pos]
    params.pattern_pos = (params.pattern_pos + 1) % len(params.pattern)
  return '%s.invalid' % host
+
+
def apply_jitter(rand, jitter, n):
  """Reduce n by a random fraction in [0, jitter]. Ensures result is >= 1."""
  # Treat near-zero jitter as "no jitter" so rand is never consulted.
  if jitter <= 0.001:
    return n
  reduced = n - int(n * rand.uniform(0, jitter))
  return reduced if reduced else 1
+
+
def get_color_for_site(site):
  """Generate a stable (and pretty-ish) color for a site."""
  val = hash(site)
  # Constants chosen empirically to look "pretty." HSV is easier to
  # control than RGB: capping hue at 0.6 gives a good color range, and
  # keeping saturation and value above 0.5 avoids washed-out colors.
  hue = (val % 100)/100.0 * 0.6
  sat = 1.0 - (int(val/100) % 100)/200.
  value = 1.0 - (int(val/10000) % 100)/200.0
  red, green, blue = colorsys.hsv_to_rgb(hue, sat, value)
  return 'rgb(%d, %d, %d)' % (
      int(red * 255), int(green * 255), int(blue * 255))
+
+
def make_src(scheme, netloc, path, params):
  """Constructs the src url that will recreate the given params."""
  # The params object's attributes become the query string verbatim.
  query = urllib.urlencode(params.__dict__)
  return '%(scheme)s://%(netloc)s%(path)s?%(params)s' % {
      'scheme': scheme,
      'netloc': netloc,
      'path': '' if path == '/' else path,
      'params': query,
  }
+
+
def make_iframe_html(urlpath, params):
  """Produces the HTML fragment for the iframe."""
  if params.depth <= 0:
    return ''
  # Ensure a stable random number per iframe.
  rand = random.Random()
  rand.seed(params.seed)

  host_and_port = urlpath.netloc.split(':')
  host_and_port[0] = generate_host(rand, params)

  # Note: rand is consumed in the same order as before (host, width,
  # height) so the generated tree stays reproducible for a given seed.
  return IFRAME_FRAGMENT % {
      'src': make_src(urlpath.scheme, ':'.join(host_and_port),
                      urlpath.path, params),
      'width': '%d%%' % apply_jitter(rand, params.size_jitter, params.width),
      'height': '%d%%' % apply_jitter(rand, params.size_jitter, params.height),
  }
+
+
def create_html(environ):
  """Creates the current HTML page. Also parses out query parameters."""
  # Rebuild the full request URL so it can be parsed once, yielding both
  # the approximate site (scheme + last two DNS labels) and the params.
  urlpath = urlparse.urlparse('%s://%s%s?%s' % (
      environ['wsgi.url_scheme'],
      environ['HTTP_HOST'],
      environ['PATH_INFO'],
      environ['QUERY_STRING']))
  site = get_site(urlpath)
  params = Params(urlparse.parse_qs(urlpath.query))

  # Seeded RNG keeps page generation reproducible for a given URL.
  rand = random.Random()
  rand.seed(params.seed)

  iframe_htmls = []
  for frame in xrange(0, apply_jitter(rand, params.jitter, params.nframes)):
    # Copy current parameters into iframe and make modifications
    # for the recursive generation.
    iframe_params = copy.copy(params)
    iframe_params.depth = params.depth - 1
    # Base the new seed off the current seed, but have it skip enough that
    # different frame trees are unlikely to collide. Numbers and skips
    # not chosen in any scientific manner at all.
    iframe_params.seed = params.seed + (frame + 1) * (
        1000000 + params.depth + 333)
    iframe_htmls.append(make_iframe_html(urlpath, iframe_params))
  template_params = dict(params.__dict__)
  template_params.update({
      'color': get_color_for_site(site),
      'iframe_html': '\n'.join(iframe_htmls),
      'site': site,
      'url': make_src(urlpath.scheme, urlpath.netloc, urlpath.path, params),
  })
  return MAIN_PAGE % template_params
+
+
def application(environ, start_response):
  """WSGI entry point: serves a generated frame tree, or an empty body for
  favicon requests."""
  start_response('200 OK', [('Content-Type', 'text/html')])
  is_favicon = environ['PATH_INFO'] == '/favicon.ico'
  # The conditional expression only evaluates create_html() when needed.
  yield '' if is_favicon else create_html(environ)
+
+
# Serve on all interfaces, port 8090, until interrupted. Note this runs at
# module import time; this file is intended to be executed directly.
server = pywsgi.WSGIServer(('', 8090), application)

server.serve_forever()
diff --git a/chromium/tools/origin_trials/OWNERS b/chromium/tools/origin_trials/OWNERS
new file mode 100644
index 00000000000..d9278608565
--- /dev/null
+++ b/chromium/tools/origin_trials/OWNERS
@@ -0,0 +1,3 @@
+dhnishi@chromium.org
+iclelland@chromium.org
+mek@chromium.org
diff --git a/chromium/tools/origin_trials/PRESUBMIT.py b/chromium/tools/origin_trials/PRESUBMIT.py
new file mode 100644
index 00000000000..40bf05dce90
--- /dev/null
+++ b/chromium/tools/origin_trials/PRESUBMIT.py
@@ -0,0 +1,25 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
def _CommonChecks(input_api, output_api):
  """Checks shared by the upload and commit hooks: pylint plus unit tests.

  Args:
    input_api: presubmit InputApi object.
    output_api: presubmit OutputApi object.

  Returns:
    A list of presubmit result objects (empty when everything passed).
  """
  results = []

  # Run Pylint over the files in the directory.
  pylint_checks = input_api.canned_checks.GetPylint(input_api, output_api)
  results.extend(input_api.RunTests(pylint_checks))

  # Run the generate_token unittests.
  results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
      input_api, output_api, '.', [ r'^.+_unittest\.py$']))

  return results
+
+
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook run on upload; delegates to the common checks."""
  return _CommonChecks(input_api, output_api)
+
+
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook run on commit; delegates to the common checks."""
  return _CommonChecks(input_api, output_api)
diff --git a/chromium/tools/origin_trials/eftest.key b/chromium/tools/origin_trials/eftest.key
new file mode 100644
index 00000000000..10015d89550
--- /dev/null
+++ b/chromium/tools/origin_trials/eftest.key
@@ -0,0 +1 @@
+ƒgôÃ*
diff --git a/chromium/tools/origin_trials/generate_token.py b/chromium/tools/origin_trials/generate_token.py
new file mode 100755
index 00000000000..3b4447bbaac
--- /dev/null
+++ b/chromium/tools/origin_trials/generate_token.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility for generating experimental API tokens
+
+usage: generate_token.py [-h] [--key-file KEY_FILE]
+ [--expire-days EXPIRE_DAYS |
+ --expire-timestamp EXPIRE_TIMESTAMP]
+ origin trial_name
+
+Run "generate_token.py -h" for more help on usage.
+"""
+import argparse
+import base64
+import json
+import re
+import os
+import struct
+import sys
+import time
+import urlparse
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+sys.path.insert(0, os.path.join(script_dir, 'third_party', 'ed25519'))
+import ed25519
+
+
# Matches a valid DNS name label (alphanumeric plus hyphens, except at the ends,
# no longer than 63 ASCII characters)
DNS_LABEL_REGEX = re.compile(r"^(?!-)[a-z\d-]{1,63}(?<!-)$", re.IGNORECASE)

# This script generates Version 2 tokens: VERSION is the single version
# byte prepended to both the signed data and the final encoded token.
VERSION = "\x02"
+
def HostnameFromArg(arg):
  """Determines whether a string represents a valid hostname.

  Returns the canonical (lowercased, no trailing dot) hostname if its
  argument is valid, or None otherwise.
  """
  if not arg or len(arg) > 255:
    return None
  # A single trailing dot (DNS root) is allowed and stripped.
  hostname = arg[:-1] if arg.endswith(".") else arg
  if all(DNS_LABEL_REGEX.match(label) for label in hostname.split(".")):
    return hostname.lower()
+
def OriginFromArg(arg):
  """Constructs the origin for the token from a command line argument.

  Raises argparse.ArgumentTypeError when neither a valid hostname nor a
  valid HTTP(S) origin URL was provided.
  """
  # Does it look like a hostname?
  hostname = HostnameFromArg(arg)
  if hostname:
    return "https://" + hostname + ":443"
  # If not, try to construct an origin URL from the argument
  origin = urlparse.urlparse(arg)
  if not (origin and origin.scheme and origin.netloc):
    raise argparse.ArgumentTypeError("%s is not a hostname or a URL" % arg)
  # HTTPS or HTTP only
  if origin.scheme not in ('https','http'):
    raise argparse.ArgumentTypeError("%s does not use a recognized URL scheme" %
                                     arg)
  # Accessing .port can raise for malformed ports (e.g. ":NaN").
  try:
    port = origin.port
  except ValueError:
    raise argparse.ArgumentTypeError("%s is not a hostname or a URL" % arg)
  # Add default port if it is not specified
  if not port:
    port = {"https": 443, "http": 80}[origin.scheme]
  # Strip any extra components and return the origin URL:
  return "{0}://{1}:{2}".format(origin.scheme, origin.hostname, port)
+
def ExpiryFromArgs(args):
  """Returns the token expiry as seconds since the epoch.

  An explicit --expire-timestamp wins; otherwise the expiry is computed as
  --expire-days from now.
  """
  explicit = args.expire_timestamp
  if explicit:
    return int(explicit)
  return int(time.time()) + int(args.expire_days) * 86400
+
def GenerateTokenData(origin, api_name, expiry):
  """Returns the UTF-8 encoded JSON payload for the token."""
  payload = {"origin": origin,
             "feature": api_name,
             "expiry": expiry}
  return json.dumps(payload).encode('utf-8')
+
def GenerateDataToSign(version, data):
  """Prepends the version byte and big-endian payload length to the data."""
  length_prefix = struct.pack(">I", len(data))
  return version + length_prefix + data
+
def Sign(private_key, data):
  """Signs data with a 64-byte Ed25519 key (32-byte seed + 32-byte pubkey)."""
  seed, public = private_key[:32], private_key[32:]
  return ed25519.signature(data, seed, public)
+
def FormatToken(version, signature, data):
  """Returns the base64 encoding of version || signature || length || data."""
  payload = version + signature + struct.pack(">I", len(data)) + data
  return base64.b64encode(payload)
+
def main():
  """Parses arguments, signs a token, verifies the signature, prints it."""
  parser = argparse.ArgumentParser(
      description="Generate tokens for enabling experimental APIs")
  parser.add_argument("origin",
                      help="Origin for which to enable the API. This can be "
                           "either a hostname (default scheme HTTPS, default "
                           "port 443) or a URL.",
                      type=OriginFromArg)
  parser.add_argument("trial_name",
                      help="Feature to enable. The current list of "
                           "experimental feature trials can be found in "
                           "RuntimeFeatures.in")
  parser.add_argument("--key-file",
                      help="Ed25519 private key file to sign the token with",
                      default="eftest.key")
  expiry_group = parser.add_mutually_exclusive_group()
  expiry_group.add_argument("--expire-days",
                            help="Days from now when the token should expire",
                            type=int,
                            default=42)
  expiry_group.add_argument("--expire-timestamp",
                            help="Exact time (seconds since 1970-01-01 "
                                 "00:00:00 UTC) when the token should expire",
                            type=int)

  args = parser.parse_args()
  expiry = ExpiryFromArgs(args)

  # Use a context manager so the key file is always closed (the previous
  # code leaked the file handle).
  with open(os.path.expanduser(args.key_file), mode="rb") as key_file:
    private_key = key_file.read(64)

  # Validate that the key file read was a proper Ed25519 key -- running the
  # publickey method on the first half of the key should return the second
  # half.
  if (len(private_key) < 64 or
      ed25519.publickey(private_key[:32]) != private_key[32:]):
    print("Unable to use the specified private key file.")
    sys.exit(1)

  token_data = GenerateTokenData(args.origin, args.trial_name, expiry)
  data_to_sign = GenerateDataToSign(VERSION, token_data)
  signature = Sign(private_key, data_to_sign)

  # Verify that the signature is correct before printing it.
  try:
    ed25519.checkvalid(signature, data_to_sign, private_key[32:])
  except Exception as exc:
    print("There was an error generating the signature.")
    print("(The original error was: %s)" % exc)
    sys.exit(1)

  # Output a properly-formatted token, using the version byte declared in
  # VERSION (currently version 2).
  print(FormatToken(VERSION, signature, token_data))

if __name__ == "__main__":
  main()
diff --git a/chromium/tools/origin_trials/generate_token_unittest.py b/chromium/tools/origin_trials/generate_token_unittest.py
new file mode 100755
index 00000000000..06a2cc917be
--- /dev/null
+++ b/chromium/tools/origin_trials/generate_token_unittest.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+# Copyright (c) 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for generate_token.py"""
+
+import argparse
+import generate_token
+import unittest
+
+
class GenerateTokenTest(unittest.TestCase):
  """Unit tests for the hostname/origin parsing in generate_token."""

  def test_hostname_validation(self):
    # Each pair is (input, expected canonical hostname, or None if invalid).
    for hostname, expected_result in [
        ("", None),
        (None, None),
        ("example.com", "example.com"),
        ("127.0.0.1", "127.0.0.1"),
        ("localhost", "localhost"),
        ("example.com.", "example.com"),
        ("ExAmPlE.coM", "example.com"),
        (".example.com", None),
        ("example..com", None),
        ("example123.com", "example123.com"),
        ("123example.com", "123example.com"),
        ("a.com", "a.com"),
        ("1.com", "1.com"),
        ("-.com", None),
        ("aa.com", "aa.com"),
        ("a1.com", "a1.com"),
        ("a-.com", None),
        ("-a.com", None),
        ("123-example.com", "123-example.com"),
        ("-123example.com", None),
        ("123example-.com", None),
        # Label-length (63) and total-length (255) boundary cases.
        (("a"*63)+".com", ("a"*63)+".com"),
        (("a"*64)+".com", None),
        (".".join([("a"*15)]*16), ".".join([("a"*15)]*16)),
        (".".join([("a"*15)]*17), None)]:
      self.assertEqual(generate_token.HostnameFromArg(hostname),
                       expected_result)

  def test_origin_constructed_correctly(self):
    # Bare hostnames default to https:443; explicit schemes keep their
    # default ports; credentials and paths are stripped.
    for origin_arg, expected_result in [
        ("example.com", "https://example.com:443"),
        ("https://example.com", "https://example.com:443"),
        ("https://example.com/", "https://example.com:443"),
        ("http://example.com", "http://example.com:80"),
        ("http://127.0.0.1:8000", "http://127.0.0.1:8000"),
        ("http://user:pass@example.com/path", "http://example.com:80")]:
      self.assertEqual(generate_token.OriginFromArg(origin_arg),
                       expected_result)

  def test_origin_fails_correctly(self):
    # Invalid hostnames, unsupported schemes, and malformed ports must all
    # raise ArgumentTypeError.
    for invalid_hostname in [
        "example..com",
        "gopher://gopher.tc.umn.edu",
        "https://",
        "https://example.com:NaN/",
        "Not even close"]:
      self.assertRaises(argparse.ArgumentTypeError,
                        generate_token.OriginFromArg,
                        invalid_hostname)
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/origin_trials/third_party/ed25519/LICENSE b/chromium/tools/origin_trials/third_party/ed25519/LICENSE
new file mode 100644
index 00000000000..b204f4c9dea
--- /dev/null
+++ b/chromium/tools/origin_trials/third_party/ed25519/LICENSE
@@ -0,0 +1 @@
+The Ed25519 software is in the public domain.
diff --git a/chromium/tools/origin_trials/third_party/ed25519/OWNERS b/chromium/tools/origin_trials/third_party/ed25519/OWNERS
new file mode 100644
index 00000000000..d9278608565
--- /dev/null
+++ b/chromium/tools/origin_trials/third_party/ed25519/OWNERS
@@ -0,0 +1,3 @@
+dhnishi@chromium.org
+iclelland@chromium.org
+mek@chromium.org
diff --git a/chromium/tools/origin_trials/third_party/ed25519/README.chromium b/chromium/tools/origin_trials/third_party/ed25519/README.chromium
new file mode 100644
index 00000000000..412b4c35161
--- /dev/null
+++ b/chromium/tools/origin_trials/third_party/ed25519/README.chromium
@@ -0,0 +1,21 @@
+Name: Ed25519: high-speed high-security signatures
+Short Name: Ed25519
+URL: http://ed25519.cr.yp.to/software.html
+Version: 0
+License: Public Domain
+License File: NOT_SHIPPED
+Security Critical: no
+
+Description:
+This package is used by the command-line utilities in tools/origin_trials to
+sign tokens for experimental feature usage, and to verify the signatures on
+those tokens.
+Although this package does provide cryptographic signing and verification
+capabilities, it is currently not considered security-critical. It is only used
+to sign tokens for use in unit tests, or for custom developer builds of Chrome;
+not to generate tokens usable in release Chrome builds, and not for verification
+of signatures. This classification may change if the package is used for other
+purposes in the future.
+
+Local Modifications:
+None.
diff --git a/chromium/tools/origin_trials/third_party/ed25519/ed25519.py b/chromium/tools/origin_trials/third_party/ed25519/ed25519.py
new file mode 100644
index 00000000000..8497786d530
--- /dev/null
+++ b/chromium/tools/origin_trials/third_party/ed25519/ed25519.py
@@ -0,0 +1,109 @@
+# The original version of this file was downloaded from
+# http://ed25519.cr.yp.to/software.html, and came with the following copyright
+# statement:
+# The Ed25519 software is in the public domain.
+
+import hashlib
+
b = 256  # Bit length of encoded integers/points (b/8 = 32 bytes).
q = 2**255 - 19  # Prime of the underlying field.
l = 2**252 + 27742317777372353535851937790883648493  # Order of the base point.
+
def H(m):
  """Return the 64-byte SHA-512 digest of message m."""
  digest = hashlib.sha512(m)
  return digest.digest()
+
def expmod(b,e,m):
  # Modular exponentiation by recursive squaring: returns b**e mod m.
  # NOTE(review): relies on Python 2 integer division (e/2); under
  # Python 3 this produces floats -- confirm this module stays on Python 2.
  if e == 0: return 1
  t = expmod(b,e/2,m)**2 % m
  if e & 1: t = (t*b) % m
  return t
+
def inv(x):
  # Multiplicative inverse mod q via Fermat's little theorem: x**(q-2).
  return expmod(x,q-2,q)
+
# Curve constant d = -121665/121666 (mod q).
d = -121665 * inv(121666)
# I = a square root of -1 mod q, used when recovering x coordinates.
I = expmod(2,(q-1)/4,q)
+
def xrecover(y):
  # Recover the even x coordinate for a given y from the curve equation.
  xx = (y*y-1) * inv(d*y*y+1)
  x = expmod(xx,(q+3)/8,q)
  if (x*x - xx) % q != 0: x = (x*I) % q  # pick the actual square root
  if x % 2 != 0: x = q-x                 # normalize to the even root
  return x
+
# B is the standard Ed25519 base point, defined by y = 4/5.
By = 4 * inv(5)
Bx = xrecover(By)
B = [Bx % q,By % q]
+
def edwards(P,Q):
  # Point addition; points are [x, y] lists with coordinates mod q.
  x1 = P[0]
  y1 = P[1]
  x2 = Q[0]
  y2 = Q[1]
  x3 = (x1*y2+x2*y1) * inv(1+d*x1*x2*y1*y2)
  y3 = (y1*y2+x1*x2) * inv(1-d*x1*x2*y1*y2)
  return [x3 % q,y3 % q]
+
def scalarmult(P,e):
  # Scalar multiplication e*P by recursive double-and-add.
  # NOTE(review): e/2 relies on Python 2 integer division.
  if e == 0: return [0,1]  # [0, 1] is the group identity
  Q = scalarmult(P,e/2)
  Q = edwards(Q,Q)
  if e & 1: Q = edwards(Q,P)
  return Q
+
def encodeint(y):
  # Encode integer y as b/8 little-endian bytes (a Python 2 str).
  bits = [(y >> i) & 1 for i in range(b)]
  return ''.join([chr(sum([bits[i * 8 + j] << j for j in range(8)])) for i in range(b/8)])
+
def encodepoint(P):
  # Encode a point as its y coordinate with the parity of x in the top bit.
  x = P[0]
  y = P[1]
  bits = [(y >> i) & 1 for i in range(b - 1)] + [x & 1]
  return ''.join([chr(sum([bits[i * 8 + j] << j for j in range(8)])) for i in range(b/8)])
+
def bit(h,i):
  # Return bit i (little-endian within each byte) of byte string h.
  return (ord(h[i/8]) >> (i%8)) & 1
+
def publickey(sk):
  # Derive the public key: clamp the SHA-512 of the seed into a scalar,
  # multiply the base point, and encode the result.
  h = H(sk)
  a = 2**(b-2) + sum(2**i * bit(h,i) for i in range(3,b-2))
  A = scalarmult(B,a)
  return encodepoint(A)
+
def Hint(m):
  # SHA-512 of m interpreted as a 2b-bit little-endian integer.
  h = H(m)
  return sum(2**i * bit(h,i) for i in range(2*b))
+
def signature(m,sk,pk):
  # Produce the 64-byte signature encodepoint(R) || encodeint(S) for m.
  h = H(sk)
  a = 2**(b-2) + sum(2**i * bit(h,i) for i in range(3,b-2))  # clamped scalar
  r = Hint(''.join([h[i] for i in range(b/8,b/4)]) + m)  # deterministic nonce
  R = scalarmult(B,r)
  S = (r + Hint(encodepoint(R) + pk + m) * a) % l
  return encodepoint(R) + encodeint(S)
+
def isoncurve(P):
  # Check the curve equation -x^2 + y^2 = 1 + d*x^2*y^2 (mod q).
  x = P[0]
  y = P[1]
  return (-x*x + y*y - 1 - d*x*x*y*y) % q == 0
+
def decodeint(s):
  # Decode a little-endian byte string into an integer.
  return sum(2**i * bit(s,i) for i in range(0,b))
+
def decodepoint(s):
  # Decode a point: the low b-1 bits give y; the top bit gives x's parity.
  y = sum(2**i * bit(s,i) for i in range(0,b-1))
  x = xrecover(y)
  if x & 1 != bit(s,b-1): x = q-x
  P = [x,y]
  if not isoncurve(P): raise Exception("decoding point that is not on curve")
  return P
+
def checkvalid(s,m,pk):
  # Verify signature s on message m under public key pk; raises on any
  # failure, returns None on success.
  if len(s) != b/4: raise Exception("signature length is wrong")
  if len(pk) != b/8: raise Exception("public-key length is wrong")
  R = decodepoint(s[0:b/8])
  A = decodepoint(pk)
  S = decodeint(s[b/8:b/4])
  h = Hint(encodepoint(R) + pk + m)
  # Verification equation: S*B == R + h*A.
  if scalarmult(B,S) != edwards(R,scalarmult(A,h)):
    raise Exception("signature does not pass verification")
diff --git a/chromium/tools/page_cycler/acid3/LICENSE b/chromium/tools/page_cycler/acid3/LICENSE
new file mode 100644
index 00000000000..85265b0a8ea
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/LICENSE
@@ -0,0 +1 @@
+Public domain
diff --git a/chromium/tools/page_cycler/acid3/README.chromium b/chromium/tools/page_cycler/acid3/README.chromium
new file mode 100644
index 00000000000..5e482735a90
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/README.chromium
@@ -0,0 +1,14 @@
+Name: Acid3
+URL: http://acid3.acidtests.org
+License: Public domain
+License File: NOT_SHIPPED
+
+This is Chromium's copy of the Acid3 page layout tests.
+
+Originally obtained from the Web Standards Project on June
+19, 2009:
+
+Some changes have been made to Acid3 to permit it to run in
+an offline mode. The included JS test harness is copyright
+Google and licensed under the main Chromium license (see
+trunk/src/LICENSE).
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.css b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.css
new file mode 100644
index 00000000000..65c8751c34d
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.css
@@ -0,0 +1,8 @@
+<!DOCTYPE HTML><html><head><title>FAIL</title><style>
+<!-- this file is sent as text/html, not text/css, which is why it is
+ called "empty.css" despite the following lines -->
+
+ body { background: white; color: black; }
+ h1 { color: red; }
+
+</style><body><h1>FAIL</h1></body></html> \ No newline at end of file
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.html b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.html
new file mode 100644
index 00000000000..734c5a1c09b
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.html
@@ -0,0 +1 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0//EN"><html><head><title></title></head><body></body></html> \ No newline at end of file
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.png b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.png
new file mode 100644
index 00000000000..fd5b91ea07b
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.png
Binary files differ
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.txt b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.txt
new file mode 100644
index 00000000000..957d6f8e582
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.txt
@@ -0,0 +1 @@
+<!DOCTYPE html><html><head><title>FAIL</title></head><body><p>FAIL</p><script>parent.notify("empty.txt")</script></body></html> \ No newline at end of file
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.xml b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.xml
new file mode 100644
index 00000000000..3f6063f4db6
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/empty.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- chase added XML 1.0 UTF-8 DTD -->
+<root>
+ <fail> This is an invalid byte in UTF-8: ¿ </fail>
+ <test/> <!-- shouldn't ever be parsed, as the parser should abort at the first sign of non-well-formedness -->
+</root>
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/favicon.ico b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/favicon.ico
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/favicon.ico
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/font.svg b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/font.svg
new file mode 100644
index 00000000000..13683d7d37b
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/font.svg
@@ -0,0 +1 @@
+<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><defs><font horiz-adv-x="500" id="mini"><font-face font-family="ACID3svgfont" units-per-em="4000" ascent="800" descent="-200" alphabetic="0"/><missing-glyph horiz-adv-x="10000" d="M0 0 4000 0"/><glyph unicode="a" glyph-name="a" horiz-adv-x="42"/><glyph unicode="b" glyph-name="b" horiz-adv-x="23"/><glyph unicode="c" glyph-name="c" horiz-adv-x="4711"/></font></defs></svg> \ No newline at end of file
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/font.ttf b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/font.ttf
new file mode 100644
index 00000000000..ac81cb03165
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/font.ttf
Binary files differ
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/head.js b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/head.js
new file mode 100644
index 00000000000..67c5d234269
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/head.js
@@ -0,0 +1,139 @@
+// Copyright (c) 2006-2009 The Chromium Authors. All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var __c = ""; // that's good enough for me.
+var __td;
+var __tf;
+var __tl;
+var __iterations;
+var __cycle;
+var __results = false;
+var __page;
+var __TIMEOUT = 15;
+function __get_cookie(name) {
+ var cookies = document.cookie.split("; ");
+ for (var i = 0; i < cookies.length; ++i) {
+ var t = cookies[i].split("=");
+ if (t[0] == name && t[1])
+ return t[1];
+ }
+ return "";
+}
+function __pages() { // fetch lazily
+ if (!("data" in this))
+ this.data = __get_cookie("__pc_pages").split(",");
+ return this.data;
+}
+function __get_timings() {
+ return __get_cookie("__pc_timings");
+}
+function __set_timings(timings) {
+ document.cookie = "__pc_timings=" + timings + "; path=/";
+}
+function __ontimeout() {
+ var doc;
+
+ // Call GC twice to cleanup JS heap before starting a new test.
+ if (window.gc) {
+ window.gc();
+ window.gc();
+ }
+
+ var ts = (new Date()).getTime();
+ var tlag = (ts - __te) - __TIMEOUT;
+ if (tlag > 0)
+ __tf = __tf + tlag;
+ if (__cycle == (__pages().length * __iterations)) {
+ document.cookie = "__pc_done=1; path=/";
+ doc = "../../common/report.html";
+ } else {
+ doc = "../" + __pages()[__page] + "/index.html"
+ }
+
+ var timings = __tl;
+ var oldTimings = __get_timings();
+ if (oldTimings != "") {
+ timings = oldTimings + "," + timings;
+ }
+ __set_timings(timings);
+
+ var url = doc + "?n=" + __iterations + "&i=" + __cycle + "&p=" + __page + "&ts=" + ts + "&td=" + __td + "&tf=" + __tf;
+ document.location.href = url;
+}
+
+function test_complete(errors, elapsed_time) {
+ if (__results)
+ return;
+ var unused = document.body.offsetHeight; // force layout
+
+ var ts = 0, td = 0, te = (new Date()).getTime(), tf = 0;
+
+ var s = document.location.search;
+ if (s) {
+ var params = s.substring(1).split('&');
+ for (var i = 0; i < params.length; ++i) {
+ var f = params[i].split('=');
+ switch (f[0]) {
+ case 'skip':
+ // No calculation, just viewing
+ return;
+ case 'n':
+ __iterations = f[1];
+ break;
+ case 'i':
+ __cycle = (f[1] - 0) + 1;
+ break;
+ case 'p':
+ __page = ((f[1] - 0) + 1) % __pages().length;
+ break;
+ case 'ts':
+ ts = (f[1] - 0);
+ break;
+ case 'td':
+ td = (f[1] - 0);
+ break;
+ case 'tf':
+ tf = (f[1] - 0);
+ break;
+ }
+ }
+ }
+ __tl = (te - ts);
+ __td = td + __tl;
+ __te = te;
+ __tf = tf; // record t-fudge
+
+ setTimeout("__ontimeout()", __TIMEOUT);
+}
+
+/*
+if (window.attachEvent)
+ window.attachEvent("onload", __onload);
+else
+ addEventListener("load", __onload, false);
+ */
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/index.html b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/index.html
new file mode 100644
index 00000000000..be41d540b5c
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/index.html
@@ -0,0 +1,3493 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
+<html>
+ <title>The Acid3 Test</title>
+ <script type="text/javascript">
+ var startTime = new Date();
+ </script>
+ <style type="text/css">
+
+ /* set some basic styles so that we can get reliably exact results */
+ * { margin: 0; border: 1px blue; padding: 0; border-spacing: 0; font: inherit; line-height: 1.2; color: inherit; background: transparent; }
+ :link, :visited { color: blue; }
+
+ /* header and general layout */
+ html { font: 20px Arial, sans-serif; border: 2cm solid gray; width: 32em; margin: 1em; }
+ :root { background: silver; color: black; border-width: 0 0.2em 0.2em 0; } /* left and top content edges: 1*20px = 20px */
+ body { padding: 2em 2em 0; background: url(data:image/gif;base64,iVBORw0KGgoAAAANSUhEUgAAABQAAAAUCAIAAAAC64paAAAABGdBTUEAAK%2FINwWK6QAAAAlwSFlzAAAASAAAAEgARslrPgAAABtJREFUOMtj%2FM9APmCiQO%2Bo5lHNo5pHNVNBMwAinAEnIWw89gAAACJ6VFh0U29mdHdhcmUAAHjac0zJT0pV8MxNTE8NSk1MqQQAL5wF1K4MqU0AAAAASUVORK5CYII%3D) no-repeat 99.8392283% 1px white; border: solid 1px black; margin: -0.2em 0 0 -0.2em; } /* left and top content edges: 20px-0.2*20px+1px+2*20px = 57px */
+ h1:first-child { cursor: help; font-size: 5em; font-weight: bolder; margin-bottom: -0.4em; text-shadow: rgba(192, 192, 192, 1.0) 3px 3px; } /* (left:57px, top:57px) */
+ #result { font-weight: bolder; width: 5.68em; text-align: right; }
+ #result { font-size: 5em; margin: -2.19em 0 0; } /* (right:57px+5.2*5*20px = 577px, top:57px+1.2*5*20px-0.4*5*20px+1px+1*40px+1*40px+1px+2*40px+150px-2.19*5*20px = 230px) */
+ .hidden { visibility: hidden; }
+ #slash { color: red; color: hsla(0, 0%, 0%, 1.0); }
+ #instructions { margin-top: 0; font-size: 0.8em; color: gray; color: -acid3-bogus; height: 6.125em; } /* (left:57px, top:230px+1.2*5*20+0 = 350px) */
+ #instructions { margin-right: -20px; padding-right: 20px; background: url(data:image/gif;base64,iVBORw0KGgoAAAANSUhEUgAAABQAAAAUCAIAAAAC64paAAAABGdBTUEAAK%2FINwWK6QAAAAlwSFlzAAAASAAAAEgARslrPgAAABtJREFUOMtj%2FM9APmCiQO%2Bo5lHNo5pHNVNBMwAinAEnIWw89gAAACJ6VFh0U29mdHdhcmUAAHjac0zJT0pV8MxNTE8NSk1MqQQAL5wF1K4MqU0AAAAASUVORK5CYII%3D) no-repeat top right; }
+ #instructions span { float: right; width: 20px; margin-right: -20px; background: white; height: 20px; }
+ @font-face { font-family: "AcidAhemTest"; src: url(font.ttf); }
+ map::after { position: absolute; top: 18px; left: 638px; content: "X"; background: fuchsia; color: white; font: 20px/1 AcidAhemTest; }
+ iframe { float: left; height: 0; width: 0; } /* hide iframes but don't make them display: none */
+ object { position: fixed; left: 130.5px; top: 84.3px; background: transparent; } /* show objects if they have content */
+ .removed { position: absolute; top: 80px; left: 380px; height: 100px; width: 100px; opacity: 0; }
+
+ /* set the line height of the line of coloured boxes so we can add them without the layout changing height */
+ .buckets { font: 0/0 Arial, sans-serif; }
+ .buckets { padding: 0 0 150px 3px; }
+
+ /* the next two rules give the six coloured blocks their default styles (they match the same elements); the third hides them */
+ :first-child + * .buckets p { display: inline-block; vertical-align: 2em; border: 2em dotted red; padding: 1.0em 0 1.0em 2em; }
+ * + * > * > p { margin: 0; border: 1px solid ! important; }
+ .z { visibility: hidden; } /* only matches the buckets with no score */
+
+ /* sizes for the six buckets */
+ #bucket1 { font-size: 20px; margin-left: 0.2em; padding-left: 1.3em; padding-right: 1.3em; margin-right: 0.0001px; }
+ #bucket2 { font-size: 24px; margin-left: 0.375em; padding-left: 30px; padding-right: 32px; margin-right: 2px; }
+ #bucket3 { font-size: 28px; margin-left: 8.9999px; padding-left: 17px; padding-right: 55px; margin-right: 12px; }
+ #bucket4 { font-size: 32px; margin-left: 0; padding-left: 84px; padding-right: 0; margin-right: 0; }
+ #bucket5 { font-size: 36px; margin-left: 13px; padding-left: 0; padding-right: 94px; margin-right: 25px; }
+ #bucket6 { font-size: 40px; margin-left: -10px; padding-left: 104px; padding-right: -10px; }
+
+ /* colours for them */
+ .z, .zP, .zPP, .zPPP, .zPPPP, .zPPPPP { background: black; }
+ .zPPPPPP, .zPPPPPPP, .zPPPPPPPP, .zPPPPPPPP, .zPPPPPPPPP,
+ .zPPPPPPPPPP { background: grey; }
+ .zPPPPPPPPPPP, .zPPPPPPPPPPPP, .zPPPPPPPPPPPPP,
+ .zPPPPPPPPPPPPPP, .zPPPPPPPPPPPPPPP { background: silver; }
+ #bucket1.zPPPPPPPPPPPPPPPP { background: red; }
+ #bucket2.zPPPPPPPPPPPPPPPP { background: orange; }
+ #bucket3.zPPPPPPPPPPPPPPPP { background: yellow; }
+ #bucket4.zPPPPPPPPPPPPPPPP { background: lime; }
+ #bucket5.zPPPPPPPPPPPPPPPP { background: blue; }
+ #bucket6.zPPPPPPPPPPPPPPPP { background: purple; }
+
+ /* The line-height for the .bucket div is worked out as follows:
+ *
+ * The div.bucket element has a line box with a few
+ * inline-blocks. Each inline-block consists of:
+ *
+ * 2.0em vertical-align from baseline to bottom of inline-block
+ * 1px bottom border
+ * 1.0em bottom padding
+ * 1.0em top padding
+ * 1px top border
+ *
+ * The biggest inline-block has font-size: 40px.
+ *
+ * Thus the distance from the baseline to the top of the biggest
+ * inline-block is (2em+1em+1em)*2em*20px+2px = 162px.
+ *
+ * The line box itself has no other contents, and its strut has zero
+ * height and there is no half-leading, so the height of the
+ * div.bucket is 162px.
+ *
+ * (Why use line-height:0 and font-size:0? Well:
+ *
+ * The div.bucket line box would have a height that is the maximum
+ * of the following two sums:
+ *
+ * 1: half-leading + font descent at 1em + font ascent at 1em + half-leading
+ * 2: half-leading + font descent at 1em + 162px
+ *
+ * Now the half-leading is (line-height - (font-ascent + font-descent))/2, so that is really:
+ *
+ * 1: (line-height - (font-ascent + font-descent))/2 + font descent + font ascent + (line-height - (font-ascent + font-descent))/2
+ * 2: (line-height - (font-ascent + font-descent))/2 + font descent + 162px
+ *
+ * Which simplify to:
+ *
+ * 1: line-height
+ * 2: line-height/2 + (font descent - font-ascent)/2 + 162px
+ *
+ * So if the following expression is true:
+ *
+ * line-height > line-height/2 + (font descent - font-ascent)/2 + 162px
+ *
+ * That is, if this is true:
+ *
+ * line-height > font descent - font-ascent + 324px
+ *
+ * ...then the line-height matters, otherwise the font does. Note
+ * that font descent - font-ascent will be in the region of
+ * 10px-30px (with Ahem, exactly 12px). However, if we make the
+ * line-height big, then the _positioning_ of the inline-blocks will
+ * depend on the font descent, since that is what will decide the
+ * distance from the bottom of the line box to the baseline of the
+ * block (since the baseline is set by the strut).
+ *
+ * However, in Acid2 a dependency on the font metrics was introduced
+ * and this caused all kinds of problems. And we can't require Ahem
+ * in the Acid tests, since it's unlikely most people will have it
+ * installed.
+ *
+ * What we want is for the font to not matter, and the baseline to
+ * be as high as possible. We can do that by saying that the font
+ * and the line-height are zero.
+ *
+ * One word of warning. If your browser has a minimum font size feature
+ * that forces font sizes up even when there is no text, you will need
+ * to disable it before running this test.
+ *
+ */
+
+ /* rules specific to the tests below */
+ #instructions:last-child { white-space: pre-wrap; white-space: x-bogus; }
+ #linktest:link { display: block; color: red; text-align: center; text-decoration: none; }
+ #linktest.pending, #linktest:visited { display: none; }
+ #\ { color: transparent; color: hsla(0, 0, 0, 1); position: fixed; top: 10px; left: 10px; font: 40px Arial, sans-serif; }
+ #\ #result, #\ #score { position: fixed; top: 10%; left: 10%; width: 4em; z-index: 1; color: yellow; font-size: 50px; background: fuchsia; border: solid 1em purple; }
+ </style>
+
+ <!-- part of the HTTP tests -->
+ <link rel="stylesheet" href="empty.css"><!-- text/html file (should be ignored, <h1> will go red if it isn't) -->
+
+ <!-- the next five script blocks are part of one of the tests -->
+ <script type="text/javascript">
+ var d1 = "fail";
+ var d2 = "fail";
+ var d3 = "fail";
+ var d4 = "fail";
+ var d5 = "fail";
+ </script>
+ <script type="text/javascript" src="data:text/javascript,d1%20%3D%20'one'%3B"></script>
+ <script type="text/javascript" src="data:text/javascript;base64,ZDIgPSAndHdvJzs%3D"></script>
+ <script type="text/javascript" src="data:text/javascript;base64,%5a%44%4d%67%50%53%41%6e%64%47%68%79%5a%57%55%6e%4f%77%3D%3D"></script>
+ <script type="text/javascript" src="data:text/javascript;base64,%20ZD%20Qg%0D%0APS%20An%20Zm91cic%0D%0A%207%20"></script>
+ <script type="text/javascript" src="data:text/javascript,d5%20%3D%20'five%5Cu0027s'%3B"></script>
+
+ <!-- part of the JS regexp and \0 value tests test -->
+ <script type="text/javascript">
+ var nullInRegexpArgumentResult = 0 < /script/.test('\0script') ? "passed" : "failed";
+ </script>
+
+ <!-- main test body -->
+ <script type="text/javascript">
+ var notifications = {};
+ function notify(file) {
+ // used in cross-file tests
+ notifications[file] = 1;
+ }
+ function fail(message) {
+ throw { message: message };
+ }
+ function assert(condition, message) {
+ if (!condition)
+ fail(message);
+ }
+ function assertEquals(expression, value, message) {
+ if (expression != value) {
+ expression = (""+expression).replace(/[\r\n]+/g, "\\n");
+ value = (""+value).replace(/\r?\n/g, "\\n");
+ fail("expected '" + value + "' but got '" + expression + "' - " + message);
+ }
+ }
+ function getTestDocument() {
+ var iframe = document.getElementById("selectors");
+ var doc = iframe.contentDocument;
+ //alert(doc);
+ for (var i = doc.documentElement.childNodes.length-1; i >= 0; i -= 1) {
+ doc.documentElement.removeChild(doc.documentElement.childNodes[i]);
+ }
+ doc.documentElement.appendChild(doc.createElement('head'));
+ doc.documentElement.firstChild.appendChild(doc.createElement('title'));
+ doc.documentElement.appendChild(doc.createElement('body'));
+ return doc;
+ }
+ function selectorTest(tester) {
+ var doc = getTestDocument();
+ var style = doc.createElement('style');
+ style.appendChild(doc.createTextNode("* { z-index: 0; position: absolute; }\n"));
+ doc.documentElement.firstChild.appendChild(style);
+ var ruleCount = 0;
+ tester(doc, function (selector) {
+ ruleCount += 1;
+ style.appendChild(doc.createTextNode(selector + " { z-index: " + ruleCount + "; }\n"));
+ return ruleCount;
+ }, function(node, rule, message) {
+ var value = doc.defaultView.getComputedStyle(node, "").zIndex;
+ assert(value != 'auto', "underlying problems prevent this test from running properly");
+ assertEquals(value, rule, message);
+ });
+ }
+ var kungFuDeathGrip = null; // used to hold things from test to test
+ var tests = [
+
+ // there are 6 buckets with 16 tests each, plus four special tests (0, 97, 98, and 99).
+
+ // Remove the "JS required" message and the <script> element in the <body>
+ function () {
+ // test 0: whether removing an element that is the last child correctly recomputes styles for the new last child
+ // also tests support for getComputedStyle, :last-child, pre-wrap, removing a <script> element
+ // removing script:
+ var scripts = document.getElementsByTagName('script');
+ document.body.removeChild(scripts[scripts.length-1]);
+ // removing last child:
+ var last = document.getElementById('remove-last-child-test');
+ var penultimate = last.previousSibling; // this should be the whitespace node
+ penultimate = penultimate.previousSibling; // this should now be the actual penultimate element
+ last.parentNode.removeChild(last);
+ assertEquals(document.defaultView.getComputedStyle(penultimate, '').whiteSpace, 'pre-wrap', "found unexpected computed style");
+ return 7;
+ },
+
+ // bucket 1: DOM Traversal, DOM Range, HTTP
+ // DOM Traversal
+ function () {
+ // test 1: NodeFilters and Exceptions
+ var doc = getTestDocument(); // looks like <!DOCTYPE><html><head><title/><\head><body/><\html> (the '\'s are to avoid validation errors)
+ var iteration = 0;
+ var exception = "Roses";
+ var test = function(node) {
+ iteration += 1;
+ switch (iteration) {
+ case 1: case 3: case 4: case 6: case 7: case 8: case 9: case 14: case 15: throw exception;
+ case 2: case 5: case 10: case 11: case 12: case 13: return true; // ToNumber(true) => 1
+ default: throw 0;
+ };
+ };
+ var check = function(o, method) {
+ var ok = false;
+ try {
+ o[method]();
+ } catch (e) {
+ if (e === exception)
+ ok = true;
+ }
+ assert(ok, "method " + o + "." + method + "() didn't forward exception");
+ };
+ var i = doc.createNodeIterator(doc.documentElement, 0xFFFFFFFF, test, true);
+ check(i, "nextNode"); // 1
+ assertEquals(i.nextNode(), doc.documentElement, "i.nextNode() didn't return the right node"); // 2
+ check(i, "previousNode"); // 3
+ var w = document.createTreeWalker(doc.documentElement, 0xFFFFFFFF, test, true);
+ check(w, "nextNode"); // 4
+ assertEquals(w.nextNode(), doc.documentElement.firstChild, "w.nextNode() didn't return the right node"); // 5
+ check(w, "previousNode"); // 6
+ check(w, "firstChild"); // 7
+ check(w, "lastChild"); // 8
+ check(w, "nextSibling"); // 9
+ assertEquals(iteration, 9, "iterations went wrong");
+ assertEquals(w.previousSibling(), null, "w.previousSibling() didn't return the right node"); // doesn't call filter
+ assertEquals(iteration, 9, "filter called incorrectly for previousSibling()");
+ assertEquals(w.lastChild(), doc.getElementsByTagName('title')[0], "w.lastChild() didn't return the right node"); // 10
+ assertEquals(w.nextSibling(), null, "w.nextSibling() didn't return the right node"); // 11 (filter called on parent, to see if it's included, otherwise it could skip that and find a nextsibling elsewhere)
+ assertEquals(iteration, 11, "filter called incorrectly for nextSibling()");
+ assertEquals(w.parentNode(), doc.documentElement.firstChild, "w.parentNode() didn't return the right node"); // 12
+ assertEquals(w.nextSibling(), doc.documentElement.lastChild, "w.nextSibling() didn't return the right node"); // 13
+ check(w, "previousSibling"); // 14
+ check(w, "parentNode"); // 15
+ return 1;
+ },
+ function () {
+ // test 2: Removing nodes during iteration
+ var count = 0;
+ var expect = function(n, node1, node2) {
+ count += 1;
+ assert(n == count, "reached expectation " + n + " when expecting expectation " + count);
+ assertEquals(node1, node2, "expectation " + count + " failed");
+ };
+ var doc = getTestDocument();
+ var t1 = doc.body.appendChild(doc.createElement('t1'));
+ var t2 = doc.body.appendChild(doc.createElement('t2'));
+ var t3 = doc.body.appendChild(doc.createElement('t3'));
+ var t4 = doc.body.appendChild(doc.createElement('t4'));
+ var callCount = 0;
+ var filterFunctions = [
+ function (node) { expect(1, node, doc.body); return true; }, // filter 0
+ function (node) { expect(3, node, t1); return true; }, // filter 1
+ function (node) { expect(5, node, t2); return true; }, // filter 2
+ function (node) { expect(7, node, t3); doc.body.removeChild(t4); return true; }, // filter 3
+ function (node) { expect(9, node, t4); return true; }, // filter 4
+ function (node) { expect(11, node, t4); doc.body.removeChild(t4); return 2 /* REJECT */; }, // filter 5
+ function (node) { expect(12, node, t3); return true; }, // filter 6
+ function (node) { expect(14, node, t2); doc.body.removeChild(t2); return true; }, // filter 7
+ function (node) { expect(16, node, t1); return true; }, // filter 8
+ ];
+ var i = doc.createNodeIterator(doc.documentElement.lastChild, 0xFFFFFFFF, function (node) { return filterFunctions[callCount++](node); }, true);
+ // * B 1 2 3 4
+ expect(2, i.nextNode(), doc.body); // filter 0
+ // [B] * 1 2 3 4
+ expect(4, i.nextNode(), t1); // filter 1
+ // B [1] * 2 3 4
+ expect(6, i.nextNode(), t2); // filter 2
+ // B 1 [2] * 3 4
+ expect(8, i.nextNode(), t3); // filter 3
+ // B 1 2 [3] *
+ doc.body.appendChild(t4);
+ // B 1 2 [3] * 4
+ expect(10, i.nextNode(), t4); // filter 4
+ // B 1 2 3 [4] *
+ expect(13, i.previousNode(), t3); // filters 5, 6
+ // B 1 2 3 * (4) // filter 5
+ // B 1 2 [3] * // between 5 and 6
+ // B 1 2 * (3) // filter 6
+ // B 1 2 * [3]
+ expect(15, i.previousNode(), t2); // filter 7
+ // B 1 * (2) [3]
+ // -- spec says "For instance, if a NodeFilter removes a node
+ // from a document, it can still accept the node, which
+ // means that the node may be returned by the NodeIterator
+ // or TreeWalker even though it is no longer in the subtree
+ // being traversed."
+ // -- but it also says "If changes to the iterated list do not
+ // remove the reference node, they do not affect the state
+ // of the NodeIterator."
+ // B 1 * [3]
+ expect(17, i.previousNode(), t1); // filter 8
+ // B [1] * 3
+ return 1;
+ },
+ function () {
+ // test 3: the infinite iterator
+ var doc = getTestDocument();
+ for (var i = 0; i < 5; i += 1) {
+ doc.body.appendChild(doc.createElement('section'));
+ doc.body.lastChild.title = i;
+ }
+ var count = 0;
+ var test = function() {
+ if (count > 3 && count < 12)
+ doc.body.appendChild(doc.body.firstChild);
+ count += 1;
+ return (count % 2 == 0) ? 1 : 2;
+ };
+ var i = doc.createNodeIterator(doc.body, 0xFFFFFFFF, test, true);
+ assertEquals(i.nextNode().title, "0", "failure 1");
+ assertEquals(i.nextNode().title, "2", "failure 2");
+ assertEquals(i.nextNode().title, "4", "failure 3");
+ assertEquals(i.nextNode().title, "1", "failure 4");
+ assertEquals(i.nextNode().title, "3", "failure 5");
+ assertEquals(i.nextNode().title, "0", "failure 6");
+ assertEquals(i.nextNode().title, "2", "failure 7");
+ assertEquals(i.nextNode(), null, "failure 8");
+ return 1;
+ },
+ function () {
+ // test 4: ignoring whitespace text nodes with node iterators
+ var count = 0;
+ var expect = function(node1, node2) {
+ count += 1;
+ assertEquals(node1, node2, "expectation " + count + " failed");
+ };
+ var allButWS = function (node) {
+ if (node.nodeType == 3 && node.data.match(/^\s*$/))
+ return 2;
+ return 1;
+ };
+ var i = document.createNodeIterator(document.body, 0x01 | 0x04 | 0x08 | 0x10 | 0x20, allButWS, true);
+ // now walk the document body and make sure everything is in the right place
+ expect(i.nextNode(), document.body); // 1
+ expect(i.nextNode(), document.getElementsByTagName('h1')[0]);
+ expect(i.nextNode(), document.getElementsByTagName('h1')[0].firstChild);
+ expect(i.nextNode(), document.getElementsByTagName('div')[0]);
+ expect(i.nextNode(), document.getElementById('bucket1'));
+ expect(i.nextNode(), document.getElementById('bucket2'));
+ expect(i.nextNode(), document.getElementById('bucket3'));
+ expect(i.nextNode(), document.getElementById('bucket4'));
+ expect(i.nextNode(), document.getElementById('bucket5'));
+ expect(i.nextNode(), document.getElementById('bucket6')); // 10
+ expect(i.nextNode(), document.getElementById('result'));
+ expect(i.nextNode(), document.getElementById('score'));
+ expect(i.nextNode(), document.getElementById('score').firstChild);
+ expect(i.nextNode(), document.getElementById('slash'));
+ expect(i.nextNode(), document.getElementById('slash').firstChild);
+ expect(i.nextNode(), document.getElementById('slash').nextSibling);
+ expect(i.nextNode(), document.getElementById('slash').nextSibling.firstChild);
+ expect(i.nextNode(), document.getElementsByTagName('map')[0]);
+ expect(i.nextNode(), document.getElementsByTagName('area')[0]);
+ expect(i.nextNode(), document.getElementsByTagName('iframe')[0]); // 20
+ expect(i.nextNode(), document.getElementsByTagName('iframe')[0].firstChild);
+ expect(i.nextNode(), document.getElementsByTagName('iframe')[1]);
+ expect(i.nextNode(), document.getElementsByTagName('iframe')[1].firstChild);
+ expect(i.nextNode(), document.getElementsByTagName('iframe')[2]);
+ expect(i.nextNode(), document.forms[0]);
+ expect(i.nextNode(), document.forms.form.elements[0]);
+ expect(i.nextNode(), document.getElementsByTagName('table')[0]);
+ expect(i.nextNode(), document.getElementsByTagName('tbody')[0]);
+ expect(i.nextNode(), document.getElementsByTagName('tr')[0]);
+ expect(i.nextNode(), document.getElementsByTagName('td')[0]);
+ expect(i.nextNode(), document.getElementsByTagName('td')[0].getElementsByTagName('p')[0]);
+ expect(i.nextNode(), document.getElementById('instructions'));
+ expect(i.nextNode(), document.getElementById('instructions').firstChild);
+ expect(i.nextNode().nodeName, "SPAN");
+ expect(i.nextNode().nodeName, "#text");
+ expect(i.nextNode(), document.links[1]);
+ expect(i.nextNode(), document.links[1].firstChild);
+ expect(i.nextNode(), document.getElementById('instructions').lastChild);
+ expect(i.nextNode(), null);
+ // walk it backwards for good measure
+ expect(i.previousNode(), document.getElementById('instructions').lastChild);
+ expect(i.previousNode(), document.links[1].firstChild);
+ expect(i.previousNode(), document.links[1]);
+ expect(i.previousNode().nodeName, "#text");
+ expect(i.previousNode().nodeName, "SPAN");
+ expect(i.previousNode(), document.getElementById('instructions').firstChild);
+ expect(i.previousNode(), document.getElementById('instructions'));
+ expect(i.previousNode(), document.getElementsByTagName('td')[0].getElementsByTagName('p')[0]);
+ expect(i.previousNode(), document.getElementsByTagName('td')[0]);
+ expect(i.previousNode(), document.getElementsByTagName('tr')[0]);
+ expect(i.previousNode(), document.getElementsByTagName('tbody')[0]);
+ expect(i.previousNode(), document.getElementsByTagName('table')[0]);
+ expect(i.previousNode(), document.forms.form.elements[0]);
+ expect(i.previousNode(), document.forms[0]);
+ expect(i.previousNode(), document.getElementsByTagName('iframe')[2]);
+ expect(i.previousNode(), document.getElementsByTagName('iframe')[1].firstChild);
+ expect(i.previousNode(), document.getElementsByTagName('iframe')[1]);
+ expect(i.previousNode(), document.getElementsByTagName('iframe')[0].firstChild);
+ expect(i.previousNode(), document.getElementsByTagName('iframe')[0]); // 20
+ expect(i.previousNode(), document.getElementsByTagName('area')[0]);
+ expect(i.previousNode(), document.getElementsByTagName('map')[0]);
+ expect(i.previousNode(), document.getElementById('slash').nextSibling.firstChild);
+ expect(i.previousNode(), document.getElementById('slash').nextSibling);
+ expect(i.previousNode(), document.getElementById('slash').firstChild);
+ expect(i.previousNode(), document.getElementById('slash'));
+ expect(i.previousNode(), document.getElementById('score').firstChild);
+ expect(i.previousNode(), document.getElementById('score'));
+ expect(i.previousNode(), document.getElementById('result'));
+ expect(i.previousNode(), document.getElementById('bucket6'));
+ expect(i.previousNode(), document.getElementById('bucket5'));
+ expect(i.previousNode(), document.getElementById('bucket4'));
+ expect(i.previousNode(), document.getElementById('bucket3'));
+ expect(i.previousNode(), document.getElementById('bucket2'));
+ expect(i.previousNode(), document.getElementById('bucket1'));
+ expect(i.previousNode(), document.getElementsByTagName('div')[0]);
+ expect(i.previousNode(), document.getElementsByTagName('h1')[0].firstChild);
+ expect(i.previousNode(), document.getElementsByTagName('h1')[0]);
+ expect(i.previousNode(), document.body);
+ expect(i.previousNode(), null);
+ return 1;
+ },
+ function () {
+ // test 5: ignoring whitespace text nodes with tree walkers
+ // NOTE(review): each expect() below both advances the walker and asserts the
+ // node it lands on, so statement order is load-bearing; do not reorder.
+ // Expected nodes assume the test page's markup, which is outside this hunk.
+ var count = 0;
+ // expect: assertEquals wrapper that numbers its failure messages.
+ var expect = function(node1, node2) {
+ count += 1;
+ assertEquals(node1, node2, "expectation " + count + " failed");
+ };
+ // NodeFilter: skip (3 == FILTER_SKIP) text nodes that are whitespace-only,
+ // accept (1 == FILTER_ACCEPT) everything else.
+ var allButWS = function (node) {
+ if (node.nodeType == 3 && node.data.match(/^\s*$/))
+ return 3;
+ return 1;
+ };
+ // whatToShow 0x01|0x04|0x08|0x10|0x20 = SHOW_ELEMENT|SHOW_TEXT|
+ // SHOW_CDATA_SECTION|SHOW_ENTITY_REFERENCE|SHOW_ENTITY.
+ var w = document.createTreeWalker(document.body, 0x01 | 0x04 | 0x08 | 0x10 | 0x20, allButWS, true);
+ expect(w.currentNode, document.body);
+ // parentNode() from the root returns null and must leave currentNode alone.
+ expect(w.parentNode(), null);
+ expect(w.currentNode, document.body);
+ expect(w.firstChild(), document.getElementsByTagName('h1')[0]);
+ expect(w.firstChild().nodeType, 3);
+ expect(w.parentNode(), document.getElementsByTagName('h1')[0]);
+ expect(w.nextSibling().previousSibling.nodeType, 3);
+ expect(w.nextSibling(), document.getElementsByTagName('p')[6]);
+ expect(w.nextSibling(), document.getElementsByTagName('map')[0]);
+ expect(w.lastChild(), document.getElementsByTagName('table')[0]);
+ expect(w.lastChild(), document.getElementsByTagName('tbody')[0]);
+ expect(w.nextNode(), document.getElementsByTagName('tr')[0]);
+ expect(w.nextNode(), document.getElementsByTagName('td')[0]);
+ expect(w.nextNode(), document.getElementsByTagName('p')[7]);
+ expect(w.nextNode(), document.getElementsByTagName('p')[8]); // instructions.inc paragraph
+ expect(w.previousSibling(), document.getElementsByTagName('map')[0]);
+ expect(w.previousNode().data, "100");
+ expect(w.parentNode().tagName, "SPAN");
+ expect(w.parentNode(), document.getElementById('result'));
+ expect(w.parentNode(), document.body);
+ expect(w.lastChild().id, "instructions");
+ expect(w.lastChild().data.substr(0,1), ".");
+ expect(w.previousNode(), document.links[1].firstChild);
+ return 1;
+ },
+ function () {
+ // test 6: walking outside a tree
+ // Checks that a TreeWalker keeps working when its root/current nodes are
+ // removed from the document and regrafted elsewhere.
+ // NOTE(review): getTestDocument() is a helper defined outside this hunk;
+ // presumably it returns a fresh scratch document - confirm against full file.
+ var doc = getTestDocument();
+ var p = doc.createElement('p');
+ doc.body.appendChild(p);
+ var b = doc.body;
+ // whatToShow 0xFFFFFFFF = SHOW_ALL, no filter.
+ var w = document.createTreeWalker(b, 0xFFFFFFFF, null, true);
+ assertEquals(w.currentNode, b, "basic use of TreeWalker failed: currentNode");
+ assertEquals(w.lastChild(), p, "basic use of TreeWalker failed: lastChild()");
+ assertEquals(w.previousNode(), b, "basic use of TreeWalker failed: previousNode()");
+ // Detach the walker's root from the document; the walker must still
+ // traverse within the detached subtree.
+ doc.documentElement.removeChild(b);
+ assertEquals(w.lastChild(), p, "TreeWalker failed after removing the current node from the tree");
+ assertEquals(w.nextNode(), null, "failed to walk into the end of a subtree");
+ // Regraft p directly under the document element, away from the walker root.
+ doc.documentElement.appendChild(p);
+ assertEquals(w.previousNode(), doc.getElementsByTagName('title')[0], "failed to handle regrafting correctly");
+ p.appendChild(b);
+ assertEquals(w.nextNode(), p, "couldn't retrace steps");
+ assertEquals(w.nextNode(), b, "couldn't step back into root");
+ assertEquals(w.previousNode(), null, "root didn't retake its rootish position");
+ return 1;
+ },
+
+ // DOM Range
+ function () {
+ // test 7: basic ranges tests
+ // A freshly created Range is collapsed at (document, 0); all read-only
+ // accessors and the no-op mutators are probed before any real mutation.
+ var r = document.createRange();
+ assert(r, "range not created");
+ assert(r.collapsed, "new range wasn't collapsed");
+ assertEquals(r.commonAncestorContainer, document, "new range's common ancestor wasn't the document");
+ assertEquals(r.startContainer, document, "new range's start container wasn't the document");
+ assertEquals(r.startOffset, 0, "new range's start offset wasn't zero");
+ assertEquals(r.endContainer, document, "new range's end container wasn't the document");
+ assertEquals(r.endOffset, 0, "new range's end offset wasn't zero");
+ assert(r.cloneContents(), "cloneContents() didn't return an object");
+ assertEquals(r.cloneContents().childNodes.length, 0, "nothing cloned was more than nothing");
+ assertEquals(r.cloneRange().toString(), "", "nothing cloned stringifed to more than nothing");
+ r.collapse(true); // no effect
+ assertEquals(r.compareBoundaryPoints(r.START_TO_END, r.cloneRange()), 0, "starting boundary point of range wasn't the same as the end boundary point of the clone range");
+ r.deleteContents(); // no effect
+ assertEquals(r.extractContents().childNodes.length, 0, "nothing removed was more than nothing");
+ var endOffset = r.endOffset;
+ // insertNode() at (document, 0) puts the comment before the doctype.
+ r.insertNode(document.createComment("commented inserted to test ranges"));
+ r.setEnd(r.endContainer, endOffset + 1); // added to work around spec bug that smaug is blocking the errata for
+ try {
+ assert(!r.collapsed, "range with inserted comment is collapsed");
+ assertEquals(r.commonAncestorContainer, document, "range with inserted comment has common ancestor that isn't the document");
+ assertEquals(r.startContainer, document, "range with inserted comment has start container that isn't the document");
+ assertEquals(r.startOffset, 0, "range with inserted comment has start offset that isn't zero");
+ assertEquals(r.endContainer, document, "range with inserted comment has end container that isn't the document");
+ assertEquals(r.endOffset, 1, "range with inserted comment has end offset that isn't after the comment");
+ } finally {
+ // Cleanup: the comment was inserted at document offset 0, so it is
+ // document.firstChild; remove it even if an assertion above threw.
+ document.removeChild(document.firstChild);
+ }
+ return 1;
+ },
+ function () {
+ // test 8: moving boundary points
+ // Exercises setStart/setEnd/setStartBefore/setStartAfter/setEndAfter/
+ // collapse/selectNode/selectNodeContents on a detached document shaped
+ // <root><e/><e/><e/></root>. Each mutation's effect on all four boundary
+ // fields is pinned, so statement order matters.
+ var doc = document.implementation.createDocument(null, null, null);
+ var root = doc.createElement("root");
+ doc.appendChild(root);
+ var e1 = doc.createElement("e");
+ root.appendChild(e1);
+ var e2 = doc.createElement("e");
+ root.appendChild(e2);
+ var e3 = doc.createElement("e");
+ root.appendChild(e3);
+ var r = doc.createRange();
+ r.setStart(e2, 0);
+ r.setEnd(e3, 0);
+ assert(!r.collapsed, "non-empty range claims to be collapsed");
+ // Setting the end before the current start must collapse to the new end.
+ r.setEnd(e1, 0);
+ assert(r.collapsed, "setEnd() didn't collapse the range");
+ assertEquals(r.startContainer, e1, "startContainer is wrong after setEnd()");
+ assertEquals(r.startOffset, 0, "startOffset is wrong after setEnd()");
+ assertEquals(r.endContainer, e1, "endContainer is wrong after setEnd()");
+ assertEquals(r.endOffset, 0, "endOffset is wrong after setEnd()");
+ r.setStartBefore(e3);
+ assert(r.collapsed, "setStartBefore() didn't collapse the range");
+ assertEquals(r.startContainer, root, "startContainer is wrong after setStartBefore()");
+ assertEquals(r.startOffset, 2, "startOffset is wrong after setStartBefore()");
+ assertEquals(r.endContainer, root, "endContainer is wrong after setStartBefore()");
+ assertEquals(r.endOffset, 2, "endOffset is wrong after setStartBefore()");
+ r.setEndAfter(root);
+ assert(!r.collapsed, "setEndAfter() didn't uncollapse the range");
+ assertEquals(r.startContainer, root, "startContainer is wrong after setEndAfter()");
+ assertEquals(r.startOffset, 2, "startOffset is wrong after setEndAfter()");
+ assertEquals(r.endContainer, doc, "endContainer is wrong after setEndAfter()");
+ assertEquals(r.endOffset, 1, "endOffset is wrong after setEndAfter()");
+ r.setStartAfter(e2);
+ assert(!r.collapsed, "setStartAfter() collapsed the range");
+ assertEquals(r.startContainer, root, "startContainer is wrong after setStartAfter()");
+ assertEquals(r.startOffset, 2, "startOffset is wrong after setStartAfter()");
+ assertEquals(r.endContainer, doc, "endContainer is wrong after setStartAfter()");
+ assertEquals(r.endOffset, 1, "endOffset is wrong after setStartAfter()");
+ var msg = '';
+ // setEndBefore(document) must throw, and the exception must be a
+ // RangeException (checked via its constant values), not a DOMException.
+ try {
+ r.setEndBefore(doc);
+ msg = "no exception thrown for setEndBefore() the document itself";
+ } catch (e) {
+ if (e.BAD_BOUNDARYPOINTS_ERR != 1)
+ msg = 'not a RangeException';
+ else if (e.INVALID_NODE_TYPE_ERR != 2)
+ msg = 'RangeException has no INVALID_NODE_TYPE_ERR';
+ else if ("INVALID_ACCESS_ERR" in e)
+ msg = 'RangeException has DOMException constants';
+ else if (e.code != e.INVALID_NODE_TYPE_ERR)
+ msg = 'wrong exception raised from setEndBefore()';
+ }
+ assert(msg == "", msg);
+ // The failed setEndBefore() must not have changed the boundaries.
+ assert(!r.collapsed, "setEndBefore() collapsed the range");
+ assertEquals(r.startContainer, root, "startContainer is wrong after setEndBefore()");
+ assertEquals(r.startOffset, 2, "startOffset is wrong after setEndBefore()");
+ assertEquals(r.endContainer, doc, "endContainer is wrong after setEndBefore()");
+ assertEquals(r.endOffset, 1, "endOffset is wrong after setEndBefore()");
+ r.collapse(false); // collapse to the end boundary
+ assert(r.collapsed, "collapse() collapsed the range");
+ assertEquals(r.startContainer, doc, "startContainer is wrong after collapse()");
+ assertEquals(r.startOffset, 1, "startOffset is wrong after collapse()");
+ assertEquals(r.endContainer, doc, "endContainer is wrong after collapse()");
+ assertEquals(r.endOffset, 1, "endOffset is wrong after collapse()");
+ r.selectNodeContents(root);
+ assert(!r.collapsed, "collapsed is wrong after selectNodeContents()");
+ assertEquals(r.startContainer, root, "startContainer is wrong after selectNodeContents()");
+ assertEquals(r.startOffset, 0, "startOffset is wrong after selectNodeContents()");
+ assertEquals(r.endContainer, root, "endContainer is wrong after selectNodeContents()");
+ assertEquals(r.endOffset, 3, "endOffset is wrong after selectNodeContents()");
+ r.selectNode(e2);
+ assert(!r.collapsed, "collapsed is wrong after selectNode()");
+ assertEquals(r.startContainer, root, "startContainer is wrong after selectNode()");
+ assertEquals(r.startOffset, 1, "startOffset is wrong after selectNode()");
+ assertEquals(r.endContainer, root, "endContainer is wrong after selectNode()");
+ assertEquals(r.endOffset, 2, "endOffset is wrong after selectNode()");
+ return 1;
+ },
+ function () {
+ // test 9: extractContents() in a Document
+ // Builds <h1>Hello <em>Wonderful</em> Kitty</h1><p>How are you?</p> in a
+ // scratch document, then extracts a range crossing both text nodes.
+ var doc = getTestDocument();
+ var h1 = doc.createElement('h1');
+ var t1 = doc.createTextNode('Hello ');
+ h1.appendChild(t1);
+ var em = doc.createElement('em');
+ var t2 = doc.createTextNode('Wonderful');
+ em.appendChild(t2);
+ h1.appendChild(em);
+ var t3 = doc.createTextNode(' Kitty');
+ h1.appendChild(t3);
+ doc.body.appendChild(h1);
+ var p = doc.createElement('p');
+ var t4 = doc.createTextNode('How are you?');
+ p.appendChild(t4);
+ doc.body.appendChild(p);
+ var r = doc.createRange();
+ r.selectNodeContents(doc);
+ assertEquals(r.toString(), "Hello Wonderful KittyHow are you?", "toString() on range selecting Document gave wrong output");
+ r.setStart(t2, 6);
+ r.setEnd(p, 0);
+ // <body><h1>Hello <em>Wonder ful<\em> Kitty<\h1><p> How are you?<\p><\body> (the '\'s are to avoid validation errors)
+ // ^----------------------^
+ assertEquals(r.toString(), "ful Kitty", "toString() on range crossing text nodes gave wrong output");
+ var f = r.extractContents();
+ // <h1><em>ful<\em> Kitty<\h1><p><\p>
+ // ccccccccccccccccMMMMMMcccccccccccc
+ // In the diagram above: 'c' = cloned (partially selected ancestors),
+ // 'M' = moved (fully selected nodes) - the assertions below distinguish
+ // clones (!=) from moved originals (==).
+ assertEquals(f.nodeType, 11, "failure 1"); // 11 == DOCUMENT_FRAGMENT_NODE
+ assert(f.childNodes.length == 2, "expected two children in the result, got " + f.childNodes.length);
+ assertEquals(f.childNodes[0].tagName, "H1", "failure 3");
+ assert(f.childNodes[0] != h1, "failure 4");
+ assertEquals(f.childNodes[0].childNodes.length, 2, "failure 5");
+ assertEquals(f.childNodes[0].childNodes[0].tagName, "EM", "failure 6");
+ assert(f.childNodes[0].childNodes[0] != em, "failure 7");
+ assertEquals(f.childNodes[0].childNodes[0].childNodes.length, 1, "failure 8");
+ assertEquals(f.childNodes[0].childNodes[0].childNodes[0].data, "ful", "failure 9");
+ assert(f.childNodes[0].childNodes[0].childNodes[0] != t2, "failure 10");
+ assertEquals(f.childNodes[0].childNodes[1], t3, "failure 11");
+ assert(f.childNodes[0].childNodes[1] != em, "failure 12");
+ assertEquals(f.childNodes[1].tagName, "P", "failure 13");
+ assertEquals(f.childNodes[1].childNodes.length, 0, "failure 14");
+ assert(f.childNodes[1] != p, "failure 15");
+ return 1;
+ },
+ function () {
+ // test 10: Ranges and Attribute Nodes
+ // Extracting the contents of a range over an attribute node's child text
+ // must empty the attribute's value; the element's id is restored at the end.
+ var e = document.getElementById('result');
+ if (!e.getAttributeNode)
+ return 1; // support for attribute nodes is optional in Acid3, because attribute nodes might be removed from DOM Core in the future.
+ // however, if they're supported, they'd better work:
+ var a = e.getAttributeNode('id');
+ var r = document.createRange();
+ r.selectNodeContents(a);
+ assertEquals(r.toString(), "result", "toString() didn't work for attribute node");
+ var t = a.firstChild;
+ var f = r.extractContents();
+ assertEquals(f.childNodes.length, 1, "extracted contents were the wrong length");
+ assertEquals(f.childNodes[0], t, "extracted contents were the wrong node");
+ assertEquals(t.textContent, 'result', "extracted contents didn't match old attribute value");
+ assertEquals(r.toString(), '', "extracting contents didn't empty attribute value; instead equals '" + r.toString() + "'");
+ assertEquals(e.getAttribute('id'), '', "extracting contents didn't change 'id' attribute to empty string");
+ // Restore the id so later tests can still find #result.
+ e.id = 'result';
+ return 1;
+ },
+ function () {
+ // test 11: Ranges and Comments
+ // Two surroundContents() failure modes are pinned by DOMException code:
+ // first when the document already has another child, then when the range
+ // splits a comment node in half. A comment range must stringify to "".
+ var msg;
+ var doc = getTestDocument();
+ var c1 = doc.createComment("11111");
+ doc.appendChild(c1);
+ var r = doc.createRange();
+ r.selectNode(c1);
+ msg = 'wrong exception raised';
+ try {
+ r.surroundContents(doc.createElement('a'));
+ msg = 'no exception raised';
+ } catch (e) {
+ if ('code' in e)
+ msg += '; code = ' + e.code;
+ if (e.code == 3)
+ msg = ''; // 3 == HIERARCHY_REQUEST_ERR: expected, clear the failure
+ }
+ assert(msg == '', "when inserting <a> into Document with another child: " + msg);
+ var c2 = doc.createComment("22222");
+ doc.body.appendChild(c2);
+ var c3 = doc.createComment("33333");
+ doc.body.appendChild(c3);
+ // Boundary points inside two different comments' data.
+ r.setStart(c2, 2);
+ r.setEnd(c3, 3);
+ var msg = 'wrong exception raised';
+ try {
+ r.surroundContents(doc.createElement('a'));
+ msg = 'no exception raised';
+ } catch (e) {
+ if ('code' in e)
+ msg += '; code = ' + e.code;
+ if (e.code == 1)
+ msg = ''; // code 1 expected here: clear the failure
+ }
+ assert(msg == '', "when trying to surround two halves of comment: " + msg);
+ assertEquals(r.toString(), "", "comments returned text");
+ return 1;
+ },
+ function () {
+ // test 12: Ranges under mutations: insertion into text nodes
+ // insertNode() into the middle of a text node splits it; the moved node
+ // (p.lastChild, the "ABCDE" text) lands between the two halves.
+ var doc = getTestDocument();
+ var p = doc.createElement('p');
+ var t1 = doc.createTextNode('12345');
+ p.appendChild(t1);
+ var t2 = doc.createTextNode('ABCDE');
+ p.appendChild(t2);
+ doc.body.appendChild(p);
+ var r = doc.createRange();
+ r.setStart(p.firstChild, 2);
+ r.setEnd(p.firstChild, 3);
+ assert(!r.collapsed, "collapsed is wrong at start");
+ assertEquals(r.commonAncestorContainer, p.firstChild, "commonAncestorContainer is wrong at start");
+ assertEquals(r.startContainer, p.firstChild, "startContainer is wrong at start");
+ assertEquals(r.startOffset, 2, "startOffset is wrong at start");
+ assertEquals(r.endContainer, p.firstChild, "endContainer is wrong at start");
+ assertEquals(r.endOffset, 3, "endOffset is wrong at start");
+ assertEquals(r.toString(), "3", "range in text node stringification failed");
+ r.insertNode(p.lastChild);
+ // Result: "12" | "ABCDE" | "345" - three text nodes under p.
+ assertEquals(p.childNodes.length, 3, "insertion of node made wrong number of child nodes");
+ assertEquals(p.childNodes[0], t1, "unexpected first text node");
+ assertEquals(p.childNodes[0].data, "12", "unexpected first text node contents");
+ assertEquals(p.childNodes[1], t2, "unexpected second text node");
+ assertEquals(p.childNodes[1].data, "ABCDE", "unexpected second text node");
+ assertEquals(p.childNodes[2].data, "345", "unexpected third text node contents");
+ // The spec is very vague about what exactly should be in the range afterwards:
+ // the insertion results in a splitText(), which it says is equivalent to a truncation
+ // followed by an insertion, but it doesn't say what to do when you have a truncation,
+ // so we don't know where either the start or the end boundary points end up.
+ // The spec really should be clarified for how to handle splitText() and
+ // text node truncation in general
+ // The only thing that seems very clear is that the inserted text node should
+ // be in the range, and it has to be at the start, since insertion always puts it at
+ // the start.
+ assert(!r.collapsed, "collapsed is wrong after insertion");
+ assert(r.toString().match(/^ABCDE/), "range didn't start with the expected text; range stringified to '" + r.toString() + "'");
+ return 1;
+ },
+ function () {
+ // test 13: Ranges under mutations: deletion
+ // Removing the node containing the range's start boundary must collapse
+ // the range to the removed node's former position in its parent: (body, 0).
+ var doc = getTestDocument();
+ var p = doc.createElement('p');
+ p.appendChild(doc.createTextNode("12345"));
+ doc.body.appendChild(p);
+ var r = doc.createRange();
+ r.setEnd(doc.body, 1);
+ r.setStart(p.firstChild, 2);
+ assert(!r.collapsed, "collapsed is wrong at start");
+ assertEquals(r.commonAncestorContainer, doc.body, "commonAncestorContainer is wrong at start");
+ assertEquals(r.startContainer, p.firstChild, "startContainer is wrong at start");
+ assertEquals(r.startOffset, 2, "startOffset is wrong at start");
+ assertEquals(r.endContainer, doc.body, "endContainer is wrong at start");
+ assertEquals(r.endOffset, 1, "endOffset is wrong at start");
+ doc.body.removeChild(p);
+ assert(r.collapsed, "collapsed is wrong after deletion");
+ assertEquals(r.commonAncestorContainer, doc.body, "commonAncestorContainer is wrong after deletion");
+ assertEquals(r.startContainer, doc.body, "startContainer is wrong after deletion");
+ assertEquals(r.startOffset, 0, "startOffset is wrong after deletion");
+ assertEquals(r.endContainer, doc.body, "endContainer is wrong after deletion");
+ assertEquals(r.endOffset, 0, "endOffset is wrong after deletion");
+ return 1;
+ },
+
+ // HTTP
+ function () {
+ // test 14: HTTP - Content-Type: image/png
+ // NOTE(review): 'notifications' is a global defined outside this hunk;
+ // presumably support files set a flag in it if they execute as script -
+ // confirm against the full test harness.
+ assert(!notifications['empty.png'], "privilege escalation security bug: PNG ran script");
+ // The first iframe presumably loads empty.png; if the browser sniffed it
+ // as HTML, the support page's FAIL marker would be present.
+ var iframe = document.getElementsByTagName('iframe')[0];
+ assert(iframe, "no <iframe> support");
+ if (iframe && iframe.contentDocument) {
+ var ps = iframe.contentDocument.getElementsByTagName('p');
+ if (ps.length > 0) {
+ if (ps[0].firstChild && ps[0].firstChild.data && ps[0].firstChild.data == 'FAIL')
+ fail("PNG was parsed as HTML.");
+ }
+ }
+ return 1;
+ },
+ function () {
+ // test 15: HTTP - Content-Type: text/plain
+ // Same structure as test 14, but for a text/plain resource in the second
+ // iframe: it must be neither executed as script nor parsed as HTML.
+ assert(!notifications['empty.txt'], "privilege escalation security bug: text file ran script");
+ var iframe = document.getElementsByTagName('iframe')[1];
+ assert(iframe, "no <iframe> support");
+ if (iframe && iframe.contentDocument) {
+ var ps = iframe.contentDocument.getElementsByTagName('p');
+ if (ps.length > 0) {
+ if (ps[0].firstChild && ps[0].firstChild.data && ps[0].firstChild.data == 'FAIL')
+ fail("text/plain file was parsed as HTML");
+ }
+ }
+ return 1;
+ },
+ function () {
+ // test 16: <object> handling and HTTP status codes
+ // Builds a chain of nested <object> fallbacks (oA > oB > oC) and inserts
+ // it into the hidden <map>; only the rendering (checked elsewhere) shows
+ // whether fallback across HTTP statuses works. Note oC.data is assigned
+ // only after oC is already nested inside oB.
+ var oC = document.createElement('object');
+ //oC.appendChild(document.createTextNode("FAIL"));
+ var oB = document.createElement('object');
+ var oA = document.createElement('object');
+ oA.data = "support-a.png";
+ oB.data = "support-b.png";
+ oB.appendChild(oC);
+ oC.data = "support-c.png";
+ oA.appendChild(oB);
+ document.getElementsByTagName("map")[0].appendChild(oA);
+ // assuming the above didn't raise any exceptions, this test has passed
+ // (the real test is whether the rendering is correct)
+ return 1;
+ },
+
+ // bucket 2: DOM2 Core and DOM2 Events
+ // Core
+ function () {
+ // test 17: hasAttribute
+ // hasAttribute() must be false both for absent attributes and for
+ // attributes with DTD-implied defaults, and true for an attribute that is
+ // literally present even with an empty value.
+ // missing attribute
+ assert(!document.getElementsByTagName('map')[0].hasAttribute('id'), "hasAttribute failure for 'id' on map");
+ // implied attribute
+ assert(!document.getElementsByTagName('form')[0].hasAttribute('method'), "hasAttribute failure for 'method' on form");
+ // actually present attribute
+ assert(document.getElementsByTagName('form')[0].hasAttribute('action'), "hasAttribute failure for 'action' on form");
+ assertEquals(document.getElementsByTagName('form')[0].getAttribute('action'), '', "attribute 'action' on form has wrong value");
+ return 2;
+ },
+ function () {
+ // test 18: nodeType (this test also relies on accurate parsing of the document)
+ // Expected codes: 9 DOCUMENT, 1 ELEMENT, 2 ATTRIBUTE, 3 TEXT, 10 DOCUMENT_TYPE.
+ assertEquals(document.nodeType, 9, "document nodeType wrong");
+ assertEquals(document.documentElement.nodeType, 1, "element nodeType wrong");
+ if (document.createAttribute) // support for attribute nodes is optional in Acid3, because attribute nodes might be removed from DOM Core in the future.
+ assertEquals(document.createAttribute('test').nodeType, 2, "attribute nodeType wrong"); // however, if they're supported, they'd better work
+ assertEquals(document.getElementById('score').firstChild.nodeType, 3, "text node nodeType wrong");
+ assertEquals(document.firstChild.nodeType, 10, "DOCTYPE nodeType wrong");
+ return 2;
+ },
+ function () {
+ // test 19: value of constants
+ // Node-type constants must be exposed on document, element, and text-node
+ // instances, and DOMException instances must carry the error constants.
+ var e = null;
+ try {
+ document.body.appendChild(document.documentElement);
+ // raises a HIERARCHY_REQUEST_ERR
+ } catch (err) {
+ e = err;
+ }
+ assertEquals(document.DOCUMENT_FRAGMENT_NODE, 11, "document DOCUMENT_FRAGMENT_NODE constant missing or wrong");
+ assertEquals(document.body.COMMENT_NODE, 8, "element COMMENT_NODE constant missing or wrong");
+ assertEquals(document.createTextNode('').ELEMENT_NODE, 1, "text node ELEMENT_NODE constant missing or wrong");
+ assert(e.HIERARCHY_REQUEST_ERR == 3, "exception HIERARCHY_REQUEST_ERR constant missing or wrong")
+ assertEquals(e.code, 3, "incorrect exception raised from appendChild()");
+ return 2;
+ },
+ function () {
+ // test 20: nulls bytes in various places
+ // A NUL in an id must not truncate the lookup string, and a NUL in a tag
+ // name must raise INVALID_CHARACTER_ERR (code 5) from createElement().
+ assert(!document.getElementById('bucket1\0error'), "null in getElementById() probably terminated string");
+ var ok = true;
+ try {
+ document.createElement('form\0div');
+ ok = false;
+ } catch (e) {
+ if (e.code != 5)
+ ok = false;
+ }
+ assert(ok, "didn't raise the right exception for null byte in createElement()");
+ return 2;
+ },
+ function () {
+ // test 21: basic namespace stuff
+ // createElementNS() with a qualified name must split prefix/localName and
+ // record the namespace URI verbatim.
+ var element = document.createElementNS('http://ns.example.com/', 'prefix:localname');
+ assertEquals(element.tagName, 'prefix:localname', "wrong tagName");
+ assertEquals(element.nodeName, 'prefix:localname', "wrong nodeName");
+ assertEquals(element.prefix, 'prefix', "wrong prefix");
+ assertEquals(element.localName, 'localname', "wrong localName");
+ assertEquals(element.namespaceURI, 'http://ns.example.com/', "wrong namespaceURI");
+ return 2;
+ },
+ function () {
+ // test 22: createElement() with invalid tag names
+ // Each malformed name must raise INVALID_CHARACTER_ERR (code 5).
+ var test = function (name) {
+ var result;
+ try {
+ var div = document.createElement(name);
+ } catch (e) {
+ result = e;
+ }
+ assert(result, "no exception for createElement('" + name + "')");
+ assertEquals(result.code, 5, "wrong exception for createElement('" + name + "')"); // INVALID_CHARACTER_ERR
+ }
+ test('<div>');
+ test('0div');
+ test('di v');
+ test('di<v');
+ test('-div');
+ test('.div');
+ return 2;
+ },
+ function () {
+ // test 23: createElementNS() with invalid tag names
+ // Expected codes: 5 = INVALID_CHARACTER_ERR (malformed names),
+ // 14 = NAMESPACE_ERR (prefix/namespace mismatches). The final call must
+ // NOT throw: xmlns:* is legal in the XMLNS namespace.
+ var test = function (name, ns, code) {
+ var result;
+ try {
+ var div = document.createElementNS(ns, name);
+ } catch (e) {
+ result = e;
+ }
+ assert(result, "no exception for createElementNS('" + ns + "', '" + name + "')");
+ assertEquals(result.code, code, "wrong exception for createElementNS('" + ns + "', '" + name + "')");
+ }
+ test('<div>', null, 5);
+ test('0div', null, 5);
+ test('di v', null, 5);
+ test('di<v', null, 5);
+ test('-div', null, 5);
+ test('.div', null, 5);
+ test('<div>', "http://example.com/", 5);
+ test('0div', "http://example.com/", 5);
+ test('di<v', "http://example.com/", 5);
+ test('-div', "http://example.com/", 5);
+ test('.div', "http://example.com/", 5);
+ test(':div', null, 14);
+ test(':div', "http://example.com/", 14);
+ test('d:iv', null, 14);
+ test('xml:test', "http://example.com/", 14);
+ test('xmlns:test', "http://example.com/", 14); // (technically a DOM3 Core test)
+ test('x:test', "http://www.w3.org/2000/xmlns/", 14); // (technically a DOM3 Core test)
+ document.createElementNS("http://www.w3.org/2000/xmlns/", 'xmlns:test'); // (technically a DOM3 Core test)
+ return 2;
+ },
+ function () {
+ // test 24: event handler attributes
+ // getAttribute('onload') must return the literal markup attribute value
+ // (defined in the page's <body> tag, outside this hunk), not a function.
+ assertEquals(document.body.getAttribute('onload'), "update() /* this attribute's value is tested in one of the tests */ ", "onload value wrong");
+ return 2;
+ },
+ function () {
+ // test 25: test namespace checking in createDocumentType, and
+ // check that exceptions that are thrown are DOMException objects
+ // A qualified name ending in ':' is malformed => NAMESPACE_ERR, and the
+ // thrown object must carry DOMException constants (INVALID_ACCESS_ERR == 15).
+ var message = "";
+ try {
+ document.implementation.createDocumentType('a:', '', ''); /* doesn't contain an illegal character; is malformed */
+ message = "failed to raise exception";
+ } catch (e) {
+ if (e.code != e.NAMESPACE_ERR)
+ message = "wrong exception";
+ else if (e.INVALID_ACCESS_ERR != 15)
+ message = "exceptions don't have all the constants";
+ }
+ if (message)
+ fail(message);
+ return 2;
+ },
+ function () {
+ // test 26: check that document tree survives while still accessible
+ // Holds two elements from otherwise-unreferenced scratch documents, drops
+ // the document references, churns memory to provoke garbage collection,
+ // and checks the parent chains survive. kungFuDeathGrip is a global
+ // (defined outside this hunk) that carries e1/e2 over to test 27.
+ var d;
+ // e1 - an element that's in a document
+ d = document.implementation.createDocument(null, null, null);
+ var e1 = d.createElement('test');
+ d.appendChild(d.createElement('root'));
+ d.documentElement.appendChild(e1);
+ assert(e1.parentNode, "e1 - parent element doesn't exist");
+ assert(e1.parentNode.ownerDocument, "e1 - document doesn't exist");
+ // e2 - an element that's not in a document
+ d = document.implementation.createDocument(null, null, null);
+ var e2 = d.createElement('test');
+ d.createElement('root').appendChild(e2);
+ assert(e2.parentNode, "e2 - parent element doesn't exist");
+ assert(e2.parentNode.ownerDocument, "e2 - document doesn't exist");
+ // now try to decouple them
+ d = null;
+ kungFuDeathGrip = [e1, e2];
+ assert(e1.parentNode, "e1 - parent element doesn't exist after dropping reference to document");
+ assert(e1.parentNode.ownerDocument, "e1 - document doesn't exist after dropping reference to document");
+ assert(e2.parentNode, "e2 - parent element doesn't exist after dropping reference to document");
+ assert(e2.parentNode.ownerDocument, "e2 - document doesn't exist after dropping reference to document");
+ var loops = new Date().valueOf() * 2.813435e-9 - 2412; // increases linearly over time
+ for (var i = 0; i < loops; i += 1) {
+ // we want to force a GC here, so we use up lots of memory
+ // we take the opportunity to sneak in a perf test to make DOM and JS stuff faster...
+ d = new Date();
+ d = new (function (x) { return { toString: function () { return x.toString() } } })(d.valueOf());
+ d = document.createTextNode("iteration " + i + " at " + d);
+ document.createElement('a').appendChild(d);
+ d = d.parentNode;
+ document.body.insertBefore(d, document.getElementById('bucket1').parentNode);
+ assert(document.getElementById('bucket2').nextSibling.parentNode.previousSibling.firstChild.data.match(/AT\W/i), "iteration " + i + " failed");
+ d.setAttribute('class', d.textContent);
+ document.body.removeChild(d);
+ }
+ assert(e1.parentNode, "e1 - parent element doesn't exist after looping");
+ assert(e1.parentNode.ownerDocument, "e1 - document doesn't exist after looping");
+ assertEquals(e1.parentNode.ownerDocument.nodeType, 9, "e1 - document node type has wrong node type");
+ assert(e2.parentNode, "e2 - parent element doesn't exist after looping");
+ assert(e2.parentNode.ownerDocument, "e2 - document doesn't exist after looping");
+ assertEquals(e2.parentNode.ownerDocument.nodeType, 9, "e2 - document node type has wrong node type");
+ return 2;
+ },
+ function () {
+ // test 27: a continuation of the previous test
+ // Re-checks that the elements stashed in kungFuDeathGrip by test 26 (and
+ // their otherwise-unreferenced parent chains) survived the time between
+ // tests, then releases the grip.
+ var e1 = kungFuDeathGrip[0];
+ var e2 = kungFuDeathGrip[1];
+ kungFuDeathGrip = null;
+ assert(e1, "e1 - element itself didn't survive across tests");
+ assert(e1.parentNode, "e1 - parent element doesn't exist after waiting");
+ assert(e1.parentNode.ownerDocument, "e1 - document doesn't exist after waiting");
+ assertEquals(e1.parentNode.ownerDocument.nodeType, 9, "e1 - document node type has wrong node type after waiting");
+ assert(e2, "e2 - element itself didn't survive across tests");
+ assert(e2.parentNode, "e2 - parent element doesn't exist after waiting");
+ assert(e2.parentNode.ownerDocument, "e2 - document doesn't exist after waiting");
+ assertEquals(e2.parentNode.ownerDocument.nodeType, 9, "e2 - document node type has wrong node type after waiting");
+ return 2;
+ },
+ function () {
+ // test 28: getElementById()
+ // Must match only on id (not name=""), and must handle an id consisting
+ // of a single space character.
+ // ...and name=""
+ assert(document.getElementById('form') !== document.getElementsByTagName('form')[0], "getElementById() searched on 'name'");
+ // ...and a space character as the ID
+ var div = document.createElement('div');
+ div.appendChild(document.createTextNode('FAIL'));
+ div.id = " ";
+ document.body.appendChild(div); // it's hidden by CSS
+ assert(div === document.getElementById(" "), "getElementById() didn't return the right element");
+ return 2;
+ },
+ function () {
+ // test 29: check that whitespace survives cloning
+ // Deep-clones the page's table and checks structure, child counts, and
+ // the trailing whitespace text node are all preserved in the clone.
+ var t1 = document.getElementsByTagName('table')[0];
+ var t2 = t1.cloneNode(true);
+ assertEquals(t2.tBodies[0].rows[0].cells[0].firstChild.tagName, 'P', "<p> didn't clone right");
+ assertEquals(t2.tBodies[0].rows[0].cells[0].firstChild.childNodes.length, 0, "<p> got child nodes after cloning");
+ assertEquals(t2.childNodes.length, 2, "cloned table had wrong number of children");
+ assertEquals(t2.lastChild.data, " ", "cloned table lost whitespace text node");
+ return 2;
+ },
+
+ // Events
+ function () {
+ // test 30: dispatchEvent()
+ // A bubbling synthetic UIEvent dispatched on #score (and its sibling text
+ // node) must reach a listener on the #result ancestor twice; after
+ // removeEventListener() the third dispatch must not be heard.
+ var count = 0;
+ var ok = true;
+ var test = function (event) {
+ if (event.detail != 6)
+ ok = false;
+ count++;
+ };
+ // test event listener addition
+ document.getElementById('result').addEventListener('test', test, false);
+ // test event creation
+ var event = document.createEvent('UIEvents');
+ event.initUIEvent('test', true, false, null, 6);
+ // test event dispatch on elements and text nodes
+ assert(document.getElementById('score').dispatchEvent(event), "dispatchEvent #1 failed");
+ assert(document.getElementById('score').nextSibling.dispatchEvent(event), "dispatchEvent #2 failed");
+ // test event listener removal
+ document.getElementById('result').removeEventListener('test', test, false);
+ assert(document.getElementById('score').dispatchEvent(event), "dispatchEvent #3 failed");
+ assertEquals(count, 2, "unexpected number of events handled");
+ assert(ok, "unexpected events handled");
+ return 2;
+ },
+ function () {
+ // test 31: event.stopPropagation() and capture
+ // we're going to use an input element because we can cause events to bubble from it
+ var input = document.createElement('input');
+ var div = document.createElement('div');
+ div.appendChild(input);
+ document.body.appendChild(div);
+ // the test will consist of two event handlers:
+ var ok = true;
+ var captureCount = 0;
+ // Capture-phase checker: eventPhase 1 == CAPTURING_PHASE; the target is
+ // the input while the currentTarget is the div being captured through.
+ var testCapture = function (event) {
+ ok = ok &&
+ (event.type == 'click') &&
+ (event.target == input) &&
+ (event.currentTarget == div) &&
+ (event.eventPhase == 1) &&
+ (event.bubbles) &&
+ (event.cancelable);
+ captureCount++;
+ event.stopPropagation(); // this shouldn't stop it from firing both times on the div element
+ };
+ // Bubble listener must never run: stopPropagation() in capture kills the
+ // bubble phase.
+ var testBubble = function (event) {
+ ok = false;
+ };
+ // one of which is added twice:
+ div.addEventListener('click', function (event) { testCapture(event) }, true);
+ div.addEventListener('click', function (event) { testCapture(event) }, true);
+ div.addEventListener('click', testBubble, false);
+ // we cause an event to bubble like this:
+ input.type = 'reset';
+ input.click();
+ // cleanup afterwards
+ document.body.removeChild(div);
+ // capture handler should have been called twice
+ assertEquals(captureCount, 2, "capture handler called the wrong number of times");
+ assert(ok, "capture handler called incorrectly");
+ return 2;
+ },
+ function () {
+ // test 32: events bubbling through Document node
+ // A click on an input inside body must reach a bubble-phase listener on
+ // body exactly once (eventPhase 3 == BUBBLING_PHASE), and not at all after
+ // the listener is removed.
+ // event handler:
+ var ok = true;
+ var count = 0;
+ var test = function (event) {
+ count += 1;
+ if (event.eventPhase != 3)
+ ok = false;
+ }
+ // register event handler
+ document.body.addEventListener('click', test, false);
+ // create an element that bubbles an event, and bubble it
+ var input = document.createElement('input');
+ var div = document.createElement('div');
+ div.appendChild(input);
+ document.body.appendChild(div);
+ input.type = 'reset';
+ input.click();
+ // unregister event handler
+ document.body.removeEventListener('click', test, false);
+ // check that it's removed for good
+ input.click();
+ // remove the newly added elements
+ document.body.removeChild(div);
+ assertEquals(count, 1, "capture handler called the wrong number of times");
+ assert(ok, "capture handler called incorrectly");
+ return 2;
+ },
+
+ // bucket 3: DOM2 Views, DOM2 Style, and Selectors
+ function () {
+ // test 33: basic tests for selectors - classes, attributes
+ var p;
+ var builder = function(doc) {
+ p = doc.createElement("p");
+ doc.body.appendChild(p);
+ };
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ p.className = "selectorPingTest";
+ var good = add(".selectorPingTest");
+ add(".SelectorPingTest");
+ add(".selectorpingtest");
+ expect(doc.body, 0, "failure 1");
+ expect(p, good, "failure 2");
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ p.className = 'a\u0020b\u0009c\u000Ad\u000De\u000Cf\u2003g\u3000h';
+ var good = add(".a.b.c.d.e.f\\2003g\\3000h");
+ expect(p, good, "whitespace error in class processing");
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ p.className = "selectorPingTest";
+ var good = add("[class=selectorPingTest]");
+ add("[class=SelectorPingTest]");
+ add("[class=selectorpingtest]");
+ expect(doc.body, 0, "failure 3");
+ expect(p, good, "class attribute matching failed");
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ p.className = "selectorPingTest";
+ var good = add("[title=selectorPingTest]");
+ add("[title=SelectorPingTest]");
+ add("[title=selectorpingtest]");
+ expect(doc.body, 0, "failure 4");
+ expect(p, 0, "failure 5");
+ p.title = "selectorPingTest";
+ expect(doc.body, 0, "failure 6");
+ expect(p, good, "failure 7");
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ p.setAttribute('align', 'right and left');
+ var good = add("[align=\"right and left\"]");
+ add("[align=left]");
+ add("[align=right]");
+ expect(p, good, "align attribute mismatch");
+ });
+ return 3;
+ },
+ function () {
+ // test 34: :lang() and [|=]
+ var div1;
+ var div2;
+ var p;
+ var builder = function(doc) {
+ div1 = doc.createElement('div');
+ div1.setAttribute("lang", "english");
+ div1.setAttribute("class", "widget-tree");
+ doc.body.appendChild(div1);
+ div2 = doc.createElement('div');
+ div2.setAttribute("lang", "en-GB");
+ div2.setAttribute("class", "WIDGET");
+ doc.body.appendChild(div2);
+ p = doc.createElement('p');
+ div2.appendChild(p);
+ };
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var lang_en = add(":lang(en)");
+ expect(div1, 0, "lang=english should not be matched by :lang(en)");
+ expect(div2, lang_en, "lang=en-GB should be matched by :lang(en)");
+ expect(p, lang_en, "descendants inheriting lang=en-GB should be matched by :lang(en)");
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var class_widget = add("[class|=widget]");
+ expect(div1, class_widget, "class attribute should be supported by |= attribute selectors");
+ expect(div2, 0, "class attribute is case-sensitive");
+ });
+ return 3;
+ },
+ function () {
+ // test 35: :first-child
+ selectorTest(function (doc, add, expect) {
+ var notFirst = 0;
+ var first = add(":first-child");
+ var p1 = doc.createElement("p");
+ doc.body.appendChild(doc.createTextNode(" TEST "));
+ doc.body.appendChild(p1);
+ expect(doc.documentElement, notFirst, "root element, with no parent node, claims to be a :first-child");
+ expect(doc.documentElement.firstChild, first, "first child of root node didn't match :first-child");
+ expect(doc.documentElement.firstChild.firstChild, first, "failure 3");
+ expect(doc.body, notFirst, "failure 4");
+ expect(p1, first, "failure 5");
+ var p2 = doc.createElement("p");
+ doc.body.appendChild(p2);
+ expect(doc.body, notFirst, "failure 6");
+ expect(p1, first, "failure 7");
+ expect(p2, notFirst, "failure 8");
+ var p0 = doc.createElement("p");
+ doc.body.insertBefore(p0, p1);
+ expect(doc.body, notFirst, "failure 9");
+ expect(p0, first, "failure 10");
+ expect(p1, notFirst, ":first-child still applies to element that was previously a first child");
+ expect(p2, notFirst, "failure 12");
+ doc.body.insertBefore(p0, p2);
+ expect(doc.body, notFirst, "failure 13");
+ expect(p1, first, "failure 14");
+ expect(p0, notFirst, "failure 15");
+ expect(p2, notFirst, "failure 16");
+ });
+ return 3;
+ },
+ function () {
+ // test 36: :last-child
+ var p1;
+ var p2;
+ var builder = function(doc) {
+ p1 = doc.createElement('p');
+ p2 = doc.createElement('p');
+ doc.body.appendChild(p1);
+ doc.body.appendChild(p2);
+ };
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var last = add(":last-child");
+ expect(p1, 0, "control test for :last-child failed");
+ expect(p2, last, "last child did not match :last-child");
+ doc.body.appendChild(p1);
+ expect(p2, 0, ":last-child matched element with a following sibling");
+ expect(p1, last, "failure 4");
+ p1.appendChild(p2);
+ expect(p2, last, "failure 5");
+ expect(p1, last, "failure 6");
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var last = add(":last-child");
+ expect(p1, 0, "failure 7");
+ expect(p2, last, "failure 8");
+ doc.body.insertBefore(p2, p1);
+ expect(p2, 0, "failure 9");
+ expect(p1, last, "failure 10");
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var last = add(":last-child");
+ expect(p1, 0, "failure 11");
+ expect(p2, last, "failure 12");
+ doc.body.removeChild(p2);
+ expect(p1, last, "failure 13");
+ assertEquals(p1.nextSibling, null, "failure 14");
+ assertEquals(p2.parentNode, null, "failure 15");
+ });
+ return 3;
+ },
+ function () {
+ // test 37: :only-child
+ var p1;
+ var p2;
+ var builder = function(doc) {
+ p1 = doc.createElement('p');
+ p2 = doc.createElement('p');
+ doc.body.appendChild(p1);
+ doc.body.appendChild(p2);
+ };
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var only = add(":only-child");
+ expect(p1, 0, "control test for :only-child failed");
+ expect(p2, 0, "failure 2");
+ doc.body.removeChild(p2);
+ expect(p1, only, ":only-child did not match only child");
+ p1.appendChild(p2);
+ expect(p2, only, "failure 4");
+ expect(p1, only, "failure 5");
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var only = add(":only-child");
+ expect(p1, 0, "failure 6");
+ expect(p2, 0, "failure 7");
+ doc.body.removeChild(p1);
+ expect(p2, only, "failure 8");
+ p2.appendChild(p1);
+ expect(p2, only, "failure 9");
+ expect(p1, only, "failure 10");
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var only = add(":only-child");
+ expect(p1, 0, "failure 11");
+ expect(p2, 0, "failure 12");
+ var span1 = doc.createElement('span');
+ p1.appendChild(span1);
+ expect(p1, 0, "failure 13");
+ expect(p2, 0, "failure 14");
+ expect(span1, only, "failure 15");
+ var span2 = doc.createElement('span');
+ p1.appendChild(span2);
+ expect(p1, 0, "failure 16");
+ expect(p2, 0, "failure 17");
+ expect(span1, 0, "failure 18");
+ expect(span2, 0, "failure 19");
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var only = add(":only-child");
+ expect(p1, 0, "failure 20");
+ expect(p2, 0, "failure 21");
+ var span1 = doc.createElement('span');
+ p2.appendChild(span1);
+ expect(p1, 0, "failure 22");
+ expect(p2, 0, "failure 23");
+ expect(span1, only, "failure 24");
+ var span2 = doc.createElement('span');
+ p2.insertBefore(span2, span1);
+ expect(p1, 0, "failure 25");
+ expect(p2, 0, "failure 26");
+ expect(span1, 0, "failure 27");
+ expect(span2, 0, "failure 28");
+ });
+ return 3;
+ },
+ function () {
+ // test 38: :empty
+ selectorTest(function (doc, add, expect) {
+ var empty = add(":empty");
+ var p = doc.createElement('p');
+ doc.body.appendChild(p);
+ expect(p, empty, "empty p element didn't match :empty");
+ var span = doc.createElement('span');
+ p.appendChild(span);
+ expect(p, 0, "adding children didn't stop the element matching :empty");
+ expect(span, empty, "empty span element didn't match :empty");
+ p.removeChild(span);
+ expect(p, empty, "removing all children didn't make the element match :empty");
+ p.appendChild(doc.createComment("c"));
+ p.appendChild(doc.createTextNode(""));
+ expect(p, empty, "element with a comment node and an empty text node didn't match :empty");
+ p.appendChild(doc.createTextNode(""));
+ expect(p, empty, "element with a comment node and two empty text nodes didn't match :empty");
+ p.lastChild.data = " ";
+ expect(p, 0, "adding text to a text node didn't make the element non-:empty");
+ assertEquals(p.childNodes.length, 3, "text nodes may have merged");
+ p.childNodes[1].replaceWholeText("");
+ assertEquals(p.childNodes.length, 1, "replaceWholeText('') didn't remove text nodes");
+ expect(p, empty, "element with a comment node only didn't match :empty");
+ p.appendChild(doc.createElementNS("http://example.com/", "test"));
+ expect(p, 0, "adding an element in a namespace didn't make the element non-:empty");
+ });
+ return 3;
+ },
+ function () {
+ // test 39: :nth-child, :nth-last-child
+ var ps;
+ var builder = function(doc) {
+ ps = [
+ doc.createElement('p'),
+ doc.createElement('p'),
+ doc.createElement('p'),
+ doc.createElement('p'),
+ doc.createElement('p'),
+ doc.createElement('p'),
+ doc.createElement('p'),
+ doc.createElement('p'),
+ doc.createElement('p'),
+ doc.createElement('p'),
+ doc.createElement('p'),
+ doc.createElement('p'),
+ doc.createElement('p')
+ ];
+ for (var i = 0; i < ps.length; i += 1)
+ doc.body.appendChild(ps[i]);
+ };
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var match = add(":nth-child(odd)");
+ for (var i = 0; i < ps.length; i += 1)
+ expect(ps[i], i % 2 ? 0 : match, ":nth-child(odd) failed with child " + i);
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var match = add(":nth-child(even)");
+ for (var i = 0; i < ps.length; i += 1)
+ expect(ps[i], i % 2 ? match : 0 , ":nth-child(even) failed with child " + i);
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var match = add(":nth-child(odd)");
+ doc.body.removeChild(ps[5]);
+ for (var i = 0; i < 5; i += 1)
+ expect(ps[i], i % 2 ? 0 : match, ":nth-child(odd) failed after removal with child " + i);
+ for (var i = 6; i < ps.length; i += 1)
+ expect(ps[i], i % 2 ? match : 0, ":nth-child(odd) failed after removal with child " + i);
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var match = add(":nth-child(even)");
+ doc.body.removeChild(ps[5]);
+ for (var i = 0; i < 5; i += 1)
+ expect(ps[i], i % 2 ? match : 0, ":nth-child(even) failed after removal with child " + i);
+ for (var i = 6; i < ps.length; i += 1)
+ expect(ps[i], i % 2 ? 0 : match, ":nth-child(even) failed after removal with child " + i);
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var match = add(":nth-child(-n+3)");
+ for (var i = 0; i < 3; i += 1)
+ expect(ps[i], match, ":nth-child(-n+3) failed with child " + i);
+ for (var i = 3; i < ps.length; i += 1)
+ expect(ps[i], 0, ":nth-child(-n+3) failed with child " + i);
+ });
+ return 3;
+ },
+ function () {
+ // test 40: :first-of-type, :last-of-type, :only-of-type, :nth-of-type, :nth-last-of-type
+ var elements;
+ var builder = function(doc) {
+ elements = [
+ doc.createElement('p'),
+ doc.createElement('div'),
+ doc.createElement('div'),
+ doc.createElement('p'),
+ doc.createElement('p'),
+ doc.createElement('p'),
+ doc.createElement('div'),
+ doc.createElement('address'),
+ doc.createElement('div'),
+ doc.createElement('div'),
+ doc.createElement('div'),
+ doc.createElement('p'),
+ doc.createElement('div'),
+ doc.createElement('p')
+ ];
+ for (var i = 0; i < elements.length; i += 1)
+ doc.body.appendChild(elements[i]);
+ };
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var match = add(":first-of-type");
+ var values = [1, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0];
+ for (var i = 0; i < elements.length; i += 1)
+ expect(elements[i], values[i] ? match : 0, "part 1:" + i);
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var match = add(":last-of-type");
+ var values = [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 1];
+ for (var i = 0; i < elements.length; i += 1)
+ expect(elements[i], values[i] ? match : 0, "part 2:" + i);
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var match = add(":only-of-type");
+ var values = [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0];
+ for (var i = 0; i < elements.length; i += 1)
+ expect(elements[i], values[i] ? match : 0, "part 3:" + i);
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var match = add(":nth-of-type(3n-1)");
+ var values = [0, 0, 1, 1, 0, 0, 0, 0, 0, 1, 0, 1, 0, 0];
+ for (var i = 0; i < elements.length; i += 1)
+ expect(elements[i], values[i] ? match : 0, "part 4:" + i);
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var match = add(":nth-of-type(3n+1)");
+ var values = [1, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 0];
+ for (var i = 0; i < elements.length; i += 1)
+ expect(elements[i], values[i] ? match : 0, "part 5:" + i);
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var match = add(":nth-last-of-type(2n)");
+ var values = [1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 0];
+ for (var i = 0; i < elements.length; i += 1)
+ expect(elements[i], values[i] ? match : 0, "part 6:" + i);
+ });
+ selectorTest(function (doc, add, expect) {
+ builder(doc);
+ var match = add(":nth-last-of-type(-5n+3)");
+ var values;
+ values = [0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0];
+ for (var i = 0; i < elements.length; i += 1)
+ expect(elements[i], values[i] ? match : 0, "part 7:" + i);
+ doc.body.appendChild(doc.createElement('blockquote'));
+ values = [0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0];
+ for (var i = 0; i < elements.length; i += 1)
+ expect(elements[i], values[i] ? match : 0, "part 8:" + i);
+ doc.body.appendChild(doc.createElement('div'));
+ values = [0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0];
+ for (var i = 0; i < elements.length; i += 1)
+ expect(elements[i], values[i] ? match : 0, "part 9:" + i);
+ });
+ return 3;
+ },
+ function () {
+ // test 41: :root, :not()
+ selectorTest(function (doc, add, expect) {
+ var match = add(":not(:root)");
+ var p = doc.createElement('p');
+ doc.body.appendChild(p);
+ expect(doc.documentElement, 0, "root was :not(:root)");
+ expect(doc.documentElement.childNodes[0], match,"head was not :not(:root)");
+ expect(doc.documentElement.childNodes[1], match,"body was not :not(:root)");
+ expect(doc.documentElement.childNodes[0].firstChild, match,"title was not :not(:root)");
+ expect(p, match,"p was not :not(:root)");
+ });
+ return 3;
+ },
+ function () {
+ // test 42: +, ~, >, and ' ' in dynamic situations
+ selectorTest(function (doc, add, expect) {
+ var div1 = doc.createElement('div');
+ div1.id = "div1";
+ doc.body.appendChild(div1);
+ var div2 = doc.createElement('div');
+ doc.body.appendChild(div2);
+ var div3 = doc.createElement('div');
+ doc.body.appendChild(div3);
+ var div31 = doc.createElement('div');
+ div3.appendChild(div31);
+ var div311 = doc.createElement('div');
+ div31.appendChild(div311);
+ var div3111 = doc.createElement('div');
+ div311.appendChild(div3111);
+ var match = add("#div1 ~ div div + div > div");
+ expect(div1, 0, "failure 1");
+ expect(div2, 0, "failure 2");
+ expect(div3, 0, "failure 3");
+ expect(div31, 0, "failure 4");
+ expect(div311, 0, "failure 5");
+ expect(div3111, 0, "failure 6");
+ var div310 = doc.createElement('div');
+ div31.insertBefore(div310, div311);
+ expect(div1, 0, "failure 7");
+ expect(div2, 0, "failure 8");
+ expect(div3, 0, "failure 9");
+ expect(div31, 0, "failure 10");
+ expect(div310, 0, "failure 11");
+ expect(div311, 0, "failure 12");
+ expect(div3111, match, "rule did not start matching after change");
+ });
+ selectorTest(function (doc, add, expect) {
+ var div1 = doc.createElement('div');
+ div1.id = "div1";
+ doc.body.appendChild(div1);
+ var div2 = doc.createElement('div');
+ div1.appendChild(div2);
+ var div3 = doc.createElement('div');
+ div2.appendChild(div3);
+ var div4 = doc.createElement('div');
+ div3.appendChild(div4);
+ var div5 = doc.createElement('div');
+ div4.appendChild(div5);
+ var div6 = doc.createElement('div');
+ div5.appendChild(div6);
+ var match = add("#div1 > div div > div");
+ expect(div1, 0, "failure 14");
+ expect(div2, 0, "failure 15");
+ expect(div3, 0, "failure 16");
+ expect(div4, match, "failure 17");
+ expect(div5, match, "failure 18");
+ expect(div6, match, "failure 19");
+ var p34 = doc.createElement('p');
+ div3.insertBefore(p34, div4);
+ p34.insertBefore(div4, null);
+ expect(div1, 0, "failure 20");
+ expect(div2, 0, "failure 21");
+ expect(div3, 0, "failure 22");
+ expect(p34, 0, "failure 23");
+ expect(div4, 0, "failure 24");
+ expect(div5, match, "failure 25");
+ expect(div6, match, "failure 26");
+ });
+ selectorTest(function (doc, add, expect) {
+ var div1 = doc.createElement('div');
+ div1.id = "div1";
+ doc.body.appendChild(div1);
+ var div2 = doc.createElement('div');
+ div1.appendChild(div2);
+ var div3 = doc.createElement('div');
+ div2.appendChild(div3);
+ var div4 = doc.createElement('div');
+ div3.appendChild(div4);
+ var div5 = doc.createElement('div');
+ div4.appendChild(div5);
+ var div6 = doc.createElement('div');
+ div5.appendChild(div6);
+ var match = add("#div1 > div div > div");
+ expect(div1, 0, "failure 27");
+ expect(div2, 0, "failure 28");
+ expect(div3, 0, "failure 29");
+ expect(div4, match, "failure 30");
+ expect(div5, match, "failure 31");
+ expect(div6, match, "failure 32");
+ var p23 = doc.createElement('p');
+ div2.insertBefore(p23, div3);
+ p23.insertBefore(div3, null);
+ expect(div1, 0, "failure 33");
+ expect(div2, 0, "failure 34");
+ expect(div3, 0, "failure 35");
+ expect(p23, 0, "failure 36");
+ expect(div4, match, "failure 37");
+ expect(div5, match, "failure 38");
+ expect(div6, match, "failure 39");
+ });
+ return 3;
+ },
+ function () {
+ // test 43: :enabled, :disabled, :checked, etc
+ selectorTest(function (doc, add, expect) {
+ var input = doc.createElement('input');
+ input.type = 'checkbox';
+ doc.body.appendChild(input);
+ var neither = 0;
+ var both = add(":checked:enabled");
+ var checked = add(":checked");
+ var enabled = add(":enabled");
+ expect(doc.body, neither, "control failure");
+ expect(input, enabled, "input element didn't match :enabled");
+ input.click();
+ expect(input, both, "input element didn't match :checked");
+ input.disabled = true;
+ expect(input, checked, "failure 3");
+ input.checked = false;
+ expect(input, neither, "failure 4");
+ expect(doc.body, neither, "failure 5");
+ });
+ selectorTest(function (doc, add, expect) {
+ var input1 = doc.createElement('input');
+ input1.type = 'radio';
+ input1.name = 'radio';
+ doc.body.appendChild(input1);
+ var input2 = doc.createElement('input');
+ input2.type = 'radio';
+ input2.name = 'radio';
+ doc.body.appendChild(input2);
+ var checked = add(":checked");
+ expect(input1, 0, "failure 6");
+ expect(input2, 0, "failure 7");
+ input2.click();
+ expect(input1, 0, "failure 6");
+ expect(input2, checked, "failure 7");
+ input1.checked = true;
+ expect(input1, checked, "failure 8");
+ expect(input2, 0, "failure 9");
+ input2.setAttribute("checked", "checked"); // sets defaultChecked, doesn't change actual state
+ expect(input1, checked, "failure 10");
+ expect(input2, 0, "failure 11");
+ input1.type = "text";
+ expect(input1, 0, "text field matched :checked");
+ });
+ selectorTest(function (doc, add, expect) {
+ var input = doc.createElement('input');
+ input.type = 'button';
+ doc.body.appendChild(input);
+ var neither = 0;
+ var enabled = add(":enabled");
+ var disabled = add(":disabled");
+ add(":enabled:disabled");
+ expect(input, enabled, "failure 12");
+ input.disabled = true;
+ expect(input, disabled, "failure 13");
+ input.removeAttribute("disabled");
+ expect(input, enabled, "failure 14");
+ expect(doc.body, neither, "failure 15");
+ });
+ return 3;
+ },
+ function () {
+ // test 44: selectors without spaces before a "*"
+ selectorTest(function (doc, add, expect) {
+ doc.body.className = "test";
+ var p = doc.createElement('p');
+ p.className = "test";
+ doc.body.appendChild(p);
+ add("html*.test");
+ expect(doc.body, 0, "misparsed selectors");
+ expect(p, 0, "really misparsed selectors");
+ });
+ return 3;
+ },
+ function () {
+ // test 45: cssFloat and the style attribute
+ assert(!document.body.style.cssFloat, "body has floatation");
+ document.body.setAttribute("style", "float: right");
+ assertEquals(document.body.style.cssFloat, "right", "body doesn't have floatation");
+ document.body.setAttribute("style", "float: none");
+ assertEquals(document.body.style.cssFloat, "none", "body didn't lose floatation");
+ return 3;
+ },
+ function () {
+ // test 46: media queries
+ var doc = getTestDocument();
+ var style = doc.createElement('style');
+ style.setAttribute('type', 'text/css');
+ style.appendChild(doc.createTextNode('@media all and (min-color: 0) { #a { text-transform: uppercase; } }')); // matches
+ style.appendChild(doc.createTextNode('@media not all and (min-color: 0) { #b { text-transform: uppercase; } }'));
+ style.appendChild(doc.createTextNode('@media only all and (min-color: 0) { #c { text-transform: uppercase; } }')); // matches
+ style.appendChild(doc.createTextNode('@media (bogus) { #d { text-transform: uppercase; } }'));
+ style.appendChild(doc.createTextNode('@media all and (bogus) { #e { text-transform: uppercase; } }'));
+ style.appendChild(doc.createTextNode('@media not all and (bogus) { #f { text-transform: uppercase; } }')); // commented out but should not match
+ style.appendChild(doc.createTextNode('@media only all and (bogus) { #g { text-transform: uppercase; } }'));
+ style.appendChild(doc.createTextNode('@media (bogus), all { #h { text-transform: uppercase; } }')); // matches
+ style.appendChild(doc.createTextNode('@media all and (bogus), all { #i { text-transform: uppercase; } }')); // matches
+ style.appendChild(doc.createTextNode('@media not all and (bogus), all { #j { text-transform: uppercase; } }')); // matches
+ style.appendChild(doc.createTextNode('@media only all and (bogus), all { #k { text-transform: uppercase; } }')); // matches
+ style.appendChild(doc.createTextNode('@media all, (bogus) { #l { text-transform: uppercase; } }')); // matches
+ style.appendChild(doc.createTextNode('@media all, all and (bogus) { #m { text-transform: uppercase; } }')); // matches
+ style.appendChild(doc.createTextNode('@media all, not all and (bogus) { #n { text-transform: uppercase; } }')); // matches
+ style.appendChild(doc.createTextNode('@media all, only all and (bogus) { #o { text-transform: uppercase; } }')); // matches
+ style.appendChild(doc.createTextNode('@media all and color { #p { text-transform: uppercase; } }'));
+ style.appendChild(doc.createTextNode('@media all and min-color: 0 { #q { text-transform: uppercase; } }'));
+ style.appendChild(doc.createTextNode('@media all, all and color { #r { text-transform: uppercase; } }')); // commented out but should match
+ style.appendChild(doc.createTextNode('@media all, all and min-color: 0 { #s { text-transform: uppercase; } }')); // commented out but should match
+ style.appendChild(doc.createTextNode('@media all and min-color: 0, all { #t { text-transform: uppercase; } }')); // commented out but should match
+ style.appendChild(doc.createTextNode('@media (max-color: 0) and (max-monochrome: 0) { #u { text-transform: uppercase; } }'));
+ style.appendChild(doc.createTextNode('@media (min-color: 1), (min-monochrome: 1) { #v { text-transform: uppercase; } }')); // matches
+ style.appendChild(doc.createTextNode('@media all and (min-color: 0) and (min-monochrome: 0) { #w { text-transform: uppercase; } }')); // matches
+ style.appendChild(doc.createTextNode('@media not all and (min-color: 1), not all and (min-monochrome: 1) { #x { text-transform: uppercase; } }')); // matches
+ style.appendChild(doc.createTextNode('@media all and (min-height: 1em) and (min-width: 1em) { #y1 { text-transform: uppercase; } }'));
+ style.appendChild(doc.createTextNode('@media all and (max-height: 1em) and (min-width: 1em) { #y2 { text-transform: uppercase; } }'));
+ style.appendChild(doc.createTextNode('@media all and (min-height: 1em) and (max-width: 1em) { #y3 { text-transform: uppercase; } }'));
+ style.appendChild(doc.createTextNode('@media all and (max-height: 1em) and (max-width: 1em) { #y4 { text-transform: uppercase; } }')); // matches
+ doc.getElementsByTagName('head')[0].appendChild(style);
+ var names = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y1', 'y2', 'y3', 'y4'];
+ for (var i in names) {
+ var p = doc.createElement('p');
+ p.id = names[i];
+ doc.body.appendChild(p);
+ }
+ var count = 0;
+ var check = function (c, e) {
+ count += 1;
+ var p = doc.getElementById(c);
+ assertEquals(doc.defaultView.getComputedStyle(p, '').textTransform, e ? 'uppercase' : 'none', "case " + c + " failed (index " + count + ")");
+ }
+ check('a', true); // 1
+ check('b', false);
+ check('c', true);
+ check('d', false);
+ check('e', false);
+/* COMMENTED OUT BECAUSE THE CSSWG KEEP CHANGING THE RIGHT ANSWER FOR THIS CASE
+ * check('f', false);
+ */
+ check('g', false);
+ check('h', true);
+ check('i', true);
+ check('j', true); // 10
+ check('k', true);
+ check('l', true);
+ check('m', true);
+ check('n', true);
+ check('o', true);
+ check('p', false);
+ check('q', false);
+/* COMMENTED OUT BECAUSE THE CSSWG KEEP CHANGING THE RIGHT ANSWER FOR THESE TOO APPARENTLY
+ * check('r', true);
+ * check('s', true);
+ * check('t', true); // 20
+ */
+ check('u', false);
+ check('v', true);
+ check('w', true);
+ check('x', true);
+ // here the viewport is 0x0
+ check('y1', false); // 25
+ check('y2', false);
+ check('y3', false);
+ check('y4', true);
+ document.getElementById("selectors").setAttribute("style", "height: 100px; width: 100px");
+ // now the viewport is more than 1em by 1em
+ check('y1', true); // 29
+ check('y2', false);
+ check('y3', false);
+ check('y4', false);
+ document.getElementById("selectors").removeAttribute("style");
+ // here the viewport is 0x0 again
+ check('y1', false); // 33
+ check('y2', false);
+ check('y3', false);
+ check('y4', true);
+ return 3;
+ },
+ function () {
+ // test 47: 'cursor' and CSS3 values
+ var doc = getTestDocument();
+ var style = doc.createElement('style');
+ style.setAttribute('type', 'text/css');
+ var cursors = ['auto', 'default', 'none', 'context-menu', 'help', 'pointer', 'progress', 'wait', 'cell', 'crosshair', 'text', 'vertical-text', 'alias', 'copy', 'move', 'no-drop', 'not-allowed', 'e-resize', 'n-resize', 'ne-resize', 'nw-resize', 's-resize', 'se-resize', 'sw-resize', 'w-resize', 'ew-resize', 'ns-resize', 'nesw-resize', 'nwse-resize', 'col-resize', 'row-resize', 'all-scroll'];
+ for (var i in cursors) {
+ var c = cursors[i];
+ style.appendChild(doc.createTextNode('#' + c + ' { cursor: ' + c + '; }'));
+ }
+ style.appendChild(doc.createTextNode('#bogus { cursor: bogus; }'));
+ doc.body.previousSibling.appendChild(style);
+ doc.body.id = "bogus";
+ assertEquals(doc.defaultView.getComputedStyle(doc.body, '').cursor, "auto", "control failed");
+ for (var i in cursors) {
+ var c = cursors[i];
+ doc.body.id = c;
+ assertEquals(doc.defaultView.getComputedStyle(doc.body, '').cursor, c, "cursor " + c + " not supported");
+ }
+ return 3;
+ },
+ function () {
+ // test 48: :link and :visited
+ var iframe = document.getElementById("selectors");
+ var number = (new Date()).valueOf();
+ var a = document.createElement('a');
+ a.appendChild(document.createTextNode('LINKTEST FAILED'));
+ a.setAttribute('id', 'linktest');
+ a.setAttribute('class', 'pending');
+ a.setAttribute('href', iframe.getAttribute('src') + "?" + number);
+ document.getElementsByTagName('map')[0].appendChild(a);
+ iframe.setAttribute("onload", "document.getElementById('linktest').removeAttribute('class')");
+ iframe.src = a.getAttribute("href");
+ return 3;
+ },
+
+ // bucket 4: HTML and the DOM
+ // Tables
+ function () {
+ // test 49: basic table accessor ping test create*, delete*, and *
+ // where * is caption, tHead, tFoot.
+ var table = document.createElement('table');
+ assert(!table.caption, "initially: caption");
+ assert(table.tBodies, "initially: tBodies");
+ assertEquals(table.tBodies.length, 0, "initially: tBodies.length");
+ assert(table.rows, "initially: rows");
+ assertEquals(table.rows.length, 0, "initially: rows.length");
+ assert(!table.tFoot, "initially: tFoot");
+ assert(!table.tHead, "initially: tHead");
+ var caption = table.createCaption();
+ var thead = table.createTHead();
+ var tfoot = table.createTFoot();
+ assertEquals(table.caption, caption, "after creation: caption");
+ assert(table.tBodies, "after creation: tBodies");
+ assertEquals(table.tBodies.length, 0, "after creation: tBodies.length");
+ assert(table.rows, "after creation: rows");
+ assertEquals(table.rows.length, 0, "after creation: rows.length");
+ assertEquals(table.tFoot, tfoot, "after creation: tFoot");
+ assertEquals(table.tHead, thead, "after creation: tHead");
+ assertEquals(table.childNodes.length, 3, "after creation: childNodes.length");
+ table.caption = caption; // no-op
+ table.tHead = thead; // no-op
+ table.tFoot = tfoot; // no-op
+ assertEquals(table.caption, caption, "after setting: caption");
+ assert(table.tBodies, "after setting: tBodies");
+ assertEquals(table.tBodies.length, 0, "after setting: tBodies.length");
+ assert(table.rows, "after setting: rows");
+ assertEquals(table.rows.length, 0, "after setting: rows.length");
+ assertEquals(table.tFoot, tfoot, "after setting: tFoot");
+ assertEquals(table.tHead, thead, "after setting: tHead");
+ assertEquals(table.childNodes.length, 3, "after setting: childNodes.length");
+ table.deleteCaption();
+ table.deleteTHead();
+ table.deleteTFoot();
+ assert(!table.caption, "after deletion: caption");
+ assert(table.tBodies, "after deletion: tBodies");
+ assertEquals(table.tBodies.length, 0, "after deletion: tBodies.length");
+ assert(table.rows, "after deletion: rows");
+ assertEquals(table.rows.length, 0, "after deletion: rows.length");
+ assert(!table.tFoot, "after deletion: tFoot");
+ assert(!table.tHead, "after deletion: tHead");
+ assert(!table.hasChildNodes(), "after deletion: hasChildNodes()");
+ assertEquals(table.childNodes.length, 0, "after deletion: childNodes.length");
+ return 4;
+ },
+ function () {
+ // test 50: construct a table purely by DOM mutation (appendChild/insertBefore/replaceChild/insertRow/create*) and verify that HTMLTableElement's caption/tHead/tFoot/tBodies/rows accessors all reflect the final structure
+ var table = document.createElement('table');
+ table.appendChild(document.createElement('tbody'));
+ var tr1 = document.createElement('tr');
+ table.appendChild(tr1);
+ table.appendChild(document.createElement('caption'));
+ table.appendChild(document.createElement('thead'));
+ // <table><tbody/><tr/><caption/><thead/>
+ table.insertBefore(table.firstChild.nextSibling, null); // move the <tr/> to the end
+ // <table><tbody/><caption/><thead/><tr/>
+ table.replaceChild(table.firstChild, table.lastChild); // move the <tbody/> to the end and remove the <tr>
+ // <table><caption/><thead/><tbody/>
+ var tr2 = table.tBodies[0].insertRow(0);
+ // <table><caption/><thead/><tbody><tr/><\tbody> (the '\' is to avoid validation errors)
+ assertEquals(table.tBodies[0].rows[0].rowIndex, 0, "rowIndex broken");
+ assertEquals(table.tBodies[0].rows[0].sectionRowIndex, 0, "sectionRowIndex broken");
+ assertEquals(table.childNodes.length, 3, "wrong number of children");
+ assert(table.caption, "caption broken");
+ assert(table.tHead, "tHead broken");
+ assert(!table.tFoot, "tFoot broken");
+ assertEquals(table.tBodies.length, 1, "wrong number of tBodies");
+ assertEquals(table.rows.length, 1, "wrong number of rows");
+ assert(!tr1.parentNode, "orphan row has unexpected parent"); // tr1 was dropped by the replaceChild above
+ assertEquals(table.caption, table.createCaption(), "caption creation failed"); // createCaption() must return the existing caption, not make a second one
+ assertEquals(table.tFoot, null, "table has unexpected footer");
+ assertEquals(table.tHead, table.createTHead(), "header creation failed");
+ assertEquals(table.createTFoot(), table.tFoot, "footer creation failed");
+ // either: <table><caption/><thead/><tbody><tr/><\tbody><tfoot/>
+ // or: <table><caption/><thead/><tfoot/><tbody><tr/><\tbody>
+ table.tHead.appendChild(tr1);
+ // either: <table><caption/><thead><tr/><\thead><tbody><tr/><\tbody><tfoot/>
+ // or: <table><caption/><thead><tr/><\thead><tfoot/><tbody><tr/><\tbody>
+ assertEquals(table.rows[0], table.tHead.firstChild, "top row not in expected position");
+ assertEquals(table.rows.length, 2, "wrong number of rows after appending one");
+ assertEquals(table.rows[1], table.tBodies[0].firstChild, "second row not in expected position");
+ return 4;
+ },
+ function () {
+ // test 51: test the ordering and creation of rows — table.rows must enumerate thead rows first, then tfoot rows, then direct-child rows, in document order within each section
+ var table = document.createElement('table');
+ var rows = [
+ document.createElement('tr'), // 0: ends up first child of the tfoot
+ document.createElement('tr'), // 1: goes at the end of the table
+ document.createElement('tr'), // 2: becomes second child of thead
+ document.createElement('tr'), // 3: becomes third child of the thead
+ document.createElement('tr'), // 4: not in the table
+ table.insertRow(0), // 5: not in the table
+ table.createTFoot().insertRow(0) // 6: ends up second in the tfoot
+ ];
+ rows[6].parentNode.appendChild(rows[0]);
+ table.appendChild(rows[1]);
+ table.insertBefore(document.createElement('thead'), table.firstChild);
+ table.firstChild.appendChild(rows[2]);
+ rows[2].parentNode.appendChild(rows[3]);
+ rows[4].appendChild(rows[5].parentNode); // reparents the tbody insertRow(0) created, pulling row 5 out of the table
+ table.insertRow(0); // with no tbody left, this must insert a row into the thead (or a fresh tbody, per DOM2 HTML)
+ table.tFoot.appendChild(rows[6]);
+ assertEquals(table.rows.length, 6, "wrong number of rows");
+ assertEquals(table.getElementsByTagName('tr').length, 6, "wrong number of tr elements");
+ assertEquals(table.childNodes.length, 3, "table has wrong number of children");
+ assertEquals(table.childNodes[0], table.tHead, "tHead isn't first");
+ assertEquals(table.getElementsByTagName('tr')[0], table.tHead.childNodes[0], "first tr isn't in tHead correctly");
+ assertEquals(table.getElementsByTagName('tr')[1], table.tHead.childNodes[1], "second tr isn't in tHead correctly");
+ assertEquals(table.getElementsByTagName('tr')[1], rows[2], "second tr is the wrong row");
+ assertEquals(table.getElementsByTagName('tr')[2], table.tHead.childNodes[2], "third tr isn't in tHead correctly");
+ assertEquals(table.getElementsByTagName('tr')[2], rows[3], "third tr is the wrong row");
+ assertEquals(table.childNodes[1], table.tFoot, "tFoot isn't second");
+ assertEquals(table.getElementsByTagName('tr')[3], table.tFoot.childNodes[0], "fourth tr isn't in tFoot correctly");
+ assertEquals(table.getElementsByTagName('tr')[3], rows[0], "fourth tr is the wrong row");
+ assertEquals(table.getElementsByTagName('tr')[4], table.tFoot.childNodes[1], "fifth tr isn't in tFoot correctly");
+ assertEquals(table.getElementsByTagName('tr')[4], rows[6], "fifth tr is the wrong row");
+ assertEquals(table.getElementsByTagName('tr')[5], table.childNodes[2], "sixth tr isn't in tFoot correctly");
+ assertEquals(table.getElementsByTagName('tr')[5], rows[1], "sixth tr is the wrong row");
+ assertEquals(table.tBodies.length, 0, "non-zero number of tBodies");
+ return 4;
+ },
+
+ // Forms
+ function () {
+ // test 52: <form> and .elements — the elements collection must be a distinct object, not a reflected content attribute, and its length must agree with form.length
+ test = document.getElementsByTagName('form')[0];
+ assert(test.elements !== test, "form.elements === form");
+ assert(test.elements !== test.getAttribute('elements'), "form element has an elements content attribute");
+ assertEquals(test.elements.length, 1, "form element has unexpected number of controls");
+ assertEquals(test.elements.length, test.length, "form element has inconsistent numbers of controls");
+ return 4;
+ },
+ function () {
+ // test 53: changing an <input> dynamically — name/type property writes must reflect to content attributes, value writes must NOT, and form.elements' named lookup must track name changes live
+ var f = document.createElement('form');
+ var i = document.createElement('input');
+ i.name = 'first';
+ i.type = 'text';
+ i.value = 'test';
+ f.appendChild(i);
+ assertEquals(i.getAttribute('name'), 'first', "name attribute wrong");
+ assertEquals(i.name, 'first', "name property wrong");
+ assertEquals(i.getAttribute('type'), 'text', "type attribute wrong");
+ assertEquals(i.type, 'text', "type property wrong");
+ assert(!i.hasAttribute('value'), "value attribute wrong"); // .value sets the dirty value, not the value content attribute
+ assertEquals(i.value, 'test', "value property wrong");
+ assertEquals(f.elements.length, 1, "form's elements array has wrong size");
+ assertEquals(f.elements[0], i, "form's element array doesn't have input control by index");
+ assertEquals(f.elements.first, i, "form's element array doesn't have input control by name");
+ assertEquals(f.elements.second, null, "form's element array has unexpected controls by name");
+ i.name = 'second';
+ i.type = 'password';
+ i.value = 'TEST';
+ assertEquals(i.getAttribute('name'), 'second', "name attribute wrong after change");
+ assertEquals(i.name, 'second', "name property wrong after change");
+ assertEquals(i.getAttribute('type'), 'password', "type attribute wrong after change");
+ assertEquals(i.type, 'password', "type property wrong after change");
+ assert(!i.hasAttribute('value'), "value attribute wrong after change");
+ assertEquals(i.value, 'TEST', "value property wrong after change");
+ assertEquals(f.elements.length, 1, "form's elements array has wrong size after change");
+ assertEquals(f.elements[0], i, "form's element array doesn't have input control by index after change");
+ assertEquals(f.elements.second, i, "form's element array doesn't have input control by name after change");
+ assertEquals(f.elements.first, null, "form's element array has unexpected controls by name after change");
+ return 4;
+ },
+ function () {
+ // test 54: changing a parsed <input> — type's DOM attribute is lowercased while the content attribute keeps its case; click() on a submit control synchronously fires onsubmit; getAttribute always returns a String even for numeric attributes
+ var i = document.getElementsByTagName('input')[0];
+ // initial values
+ assertEquals(i.getAttribute('type'), 'HIDDEN', "input control's type content attribute was wrong");
+ assertEquals(i.type, 'hidden', "input control's type DOM attribute was wrong");
+ // change values
+ i.name = 'test';
+ assertEquals(i.parentNode.elements.test, i, "input control's form didn't update");
+ // check event handlers
+ i.parentNode.action = 'javascript:';
+ var called = false;
+ i.parentNode.onsubmit = function (arg) {
+ arg.preventDefault();
+ called = true;
+ };
+ i.type = 'submit';
+ i.click(); // synchronously dispatches a click event to the submit button, which submits the form, which calls onsubmit
+ assert(called, "click handler didn't dispatch properly");
+ i.type = 'hIdDeN';
+ // check numeric attributes
+ i.setAttribute('maxLength', '2');
+ var s = i.getAttribute('maxLength');
+ assert(s.match, "attribute is not a String"); // duck-typing: String has .match ...
+ assert(!s.MIN_VALUE, "attribute is a Number"); // ... and lacks Number.MIN_VALUE
+ return 4;
+ },
+ function () {
+ // test 55: moved checkboxes should keep their state — checkedness must survive every ordering of type-set/check/insert, and survive being reparented into another element
+ var container = document.getElementsByTagName("iframe")[0];
+ var input1 = document.createElement('input');
+ container.appendChild(input1);
+ input1.type = "checkbox";
+ input1.checked = true;
+ assert(input1.checked, "checkbox not checked after being checked (inserted first)");
+ var input2 = document.createElement('input');
+ input2.type = "checkbox";
+ container.appendChild(input2);
+ input2.checked = true;
+ assert(input2.checked, "checkbox not checked after being checked (inserted after type set)");
+ var input3 = document.createElement('input');
+ input3.type = "checkbox";
+ input3.checked = true;
+ container.appendChild(input3);
+ assert(input3.checked, "checkbox not checked after being checked (inserted after being checked)");
+ var target = document.getElementsByTagName("iframe")[1];
+ target.appendChild(input1);
+ target.appendChild(input2);
+ target.appendChild(input3);
+ assert(input1.checked, "checkbox 1 not checked after being moved");
+ assert(input2.checked, "checkbox 2 not checked after being moved");
+ assert(input3.checked, "checkbox 3 not checked after being moved");
+ return 4;
+ },
+ function () {
+ // test 56: cloned radio buttons should keep their state — cloneNode must not copy checkedness, same-name radios must be mutually exclusive, and different-name groups must be independent
+ var form = document.getElementsByTagName("form")[0];
+ var input1 = document.createElement('input');
+ input1.type = "radio";
+ input1.name = "radioGroup1";
+ form.appendChild(input1);
+ var input2 = input1.cloneNode(true);
+ input1.parentNode.appendChild(input2);
+ input1.checked = true;
+ assert(form.elements.radioGroup1, "radio group absent");
+ assert(input1.checked, "first radio button not checked");
+ assert(!input2.checked, "second radio button checked");
+ input2.checked = true;
+ assert(!input1.checked, "first radio button checked"); // checking one radio must uncheck its group sibling
+ assert(input2.checked, "second radio button not checked");
+ var input3 = document.createElement('input');
+ input3.type = "radio";
+ input3.name = "radioGroup2";
+ form.appendChild(input3);
+ assert(!input3.checked, "third radio button checked");
+ input3.checked = true;
+ assert(!input1.checked, "first radio button newly checked");
+ assert(input2.checked, "second radio button newly not checked"); // radioGroup2 must not affect radioGroup1
+ assert(input3.checked, "third radio button not checked");
+ input1.checked = true;
+ assert(input1.checked, "first radio button ended up not checked");
+ assert(!input2.checked, "second radio button ended up checked");
+ assert(input3.checked, "third radio button ended up not checked");
+ input1.parentNode.removeChild(input1);
+ input2.parentNode.removeChild(input2);
+ input3.parentNode.removeChild(input3);
+ return 4;
+ },
+ function () {
+ // test 57: HTMLSelectElement.add() — add(option, null) must append, and the option must appear in childNodes and in the options collection
+ var s = document.createElement('select');
+ var o = document.createElement('option');
+ s.add(o, null);
+ assert(s.firstChild === o, "add() didn't add to firstChild");
+ assertEquals(s.childNodes.length, 1, "add() didn't add to childNodes");
+ assert(s.childNodes[0] === o, "add() didn't add to childNodes correctly");
+ assertEquals(s.options.length, 1, "add() didn't add to options");
+ assert(s.options[0] === o, "add() didn't add to options correctly");
+ return 4;
+ },
+ function () {
+ // test 58: HTMLOptionElement.defaultSelected — an option flagged defaultSelected before insertion must become the select's selected option
+ var s = document.createElement('select');
+ var o1 = document.createElement('option');
+ var o2 = document.createElement('option');
+ o2.defaultSelected = true;
+ var o3 = document.createElement('option');
+ s.appendChild(o1);
+ s.appendChild(o2);
+ s.appendChild(o3);
+ assert(s.options[s.selectedIndex] === o2, "defaultSelected didn't take");
+ return 4;
+ },
+ function () {
+ // test 59: attributes of <button> elements — type defaults to "submit" (and reverts on removeAttribute), and .value reflects the value attribute, not the text content
+ var button = document.createElement('button');
+ assertEquals(button.type, "submit", "<button> doesn't have type=submit");
+ button.setAttribute("type", "button");
+ assertEquals(button.type, "button", "<button type=button> doesn't have type=button");
+ button.removeAttribute("type");
+ assertEquals(button.type, "submit", "<button> doesn't have type=submit back");
+ button.setAttribute('value', 'apple');
+ button.appendChild(document.createTextNode('banana'));
+ assertEquals(button.value, 'apple', "wrong button value");
+ return 4;
+ },
+
+ // Misc DOM2 HTML
+ function () {
+ // test 60: className vs "class" vs attribute nodes — Attr nodes for class must stay live-synchronized with className in both directions, and detach cleanly on removeAttributeNode
+ var span = document.getElementsByTagName('span')[0];
+ span.setAttribute('class', 'kittens');
+ if (!span.getAttributeNode)
+ return 4; // support for attribute nodes is optional in Acid3, because attribute nodes might be removed from DOM Core in the future.
+ var attr = span.getAttributeNode('class');
+ // however, if they're supported, they'd better work:
+ assert(attr.specified, "attribute not specified");
+ assertEquals(attr.value, 'kittens', "attribute value wrong");
+ assertEquals(attr.name, 'class', "attribute name wrong");
+ attr.value = 'ocelots';
+ assertEquals(attr.value, 'ocelots', "attribute value wrong");
+ assertEquals(span.className, 'ocelots', "setting attribute value failed to be reflected in className");
+ span.className = 'cats';
+ assertEquals(attr.ownerElement.getAttribute('class'), 'cats', "setting attribute value failed to be reflected in getAttribute()");
+ span.removeAttributeNode(attr);
+ assert(attr.specified, "attribute not specified after removal");
+ assert(!attr.ownerElement, "attribute still owned after removal");
+ assert(!span.className, "element had class after removal");
+ return 4;
+ },
+ function () {
+ // test 61: className and the class attribute: space preservation — whitespace in class values must round-trip verbatim through both the attribute and the className property, and setting '' must keep the (empty) attribute present
+ var p = document.createElement('p');
+ assert(!p.hasAttribute('class'), "element had attribute on creation");
+ p.setAttribute('class', ' te st ');
+ assert(p.hasAttribute('class'), "element did not have attribute after setting");
+ assertEquals(p.getAttribute('class'), ' te st ', "class attribute's value was wrong");
+ assertEquals(p.className, ' te st ', "className was wrong");
+ p.className = p.className.replace(/ /g, '\n');
+ assert(p.hasAttribute('class'), "element did not have attribute after replacement");
+ assertEquals(p.getAttribute('class'), '\nte\n\nst\n', "class attribute's value was wrong after replacement");
+ assertEquals(p.className, '\nte\n\nst\n', "className was wrong after replacement");
+ p.className = '';
+ assert(p.hasAttribute('class'), "element lost attribute after being set to empty string");
+ assertEquals(p.getAttribute('class'), '', "class attribute's value was wrong after being emptied");
+ assertEquals(p.className, '', "className was wrong after being emptied");
+ return 4;
+ },
+ function () {
+ // test 62: check that DOM attributes and content attributes aren't equivalent — where the IDL name differs from the content-attribute name (className/class, htmlFor/for, httpEquiv/http-equiv) writes through the "wrong" name must not leak through to the other
+ var test;
+ // <div class="">
+ test = document.getElementsByTagName('div')[0];
+ assertEquals(test.className, 'buckets', "buckets: className wrong");
+ assertEquals(test.getAttribute('class'), 'buckets', "buckets: class wrong");
+ assert(!test.hasAttribute('className'), "buckets: element has className attribute");
+ assert(test.className != test.getAttribute('className'), "buckets: className attribute equals className property");
+ assert(!('class' in test), "buckets: element has class property")
+ test['class'] = "oil"; // an expando, not the reflected attribute
+ assert(test.className != "oil", "buckets: class property affected className");
+ // <label for="">
+ test = document.createElement('label');
+ test.htmlFor = 'jars';
+ assertEquals(test.htmlFor, 'jars', "jars: htmlFor wrong");
+ assertEquals(test.getAttribute('for'), 'jars', "jars: for wrong");
+ assert(!test.hasAttribute('htmlFor'), "jars: element has htmlFor attribute");
+ assert(test.htmlFor != test.getAttribute('htmlFor'), "jars: htmlFor attribute equals htmlFor property");
+ test = document.createElement('label');
+ test.setAttribute('for', 'pots');
+ assertEquals(test.htmlFor, 'pots', "pots: htmlFor wrong");
+ assertEquals(test.getAttribute('for'), 'pots', "pots: for wrong");
+ assert(!test.hasAttribute('htmlFor'), "pots: element has htmlFor attribute");
+ assert(test.htmlFor != test.getAttribute('htmlFor'), "pots: htmlFor attribute equals htmlFor property");
+ assert(!('for' in test), "pots: element has for property");
+ test['for'] = "oil";
+ assert(test.htmlFor != "oil", "pots: for property affected htmlFor");
+ // <meta http-equiv="">
+ test = document.createElement('meta');
+ test.setAttribute('http-equiv', 'boxes');
+ assertEquals(test.httpEquiv, 'boxes', "boxes: httpEquiv wrong");
+ assertEquals(test.getAttribute('http-equiv'), 'boxes', "boxes: http-equiv wrong");
+ assert(!test.hasAttribute('httpEquiv'), "boxes: element has httpEquiv attribute");
+ assert(test.httpEquiv != test.getAttribute('httpEquiv'), "boxes: httpEquiv attribute equals httpEquiv property");
+ test = document.createElement('meta');
+ test.httpEquiv = 'cans';
+ assertEquals(test.httpEquiv, 'cans', "cans: httpEquiv wrong");
+ assertEquals(test.getAttribute('http-equiv'), 'cans', "cans: http-equiv wrong");
+ assert(!test.hasAttribute('httpEquiv'), "cans: element has httpEquiv attribute");
+ assert(test.httpEquiv != test.getAttribute('httpEquiv'), "cans: httpEquiv attribute equals httpEquiv property");
+ assert(!('http-equiv' in test), "cans: element has http-equiv property");
+ test['http-equiv'] = "oil";
+ assert(test.httpEquiv != "oil", "cans: http-equiv property affected httpEquiv");
+ return 4;
+ },
+ function () {
+ // test 63: attributes of the <area> element — getAttribute must return the literal (unresolved, unescaped) markup values, including the empty href and the entity-decoded alt
+ var area = document.getElementsByTagName('area')[0];
+ assertEquals(area.getAttribute('href'), '', "wrong value for href=''");
+ assertEquals(area.getAttribute('shape'), 'rect', "wrong value for shape=''");
+ assertEquals(area.getAttribute('coords'), '2,2,4,4', "wrong value for coords=''");
+ assertEquals(area.getAttribute('alt'), '<\'>', "wrong value for alt=''");
+ return 4;
+ },
+ function () {
+ // test 64: more attribute tests — object.data must resolve relative URIs to absolute ones, and setAttribute of an unknown attribute must not create a same-named JS property on the element
+ // attributes of the <object> element
+ var obj1 = document.createElement('object');
+ obj1.setAttribute('data', 'test.html');
+ var obj2 = document.createElement('object');
+ obj2.setAttribute('data', './test.html');
+ assertEquals(obj1.data, obj2.data, "object elements didn't resolve URIs correctly");
+ assert(obj1.data.match(/^file:/), "object.data isn't absolute"); // changed by chase from /^http:/
+ obj1.appendChild(document.createElement('param'));
+ assertEquals(obj1.getElementsByTagName('param').length, 1, "object is missing its only child");
+ // non-existent attributes ('TWVvdywgbWV3Li4u' is just an arbitrary unknown name)
+ var test = document.createElement('p');
+ assert(!('TWVvdywgbWV3Li4u' in test), "TWVvdywgbWV3Li4u unexpectedly found");
+ assertEquals(test.TWVvdywgbWV3Li4u, undefined, ".TWVvdywgbWV3Li4u wasn't undefined");
+ assertEquals(test['TWVvdywgbWV3Li4u'], undefined, "['TWVvdywgbWV3Li4u'] wasn't undefined");
+ test.setAttribute('TWVvdywgbWV3Li4u', 'woof');
+ assert(!('TWVvdywgbWV3Li4u' in test), "TWVvdywgbWV3Li4u unexpectedly found after setting");
+ assertEquals(test.TWVvdywgbWV3Li4u, undefined, ".TWVvdywgbWV3Li4u wasn't undefined after setting");
+ assertEquals(test['TWVvdywgbWV3Li4u'], undefined, "['TWVvdywgbWV3Li4u'] wasn't undefined after setting");
+ assertEquals(test.getAttribute('TWVvdywgbWV3Li4u'), 'woof', "TWVvdywgbWV3Li4u has wrong value after setting");
+ return 4;
+ },
+
+ // bucket 5: Tests from the Acid3 Competition
+ function () {
+ // test 65: bring in a couple of SVG files and some HTML files dynamically - preparation for later tests in this bucket; each onload appends a digit to kungFuDeathGrip.title, which test 69 polls to know all seven subdocuments have loaded ("kungFuDeathGrip" holds a live reference so nothing is garbage-collected)
+ kungFuDeathGrip = document.createElement('p');
+ kungFuDeathGrip.className = 'removed';
+ var iframe, object;
+ // svg iframe
+ iframe = document.createElement('iframe');
+ iframe.onload = function () { kungFuDeathGrip.title += '1' };
+ iframe.src = "svg.svg"; // changed by chase from 'svg.xml'
+ kungFuDeathGrip.appendChild(iframe);
+ // object iframe
+ object = document.createElement('object');
+ object.onload = function () { kungFuDeathGrip.title += '2' };
+ object.data = "svg.svg"; // changed by chase from 'svg.xml'
+ kungFuDeathGrip.appendChild(object);
+ // xml iframe
+ iframe = document.createElement('iframe');
+ iframe.onload = function () { kungFuDeathGrip.title += '3' };
+ iframe.src = "empty.xml";
+ kungFuDeathGrip.appendChild(iframe);
+ // html iframe
+ iframe = document.createElement('iframe');
+ iframe.onload = function () { kungFuDeathGrip.title += '4' };
+ iframe.src = "empty.html";
+ kungFuDeathGrip.appendChild(iframe);
+ // html iframe
+ iframe = document.createElement('iframe');
+ iframe.onload = function () { kungFuDeathGrip.title += '5' };
+ iframe.src = "xhtml.1.xhtml"; // changed by chase from 'xhtml.1'
+ kungFuDeathGrip.appendChild(iframe);
+ // html iframe
+ iframe = document.createElement('iframe');
+ iframe.onload = function () { kungFuDeathGrip.title += '6' };
+ iframe.src = "xhtml.2.xhtml"; // changed by chase from 'xhtml.2'
+ kungFuDeathGrip.appendChild(iframe);
+ // html iframe
+ iframe = document.createElement('iframe');
+ iframe.onload = function () { kungFuDeathGrip.title += '7' };
+ iframe.src = "xhtml.3.xhtml"; // changed by chase from 'xhtml.3'
+ kungFuDeathGrip.appendChild(iframe);
+ // add the lot to the document (subframes only load once in the document)
+ document.getElementsByTagName('map')[0].appendChild(kungFuDeathGrip);
+ return 5;
+ },
+ function () {
+ // test 66: localName on text nodes (and now other things), from Sylvain Pasche — localName must be null on non-element nodes
+ assertEquals(document.createTextNode("test").localName, null, 'wrong localName for text node');
+ assertEquals(document.createComment("test").localName, null, 'wrong localName for comment node');
+ assertEquals(document.localName, null, 'wrong localName for document node');
+ return 5;
+ },
+ function () {
+ // test 67: removedNamedItemNS on missing attributes, from Sylvain Pasche — must raise NOT_FOUND_ERR (DOMException code 8), not succeed and not raise anything else
+ var p = document.createElement("p");
+ var msg = 'wrong exception raised';
+ try {
+ p.attributes.removeNamedItemNS("http://www.example.com/", "absent");
+ msg = 'no exception raised';
+ } catch (e) {
+ if ('code' in e) {
+ if (e.code == 8)
+ msg = ''; // NOT_FOUND_ERR: the expected outcome
+ else
+ msg += '; code = ' + e.code;
+ }
+ }
+ assert(msg == '', "when calling removeNamedItemNS in a non existent attribute: " + msg);
+ return 5;
+ },
+ function () {
+ // test 68: UTF-16 surrogate pairs, from David Chan — all four Unicode-conformant
+ // treatments of an unpaired surrogate written into input.value are accepted.
+ //
+ // In The Unicode Standard 5.0, it is explicitly permitted to
+ // allow malformed UTF-16, that is, to leave the string alone.
+ // (http://www.unicode.org/versions/Unicode5.0.0):
+ //
+ // section 2.7: "...strings in ... ECMAScript are Unicode 16-bit
+ // strings, but are not necessarily well-formed UTF-16
+ // sequences. In normal processing, it can be far more
+ // efficient to allow such strings to contain code unit
+ // sequences that are not well-formed UTF-16 -- that is,
+ // isolated surrogates"
+ //
+ // On the other hand, if the application wishes to ensure
+ // well-formed character sequences, it may not permit the
+ // malformed sequence and it must regard the first codepoint as
+ // an error:
+ //
+ // Section 3.2: "C10. When a process interprets a code sequence
+ // which purports to be in a Unicode character encoding form, it
+ // shall treat ill-formed code unit sequences as an error
+ // condition and shall not interpret such sequences as
+ // characters.
+ // [...]
+ // For example, in UTF-8 every code unit of the form 110....2
+ // must be followed by a code unit of the form 10......2. A
+ // sequence such as 110.....2 0.......2 is ill-formed and must
+ // never be generated. When faced with this ill-formed code unit
+ // sequence while transforming or interpreting text, a
+ // conformant process must treat the first code unit 110.....2
+ // as an illegally terminated code unit sequence~Wfor example,
+ // by signaling an error, filtering the code unit out, or
+ // representing the code unit with a marker such as U+FFFD
+ // replacement character."
+ //
+ // So it would be permitted to do any of the following:
+ // 1) Leave the string alone
+ // 2) Remove the unpaired surrogate
+ // 3) Replace the unpaired surrogate with U+FFFD
+ // 4) Throw an exception
+
+ try {
+ var unpaired = String.fromCharCode(0xd863); // half a surrogate pair
+ var before = unpaired + "text";
+ var elt = document.createElement("input");
+ elt.value = before;
+ var after = elt.value;
+ }
+ catch(ex) {
+ return 5; // Unpaired surrogate caused an exception - ok
+ }
+ if (after == before && before.length == 5)
+ return 5; // Unpaired surrogate kept - ok
+ if (after == "text")
+ return 5; // Unpaired surrogate removed - ok
+ var replacement = String.fromCharCode(0xfffd);
+ if (after == replacement + "text")
+ return 5; // Unpaired surrogate replaced - ok
+ fail("Unpaired surrogate handled wrongly (input was '" + before + "', output was '" + after + "')");
+ },
+ function () {
+ // test 69: check that the support files loaded -- preparation for the rest of the tests in this bucket; returns "retry" (re-run later) until all seven onload digits from test 65 have arrived in kungFuDeathGrip.title
+ assert(!(kungFuDeathGrip == null), "kungFuDeathGrip was null");
+ assert(!(kungFuDeathGrip.title == null), "kungFuDeathGrip.title was null");
+ if (kungFuDeathGrip.title.length < 7)
+ return "retry";
+ assert(!(kungFuDeathGrip.firstChild == null), "kungFuDeathGrip.firstChild was null");
+ assert(!(kungFuDeathGrip.firstChild.contentDocument == null), "kungFuDeathGrip.firstChild.contentDocument was null");
+ assert(!(kungFuDeathGrip.firstChild.contentDocument.getElementsByTagName == null), "kungFuDeathGrip.firstChild.contentDocument.getElementsByTagName was null");
+ var t = kungFuDeathGrip.firstChild.contentDocument.getElementsByTagName('text')[0];
+ assert(!(t == null), "t was null");
+ assert(!(t.parentNode == null), "t.parentNode was null");
+ assert(!(t.parentNode.removeChild == null), "t.parentNode.removeChild was null");
+ t.parentNode.removeChild(t); // mutate the svg iframe's document; later svg tests depend on this
+ return 5;
+ },
+ function () {
+ // test 70: XML encoding test — a mis-encoded document must be treated as a fatal well-formedness error, so any of the early returns (no document, no parsed root, no content) passes
+ // the third child in kungFuDeathGrip is an ISO-8859-1 document sent as UTF-8.
+ // q.v. XML 1.0, section 4.3.3 Character Encoding in Entities
+ // this only tests one of a large number of conditions that should cause fatal errors
+ var doc = kungFuDeathGrip.childNodes[2].contentDocument;
+ if (!doc)
+ return 5;
+ if (doc.documentElement.tagName != "root")
+ return 5;
+ if (doc.documentElement.getElementsByTagName('test').length < 1)
+ return 5;
+ fail("UTF-8 encoded XML document with invalid character did not have a well-formedness error");
+ },
+ function () {
+ // test 71: HTML parsing, from Simon Pieters and Anne van Kesteren — document.write'd markup must produce the expected DocumentType node (name/publicId/systemId) and the implied HEAD/BODY tree structure
+ var doc = kungFuDeathGrip.childNodes[3].contentDocument;
+ assert(doc, "missing document for test");
+ try {
+ // siblings
+ doc.open();
+ doc.write("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 Transitional//EN\"><title><\/title><span><\/span><script type=\"text/javascript\"><\/script>");
+ doc.close();
+ assertEquals(doc.childNodes.length, 2, "wrong number of children in #document (first test)");
+ assertEquals(doc.firstChild.name, "HTML", "name wrong (first test)");
+ assertEquals(doc.firstChild.publicId, "-//W3C//DTD HTML 4.0 Transitional//EN", "publicId wrong (first test)");
+ if ((doc.firstChild.systemId != null) && (doc.firstChild.systemId != ""))
+ fail("systemId wrong (first test)");
+ if (('internalSubset' in doc.firstChild) || doc.firstChild.internalSubset)
+ assertEquals(doc.firstChild.internalSubset, null, "internalSubset wrong (first test)");
+ assertEquals(doc.documentElement.childNodes.length, 2, "wrong number of children in HTML (first test)");
+ assertEquals(doc.documentElement.firstChild.nodeName, "HEAD", "misplaced HEAD element (first test)");
+ assertEquals(doc.documentElement.firstChild.childNodes.length, 1, "wrong number of children in HEAD (first test)");
+ assertEquals(doc.documentElement.firstChild.firstChild.tagName, "TITLE", "misplaced TITLE element (first test)");
+ assertEquals(doc.documentElement.lastChild.nodeName, "BODY", "misplaced BODY element (first test)");
+ assertEquals(doc.documentElement.lastChild.childNodes.length, 2, "wrong number of children in BODY (first test)");
+ assertEquals(doc.documentElement.lastChild.firstChild.tagName, "SPAN", "misplaced SPAN element (first test)");
+ assertEquals(doc.documentElement.lastChild.lastChild.tagName, "SCRIPT", "misplaced SCRIPT element (first test)");
+ // parent/child
+ doc.open();
+ doc.write("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\" \"http://www.w3.org/TR/html4/loose.dtd\"><title><\/title><span><script type=\"text/javascript\"><\/script><\/span>");
+ doc.close();
+ assertEquals(doc.childNodes.length, 2, "wrong number of children in #document (first test)");
+ assertEquals(doc.firstChild.name, "HTML", "name wrong (second test)");
+ assertEquals(doc.firstChild.publicId, "-//W3C//DTD HTML 4.01 Transitional//EN", "publicId wrong (second test)");
+ assertEquals(doc.firstChild.systemId, "http://www.w3.org/TR/html4/loose.dtd", "systemId wrong (second test)");
+ if (('internalSubset' in doc.firstChild) || doc.firstChild.internalSubset)
+ assertEquals(doc.firstChild.internalSubset, null, "internalSubset wrong (second test)");
+ assertEquals(doc.documentElement.childNodes.length, 2, "wrong number of children in HTML (second test)");
+ assertEquals(doc.documentElement.firstChild.nodeName, "HEAD", "misplaced HEAD element (second test)");
+ assertEquals(doc.documentElement.firstChild.childNodes.length, 1, "wrong number of children in HEAD (second test)");
+ assertEquals(doc.documentElement.firstChild.firstChild.tagName, "TITLE", "misplaced TITLE element (second test)");
+ assertEquals(doc.documentElement.lastChild.nodeName, "BODY", "misplaced BODY element (second test)");
+ assertEquals(doc.documentElement.lastChild.childNodes.length, 1, "wrong number of children in BODY (second test)");
+ assertEquals(doc.documentElement.lastChild.firstChild.tagName, "SPAN", "misplaced SPAN element (second test)");
+ assertEquals(doc.documentElement.lastChild.firstChild.firstChild.tagName, "SCRIPT", "misplaced SCRIPT element (second test)");
+ } finally {
+ // prepare the file for the next test (tests 72/73 read this same contentDocument)
+ doc.open();
+ doc.write("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\"><head><title><\/title><style type=\"text/css\">img { height: 10px; }<\/style><body><p><img src=\"data:image/gif;base64,R0lGODlhAQABAID%2FAMDAwAAAACH5BAEAAAAALAAAAAABAAEAAAICRAEAOw%3D%3D\" alt=\"\">");
+ doc.close();
+ }
+ return 5;
+ },
+ function () {
+ // test 72: dynamic modification of <style> blocks' text nodes, from Jonas Sicking and Garret Smith — edits to a style element's text node, appended text nodes, and insertRule must all restyle immediately, and cssRules must be a live collection
+ var doc = kungFuDeathGrip.childNodes[3].contentDocument;
+ assert(doc, "missing document for test");
+ assert(doc.images[0], "prerequisite failed: no image");
+ assertEquals(doc.images[0].height, 10, "prerequisite failed: style didn't affect image");
+ doc.styleSheets[0].ownerNode.firstChild.data = "img { height: 20px; }";
+ assertEquals(doc.images[0].height, 20, "change failed to take effect");
+ doc.styleSheets[0].ownerNode.appendChild(doc.createTextNode("img { height: 30px; }"));
+ assertEquals(doc.images[0].height, 30, "append failed to take effect");
+ var rules = doc.styleSheets[0].cssRules; // "All CSS objects in the DOM are "live"" says section 2.1, Overview of the DOM Level 2 CSS Interfaces
+ doc.styleSheets[0].insertRule("img { height: 40px; }", 2);
+ assertEquals(doc.images[0].height, 40, "insertRule failed to take effect");
+ assertEquals(doc.styleSheets[0].cssRules.length, 3, "count of rules is wrong");
+ assertEquals(rules.length, 3, "cssRules isn't live");
+ // while we're at it, check some other things on doc.styleSheets:
+ assert(doc.styleSheets[0].href === null, "internal stylesheet had a URI: " + doc.styleSheets[0].href);
+ assert(document.styleSheets[0].href === null, "internal acid3 stylesheet had a URI: " + document.styleSheets[0].href);
+ return 5;
+ },
+ function () {
+ // test 73: nested events, from Jonas Sicking — recursively re-dispatching a custom event from inside its own listener must nest synchronously; 'up' counts entries, 'down' accumulates on unwind (sum 21..40 = 610-210 = 400) proving LIFO unwind order; the onclick handler must never fire for a non-click event
+ var doc = kungFuDeathGrip.childNodes[3].contentDocument;
+ // implied events
+ var up = 0;
+ var down = 0;
+ var button = doc.createElement("button");
+ button.type = "button";
+ button.onclick = function () { up += 1; if (up < 10) button.click(); down += up; }; // not called
+ button.addEventListener('test', function () { up += 1; var e = doc.createEvent("HTMLEvents"); e.initEvent('test', false, false); if (up < 20) button.dispatchEvent(e); down += up; }, false);
+ var evt = doc.createEvent("HTMLEvents");
+ evt.initEvent('test', false, false);
+ button.dispatchEvent(evt);
+ assertEquals(up, 20, "test event handler called the wrong number of times");
+ assertEquals(down, 400, "test event handler called in the wrong order");
+ return 5;
+ },
+ function () {
+ // test 74: check getSVGDocument(), from Erik Dahlstrom — both <iframe> and <object> referencing an SVG document must expose getSVGDocument() returning the same document as contentDocument
+ // GetSVGDocument[6]: "In the case where an SVG document is
+ // embedded by reference, such as when an XHTML document has an
+ // 'object' element whose href (or equivalent) attribute
+ // references an SVG document (i.e., a document whose MIME type
+ // is "image/svg+xml" and whose root element is thus an 'svg'
+ // element), the SVG user agent is required to implement the
+ // GetSVGDocument interface for the element which references the
+ // SVG document (e.g., the HTML 'object' or comparable
+ // referencing elements)."
+ //
+ // [6] http://www.w3.org/TR/SVG11/struct.html#InterfaceGetSVGDocument
+ //
+ // iframe
+ var iframe = kungFuDeathGrip.childNodes[0];
+ assert(iframe, "Failed finding svg iframe.");
+ assert(iframe.contentDocument, "contentDocument failed for <iframe> referencing an svg document.");
+ if (!iframe.getSVGDocument)
+ fail("getSVGDocument missing on <iframe> element.");
+ assert(iframe.getSVGDocument(), "getSVGDocument failed for <iframe> referencing an svg document.");
+ assert(iframe.getSVGDocument() == iframe.contentDocument, "Mismatch between getSVGDocument and contentDocument #1.");
+ // object
+ var object = kungFuDeathGrip.childNodes[1];
+ assert(object, "Failed finding svg object.");
+ assert(object.contentDocument, "contentDocument failed for <object> referencing an svg document.");
+ if (!object.getSVGDocument)
+ fail("getSVGDocument missing on <object> element.");
+ assert(object.getSVGDocument(), "getSVGDocument failed for <object> referencing an svg document.");
+ assert(object.getSVGDocument() == object.contentDocument, "Mismatch between getSVGDocument and contentDocument #2.");
+ return 5;
+ },
+ function () {
+ // test 75: SMIL in SVG, from Erik Dahlstrom
+ //
+ // The test begins by creating a few elements, among those is a
+ // <set> element. This element is prevented from running by
+ // setting begin="indefinite", which means that the animation
+ // doesn't start until the 'beginElement' DOM method is called
+ // on the <set> element. The animation is a simple animation
+ // that sets the value of the width attribute to 0. The duration
+ // of the animation is 'indefinite' which means that the value
+ // will stay 0 indefinitely. The target of the animation is the
+ // 'width' attribute of the <rect> element that is the parent of
+ // the <set> element. When 'width' is 0 the rect is not rendered
+ // according to the spec[7].
+ //
+ // Some properties of the SVGAnimatedLength[2] and SVGLength[8]
+ // are also inspected. Before the animation starts both baseVal
+ // and animVal contain the same values[2]. Then the animation is
+ // started by calling the beginElement method[9]. To make sure
+ // that time passes between the triggering of the animation and
+ // the time that the values are read out (in test #66), the
+ // current time is set to 1000 seconds using the setCurrentTime
+ // method[10].
+ //
+ // [2] http://www.w3.org/TR/SVG11/types.html#InterfaceSVGAnimatedLength
+ // [7] http://www.w3.org/TR/SVG11/shapes.html#RectElement
+ // [8] http://www.w3.org/TR/SVG11/types.html#InterfaceSVGLength
+ // [9] http://www.w3.org/TR/SVG11/animate.html#DOMInterfaces
+ // [10] http://www.w3.org/TR/SVG11/struct.html#InterfaceSVGSVGElement
+
+ var svgns = "http://www.w3.org/2000/svg";
+ var svgdoc = kungFuDeathGrip.firstChild.contentDocument;
+ assert(svgdoc, "contentDocument failed on <iframe> for svg document.");
+ var svg = svgdoc.documentElement;
+ var rect = svgdoc.createElementNS(svgns, "rect");
+ rect.setAttribute("fill", "red");
+ rect.setAttribute("width", "100");
+ rect.setAttribute("height", "100");
+ rect.setAttribute("id", "rect");
+ var anim = svgdoc.createElementNS(svgns, "set");
+ anim.setAttribute("begin", "indefinite");
+ anim.setAttribute("to", "0");
+ anim.setAttribute("attributeName", "width");
+ anim.setAttribute("dur", "indefinite");
+ anim.setAttribute("fill", "freeze");
+ rect.appendChild(anim);
+ svg.appendChild(rect);
+ assert(rect.width, "SVG DOM interface SVGRectElement not supported.");
+ assert(rect.width.baseVal, "SVG DOM base type SVGAnimatedLength not supported.");
+ assert(rect.width.animVal, "SVG DOM base type SVGAnimatedLength not supported.");
+ assertEquals(SVGLength.SVG_LENGTHTYPE_NUMBER, 1, "Incorrect SVGLength.SVG_LENGTHTYPE_NUMBER constant value.");
+ assertEquals(rect.width.baseVal.unitType, SVGLength.SVG_LENGTHTYPE_NUMBER, "Incorrect unitType on width attribute.");
+ assertEquals(rect.getAttribute("width"), "100", "Incorrect value from getAttribute.");
+ assertEquals(rect.width.baseVal.valueInSpecifiedUnits, 100, "Incorrect valueInSpecifiedUnits value.");
+ assertEquals(rect.width.baseVal.value, 100, "Incorrect baseVal value before animation.");
+ assertEquals(rect.width.animVal.value, 100, "Incorrect animVal value before animation.");
+ anim.beginElement();
+ assertEquals(rect.width.baseVal.value, 100, "Incorrect baseVal value after starting animation.");
+ svg.setCurrentTime(1000); // setting 1 second to make sure that time != 0s when we check the animVal value
+ // the animation is then tested in the next test
+ return 5;
+ },
+ function () {
+ // test 76: SMIL in SVG, part 2, from Erik Dahlstrom
+ //
+ // About animVal[2]: "If the given attribute or property is
+ // being animated, contains the current animated value of the
+ // attribute or property, and both the object itself and its
+ // contents are readonly. If the given attribute or property is
+ // not currently being animated, contains the same value as
+ // 'baseVal'."
+ //
+ // Since the duration of the animation is indefinite the value
+ // is still being animated at the time it's queried. Now since
+ // the 'width' attribute was animated from its original value of
+ // "100" to the new value of "0" the animVal property must
+ // contain the value 0.
+ //
+ // [2] http://www.w3.org/TR/SVG11/types.html#InterfaceSVGAnimatedLength
+
+ var svgdoc = kungFuDeathGrip.firstChild.contentDocument;
+ assert(svgdoc, "contentDocument failed on <object> for svg document.");
+ var rect = svgdoc.getElementById("rect");
+ assert(rect, "Failed to find <rect> element in svg document.");
+ assertEquals(rect.width.animVal.value, 0, "Incorrect animVal value after svg animation.");
+ return 5;
+ },
+ function () {
+ // test 77: external SVG fonts, from Erik Dahlstrom
+ //
+ // SVGFonts are described here[3], and the relevant DOM methods
+ // used in the test are defined here[4].
+ //
+ // Note that in order to be more predictable the svg should be
+ // visible, so that clause "For non-rendering environments, the
+ // user agent shall make reasonable assumptions about glyph
+ // metrics." doesn't influence the results. We use 'opacity:0'
+ // to hide the SVG, but arguably it's still a "rendering
+ // environment".
+ //
+ // The font-size 4000 was chosen because that matches the
+ // unitsPerEm value in the svgfont, which makes it easy to check
+ // the glyph advances since they will then be exactly what was
+ // specified in the svgfont.
+ //
+ // [3] http://www.w3.org/TR/SVG11/fonts.html
+ // [4] http://www.w3.org/TR/SVG11/text.html#InterfaceSVGTextContentElement
+
+ var svgns = "http://www.w3.org/2000/svg";
+ var xlinkns = "http://www.w3.org/1999/xlink";
+ var svgdoc = kungFuDeathGrip.firstChild.contentDocument;
+ assert(svgdoc, "contentDocument failed on <object> for svg document.");
+ var svg = svgdoc.documentElement;
+ var text = svgdoc.createElementNS(svgns, "text");
+ text.setAttribute("y", "1em");
+ text.setAttribute("font-size", "4000");
+ text.setAttribute("font-family", "ACID3svgfont");
+ var textContent = svgdoc.createTextNode("abc");
+ text.appendChild(textContent);
+ svg.appendChild(text);
+ // The font-size 4000 was chosen because that matches the unitsPerEm value in the svgfont,
+ // which makes it easy to check the glyph advances since they will then be exactly what was specified in the svgfont.
+ assert(text.getNumberOfChars, "SVGTextContentElement.getNumberOfChars() not supported.");
+ assertEquals(text.getNumberOfChars(), 3, "getNumberOfChars returned incorrect string length.");
+ assertEquals(text.getComputedTextLength(), 4711+42+23, "getComputedTextLength failed.");
+ assertEquals(text.getSubStringLength(0,1), 42, "getSubStringLength #1 failed.");
+ assertEquals(text.getSubStringLength(0,2), 42+23, "getSubStringLength #2 failed.");
+ assertEquals(text.getSubStringLength(1,1), 23, "getSubStringLength #3 failed.");
+ assertEquals(text.getSubStringLength(1,0), 0, "getSubStringLength #4 failed.");
+/* COMMENTED OUT BECAUSE SVGWG KEEPS CHANGING THIS
+ * var code = -1000;
+ * try {
+ * var sl = text.getSubStringLength(1,3);
+ * } catch(e) {
+ * code = e.code;
+ * }
+ * assertEquals(code, DOMException.INDEX_SIZE_ERR, "getSubStringLength #1 didn't throw exception.");
+ * code = -1000;
+ * try {
+ * var sl = text.getSubStringLength(0,4);
+ * } catch(e) {
+ * code = e.code;
+ * }
+ * assertEquals(code, DOMException.INDEX_SIZE_ERR, "getSubStringLength #2 didn't throw exception.");
+ * code = -1000;
+ * try {
+ * var sl = text.getSubStringLength(3,0);
+ * } catch(e) {
+ * code = e.code;
+ * }
+ * assertEquals(code, DOMException.INDEX_SIZE_ERR, "getSubStringLength #3 didn't throw exception.");
+ */
+ code = -1000;
+ try {
+ var sl = text.getSubStringLength(-17,20);
+ } catch(e) {
+ code = 0; // negative values might throw native exception since the api accepts only unsigned values
+ }
+ assert(code == 0, "getSubStringLength #4 didn't throw exception.");
+ assertEquals(text.getStartPositionOfChar(0).x, 0, "getStartPositionOfChar(0).x returned invalid value.");
+ assertEquals(text.getStartPositionOfChar(1).x, 42, "getStartPositionOfChar(1).x returned invalid value.");
+ assertEquals(text.getStartPositionOfChar(2).x, 42+23, "getStartPositionOfChar(2).x returned invalid value.");
+ assertEquals(text.getStartPositionOfChar(0).y, 4000, "getStartPositionOfChar(0).y returned invalid value.");
+ code = -1000;
+ try {
+ var val = text.getStartPositionOfChar(-1);
+ } catch(e) {
+ code = 0; // negative values might throw native exception since the api accepts only unsigned values
+ }
+ assert(code == 0, "getStartPositionOfChar #1 exception failed.");
+ code = -1000;
+ try {
+ var val = text.getStartPositionOfChar(4);
+ } catch(e) {
+ code = e.code;
+ }
+ assertEquals(code, DOMException.INDEX_SIZE_ERR, "getStartPositionOfChar #2 exception failed.");
+ assertEquals(text.getEndPositionOfChar(0).x, 42, "getEndPositionOfChar(0).x returned invalid value.");
+ assertEquals(text.getEndPositionOfChar(1).x, 42+23, "getEndPositionOfChar(1).x returned invalid value.");
+ assertEquals(text.getEndPositionOfChar(2).x, 42+23+4711, "getEndPositionOfChar(2).x returned invalid value.");
+ code = -1000;
+ try {
+ var val = text.getEndPositionOfChar(-17);
+ } catch(e) {
+ code = 0; // negative values might throw native exception since the api accepts only unsigned values
+ }
+ assert(code == 0, "getEndPositionOfChar #1 exception failed.");
+ code = -1000;
+ try {
+ var val = text.getEndPositionOfChar(4);
+ } catch(e) {
+ code = e.code;
+ }
+ assertEquals(code, DOMException.INDEX_SIZE_ERR, "getEndPositionOfChar #2 exception failed.");
+ return 5;
+ },
+ function () {
+ // test 78: SVG textPath and getRotationOfChar(), from Erik Dahlstrom
+ //
+ // The getRotationOfChar[4] method fetches the midpoint rotation
+ // of a glyph defined by a character (in this testcase there is
+ // a simple 1:1 correspondence between the two). The path is
+ // defined in the svg.xml file, and consists of first a line
+ // going down, then followed by a line that has a 45 degree
+ // slope and then followed by a horizontal line. The length of
+ // each path segment have been paired with the advance of each
+ // glyph, so that each glyph will be on each of the three
+ // different path segments (see text on a path layout rules[5]).
+ // Thus the rotation of the first glyph is 90 degrees, the
+ // second 45 degrees and the third 0 degrees.
+ //
+ // [4] http://www.w3.org/TR/SVG11/text.html#InterfaceSVGTextContentElement
+ // [5] http://www.w3.org/TR/SVG11/text.html#TextpathLayoutRules
+
+ var svgns = "http://www.w3.org/2000/svg";
+ var xlinkns = "http://www.w3.org/1999/xlink";
+ var svgdoc = kungFuDeathGrip.firstChild.contentDocument;
+ assert(svgdoc, "contentDocument failed on <object> for svg document.");
+ var svg = svgdoc.documentElement;
+ var text = svgdoc.createElementNS(svgns, "text");
+ text.setAttribute("font-size", "4000");
+ text.setAttribute("font-family", "ACID3svgfont");
+ var textpath = svgdoc.createElementNS(svgns, "textPath");
+ textpath.setAttributeNS(xlinkns, "xlink:href", "#path");
+ var textContent = svgdoc.createTextNode("abc");
+ textpath.appendChild(textContent);
+ text.appendChild(textpath);
+ svg.appendChild(text);
+ assertEquals(text.getRotationOfChar(0), 90, "getRotationOfChar(0) failed.");
+ assertEquals(text.getRotationOfChar(1), 45, "getRotationOfChar(1) failed.");
+ assertEquals(text.getRotationOfChar(2), 0, "getRotationOfChar(2) failed.");
+ var code = -1000;
+ try {
+ var val = text.getRotationOfChar(-1)
+ } catch(e) {
+ code = e.code;
+ }
+ assertEquals(code, DOMException.INDEX_SIZE_ERR, "getRotationOfChar #1 exception failed.");
+ code = -1000;
+ try {
+ var val = text.getRotationOfChar(4)
+ } catch(e) {
+ code = e.code;
+ }
+ assertEquals(code, DOMException.INDEX_SIZE_ERR, "getRotationOfChar #2 exception failed.");
+ return 5;
+ },
+ function () {
+ // test 79: a giant test for <svg:font>, from Cameron McCormack
+ // This tests various features of SVG fonts from SVG 1.1. It consists of
+ // a <text> element with 33 characters, styled using an SVG font that has
+ // different advance values for each glyph. The script uses
+ // SVGTextElementContent.getStartPositionOfChar() to determine where the
+ // glyph corresponding to each character was placed, and thus to work out
+ // whether the SVG font was used correctly.
+ //
+ // The font uses 100 units per em, and the text is set in 100px. Since
+ // font-size gives the size of the em box
+ // (http://www.w3.org/TR/SVG11/text.html#DOMInterfaces), the scale of the
+ // coordinate system for the glyphs is the same as the SVG document.
+ //
+ // The expectedAdvances array holds the expected advance value for each
+ // character, and expectedKerning holds the (negative) kerning for each
+ // character. getPositionOfChar() returns the actual x coordinate for the
+ // glyph, corresponding to the given character, and if multiple characters
+ // correspond to the same glyph, the same position value is returned for
+ // each of those characters.
+ //
+ // Here are the reasonings for the advance/kerning values. Note that for
+ // a given character at index i, the expected position is
+ // sum(expectedAdvances[0:i-1] + expectedKerning[0:i-1]).
+ //
+ // char advance kerning reasoning
+ // ------- ------- ------- --------------------------------------------------
+ // A 10000 0 Normal character mapping to a single glyph.
+ // B 0 0 First character of a two character glyph, so the
+ // current position isn't advanced until the second
+ // character.
+ // C 200 0 Second character of a two character glyph, so now
+ // the position is advanced.
+ // B 300 0 Although there is a glyph for "BC" in the font,
+ // it appears after the glyph for "B", so the single
+ // character glyph for "B" should be chosen instead.
+ // D 1100 0 Normal character mapping to a single glyph.
+ // A 10000 200 Kerning of -200 is specified in the font between
+ // the "A" and "EE" glyphs.
+ // E 0 0 The first character of a two character glyph "EE".
+ // E 1300 0 The second character of a two character glyph.
+ // U 0 0 This is a glyph for the six characters "U+0046",
+ // which happen to look like a valid unicode range.
+ // This tests that the <glyph unicode=""> in the
+ // font matches exact strings rather than a range,
+ // as used in the kerning elements.
+ // + 0 0 Second character of six character glyph.
+ // 0 0 0 Third character of six character glyph.
+ // 0 0 0 Fourth character of six character glyph.
+ // 4 0 0 Fifth character of six character glyph.
+ // 6 1700 0 Sixth character of six character glyph.
+ // U 0 0 The same six character glyph that looks like a
+ // Unicode range. One of the kerning elements has
+ // u1="U+0046" u2="U+0046", which shouldn't match
+ // this, because those attributes are interpreted
+ // as Unicode ranges if they are, and normal
+ // strings otherwise. Thus there should be no
+ // kerning between these two glyphs.
+ // G 2300 200 Kerning is between this character and the next
+ // "G", since there is an <hkern> element that
+ // uses a Unicode range on its u1="" attribute
+ // and a glyph name on its g2="" attribute which
+ // both match "G".
+ // G 2300 0 Normal character with kerning before it.
+ // H 3100 0 A glyph with graphical content describing the
+ // glyph, rather than a d="" attribute.
+ // I 4300 0 Glyphs are checked in document order for one
+ // that matches, but the first glyph with
+ // unicode="I" also has lang="zh", which disqualifies
+ // it. Thus the second glyph with unicode="I"
+ // is chosen.
+ // I 4100 0 Since this I has xml:lang="zh" on it in the text,
+ // the first glyph with lang="zh" matches.
+ // J 4700 -4700 A normal glyph with kerning between the "J" and the
+ // next glyph "A" equal to the advance of the "J"
+ // glyph, so the position should stay the same.
+ // A 10000 0 Normal glyph with kerning before it.
+ // K 5900 0 The first glyph with unicode="K" does not match,
+ // since it has orientation="v", so the second
+ // glyph with unicode="K" is chosen.
+ // <spc> 6100 0 The space character should select the glyph with
+ // unicode=" ", despite it having a misleading
+ // glyph-name="L".
+ // L 6700 0 The "L" character should select the glyph with
+ // unicode=" ", despite it having a misleading
+ // glyph-name="spacev".
+ // A 2900 0 An <altGlyph> element is used to select the
+ // glyph for U+10085 instead of the one for "A".
+ // U+10085 2900 0 Tests glyph selection with a non-plane-0
+ // character.
+ // A 10000 0 A final normal character.
+ //
+ // In addition, the script tests the value returned by
+ // SVGTextContentElement.getNumberOfChars(), which in this case should be 34.
+ // If it returned 33, then it incorrectly counted Unicode characters instead
+ // of UTF-16 codepoints (probably).
+ //
+ // See http://www.w3.org/TR/SVG11/fonts.html for a description of the glyph
+ // matching rules, and http://www.w3.org/TR/SVG11/text.html#DOMInterfaces
+ // for a description of getStartPositionOfChar() and getNumberOfChars().
+ //
+ // Note also that the test uses DOMImplementation.createDocument() to create
+ // the SVG document. This seems to cause browsers trouble for the SVG DOM
+ // interfaces, since the document isn't being "rendered" as it might be
+ // if it were in an <iframe>. Changing the test to use an <iframe> will
+ // at least let you see the main part of the test running.
+
+ var NS = {
+ svg: 'http://www.w3.org/2000/svg',
+ xml: 'http://www.w3.org/XML/1998/namespace',
+ xlink: 'http://www.w3.org/1999/xlink'
+ };
+
+ var doc = kungFuDeathGrip.childNodes[1].contentDocument;
+ while (doc.hasChildNodes())
+ doc.removeChild(doc.firstChild);
+ doc.appendChild(doc.createElementNS(NS.svg, "svg:svg"));
+
+ var e = function (n, as, cs) {
+ var elt = doc.createElementNS(NS.svg, n);
+ if (as) {
+ for (var an in as) {
+ var idx = an.indexOf(':');
+ var ns = null;
+ if (idx != -1)
+ ns = NS[an.substring(0, idx)];
+ elt.setAttributeNS(ns, an, as[an]);
+ }
+ }
+ if (cs) {
+ for (var i in cs) {
+ var c = cs[i];
+ elt.appendChild(typeof c == 'string' ? doc.createTextNode(c) : c);
+ }
+ }
+ return elt;
+ }
+
+ doc.documentElement.appendChild(e('font', { 'horiz-adv-x': '10000'}, [e('font-face', { 'font-family': 'HCl', 'units-per-em': '100', 'ascent': '1000', 'descent': '500'}), e('missing-glyph', null, [e('path', { 'd': 'M100,0 h800 v-100 h-800 z'})]), e('glyph', { 'unicode': 'A', 'd': 'M100,0 h100 v-100 h-100 z'}), e('glyph', { 'unicode': 'BC', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '200'}), e('glyph', { 'unicode': 'B', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '300'}), e('glyph', { 'unicode': 'C', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '500'}), e('glyph', { 'unicode': 'BD', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '700'}), e('glyph', { 'unicode': 'D', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '1100'}), e('glyph', { 'unicode': 'EE', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '1300', 'glyph-name': 'grapefruit'}), e('glyph', { 'unicode': 'U+0046', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '1700'}), e('glyph', { 'unicode': 'F', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '1900'}), e('glyph', { 'unicode': 'G', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '2300', 'glyph-name': 'gee'}), e('glyph', { 'unicode': '\uD800\uDC85', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '2900', 'id': 'astral'}), e('glyph', { 'unicode': 'H', 'horiz-adv-x': '3100'}, [e('path', { 'd': 'M100,0 h100 v-100 h-100 z'})]), e('glyph', { 'unicode': 'I', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '4100', 'lang': 'zh'}), e('glyph', { 'unicode': 'I', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '4300'}), e('glyph', { 'unicode': 'J', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '4700'}), e('glyph', { 'unicode': 'K', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '5300', 'orientation': 'v'}), e('glyph', { 'unicode': 'K', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '5900'}), e('glyph', { 'unicode': ' ', 'd': 'M100,0 h100 v-100 h-100 z', 'horiz-adv-x': '6100', 'glyph-name': 'L'}), e('glyph', { 'unicode': 'L', 'd': 'M100,0 
h100 v-100 h-100 z', 'horiz-adv-x': '6700', 'glyph-name': 'space'}), e('hkern', { 'u1': 'A', 'u2': 'EE', 'k': '1000'}), e('hkern', { 'u1': 'A', 'g2': 'grapefruit', 'k': '-200'}), e('hkern', { 'u1': 'U+0046', 'u2': 'U+0046', 'k': '-200'}), e('hkern', { 'u1': 'U+0047-0047', 'g2': 'gee', 'k': '-200'}), e('hkern', { 'u1': 'J', 'u2': 'A', 'k': '4700'})]));
+ doc.documentElement.appendChild(e('text', { 'y': '100', 'font-family': 'HCl', 'font-size': '100px', 'letter-spacing': '0px', 'word-spacing': '0px'}, ['ABCBDAEEU+0046U+0046GGHI', e('tspan', { 'xml:lang': 'zh'}, ['I']), 'JAK L', e('altGlyph', { 'xlink:href': '#astral'}, ['A']), '\uD800\uDC85A']));
+
+ var t = doc.documentElement.lastChild;
+
+ var characterDescriptions = [
+ "a normal character",
+ "the first character of a two-character glyph",
+ "the second character of a two-character glyph",
+ "a normal character, which shouldn't be the first character of a two-character glyph",
+ "a normal character, which shouldn't be the second character of a two-character glyph",
+ "a normal character, which has some kerning after it",
+ "the first character of a two-character glyph, which has some kerning before it",
+ "the second character of a two-character glyph, which has some kerning before it",
+ "the first character of a six-character glyph, which happens to look like a Unicode range, where the range-specified glyph has kerning after it, but this glyph does not",
+ "the second character of a six-character glyph, which happens to look like a Unicode range, where the range-specified glyph has kerning after it, but this glyph does not",
+ "the third character of a six-character glyph, which happens to look like a Unicode range, where the range-specified glyph has kerning after it, but this glyph does not",
+ "the fourth character of a six-character glyph, which happens to look like a Unicode range, where the range-specified glyph has kerning after it, but this glyph does not",
+ "the fifth character of a six-character glyph, which happens to look like a Unicode range, where the range-specified glyph has kerning after it, but this glyph does not",
+ "the sixth character of a six-character glyph, which happens to look like a Unicode range, where the range-specified glyph has kerning after it, but this glyph does not",
+ "the first character of a six-character glyph, which happens to look like a Unicode range, where the range-specified glyph has kerning before it, but this glyph does not",
+ "the second character of a six-character glyph, which happens to look like a Unicode range, where the range-specified glyph has kerning before it, but this glyph does not",
+ "the third character of a six-character glyph, which happens to look like a Unicode range, where the range-specified glyph has kerning before it, but this glyph does not",
+ "the fourth character of a six-character glyph, which happens to look like a Unicode range, where the range-specified glyph has kerning before it, but this glyph does not",
+ "the fifth character of a six-character glyph, which happens to look like a Unicode range, where the range-specified glyph has kerning before it, but this glyph does not",
+ "the sixth character of a six-character glyph, which happens to look like a Unicode range, where the range-specified glyph has kerning before it, but this glyph does not",
+ "a normal character, which has some kerning after it that is specified by glyph name",
+ "a normal character, which has some kerning before it that is specified by glyph name",
+ "a normal character, whose glyph is given by child graphical content of the <glyph> element",
+ "a normal character, whose glyph should not match the one with a lang=\"\" attribute on it",
+ "a normal character, whose glyph should match the one with a lang=\"\" attribute on it",
+ "a normal character, which has some kerning after it that is equal to the advance of the character",
+ "a normal character, which has some kerning before it that is equal to the advance of the previous character",
+ "a normal character, whose glyph should not match the one with an orientation=\"v\" attribute on it",
+ "a space character, which has a misleading glyph-name=\"\" attribute",
+ "a normal character, which has a misleading glyph-name=\"\" attribute",
+ "a normal character, whose glyph is chosen to be another by using <altGlyph>",
+ "a character not in Plane 0 (high surrogate pair)",
+ "a character not in Plane 0 (low surrogate pair)",
+ "a normal character",
+ ];
+
+ var expectedAdvances = [
+ 10000, // A
+ 0, // BC [0]
+ 200, // BC [1]
+ 300, // B
+ 1100, // D
+ 10000, // A
+ 0, // EE [0]
+ 1300, // EE [1]
+ 0, // U+0046 [0]
+ 0, // U+0046 [1]
+ 0, // U+0046 [2]
+ 0, // U+0046 [3]
+ 0, // U+0046 [4]
+ 1700, // U+0046 [5]
+ 0, // U+0046 [0]
+ 0, // U+0046 [1]
+ 0, // U+0046 [2]
+ 0, // U+0046 [3]
+ 0, // U+0046 [4]
+ 1700, // U+0046 [5]
+ 2300, // G
+ 2300, // G
+ 3100, // H
+ 4300, // I
+ 4100, // I (zh)
+ 4700, // J
+ 10000, // A
+ 5900, // K
+ 6100, // <space>
+ 6700, // L
+ 2900, // A (using &#x10085; altGlyph)
+ 0, // &#x10085; high surrogate pair
+ 2900, // &#x10085; low surrogate pair
+ 10000, // A
+ ];
+
+ var expectedKerning = [
+ 0, // A
+ 0, // BC [0]
+ 0, // BC [1]
+ 0, // B
+ 0, // D
+ 200, // A
+ 0, // EE [0]
+ 0, // EE [1]
+ 0, // U+0046 [0]
+ 0, // U+0046 [1]
+ 0, // U+0046 [2]
+ 0, // U+0046 [3]
+ 0, // U+0046 [4]
+ 0, // U+0046 [5]
+ 0, // U+0046 [0]
+ 0, // U+0046 [1]
+ 0, // U+0046 [2]
+ 0, // U+0046 [3]
+ 0, // U+0046 [4]
+ 0, // U+0046 [5]
+ 200, // G
+ 0, // G
+ 0, // H
+ 0, // I
+ 0, // I (zh)
+ -4700, // J
+ 0, // A
+ 0, // K
+ 0, // <space>
+ 0, // L
+ 0, // A (using &#x10085; altGlyph)
+ 0, // &#x10085; high surrogate pair
+ 0, // &#x10085; low surrogate pair
+ 0, // A
+ ];
+
+ assertEquals(t.getNumberOfChars(), expectedAdvances.length, 'SVGSVGTextElement.getNumberOfChars() incorrect');
+
+ var expectedPositions = [0];
+ for (var i = 0; i < expectedAdvances.length; i++)
+ expectedPositions.push(expectedPositions[i] + expectedAdvances[i] + expectedKerning[i]);
+
+ var actualPositions = [];
+ for (var i = 0; i < t.getNumberOfChars(); i++)
+ actualPositions.push(t.getStartPositionOfChar(i).x);
+ actualPositions.push(t.getEndPositionOfChar(t.getNumberOfChars() - 1).x);
+
+ for (var i = 0; i < expectedPositions.length; i++) {
+ if (expectedPositions[i] != actualPositions[i]) {
+ var s = "character position " + i + ", which is ";
+ if (i == 0) {
+ s += "before " + characterDescriptions[0];
+ } else if (i == expectedPositions.length - 1) {
+ s += "after " + characterDescriptions[characterDescriptions.length - 1];
+ } else {
+ s += "between " + characterDescriptions[i - 1] + " and " + characterDescriptions[i];
+ }
+ s += ", is " + actualPositions[i] + " but should be " + expectedPositions[i] + ".";
+ fail(s);
+ }
+ }
+ return 5;
+ },
+ function () {
+ // test 80: remove the iframes and the object
+ assert(!(kungFuDeathGrip == null), "kungFuDeathGrip was null");
+ assert(!(kungFuDeathGrip.parentNode == null), "kungFuDeathGrip.parentNode was null");
+ kungFuDeathGrip.parentNode.removeChild(kungFuDeathGrip);
+ kungFuDeathGrip = null;
+ // check that the xhtml files worked right
+ assert(notifications['xhtml.1'], "Script in XHTML didn't execute");
+ assert(!notifications['xhtml.2'], "XML well-formedness error didn't stop script from executing");
+ assert(!notifications['xhtml.3'], "Script executed despite having wrong namespace");
+ // while we're at it, check that the linktest is loaded
+ // since the other iframes have forcibly loaded by now, we assume that
+ // there's no way this can't have loaded by now
+ // (probably a safe bet)
+ var a = document.links[1];
+ assert(!(a == null), "linktest was null");
+ assert(a.textContent == "LINKTEST FAILED", "linktest link couldn't be found");
+ if (a.hasAttribute('class'))
+ return "retry"; // linktest onload didn't fire -- could be a networking issue, check that first
+ return 5;
+ },
+
+ // bucket 6: ECMAScript
+ function () {
+ // test 81: length of arrays with elisions at end
+ var t1 = [,];
+ var t2 = [,,];
+ assertEquals(t1.length, 1, "[,] doesn't have length 1");
+ assertEquals(t2.length, 2, "[,,] doesn't have length 2");
+ return 6;
+ },
+ function () {
+ // test 82: length of arrays with elisions in the middle
+ var t3 = ['a', , 'c'];
+ assertEquals(t3.length, 3, "['a',,'c'] doesn't have length 3");
+ assert(0 in t3, "no 0 in t3");
+ assert(!(1 in t3), "unexpected 1 in t3");
+ assert(2 in t3, "no 2 in t3");
+ assertEquals(t3[0], 'a', "t3[0] wrong");
+ assertEquals(t3[2], 'c', "t3[2] wrong");
+ return 6;
+ },
+ function () {
+ // test 83: array methods
+ var x = ['a', 'b', 'c'];
+ assertEquals(x.unshift('A', 'B', 'C'), 6, "array.unshift() returned the wrong value");
+ var s = x.join(undefined);
+ assertEquals(s, 'A,B,C,a,b,c', "array.join(undefined) used wrong separator"); // qv 15.4.4.5:3
+ return 6;
+ },
+ function () {
+ // test 84: converting numbers to strings
+ assertEquals((0.0).toFixed(4), "0.0000", "toFixed(4) wrong for 0");
+ assertEquals((-0.0).toFixed(4), "0.0000", "toFixed(4) wrong for -0");
+ assertEquals((0.00006).toFixed(4), "0.0001", "toFixed(4) wrong for 0.00006");
+ assertEquals((-0.00006).toFixed(4), "-0.0001", "toFixed(4) wrong for -0.00006");
+ assertEquals((0.0).toExponential(4), "0.0000e+0", "toExponential(4) wrong for 0");
+ assertEquals((-0.0).toExponential(4), "0.0000e+0", "toExponential(4) wrong for -0");
+ var x = 7e-4;
+ assertEquals(x.toPrecision(undefined), x.toString(undefined), "toPrecision(undefined) was wrong");
+ return 6;
+ },
+ function () {
+ // test 85: strings and string-related operations
+ // substr() and negative numbers
+ assertEquals("scathing".substr(-7, 3), "cat", "substr() wrong with negative numbers");
+ return 6;
+ },
+ function () {
+ // test 86: Date tests -- methods passed no arguments
+ var d = new Date();
+ assert(isNaN(d.setMilliseconds()), "calling setMilliseconds() with no arguments didn't result in NaN");
+ assert(isNaN(d), "date wasn't made NaN");
+ assert(isNaN(d.getDay()), "date wasn't made NaN");
+ return 6;
+ },
+ function () {
+ // test 87: Date tests -- years
+ var d1 = new Date(Date.UTC(99.9, 6));
+ assertEquals(d1.getUTCFullYear(), 1999, "Date.UTC() didn't do proper 1900 year offsetting");
+ var d2 = new Date(98.9, 6);
+ assertEquals(d2.getFullYear(), 1998, "new Date() didn't do proper 1900 year offsetting");
+ return 6;
+ },
+ function () {
+ // test 88: ES3 section 7.6:3 (unicode escapes can't be used to put non-identifier characters into identifiers)
+ // and there's no other place for them in the syntax (other than strings, of course)
+ var ok = false;
+ try {
+ eval("var test = { };\ntest.i= 0;\ntest.i\\u002b= 1;\ntest.i;\n");
+ } catch (e) {
+ ok = true;
+ }
+ assert(ok, "\\u002b was not considered a parse error in script");
+ return 6;
+ },
+ function () {
+ // test 89: Regular Expressions
+ var ok = true;
+ // empty classes in regexps
+ try {
+ eval("/TA[])]/.exec('TA]')");
+ // JS regexps aren't like Perl regexps, if their character
+ // classes start with a ] that means they're empty. So this
+ // is a syntax error; if we get here it's a bug.
+ ok = false;
+ } catch (e) { }
+ assert(ok, "orphaned bracket not considered parse error in regular expression literal");
+ try {
+ if (eval("/[]/.exec('')"))
+ ok = false;
+ } catch (e) {
+ ok = false;
+ }
+ assert(ok, "/[]/ either failed to parse or matched something");
+ return 6;
+ },
+ function () {
+ // test 90: Regular Expressions
+ // not back references.
+ assert(!(/(1)\0(2)/.test("12")), "NUL in regexp incorrectly ignored");
+ assert((/(1)\0(2)/.test("1" + "\0" + "2")), "NUL in regexp didn't match correctly");
+ assert(!(/(1)\0(2)/.test("1\02")), "octal 2 unexpectedly matched NUL");
+ assertEquals(nullInRegexpArgumentResult, "passed", "failed //.test() check"); // nothing to see here, move along now
+ // back reference to future capture
+ var x = /(\3)(\1)(a)/.exec('cat'); // the \3 matches the empty string, qv. ES3:15.10.2.9
+ assert(x, "/(\\3)(\\1)(a)/ failed to match 'cat'");
+ assertEquals(x.length, 4, "/(\\3)(\\1)(a)/ failed to return four components");
+ assertEquals(x[0], "a", "/(\\3)(\\1)(a)/ failed to find 'a' in 'cat'");
+ assert(x[1] === "", "/(\\3)(\\1)(a)/ failed to find '' in 'cat' as first part");
+ assert(x[2] === "", "/(\\3)(\\1)(a)/ failed to find '' in 'cat' as second part");
+ assertEquals(x[3], "a", "/(\\3)(\\1)(a)/ failed to find 'a' in 'cat' as third part");
+ // negative lookahead
+ x = /(?!(text))(te.t)/.exec("text testing");
+ assertEquals(x.length, 3, "negative lookahead test failed to return the right number of bits");
+ assertEquals(x[0], "test", "negative lookahead test failed to find the right text");
+ assert(x[1] === undefined, "negative lookahead test failed to return undefined for negative lookahead capture");
+ assert(x[2] === "test", "negative lookahead test failed to find the right second capture");
+ return 6;
+ },
+ function () {
+ // test 91: check that properties are enumerable by default
+ var test = {
+ constructor: function() { return 1; },
+ toString: function() { return 2; },
+ toLocaleString: function() { return 3; },
+ valueOf: function() { return 4; },
+ hasOwnProperty: function() { return 5; },
+ isPrototypeOf: function() { return 6; },
+ propertyIsEnumerable: function() { return 7; },
+ prototype: function() { return 8; },
+ length: function() { return 9; },
+ unique: function() { return 10; }
+ };
+ var results = [];
+ for (var property in test)
+ results.push([test[property](), property]);
+ results.sort(function(a, b) {
+ if (a[0] < b[0]) return -1;
+ if (a[0] > b[0]) return 1;
+ return 0;
+ });
+ assertEquals(results.length, 10, "missing properties");
+ for (var index = 0; index < 10; index += 1)
+ assertEquals(results[index][0], index+1, "order wrong at results["+index+"] == ");
+ var index = 0;
+ assertEquals(results[index++][1], "constructor", "failed to find constructor in expected position");
+ assertEquals(results[index++][1], "toString", "failed to find toString in expected position");
+ assertEquals(results[index++][1], "toLocaleString", "failed to find toLocaleString in expected position");
+ assertEquals(results[index++][1], "valueOf", "failed to find valueOf in expected position");
+ assertEquals(results[index++][1], "hasOwnProperty", "failed to find hasOwnProperty in expected position");
+ assertEquals(results[index++][1], "isPrototypeOf", "failed to find isPrototypeOf in expected position");
+ assertEquals(results[index++][1], "propertyIsEnumerable", "failed to find propertyIsEnumerable in expected position");
+ assertEquals(results[index++][1], "prototype", "failed to find prototype in expected position");
+ assertEquals(results[index++][1], "length", "failed to find length in expected position");
+ assertEquals(results[index++][1], "unique", "failed to find unique in expected position");
+ return 6;
+ },
+ function () {
+ // test 92: internal properties of Function objects
+ // constructor is not ReadOnly
+ var f1 = function () { 1 };
+ f1.prototype.constructor = "hello world";
+ var f1i = new f1();
+ assert(f1i.constructor === "hello world", "Function object's prototype's constructor was ReadOnly");
+ // constructor is DontEnum (indeed, no properties at all on a new Function object)
+ var f2 = function () { 2 };
+ var f2i = new f2();
+ var count = 0;
+ for (var property in f2i) {
+ assert(property != "constructor", "Function object's prototype's constructor was not DontEnum");
+ count += 1;
+ }
+ assertEquals(count, 0, "Function object had unexpected properties");
+ // constructor is not DontDelete
+ var f3 = function (a, b) { 3 };
+ delete f3.prototype.constructor;
+ var f3i = new f3();
+ assertEquals(f3i.constructor, Object.prototype.constructor, "Function object's prototype's constructor was DontDelete (or got magically replaced)");
+ return 6;
+ },
+ function () {
+ // test 93: FunctionExpression semantics
+ var functest;
+ var vartest = 0;
+ var value = (function functest(arg) {
+ if (arg)
+ return 1;
+ vartest = 1;
+ functest = function (arg) { return 2; }; // this line does nothing as 'functest' is ReadOnly here
+ return functest(true); // this is therefore tail recursion and returns 1
+ })(false);
+ assertEquals(vartest, 1, "rules in 10.1.4 not followed in FunctionBody");
+ assertEquals(value, 1, "semantics of FunctionExpression: function Identifier ... not followed");
+ assert(!functest, "Property in step 4 of FunctionExpression: function Identifier ... leaked to parent scope");
+ return 6;
+ },
+ function () {
+ // test 94: exception scope
+ var test = 'pass';
+ try {
+ throw 'fail';
+ } catch (test) {
+ test += 'ing';
+ }
+ assertEquals(test, 'pass', 'outer scope poisoned by exception catch{} block');
+ return 6;
+ },
+ function () {
+ // test 95: types of expressions
+ var a = []; var s;
+ s = a.length = "2147483648";
+ assertEquals(typeof s, "string", "type of |\"2147483648\"| is not string");
+ return 6;
+ },
+ function () {
+ // test 96: encodeURI() and encodeURIComponent() and null bytes
+ assertEquals(encodeURIComponent(String.fromCharCode(0)), '%00', "encodeURIComponent failed to encode U+0000");
+ assertEquals(encodeURI(String.fromCharCode(0)), '%00', "encodeURI failed to encode U+0000");
+ return 6;
+ },
+
+ // URIs
+ function () {
+ // test 97: data: URI parsing
+ assertEquals(d1, "one", "data: failed as escaped");
+ assertEquals(d2, "two", "data: failed as base64");
+ assertEquals(d3, "three", "data: failed as base64 escaped");
+ assertEquals(d4, "four", "data: failed as base64 with spaces");
+ assertEquals(d5, "five's", "data: failed with backslash");
+ return 7;
+ },
+
+ // XHTML
+ function () {
+ // test 98: XHTML and the DOM
+ // (special test)
+ var doctype = document.implementation.createDocumentType("html", "-//W3C//DTD XHTML 1.0 Strict//EN", "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd");
+ assertEquals(doctype.ownerDocument, null, "doctype's ownerDocument was wrong after creation");
+ var doc = document.implementation.createDocument("http://www.w3.org/1999/xhtml", "html", doctype);
+ doc.documentElement.appendChild(doc.createElementNS("http://www.w3.org/1999/xhtml", "head"));
+ doc.documentElement.appendChild(doc.createElementNS("http://www.w3.org/1999/xhtml", "body"));
+ var t = doc.createElementNS("http://www.w3.org/1999/xhtml", "title");
+ doc.documentElement.firstChild.appendChild(t);
+ // ok we have a conforming XHTML1 doc in |doc| now.
+ assertEquals(doctype.ownerDocument, doc, "doctype's ownerDocument didn't change when it was assigned to another document");
+ assertEquals(doc.title, "", "document had unexpected title");
+ t.textContent = "Sparrow";
+ assertEquals(doc.title, "Sparrow", "document.title did not update dynamically");
+ doc.body.appendChild(doc.createElementNS("http://www.w3.org/1999/xhtml", "form"));
+ assertEquals(doc.forms.length, 1, "document.forms not updated after inserting a form");
+ return 7;
+ },
+
+ // Sanity
+ function () {
+ // test 99: check for the weirdest bug ever
+ var a = document.createElement('a');
+ a.setAttribute('href', 'http://www.example.com/');
+ a.appendChild(document.createTextNode('www.example.com'));
+ a.href = 'http://hixie.ch/';
+ assertEquals(a.firstChild.data, "www.example.com", "sanity did not prevail");
+ a.href = 'http://damowmow.com/';
+ assertEquals(a.firstChild.data, "www.example.com", "final test failed");
+ return 7;
+ }
+
+ ];
+ var log = '';
+ var delay = 10;
+ var score = 0, index = 0, retry = 0, errors = 0;
+ function update() {
+ var span = document.getElementById('score'); // not cached by JS
+ span.nextSibling.removeAttribute('class'); // no-op after first loop
+ span.nextSibling.nextSibling.firstChild.data = tests.length; // no-op after first loop
+ if (index < tests.length) {
+ var zeroPaddedIndex = index < 10 ? '0' + index : index;
+ try {
+ var beforeTest = new Date();
+ var result = tests[index]();
+ var elapsedTest = new Date() - beforeTest;
+ if (result == "retry") {
+          // some tests use this magical mechanism to wait for support files to load
+ // we will give this test 500 attempts (5000ms) before aborting
+ retry += 1;
+ if (retry < 500) {
+ setTimeout(update, delay);
+ return;
+ }
+ fail("timeout -- could be a networking issue");
+ } else if (result) {
+ var bucket = document.getElementById('bucket' + result);
+ if (bucket)
+ bucket.className += 'P';
+ score += 1;
+ if (retry > 0) {
+ errors += 1;
+ log += "Test " + zeroPaddedIndex + " passed, but took " + retry + " attempts (less than perfect).\n";
+ } else if (elapsedTest > 33) { // 30fps
+ errors += 1;
+ log += "Test " + zeroPaddedIndex + " passed, but took " + elapsedTest + "ms (less than 30fps)\n";
+ }
+ } else {
+ fail("no error message");
+ }
+ } catch (e) {
+ var s;
+ if (e.message)
+ s = e.message.replace(/\s+$/, "");
+ else
+ s = e;
+ errors += 1;
+ log += "Test " + zeroPaddedIndex + " failed: " + s + "\n";
+ };
+ retry = 0;
+ index += 1;
+ span.firstChild.data = score;
+ setTimeout(update, delay);
+ } else {
+ var endTime = new Date();
+ var elapsedTime = ((endTime - startTime) - (delay * tests.length)) / 1000;
+ log += "Total elapsed time: " + elapsedTime.toFixed(2) + "s";
+ if (errors == 0)
+ log += "\nNo JS errors and no timing issues.\nWas the rendering pixel-for-pixel perfect too?";
+ test_complete(tests.length - score, endTime - startTime);
+ }
+ }
+ function running() {
+ if (index < tests.length) {
+ return true;
+ } else {
+ return false;
+ }
+ }
+ function report(event) {
+ // for debugging either click the "A" in "Acid3" (to get an alert) or shift-click it (to get a report)
+ if (event.shiftKey) {
+ var w = window.open();
+ w.document.write('<pre>Failed ' + (tests.length - score) + ' of ' + tests.length + ' tests.\n' +
+ log.replace(/&/g,'&amp;').replace(RegExp('<', 'g'), '&lt;').replace('\0', '\\0') +
+ '<\/pre>');
+ w.document.close();
+ } else {
+ alert('Failed ' + (tests.length - score) + ' test' + (score == 1 ? '' : 's') + '.\n' + log)
+ }
+ }
+ </script>
+ <script src="head.js"></script>
+ <body onload="update() /* this attribute's value is tested in one of the tests */ ">
+ <h1 onclick="report(event)">Acid3</h1>
+ <div class="buckets"
+ ><p id="bucket1" class="z"></p
+ ><p id="bucket2" class="z"></p
+ ><p id="bucket3" class="z"></p
+ ><p id="bucket4" class="z"></p
+ ><p id="bucket5" class="z"></p
+ ><p id="bucket6" class="z"></p>
+ </div>
+ <p id="result"><span id="score">JS</span><span id="slash" class="hidden">/</span><span>?</span></p>
+ <!-- The following line is used in a number of the tests. It is done using document.write() to sidestep complaints of validity. -->
+ <script type="text/javascript">document.write('<map name=""><area href="" shape="rect" coords="2,2,4,4" alt="<\'>"><iframe src="empty.png">FAIL<\/iframe><iframe src="empty.txt">FAIL<\/iframe><iframe src="empty.html" id="selectors"><\/iframe><form action="" name="form"><input type=HIDDEN><\/form><table><tr><td><p><\/tbody> <\/table><\/map>');</script>
+ <p id="instructions">To pass the test,<span></span> a browser must use its default settings, the animation has to be smooth, the score has to end on 100/100, and the final page has to look exactly, pixel for pixel, like <a href="reference.html">this reference rendering</a>.</p>
+ <p id="remove-last-child-test">Scripting must be enabled to use this test.</p>
+ </body>
+</html>
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/reference.html b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/reference.html
new file mode 100644
index 00000000000..22b10f2a99c
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/reference.html
@@ -0,0 +1,21 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0//EN">
+<html>
+ <title>The Acid3 Test (Reference Rendering)</title>
+ <link rel="icon" href="http://example.invalid/">
+ <style type="text/css">
+ html { margin: 0; padding: 0; }
+ body { background: #c0c0c0 url(reference.png) top left no-repeat; margin: 0; padding: 0; }
+ #a { font: bold 100px/120px Arial, sans-serif; position: absolute; top: 57px; left: 57px; color: #000000; z-index: 1; }
+ #a0 { font: bold 100px/120px Arial, sans-serif; position: absolute; top: 60px; left: 60px; color: #C0C0C0; z-index: 0; }
+ #b { position: absolute; top: 230px; left: 625px; width: 0; white-space: pre; }
+ #b div { font: bold 100px/120px Arial, sans-serif; position: absolute; right: 0; text-align: right; color: #000000; }
+ #c { font: 16px/19.2px Arial, sans-serif; color: #808080; width: 562px; position: absolute; top: 350px; left: 57px; }
+ #c a { color: #0000FF; }
+ </style>
+ <body>
+ <div id="a">Acid3</div>
+ <div id="a0">Acid3</div>
+ <div id="b"><div>100/100</div></div>
+ <div id="c">To pass the test,<span></span> a browser must use its default settings, the animation has to be smooth, the score has to end on 100/100, and the final page has to look exactly, pixel for pixel, like <a href="reference.html">this reference rendering</a>.</div>
+ </body>
+</html> \ No newline at end of file
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/support-b.png b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/support-b.png
new file mode 100644
index 00000000000..752ee7ec05c
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/support-b.png
@@ -0,0 +1 @@
+<!DOCTYPE html><html><head><title>FAIL</title><style> * { background: transparent; } </style></head><body><p><!-- this file is transparent --></p></body></html> \ No newline at end of file
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/support-c.png b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/support-c.png
new file mode 100644
index 00000000000..9f240083deb
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/support-c.png
Binary files differ
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/svg.svg b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/svg.svg
new file mode 100644
index 00000000000..0c7737a68d4
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/svg.svg
@@ -0,0 +1,3 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- chase added XML 1.0 UTF-8 DTD -->
+<?xml-stylesheet href="data:text/css,text%7Bfont-family%3AACID3svgfont%7D"?><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" width="100" height="100"><defs><font-face font-family="ACID3svgfont"><font-face-src><font-face-uri xlink:href="font.svg#mini"/></font-face-src></font-face><path id="path" d="M0 0l0 42l16 16l4711 0"/></defs><text>X</text></svg>
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/xhtml.1.xhtml b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/xhtml.1.xhtml
new file mode 100644
index 00000000000..5d2d430361b
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/xhtml.1.xhtml
@@ -0,0 +1,11 @@
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <title>Test</title>
+ </head>
+ <body>
+ <p> <strong> XHTML Test </strong> </p>
+ <script type="text/javascript">
+ parent.notify("xhtml.1")
+ </script>
+ </body>
+</html> \ No newline at end of file
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/xhtml.2.xhtml b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/xhtml.2.xhtml
new file mode 100644
index 00000000000..ab4017c501a
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/xhtml.2.xhtml
@@ -0,0 +1,11 @@
+<html xmlns="http://www.w3.org/1999/xhtml">
+ <head>
+ <title>Test</title>
+ </head>
+ <body>
+ <p> <strong/> Parsing Test </strong> </p>
+ <script type="text/javascript">
+ parent.notify("xhtml.2")
+ </script>
+ </body>
+</html> \ No newline at end of file
diff --git a/chromium/tools/page_cycler/acid3/acid3.acidtests.org/xhtml.3.xhtml b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/xhtml.3.xhtml
new file mode 100644
index 00000000000..6ed6b5dd49a
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/acid3.acidtests.org/xhtml.3.xhtml
@@ -0,0 +1,11 @@
+<html xmlns="http://www.w3.org/1999/xhtml#">
+ <head>
+ <title>Test</title>
+ </head>
+ <body>
+ <p> <strong> Namespace Test </strong> </p>
+ <script type="text/javascript">
+ parent.notify("xhtml.3")
+ </script>
+ </body>
+</html> \ No newline at end of file
diff --git a/chromium/tools/page_cycler/acid3/pages.js b/chromium/tools/page_cycler/acid3/pages.js
new file mode 100644
index 00000000000..466e1161e69
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/pages.js
@@ -0,0 +1,31 @@
+// Copyright (c) 2006-2009 The Chromium Authors. All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+var __pages = [
+ "acid3.acidtests.org",
+];
diff --git a/chromium/tools/page_cycler/acid3/start.html b/chromium/tools/page_cycler/acid3/start.html
new file mode 100644
index 00000000000..c30e8d840e1
--- /dev/null
+++ b/chromium/tools/page_cycler/acid3/start.html
@@ -0,0 +1,7 @@
+<html>
+<body>
+<h3>Note: You must have started chrome with <tt>--enable-file-cookies</tt> for this test to work manually.</h3>
+<script src="pages.js"></script>
+<script src="../common/start.js"></script>
+</body>
+</html>
diff --git a/chromium/tools/page_cycler/common/head.js b/chromium/tools/page_cycler/common/head.js
new file mode 100644
index 00000000000..dddaa7ee0ac
--- /dev/null
+++ b/chromium/tools/page_cycler/common/head.js
@@ -0,0 +1,128 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+var __c = ""; // that's good enough for me.
+var __td;
+var __tf;
+var __tl;
+var __iterations;
+var __cycle;
+var __results = false;
+var __page;
+var __TIMEOUT = 15;
+function __get_cookie(name) {
+ var cookies = document.cookie.split("; ");
+ for (var i = 0; i < cookies.length; ++i) {
+ var t = cookies[i].split("=");
+ if (t[0] == name && t[1])
+ return t[1];
+ }
+ return "";
+}
+function __pages() { // fetch lazily
+ if (!("data" in this))
+ this.data = __get_cookie("__pc_pages").split(",");
+ return this.data;
+}
+function __get_timings() {
+ if (sessionStorage != null &&
+ sessionStorage.getItem("__pc_timings") != null) {
+ return sessionStorage["__pc_timings"];
+ } else {
+ return __get_cookie("__pc_timings");
+ }
+}
+function __set_timings(timings) {
+ if (sessionStorage == null)
+ document.cookie = "__pc_timings=" + timings + "; path=/";
+ else
+ sessionStorage["__pc_timings"]=timings;
+}
+function __ontimeout() {
+ var doc;
+
+ // Call GC twice to cleanup JS heap before starting a new test.
+ if (window.gc) {
+ window.gc();
+ window.gc();
+ }
+
+ var timings = __tl;
+ var oldTimings = __get_timings();
+ if (oldTimings != "") {
+ timings = oldTimings + "," + timings;
+ }
+ __set_timings(timings);
+
+ var ts = (new Date()).getTime();
+ var tlag = (ts - __te) - __TIMEOUT;
+ if (tlag > 0)
+ __tf = __tf + tlag;
+ if (__cycle == (__pages().length * __iterations)) {
+ document.cookie = "__pc_done=1; path=/";
+ doc = "../../common/report.html";
+ if (window.console) {
+ console.log("Pages: [" + __get_cookie('__pc_pages') + "]");
+ console.log("times: [" + __get_timings() + "]");
+ }
+ } else {
+ doc = "../" + __pages()[__page] + "/index.html";
+ }
+
+ var url = doc + "?n=" + __iterations + "&i=" + __cycle + "&p=" + __page +
+ "&ts=" + ts + "&td=" + __td + "&tf=" + __tf;
+ document.location.href = url;
+}
+function __onload() {
+ if (__results) {
+ // Set a variable to indicate that the result report page is loaded.
+ document.cookie = "__navigated_to_report=1; path=/";
+ return;
+ }
+ var unused = document.body.offsetHeight; // force layout
+
+ var ts = 0, td = 0, te = (new Date()).getTime(), tf = 0;
+
+ var s = document.location.search;
+ if (s) {
+ var params = s.substring(1).split('&');
+ for (var i = 0; i < params.length; ++i) {
+ var f = params[i].split('=');
+ switch (f[0]) {
+ case 'skip':
+ // No calculation, just viewing
+ return;
+ case 'n':
+ __iterations = f[1];
+ break;
+ case 'i':
+ __cycle = (f[1] - 0) + 1;
+ break;
+ case 'p':
+ __page = ((f[1] - 0) + 1) % __pages().length;
+ break;
+ case 'ts':
+ ts = (f[1] - 0);
+ break;
+ case 'td':
+ td = (f[1] - 0);
+ break;
+ case 'tf':
+ tf = (f[1] - 0);
+ break;
+ }
+ }
+ }
+ __tl = (te - ts);
+ __td = td + __tl;
+ __te = te;
+ __tf = tf; // record t-fudge
+
+ setTimeout("__ontimeout()", __TIMEOUT);
+}
+
+if (window.attachEvent)
+ window.attachEvent("onload", __onload);
+else
+ addEventListener("load", __onload, false);
diff --git a/chromium/tools/page_cycler/common/report.html b/chromium/tools/page_cycler/common/report.html
new file mode 100644
index 00000000000..9833fbe8ee1
--- /dev/null
+++ b/chromium/tools/page_cycler/common/report.html
@@ -0,0 +1,183 @@
+<html>
+<head>
+<style>
+.discarded {
+ color: #C0C0C0;
+}
+</style>
+<h2>Summary</h2>
+<dl>
+<script src="head.js"></script>
+<script>
+var __results = true;
+var cycles = 0;
+var s = document.location.search.substring(1);
+var params = s.split('&');
+var iterations, pages, totalTime, fudgeTime;
+for (var i = 0; i < params.length; ++i) {
+ var f = params[i].split('=');
+ switch (f[0]) {
+ case 'n':
+ iterations = (f[1] - 0);
+ break;
+ case 'i':
+ cycle = (f[1] - 0);
+ break;
+ case 'td':
+ totalTime = (f[1] - 0);
+ break;
+ case 'tf':
+ fudgeTime = (f[1] - 0);
+ break;
+ }
+}
+var pages = cycle / iterations;
+document.write("<table border=1>");
+document.write("<tr><td>iterations</td><td>" + iterations + "</td></tr>");
+document.write("<tr><td>pages</td><td>" + pages + "</td></tr>");
+document.write("<tr><td>milliseconds</td><td>" + totalTime + "</td></tr>");
+document.write("<tr><td>mean per set</td><td>" + (totalTime / iterations).toFixed(2) + "</td></tr>");
+document.write("<tr><td>mean per page</td><td>" + (totalTime / iterations / pages).toFixed(2) + "</td></tr>");
+document.write("<tr><td>timer lag</td><td>" + (fudgeTime).toFixed(2) + "</td></tr>");
+document.write("<tr><td>timer lag per page</td><td>" + (fudgeTime / iterations / pages).toFixed(2) + "</td></tr>");
+document.write("</table>");
+
+ // returns an object with the following properties:
+ // min : min value of array elements
+ // max : max value of array elements
+ // mean : mean value of array elements
+ // vari : variance computation
+ // stdd : standard deviation, sqrt(vari)
+ // indexOfMax : index of max element (the element that is
+ // removed from the mean computation)
+ function getArrayStats(ary) {
+ var r = {};
+ r.min = ary[0];
+ r.max = ary[0];
+ r.indexOfMax = 0;
+ var sum = 0;
+ for (var i = 0; i < ary.length; ++i) {
+ if (ary[i] < r.min) {
+ r.min = ary[i];
+ } else if (ary[i] > r.max) {
+ r.max = ary[i];
+ r.indexOfMax = i;
+ }
+ sum = sum + ary[i];
+ }
+
+ // ignore max value when computing mean and stddev
+ r.mean = (sum - r.max) / (ary.length - 1);
+
+ r.vari = 0;
+ for (var i = 0; i < ary.length; ++i) {
+ if (i == r.indexOfMax)
+ continue;
+ var d = r.mean - ary[i];
+ r.vari = r.vari + d * d;
+ }
+
+ r.vari = r.vari / (ary.length - 1);
+ r.stdd = Math.sqrt(r.vari);
+ r.errp = r.stdd / Math.sqrt((ary.length - 1) / 2) / r.mean * 100;
+ return r;
+ }
+
+ function appendTableCol(tr, text, linkify) {
+ var doc = tr.ownerDocument;
+ var td = doc.createElement("TD");
+
+ if (linkify) {
+ var anchor = doc.createElement("A");
+ if (text.indexOf('http://localhost:') == 0 ||
+ text.indexOf('file://') == 0) {
+ // URLs for page cycler HTTP and file tests.
+ anchor.href = text + "/index.html?skip=true";
+ } else {
+ // For Web Page Replay, URLs are same as recorded pages.
+ anchor.href = text;
+ }
+ anchor.appendChild(doc.createTextNode(text));
+ td.appendChild(anchor);
+ }
+ else
+ td.appendChild(doc.createTextNode(text));
+ tr.appendChild(td);
+ return td;
+ }
+
+ function getTimeVals() {
+ var rawData = __get_timings().split(",");
+ var timeVals = [];
+ for (var i = 0; i < iterations; ++i) {
+ for (var j = 0; j < pages; ++j) {
+ if (!timeVals[j])
+ timeVals[j] = [];
+ timeVals[j].push(parseInt(rawData[j + i*pages]));
+ }
+ }
+ return timeVals;
+ }
+
+ function showReport() {
+ var tbody = document.getElementById("tbody");
+ var colsums = [0,0,0,0,0];
+ var timeVals = getTimeVals();
+ for (var i = 0; i < timeVals.length; ++i) {
+ var tr = document.createElement("TR");
+
+ appendTableCol(tr, __pages()[i], true);
+
+ var r = getArrayStats(timeVals[i]);
+ appendTableCol(tr, r.min.toFixed(2));
+ appendTableCol(tr, r.max.toFixed(2));
+ appendTableCol(tr, r.mean.toFixed(2));
+ appendTableCol(tr, r.stdd.toFixed(2));
+ appendTableCol(tr, r.errp.toFixed(2));
+ //appendTableCol(tr, r.chi2.toFixed(2));
+
+ for (var j = 0; j < timeVals[i].length; ++j) {
+ var tv = timeVals[i][j];
+ var td = appendTableCol(tr, tv);
+ if (j == r.indexOfMax)
+ td.setAttribute("class", "discarded");
+ }
+
+ colsums[0] = colsums[0] + r.min;
+ colsums[1] = colsums[1] + r.max;
+ colsums[2] = colsums[2] + r.mean;
+ colsums[3] = colsums[3] + r.stdd;
+ colsums[4] = colsums[4] + r.errp;
+
+ tbody.appendChild(tr);
+ }
+
+ var tr = document.createElement("TR");
+ appendTableCol(tr, "totals:");
+ for (var k = 0; k < colsums.length; ++k)
+ appendTableCol(tr, colsums[k].toFixed(2));
+ tbody.appendChild(tr);
+ }
+ window.onload = showReport;
+
+</script>
+</dl>
+</head>
+<body>
+<h2>Complete Statistics</h2>
+<table border="1">
+<thead>
+<tr>
+ <th>Site</th>
+ <th>Min</th>
+ <th>Max</th>
+ <th>Mean</th>
+ <th>Std.d</th>
+ <th>Err %</th>
+ <th colspan="10">Runs</th>
+</tr>
+</thead>
+<tbody id="tbody"></tbody>
+</table>
+</body>
+</html>
diff --git a/chromium/tools/page_cycler/common/start.js b/chromium/tools/page_cycler/common/start.js
new file mode 100644
index 00000000000..f43994e369f
--- /dev/null
+++ b/chromium/tools/page_cycler/common/start.js
@@ -0,0 +1,82 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+document.title = 'page cycler';
+
+// The __pages is assumed an array which containing the directories for
+// various pages to exercise. Some page cycler tests don't have this variable.
+
+var initialPage;
+var hasVariablePages = (typeof __pages != 'undefined') &&
+ (__pages instanceof Array);
+if (hasVariablePages)
+ initialPage = __pages[0];
+
+document.cookie = '__navigated_to_report=0; path=/';
+document.cookie = '__pc_done=0; path=/';
+if (hasVariablePages)
+ document.cookie = '__pc_pages=' + __pages + '; path=/';
+document.cookie = '__pc_timings=; path=/';
+
+var options = location.search.substring(1).split('&');
+
+function getopt(name) {
+ var r = new RegExp('^' + name + '=');
+ for (var i = 0; i < options.length; ++i) {
+ if (options[i].match(r)) {
+ return options[i].substring(name.length + 1);
+ }
+ }
+ return null;
+}
+
+function start() {
+ var iterations = document.getElementById('iterations').value;
+ window.resizeTo(800, 800);
+ var ts = (new Date()).getTime();
+ var url = '';
+ if (hasVariablePages)
+ url = initialPage + '/';
+ url += 'index.html?n=' + iterations + '&i=0&p=0&ts=' + ts + '&td=0';
+ window.location = url;
+}
+
+function render_form() {
+ var form = document.createElement('FORM');
+ form.onsubmit = function(e) {
+ start();
+ e.preventDefault();
+ };
+
+ var label = document.createTextNode('Iterations: ');
+ form.appendChild(label);
+
+ var input = document.createElement('INPUT');
+ input.setAttribute('id', 'iterations');
+ input.setAttribute('type', 'number');
+ var iterations = getopt('iterations');
+ input.setAttribute('value', iterations ? iterations : '5');
+ form.appendChild(input);
+
+ input = document.createElement('INPUT');
+ input.setAttribute('type', 'submit');
+ input.setAttribute('value', 'Start');
+ form.appendChild(input);
+
+ document.body.appendChild(form);
+}
+
+render_form();
+
+// should we start automatically?
+if (location.search.match('auto=1')) {
+ start();
+} else {
+ if (!window.gc) {
+ document.write('<h3 style=\'color:red\'>WARNING: window.gc is not ' +
+ 'defined. Test results may be unreliable! You must ' +
+ 'started chrome also with <tt>--js-flags=\"--expose_gc\"' +
+ '</tt> for this test to work manually</h3>');
+ }
+}
diff --git a/chromium/tools/page_cycler/sample/page1/index.html b/chromium/tools/page_cycler/sample/page1/index.html
new file mode 100644
index 00000000000..26a2e3a8022
--- /dev/null
+++ b/chromium/tools/page_cycler/sample/page1/index.html
@@ -0,0 +1,9 @@
+<html>
+ <head>
+ <title>Page1</title>
+ <script src="../../common/head.js"></script>
+ </head>
+ <body>
+ <h1>Page1</h1>
+ </body>
+</html>
diff --git a/chromium/tools/page_cycler/sample/page2/index.html b/chromium/tools/page_cycler/sample/page2/index.html
new file mode 100644
index 00000000000..a020a97929e
--- /dev/null
+++ b/chromium/tools/page_cycler/sample/page2/index.html
@@ -0,0 +1,9 @@
+<html>
+ <head>
+ <title>Page2</title>
+ <script src="../../common/head.js"></script>
+ </head>
+ <body>
+ <h1>Page2</h1>
+ </body>
+</html>
diff --git a/chromium/tools/page_cycler/sample/page3/index.html b/chromium/tools/page_cycler/sample/page3/index.html
new file mode 100644
index 00000000000..1f819a26ef7
--- /dev/null
+++ b/chromium/tools/page_cycler/sample/page3/index.html
@@ -0,0 +1,9 @@
+<html>
+ <head>
+ <title>Page3</title>
+ <script src="../../common/head.js"></script>
+ </head>
+ <body>
+ <h1>Page3</h1>
+ </body>
+</html>
diff --git a/chromium/tools/page_cycler/sample/page4/index.html b/chromium/tools/page_cycler/sample/page4/index.html
new file mode 100644
index 00000000000..0079dd931c5
--- /dev/null
+++ b/chromium/tools/page_cycler/sample/page4/index.html
@@ -0,0 +1,9 @@
+<html>
+ <head>
+ <title>Page4</title>
+ <script src="../../common/head.js"></script>
+ </head>
+ <body>
+ <h1>Page4</h1>
+ </body>
+</html>
diff --git a/chromium/tools/page_cycler/sample/pages.js b/chromium/tools/page_cycler/sample/pages.js
new file mode 100644
index 00000000000..a5121314837
--- /dev/null
+++ b/chromium/tools/page_cycler/sample/pages.js
@@ -0,0 +1,10 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+var __pages = [
+ "page1",
+ "page2",
+ "page3",
+ "page4",
+];
diff --git a/chromium/tools/page_cycler/sample/start.html b/chromium/tools/page_cycler/sample/start.html
new file mode 100644
index 00000000000..c30e8d840e1
--- /dev/null
+++ b/chromium/tools/page_cycler/sample/start.html
@@ -0,0 +1,7 @@
+<html>
+<body>
+<h3>Note: You must have started chrome with <tt>--enable-file-cookies</tt> for this test to work manually.</h3>
+<script src="pages.js"></script>
+<script src="../common/start.js"></script>
+</body>
+</html>
diff --git a/chromium/tools/page_cycler/startup_test_common/README b/chromium/tools/page_cycler/startup_test_common/README
new file mode 100644
index 00000000000..4054b08b52b
--- /dev/null
+++ b/chromium/tools/page_cycler/startup_test_common/README
@@ -0,0 +1,4 @@
+This directory is meant to override the common/ directory in order to allow
+page cycler data to be used for startup tests. It works primarily by
+substituting head.js for a version that doesn't redirect to the next page in the
+suite and instead only performs a console.log() when the page has loaded.
diff --git a/chromium/tools/page_cycler/startup_test_common/blank.html b/chromium/tools/page_cycler/startup_test_common/blank.html
new file mode 100644
index 00000000000..acb46db51c6
--- /dev/null
+++ b/chromium/tools/page_cycler/startup_test_common/blank.html
@@ -0,0 +1,5 @@
+<script>
+ window.onload = function() {
+ console.log("PAGE_ONLOAD_EVENT" + location.hash);
+ };
+</script>
diff --git a/chromium/tools/page_cycler/startup_test_common/head.js b/chromium/tools/page_cycler/startup_test_common/head.js
new file mode 100644
index 00000000000..768ec54b0a9
--- /dev/null
+++ b/chromium/tools/page_cycler/startup_test_common/head.js
@@ -0,0 +1,7 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+window.onload = function() {
+ console.log("PAGE_ONLOAD_EVENT" + location.hash);
+};
diff --git a/chromium/tools/perf/chrome_telemetry_build/telemetry_binary_manager.isolate b/chromium/tools/perf/chrome_telemetry_build/telemetry_binary_manager.isolate
index da18846ee79..8d7d8f5c564 100644
--- a/chromium/tools/perf/chrome_telemetry_build/telemetry_binary_manager.isolate
+++ b/chromium/tools/perf/chrome_telemetry_build/telemetry_binary_manager.isolate
@@ -3,16 +3,6 @@
# found in the LICENSE file.
{
'conditions': [
- ['OS=="win"', {
- 'variables': {
- 'files': [
- '<(PRODUCT_DIR)/crash_service<(EXECUTABLE_SUFFIX)',
- '../../../components/crash/content/tools/generate_breakpad_symbols.py',
- ],
- },
- }],
- ],
- 'conditions': [
['OS=="android" or OS=="linux" or OS=="mac" or OS=="win"', {
'variables': {
'files': [
diff --git a/chromium/tools/perf/chrome_telemetry_build/telemetry_chrome_test.isolate b/chromium/tools/perf/chrome_telemetry_build/telemetry_chrome_test.isolate
index f87dcd48602..86bce4b6dc7 100644
--- a/chromium/tools/perf/chrome_telemetry_build/telemetry_chrome_test.isolate
+++ b/chromium/tools/perf/chrome_telemetry_build/telemetry_chrome_test.isolate
@@ -6,8 +6,18 @@
# that run in Chromium.
'includes': [
'../../../chrome/breakpad.isolate',
+ '../../../chrome/cdb.isolate',
'../../../chrome/chrome.isolate',
- '../../telemetry/telemetry.isolate',
'telemetry_binary_manager.isolate',
],
+ 'conditions': [
+ ['OS=="android" or OS=="linux" or OS=="mac" or OS=="win"', {
+ 'variables': {
+ 'files': [
+ '../../../third_party/catapult/',
+ ],
+ },
+ }],
+ ]
+
}
diff --git a/chromium/tools/perf/perf.isolate b/chromium/tools/perf/perf.isolate
index 234d72428e3..20cb79683b7 100644
--- a/chromium/tools/perf/perf.isolate
+++ b/chromium/tools/perf/perf.isolate
@@ -10,7 +10,6 @@
'variables': {
'files': [
'./',
- '../../build/android/pylib/',
# Field trial configs
'../variations/',
'../../testing/variations/',
diff --git a/chromium/tools/polymer/OWNERS b/chromium/tools/polymer/OWNERS
new file mode 100644
index 00000000000..ef266f3ab66
--- /dev/null
+++ b/chromium/tools/polymer/OWNERS
@@ -0,0 +1,3 @@
+dzhioev@chromium.org
+jklein@chromium.org
+michaelpg@chromium.org
diff --git a/chromium/tools/polymer/generate_compiled_resources_gyp.py b/chromium/tools/polymer/generate_compiled_resources_gyp.py
new file mode 100755
index 00000000000..38f81578ab4
--- /dev/null
+++ b/chromium/tools/polymer/generate_compiled_resources_gyp.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from bs4 import BeautifulSoup
+from datetime import date
+import os.path as path
+import sys
+
+
+_SRC = path.join(path.dirname(path.abspath(__file__)), "..", "..")
+_COMPILE_JS = path.join(
+ _SRC, "third_party", "closure_compiler", "compile_js2.gypi")
+_POLYMERS = ["polymer%s.html" % p for p in "", "-mini", "-micro"]
+_WEB_ANIMATIONS_BASE = "web-animations.html"
+_WEB_ANIMATIONS_TARGET = "<(EXTERNS_GYP):web_animations"
+_COMPILED_RESOURCES_TEMPLATE = """
+# Copyright %d The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# NOTE: Created with %s, please do not edit.
+{
+ 'targets': [
+ %s
+ ],
+}
+""".strip()
+
+
+def main(created_by, html_files):
+ targets = ""
+
+ for html_file in html_files:
+ html_base = path.basename(html_file)
+ if html_base in _POLYMERS:
+ continue
+
+ parsed = BeautifulSoup(open(html_file), "html.parser")
+ imports = set(i.get("href") for i in parsed.find_all("link", rel="import"))
+
+ html_dir = path.dirname(html_file)
+ dependencies = []
+
+ for html_import in sorted(imports):
+ import_dir, import_base = path.split(html_import.encode("ascii"))
+ if import_base in _POLYMERS:
+ continue
+
+ if import_base == _WEB_ANIMATIONS_BASE:
+ dependencies.append(_WEB_ANIMATIONS_TARGET)
+ continue
+
+ target = import_base[:-5] + "-extracted"
+ if not path.isfile(path.join(html_dir, import_dir, target + ".js")):
+ continue
+
+ if import_dir:
+ target = "compiled_resources2.gyp:" + target
+
+ dependencies.append(path.join(import_dir, target))
+
+ path_to_compile_js = path.relpath(_COMPILE_JS, html_dir)
+
+ targets += "\n {"
+ targets += "\n 'target_name': '%s-extracted'," % html_base[:-5]
+ if dependencies:
+ targets += "\n 'dependencies': ["
+ targets += "\n '%s'," % "',\n '".join(dependencies)
+ targets += "\n ],"
+ targets += "\n 'includes': ['%s']," % path_to_compile_js
+ targets += "\n },"
+
+ targets = targets.strip()
+
+ if targets:
+ current_year = date.today().year
+ print _COMPILED_RESOURCES_TEMPLATE % (current_year, created_by, targets)
+
+
+if __name__ == "__main__":
+ main(path.basename(sys.argv[0]), sys.argv[1:])
diff --git a/chromium/tools/polymer/polymer_grdp_to_txt.py b/chromium/tools/polymer/polymer_grdp_to_txt.py
new file mode 100755
index 00000000000..9940faa36de
--- /dev/null
+++ b/chromium/tools/polymer/polymer_grdp_to_txt.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+import xml.sax
+
+
+class PathsExtractor(xml.sax.ContentHandler):
+
+ def __init__(self):
+ self.paths = []
+
+ def startElement(self, name, attrs):
+ if name != 'structure':
+ return
+ path = attrs['file']
+ if path.startswith('../../../third_party/web-animations-js'):
+ return
+ prefix_1_0 = '../../../third_party/polymer/v1_0/components-chromium/'
+ if path.startswith(prefix_1_0):
+ self.paths.append(path[len(prefix_1_0):])
+ else:
+ raise Exception("Unexpected path %s." % path)
+
+def main(argv):
+ xml_handler = PathsExtractor()
+ xml.sax.parse(argv[1], xml_handler)
+ print '\n'.join(sorted(xml_handler.paths))
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/chromium/tools/polymer/txt_to_polymer_grdp.py b/chromium/tools/polymer/txt_to_polymer_grdp.py
new file mode 100755
index 00000000000..02e275a8cac
--- /dev/null
+++ b/chromium/tools/polymer/txt_to_polymer_grdp.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from __future__ import with_statement
+import os
+import string
+import sys
+
+
+FILE_TEMPLATE = \
+"""<?xml version="1.0" encoding="utf-8"?>
+<!--
+ This file is generated.
+ Please use 'src/tools/polymer/polymer_grdp_to_txt.py' and
+ 'src/tools/polymer/txt_to_polymer_grdp.py' to modify it, if possible.
+
+ 'polymer_grdp_to_txt.py' converts 'polymer_resources.grdp' to a plane list of
+ used Polymer components:
+ ...
+ iron-iron-iconset/iron-iconset-extracted.js
+ iron-iron-iconset/iron-iconset.html
+ ...
+
+ 'txt_to_polymer_grdp.py' converts list back to GRDP file.
+
+ Usage:
+ $ polymer_grdp_to_txt.py polymer_resources.grdp > /tmp/list.txt
+ $ vim /tmp/list.txt
+ $ txt_to_polymer_grdp.py /tmp/list.txt > polymer_resources.grdp
+-->
+<grit-part>
+ <!-- Polymer 1.0 -->
+%(v_1_0)s
+ <structure name="IDR_POLYMER_1_0_WEB_ANIMATIONS_JS_WEB_ANIMATIONS_NEXT_LITE_MIN_JS"
+ file="../../../third_party/web-animations-js/sources/web-animations-next-lite.min.js"
+ type="chrome_html" />
+</grit-part>
+"""
+
+
+DEFINITION_TEMPLATE_1_0 = \
+""" <structure name="%s"
+ file="../../../third_party/polymer/v1_0/components-chromium/%s"
+ type="chrome_html" />"""
+
+
+def PathToGritId(path):
+ table = string.maketrans(string.lowercase + '/.-', string.uppercase + '___')
+ return 'IDR_POLYMER_1_0_' + path.translate(table)
+
+
+def SortKey(record):
+ return (record, PathToGritId(record))
+
+
+def ParseRecord(record):
+ return record.strip()
+
+
+class FileNotFoundException(Exception):
+ pass
+
+
+_HERE = os.path.dirname(os.path.realpath(__file__))
+_POLYMER_DIR = os.path.join(_HERE, os.pardir, os.pardir,
+ 'third_party', 'polymer', 'v1_0', 'components-chromium')
+
+
+def main(argv):
+ with open(argv[1]) as f:
+ records = [ParseRecord(r) for r in f if not r.isspace()]
+ lines = { 'v_1_0': [] }
+ for path in sorted(set(records), key=SortKey):
+ full_path = os.path.normpath(os.path.join(_POLYMER_DIR, path))
+ if not os.path.exists(full_path):
+ raise FileNotFoundException('%s not found' % full_path)
+
+ template = DEFINITION_TEMPLATE_1_0
+ lines['v_1_0'].append(
+ template % (PathToGritId(path), path))
+ print FILE_TEMPLATE % { 'v_1_0': '\n'.join(lines['v_1_0']) }
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/chromium/tools/prepare-bisect-perf-regression.py b/chromium/tools/prepare-bisect-perf-regression.py
new file mode 100755
index 00000000000..0029a793a85
--- /dev/null
+++ b/chromium/tools/prepare-bisect-perf-regression.py
@@ -0,0 +1,70 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prepare Performance Test Bisect Tool
+
+This script is used by a try bot to create a working directory and sync an
+initial copy of the depot for use in bisecting performance regressions.
+
+An example usage:
+
+./tools/prepare-bisect-perf-regression.py --working_directory "~/builds"
+ --output_buildbot_annotations
+
+Would result in creating ~/builds/bisect and then populating it with a copy of
+the depot.
+"""
+
+import optparse
+import sys
+
+from auto_bisect import bisect_utils
+
+
+def main():
+ """Does an initial checkout of Chromium then exits."""
+
+ usage = ('%prog [options] [-- chromium-options]\n'
+ 'Prepares a temporary depot for use on a try bot.')
+
+ parser = optparse.OptionParser(usage=usage)
+
+ parser.add_option('-w', '--working_directory',
+ type='str',
+ help='Path to the working directory where the script will '
+ 'do an initial checkout of the chromium depot. The '
+ 'files will be placed in a subdirectory "bisect" under '
+ 'working_directory and that will be used to perform the '
+ 'bisection.')
+ parser.add_option('--output_buildbot_annotations',
+ action='store_true',
+ help='Add extra annotation output for buildbot.')
+ parser.add_option('--target_platform',
+ type='choice',
+ choices=['chromium', 'cros', 'android'],
+ default='chromium',
+ help='The target platform. Choices are "chromium" (current '
+ 'platform), "cros", or "android". If you specify something '
+ 'other than "chromium", you must be properly set up to '
+ 'build that platform.')
+ opts, _ = parser.parse_args()
+
+ if not opts.working_directory:
+ print 'Error: missing required parameter: --working_directory'
+ print
+ parser.print_help()
+ return 1
+
+ if not bisect_utils.CheckIfBisectDepotExists(opts):
+ try:
+ bisect_utils.CreateBisectDirectoryAndSetupDepot(
+ opts, bisect_utils.DEFAULT_GCLIENT_CUSTOM_DEPS)
+ except RuntimeError:
+ return 1
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/python/google/__init__.py b/chromium/tools/python/google/__init__.py
new file mode 100644
index 00000000000..8b137891791
--- /dev/null
+++ b/chromium/tools/python/google/__init__.py
@@ -0,0 +1 @@
+
diff --git a/chromium/tools/python/google/gethash_timer.py b/chromium/tools/python/google/gethash_timer.py
new file mode 100755
index 00000000000..9c4bd460895
--- /dev/null
+++ b/chromium/tools/python/google/gethash_timer.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Issue a series of GetHash requests to the SafeBrowsing servers and measure
+the response times.
+
+Usage:
+
+ $ ./gethash_timer.py --period=600 --samples=20 --output=resp.csv
+
+ --period (or -p): The amount of time (in seconds) to wait between GetHash
+ requests. Use a value of more than 300 (5 minutes) to
+ include the effect of DNS.
+
+ --samples (or -s): The number of requests to issue. If this parameter is not
+ specified, the test will run indefinitely.
+
+ --output (or -o): The path to a file where the output will be written in
+ CSV format: sample_number,response_code,elapsed_time_ms
+"""
+
+import getopt
+import httplib
+import sys
+import time
+
+_GETHASH_HOST = 'safebrowsing.clients.google.com'
+_GETHASH_REQUEST = (
+ '/safebrowsing/gethash?client=googleclient&appver=1.0&pver=2.1')
+
+# Global logging file handle.
+g_file_handle = None
+
+
+def IssueGetHash(prefix):
+ '''Issue one GetHash request to the safebrowsing servers.
+ Args:
+ prefix: A 4 byte value to look up on the server.
+ Returns:
+ The HTTP response code for the GetHash request.
+ '''
+ body = '4:4\n' + prefix
+ h = httplib.HTTPConnection(_GETHASH_HOST)
+ h.putrequest('POST', _GETHASH_REQUEST)
+ h.putheader('content-length', str(len(body)))
+ h.endheaders()
+ h.send(body)
+ response_code = h.getresponse().status
+ h.close()
+ return response_code
+
+
+def TimedGetHash(prefix):
+ '''Measure the amount of time it takes to receive a GetHash response.
+ Args:
+ prefix: A 4 byte value to look up on the server.
+ Returns:
+ A tuple of HTTP response code and the response time (in milliseconds).
+ '''
+ start = time.time()
+ response_code = IssueGetHash(prefix)
+ return response_code, (time.time() - start) * 1000
+
+
+def RunTimedGetHash(period, samples=None):
+ '''Runs an experiment to measure the amount of time it takes to receive
+ multiple responses from the GetHash servers.
+
+ Args:
+ period: A floating point value that indicates (in seconds) the delay
+ between requests.
+ samples: An integer value indicating the number of requests to make.
+ If 'None', the test continues indefinitely.
+ Returns:
+ None.
+ '''
+ global g_file_handle
+ prefix = '\x50\x61\x75\x6c'
+ sample_count = 1
+ while True:
+ response_code, elapsed_time = TimedGetHash(prefix)
+ LogResponse(sample_count, response_code, elapsed_time)
+ sample_count += 1
+ if samples is not None and sample_count == samples:
+ break
+ time.sleep(period)
+
+
+def LogResponse(sample_count, response_code, elapsed_time):
+ '''Output the response for one GetHash query.
+ Args:
+ sample_count: The current sample number.
+ response_code: The HTTP response code for the GetHash request.
+ elapsed_time: The round-trip time (in milliseconds) for the
+ GetHash request.
+ Returns:
+ None.
+ '''
+ global g_file_handle
+ output_list = (sample_count, response_code, elapsed_time)
+ print 'Request: %d, status: %d, elapsed time: %f ms' % output_list
+ if g_file_handle is not None:
+ g_file_handle.write(('%d,%d,%f' % output_list) + '\n')
+ g_file_handle.flush()
+
+
+def SetupOutputFile(file_name):
+ '''Open a file for logging results.
+ Args:
+ file_name: A path to a file to store the output.
+ Returns:
+ None.
+ '''
+ global g_file_handle
+ g_file_handle = open(file_name, 'w')
+
+
+def main():
+ period = 10
+ samples = None
+
+ options, args = getopt.getopt(sys.argv[1:],
+ 's:p:o:',
+ ['samples=', 'period=', 'output='])
+ for option, value in options:
+ if option == '-s' or option == '--samples':
+ samples = int(value)
+ elif option == '-p' or option == '--period':
+ period = float(value)
+ elif option == '-o' or option == '--output':
+ file_name = value
+ else:
+ print 'Bad option: %s' % option
+ return 1
+ try:
+ print 'Starting Timed GetHash ----------'
+ SetupOutputFile(file_name)
+ RunTimedGetHash(period, samples)
+ except KeyboardInterrupt:
+ pass
+
+ print 'Timed GetHash complete ----------'
+ g_file_handle.close()
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/python/google/httpd_config/httpd.conf b/chromium/tools/python/google/httpd_config/httpd.conf
new file mode 100644
index 00000000000..0457be4ff30
--- /dev/null
+++ b/chromium/tools/python/google/httpd_config/httpd.conf
@@ -0,0 +1,734 @@
+##
+## httpd.conf -- Apache HTTP server configuration file
+##
+
+#
+# Based upon the NCSA server configuration files originally by Rob McCool.
+#
+# This is the main Apache server configuration file. It contains the
+# configuration directives that give the server its instructions.
+# See <URL:http://httpd.apache.org/docs/> for detailed information about
+# the directives.
+#
+# Do NOT simply read the instructions in here without understanding
+# what they do. They're here only as hints or reminders. If you are unsure
+# consult the online docs. You have been warned.
+#
+# After this file is processed, the server will look for and process
+# /private/etc/httpd/srm.conf and then /private/etc/httpd/access.conf
+# unless you have overridden these with ResourceConfig and/or
+# AccessConfig directives here.
+#
+# The configuration directives are grouped into three basic sections:
+# 1. Directives that control the operation of the Apache server process as a
+# whole (the 'global environment').
+# 2. Directives that define the parameters of the 'main' or 'default' server,
+# which responds to requests that aren't handled by a virtual host.
+# These directives also provide default values for the settings
+# of all virtual hosts.
+# 3. Settings for virtual hosts, which allow Web requests to be sent to
+# different IP addresses or hostnames and have them handled by the
+# same Apache server process.
+#
+# Configuration and logfile names: If the filenames you specify for many
+# of the server's control files begin with "/" (or "drive:/" for Win32), the
+# server will use that explicit path. If the filenames do *not* begin
+# with "/", the value of ServerRoot is prepended -- so "logs/foo.log"
+# with ServerRoot set to "/usr/local/apache" will be interpreted by the
+# server as "/usr/local/apache/logs/foo.log".
+#
+
+### Section 1: Global Environment
+#
+# The directives in this section affect the overall operation of Apache,
+# such as the number of concurrent requests it can handle or where it
+# can find its configuration files.
+#
+
+#
+# ServerType is either inetd, or standalone. Inetd mode is only supported on
+# Unix platforms.
+#
+ServerType standalone
+
+#
+# ServerRoot: The top of the directory tree under which the server's
+# configuration, error, and log files are kept.
+#
+# NOTE! If you intend to place this on an NFS (or otherwise network)
+# mounted filesystem then please read the LockFile documentation
+# (available at <URL:http://www.apache.org/docs/mod/core.html#lockfile>);
+# you will save yourself a lot of trouble.
+#
+#ServerRoot "/usr"
+
+#
+# The LockFile directive sets the path to the lockfile used when Apache
+# is compiled with either USE_FCNTL_SERIALIZED_ACCEPT or
+# USE_FLOCK_SERIALIZED_ACCEPT. This directive should normally be left at
+# its default value. The main reason for changing it is if the logs
+# directory is NFS mounted, since the lockfile MUST BE STORED ON A LOCAL
+# DISK. The PID of the main server process is automatically appended to
+# the filename.
+#
+#LockFile "/private/var/run/httpd.lock"
+
+#
+# PidFile: The file in which the server should record its process
+# identification number when it starts.
+#
+PidFile "/tmp/WebKit/httpd.pid"
+
+#
+# ScoreBoardFile: File used to store internal server process information.
+# Not all architectures require this. But if yours does (you'll know because
+# this file will be created when you run Apache) then you *must* ensure that
+# no two invocations of Apache share the same scoreboard file.
+#
+ScoreBoardFile "/tmp/WebKit/httpd.scoreboard"
+
+#
+# In the standard configuration, the server will process httpd.conf (this
+# file, specified by the -f command line option), srm.conf, and access.conf
+# in that order. The latter two files are now distributed empty, as it is
+# recommended that all directives be kept in a single file for simplicity.
+# The commented-out values below are the built-in defaults. You can have the
+# server ignore these files altogether by using "/dev/null" (for Unix) or
+# "nul" (for Win32) for the arguments to the directives.
+#
+ResourceConfig /dev/null
+AccessConfig /dev/null
+
+#
+# Timeout: The number of seconds before receives and sends time out.
+#
+Timeout 300
+
+#
+# KeepAlive: Whether or not to allow persistent connections (more than
+# one request per connection). Set to "Off" to deactivate.
+#
+KeepAlive On
+
+#
+# MaxKeepAliveRequests: The maximum number of requests to allow
+# during a persistent connection. Set to 0 to allow an unlimited amount.
+# We recommend you leave this number high, for maximum performance.
+#
+MaxKeepAliveRequests 100
+
+#
+# KeepAliveTimeout: Number of seconds to wait for the next request from the
+# same client on the same connection.
+#
+KeepAliveTimeout 15
+
+#
+# Server-pool size regulation. Rather than making you guess how many
+# server processes you need, Apache dynamically adapts to the load it
+# sees --- that is, it tries to maintain enough server processes to
+# handle the current load, plus a few spare servers to handle transient
+# load spikes (e.g., multiple simultaneous requests from a single
+# Netscape browser).
+#
+# It does this by periodically checking how many servers are waiting
+# for a request. If there are fewer than MinSpareServers, it creates
+# a new spare. If there are more than MaxSpareServers, some of the
+# spares die off. The default values are probably OK for most sites.
+#
+MinSpareServers 1
+MaxSpareServers 5
+
+#
+# Number of servers to start initially --- should be a reasonable ballpark
+# figure.
+#
+StartServers 1
+
+#
+# Limit on total number of servers running, i.e., limit on the number
+# of clients who can simultaneously connect --- if this limit is ever
+# reached, clients will be LOCKED OUT, so it should NOT BE SET TOO LOW.
+# It is intended mainly as a brake to keep a runaway server from taking
+# the system with it as it spirals down...
+#
+MaxClients 150
+
+#
+# MaxRequestsPerChild: the number of requests each child process is
+# allowed to process before the child dies. The child will exit so
+# as to avoid problems after prolonged use when Apache (and maybe the
+# libraries it uses) leak memory or other resources. On most systems, this
+# isn't really needed, but a few (such as Solaris) do have notable leaks
+# in the libraries. For these platforms, set to something like 10000
+# or so; a setting of 0 means unlimited.
+#
+# NOTE: This value does not include keepalive requests after the initial
+# request per connection. For example, if a child process handles
+# an initial request and 10 subsequent "keptalive" requests, it
+# would only count as 1 request towards this limit.
+#
+MaxRequestsPerChild 100000
+
+#
+# Listen: Allows you to bind Apache to specific IP addresses and/or
+# ports, instead of the default. See also the <VirtualHost>
+# directive.
+#
+# Configured from the httpd command line for WebKit layout tests.
+#
+Listen 127.0.0.1:8000
+Listen 127.0.0.1:8080
+Listen 127.0.0.1:8081
+Listen 127.0.0.1:9000
+Listen 127.0.0.1:9080
+
+#
+# Dynamic Shared Object (DSO) Support
+#
+# To be able to use the functionality of a module which was built as a DSO you
+# have to place corresponding `LoadModule' lines at this location so the
+# directives contained in it are actually available _before_ they are used.
+# Please read the file http://httpd.apache.org/docs/dso.html for more
+# details about the DSO mechanism and run `httpd -l' for the list of already
+# built-in (statically linked and thus always available) modules in your httpd
+# binary.
+#
+# Note: The order in which modules are loaded is important. Don't change
+# the order below without expert advice.
+#
+# Example:
+# LoadModule foo_module lib/apache/mod_foo.dll
+#LoadModule vhost_alias_module lib/apache/mod_vhost_alias.dll
+#LoadModule env_module lib/apache/mod_env.dll
+LoadModule config_log_module lib/apache/mod_log_config.dll
+#LoadModule mime_magic_module lib/apache/mod_mime_magic.dll
+LoadModule mime_module lib/apache/mod_mime.dll
+LoadModule negotiation_module lib/apache/mod_negotiation.dll
+#LoadModule status_module lib/apache/mod_status.dll
+#LoadModule info_module lib/apache/mod_info.dll
+LoadModule includes_module lib/apache/mod_include.dll
+LoadModule autoindex_module lib/apache/mod_autoindex.dll
+#LoadModule dir_module lib/apache/mod_dir.dll
+LoadModule cgi_module lib/apache/mod_cgi.dll
+LoadModule asis_module lib/apache/mod_asis.dll
+LoadModule imap_module lib/apache/mod_imap.dll
+LoadModule action_module lib/apache/mod_actions.dll
+#LoadModule speling_module lib/apache/mod_speling.dll
+#LoadModule userdir_module lib/apache/mod_userdir.dll
+LoadModule alias_module lib/apache/mod_alias.dll
+LoadModule rewrite_module lib/apache/mod_rewrite.dll
+LoadModule access_module lib/apache/mod_access.dll
+LoadModule auth_module lib/apache/mod_auth.dll
+#LoadModule anon_auth_module lib/apache/mod_auth_anon.dll
+#LoadModule dbm_auth_module lib/apache/mod_auth_dbm.dll
+#LoadModule digest_module lib/apache/mod_digest.dll
+#LoadModule proxy_module lib/apache/libproxy.dll
+#LoadModule cern_meta_module lib/apache/mod_cern_meta.dll
+#LoadModule expires_module lib/apache/mod_expires.dll
+LoadModule headers_module lib/apache/mod_headers.dll
+#LoadModule usertrack_module lib/apache/mod_usertrack.dll
+#LoadModule log_forensic_module lib/apache/mod_log_forensic.dll
+#LoadModule unique_id_module lib/apache/mod_unique_id.dll
+#LoadModule setenvif_module lib/apache/mod_setenvif.dll
+#LoadModule dav_module lib/apache/libdav.dll
+#LoadModule ssl_module lib/apache/libssl.dll
+#LoadModule perl_module lib/apache/libperl.dll
+#LoadModule php4_module lib/apache/libphp4.dll
+#LoadModule hfs_apple_module lib/apache/mod_hfs_apple.dll
+#LoadModule bonjour_module lib/apache/mod_bonjour.dll
+
+# Reconstruction of the complete module list from all available modules
+# (static and shared ones) to achieve correct module execution order.
+# [WHENEVER YOU CHANGE THE LOADMODULE SECTION ABOVE UPDATE THIS, TOO]
+ClearModuleList
+#AddModule mod_vhost_alias.c
+#AddModule mod_env.c
+AddModule mod_log_config.c
+#AddModule mod_mime_magic.c
+AddModule mod_mime.c
+AddModule mod_negotiation.c
+#AddModule mod_status.c
+#AddModule mod_info.c
+AddModule mod_include.c
+AddModule mod_autoindex.c
+#AddModule mod_dir.c
+AddModule mod_cgi.c
+AddModule mod_asis.c
+AddModule mod_imap.c
+AddModule mod_actions.c
+#AddModule mod_speling.c
+#AddModule mod_userdir.c
+AddModule mod_alias.c
+AddModule mod_rewrite.c
+AddModule mod_access.c
+AddModule mod_auth.c
+#AddModule mod_auth_anon.c
+#AddModule mod_auth_dbm.c
+#AddModule mod_digest.c
+#AddModule mod_proxy.c
+#AddModule mod_cern_meta.c
+#AddModule mod_expires.c
+AddModule mod_headers.c
+#AddModule mod_usertrack.c
+#AddModule mod_log_forensic.c
+#AddModule mod_unique_id.c
+AddModule mod_so.c
+#AddModule mod_setenvif.c
+#AddModule mod_dav.c
+#AddModule mod_ssl.c
+#AddModule mod_perl.c
+#AddModule mod_php4.c
+#AddModule mod_hfs_apple.c
+#AddModule mod_bonjour.c
+
+### Section 2: 'Main' server configuration
+#
+# The directives in this section set up the values used by the 'main'
+# server, which responds to any requests that aren't handled by a
+# <VirtualHost> definition. These values also provide defaults for
+# any <VirtualHost> containers you may define later in the file.
+#
+# All of these directives may appear inside <VirtualHost> containers,
+# in which case these default settings will be overridden for the
+# virtual host being defined.
+#
+
+#
+# ServerName allows you to set a host name which is sent back to clients for
+# your server if it's different than the one the program would get (i.e., use
+# "www" instead of the host's real name).
+#
+# Note: You cannot just invent host names and hope they work. The name you
+# define here must be a valid DNS name for your host. If you don't understand
+# this, ask your network administrator.
+# If your host doesn't have a registered DNS name, enter its IP address here.
+# You will have to access it by its address (e.g., http://123.45.67.89/)
+# anyway, and this will make redirections work in a sensible way.
+#
+# 127.0.0.1 is the TCP/IP local loop-back address, often named localhost. Your
+# machine always knows itself by this address. If you use Apache strictly for
+# local testing and development, you may use 127.0.0.1 as the server name.
+#
+ServerName 127.0.0.1
+
+#
+# DocumentRoot: The directory out of which you will serve your
+# documents. By default, all requests are taken from this directory, but
+# symbolic links and aliases may be used to point to other locations.
+#
+# Configured from the httpd command line for WebKit layout tests.
+#DocumentRoot "/Library/WebServer/Documents"
+
+#
+# Each directory to which Apache has access, can be configured with respect
+# to which services and features are allowed and/or disabled in that
+# directory (and its subdirectories).
+#
+<Directory />
+#
+# This may also be "None", "All", or any combination of "Indexes",
+# "Includes", "FollowSymLinks", "ExecCGI", or "MultiViews".
+#
+# Note that "MultiViews" must be named *explicitly* --- "Options All"
+# doesn't give it to you.
+#
+ Options Indexes FollowSymLinks MultiViews ExecCGI Includes
+
+#
+# This controls which options the .htaccess files in directories can
+# override. Can also be "All", or any combination of "Options", "FileInfo",
+# "AuthConfig", and "Limit"
+#
+ AllowOverride All
+
+#
+# Controls who can get stuff from this server.
+#
+ Order allow,deny
+ Allow from all
+</Directory>
+
+#
+# AccessFileName: The name of the file to look for in each directory
+# for access control information.
+#
+AccessFileName .htaccess
+
+#
+# The following lines prevent .htaccess files from being viewed by
+# Web clients. Since .htaccess files often contain authorization
+# information, access is disallowed for security reasons. Comment
+# these lines out if you want Web visitors to see the contents of
+# .htaccess files. If you change the AccessFileName directive above,
+# be sure to make the corresponding changes here.
+#
+# Also, folks tend to use names such as .htpasswd for password
+# files, so this will protect those as well.
+#
+<Files ~ "^\.([Hh][Tt]|[Dd][Ss]_[Ss])">
+ Order allow,deny
+ Deny from all
+ Satisfy All
+</Files>
+
+#
+# Apple specific filesystem protection.
+#
+
+<Files "rsrc">
+ Order allow,deny
+ Deny from all
+ Satisfy All
+</Files>
+
+<Directory ~ ".*\.\.namedfork">
+ Order allow,deny
+ Deny from all
+ Satisfy All
+</Directory>
+
+#
+# CacheNegotiatedDocs: By default, Apache sends "Pragma: no-cache" with each
+# document that was negotiated on the basis of content. This asks proxy
+# servers not to cache the document. Uncommenting the following line disables
+# this behavior, and proxies will be allowed to cache the documents.
+#
+#CacheNegotiatedDocs
+
+#
+# UseCanonicalName: (new for 1.3) With this setting turned on, whenever
+# Apache needs to construct a self-referencing URL (a URL that refers back
+# to the server the response is coming from) it will use ServerName and
+# Port to form a "canonical" name. With this setting off, Apache will
+# use the hostname:port that the client supplied, when possible. This
+# also affects SERVER_NAME and SERVER_PORT in CGI scripts.
+#
+UseCanonicalName On
+
+#
+# TypesConfig describes where the mime.types file (or equivalent) is
+# to be found.
+#
+# Configured from the httpd command line for WebKit layout tests.
+#
+#<IfModule mod_mime.c>
+# TypesConfig /private/etc/httpd/mime.types
+#</IfModule>
+
+#
+# DefaultType is the default MIME type the server will use for a document
+# if it cannot otherwise determine one, such as from filename extensions.
+# If your server contains mostly text or HTML documents, "text/plain" is
+# a good value. If most of your content is binary, such as applications
+# or images, you may want to use "application/octet-stream" instead to
+# keep browsers from trying to display binary files as though they are
+# text.
+#
+DefaultType text/plain
+
+#
+# HostnameLookups: Log the names of clients or just their IP addresses
+# e.g., www.apache.org (on) or 204.62.129.132 (off).
+# The default is off because it'd be overall better for the net if people
+# had to knowingly turn this feature on, since enabling it means that
+# each client request will result in AT LEAST one lookup request to the
+# nameserver.
+#
+HostnameLookups Off
+
+#
+# ErrorLog: The location of the error log file.
+# If you do not specify an ErrorLog directive within a <VirtualHost>
+# container, error messages relating to that virtual host will be
+# logged here. If you *do* define an error logfile for a <VirtualHost>
+# container, that host's errors will be logged there and not here.
+#
+# Configured from the httpd command line for WebKit layout tests.
+#ErrorLog "/tmp/layout-test-results/error_log"
+
+#
+# LogLevel: Control the number of messages logged to the error_log.
+# Possible values include: debug, info, notice, warn, error, crit,
+# alert, emerg.
+#
+LogLevel warn
+
+#
+# The following directives define some format nicknames for use with
+# a CustomLog directive (see below).
+#
+LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
+LogFormat "%p %h %l %u %t \"%r\" %>s %b" common
+LogFormat "%{Referer}i -> %U" referer
+LogFormat "%{User-agent}i" agent
+
+#
+# The location and format of the access logfile (Common Logfile Format).
+# If you do not define any access logfiles within a <VirtualHost>
+# container, they will be logged here. Contrariwise, if you *do*
+# define per-<VirtualHost> access logfiles, transactions will be
+# logged therein and *not* in this file.
+#
+# Configured from the httpd command line for WebKit layout tests.
+#CustomLog "/tmp/layout-test-results/access_log" common
+
+#
+# If you prefer a single logfile with access, agent, and referer information
+# (Combined Logfile Format) you can use the following directive.
+#
+#CustomLog "/tmp/layout-test-results/access_log" combined
+
+#
+# Optionally add a line containing the server version and virtual host
+# name to server-generated pages (error documents, FTP directory listings,
+# mod_status and mod_info output etc., but not CGI generated documents).
+# Set to "EMail" to also include a mailto: link to the ServerAdmin.
+# Set to one of: On | Off | EMail
+#
+ServerSignature On
+
+#
+# Aliases: Add here as many aliases as you need (with no limit). The format is
+# Alias fakename realname
+#
+<IfModule mod_alias.c>
+</IfModule>
+# End of aliases.
+
+#
+# Redirect allows you to tell clients about documents which used to exist in
+# your server's namespace, but do not anymore. This allows you to tell the
+# clients where to look for the relocated document.
+# Format: Redirect old-URI new-URL
+#
+
+#
+# Document types.
+#
+<IfModule mod_mime.c>
+
+ #
+ # AddLanguage allows you to specify the language of a document. You can
+ # then use content negotiation to give a browser a file in a language
+ # it can understand.
+ #
+ # Note 1: The suffix does not have to be the same as the language
+ # keyword --- those with documents in Polish (whose net-standard
+ # language code is pl) may wish to use "AddLanguage pl .po" to
+ # avoid the ambiguity with the common suffix for perl scripts.
+ #
+ # Note 2: The example entries below illustrate that in quite
+ # some cases the two character 'Language' abbreviation is not
+ # identical to the two character 'Country' code for its country,
+ # E.g. 'Danmark/dk' versus 'Danish/da'.
+ #
+ # Note 3: In the case of 'ltz' we violate the RFC by using a three char
+ # specifier. But there is 'work in progress' to fix this and get
+ # the reference data for rfc1766 cleaned up.
+ #
+ # Danish (da) - Dutch (nl) - English (en) - Estonian (ee)
+ # French (fr) - German (de) - Greek-Modern (el)
+ # Italian (it) - Korean (kr) - Norwegian (no) - Norwegian Nynorsk (nn)
+  # Portuguese (pt) - Luxembourgeois* (ltz)
+ # Spanish (es) - Swedish (sv) - Catalan (ca) - Czech(cs)
+ # Polish (pl) - Brazilian Portuguese (pt-br) - Japanese (ja)
+ # Russian (ru)
+ #
+ AddLanguage da .dk
+ AddLanguage nl .nl
+ AddLanguage en .en
+ AddLanguage et .ee
+ AddLanguage fr .fr
+ AddLanguage de .de
+ AddLanguage el .el
+ AddLanguage he .he
+ AddCharset ISO-8859-8 .iso8859-8
+ AddLanguage it .it
+ AddLanguage ja .ja
+ AddCharset ISO-2022-JP .jis
+ AddLanguage kr .kr
+ AddCharset ISO-2022-KR .iso-kr
+ AddLanguage nn .nn
+ AddLanguage no .no
+ AddLanguage pl .po
+ AddCharset ISO-8859-2 .iso-pl
+ AddLanguage pt .pt
+ AddLanguage pt-br .pt-br
+ AddLanguage ltz .lu
+ AddLanguage ca .ca
+ AddLanguage es .es
+ AddLanguage sv .sv
+ AddLanguage cs .cz .cs
+ AddLanguage ru .ru
+ AddLanguage zh-TW .zh-tw
+ AddCharset Big5 .Big5 .big5
+ AddCharset WINDOWS-1251 .cp-1251
+ AddCharset CP866 .cp866
+ AddCharset ISO-8859-5 .iso-ru
+ AddCharset KOI8-R .koi8-r
+ AddCharset UCS-2 .ucs2
+ AddCharset UCS-4 .ucs4
+ AddCharset UTF-8 .utf8
+
+ # LanguagePriority allows you to give precedence to some languages
+ # in case of a tie during content negotiation.
+ #
+ # Just list the languages in decreasing order of preference. We have
+ # more or less alphabetized them here. You probably want to change this.
+ #
+ <IfModule mod_negotiation.c>
+ LanguagePriority en da nl et fr de el it ja kr no pl pt pt-br ru ltz ca es sv tw
+ </IfModule>
+
+ #
+ # AddType allows you to tweak mime.types without actually editing it, or to
+ # make certain files to be certain types.
+ #
+ AddType application/x-tar .tgz
+
+ #
+ # AddEncoding allows you to have certain browsers uncompress
+ # information on the fly. Note: Not all browsers support this.
+ # Despite the name similarity, the following Add* directives have nothing
+ # to do with the FancyIndexing customization directives above.
+ #
+ AddEncoding x-compress .Z
+ AddEncoding x-gzip .gz .tgz
+ #
+ # If the AddEncoding directives above are commented-out, then you
+ # probably should define those extensions to indicate media types:
+ #
+ #AddType application/x-compress .Z
+ #AddType application/x-gzip .gz .tgz
+
+ #
+ # AddHandler allows you to map certain file extensions to "handlers",
+ # actions unrelated to filetype. These can be either built into the server
+ # or added with the Action command (see below)
+ #
+ # If you want to use server side includes, or CGI outside
+ # ScriptAliased directories, uncomment the following lines.
+ #
+ # To use CGI scripts:
+ #
+ AddHandler cgi-script .cgi .pl
+
+ #
+ # To use server-parsed HTML files
+ #
+ AddType text/html .shtml
+ AddHandler server-parsed .shtml
+
+ #
+ # Uncomment the following line to enable Apache's send-asis HTTP file
+ # feature
+ #
+ AddHandler send-as-is asis
+
+ #
+ # If you wish to use server-parsed imagemap files, use
+ #
+ #AddHandler imap-file map
+
+ #
+ # To enable type maps, you might want to use
+ #
+ #AddHandler type-map var
+
+</IfModule>
+# End of document types.
+
+#
+# Action lets you define media types that will execute a script whenever
+# a matching file is called. This eliminates the need for repeated URL
+# pathnames for oft-used CGI file processors.
+# Format: Action media/type /cgi-script/location
+# Format: Action handler-name /cgi-script/location
+#
+
+#
+# MetaDir: specifies the name of the directory in which Apache can find
+# meta information files. These files contain additional HTTP headers
+# to include when sending the document
+#
+#MetaDir .web
+
+#
+# MetaSuffix: specifies the file name suffix for the file containing the
+# meta information.
+#
+#MetaSuffix .meta
+
+#
+# Customizable error response (Apache style)
+# these come in three flavors
+#
+# 1) plain text
+#ErrorDocument 500 "The server made a boo boo.
+# n.b. the single leading (") marks it as text, it does not get output
+#
+# 2) local redirects
+#ErrorDocument 404 /missing.html
+# to redirect to local URL /missing.html
+#ErrorDocument 404 /cgi-bin/missing_handler.pl
+# N.B.: You can redirect to a script or a document using server-side-includes.
+#
+# 3) external redirects
+#ErrorDocument 402 http://some.other-server.com/subscription_info.html
+# N.B.: Many of the environment variables associated with the original
+# request will *not* be available to such a script.
+
+#
+# Proxy Server directives. Uncomment the following lines to
+# enable the proxy server:
+#
+#<IfModule mod_proxy.c>
+# ProxyRequests On
+
+# <Directory proxy:*>
+# Order deny,allow
+# Deny from all
+# Allow from .your-domain.com
+# </Directory>
+
+ #
+ # Enable/disable the handling of HTTP/1.1 "Via:" headers.
+ # ("Full" adds the server version; "Block" removes all outgoing Via: headers)
+ # Set to one of: Off | On | Full | Block
+ #
+# ProxyVia On
+
+ #
+ # To enable the cache as well, edit and uncomment the following lines:
+ # (no caching without CacheRoot)
+ #
+# CacheRoot "/private/var/run/proxy"
+# CacheSize 5
+# CacheGcInterval 4
+# CacheMaxExpire 24
+# CacheLastModifiedFactor 0.1
+# CacheDefaultExpire 1
+# NoCache a-domain.com another-domain.edu joes.garage-sale.com
+
+#</IfModule>
+# End of proxy directives.
+
+
+<IfModule mod_php4.c>
+  # If php is turned on, we respect .php and .phps files.
+ AddType application/x-httpd-php .php
+ AddType application/x-httpd-php-source .phps
+
+ # Since most users will want index.php to work we
+ # also automatically enable index.php
+ <IfModule mod_dir.c>
+ DirectoryIndex index.html index.php
+ </IfModule>
+</IfModule>
+
+<IfModule mod_rewrite.c>
+ RewriteEngine On
+ RewriteCond %{REQUEST_METHOD} ^TRACE
+ RewriteRule .* - [F]
+</IfModule>
diff --git a/chromium/tools/python/google/httpd_config/httpd2.conf b/chromium/tools/python/google/httpd_config/httpd2.conf
new file mode 100644
index 00000000000..8c96f757787
--- /dev/null
+++ b/chromium/tools/python/google/httpd_config/httpd2.conf
@@ -0,0 +1,280 @@
+## httpd2.conf -- Apache 2.x HTTP server configuration file
+
+#
+# Listen: Allows you to bind Apache to specific IP addresses and/or
+# ports, instead of the default. See also the <VirtualHost>
+# directive.
+#
+Listen 127.0.0.1:8000
+Listen 127.0.0.1:8080
+Listen 127.0.0.1:8081
+Listen 127.0.0.1:8443
+Listen 127.0.0.1:9000
+Listen 127.0.0.1:9080
+Listen 127.0.0.1:9443
+
+#
+# Dynamic Shared Object (DSO) Support
+#
+# To be able to use the functionality of a module which was built as a DSO you
+# have to place corresponding `LoadModule' lines at this location so the
+# directives contained in it are actually available _before_ they are used.
+# Please read the file http://httpd.apache.org/docs/dso.html for more
+# details about the DSO mechanism and run `httpd -l' for the list of already
+# built-in (statically linked and thus always available) modules in your httpd
+# binary.
+#
+# Note: The order in which modules are loaded is important. Don't change
+# the order below without expert advice.
+#
+#LoadModule authn_file_module lib/apache2/mod_authn_file.so
+#LoadModule authn_dbm_module lib/apache2/mod_authn_dbm.so
+#LoadModule authn_anon_module lib/apache2/mod_authn_anon.so
+#LoadModule authn_dbd_module lib/apache2/mod_authn_dbd.so
+#LoadModule authn_default_module lib/apache2/mod_authn_default.so
+LoadModule authz_host_module lib/apache2/mod_authz_host.so
+#LoadModule authz_groupfile_module lib/apache2/mod_authz_groupfile.so
+#LoadModule authz_user_module lib/apache2/mod_authz_user.so
+#LoadModule authz_dbm_module lib/apache2/mod_authz_dbm.so
+#LoadModule authz_owner_module lib/apache2/mod_authz_owner.so
+#LoadModule authz_default_module lib/apache2/mod_authz_default.so
+#LoadModule auth_basic_module lib/apache2/mod_auth_basic.so
+#LoadModule auth_digest_module lib/apache2/mod_auth_digest.so
+#LoadModule dbd_module lib/apache2/mod_dbd.so
+#LoadModule dumpio_module lib/apache2/mod_dumpio.so
+#LoadModule ext_filter_module lib/apache2/mod_ext_filter.so
+LoadModule include_module lib/apache2/mod_include.so
+#LoadModule filter_module lib/apache2/mod_filter.so
+#LoadModule deflate_module lib/apache2/mod_deflate.so
+LoadModule log_config_module lib/apache2/mod_log_config.so
+#LoadModule log_forensic_module lib/apache2/mod_log_forensic.so
+#LoadModule logio_module lib/apache2/mod_logio.so
+#LoadModule env_module lib/apache2/mod_env.so
+#LoadModule mime_magic_module lib/apache2/mod_mime_magic.so
+#LoadModule cern_meta_module lib/apache2/mod_cern_meta.so
+#LoadModule expires_module lib/apache2/mod_expires.so
+LoadModule headers_module lib/apache2/mod_headers.so
+#LoadModule ident_module lib/apache2/mod_ident.so
+#LoadModule usertrack_module lib/apache2/mod_usertrack.so
+#LoadModule unique_id_module lib/apache2/mod_unique_id.so
+#LoadModule setenvif_module lib/apache2/mod_setenvif.so
+#LoadModule version_module lib/apache2/mod_version.so
+#LoadModule proxy_module lib/apache2/mod_proxy.so
+#LoadModule proxy_connect_module lib/apache2/mod_proxy_connect.so
+#LoadModule proxy_ftp_module lib/apache2/mod_proxy_ftp.so
+#LoadModule proxy_http_module lib/apache2/mod_proxy_http.so
+#LoadModule proxy_ajp_module lib/apache2/mod_proxy_ajp.so
+#LoadModule proxy_balancer_module lib/apache2/mod_proxy_balancer.so
+LoadModule ssl_module lib/apache2/mod_ssl.so
+LoadModule mime_module lib/apache2/mod_mime.so
+#LoadModule dav_module lib/apache2/mod_dav.so
+#LoadModule status_module lib/apache2/mod_status.so
+LoadModule autoindex_module lib/apache2/mod_autoindex.so
+LoadModule asis_module lib/apache2/mod_asis.so
+#LoadModule info_module lib/apache2/mod_info.so
+LoadModule cgi_module lib/apache2/mod_cgi.so
+#LoadModule dav_fs_module lib/apache2/mod_dav_fs.so
+#LoadModule vhost_alias_module lib/apache2/mod_vhost_alias.so
+LoadModule negotiation_module lib/apache2/mod_negotiation.so
+#LoadModule dir_module lib/apache2/mod_dir.so
+LoadModule imagemap_module lib/apache2/mod_imagemap.so
+LoadModule actions_module lib/apache2/mod_actions.so
+#LoadModule speling_module lib/apache2/mod_speling.so
+#LoadModule userdir_module lib/apache2/mod_userdir.so
+LoadModule alias_module lib/apache2/mod_alias.so
+LoadModule rewrite_module lib/apache2/mod_rewrite.so
+LoadModule php5_module lib/apache2/cygphp5.so
+
+#LoadModule imap_module lib/apache/mod_imap.dll
+#LoadModule access_module lib/apache/mod_access.dll
+#LoadModule auth_module lib/apache/mod_auth.dll
+
+
+#
+# Each directory to which Apache has access, can be configured with respect
+# to which services and features are allowed and/or disabled in that
+# directory (and its subdirectories).
+#
+<Directory />
+ Options Indexes FollowSymLinks MultiViews ExecCGI Includes
+ AllowOverride All
+ Order allow,deny
+ Allow from all
+</Directory>
+
+
+#
+# Apple specific filesystem protection.
+#
+<Files "rsrc">
+ Order allow,deny
+ Deny from all
+ Satisfy All
+</Files>
+<Directory ~ ".*\.\.namedfork">
+ Order allow,deny
+ Deny from all
+ Satisfy All
+</Directory>
+
+
+#
+# UseCanonicalName: (new for 1.3) With this setting turned on, whenever
+# Apache needs to construct a self-referencing URL (a URL that refers back
+# to the server the response is coming from) it will use ServerName and
+# Port to form a "canonical" name. With this setting off, Apache will
+# use the hostname:port that the client supplied, when possible. This
+# also affects SERVER_NAME and SERVER_PORT in CGI scripts.
+#
+UseCanonicalName On
+
+
+#
+# The following directives define some format nicknames for use with
+# a CustomLog directive (see below).
+#
+LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
+LogFormat "%h %l %u %t \"%r\" %>s %b" common
+LogFormat "%{Referer}i -> %U" referer
+LogFormat "%{User-agent}i" agent
+
+
+#
+# Optionally add a line containing the server version and virtual host
+# name to server-generated pages (error documents, FTP directory listings,
+# mod_status and mod_info output etc., but not CGI generated documents).
+# Set to "EMail" to also include a mailto: link to the ServerAdmin.
+# Set to one of: On | Off | EMail
+#
+ServerSignature On
+
+
+#
+# Document types.
+#
+<IfModule mime_module>
+
+ #
+ # AddLanguage allows you to specify the language of a document. You can
+ # then use content negotiation to give a browser a file in a language
+ # it can understand.
+ #
+ # Note 1: The suffix does not have to be the same as the language
+ # keyword --- those with documents in Polish (whose net-standard
+ # language code is pl) may wish to use "AddLanguage pl .po" to
+ # avoid the ambiguity with the common suffix for perl scripts.
+ #
+ # Note 2: The example entries below illustrate that in quite
+ # some cases the two character 'Language' abbreviation is not
+ # identical to the two character 'Country' code for its country,
+ # E.g. 'Danmark/dk' versus 'Danish/da'.
+ #
+ # Note 3: In the case of 'ltz' we violate the RFC by using a three char
+ # specifier. But there is 'work in progress' to fix this and get
+ # the reference data for rfc1766 cleaned up.
+ #
+ # Danish (da) - Dutch (nl) - English (en) - Estonian (ee)
+ # French (fr) - German (de) - Greek-Modern (el)
+ # Italian (it) - Korean (kr) - Norwegian (no) - Norwegian Nynorsk (nn)
+  # Portuguese (pt) - Luxembourgeois* (ltz)
+ # Spanish (es) - Swedish (sv) - Catalan (ca) - Czech(cs)
+ # Polish (pl) - Brazilian Portuguese (pt-br) - Japanese (ja)
+ # Russian (ru)
+ #
+ AddLanguage da .dk
+ AddLanguage nl .nl
+ AddLanguage en .en
+ AddLanguage et .ee
+ AddLanguage fr .fr
+ AddLanguage de .de
+ AddLanguage el .el
+ AddLanguage he .he
+ AddCharset ISO-8859-8 .iso8859-8
+ AddLanguage it .it
+ AddLanguage ja .ja
+ AddCharset ISO-2022-JP .jis
+ AddLanguage kr .kr
+ AddCharset ISO-2022-KR .iso-kr
+ AddLanguage nn .nn
+ AddLanguage no .no
+ AddLanguage pl .po
+ AddCharset ISO-8859-2 .iso-pl
+ AddLanguage pt .pt
+ AddLanguage pt-br .pt-br
+ AddLanguage ltz .lu
+ AddLanguage ca .ca
+ AddLanguage es .es
+ AddLanguage sv .sv
+ AddLanguage cs .cz .cs
+ AddLanguage ru .ru
+ AddLanguage zh-TW .zh-tw
+ AddCharset Big5 .Big5 .big5
+ AddCharset WINDOWS-1251 .cp-1251
+ AddCharset CP866 .cp866
+ AddCharset ISO-8859-5 .iso-ru
+ AddCharset KOI8-R .koi8-r
+ AddCharset UCS-2 .ucs2
+ AddCharset UCS-4 .ucs4
+ AddCharset UTF-8 .utf8
+
+ # LanguagePriority allows you to give precedence to some languages
+ # in case of a tie during content negotiation.
+ #
+ # Just list the languages in decreasing order of preference. We have
+ # more or less alphabetized them here. You probably want to change this.
+ #
+ <IfModule negotiation_module>
+ LanguagePriority en da nl et fr de el it ja kr no pl pt pt-br ru ltz ca es sv tw
+ </IfModule>
+
+ #
+ # AddType allows you to tweak mime.types without actually editing it, or to
+ # make certain files to be certain types.
+ #
+ AddType application/x-tar .tgz
+
+ #
+ # AddEncoding allows you to have certain browsers uncompress
+ # information on the fly. Note: Not all browsers support this.
+ # Despite the name similarity, the following Add* directives have nothing
+ # to do with the FancyIndexing customization directives above.
+ #
+ AddEncoding x-compress .Z
+ AddEncoding x-gzip .gz .tgz
+
+ #
+ # AddHandler allows you to map certain file extensions to "handlers",
+ # actions unrelated to filetype. These can be either built into the server
+ # or added with the Action command (see below)
+ #
+ # If you want to use server side includes, or CGI outside
+ # ScriptAliased directories, uncomment the following lines.
+ #
+ # To use CGI scripts:
+ #
+ AddHandler cgi-script .cgi .pl
+
+ #
+ # To use server-parsed HTML files
+ #
+ AddType text/html .shtml
+ AddHandler server-parsed .shtml
+
+ #
+ # Uncomment the following line to enable Apache's send-asis HTTP file
+ # feature
+ #
+ AddHandler send-as-is asis
+</IfModule>
+
+
+<IfModule php5_module>
+ AddType application/x-httpd-php .php
+ AddType application/x-httpd-php-source .phps
+</IfModule>
+
+<IfModule rewrite_module>
+ RewriteEngine On
+ RewriteCond %{REQUEST_METHOD} ^TRACE
+ RewriteRule .* - [F]
+</IfModule>
diff --git a/chromium/tools/python/google/httpd_config/httpd2.pem b/chromium/tools/python/google/httpd_config/httpd2.pem
new file mode 100644
index 00000000000..69d10d2219c
--- /dev/null
+++ b/chromium/tools/python/google/httpd_config/httpd2.pem
@@ -0,0 +1,110 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQCbE8ILYEITh9up
+ken6c61zNAibRnq70g7glLy6fHqQJsrIVzmj6b+4dHLYUA3mu13pqZyoK6YP1MPd
+afdh23IrW4vo6xtxCpqQk0rAEa+0cji7E+01JHEvO4Y1Q4NScGj628HDe+dJqiQD
+AZWRDgIodlKrRjD1XZT91U0UsuytDvYQVbEIosgMdEevW8FvqWAhDH6fkb4Q5OdW
+wUxGnX0+cHLINVMJvCoPq+fDQWy4A/2uRiFn/x9NBZEz2AwNta+y6e3pn8gZE9vh
+Feqe3Jv9toIZzsqQZ/fCWK6FKzfkxQNYc5vcSqHx96VuDhqrbnPFdpcQfRB08Yg4
+3TSioKY7AgMBAAECggEAOqImzPxRH2dPs3Z6+/n+y78RvfvQ7fDHNTyneu8TvCse
+os7v+TypA4nr3kOubd3L7Uv28lLGj9fHUpD9Ot+o9CHB7YfvMTdsJ1u5eJN3VoeV
+UY6AMoab0Nr1rG/hWCsuViL+yPWxBlYxFX3k2hps0HWkXiPE4RDIA41BfqEEAY4+
+6V0lvoBZAJbYncGg1BEDxH+erXIFmAu3PeCYEpb2VP7hQH8JITEWco+DmK5impoB
+e+BaEVLqFKUjU+EdvpE4WKB24K9lw35bfGhWd/cQwSaLIPezG1OK9M0JbpPoj5gg
+KBdwrS7EdOur64Ue774KPAFRYU8mEpnnQMKOnNUuSQKBgQDHfriFwjViBdRJGTZ1
+Wa/AgJ7rVU9yMhaUSifYbLQDrYRwagYNtA/NqTgRYl7BbHZpsYmglQoCAWJa+KK2
+xwJ/1uLsNG+I04lSQCsRr4/z23O6Vc2VzmMpru6Upa9mL8aNiUm2+7VJje/NnUW5
+OXiSxbYGchYbO7+sIo8UmSpm7wKBgQDHAFLoBZOeh1BGnD0YNH7WYJ4kH9IriFcB
+QqJL8yFjikbUU976yAyXNbMIVBMkCZoT/l7RvgwLufarKTzRHv88nCPwg8/sbQQT
+WP4TRcxS6zZj4y0VHN2i0KSldAYk1ohgLFXzAQzXVcMBzNNOdSSLtqRbPZo9Gd7U
+DYaHxRIVdQKBgQCFykaV7hk/FAm6vF35dZyYzanGyf/t5gmeid0PGFfh34zilzhY
+GFpA4yvm/MHvln4ThC14tHtxvNvphrYZPn4+ni6xmrjyWmvN7Zr00XkJYjPK06B8
+x11Zpyf6KOPo9EGEyn3Vahm6qqYYj1EjV5e1V0MsL3cD7J4vIz4x4ka9oQKBgQCN
+9tQuQ7Qw1rVU+ia3etO1Wc3XVYAYoDX5ZzDi37rFCSNIW+DppQceZCepXFkfT15E
+vyWjmWF8iBjJuCxzvxo0ges9rLsLHiZXKxhuZU/DI5t0nN9PfX07pn6ereuoIge+
+HELgjbI8eCkawqVIBleg+BW+JW9AAZGuU0vS1ar19QKBgQC3J57JCl6ZZzGKpFpU
+/9qYA0qFFRBIRddHlfelk7EBqg/6C6yEXAqNO+DcurdU6li+lEOKNSPAqii2MC+H
+XqCIdtbZcOX7pUSg5E8N883ruMjsaePTsvA5iEY5QvA8Mn47wpPikYEXQgFWoP+W
+UFlVhwe/E/ebjJZqyTiQaQcMPQ==
+-----END PRIVATE KEY-----
+Certificate:
+ Data:
+ Version: 3 (0x2)
+ Serial Number: 237 (0xed)
+ Signature Algorithm: sha1WithRSAEncryption
+ Issuer: CN=Test Root CA
+ Validity
+ Not Before: Jun 18 19:52:02 2013 GMT
+ Not After : Jun 16 19:52:02 2023 GMT
+ Subject: C=US, ST=California, L=Mountain View, O=Test CA, CN=127.0.0.1
+ Subject Public Key Info:
+ Public Key Algorithm: rsaEncryption
+ Public-Key: (2048 bit)
+ Modulus:
+ 00:9b:13:c2:0b:60:42:13:87:db:a9:91:e9:fa:73:
+ ad:73:34:08:9b:46:7a:bb:d2:0e:e0:94:bc:ba:7c:
+ 7a:90:26:ca:c8:57:39:a3:e9:bf:b8:74:72:d8:50:
+ 0d:e6:bb:5d:e9:a9:9c:a8:2b:a6:0f:d4:c3:dd:69:
+ f7:61:db:72:2b:5b:8b:e8:eb:1b:71:0a:9a:90:93:
+ 4a:c0:11:af:b4:72:38:bb:13:ed:35:24:71:2f:3b:
+ 86:35:43:83:52:70:68:fa:db:c1:c3:7b:e7:49:aa:
+ 24:03:01:95:91:0e:02:28:76:52:ab:46:30:f5:5d:
+ 94:fd:d5:4d:14:b2:ec:ad:0e:f6:10:55:b1:08:a2:
+ c8:0c:74:47:af:5b:c1:6f:a9:60:21:0c:7e:9f:91:
+ be:10:e4:e7:56:c1:4c:46:9d:7d:3e:70:72:c8:35:
+ 53:09:bc:2a:0f:ab:e7:c3:41:6c:b8:03:fd:ae:46:
+ 21:67:ff:1f:4d:05:91:33:d8:0c:0d:b5:af:b2:e9:
+ ed:e9:9f:c8:19:13:db:e1:15:ea:9e:dc:9b:fd:b6:
+ 82:19:ce:ca:90:67:f7:c2:58:ae:85:2b:37:e4:c5:
+ 03:58:73:9b:dc:4a:a1:f1:f7:a5:6e:0e:1a:ab:6e:
+ 73:c5:76:97:10:7d:10:74:f1:88:38:dd:34:a2:a0:
+ a6:3b
+ Exponent: 65537 (0x10001)
+ X509v3 extensions:
+ X509v3 Basic Constraints: critical
+ CA:FALSE
+ X509v3 Subject Key Identifier:
+ 34:FF:09:84:DB:23:D0:1F:45:72:50:CE:79:28:D3:EF:FB:B1:46:07
+ X509v3 Authority Key Identifier:
+ keyid:2B:88:93:E1:D2:54:50:F4:B8:A4:20:BD:B1:79:E6:0B:AA:EB:EC:1A
+
+ X509v3 Extended Key Usage:
+ TLS Web Server Authentication, TLS Web Client Authentication
+ X509v3 Subject Alternative Name:
+ IP Address:127.0.0.1
+ Signature Algorithm: sha1WithRSAEncryption
+ a6:21:b1:53:7f:ec:a8:23:6f:76:d4:bd:0a:6a:67:a7:a8:9e:
+ 7d:08:38:62:cd:f4:34:d9:41:3a:02:a7:6d:31:7d:33:02:27:
+ ab:06:e6:01:c8:65:32:b5:f3:96:27:4c:5a:82:a5:84:a7:99:
+ 29:4a:b6:b9:57:41:75:a9:e2:a6:87:00:25:ff:5a:85:f6:68:
+ da:e1:5a:19:fb:91:5e:70:27:31:dd:9a:ac:20:9a:d6:27:1c:
+ 55:34:8c:f9:a4:97:ff:81:63:fb:b8:7d:71:d4:42:88:3c:10:
+ db:78:54:e3:42:b1:a4:83:81:b9:92:7a:f6:b2:f2:19:1c:b6:
+ 68:80:2b:14:5a:36:84:e0:67:ad:f9:e4:bc:a1:63:af:a1:13:
+ 13:95:3f:76:5e:2c:81:ed:7c:4a:38:04:bf:dc:03:b0:ca:8b:
+ d3:17:d8:fc:60:d9:83:31:9f:ef:be:a7:e3:05:4f:b4:3e:97:
+ 8a:6d:86:c5:69:ef:93:8c:1b:9e:6e:95:f7:1a:66:f7:1f:bf:
+ 5e:92:c4:ed:15:e1:2e:56:56:11:80:be:02:1d:96:fa:39:6a:
+ e0:dd:04:d7:98:e9:29:72:a5:60:f1:0e:14:5d:08:db:26:18:
+ 42:5b:f3:82:fb:79:83:48:1b:86:8d:9d:8b:5c:87:1a:23:ae:
+ bc:4c:13:46
+-----BEGIN CERTIFICATE-----
+MIIDdDCCAlygAwIBAgICAO0wDQYJKoZIhvcNAQEFBQAwFzEVMBMGA1UEAwwMVGVz
+dCBSb290IENBMB4XDTEzMDYxODE5NTIwMloXDTIzMDYxNjE5NTIwMlowYDELMAkG
+A1UEBhMCVVMxEzARBgNVBAgMCkNhbGlmb3JuaWExFjAUBgNVBAcMDU1vdW50YWlu
+IFZpZXcxEDAOBgNVBAoMB1Rlc3QgQ0ExEjAQBgNVBAMMCTEyNy4wLjAuMTCCASIw
+DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJsTwgtgQhOH26mR6fpzrXM0CJtG
+ervSDuCUvLp8epAmyshXOaPpv7h0cthQDea7XempnKgrpg/Uw91p92Hbcitbi+jr
+G3EKmpCTSsARr7RyOLsT7TUkcS87hjVDg1JwaPrbwcN750mqJAMBlZEOAih2UqtG
+MPVdlP3VTRSy7K0O9hBVsQiiyAx0R69bwW+pYCEMfp+RvhDk51bBTEadfT5wcsg1
+Uwm8Kg+r58NBbLgD/a5GIWf/H00FkTPYDA21r7Lp7emfyBkT2+EV6p7cm/22ghnO
+ypBn98JYroUrN+TFA1hzm9xKofH3pW4OGqtuc8V2lxB9EHTxiDjdNKKgpjsCAwEA
+AaOBgDB+MAwGA1UdEwEB/wQCMAAwHQYDVR0OBBYEFDT/CYTbI9AfRXJQznko0+/7
+sUYHMB8GA1UdIwQYMBaAFCuIk+HSVFD0uKQgvbF55guq6+waMB0GA1UdJQQWMBQG
+CCsGAQUFBwMBBggrBgEFBQcDAjAPBgNVHREECDAGhwR/AAABMA0GCSqGSIb3DQEB
+BQUAA4IBAQCmIbFTf+yoI2921L0KamenqJ59CDhizfQ02UE6AqdtMX0zAierBuYB
+yGUytfOWJ0xagqWEp5kpSra5V0F1qeKmhwAl/1qF9mja4VoZ+5FecCcx3ZqsIJrW
+JxxVNIz5pJf/gWP7uH1x1EKIPBDbeFTjQrGkg4G5knr2svIZHLZogCsUWjaE4Get
++eS8oWOvoRMTlT92XiyB7XxKOAS/3AOwyovTF9j8YNmDMZ/vvqfjBU+0PpeKbYbF
+ae+TjBuebpX3Gmb3H79eksTtFeEuVlYRgL4CHZb6OWrg3QTXmOkpcqVg8Q4UXQjb
+JhhCW/OC+3mDSBuGjZ2LXIcaI668TBNG
+-----END CERTIFICATE-----
diff --git a/chromium/tools/python/google/httpd_config/httpd2_linux.conf b/chromium/tools/python/google/httpd_config/httpd2_linux.conf
new file mode 100644
index 00000000000..27a67e027bf
--- /dev/null
+++ b/chromium/tools/python/google/httpd_config/httpd2_linux.conf
@@ -0,0 +1,144 @@
+# For this to work, you need to have ssl.conf and ssl.load in
+# /etc/apache/mods-enabled. You also need to be able to write to
+# /var/run/apache2. (Tested on Ubuntu Hardy, directory names may
+# vary with other distros.)
+
+Listen 127.0.0.1:8000
+ServerName 127.0.0.1
+
+#
+# Timeout: The number of seconds before receives and sends time out.
+#
+Timeout 300
+
+#
+# KeepAlive: Whether or not to allow persistent connections (more than
+# one request per connection). Set to "Off" to deactivate.
+#
+KeepAlive On
+
+#
+# MaxKeepAliveRequests: The maximum number of requests to allow
+# during a persistent connection. Set to 0 to allow an unlimited amount.
+# We recommend you leave this number high, for maximum performance.
+#
+MaxKeepAliveRequests 100
+
+#
+# KeepAliveTimeout: Number of seconds to wait for the next request from the
+# same client on the same connection.
+#
+KeepAliveTimeout 15
+
+##
+## Server-Pool Size Regulation (MPM specific)
+##
+
+# prefork MPM
+# StartServers: number of server processes to start
+# MinSpareServers: minimum number of server processes which are kept spare
+# MaxSpareServers: maximum number of server processes which are kept spare
+# MaxClients: maximum number of server processes allowed to start
+# MaxRequestsPerChild: maximum number of requests a server process serves
+<IfModule mpm_prefork_module>
+ StartServers 5
+ MinSpareServers 5
+ MaxSpareServers 10
+ MaxClients 150
+ MaxRequestsPerChild 0
+</IfModule>
+
+# worker MPM
+# StartServers: initial number of server processes to start
+# MaxClients: maximum number of simultaneous client connections
+# MinSpareThreads: minimum number of worker threads which are kept spare
+# MaxSpareThreads: maximum number of worker threads which are kept spare
+# ThreadsPerChild: constant number of worker threads in each server process
+# MaxRequestsPerChild: maximum number of requests a server process serves
+<IfModule mpm_worker_module>
+ StartServers 2
+ MaxClients 150
+ MinSpareThreads 25
+ MaxSpareThreads 75
+ ThreadsPerChild 25
+ MaxRequestsPerChild 0
+</IfModule>
+
+#
+# AccessFileName: The name of the file to look for in each directory
+# for additional configuration directives. See also the AllowOverride
+# directive.
+#
+
+AccessFileName .htaccess
+
+#
+# The following lines prevent .htaccess and .htpasswd files from being
+# viewed by Web clients.
+#
+<Files ~ "^\.ht">
+ Order allow,deny
+ Deny from all
+</Files>
+
+#
+# DefaultType is the default MIME type the server will use for a document
+# if it cannot otherwise determine one, such as from filename extensions.
+# If your server contains mostly text or HTML documents, "text/plain" is
+# a good value. If most of your content is binary, such as applications
+# or images, you may want to use "application/octet-stream" instead to
+# keep browsers from trying to display binary files as though they are
+# text.
+#
+DefaultType text/plain
+
+
+#
+# HostnameLookups: Log the names of clients or just their IP addresses
+# e.g., www.apache.org (on) or 204.62.129.132 (off).
+# The default is off because it'd be overall better for the net if people
+# had to knowingly turn this feature on, since enabling it means that
+# each client request will result in AT LEAST one lookup request to the
+# nameserver.
+#
+HostnameLookups Off
+
+#
+# LogLevel: Control the number of messages logged to the error_log.
+# Possible values include: debug, info, notice, warn, error, crit,
+# alert, emerg.
+#
+LogLevel warn
+
+Include /etc/apache2/mods-enabled/*.load
+Include /etc/apache2/mods-enabled/*.conf
+
+#
+# The following directives define some format nicknames for use with
+# a CustomLog directive (see below).
+# If you are behind a reverse proxy, you might want to change %h into %{X-Forwarded-For}i
+#
+LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
+LogFormat "%h %l %u %t \"%r\" %>s %b" common
+LogFormat "%{Referer}i -> %U" referer
+LogFormat "%{User-agent}i" agent
+
+#
+# ServerTokens
+# This directive configures what you return as the Server HTTP response
+# Header. The default is 'Full' which sends information about the OS-Type
+# and compiled in modules.
+# Set to one of: Full | OS | Minor | Minimal | Major | Prod
+# where Full conveys the most information, and Prod the least.
+#
+ServerTokens Full
+
+#
+# Optionally add a line containing the server version and virtual host
+# name to server-generated pages (internal error documents, FTP directory
+# listings, mod_status and mod_info output etc., but not CGI generated
+# documents or custom error documents).
+# Set to "EMail" to also include a mailto: link to the ServerAdmin.
+# Set to one of: On | Off | EMail
+#
+ServerSignature On
diff --git a/chromium/tools/python/google/httpd_config/httpd2_mac.conf b/chromium/tools/python/google/httpd_config/httpd2_mac.conf
new file mode 100644
index 00000000000..07f88999b85
--- /dev/null
+++ b/chromium/tools/python/google/httpd_config/httpd2_mac.conf
@@ -0,0 +1,229 @@
+## httpd2.conf -- Apache 2.x HTTP server configuration file
+
+#
+# Listen: Allows you to bind Apache to specific IP addresses and/or
+# ports, instead of the default. See also the <VirtualHost>
+# directive.
+#
+Listen 127.0.0.1:8000
+
+#
+# Dynamic Shared Object (DSO) Support
+#
+# To be able to use the functionality of a module which was built as a DSO you
+# have to place corresponding `LoadModule' lines at this location so the
+# directives contained in it are actually available _before_ they are used.
+# Please read the file http://httpd.apache.org/docs/dso.html for more
+# details about the DSO mechanism and run `httpd -l' for the list of already
+# built-in (statically linked and thus always available) modules in your httpd
+# binary.
+#
+# Note: The order in which modules are loaded is important. Don't change
+# the order below without expert advice.
+#
+# NOTE: This is not the same set of modules that gets loaded on win.
+LoadModule authz_host_module libexec/apache2/mod_authz_host.so
+LoadModule mime_module libexec/apache2/mod_mime.so
+LoadModule ssl_module libexec/apache2/mod_ssl.so
+LoadModule autoindex_module libexec/apache2/mod_autoindex.so
+LoadModule alias_module libexec/apache2/mod_alias.so
+LoadModule log_config_module libexec/apache2/mod_log_config.so
+
+#LoadModule include_module libexec/apache2/mod_include.so
+#LoadModule headers_module libexec/apache2/mod_headers.so
+#LoadModule asis_module libexec/apache2/mod_asis.so
+#LoadModule cgi_module libexec/apache2/mod_cgi.so
+#LoadModule negotiation_module libexec/apache2/mod_negotiation.so
+#LoadModule imagemap_module libexec/apache2/mod_imagemap.so
+#LoadModule actions_module libexec/apache2/mod_actions.so
+#LoadModule rewrite_module libexec/apache2/mod_rewrite.so
+
+
+#
+# Each directory to which Apache has access, can be configured with respect
+# to which services and features are allowed and/or disabled in that
+# directory (and its subdirectories).
+#
+<Directory />
+ Options Indexes FollowSymLinks MultiViews ExecCGI Includes
+ AllowOverride All
+ Order allow,deny
+ Allow from all
+</Directory>
+
+
+#
+# Apple specific filesystem protection.
+
+<Files "rsrc">
+ Order allow,deny
+ Deny from all
+ Satisfy All
+</Files>
+<Directory ~ ".*\.\.namedfork">
+ Order allow,deny
+ Deny from all
+ Satisfy All
+</Directory>
+
+
+#
+# UseCanonicalName: (new for 1.3) With this setting turned on, whenever
+# Apache needs to construct a self-referencing URL (a URL that refers back
+# to the server the response is coming from) it will use ServerName and
+# Port to form a "canonical" name. With this setting off, Apache will
+# use the hostname:port that the client supplied, when possible. This
+# also affects SERVER_NAME and SERVER_PORT in CGI scripts.
+#
+UseCanonicalName On
+
+
+#
+# The following directives define some format nicknames for use with
+# a CustomLog directive (see below).
+#
+LogFormat "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-Agent}i\"" combined
+LogFormat "%h %l %u %t \"%r\" %>s %b" common
+LogFormat "%{Referer}i -> %U" referer
+LogFormat "%{User-agent}i" agent
+
+
+#
+# Optionally add a line containing the server version and virtual host
+# name to server-generated pages (error documents, FTP directory listings,
+# mod_status and mod_info output etc., but not CGI generated documents).
+# Set to "EMail" to also include a mailto: link to the ServerAdmin.
+# Set to one of: On | Off | EMail
+#
+ServerSignature On
+
+
+#
+# Document types.
+#
+<IfModule mime_module>
+
+ #
+ # AddLanguage allows you to specify the language of a document. You can
+ # then use content negotiation to give a browser a file in a language
+ # it can understand.
+ #
+ # Note 1: The suffix does not have to be the same as the language
+ # keyword --- those with documents in Polish (whose net-standard
+ # language code is pl) may wish to use "AddLanguage pl .po" to
+ # avoid the ambiguity with the common suffix for perl scripts.
+ #
+ # Note 2: The example entries below illustrate that in quite
+ # some cases the two character 'Language' abbreviation is not
+ # identical to the two character 'Country' code for its country,
+ # E.g. 'Danmark/dk' versus 'Danish/da'.
+ #
+ # Note 3: In the case of 'ltz' we violate the RFC by using a three char
+ # specifier. But there is 'work in progress' to fix this and get
+ # the reference data for rfc1766 cleaned up.
+ #
+ # Danish (da) - Dutch (nl) - English (en) - Estonian (ee)
+ # French (fr) - German (de) - Greek-Modern (el)
+ # Italian (it) - Korean (kr) - Norwegian (no) - Norwegian Nynorsk (nn)
+    # Portuguese (pt) - Luxembourgeois* (ltz)
+ # Spanish (es) - Swedish (sv) - Catalan (ca) - Czech(cs)
+ # Polish (pl) - Brazilian Portuguese (pt-br) - Japanese (ja)
+ # Russian (ru)
+ #
+ AddLanguage da .dk
+ AddLanguage nl .nl
+ AddLanguage en .en
+ AddLanguage et .ee
+ AddLanguage fr .fr
+ AddLanguage de .de
+ AddLanguage el .el
+ AddLanguage he .he
+ AddCharset ISO-8859-8 .iso8859-8
+ AddLanguage it .it
+ AddLanguage ja .ja
+ AddCharset ISO-2022-JP .jis
+ AddLanguage kr .kr
+ AddCharset ISO-2022-KR .iso-kr
+ AddLanguage nn .nn
+ AddLanguage no .no
+ AddLanguage pl .po
+ AddCharset ISO-8859-2 .iso-pl
+ AddLanguage pt .pt
+ AddLanguage pt-br .pt-br
+ AddLanguage ltz .lu
+ AddLanguage ca .ca
+ AddLanguage es .es
+ AddLanguage sv .sv
+ AddLanguage cs .cz .cs
+ AddLanguage ru .ru
+ AddLanguage zh-TW .zh-tw
+ AddCharset Big5 .Big5 .big5
+ AddCharset WINDOWS-1251 .cp-1251
+ AddCharset CP866 .cp866
+ AddCharset ISO-8859-5 .iso-ru
+ AddCharset KOI8-R .koi8-r
+ AddCharset UCS-2 .ucs2
+ AddCharset UCS-4 .ucs4
+ AddCharset UTF-8 .utf8
+
+ # LanguagePriority allows you to give precedence to some languages
+ # in case of a tie during content negotiation.
+ #
+ # Just list the languages in decreasing order of preference. We have
+ # more or less alphabetized them here. You probably want to change this.
+ #
+ <IfModule negotiation_module>
+ LanguagePriority en da nl et fr de el it ja kr no pl pt pt-br ru ltz ca es sv tw
+ </IfModule>
+
+ #
+ # AddType allows you to tweak mime.types without actually editing it, or to
+ # make certain files to be certain types.
+ #
+ AddType application/x-tar .tgz
+
+ #
+ # AddEncoding allows you to have certain browsers uncompress
+ # information on the fly. Note: Not all browsers support this.
+ # Despite the name similarity, the following Add* directives have nothing
+ # to do with the FancyIndexing customization directives above.
+ #
+ AddEncoding x-compress .Z
+ AddEncoding x-gzip .gz .tgz
+
+ #
+ # AddHandler allows you to map certain file extensions to "handlers",
+ # actions unrelated to filetype. These can be either built into the server
+ # or added with the Action command (see below)
+ #
+ # If you want to use server side includes, or CGI outside
+ # ScriptAliased directories, uncomment the following lines.
+ #
+ # To use CGI scripts:
+ #
+ AddHandler cgi-script .cgi .pl
+
+ #
+ # To use server-parsed HTML files
+ #
+ AddType text/html .shtml
+ AddHandler server-parsed .shtml
+
+ #
+ # Uncomment the following line to enable Apache's send-asis HTTP file
+ # feature
+ #
+ AddHandler send-as-is asis
+</IfModule>
+
+
+<IfModule php5_module>
+ AddType application/x-httpd-php .php
+ AddType application/x-httpd-php-source .phps
+</IfModule>
+
+<IfModule rewrite_module>
+ RewriteEngine On
+ RewriteCond %{REQUEST_METHOD} ^TRACE
+ RewriteRule .* - [F]
+</IfModule>
diff --git a/chromium/tools/python/google/httpd_config/mime.types b/chromium/tools/python/google/httpd_config/mime.types
new file mode 100644
index 00000000000..6735f1b0e15
--- /dev/null
+++ b/chromium/tools/python/google/httpd_config/mime.types
@@ -0,0 +1,599 @@
+# This is a comment. I love comments.
+
+# This file controls what Internet media types are sent to the client for
+# given file extension(s). Sending the correct media type to the client
+# is important so they know how to handle the content of the file.
+# Extra types can either be added here or by using an AddType directive
+# in your config files. For more information about Internet media types,
+# please read RFC 2045, 2046, 2047, 2048, and 2077. The Internet media type
+# registry is at <http://www.iana.org/assignments/media-types/>.
+
+# MIME type Extensions
+application/activemessage
+application/andrew-inset ez
+application/applefile
+application/atom+xml atom
+application/atomicmail
+application/batch-smtp
+application/beep+xml
+application/cals-1840
+application/cnrp+xml
+application/commonground
+application/cpl+xml
+application/cybercash
+application/dca-rft
+application/dec-dx
+application/dvcs
+application/edi-consent
+application/edifact
+application/edi-x12
+application/eshop
+application/font-tdpfr
+application/http
+application/hyperstudio
+application/iges
+application/index
+application/index.cmd
+application/index.obj
+application/index.response
+application/index.vnd
+application/iotp
+application/ipp
+application/isup
+application/mac-binhex40 hqx
+application/mac-compactpro cpt
+application/macwriteii
+application/marc
+application/mathematica
+application/mathml+xml mathml
+application/msword doc
+application/news-message-id
+application/news-transmission
+application/ocsp-request
+application/ocsp-response
+application/octet-stream bin dms lha lzh exe class so dll dmg
+application/oda oda
+application/ogg ogg
+application/parityfec
+application/pdf pdf
+application/pgp-encrypted
+application/pgp-keys
+application/pgp-signature
+application/pkcs10
+application/pkcs7-mime
+application/pkcs7-signature
+application/pkix-cert
+application/pkix-crl
+application/pkixcmp
+application/postscript ai eps ps
+application/prs.alvestrand.titrax-sheet
+application/prs.cww
+application/prs.nprend
+application/prs.plucker
+application/qsig
+application/rdf+xml rdf
+application/reginfo+xml
+application/remote-printing
+application/riscos
+application/rtf
+application/sdp
+application/set-payment
+application/set-payment-initiation
+application/set-registration
+application/set-registration-initiation
+application/sgml
+application/sgml-open-catalog
+application/sieve
+application/slate
+application/smil smi smil
+application/srgs gram
+application/srgs+xml grxml
+application/timestamp-query
+application/timestamp-reply
+application/tve-trigger
+application/vemmi
+application/vnd.3gpp.pic-bw-large
+application/vnd.3gpp.pic-bw-small
+application/vnd.3gpp.pic-bw-var
+application/vnd.3gpp.sms
+application/vnd.3m.post-it-notes
+application/vnd.accpac.simply.aso
+application/vnd.accpac.simply.imp
+application/vnd.acucobol
+application/vnd.acucorp
+application/vnd.adobe.xfdf
+application/vnd.aether.imp
+application/vnd.amiga.ami
+application/vnd.anser-web-certificate-issue-initiation
+application/vnd.anser-web-funds-transfer-initiation
+application/vnd.audiograph
+application/vnd.blueice.multipass
+application/vnd.bmi
+application/vnd.businessobjects
+application/vnd.canon-cpdl
+application/vnd.canon-lips
+application/vnd.cinderella
+application/vnd.claymore
+application/vnd.commerce-battelle
+application/vnd.commonspace
+application/vnd.contact.cmsg
+application/vnd.cosmocaller
+application/vnd.criticaltools.wbs+xml
+application/vnd.ctc-posml
+application/vnd.cups-postscript
+application/vnd.cups-raster
+application/vnd.cups-raw
+application/vnd.curl
+application/vnd.cybank
+application/vnd.data-vision.rdz
+application/vnd.dna
+application/vnd.dpgraph
+application/vnd.dreamfactory
+application/vnd.dxr
+application/vnd.ecdis-update
+application/vnd.ecowin.chart
+application/vnd.ecowin.filerequest
+application/vnd.ecowin.fileupdate
+application/vnd.ecowin.series
+application/vnd.ecowin.seriesrequest
+application/vnd.ecowin.seriesupdate
+application/vnd.enliven
+application/vnd.epson.esf
+application/vnd.epson.msf
+application/vnd.epson.quickanime
+application/vnd.epson.salt
+application/vnd.epson.ssf
+application/vnd.ericsson.quickcall
+application/vnd.eudora.data
+application/vnd.fdf
+application/vnd.ffsns
+application/vnd.fints
+application/vnd.flographit
+application/vnd.framemaker
+application/vnd.fsc.weblaunch
+application/vnd.fujitsu.oasys
+application/vnd.fujitsu.oasys2
+application/vnd.fujitsu.oasys3
+application/vnd.fujitsu.oasysgp
+application/vnd.fujitsu.oasysprs
+application/vnd.fujixerox.ddd
+application/vnd.fujixerox.docuworks
+application/vnd.fujixerox.docuworks.binder
+application/vnd.fut-misnet
+application/vnd.grafeq
+application/vnd.groove-account
+application/vnd.groove-help
+application/vnd.groove-identity-message
+application/vnd.groove-injector
+application/vnd.groove-tool-message
+application/vnd.groove-tool-template
+application/vnd.groove-vcard
+application/vnd.hbci
+application/vnd.hhe.lesson-player
+application/vnd.hp-hpgl
+application/vnd.hp-hpid
+application/vnd.hp-hps
+application/vnd.hp-pcl
+application/vnd.hp-pclxl
+application/vnd.httphone
+application/vnd.hzn-3d-crossword
+application/vnd.ibm.afplinedata
+application/vnd.ibm.electronic-media
+application/vnd.ibm.minipay
+application/vnd.ibm.modcap
+application/vnd.ibm.rights-management
+application/vnd.ibm.secure-container
+application/vnd.informix-visionary
+application/vnd.intercon.formnet
+application/vnd.intertrust.digibox
+application/vnd.intertrust.nncp
+application/vnd.intu.qbo
+application/vnd.intu.qfx
+application/vnd.irepository.package+xml
+application/vnd.is-xpr
+application/vnd.japannet-directory-service
+application/vnd.japannet-jpnstore-wakeup
+application/vnd.japannet-payment-wakeup
+application/vnd.japannet-registration
+application/vnd.japannet-registration-wakeup
+application/vnd.japannet-setstore-wakeup
+application/vnd.japannet-verification
+application/vnd.japannet-verification-wakeup
+application/vnd.jisp
+application/vnd.kde.karbon
+application/vnd.kde.kchart
+application/vnd.kde.kformula
+application/vnd.kde.kivio
+application/vnd.kde.kontour
+application/vnd.kde.kpresenter
+application/vnd.kde.kspread
+application/vnd.kde.kword
+application/vnd.kenameaapp
+application/vnd.koan
+application/vnd.liberty-request+xml
+application/vnd.llamagraphics.life-balance.desktop
+application/vnd.llamagraphics.life-balance.exchange+xml
+application/vnd.lotus-1-2-3
+application/vnd.lotus-approach
+application/vnd.lotus-freelance
+application/vnd.lotus-notes
+application/vnd.lotus-organizer
+application/vnd.lotus-screencam
+application/vnd.lotus-wordpro
+application/vnd.mcd
+application/vnd.mediastation.cdkey
+application/vnd.meridian-slingshot
+application/vnd.micrografx.flo
+application/vnd.micrografx.igx
+application/vnd.mif mif
+application/vnd.minisoft-hp3000-save
+application/vnd.mitsubishi.misty-guard.trustweb
+application/vnd.mobius.daf
+application/vnd.mobius.dis
+application/vnd.mobius.mbk
+application/vnd.mobius.mqy
+application/vnd.mobius.msl
+application/vnd.mobius.plc
+application/vnd.mobius.txf
+application/vnd.mophun.application
+application/vnd.mophun.certificate
+application/vnd.motorola.flexsuite
+application/vnd.motorola.flexsuite.adsi
+application/vnd.motorola.flexsuite.fis
+application/vnd.motorola.flexsuite.gotap
+application/vnd.motorola.flexsuite.kmr
+application/vnd.motorola.flexsuite.ttc
+application/vnd.motorola.flexsuite.wem
+application/vnd.mozilla.xul+xml xul
+application/vnd.ms-artgalry
+application/vnd.ms-asf
+application/vnd.ms-excel xls
+application/vnd.ms-lrm
+application/vnd.ms-powerpoint ppt
+application/vnd.ms-project
+application/vnd.ms-tnef
+application/vnd.ms-works
+application/vnd.ms-wpl
+application/vnd.mseq
+application/vnd.msign
+application/vnd.music-niff
+application/vnd.musician
+application/vnd.netfpx
+application/vnd.noblenet-directory
+application/vnd.noblenet-sealer
+application/vnd.noblenet-web
+application/vnd.novadigm.edm
+application/vnd.novadigm.edx
+application/vnd.novadigm.ext
+application/vnd.obn
+application/vnd.osa.netdeploy
+application/vnd.palm
+application/vnd.pg.format
+application/vnd.pg.osasli
+application/vnd.powerbuilder6
+application/vnd.powerbuilder6-s
+application/vnd.powerbuilder7
+application/vnd.powerbuilder7-s
+application/vnd.powerbuilder75
+application/vnd.powerbuilder75-s
+application/vnd.previewsystems.box
+application/vnd.publishare-delta-tree
+application/vnd.pvi.ptid1
+application/vnd.pwg-multiplexed
+application/vnd.pwg-xhtml-print+xml
+application/vnd.quark.quarkxpress
+application/vnd.rapid
+application/vnd.rn-realmedia rm
+application/vnd.s3sms
+application/vnd.sealed.net
+application/vnd.seemail
+application/vnd.shana.informed.formdata
+application/vnd.shana.informed.formtemplate
+application/vnd.shana.informed.interchange
+application/vnd.shana.informed.package
+application/vnd.smaf
+application/vnd.sss-cod
+application/vnd.sss-dtf
+application/vnd.sss-ntf
+application/vnd.street-stream
+application/vnd.svd
+application/vnd.swiftview-ics
+application/vnd.triscape.mxs
+application/vnd.trueapp
+application/vnd.truedoc
+application/vnd.ufdl
+application/vnd.uplanet.alert
+application/vnd.uplanet.alert-wbxml
+application/vnd.uplanet.bearer-choice
+application/vnd.uplanet.bearer-choice-wbxml
+application/vnd.uplanet.cacheop
+application/vnd.uplanet.cacheop-wbxml
+application/vnd.uplanet.channel
+application/vnd.uplanet.channel-wbxml
+application/vnd.uplanet.list
+application/vnd.uplanet.list-wbxml
+application/vnd.uplanet.listcmd
+application/vnd.uplanet.listcmd-wbxml
+application/vnd.uplanet.signal
+application/vnd.vcx
+application/vnd.vectorworks
+application/vnd.vidsoft.vidconference
+application/vnd.visio
+application/vnd.visionary
+application/vnd.vividence.scriptfile
+application/vnd.vsf
+application/vnd.wap.sic
+application/vnd.wap.slc
+application/vnd.wap.wbxml wbxml
+application/vnd.wap.wmlc wmlc
+application/vnd.wap.wmlscriptc wmlsc
+application/vnd.webturbo
+application/vnd.wrq-hp3000-labelled
+application/vnd.wt.stf
+application/vnd.wv.csp+wbxml
+application/vnd.xara
+application/vnd.xfdl
+application/vnd.yamaha.hv-dic
+application/vnd.yamaha.hv-script
+application/vnd.yamaha.hv-voice
+application/vnd.yellowriver-custom-menu
+application/voicexml+xml vxml
+application/watcherinfo+xml
+application/whoispp-query
+application/whoispp-response
+application/wita
+application/wordperfect5.1
+application/x-bcpio bcpio
+application/x-cdlink vcd
+application/x-chess-pgn pgn
+application/x-compress
+application/x-cpio cpio
+application/x-csh csh
+application/x-director dcr dir dxr
+application/x-dvi dvi
+application/x-futuresplash spl
+application/x-gtar gtar
+application/x-gzip
+application/x-hdf hdf
+application/x-javascript js
+application/x-java-jnlp-file jnlp
+application/x-koan skp skd skt skm
+application/x-latex latex
+application/x-netcdf nc cdf
+application/x-sh sh
+application/x-shar shar
+application/x-shockwave-flash swf
+application/x-stuffit sit
+application/x-sv4cpio sv4cpio
+application/x-sv4crc sv4crc
+application/x-tar tar
+application/x-tcl tcl
+application/x-tex tex
+application/x-texinfo texinfo texi
+application/x-troff t tr roff
+application/x-troff-man man
+application/x-troff-me me
+application/x-troff-ms ms
+application/x-ustar ustar
+application/x-wais-source src
+application/x400-bp
+application/xhtml+xml xhtml xht
+application/xslt+xml xslt
+application/xml xml xsl
+application/xml-dtd dtd
+application/xml-external-parsed-entity
+application/zip zip
+audio/32kadpcm
+audio/amr
+audio/amr-wb
+audio/basic au snd
+audio/cn
+audio/dat12
+audio/dsr-es201108
+audio/dvi4
+audio/evrc
+audio/evrc0
+audio/g722
+audio/g.722.1
+audio/g723
+audio/g726-16
+audio/g726-24
+audio/g726-32
+audio/g726-40
+audio/g728
+audio/g729
+audio/g729D
+audio/g729E
+audio/gsm
+audio/gsm-efr
+audio/l8
+audio/l16
+audio/l20
+audio/l24
+audio/lpc
+audio/midi mid midi kar
+audio/mpa
+audio/mpa-robust
+audio/mp4a-latm m4a m4p
+audio/mpeg mpga mp2 mp3
+audio/parityfec
+audio/pcma
+audio/pcmu
+audio/prs.sid
+audio/qcelp
+audio/red
+audio/smv
+audio/smv0
+audio/telephone-event
+audio/tone
+audio/vdvi
+audio/vnd.3gpp.iufp
+audio/vnd.cisco.nse
+audio/vnd.cns.anp1
+audio/vnd.cns.inf1
+audio/vnd.digital-winds
+audio/vnd.everad.plj
+audio/vnd.lucent.voice
+audio/vnd.nortel.vbk
+audio/vnd.nuera.ecelp4800
+audio/vnd.nuera.ecelp7470
+audio/vnd.nuera.ecelp9600
+audio/vnd.octel.sbc
+audio/vnd.qcelp
+audio/vnd.rhetorex.32kadpcm
+audio/vnd.vmx.cvsd
+audio/x-aiff aif aiff aifc
+audio/x-alaw-basic
+audio/x-mpegurl m3u
+audio/x-pn-realaudio ram ra
+audio/x-pn-realaudio-plugin
+audio/x-wav wav
+chemical/x-pdb pdb
+chemical/x-xyz xyz
+image/bmp bmp
+image/cgm cgm
+image/g3fax
+image/gif gif
+image/ief ief
+image/jpeg jpeg jpg jpe
+image/jp2 jp2
+image/naplps
+image/pict pict pic pct
+image/png png
+image/prs.btif
+image/prs.pti
+image/svg+xml svg
+image/t38
+image/tiff tiff tif
+image/tiff-fx
+image/vnd.cns.inf2
+image/vnd.djvu djvu djv
+image/vnd.dwg
+image/vnd.dxf
+image/vnd.fastbidsheet
+image/vnd.fpx
+image/vnd.fst
+image/vnd.fujixerox.edmics-mmr
+image/vnd.fujixerox.edmics-rlc
+image/vnd.globalgraphics.pgb
+image/vnd.mix
+image/vnd.ms-modi
+image/vnd.net-fpx
+image/vnd.svf
+image/vnd.wap.wbmp wbmp
+image/vnd.xiff
+image/x-cmu-raster ras
+image/x-macpaint pntg pnt mac
+image/x-icon ico
+image/x-portable-anymap pnm
+image/x-portable-bitmap pbm
+image/x-portable-graymap pgm
+image/x-portable-pixmap ppm
+image/x-quicktime qtif qti
+image/x-rgb rgb
+image/x-xbitmap xbm
+image/x-xpixmap xpm
+image/x-xwindowdump xwd
+message/delivery-status
+message/disposition-notification
+message/external-body
+message/http
+message/news
+message/partial
+message/rfc822
+message/s-http
+message/sip
+message/sipfrag
+model/iges igs iges
+model/mesh msh mesh silo
+model/vnd.dwf
+model/vnd.flatland.3dml
+model/vnd.gdl
+model/vnd.gs-gdl
+model/vnd.gtw
+model/vnd.mts
+model/vnd.parasolid.transmit.binary
+model/vnd.parasolid.transmit.text
+model/vnd.vtu
+model/vrml wrl vrml
+multipart/alternative
+multipart/appledouble
+multipart/byteranges
+multipart/digest
+multipart/encrypted
+multipart/form-data
+multipart/header-set
+multipart/mixed
+multipart/parallel
+multipart/related
+multipart/report
+multipart/signed
+multipart/voice-message
+text/calendar ics ifb
+text/css css
+text/directory
+text/enriched
+text/html html htm
+text/parityfec
+text/plain asc txt
+text/prs.lines.tag
+text/rfc822-headers
+text/richtext rtx
+text/rtf rtf
+text/sgml sgml sgm
+text/t140
+text/tab-separated-values tsv
+text/uri-list
+text/vnd.abc
+text/vnd.curl
+text/vnd.dmclientscript
+text/vnd.fly
+text/vnd.fmi.flexstor
+text/vnd.in3d.3dml
+text/vnd.in3d.spot
+text/vnd.iptc.nitf
+text/vnd.iptc.newsml
+text/vnd.latex-z
+text/vnd.motorola.reflex
+text/vnd.ms-mediapackage
+text/vnd.net2phone.commcenter.command
+text/vnd.sun.j2me.app-descriptor
+text/vnd.wap.si
+text/vnd.wap.sl
+text/vnd.wap.wml wml
+text/vnd.wap.wmlscript wmls
+text/x-setext etx
+text/xml
+text/xml-external-parsed-entity
+video/bmpeg
+video/bt656
+video/celb
+video/dv
+video/h261
+video/h263
+video/h263-1998
+video/h263-2000
+video/jpeg
+video/mp1s
+video/mp2p
+video/mp2t
+video/mp4 mp4
+video/mp4v-es
+video/mpv
+video/mpeg mpeg mpg mpe
+video/nv
+video/parityfec
+video/pointer
+video/quicktime qt mov
+video/smpte292m
+video/vnd.fvt
+video/vnd.motorola.video
+video/vnd.motorola.videop
+video/vnd.mpegurl mxu m4u
+video/vnd.nokia.interleaved-multimedia
+video/vnd.objectvideo
+video/vnd.vivo
+video/x-dv dv dif
+video/x-msvideo avi
+video/x-sgi-movie movie
+x-conference/x-cooltalk ice
diff --git a/chromium/tools/python/google/httpd_config/root_ca_cert.pem b/chromium/tools/python/google/httpd_config/root_ca_cert.pem
new file mode 100644
index 00000000000..0e58f2ab5a9
--- /dev/null
+++ b/chromium/tools/python/google/httpd_config/root_ca_cert.pem
@@ -0,0 +1,102 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEAq6OEFgWu9ICFgaeoWfq7Dl57BNzERHpBBTedRaFr3uj+D4nT
+OXjraAFPFcBLE6RMJZXtpLvZrfdUDPEzTtcliLAoXmQB8DN8TTvYXEgEr3dSb+qZ
+sAfmbbtjnjOtGJQwlkb0QdZp4+5V3vrD1DbT0XGHKDu4/Estvzzi+4zo+plEDL1d
+y+Op9g09HOu2gB6+pVG1YAR3ckeWFw2ORO76xF+rMRbcaJqfmnmUBLkPFN/Bmvo3
+q39wuIDdSCXtvUNnAcEynXah/sFk2AB3c9E/IYaScuiRNkWEi7cUXrAyXKPtMNo2
+RdvfVUEYz/42N+270wkf1taR0thfcwJS06oNIwIDAQABAoIBAQCml41KurV3h48N
+qydSPnR0m+uXapftRyP45mP2A5lpwIdnmYS00dLbHfoeyIeWRu/Jw13neJTtr5Ob
+BDmimlxJ8YEdLMBc3fwWErILz7MMhCo1imUlYGWduDYWhV3K73rBQZkulegtXVVn
+mcmdHqI1NZXTzTPdXTLH3VcKhVhUcftA+foSmY5Sr2EgWL34lkF0ciWSbYf4dIAc
+RS+DikEpAM552hPu9+oo0+5aheH9ijuF4SCRZcvToC2YaTUR5OG4j31oGtDMXmeu
+5alBhaUZqS3b8FX8Q2GbqJ4sR+2XrgovrVteOGN0QvoExZeZIdt6ArqArKGQ/hQf
+gPITpX7RAoGBANp/9E3a+CB9fculDW/Vdpo2jVMLFmlMzWCxA5V0BwIgx4yCKgK7
+Zx1gsO+vaOKu1uLfAj5wWd1qNRbAAvL0CpSkEqfteHInyyN6muSWuDEELCIYU/0u
+IdhPH7yU/SYKsZK3qnOZBwSJMI0iJAKXO0qVYepTtN3MXzhIWkaPqoj9AoGBAMkY
+qjS/QDmyIukeUrhpEn6F2nh6/Upek0+xFUP/0SIeaCAfFiJ9+mFLSDLQ0Rvsq5G6
+glfMFaQz/+610+6YZ5ioEPUlVMAx0qNZRS8Gy97MDoe9Z+PmbKdvbZXLTP6nhpGy
+9Nhilog+U8MZCwiklDEKyldQBg/TghMsqhYEyFifAoGBAIGJ9obp2M06WQgQ1ISG
+44fN679eBW1sUR6QE3XZkgYa9PNCgDGadSmfl4aiUeaCjXd3i1cFOOHiA5N3RshP
+Eq7JDx5r1UqoePCR0Z7QFGdK9/dGwRSK8xnQ3xnooqSZFmnLZcye3uA7jYM+yGaY
+zlgLdD9+XO5aKeGuU4cdyZbpAoGAIm8cxgdxoe+E1tIga/9h8VROQo8czB1BsEWs
+VIZ7cxIt1kI8cg+MThir9jeXduahwkRjiVEi64kT6/YqohJqutMchD2uM3LUlK/2
+jsTlJ/NdEZU3MukD9e+4ngu/1WMuQciY8GyBRjcV9LRXFVXJIlSsrSLAxvj6rvnY
+7ghHnhUCgYBiKCwWTxuoh450TwZMv8X3/6+88w6CieUa0EvdVDcmSQ1cnlAw/7Uh
+inzU7HizHG2tq8Rg5HjaXTnkNhacT9tg2WELSH8Cay3287CmIhdkvZMJIPRhMMCW
+NUQMCtufqhgyX4Qn0W1dbhBzyuyfsj1Ec3BrUp1aPHr0QRm4HdsAIA==
+-----END RSA PRIVATE KEY-----
+
+Certificate:
+ Data:
+ Version: 3 (0x2)
+ Serial Number: 12573969073242583322 (0xae7fb6409b5f311a)
+ Signature Algorithm: sha1WithRSAEncryption
+ Issuer: CN=Test Root CA
+ Validity
+ Not Before: Jun 18 19:52:02 2013 GMT
+ Not After : Jun 16 19:52:02 2023 GMT
+ Subject: CN=Test Root CA
+ Subject Public Key Info:
+ Public Key Algorithm: rsaEncryption
+ Public-Key: (2048 bit)
+ Modulus:
+ 00:ab:a3:84:16:05:ae:f4:80:85:81:a7:a8:59:fa:
+ bb:0e:5e:7b:04:dc:c4:44:7a:41:05:37:9d:45:a1:
+ 6b:de:e8:fe:0f:89:d3:39:78:eb:68:01:4f:15:c0:
+ 4b:13:a4:4c:25:95:ed:a4:bb:d9:ad:f7:54:0c:f1:
+ 33:4e:d7:25:88:b0:28:5e:64:01:f0:33:7c:4d:3b:
+ d8:5c:48:04:af:77:52:6f:ea:99:b0:07:e6:6d:bb:
+ 63:9e:33:ad:18:94:30:96:46:f4:41:d6:69:e3:ee:
+ 55:de:fa:c3:d4:36:d3:d1:71:87:28:3b:b8:fc:4b:
+ 2d:bf:3c:e2:fb:8c:e8:fa:99:44:0c:bd:5d:cb:e3:
+ a9:f6:0d:3d:1c:eb:b6:80:1e:be:a5:51:b5:60:04:
+ 77:72:47:96:17:0d:8e:44:ee:fa:c4:5f:ab:31:16:
+ dc:68:9a:9f:9a:79:94:04:b9:0f:14:df:c1:9a:fa:
+ 37:ab:7f:70:b8:80:dd:48:25:ed:bd:43:67:01:c1:
+ 32:9d:76:a1:fe:c1:64:d8:00:77:73:d1:3f:21:86:
+ 92:72:e8:91:36:45:84:8b:b7:14:5e:b0:32:5c:a3:
+ ed:30:da:36:45:db:df:55:41:18:cf:fe:36:37:ed:
+ bb:d3:09:1f:d6:d6:91:d2:d8:5f:73:02:52:d3:aa:
+ 0d:23
+ Exponent: 65537 (0x10001)
+ X509v3 extensions:
+ X509v3 Basic Constraints: critical
+ CA:TRUE
+ X509v3 Subject Key Identifier:
+ 2B:88:93:E1:D2:54:50:F4:B8:A4:20:BD:B1:79:E6:0B:AA:EB:EC:1A
+ X509v3 Key Usage: critical
+ Certificate Sign, CRL Sign
+ Signature Algorithm: sha1WithRSAEncryption
+ a8:58:42:e4:7c:b1:46:11:ee:56:b7:09:08:fb:06:44:f0:a9:
+ 60:03:f0:05:23:09:3c:36:d6:28:1b:e5:d6:61:15:a0:6f:de:
+ 69:ac:28:58:05:f1:ce:9b:61:c2:58:b0:5d:ed:6c:75:44:e2:
+ 68:01:91:59:b1:4f:f3:51:f2:23:f6:47:42:41:57:26:4f:87:
+ 1e:d2:9f:94:3a:e2:d0:4e:6f:02:d2:92:76:2c:0a:dd:58:93:
+ e1:47:b9:02:a3:3d:75:b4:ba:24:70:87:32:87:cf:76:4e:a0:
+ 41:8b:86:42:18:55:ed:a5:ae:5d:6a:3a:8c:28:70:4c:f1:c5:
+ 36:6c:ec:01:a9:d6:51:39:32:31:30:24:82:9f:88:d9:f5:c1:
+ 09:6b:5a:6b:f1:95:d3:9d:3f:e0:42:63:fc:b7:32:90:55:56:
+ f2:76:1b:71:38:bd:bd:fb:3b:23:50:46:4c:2c:4e:49:48:52:
+ ea:05:5f:16:f2:98:51:af:2f:79:36:2a:a0:ba:36:68:1b:29:
+ 8b:7b:e8:8c:ea:73:31:e5:86:d7:2c:d8:56:06:43:d7:72:d2:
+ f0:27:4e:64:0a:2b:27:38:36:cd:be:c1:33:db:74:4b:4e:74:
+ be:21:bd:f6:81:66:d2:fd:2b:7f:f4:55:36:c0:ed:a7:44:ca:
+ b1:78:1d:0f
+-----BEGIN CERTIFICATE-----
+MIIC8zCCAdugAwIBAgIJAK5/tkCbXzEaMA0GCSqGSIb3DQEBBQUAMBcxFTATBgNV
+BAMMDFRlc3QgUm9vdCBDQTAeFw0xMzA2MTgxOTUyMDJaFw0yMzA2MTYxOTUyMDJa
+MBcxFTATBgNVBAMMDFRlc3QgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAKujhBYFrvSAhYGnqFn6uw5eewTcxER6QQU3nUWha97o/g+J0zl4
+62gBTxXASxOkTCWV7aS72a33VAzxM07XJYiwKF5kAfAzfE072FxIBK93Um/qmbAH
+5m27Y54zrRiUMJZG9EHWaePuVd76w9Q209Fxhyg7uPxLLb884vuM6PqZRAy9Xcvj
+qfYNPRzrtoAevqVRtWAEd3JHlhcNjkTu+sRfqzEW3Gian5p5lAS5DxTfwZr6N6t/
+cLiA3Ugl7b1DZwHBMp12of7BZNgAd3PRPyGGknLokTZFhIu3FF6wMlyj7TDaNkXb
+31VBGM/+Njftu9MJH9bWkdLYX3MCUtOqDSMCAwEAAaNCMEAwDwYDVR0TAQH/BAUw
+AwEB/zAdBgNVHQ4EFgQUK4iT4dJUUPS4pCC9sXnmC6rr7BowDgYDVR0PAQH/BAQD
+AgEGMA0GCSqGSIb3DQEBBQUAA4IBAQCoWELkfLFGEe5WtwkI+wZE8KlgA/AFIwk8
+NtYoG+XWYRWgb95prChYBfHOm2HCWLBd7Wx1ROJoAZFZsU/zUfIj9kdCQVcmT4ce
+0p+UOuLQTm8C0pJ2LArdWJPhR7kCoz11tLokcIcyh892TqBBi4ZCGFXtpa5dajqM
+KHBM8cU2bOwBqdZROTIxMCSCn4jZ9cEJa1pr8ZXTnT/gQmP8tzKQVVbydhtxOL29
++zsjUEZMLE5JSFLqBV8W8phRry95NiqgujZoGymLe+iM6nMx5YbXLNhWBkPXctLw
+J05kCisnODbNvsEz23RLTnS+Ib32gWbS/St/9FU2wO2nRMqxeB0P
+-----END CERTIFICATE-----
diff --git a/chromium/tools/python/google/httpd_utils.py b/chromium/tools/python/google/httpd_utils.py
new file mode 100755
index 00000000000..4a03ffc7fb0
--- /dev/null
+++ b/chromium/tools/python/google/httpd_utils.py
@@ -0,0 +1,200 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A class to help start/stop a local apache http server."""
+
+import logging
+import optparse
+import os
+import subprocess
+import sys
+import time
+import urllib
+
+import google.path_utils
+import google.platform_utils
+
# Raised by ApacheHttpd.StartServer when the spawned httpd fails to respond
# on one of its expected ports.
class HttpdNotStarted(Exception): pass
+
def UrlIsAlive(url):
  """Checks to see if we get an http response from |url|.

  We poll the url up to 5 times with a 1 second delay between attempts.
  If we don't get a reply in that time, we give up and assume the httpd
  didn't start properly.

  Args:
    url: The URL to check.
  Returns:
    True if the url is alive.
  """
  attempts = 5
  while attempts > 0:
    try:
      urllib.urlopen(url)
      # Server is up and responding.
      return True
    except IOError:
      pass
    attempts -= 1
    if attempts > 0:
      # Wait a second and try again.  (Don't sleep after the final attempt;
      # the original version wasted a second before giving up.)
      time.sleep(1)

  return False
+
def ApacheConfigDir(start_dir):
  """Returns a path to the directory holding the Apache config files."""
  config_components = ('tools', 'python', 'google', 'httpd_config')
  return google.path_utils.FindUpward(start_dir, *config_components)
+
+
def GetCygserverPath(start_dir, apache2=False):
  """Returns the path to the directory holding cygserver.exe file."""
  if not apache2:
    # Apache 1.3 does not use cygserver.
    return None
  return google.path_utils.FindUpward(start_dir, 'third_party',
                                      'cygwin', 'usr', 'sbin')
+
+
def StartServer(document_root=None, output_dir=None, apache2=False):
  """Starts a local server on port 8000 using the basic configuration files.

  Args:
    document_root: If present, specifies the document root for the server;
        otherwise, the filesystem's root (e.g., C:/ or /) will be used.
    output_dir: If present, specifies where to put server logs; otherwise,
        they'll be placed in the system's temp dir (e.g., $TEMP or /tmp).
    apache2: boolean if true will cause this function to configure
        for Apache 2.x as opposed to Apache 1.3.x

  Returns: the ApacheHttpd object that was created
  """
  script_dir = google.path_utils.ScriptDir()
  platform_util = google.platform_utils.PlatformUtility(script_dir)
  # Fall back to platform defaults for anything the caller didn't supply.
  output_dir = output_dir or platform_util.GetTempDirectory()
  document_root = document_root or platform_util.GetFilesystemRoot()

  config_dir = ApacheConfigDir(script_dir)
  conf_name = 'httpd2.conf' if apache2 else 'httpd.conf'
  httpd_conf_path = os.path.join(config_dir, conf_name)
  mime_types_path = os.path.join(config_dir, 'mime.types')

  start_cmd = platform_util.GetStartHttpdCommand(output_dir,
                                                 httpd_conf_path,
                                                 mime_types_path,
                                                 document_root,
                                                 apache2=apache2)
  stop_cmd = platform_util.GetStopHttpdCommand()
  httpd = ApacheHttpd(start_cmd, stop_cmd, [8000],
                      cygserver_path=GetCygserverPath(script_dir, apache2))
  httpd.StartServer()
  return httpd
+
+
def StopServers(apache2=False):
  """Calls the platform's stop command on a newly created server, forcing it
  to stop.

  The details depend on the behavior of the platform stop command. For example,
  it's often implemented to kill all running httpd processes, as implied by
  the name of this function.

  Args:
    apache2: boolean if true will cause this function to configure
        for Apache 2.x as opposed to Apache 1.3.x
  """
  script_dir = google.path_utils.ScriptDir()
  platform_util = google.platform_utils.PlatformUtility(script_dir)
  # A throwaway server object with no start command and no ports -- just
  # enough state to issue the platform's stop command.
  dummy_httpd = ApacheHttpd('', platform_util.GetStopHttpdCommand(), [],
                            cygserver_path=GetCygserverPath(script_dir,
                                                            apache2))
  dummy_httpd.StopServer(force=True)
+
+
class ApacheHttpd(object):
  """Wraps platform-supplied start/stop command lines for an Apache httpd,
  optionally launching Cygwin's cygserver helper alongside it (Windows).
  """

  def __init__(self, start_command, stop_command, port_list,
               cygserver_path=None):
    """Args:
      start_command: command list to call to start the httpd
      stop_command: command list to call to stop the httpd if one has been
          started.  May kill all httpd processes running on the machine.
      port_list: list of ports expected to respond on the local machine when
          the server has been successfully started.
      cygserver_path: Path to cygserver.exe. If specified, exe will be started
          with server as well as stopped when server is stopped.
    """
    self._http_server_proc = None
    self._start_command = start_command
    self._stop_command = stop_command
    self._port_list = port_list
    self._cygserver_path = cygserver_path

  def StartServer(self):
    """Starts httpd (and cygserver, if configured), then waits until every
    port in |port_list| responds.

    Raises:
      HttpdNotStarted: if any expected port never comes up.
    """
    if self._http_server_proc:
      return  # Already running.
    env = None
    if self._cygserver_path:
      cygserver_exe = os.path.join(self._cygserver_path, "cygserver.exe")
      cygbin = google.path_utils.FindUpward(cygserver_exe, 'third_party',
                                            'cygwin', 'bin')
      # Augment a *copy* of the environment rather than os.environ itself:
      # the previous code aliased os.environ and permanently polluted PATH
      # for the rest of this process.  Both children get the augmented copy.
      env = os.environ.copy()
      env['PATH'] += ";" + cygbin
      subprocess.Popen(cygserver_exe, env=env)
    logging.info('Starting http server')
    # env=None means "inherit", matching the non-cygserver behavior.
    self._http_server_proc = subprocess.Popen(self._start_command, env=env)

    # Ensure that the server is running on all the desired ports.
    for port in self._port_list:
      if not UrlIsAlive('http://127.0.0.1:%s/' % str(port)):
        raise HttpdNotStarted('Failed to start httpd on port %s' % str(port))

  def StopServer(self, force=False):
    """If we started an httpd.exe process, or if force is True, call
    self._stop_command (passed in on init so it can be platform-dependent).
    This will presumably kill it, and may also kill any other httpd.exe
    processes that are running.
    """
    if force or self._http_server_proc:
      logging.info('Stopping http server')
      kill_proc = subprocess.Popen(self._stop_command,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
      logging.info('%s\n%s' % (kill_proc.stdout.read(),
                               kill_proc.stderr.read()))
      self._http_server_proc = None
      if self._cygserver_path:
        # Also shut down the cygserver helper we may have started.
        subprocess.Popen(["taskkill.exe", "/f", "/im", "cygserver.exe"],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
+
+
def main():
  """Command-line entry point for manually starting/stopping the server."""
  parser = optparse.OptionParser()
  parser.add_option('-k', '--server', help='Server action (start|stop)')
  parser.add_option('-r', '--root', help='Document root (optional)')
  parser.add_option('-a', '--apache2', action='store_true',
                    default=False,
                    help='Starts Apache 2 instead of Apache 1.3 (default). '
                    'Ignored on Mac (apache2 is used always)')
  options, args = parser.parse_args()

  if not options.server:
    print ("Usage: %s -k {start|stop} [-r document_root] [--apache2]" %
           sys.argv[0])
    return 1

  # An empty --root is treated the same as no --root at all.
  document_root = options.root or None

  if options.server == 'start':
    StartServer(document_root, apache2=options.apache2)
  else:
    StopServers(apache2=options.apache2)
+
+
# Allow manual start/stop of the server from the command line.
if '__main__' == __name__:
  sys.exit(main())
diff --git a/chromium/tools/python/google/logging_utils.py b/chromium/tools/python/google/logging_utils.py
new file mode 100644
index 00000000000..ef2d674950f
--- /dev/null
+++ b/chromium/tools/python/google/logging_utils.py
@@ -0,0 +1,82 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+''' Utility functions and objects for logging.
+'''
+
+import logging
+import sys
+
class StdoutStderrHandler(logging.Handler):
  ''' Subclass of logging.Handler which outputs to either stdout or stderr
  based on a threshold level.
  '''

  def __init__(self, threshold=logging.WARNING, err=sys.stderr, out=sys.stdout):
    ''' Args:
      threshold: below this logging level messages are sent to stdout,
        otherwise they are sent to stderr
      err: a stream object that error messages are sent to, defaults to
        sys.stderr
      out: a stream object that non-error messages are sent to, defaults to
        sys.stdout
    '''
    logging.Handler.__init__(self)
    self._err = logging.StreamHandler(err)
    self._out = logging.StreamHandler(out)
    self._threshold = threshold
    # Tracks which stream received the most recent record, so flush() can
    # drain the staler stream first and keep interleaved output ordered.
    self._last_was_err = False

  def setLevel(self, lvl):
    # Keep the delegate handlers' levels in sync with our own.
    logging.Handler.setLevel(self, lvl)
    self._err.setLevel(lvl)
    self._out.setLevel(lvl)

  def setFormatter(self, formatter):
    # Keep the delegate handlers' formatters in sync with our own.
    logging.Handler.setFormatter(self, formatter)
    self._err.setFormatter(formatter)
    self._out.setFormatter(formatter)

  def emit(self, record):
    if record.levelno < self._threshold:
      self._out.emit(record)
      self._last_was_err = False
    else:
      self._err.emit(record)
      # BUG FIX: this previously set False, which left _last_was_err
      # permanently False and made the flush() ordering logic dead code.
      self._last_was_err = True

  def flush(self):
    # preserve order on the flushing, the stalest stream gets flushed first
    if self._last_was_err:
      self._out.flush()
      self._err.flush()
    else:
      self._err.flush()
      self._out.flush()
+
+
FORMAT = "%(asctime)s %(filename)s [%(levelname)s] %(message)s"
DATEFMT = "%H:%M:%S"

def config_root(level=logging.INFO, threshold=logging.WARNING, format=FORMAT,
                datefmt=DATEFMT):
  ''' Configure the root logger to use a StdoutStderrHandler and some default
  formatting.
  Args:
    level: messages below this level are ignored
    threshold: below this logging level messages are sent to stdout,
      otherwise they are sent to stderr
    format: format for log messages, see logger.Format
    datefmt: format for date in log messages
  '''
  # basicConfig() cannot install a custom handler class on the root logger,
  # so we wire everything up by hand.
  root_logger = logging.getLogger()
  root_logger.setLevel(level)
  handler = StdoutStderrHandler(threshold=threshold)
  handler.setLevel(level)
  handler.setFormatter(logging.Formatter(format, datefmt))
  root_logger.addHandler(handler)
diff --git a/chromium/tools/python/google/path_utils.py b/chromium/tools/python/google/path_utils.py
new file mode 100644
index 00000000000..6ab43120434
--- /dev/null
+++ b/chromium/tools/python/google/path_utils.py
@@ -0,0 +1,84 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Some utility methods for getting and manipulating paths."""
+
+# TODO(pamg): Have the buildbot use these, too.
+
+
+import errno
+import os
+import sys
+
# Raised when an upward search for a file or directory comes up empty.
class PathNotFound(Exception): pass
+
def ScriptDir():
  """Get the full path to the directory containing the current script."""
  return os.path.dirname(os.path.abspath(sys.argv[0]))
+
def FindAncestor(start_dir, ancestor):
  r"""Finds an ancestor dir in a path.

  For example, FindAncestor('c:\foo\bar\baz', 'bar') would return
  'c:\foo\bar'.  Unlike FindUpward*, this only looks at direct path ancestors.
  """
  start_dir = os.path.abspath(start_dir)
  current = start_dir
  # Walk up one path component at a time until the component name matches.
  while True:
    parent, component = os.path.split(current)
    if component == ancestor:
      return current
    if not component:
      # Reached the filesystem root without finding the ancestor.
      break
    current = parent
  raise PathNotFound("Unable to find ancestor %s in %s" % (ancestor, start_dir))
+
def FindUpwardParent(start_dir, *desired_list):
  """Finds the desired object's parent, searching upward from the start_dir.

  Searches start_dir and all its parents looking for the desired directory
  or file, which may be given in one or more path components. Returns the
  first directory in which the top desired path component was found, or raises
  PathNotFound if it wasn't.
  """
  desired_path = os.path.join(*desired_list)
  cur_dir = start_dir
  prev_dir = ''
  while not os.path.exists(os.path.join(cur_dir, desired_path)):
    prev_dir, cur_dir = cur_dir, os.path.dirname(cur_dir)
    if prev_dir == cur_dir:
      # os.path.dirname() is a fixed point once we reach the root.
      raise PathNotFound('Unable to find %s above %s' %
                         (desired_path, start_dir))
  # Strip the desired path (and a trailing separator, if present) from the
  # end of the directory in which it was found.
  found_path = os.path.join(cur_dir, desired_path)
  found_path = found_path[:-len(desired_path)]
  if found_path.endswith(os.sep):
    found_path = found_path[:-1]
  return found_path
+
+
def FindUpward(start_dir, *desired_list):
  """Returns a path to the desired directory or file, searching upward.

  Searches start_dir and all its parents looking for the desired directory
  or file, which may be given in one or more path components. Returns the full
  path to the desired object, or raises PathNotFound if it wasn't found.
  """
  return os.path.join(FindUpwardParent(start_dir, *desired_list),
                      *desired_list)
+
+
def MaybeMakeDirectory(*path):
  """Creates an entire path, if it doesn't already exist.

  Args:
    *path: one or more path components, joined with os.path.join().
  """
  file_path = os.path.join(*path)
  try:
    os.makedirs(file_path)
  # 'except OSError, e' is Python-2-only syntax and a SyntaxError under
  # Python 3; 'as' has been valid since Python 2.6.
  except OSError as e:
    # errno.EEXIST is "File exists". If we see another error, re-raise.
    if e.errno != errno.EEXIST:
      raise
diff --git a/chromium/tools/python/google/platform_utils.py b/chromium/tools/python/google/platform_utils.py
new file mode 100644
index 00000000000..50bfb70cc08
--- /dev/null
+++ b/chromium/tools/python/google/platform_utils.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Platform-specific utilities and pseudo-constants
+
+Any functions whose implementations or values differ from one platform to
+another should be defined in their respective platform_utils_<platform>.py
+modules. The appropriate one of those will be imported into this module to
+provide callers with a common, platform-independent interface.
+"""
+
+import sys
+
+# We may not support the version of Python that a user has installed (Cygwin
+# especially has had problems), but we'll allow the platform utils to be
+# included in any case so we don't get an import error.
+if sys.platform in ('cygwin', 'win32'):
+ from platform_utils_win import *
+elif sys.platform == 'darwin':
+ from platform_utils_mac import *
+elif sys.platform.startswith('linux'):
+ from platform_utils_linux import *
diff --git a/chromium/tools/python/google/platform_utils_linux.py b/chromium/tools/python/google/platform_utils_linux.py
new file mode 100644
index 00000000000..237565e3da8
--- /dev/null
+++ b/chromium/tools/python/google/platform_utils_linux.py
@@ -0,0 +1,148 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Platform-specific utility methods shared by several scripts."""
+
+import os
+import subprocess
+
+import google.path_utils
+
+
class PlatformUtility(object):
  """Linux implementation of the platform helpers used by httpd_utils.py
  (selected via platform_utils.py); builds apache2 start/stop command lines.
  """

  def __init__(self, base_dir):
    """Args:
      base_dir: the base dir for running tests.
    """
    self._base_dir = base_dir
    self._httpd_cmd_string = None # used for starting/stopping httpd
    self._bash = "/bin/bash"

  def _UnixRoot(self):
    """Returns the path to root."""
    return "/"

  def GetFilesystemRoot(self):
    """Returns the root directory of the file system."""
    return self._UnixRoot()

  def GetTempDirectory(self):
    """Returns the file system temp directory.

    Note that this does not use a random subdirectory, so it's not
    intrinsically secure. If you need a secure subdir, use the tempfile
    package.
    """
    return os.getenv("TMPDIR", "/tmp")

  def FilenameToUri(self, path, use_http=False, use_ssl=False, port=8000):
    """Convert a filesystem path to a URI.

    Args:
      path: For an http URI, the path relative to the httpd server's
        DocumentRoot; for a file URI, the full path to the file.
      use_http: if True, returns a URI of the form http://127.0.0.1:8000/.
        If False, returns a file:/// URI.
      use_ssl: if True, returns HTTPS URL (https://127.0.0.1:8000/).
        This parameter is ignored if use_http=False.
      port: The port number to append when returning an HTTP URI
    """
    if use_http:
      protocol = 'http'
      if use_ssl:
        protocol = 'https'
      return "%s://127.0.0.1:%d/%s" % (protocol, port, path)
    return "file://" + path

  def GetStartHttpdCommand(self, output_dir,
                           httpd_conf_path, mime_types_path,
                           document_root=None, apache2=False):
    """Prepares the config file and output directory to start an httpd server.
    Returns a list of strings containing the server's command line+args.

    Args:
      output_dir: the path to the server's output directory, for log files.
        It will be created if necessary.
      httpd_conf_path: full path to the httpd.conf file to be used.
      mime_types_path: full path to the mime.types file to be used.
      document_root: full path to the DocumentRoot. If None, the DocumentRoot
        from the httpd.conf file will be used. Note that the httpd.conf
        file alongside this script does not specify any DocumentRoot, so if
        you're using that one, be sure to specify a document_root here.
      apache2: boolean if true will cause this function to return start
        command for Apache 2.x as opposed to Apache 1.3.x. This flag
        is ignored on Linux (but preserved here for compatibility in
        function signature with win), where apache2 is used always
    """

    exe_name = "apache2"
    cert_file = google.path_utils.FindUpward(self._base_dir, 'tools',
                                             'python', 'google',
                                             'httpd_config', 'httpd2.pem')
    # NOTE(review): SSL is enabled iff mod_ssl is enabled in the Debian/Ubuntu
    # layout -- presumably fine for the buildbots; confirm for other distros.
    ssl_enabled = os.path.exists('/etc/apache2/mods-enabled/ssl.conf')

    # Substitution values for the -C/-c directive templates built below.
    httpd_vars = {
      "httpd_executable_path":
        os.path.join(self._UnixRoot(), "usr", "sbin", exe_name),
      "httpd_conf_path": httpd_conf_path,
      "ssl_certificate_file": cert_file,
      "document_root" : document_root,
      "server_root": os.path.join(self._UnixRoot(), "usr"),
      "mime_types_path": mime_types_path,
      "output_dir": output_dir,
      "ssl_mutex": "file:"+os.path.join(output_dir, "ssl_mutex"),
      "ssl_session_cache":
          "shmcb:" + os.path.join(output_dir, "ssl_scache") + "(512000)",
      "user": os.environ.get("USER", "#%d" % os.geteuid()),
      "lock_file": os.path.join(output_dir, "accept.lock"),
    }

    google.path_utils.MaybeMakeDirectory(output_dir)

    # We have to wrap the command in bash
    # -C: process directive before reading config files
    # -c: process directive after reading config files
    # Apache wouldn't run CGIs with permissions==700 unless we add
    # -c User "<username>"
    httpd_cmd_string = (
        '%(httpd_executable_path)s'
        ' -f %(httpd_conf_path)s'
        ' -c \'TypesConfig "%(mime_types_path)s"\''
        ' -c \'CustomLog "%(output_dir)s/access_log.txt" common\''
        ' -c \'ErrorLog "%(output_dir)s/error_log.txt"\''
        ' -c \'PidFile "%(output_dir)s/httpd.pid"\''
        ' -C \'User "%(user)s"\''
        ' -C \'ServerRoot "%(server_root)s"\''
        ' -c \'LockFile "%(lock_file)s"\''
    )

    if document_root:
      httpd_cmd_string += ' -C \'DocumentRoot "%(document_root)s"\''

    if ssl_enabled:
      httpd_cmd_string += (
          ' -c \'SSLCertificateFile "%(ssl_certificate_file)s"\''
          ' -c \'SSLMutex "%(ssl_mutex)s"\''
          ' -c \'SSLSessionCache "%(ssl_session_cache)s"\''
      )

    # Save a copy of httpd_cmd_string to use for stopping httpd
    self._httpd_cmd_string = httpd_cmd_string % httpd_vars

    httpd_cmd = [self._bash, "-c", self._httpd_cmd_string]
    return httpd_cmd

  def GetStopHttpdCommand(self):
    """Returns a list of strings that contains the command line+args needed to
    stop the http server used in the http tests.

    Reuses the saved start command line with '-k stop' appended, followed by
    a short sleep so the port has time to become available again.
    """

    if not self._httpd_cmd_string:
      return ["true"] # Haven't been asked for the start cmd yet. Just pass.
    # Add a sleep after the shutdown because sometimes it takes some time for
    # the port to be available again.
    return [self._bash, "-c", self._httpd_cmd_string + ' -k stop && sleep 5']
diff --git a/chromium/tools/python/google/platform_utils_mac.py b/chromium/tools/python/google/platform_utils_mac.py
new file mode 100644
index 00000000000..c4686fb16eb
--- /dev/null
+++ b/chromium/tools/python/google/platform_utils_mac.py
@@ -0,0 +1,145 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Platform-specific utility methods shared by several scripts."""
+
+import os
+import subprocess
+
+import google.path_utils
+
+
class PlatformUtility(object):
  """Mac implementation of the platform helpers used by httpd_utils.py
  (selected via platform_utils.py); builds httpd start/stop command lines.
  """

  def __init__(self, base_dir):
    """Args:
      base_dir: the base dir for running tests.
    """
    self._base_dir = base_dir
    self._httpd_cmd_string = None # used for starting/stopping httpd
    self._bash = "/bin/bash"

  def _UnixRoot(self):
    """Returns the path to root."""
    return "/"

  def GetFilesystemRoot(self):
    """Returns the root directory of the file system."""
    return self._UnixRoot()

  def GetTempDirectory(self):
    """Returns the file system temp directory.

    Note that this does not use a random subdirectory, so it's not
    intrinsically secure. If you need a secure subdir, use the tempfile
    package.
    """
    return os.getenv("TMPDIR", "/tmp")

  def FilenameToUri(self, path, use_http=False, use_ssl=False, port=8000):
    """Convert a filesystem path to a URI.

    Args:
      path: For an http URI, the path relative to the httpd server's
        DocumentRoot; for a file URI, the full path to the file.
      use_http: if True, returns a URI of the form http://127.0.0.1:8000/.
        If False, returns a file:/// URI.
      use_ssl: if True, returns HTTPS URL (https://127.0.0.1:8000/).
        This parameter is ignored if use_http=False.
      port: The port number to append when returning an HTTP URI
    """
    if use_http:
      protocol = 'http'
      if use_ssl:
        protocol = 'https'
      return "%s://127.0.0.1:%d/%s" % (protocol, port, path)
    return "file://" + path

  def GetStartHttpdCommand(self, output_dir,
                           httpd_conf_path, mime_types_path,
                           document_root=None, apache2=False):
    """Prepares the config file and output directory to start an httpd server.
    Returns a list of strings containing the server's command line+args.

    Args:
      output_dir: the path to the server's output directory, for log files.
        It will be created if necessary.
      httpd_conf_path: full path to the httpd.conf file to be used.
      mime_types_path: full path to the mime.types file to be used.
      document_root: full path to the DocumentRoot. If None, the DocumentRoot
        from the httpd.conf file will be used. Note that the httpd.conf
        file alongside this script does not specify any DocumentRoot, so if
        you're using that one, be sure to specify a document_root here.
      apache2: boolean if true will cause this function to return start
        command for Apache 2.x as opposed to Apache 1.3.x. This flag
        is ignored on Mac (but preserved here for compatibility in
        function signature with win), where the system httpd is used always
    """

    exe_name = "httpd"
    cert_file = google.path_utils.FindUpward(self._base_dir, 'tools',
                                             'python', 'google',
                                             'httpd_config', 'httpd2.pem')
    # NOTE(review): this probes a Debian-style path; on a stock Mac it will
    # normally not exist, so SSL stays disabled -- confirm this is intended.
    ssl_enabled = os.path.exists('/etc/apache2/mods-enabled/ssl.conf')

    # Substitution values for the -C/-c directive templates built below.
    httpd_vars = {
      "httpd_executable_path":
        os.path.join(self._UnixRoot(), "usr", "sbin", exe_name),
      "httpd_conf_path": httpd_conf_path,
      "ssl_certificate_file": cert_file,
      "document_root" : document_root,
      "server_root": os.path.join(self._UnixRoot(), "usr"),
      "mime_types_path": mime_types_path,
      "output_dir": output_dir,
      "ssl_mutex": "file:"+os.path.join(output_dir, "ssl_mutex"),
      "user": os.environ.get("USER", "#%d" % os.geteuid()),
      "lock_file": os.path.join(output_dir, "accept.lock"),
    }

    google.path_utils.MaybeMakeDirectory(output_dir)

    # We have to wrap the command in bash
    # -C: process directive before reading config files
    # -c: process directive after reading config files
    # Apache wouldn't run CGIs with permissions==700 unless we add
    # -c User "<username>"
    httpd_cmd_string = (
        '%(httpd_executable_path)s'
        ' -f %(httpd_conf_path)s'
        ' -c \'TypesConfig "%(mime_types_path)s"\''
        ' -c \'CustomLog "%(output_dir)s/access_log.txt" common\''
        ' -c \'ErrorLog "%(output_dir)s/error_log.txt"\''
        ' -c \'PidFile "%(output_dir)s/httpd.pid"\''
        ' -C \'User "%(user)s"\''
        ' -C \'ServerRoot "%(server_root)s"\''
        ' -c \'LockFile "%(lock_file)s"\''
    )

    if document_root:
      httpd_cmd_string += ' -C \'DocumentRoot "%(document_root)s"\''

    if ssl_enabled:
      httpd_cmd_string += (
          ' -c \'SSLCertificateFile "%(ssl_certificate_file)s"\''
          ' -c \'SSLMutex "%(ssl_mutex)s"\''
      )

    # Save a copy of httpd_cmd_string to use for stopping httpd
    self._httpd_cmd_string = httpd_cmd_string % httpd_vars

    httpd_cmd = [self._bash, "-c", self._httpd_cmd_string]
    return httpd_cmd

  def GetStopHttpdCommand(self):
    """Returns a list of strings that contains the command line+args needed to
    stop the http server used in the http tests.

    Reuses the saved start command line with '-k stop' appended, followed by
    a short sleep so the port has time to become available again.
    """

    if not self._httpd_cmd_string:
      return ["true"] # Haven't been asked for the start cmd yet. Just pass.
    # Add a sleep after the shutdown because sometimes it takes some time for
    # the port to be available again.
    return [self._bash, "-c", self._httpd_cmd_string + ' -k stop && sleep 5']
diff --git a/chromium/tools/python/google/platform_utils_win.py b/chromium/tools/python/google/platform_utils_win.py
new file mode 100644
index 00000000000..65db96c3faa
--- /dev/null
+++ b/chromium/tools/python/google/platform_utils_win.py
@@ -0,0 +1,194 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Platform-specific utility methods shared by several scripts."""
+
+import os
+import re
+import subprocess
+import sys
+
+import google.path_utils
+
+# Cache a single cygpath process for use throughout, even across instances of
+# the PlatformUtility class.
+_cygpath_proc = None
+
class PlatformUtility(object):
  """Windows/Cygwin implementation of the platform helpers used by
  httpd_utils.py (selected via platform_utils.py).
  """

  def __init__(self, base_dir):
    """Args:
      base_dir: a directory above which third_party/cygwin can be found,
      used to locate the cygpath executable for path conversions.
    """
    # Lazily resolved by _CygwinRoot() on first use.
    self._cygwin_root = None
    self._base_dir = base_dir

  def _CygwinRoot(self):
    """Returns the full path to third_party/cygwin/."""
    if not self._cygwin_root:
      self._cygwin_root = google.path_utils.FindUpward(self._base_dir,
                                                       'third_party', 'cygwin')
    return self._cygwin_root

  def _PathToExecutable(self, executable):
    """Returns the full path to an executable in Cygwin's bin dir."""
    return os.path.join(self._CygwinRoot(), 'bin', executable)

  def GetAbsolutePath(self, path, force=False):
    """Returns an absolute windows path. If platform is cygwin, converts it to
    windows style using cygpath.

    For performance reasons, we use a single cygpath process, shared among all
    instances of this class. Otherwise Python can run out of file handles.
    """
    if not force and sys.platform != "cygwin":
      return os.path.abspath(path)
    global _cygpath_proc
    if not _cygpath_proc:
      # Spawn cygpath once in filter mode (-f -): it reads paths on stdin
      # and writes converted paths on stdout, one per line.
      cygpath_command = [self._PathToExecutable("cygpath.exe"),
                         "-a", "-m", "-f", "-"]
      _cygpath_proc = subprocess.Popen(cygpath_command,
                                       stdin=subprocess.PIPE,
                                       stdout=subprocess.PIPE)
    _cygpath_proc.stdin.write(path + "\n")
    return _cygpath_proc.stdout.readline().rstrip()

  def GetFilesystemRoot(self):
    """Returns the root directory of the file system."""
    return os.environ['SYSTEMDRIVE'] + '\\'

  def GetTempDirectory(self):
    """Returns the file system's base temp directory, or the filesystem root
    if the standard temp directory can't be determined.

    Note that this does not use a random subdirectory, so it's not
    intrinsically secure. If you need a secure subdir, use the tempfile
    package.
    """
    return os.environ.get('TEMP', self.GetFilesystemRoot())

  def FilenameToUri(self, path, use_http=False, use_ssl=False, port=8000):
    """Convert a Windows style path to a URI.

    Args:
      path: For an http URI, the path relative to the httpd server's
        DocumentRoot; for a file URI, the full path to the file.
      use_http: if True, returns a URI of the form http://127.0.0.1:8000/.
        If False, returns a file:/// URI.
      use_ssl: if True, returns HTTPS URL (https://127.0.0.1:8000/).
        This parameter is ignored if use_http=False.
      port: The port number to append when returning an HTTP URI
    """
    if use_http:
      protocol = 'http'
      if use_ssl:
        protocol = 'https'
      path = path.replace("\\", "/")
      return "%s://127.0.0.1:%s/%s" % (protocol, str(port), path)
    return "file:///" + self.GetAbsolutePath(path)

  def GetStartHttpdCommand(self, output_dir,
                           httpd_conf_path, mime_types_path,
                           document_root=None, apache2=False):
    """Prepares the config file and output directory to start an httpd server.
    Returns a list of strings containing the server's command line+args.

    Args:
      output_dir: the path to the server's output directory, for log files.
        It will be created if necessary.
      httpd_conf_path: full path to the httpd.conf file to be used.
      mime_types_path: full path to the mime.types file to be used.
      document_root: full path to the DocumentRoot. If None, the DocumentRoot
        from the httpd.conf file will be used. Note that the httpd.conf
        file alongside this script does not specify any DocumentRoot, so if
        you're using that one, be sure to specify a document_root here.
      apache2: boolean if true will cause this function to return start
        command for Apache 2.x as opposed to Apache 1.3.x
    """

    if document_root:
      document_root = GetCygwinPath(document_root)
    # Apache 1.3 ships no SSL cert here, so cert_file stays empty unless
    # apache2 is requested.
    exe_name = "httpd"
    cert_file = ""
    if apache2:
      exe_name = "httpd2"
      cert_file = google.path_utils.FindUpward(self._base_dir, 'tools',
                                               'python', 'google',
                                               'httpd_config', 'httpd2.pem')
    # Substitution values for the -C/-c directive templates built below;
    # every path is converted to cygwin form for the cygwin-hosted httpd.
    httpd_vars = {
      "httpd_executable_path": GetCygwinPath(
          os.path.join(self._CygwinRoot(), "usr", "sbin", exe_name)),
      "httpd_conf_path": GetCygwinPath(httpd_conf_path),
      "ssl_certificate_file": GetCygwinPath(cert_file),
      "document_root" : document_root,
      "server_root": GetCygwinPath(os.path.join(self._CygwinRoot(), "usr")),
      "mime_types_path": GetCygwinPath(mime_types_path),
      "output_dir": GetCygwinPath(output_dir),
      "bindir": GetCygwinPath(os.path.join(self._CygwinRoot(), "bin")),
      "user": os.environ.get("USERNAME", os.environ.get("USER", "")),
    }
    if not httpd_vars["user"]:
      # Failed to get the username from the environment; use whoami.exe
      # instead.
      proc = subprocess.Popen(self._PathToExecutable("whoami.exe"),
                              stdout=subprocess.PIPE)
      httpd_vars["user"] = proc.stdout.read().strip()

    if not httpd_vars["user"]:
      raise Exception("Failed to get username.")

    google.path_utils.MaybeMakeDirectory(output_dir)

    # We have to wrap the command in bash because the cygwin environment
    # is required for httpd to run.
    # -C: process directive before reading config files
    # -c: process directive after reading config files
    # Apache wouldn't run CGIs with permissions==700 unless we add
    # -c User "<username>"
    bash = self._PathToExecutable("bash.exe")
    httpd_cmd_string = (
      ' PATH=%(bindir)s %(httpd_executable_path)s'
      ' -f %(httpd_conf_path)s'
      ' -c \'TypesConfig "%(mime_types_path)s"\''
      ' -c \'CustomLog "%(output_dir)s/access_log.txt" common\''
      ' -c \'ErrorLog "%(output_dir)s/error_log.txt"\''
      ' -c \'PidFile "%(output_dir)s/httpd.pid"\''
      ' -C \'User "%(user)s"\''
      ' -C \'ServerRoot "%(server_root)s"\''
    )
    if apache2:
      httpd_cmd_string = ('export CYGWIN=server;' + httpd_cmd_string +
          ' -c \'SSLCertificateFile "%(ssl_certificate_file)s"\'')
    if document_root:
      httpd_cmd_string += ' -C \'DocumentRoot "%(document_root)s"\''

    httpd_cmd = [bash, "-c", httpd_cmd_string % httpd_vars]
    return httpd_cmd

  def GetStopHttpdCommand(self):
    """Returns a list of strings that contains the command line+args needed to
    stop the http server used in the http tests.
    """
    # Force kill (/f) *all* httpd processes. This has the side effect of
    # killing httpd processes that we didn't start.
    return ["taskkill.exe", "/f", "/im", "httpd*"]
+
+###########################################################################
+# This method is specific to windows, expected to be used only by *_win.py
+# files.
+
def GetCygwinPath(path):
  r"""Convert a Windows path to a cygwin path.

  The cygpath utility insists on converting paths that it thinks are Cygwin
  root paths to what it thinks the correct roots are.  So paths such as
  "C:\b\slave\webkit-release-kjs\build\third_party\cygwin\bin" are converted
  to plain "/usr/bin".  To avoid this, we do the conversion manually.

  The path is expected to be an absolute path, on any drive.
  """
  # Rewrite the leading drive letter ("C:\" or "c:/") as "/cygdrive/c/",
  # then normalize the remaining separators to forward slashes.
  drive_pattern = re.compile(r'([a-z]):[/\\]', re.IGNORECASE)
  converted = drive_pattern.sub(
      lambda m: '/cygdrive/%s/' % m.group(1).lower(), path)
  return converted.replace('\\', '/')
diff --git a/chromium/tools/python/google/process_utils.py b/chromium/tools/python/google/process_utils.py
new file mode 100644
index 00000000000..64c92ea9f19
--- /dev/null
+++ b/chromium/tools/python/google/process_utils.py
@@ -0,0 +1,221 @@
+# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Shared process-related utility functions."""
+
+import errno
+import os
+import subprocess
+import sys
+
+class CommandNotFound(Exception): pass
+
+
+TASKKILL = os.path.join(os.environ['WINDIR'], 'system32', 'taskkill.exe')
+TASKKILL_PROCESS_NOT_FOUND_ERR = 128
+# On windows 2000 there is no taskkill.exe, we need to have pskill somewhere
+# in the path.
+PSKILL = 'pskill.exe'
+PSKILL_PROCESS_NOT_FOUND_ERR = -1
+
+def KillAll(executables):
+ """Tries to kill all copies of each process in the processes list. Returns
+ an error if any running processes couldn't be killed.
+ """
+ result = 0
+ if os.path.exists(TASKKILL):
+ command = [TASKKILL, '/f', '/im']
+ process_not_found_err = TASKKILL_PROCESS_NOT_FOUND_ERR
+ else:
+ command = [PSKILL, '/t']
+ process_not_found_err = PSKILL_PROCESS_NOT_FOUND_ERR
+
+ for name in executables:
+ new_error = RunCommand(command + [name])
+ # Ignore "process not found" error.
+ if new_error != 0 and new_error != process_not_found_err:
+ result = new_error
+ return result
+
+def RunCommandFull(command, verbose=True, collect_output=False,
+ print_output=True):
+ """Runs the command list.
+
+ Prints the given command (which should be a list of one or more strings).
+ If specified, prints its stderr (and optionally stdout) to stdout,
+ line-buffered, converting line endings to CRLF (see note below). If
+ specified, collects the output as a list of lines and returns it. Waits
+ for the command to terminate and returns its status.
+
+ Args:
+ command: the full command to run, as a list of one or more strings
+ verbose: if True, combines all output (stdout and stderr) into stdout.
+ Otherwise, prints only the command's stderr to stdout.
+ collect_output: if True, collects the output of the command as a list of
+ lines and returns it
+ print_output: if True, prints the output of the command
+
+ Returns:
+ A tuple consisting of the process's exit status and output. If
+ collect_output is False, the output will be [].
+
+ Raises:
+ CommandNotFound if the command executable could not be found.
+ """
+ print '\n' + subprocess.list2cmdline(command).replace('\\', '/') + '\n', ###
+
+ if verbose:
+ out = subprocess.PIPE
+ err = subprocess.STDOUT
+ else:
+ out = file(os.devnull, 'w')
+ err = subprocess.PIPE
+ try:
+ proc = subprocess.Popen(command, stdout=out, stderr=err, bufsize=1)
+ except OSError, e:
+ if e.errno == errno.ENOENT:
+ raise CommandNotFound('Unable to find "%s"' % command[0])
+ raise
+
+ output = []
+
+ if verbose:
+ read_from = proc.stdout
+ else:
+ read_from = proc.stderr
+ line = read_from.readline()
+ while line:
+ line = line.rstrip()
+
+ if collect_output:
+ output.append(line)
+
+ if print_output:
+ # Windows Python converts \n to \r\n automatically whenever it
+ # encounters it written to a text file (including stdout). The only
+ # way around it is to write to a binary file, which isn't feasible for
+ # stdout. So we end up with \r\n here even though we explicitly write
+ # \n. (We could write \r instead, which doesn't get converted to \r\n,
+ # but that's probably more troublesome for people trying to read the
+ # files.)
+ print line + '\n',
+
+ # Python on windows writes the buffer only when it reaches 4k. This is
+ # not fast enough for all purposes.
+ sys.stdout.flush()
+ line = read_from.readline()
+
+ # Make sure the process terminates.
+ proc.wait()
+
+ if not verbose:
+ out.close()
+ return (proc.returncode, output)
+
+def RunCommand(command, verbose=True):
+ """Runs the command list, printing its output and returning its exit status.
+
+ Prints the given command (which should be a list of one or more strings),
+ then runs it and prints its stderr (and optionally stdout) to stdout,
+ line-buffered, converting line endings to CRLF. Waits for the command to
+ terminate and returns its status.
+
+ Args:
+ command: the full command to run, as a list of one or more strings
+ verbose: if True, combines all output (stdout and stderr) into stdout.
+ Otherwise, prints only the command's stderr to stdout.
+
+ Returns:
+ The process's exit status.
+
+ Raises:
+ CommandNotFound if the command executable could not be found.
+ """
+ return RunCommandFull(command, verbose)[0]
+
+def RunCommandsInParallel(commands, verbose=True, collect_output=False,
+ print_output=True):
+ """Runs a list of commands in parallel, waits for all commands to terminate
+ and returns their status. If specified, the output of commands can be
+ returned and/or printed.
+
+ Args:
+ commands: the list of commands to run, each as a list of one or more
+ strings.
+ verbose: if True, combines stdout and stderr into stdout.
+ Otherwise, prints only the command's stderr to stdout.
+ collect_output: if True, collects the output of the each command as a list
+ of lines and returns it.
+ print_output: if True, prints the output of each command.
+
+ Returns:
+ A list of tuples consisting of each command's exit status and output. If
+ collect_output is False, the output will be [].
+
+ Raises:
+ CommandNotFound if any of the command executables could not be found.
+ """
+
+ command_num = len(commands)
+ outputs = [[] for i in xrange(command_num)]
+ procs = [None for i in xrange(command_num)]
+ eofs = [False for i in xrange(command_num)]
+
+ for command in commands:
+ print '\n' + subprocess.list2cmdline(command).replace('\\', '/') + '\n',
+
+ if verbose:
+ out = subprocess.PIPE
+ err = subprocess.STDOUT
+ else:
+ out = file(os.devnull, 'w')
+ err = subprocess.PIPE
+
+ for i in xrange(command_num):
+ try:
+ command = commands[i]
+ procs[i] = subprocess.Popen(command, stdout=out, stderr=err, bufsize=1)
+ except OSError, e:
+ if e.errno == errno.ENOENT:
+ raise CommandNotFound('Unable to find "%s"' % command[0])
+ raise
+ # We could consider terminating the processes already started.
+ # But Popen.kill() is only available in version 2.6.
+ # For now the clean up is done by KillAll.
+
+ while True:
+ eof_all = True
+ for i in xrange(command_num):
+ if eofs[i]:
+ continue
+ if verbose:
+ read_from = procs[i].stdout
+ else:
+ read_from = procs[i].stderr
+ line = read_from.readline()
+ if line:
+ eof_all = False
+ line = line.rstrip()
+ outputs[i].append(line)
+ if print_output:
+ # Windows Python converts \n to \r\n automatically whenever it
+ # encounters it written to a text file (including stdout). The only
+ # way around it is to write to a binary file, which isn't feasible
+ # for stdout. So we end up with \r\n here even though we explicitly
+ # write \n. (We could write \r instead, which doesn't get converted
+ # to \r\n, but that's probably more troublesome for people trying to
+ # read the files.)
+ print line + '\n',
+ else:
+ eofs[i] = True
+ if eof_all:
+ break
+
+ # Make sure the process terminates.
+ for i in xrange(command_num):
+ procs[i].wait()
+
+ if not verbose:
+ out.close()
+
+ return [(procs[i].returncode, outputs[i]) for i in xrange(command_num)]
diff --git a/chromium/tools/real_world_impact/nsfw_urls.py b/chromium/tools/real_world_impact/nsfw_urls.py
new file mode 100644
index 00000000000..01977fc43cf
--- /dev/null
+++ b/chromium/tools/real_world_impact/nsfw_urls.py
@@ -0,0 +1,79 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""NSFW urls in the Alexa top 2000 sites."""
+nsfw_urls = set([
+ "http://xhamster.com/",
+ "http://xvideos.com/",
+ "http://livejasmin.com/",
+ "http://pornhub.com/",
+ "http://redtube.com/",
+ "http://youporn.com/",
+ "http://xnxx.com/",
+ "http://tube8.com/",
+ "http://youjizz.com/",
+ "http://adultfriendfinder.com/",
+ "http://hardsextube.com/",
+ "http://yourlust.com/",
+ "http://drtuber.com/",
+ "http://beeg.com/",
+ "http://largeporntube.com/",
+ "http://nuvid.com/",
+ "http://bravotube.net/",
+ "http://spankwire.com/",
+ "http://discreethearts.com/",
+ "http://keezmovies.com/",
+ "http://xtube.com/",
+ "http://alphaporno.com/",
+ "http://4tube.com/",
+ "http://nudevista.com/",
+ "http://porntube.com/",
+ "http://xhamstercams.com/",
+ "http://porn.com/",
+ "http://video-one.com/",
+ "http://perfectgirls.net/",
+ "http://slutload.com/",
+ "http://sunporno.com/",
+ "http://tnaflix.com/",
+ "http://pornerbros.com/",
+ "http://h2porn.com/",
+ "http://adult-empire.com/",
+ "http://pornhublive.com/",
+ "http://sexitnow.com/",
+ "http://pornsharia.com/",
+ "http://freeones.com/",
+ "http://tubegalore.com/",
+ "http://xvideos.jp/",
+ "http://brazzers.com/",
+ "http://fapdu.com/",
+ "http://pornoxo.com/",
+ "http://extremetube.com/",
+ "http://hot-sex-tube.com/",
+ "http://xhamsterhq.com/",
+ "http://18andabused.com/",
+ "http://tubepleasure.com/",
+ "http://18schoolgirlz.com/",
+ "http://chaturbate.com/",
+ "http://motherless.com/",
+ "http://yobt.com/",
+ "http://empflix.com/",
+ "http://hellporno.com/",
+ "http://ashemaletube.com/",
+ "http://watchmygf.com/",
+ "http://redtubelive.com/",
+ "http://met-art.com/",
+ "http://gonzoxxxmovies.com/",
+ "http://shufuni.com/",
+ "http://vid2c.com/",
+ "http://dojki.com/",
+ "http://cerdas.com/",
+ "http://overthumbs.com/",
+ "http://xvideoslive.com/",
+ "http://playboy.com/",
+ "http://caribbeancom.com/",
+ "http://tubewolf.com/",
+ "http://xmatch.com/",
+ "http://ixxx.com/",
+ "http://nymphdate.com/",
+]) \ No newline at end of file
diff --git a/chromium/tools/real_world_impact/real_world_impact.py b/chromium/tools/real_world_impact/real_world_impact.py
new file mode 100755
index 00000000000..4d0b1663230
--- /dev/null
+++ b/chromium/tools/real_world_impact/real_world_impact.py
@@ -0,0 +1,554 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Tool for seeing the real world impact of a patch.
+#
+# Layout Tests can tell you whether something has changed, but this can help
+# you determine whether a subtle/controversial change is beneficial or not.
+#
+# It dumps the rendering of a large number of sites, both with and without a
+# patch being evaluated, then sorts them by greatest difference in rendering,
+# such that a human reviewer can quickly review the most impacted sites,
+# rather than having to manually try sites to see if anything changes.
+#
+# In future it might be possible to extend this to other kinds of differences,
+# e.g. page load times.
+
+import argparse
+from argparse import RawTextHelpFormatter
+from contextlib import closing
+import datetime
+import errno
+from distutils.spawn import find_executable
+from operator import itemgetter
+import multiprocessing
+import os
+import re
+from cStringIO import StringIO
+import subprocess
+import sys
+import textwrap
+import time
+from urllib2 import urlopen
+from urlparse import urlparse
+import webbrowser
+from zipfile import ZipFile
+
+from nsfw_urls import nsfw_urls
+
+action = None
+allow_js = False
+additional_content_shell_flags = ""
+chromium_src_root = ""
+chromium_out_dir = ""
+image_diff = ""
+content_shell = ""
+output_dir = ""
+num_sites = 100
+urls = []
+print_lock = multiprocessing.Lock()
+
+
+def MakeDirsIfNotExist(dir):
+ try:
+ os.makedirs(dir)
+ except OSError as e:
+ if e.errno != errno.EEXIST:
+ raise
+
+
+def SetupPathsAndOut():
+ global chromium_src_root, chromium_out_dir, output_dir
+ global image_diff, content_shell
+ chromium_src_root = os.path.abspath(os.path.join(os.path.dirname(__file__),
+ os.pardir,
+ os.pardir))
+ # Find out directory (might be out_linux for users of cr).
+ for out_suffix in ["_linux", ""]:
+ out_dir = os.path.join(chromium_src_root, "out" + out_suffix)
+ if os.path.exists(out_dir):
+ chromium_out_dir = out_dir
+ break
+ if not chromium_out_dir:
+ return False
+
+ this_script_name = "real_world_impact"
+ output_dir = os.path.join(chromium_out_dir,
+ "Release",
+ this_script_name)
+ MakeDirsIfNotExist(output_dir)
+
+ image_diff = os.path.join(chromium_out_dir, "Release", "image_diff")
+
+ if sys.platform == 'darwin':
+ content_shell = os.path.join(chromium_out_dir, "Release",
+ "Content Shell.app/Contents/MacOS/Content Shell")
+ elif sys.platform.startswith('linux'):
+ content_shell = os.path.join(chromium_out_dir, "Release",
+ "content_shell")
+ elif sys.platform.startswith('win'):
+ content_shell = os.path.join(chromium_out_dir, "Release",
+ "content_shell.exe")
+ return True
+
+
+def CheckPrerequisites():
+ if not find_executable("wget"):
+ print "wget not found! Install wget and re-run this."
+ return False
+ if not os.path.exists(image_diff):
+ print "image_diff not found (%s)!" % image_diff
+ print "Build the image_diff target and re-run this."
+ return False
+ if not os.path.exists(content_shell):
+ print "Content shell not found (%s)!" % content_shell
+ print "Build Release/content_shell and re-run this."
+ return False
+ return True
+
+
+def PickSampleUrls():
+ global urls
+ data_dir = os.path.join(output_dir, "data")
+ MakeDirsIfNotExist(data_dir)
+
+ # Download Alexa top 1,000,000 sites
+ # TODO(johnme): Should probably update this when it gets too stale...
+ csv_path = os.path.join(data_dir, "top-1m.csv")
+ if not os.path.exists(csv_path):
+ print "Downloading list of top 1,000,000 sites from Alexa..."
+ csv_url = "http://s3.amazonaws.com/alexa-static/top-1m.csv.zip"
+ with closing(urlopen(csv_url)) as stream:
+ ZipFile(StringIO(stream.read())).extract("top-1m.csv", data_dir)
+
+ bad_urls_path = os.path.join(data_dir, "bad_urls.txt")
+ if os.path.exists(bad_urls_path):
+ with open(bad_urls_path) as f:
+ bad_urls = set(f.read().splitlines())
+ else:
+ bad_urls = set()
+
+ # See if we've already selected a sample of size num_sites (this way, if you
+ # call this script with arguments "before N" then "after N", where N is the
+ # same number, we'll use the same sample, as expected!).
+ urls_path = os.path.join(data_dir, "%06d_urls.txt" % num_sites)
+ if not os.path.exists(urls_path):
+ if action == 'compare':
+ print ("Error: you must run 'before %d' and 'after %d' before "
+ "running 'compare %d'") % (num_sites, num_sites, num_sites)
+ return False
+ print "Picking %d sample urls..." % num_sites
+
+ # TODO(johnme): For now this just gets the top num_sites entries. In future
+ # this should pick a weighted random sample. For example, it could fit a
+ # power-law distribution, which is a good model of website popularity
+ # (http://www.useit.com/alertbox/9704b.html).
+ urls = []
+ remaining_num_sites = num_sites
+ with open(csv_path) as f:
+ for entry in f:
+ if remaining_num_sites <= 0:
+ break
+ remaining_num_sites -= 1
+ hostname = entry.strip().split(',')[1]
+ if not '/' in hostname: # Skip Alexa 1,000,000 entries that have paths.
+ url = "http://%s/" % hostname
+ if not url in bad_urls:
+ urls.append(url)
+ # Don't write these to disk yet; we'll do that in SaveWorkingUrls below
+ # once we have tried to download them and seen which ones fail.
+ else:
+ with open(urls_path) as f:
+ urls = [u for u in f.read().splitlines() if not u in bad_urls]
+ return True
+
+
+def SaveWorkingUrls():
+ # TODO(johnme): Update the list if a url that used to work goes offline.
+ urls_path = os.path.join(output_dir, "data", "%06d_urls.txt" % num_sites)
+ if not os.path.exists(urls_path):
+ with open(urls_path, 'w') as f:
+ f.writelines(u + '\n' for u in urls)
+
+
+def PrintElapsedTime(elapsed, detail=""):
+ elapsed = round(elapsed * 10) / 10.0
+ m = elapsed / 60
+ s = elapsed % 60
+ print "Took %dm%.1fs" % (m, s), detail
+
+
+def DownloadStaticCopyTask(url):
+ url_parts = urlparse(url)
+ host_dir = os.path.join(output_dir, "data", url_parts.hostname)
+ # Use wget for now, as it does a reasonable job of spidering page dependencies
+ # (e.g. CSS, JS, images).
+ success = True
+ try:
+ subprocess.check_call(["wget",
+ "--execute", "robots=off",
+ ("--user-agent=Mozilla/5.0 (Macintosh; Intel Mac OS "
+ "X 10_8_5) AppleWebKit/537.36 (KHTML, like Gecko) C"
+ "hrome/32.0.1700.14 Safari/537.36"),
+ "--page-requisites",
+ "--span-hosts",
+ "--adjust-extension",
+ "--convert-links",
+ "--directory-prefix=" + host_dir,
+ "--force-directories",
+ "--default-page=index.html",
+ "--no-check-certificate",
+ "--timeout=5", # 5s timeout
+ "--tries=2",
+ "--quiet",
+ url])
+ except KeyboardInterrupt:
+ success = False
+ except subprocess.CalledProcessError:
+ # Ignoring these for now, as some sites have issues with their subresources
+ # yet still produce a renderable index.html
+ pass #success = False
+ if success:
+ download_path = os.path.join(host_dir, url_parts.hostname, "index.html")
+ if not os.path.exists(download_path):
+ success = False
+ else:
+ with print_lock:
+ print "Downloaded:", url
+ if not success:
+ with print_lock:
+ print "Failed to download:", url
+ return False
+ return True
+
+
+def DownloadStaticCopies():
+ global urls
+ new_urls = []
+ for url in urls:
+ url_parts = urlparse(url)
+ host_dir = os.path.join(output_dir, "data", url_parts.hostname)
+ download_path = os.path.join(host_dir, url_parts.hostname, "index.html")
+ if not os.path.exists(download_path):
+ new_urls.append(url)
+
+ if new_urls:
+ print "Downloading static copies of %d sites..." % len(new_urls)
+ start_time = time.time()
+
+ results = multiprocessing.Pool(20).map(DownloadStaticCopyTask, new_urls)
+ failed_urls = [new_urls[i] for i,ret in enumerate(results) if not ret]
+ if failed_urls:
+ bad_urls_path = os.path.join(output_dir, "data", "bad_urls.txt")
+ with open(bad_urls_path, 'a') as f:
+ f.writelines(u + '\n' for u in failed_urls)
+ failed_urls_set = set(failed_urls)
+ urls = [u for u in urls if u not in failed_urls_set]
+
+ PrintElapsedTime(time.time() - start_time)
+
+ SaveWorkingUrls()
+
+
+def RunDrtTask(url):
+ url_parts = urlparse(url)
+ host_dir = os.path.join(output_dir, "data", url_parts.hostname)
+ html_path = os.path.join(host_dir, url_parts.hostname, "index.html")
+
+ if not allow_js:
+ nojs_path = os.path.join(host_dir, url_parts.hostname, "index-nojs.html")
+ if not os.path.exists(nojs_path):
+ with open(html_path) as f:
+ html = f.read()
+ if not html:
+ return False
+ # These aren't intended to be XSS safe :)
+ block_tags = (r'<\s*(script|object|video|audio|iframe|frameset|frame)'
+ r'\b.*?<\s*\/\s*\1\s*>')
+ block_attrs = r'\s(onload|onerror)\s*=\s*(\'[^\']*\'|"[^"]*|\S*)'
+ html = re.sub(block_tags, '', html, flags=re.I|re.S)
+ html = re.sub(block_attrs, '', html, flags=re.I)
+ with open(nojs_path, 'w') as f:
+ f.write(html)
+ html_path = nojs_path
+
+ start_time = time.time()
+
+ with open(os.devnull, "w") as fnull:
+ p = subprocess.Popen([content_shell,
+ "--run-layout-test",
+ additional_content_shell_flags,
+ # The single quote is not a typo, it's a separator!
+ html_path + "'--pixel-test"
+ ],
+ shell=False,
+ stdout=subprocess.PIPE,
+ stderr=fnull)
+ result = p.stdout.read()
+ PNG_START = b"\x89\x50\x4E\x47\x0D\x0A\x1A\x0A"
+ PNG_END = b"\x49\x45\x4E\x44\xAE\x42\x60\x82"
+ try:
+ start = result.index(PNG_START)
+ end = result.rindex(PNG_END) + 8
+ except ValueError:
+ return False
+
+ png_path = os.path.join(output_dir, action, url_parts.hostname + ".png")
+ MakeDirsIfNotExist(os.path.dirname(png_path))
+ with open(png_path, 'wb') as f:
+ f.write(result[start:end])
+ elapsed_time = (time.time() - start_time, url)
+ return elapsed_time
+
+
+def RunDrt():
+ print "Taking screenshots of %d pages..." % len(urls)
+ start_time = time.time()
+
+ results = multiprocessing.Pool().map(RunDrtTask, urls, 1)
+
+ max_time, url = max(t for t in results if t)
+ elapsed_detail = "(slowest: %.2fs on %s)" % (max_time, url)
+ PrintElapsedTime(time.time() - start_time, elapsed_detail)
+
+
+def CompareResultsTask(url):
+ url_parts = urlparse(url)
+ before_path = os.path.join(output_dir, "before", url_parts.hostname + ".png")
+ after_path = os.path.join(output_dir, "after", url_parts.hostname + ".png")
+ diff_path = os.path.join(output_dir, "diff", url_parts.hostname + ".png")
+ MakeDirsIfNotExist(os.path.join(output_dir, "diff"))
+
+ # TODO(johnme): Don't hardcode "real_world_impact".
+ red_path = ("data:image/gif;base64,R0lGODlhAQABAPAAAP8AAP///yH5BAAAAAAALAAAAA"
+ "ABAAEAAAICRAEAOw==")
+
+ before_exists = os.path.exists(before_path)
+ after_exists = os.path.exists(after_path)
+ if not before_exists and not after_exists:
+ # TODO(johnme): Make this more informative.
+ return (-100, url, red_path)
+ if before_exists != after_exists:
+ # TODO(johnme): Make this more informative.
+ return (200, url, red_path)
+
+ # Get percentage difference.
+ p = subprocess.Popen([image_diff, "--histogram",
+ before_path, after_path],
+ shell=False,
+ stdout=subprocess.PIPE)
+ output,_ = p.communicate()
+ if p.returncode == 0:
+ return (0, url, before_path)
+ diff_match = re.match(r'histogram diff: (\d+\.\d{2})% (?:passed|failed)\n'
+ 'exact diff: (\d+\.\d{2})% (?:passed|failed)', output)
+ if not diff_match:
+ raise Exception("image_diff output format changed")
+ histogram_diff = float(diff_match.group(1))
+ exact_diff = float(diff_match.group(2))
+ combined_diff = max(histogram_diff + exact_diff / 8, 0.001)
+
+ # Produce diff PNG.
+ subprocess.call([image_diff, "--diff", before_path, after_path, diff_path])
+ return (combined_diff, url, diff_path)
+
+
+def CompareResults():
+ print "Running image_diff on %d pages..." % len(urls)
+ start_time = time.time()
+
+ results = multiprocessing.Pool().map(CompareResultsTask, urls)
+ results.sort(key=itemgetter(0), reverse=True)
+
+ PrintElapsedTime(time.time() - start_time)
+
+ now = datetime.datetime.today().strftime("%a %Y-%m-%d %H:%M")
+ html_start = textwrap.dedent("""\
+ <!DOCTYPE html>
+ <html>
+ <head>
+ <title>Real World Impact report %s</title>
+ <script>
+ var togglingImg = null;
+ var toggleTimer = null;
+
+ var before = true;
+ function toggle() {
+ var newFolder = before ? "before" : "after";
+ togglingImg.src = togglingImg.src.replace(/before|after|diff/, newFolder);
+ before = !before;
+ toggleTimer = setTimeout(toggle, 300);
+ }
+
+ function startToggle(img) {
+ before = true;
+ togglingImg = img;
+ if (!img.origSrc)
+ img.origSrc = img.src;
+ toggle();
+ }
+ function stopToggle(img) {
+ clearTimeout(toggleTimer);
+ img.src = img.origSrc;
+ }
+
+ document.onkeydown = function(e) {
+ e = e || window.event;
+ var keyCode = e.keyCode || e.which;
+ var newFolder;
+ switch (keyCode) {
+ case 49: //'1'
+ newFolder = "before"; break;
+ case 50: //'2'
+ newFolder = "after"; break;
+ case 51: //'3'
+ newFolder = "diff"; break;
+ default:
+ return;
+ }
+ var imgs = document.getElementsByTagName("img");
+ for (var i = 0; i < imgs.length; i++) {
+ imgs[i].src = imgs[i].src.replace(/before|after|diff/, newFolder);
+ }
+ };
+ </script>
+ <style>
+ h1 {
+ font-family: sans;
+ }
+ h2 {
+ font-family: monospace;
+ white-space: pre;
+ }
+ .nsfw-spacer {
+ height: 50vh;
+ }
+ .nsfw-warning {
+ background: yellow;
+ border: 10px solid red;
+ }
+ .info {
+ font-size: 1.2em;
+ font-style: italic;
+ }
+ body:not(.details-supported) details {
+ display: none;
+ }
+ </style>
+ </head>
+ <body>
+ <script>
+ if ('open' in document.createElement('details'))
+ document.body.className = "details-supported";
+ </script>
+ <!--<div class="nsfw-spacer"></div>-->
+ <p class="nsfw-warning">Warning: sites below are taken from the Alexa top %d
+ and may be NSFW.</p>
+ <!--<div class="nsfw-spacer"></div>-->
+ <h1>Real World Impact report %s</h1>
+ <p class="info">Press 1, 2 and 3 to switch between before, after and diff
+ screenshots respectively; or hover over the images to rapidly alternate
+ between before and after.</p>
+ """ % (now, num_sites, now))
+
+ html_same_row = """\
+ <h2>No difference on <a href="%s">%s</a>.</h2>
+ """
+
+ html_diff_row = """\
+ <h2>%7.3f%% difference on <a href="%s">%s</a>:</h2>
+ <img src="%s" width="800" height="600"
+ onmouseover="startToggle(this)" onmouseout="stopToggle(this)">
+ """
+
+ html_nsfw_diff_row = """\
+ <h2>%7.3f%% difference on <a href="%s">%s</a>:</h2>
+ <details>
+ <summary>This site may be NSFW. Click to expand/collapse.</summary>
+ <img src="%s" width="800" height="600"
+ onmouseover="startToggle(this)" onmouseout="stopToggle(this)">
+ </details>
+ """
+
+ html_end = textwrap.dedent("""\
+ </body>
+ </html>""")
+
+ html_path = os.path.join(output_dir, "diff.html")
+ with open(html_path, 'w') as f:
+ f.write(html_start)
+ for (diff_float, url, diff_path) in results:
+ diff_path = os.path.relpath(diff_path, output_dir)
+ if diff_float == 0:
+ f.write(html_same_row % (url, url))
+ elif url in nsfw_urls:
+ f.write(html_nsfw_diff_row % (diff_float, url, url, diff_path))
+ else:
+ f.write(html_diff_row % (diff_float, url, url, diff_path))
+ f.write(html_end)
+
+ webbrowser.open_new_tab("file://" + html_path)
+
+
+def main(argv):
+ global num_sites, action, allow_js, additional_content_shell_flags
+
+ parser = argparse.ArgumentParser(
+ formatter_class=RawTextHelpFormatter,
+ description="Compare the real world impact of a content shell change.",
+ epilog=textwrap.dedent("""\
+ Example usage:
+ 1. Build content_shell in out/Release without any changes.
+ 2. Run: %s before [num sites to test (default %d)].
+ 3. Either:
+ a. Apply your controversial patch and rebuild content_shell.
+ b. Pass --additional_flags="--enable_your_flag" in step 4.
+ 4. Run: %s after [num sites to test (default %d)].
+ 5. Run: %s compare [num sites to test (default %d)].
+ This will open the results in your web browser.
+ """ % (argv[0], num_sites, argv[0], num_sites, argv[0], num_sites)))
+ parser.add_argument("--allow_js", help="Don't disable Javascript",
+ action="store_true")
+ parser.add_argument("--additional_flags",
+ help="Additional flags to pass to content shell")
+ parser.add_argument("action",
+ help=textwrap.dedent("""\
+ Action to perform.
+ download - Just download the sites.
+ before - Run content shell and record 'before' result.
+ after - Run content shell and record 'after' result.
+ compare - Compare before and after results.
+ """),
+ choices=["download", "before", "after", "compare"])
+ parser.add_argument("num_sites",
+ help="Number of sites (default %s)" % num_sites,
+ type=int, default=num_sites, nargs='?')
+ args = parser.parse_args()
+
+ action = args.action
+
+ if (args.num_sites):
+ num_sites = args.num_sites
+
+ if (args.allow_js):
+ allow_js = args.allow_js
+
+ if (args.additional_flags):
+ additional_content_shell_flags = args.additional_flags
+
+ if not SetupPathsAndOut() or not CheckPrerequisites() or not PickSampleUrls():
+ return 1
+
+ if action == 'compare':
+ CompareResults()
+ else:
+ DownloadStaticCopies()
+ if action != 'download':
+ RunDrt()
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv)) \ No newline at end of file
diff --git a/chromium/tools/remove_stale_pyc_files.py b/chromium/tools/remove_stale_pyc_files.py
new file mode 100755
index 00000000000..b32c5f4d269
--- /dev/null
+++ b/chromium/tools/remove_stale_pyc_files.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+
+def RemoveAllStalePycFiles(base_dir):
+ """Scan directories for old .pyc files without a .py file and delete them."""
+ for dirname, _, filenames in os.walk(base_dir):
+ if '.svn' in dirname or '.git' in dirname:
+ continue
+ for filename in filenames:
+ root, ext = os.path.splitext(filename)
+ if ext != '.pyc':
+ continue
+
+ pyc_path = os.path.join(dirname, filename)
+ py_path = os.path.join(dirname, root + '.py')
+
+ try:
+ if not os.path.exists(py_path):
+ os.remove(pyc_path)
+ except OSError:
+ # Wrap OS calls in try/except in case another process touched this file.
+ pass
+
+ try:
+ os.removedirs(dirname)
+ except OSError:
+ # Wrap OS calls in try/except in case another process touched this dir.
+ pass
+
+
+if __name__ == '__main__':
+ for path in sys.argv[1:]:
+ RemoveAllStalePycFiles(path)
diff --git a/chromium/tools/resources/find_unused_resources.py b/chromium/tools/resources/find_unused_resources.py
new file mode 100755
index 00000000000..d6e52c866ed
--- /dev/null
+++ b/chromium/tools/resources/find_unused_resources.py
@@ -0,0 +1,204 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script searches for unused art assets listed in a .grd file.
+
+It uses git grep to look for references to the IDR resource id or the base
+filename. If neither is found, the file is reported unused.
+
+Requires a git checkout. Must be run from your checkout's "src" root.
+
+Example:
+ cd /work/chrome/src
+ tools/resources/find_unused_resources.py ash/resources/ash_resources.grd
+"""
+
+__author__ = 'jamescook@chromium.org (James Cook)'
+
+
+import os
+import re
+import subprocess
+import sys
+
+
+def GetBaseResourceId(resource_id):
+ """Removes common suffixes from a resource ID.
+
+ Removes suffixes that may be added by macros like IMAGE_GRID or IMAGE_BORDER.
+ For example, converts IDR_FOO_LEFT and IDR_FOO_RIGHT to just IDR_FOO.
+
+ Args:
+ resource_id: String resource ID.
+
+ Returns:
+ A string with the base part of the resource ID.
+ """
+ suffixes = [
+ '_TOP_LEFT', '_TOP', '_TOP_RIGHT',
+ '_LEFT', '_CENTER', '_RIGHT',
+ '_BOTTOM_LEFT', '_BOTTOM', '_BOTTOM_RIGHT',
+ '_TL', '_T', '_TR',
+ '_L', '_M', '_R',
+ '_BL', '_B', '_BR']
+ # Note: This does not check _HOVER, _PRESSED, _HOT, etc. as those are never
+ # used in macros.
+ for suffix in suffixes:
+ if resource_id.endswith(suffix):
+ resource_id = resource_id[:-len(suffix)]
+ return resource_id
+
+
+def FindFilesWithContents(string_a, string_b):
+ """Returns list of paths of files that contain |string_a| or |string_b|.
+
+ Uses --name-only to print the file paths. The default behavior of git grep
+ is to OR together multiple patterns.
+
+ Args:
+ string_a: A string to search for (not a regular expression).
+ string_b: As above.
+
+ Returns:
+ A list of file paths as strings.
+ """
+ matching_files = subprocess.check_output([
+ 'git', 'grep', '--name-only', '--fixed-strings', '-e', string_a,
+ '-e', string_b])
+ files_list = matching_files.split('\n')
+ # The output ends in a newline, so slice that off.
+ files_list = files_list[:-1]
+ return files_list
+
+
+def GetUnusedResources(grd_filepath):
+ """Returns a list of resources that are unused in the code.
+
+ Prints status lines to the console because this function is quite slow.
+
+ Args:
+ grd_filepath: Path to a .grd file listing resources.
+
+ Returns:
+ A list of pairs of [resource_id, filepath] for the unused resources.
+ """
+ unused_resources = []
+ grd_file = open(grd_filepath, 'r')
+ grd_data = grd_file.read()
+ print 'Checking:'
+ # Match the resource id and file path out of substrings like:
+ # ...name="IDR_FOO_123" file="common/foo.png"...
+ # by matching between the quotation marks.
+ pattern = re.compile(
+ r"""name="([^"]*)" # Match resource ID between quotes.
+ \s* # Run of whitespace, including newlines.
+ file="([^"]*)" # Match file path between quotes.""",
+ re.VERBOSE)
+ # Use finditer over the file contents because there may be newlines between
+ # the name and file attributes.
+ searched = set()
+ for result in pattern.finditer(grd_data):
+ # Extract the IDR resource id and file path.
+ resource_id = result.group(1)
+ filepath = result.group(2)
+ filename = os.path.basename(filepath)
+ base_resource_id = GetBaseResourceId(resource_id)
+
+ # Do not bother repeating searches.
+ key = (base_resource_id, filename)
+ if key in searched:
+ continue
+ searched.add(key)
+
+ # Print progress as we go along.
+ print resource_id
+
+ # Ensure the resource isn't used anywhere by checking both for the resource
+ # id (which should appear in C++ code) and the raw filename (in case the
+ # file is referenced in a script, test HTML file, etc.).
+ matching_files = FindFilesWithContents(base_resource_id, filename)
+
+ # Each file is matched once in the resource file itself. If there are no
+ # other matching files, it is unused.
+ if len(matching_files) == 1:
+ # Give the user some happy news.
+ print 'Unused!'
+ unused_resources.append([resource_id, filepath])
+
+ return unused_resources
+
+
+def GetScaleDirectories(resources_path):
+ """Returns a list of paths to per-scale-factor resource directories.
+
+ Assumes the directory names end in '_percent', for example,
+ ash/resources/default_200_percent or
+ chrome/app/theme/resources/touch_140_percent
+
+ Args:
+ resources_path: The base path of interest.
+
+ Returns:
+ A list of paths relative to the 'src' directory.
+ """
+ file_list = os.listdir(resources_path)
+ scale_directories = []
+ for file_entry in file_list:
+ file_path = os.path.join(resources_path, file_entry)
+ if os.path.isdir(file_path) and file_path.endswith('_percent'):
+ scale_directories.append(file_path)
+
+ scale_directories.sort()
+ return scale_directories
+
+
+def main():
+ # The script requires exactly one parameter, the .grd file path.
+ if len(sys.argv) != 2:
+ print 'Usage: tools/resources/find_unused_resources.py <path/to/grd>'
+ sys.exit(1)
+ grd_filepath = sys.argv[1]
+
+ # Try to ensure we are in a source checkout.
+ current_dir = os.getcwd()
+ if os.path.basename(current_dir) != 'src':
+ print 'Script must be run in your "src" directory.'
+ sys.exit(1)
+
+ # We require a git checkout to use git grep.
+ if not os.path.exists(current_dir + '/.git'):
+ print 'You must use a git checkout for this script to run.'
+ print current_dir + '/.git', 'not found.'
+ sys.exit(1)
+
+ # Look up the scale-factor directories.
+ resources_path = os.path.dirname(grd_filepath)
+ scale_directories = GetScaleDirectories(resources_path)
+ if not scale_directories:
+ print 'No scale directories (like "default_100_percent") found.'
+ sys.exit(1)
+
+ # |unused_resources| stores pairs of [resource_id, filepath] for resource ids
+ # that are not referenced in the code.
+ unused_resources = GetUnusedResources(grd_filepath)
+ if not unused_resources:
+ print 'All resources are used.'
+ sys.exit(0)
+
+ # Dump our output for the user.
+ print
+ print 'Unused resource ids:'
+ for resource_id, filepath in unused_resources:
+ print resource_id
+ # Print a list of 'git rm' command lines to remove unused assets.
+ print
+ print 'Unused files:'
+ for resource_id, filepath in unused_resources:
+ for directory in scale_directories:
+ print 'git rm ' + os.path.join(directory, filepath)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/chromium/tools/resources/find_used_resources.py b/chromium/tools/resources/find_used_resources.py
new file mode 100755
index 00000000000..0528115ead5
--- /dev/null
+++ b/chromium/tools/resources/find_used_resources.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+
+USAGE = """find_used_resources.py [-h] [-i INPUT] [-o OUTPUT]
+
+Outputs the sorted list of resource ids that are part of unknown pragma warning
+in the given build log.
+
+This script is used to find the resources that are actually compiled in Chrome
+in order to only include the needed strings/images in Chrome PAK files. The
+script parses out the list of used resource ids. These resource ids show up in
+the build output after building Chrome with gyp variable
+enable_resource_whitelist_generation set to 1. This gyp flag causes the compiler
+to print out a UnknownPragma message every time a resource id is used. E.g.:
+foo.cc:22:0: warning: ignoring #pragma whitelisted_resource_12345
+[-Wunknown-pragmas]
+
+On Windows, the message is simply a message via __pragma(message(...)).
+
+"""
+
+COMPONENTS_STRINGS_HEADER = 'gen/components/strings/grit/components_strings.h'
+
# To keep the resource set identical between the 32-bit and 64-bit builds,
# always include these architecture-specific resources even when unused.
+ARCH_SPECIFIC_RESOURCES = [
+ 'IDS_VERSION_UI_64BIT',
+ 'IDS_VERSION_UI_32BIT',
+]
+
def FindResourceIds(header, resource_names):
  """Returns the numerical resource ids for the given resource names.

  Args:
    header: Path to a grit-generated header whose #define lines carry a
        _Pragma marker (built with enable_resource_whitelist_generation).
    resource_names: Iterable of resource names (e.g. 'IDS_VERSION_UI_64BIT').

  Returns:
    The set of integer ids #defined for |resource_names| in |header|.

  Raises:
    Exception: if the number of ids found differs from the number of names
        (a name was missing, or matched more than once).
  """
  pattern = re.compile(
      r'^#define (%s) _Pragma\S+ (\d+)$' % '|'.join(resource_names))
  res_ids = []
  with open(header, 'r') as f:
    for line in f:
      # Run the regex once per line (previously each line was matched twice).
      match = pattern.match(line)
      if match:
        res_ids.append(int(match.group(2)))
  if len(res_ids) != len(resource_names):
    raise Exception('Find resource id failed: the result is ' +
                    ', '.join(str(i) for i in res_ids))
  return set(res_ids)
+
def GetResourceIdsInPragmaWarnings(input):
  """Collects the resource ids mentioned in unknown-pragma warnings.

  Args:
    input: An iterable of build-log lines.

  Returns:
    The set of integer resource ids found in whitelisted_resource_NNN
    pragma warnings.
  """
  pragma_pattern = re.compile('whitelisted_resource_(?P<resource_id>[0-9]+)')
  matches = (pragma_pattern.search(line) for line in input)
  return set(int(m.group('resource_id')) for m in matches if m)
+
def Main():
  """Parses a build log and writes the sorted list of used resource ids."""
  parser = argparse.ArgumentParser(usage=USAGE)
  parser.add_argument(
      '-i', '--input', type=argparse.FileType('r'), default=sys.stdin,
      help='The build log to read (default stdin)')
  parser.add_argument(
      '-o', '--output', type=argparse.FileType('w'), default=sys.stdout,
      help='The resource list path to write (default stdout)')
  parser.add_argument('--out-dir', required=True,
                      help='The out target directory, for example out/Release')
  args = parser.parse_args()

  # Ids seen in compiler warnings, plus the arch-specific ids that are
  # force-included so 32-bit and 64-bit builds stay identical.
  used_resources = (
      GetResourceIdsInPragmaWarnings(args.input) |
      FindResourceIds(os.path.join(args.out_dir, COMPONENTS_STRINGS_HEADER),
                      ARCH_SPECIFIC_RESOURCES))

  args.output.writelines('%d\n' % rid for rid in sorted(used_resources))

if __name__ == '__main__':
  Main()
diff --git a/chromium/tools/resources/ico_tools.py b/chromium/tools/resources/ico_tools.py
new file mode 100644
index 00000000000..259f7f19761
--- /dev/null
+++ b/chromium/tools/resources/ico_tools.py
@@ -0,0 +1,202 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import math
+import os
+import struct
+import subprocess
+import sys
+import tempfile
+
+OPTIMIZE_PNG_FILES = 'tools/resources/optimize-png-files.sh'
+
+logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
+
class InvalidFile(Exception):
  """Raised when an input ICO file fails structural validation."""
+
def IsPng(png_data):
  """Returns True if |png_data| begins with the 8-byte PNG signature."""
  PNG_SIGNATURE = '\x89PNG\r\n\x1a\n'
  return png_data.startswith(PNG_SIGNATURE)
+
def OptimizePngFile(temp_dir, png_filename, optimization_level=None):
  """Optimize a PNG file on disk via optimize-png-files.sh.

  Args:
    temp_dir: The directory containing the PNG file. Must be the only file in
        the directory.
    png_filename: The full path to the PNG file to optimize.
    optimization_level: Optional -o level forwarded to the shell script.

  Returns:
    The raw bytes of a PNG file, an optimized version of the input.
  """
  logging.debug('Crushing PNG image...')
  command = [OPTIMIZE_PNG_FILES]
  if optimization_level is not None:
    command.append('-o%d' % optimization_level)
  command.append(temp_dir)
  # Route the script's chatter to stderr so our own stdout stays clean.
  exit_code = subprocess.call(command, stdout=sys.stderr)
  if exit_code == 0:
    logging.debug('optimize-png-files succeeded')
  else:
    # Best effort: fall through and return whatever bytes are on disk.
    logging.warning('Warning: optimize-png-files failed (%d)', exit_code)

  with open(png_filename, 'rb') as png_file:
    return png_file.read()
+
def OptimizePng(png_data, optimization_level=None):
  """Optimize a PNG given its raw bytes.

  Writes |png_data| into a fresh temporary directory, runs the optimizer on
  it, and cleans the directory up afterwards.

  Args:
    png_data: The raw bytes of a PNG file.
    optimization_level: Optional level forwarded to OptimizePngFile.

  Returns:
    The raw bytes of a PNG file, an optimized version of the input.
  """
  temp_dir = tempfile.mkdtemp()
  logging.debug('temp_dir = %s', temp_dir)
  png_filename = os.path.join(temp_dir, 'image.png')
  try:
    with open(png_filename, 'wb') as png_file:
      png_file.write(png_data)
    return OptimizePngFile(temp_dir, png_filename,
                           optimization_level=optimization_level)
  finally:
    # Always remove the scratch file and directory, even on failure.
    if os.path.exists(png_filename):
      os.unlink(png_filename)
    os.rmdir(temp_dir)
+
def ComputeANDMaskFromAlpha(image_data, width, height):
  """Compute an AND mask from 32-bit BGRA image data.

  A mask bit is 1 (transparent) exactly where the pixel's alpha byte is 0.
  Each row of the mask is padded to a 4-byte boundary.
  """
  mask_bytes = []
  for row in range(height):
    row_start = len(mask_bytes)
    byte = 0
    bits = 0
    for col in range(width):
      # Alpha is the 4th byte of each BGRA pixel.
      pixel_alpha = image_data[(row * width + col) * 4 + 3]
      byte = (byte << 1) | (1 if ord(pixel_alpha) == 0 else 0)
      bits += 1
      if bits == 8:
        mask_bytes.append(byte)
        byte = 0
        bits = 0

    # Flush the partial byte at the end of the row, left-aligned.
    if bits:
      mask_bytes.append(byte << (8 - bits))
    # Pad the row out to a multiple of 4 bytes.
    while (len(mask_bytes) - row_start) % 4:
      mask_bytes.append(0)

  return ''.join(map(chr, mask_bytes))
+
def RebuildANDMask(iconimage):
  """Rebuild the AND mask in an icon image.

  GIMP (<=2.8.14) creates a bad AND mask on 32-bit icon images (pixels with <50%
  opacity are marked as transparent, which end up looking black on Windows). So,
  if this is a 32-bit image, throw the mask away and recompute it from the alpha
  data. (See: https://bugzilla.gnome.org/show_bug.cgi?id=755200)

  Args:
    iconimage: Bytes of an icon image (the BMP data for an entry in an ICO
        file). Must be in BMP format, not PNG. Does not need to be 32-bit (if it
        is not 32-bit, this is a no-op).

  Returns:
    An updated |iconimage|, with the AND mask re-computed using
    ComputeANDMaskFromAlpha.
  """
  # Parse the BITMAPINFOHEADER (first 40 bytes).
  (_, width, height, _, bpp, _, _, _, _, num_colors, _) = struct.unpack(
      '<LLLHHLLLLLL', iconimage[:40])

  if bpp != 32:
    # No alpha channel, so the mask cannot be "wrong" (it is the only source of
    # transparency information).
    return iconimage

  # The BMP height counts both the XOR (color) and AND (mask) planes, so the
  # pixel height is half of it. Use floor division so the result stays an int
  # (a plain '/' would yield a float under Python 3).
  height //= 2
  # Each XOR row is padded to a 4-byte boundary.
  xor_size = int(math.ceil(width * bpp / 32.0)) * 4 * height

  # num_colors can be 0, implying 2^bpp colors.
  xor_palette_size = (num_colors or (1 << bpp if bpp < 24 else 0)) * 4
  xor_data = iconimage[40 + xor_palette_size :
                       40 + xor_palette_size + xor_size]

  and_data = ComputeANDMaskFromAlpha(xor_data, width, height)

  # Replace the AND mask in the original icon data.
  return iconimage[:40 + xor_palette_size + xor_size] + and_data
+
def OptimizeIcoFile(infile, outfile, optimization_level=None):
  """Read an ICO file, optimize its PNGs, and write the output to outfile.

  Args:
    infile: The file to read from. Must be a seekable file-like object
        containing a Microsoft ICO file.
    outfile: The file to write to.
    optimization_level: Optional level forwarded to OptimizePng for
        PNG-format entries.

  Raises:
    InvalidFile: if the ICONDIR header is malformed.
    EOFError: if an entry's bitmap data is shorter than its declared size.
  """
  filename = os.path.basename(infile.name)
  # ICONDIR header: reserved word (must be 0), image type, image count.
  icondir = infile.read(6)
  zero, image_type, num_images = struct.unpack('<HHH', icondir)
  if zero != 0:
    raise InvalidFile('First word must be 0.')
  if image_type not in (1, 2):
    raise InvalidFile('Image type must be 1 or 2.')

  # Read and unpack each ICONDIRENTRY.
  icon_dir_entries = []
  for i in range(num_images):
    icondirentry = infile.read(16)
    icon_dir_entries.append(struct.unpack('<BBBBHHLL', icondirentry))

  # Read each icon's bitmap data, crush PNGs, and update icon dir entries.
  # The header and directory sizes are unchanged, so the first bitmap keeps
  # its offset and later offsets are the running total of new sizes.
  current_offset = infile.tell()
  icon_bitmap_data = []
  for i in range(num_images):
    width, height, num_colors, r1, r2, r3, size, _ = icon_dir_entries[i]
    # A stored value of 0 in the one-byte width/height fields means 256.
    width = width or 256
    height = height or 256
    offset = current_offset
    icon_data = infile.read(size)
    if len(icon_data) != size:
      raise EOFError()

    entry_is_png = IsPng(icon_data)
    logging.info('%s entry #%d: %dx%d, %d bytes (%s)', filename, i + 1, width,
                 height, size, 'PNG' if entry_is_png else 'BMP')

    if entry_is_png:
      icon_data = OptimizePng(icon_data, optimization_level=optimization_level)
    else:
      # BMP entries keep their pixel data but may need the AND (transparency)
      # mask regenerated; see RebuildANDMask.
      new_icon_data = RebuildANDMask(icon_data)
      if new_icon_data != icon_data:
        logging.info(' * Rebuilt AND mask for this image from alpha channel.')
      icon_data = new_icon_data

    if width >= 256 or height >= 256:
      # TODO(mgiuca): Automatically convert large BMP images to PNGs.
      logging.warning('Entry #%d is a large image in uncompressed BMP '
                      'format. Please manually convert to PNG format before '
                      'running this utility.', i + 1)

    new_size = len(icon_data)
    current_offset += new_size
    # width/height are stored modulo 256 so that 256 round-trips as 0.
    icon_dir_entries[i] = (width % 256, height % 256, num_colors, r1, r2, r3,
                           new_size, offset)
    icon_bitmap_data.append(icon_data)

  # Write the data back to outfile.
  outfile.write(icondir)
  for icon_dir_entry in icon_dir_entries:
    outfile.write(struct.pack('<BBBBHHLL', *icon_dir_entry))
  for icon_bitmap in icon_bitmap_data:
    outfile.write(icon_bitmap)
diff --git a/chromium/tools/resources/list_resources_removed_by_repack.py b/chromium/tools/resources/list_resources_removed_by_repack.py
new file mode 100755
index 00000000000..a009eade41a
--- /dev/null
+++ b/chromium/tools/resources/list_resources_removed_by_repack.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import os
+import re
+import sys
+
+usage = """%s BUILDTYPE BUILDDIR
+
+BUILDTYPE: either chromium or chrome.
+BUILDDIR: The path to the output directory. e.g. relpath/to/out/Release
+
+Prints out (to stdout) the sorted list of resource ids that are marked as
+unused during the repacking process in the given build log (via stdin).
+Additionally, attempt to print out the name of the resource and the generated
+header file that contains the resource.
+
+This script is used to print the list of resources that are not used so that
+developers will notice and fix their .grd files.
+"""
+
+
def GetResourceIdsFromRepackMessage(in_data):
  """Extracts the resource ids that repack logged as removed.

  Args:
    in_data: An iterable of build-log lines.

  Returns:
    A sorted list of the distinct integer resource ids.
  """
  unused_pattern = re.compile(
      'RePackFromDataPackStrings Removed Key: (?P<resource_id>[0-9]+)')
  matches = (unused_pattern.match(line) for line in in_data)
  return sorted({int(m.group('resource_id')) for m in matches if m})
+
+
+def Main():
+ if len(sys.argv) != 3:
+ sys.stderr.write(usage % sys.argv[0])
+ return 1
+
+ build_type = sys.argv[1]
+ build_dir = sys.argv[2]
+
+ if build_type not in ('chromium', 'chrome'):
+ sys.stderr.write(usage % sys.argv[0])
+ return 1
+
+ generated_output_dir = os.path.join(build_dir, 'gen')
+ if not os.path.exists(generated_output_dir):
+ sys.stderr.write('Cannot find gen dir %s' % generated_output_dir)
+ return 1
+
+ if build_type == 'chromium':
+ excluded_header = 'google_chrome_strings.h'
+ else:
+ excluded_header = 'chromium_strings.h'
+ data_files = []
+ for root, dirs, files in os.walk(generated_output_dir):
+ if os.path.basename(root) != 'grit':
+ continue
+
+ header_files = [header for header in files if header.endswith('.h')]
+ if excluded_header in header_files:
+ header_files.remove(excluded_header)
+ data_files.extend([os.path.join(root, header) for header in header_files])
+
+ resource_id_to_name_file_map = {}
+ resource_pattern = re.compile('#define (?P<resource_name>[A-Z0-9_]+).* '
+ '(?P<resource_id>[0-9]+)$')
+ for f in data_files:
+ data = open(f).read()
+ for line in data.splitlines():
+ match = resource_pattern.match(line)
+ if match:
+ resource_id = int(match.group('resource_id'))
+ resource_name = match.group('resource_name')
+ if resource_id in resource_id_to_name_file_map:
+ print 'Duplicate:', resource_id
+ print (resource_name, f)
+ print resource_id_to_name_file_map[resource_id]
+ raise
+ resource_id_to_name_file_map[resource_id] = (resource_name, f)
+
+ unused_resources = GetResourceIdsFromRepackMessage(sys.stdin)
+ for resource_id in unused_resources:
+ if resource_id not in resource_id_to_name_file_map:
+ print 'WARNING: Unknown resource id', resource_id
+ continue
+ (resource_name, filename) = resource_id_to_name_file_map[resource_id]
+ sys.stdout.write('%d: %s in %s\n' % (resource_id, resource_name, filename))
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(Main())
diff --git a/chromium/tools/resources/list_unused_grit_header.py b/chromium/tools/resources/list_unused_grit_header.py
new file mode 100755
index 00000000000..49cf088dbf2
--- /dev/null
+++ b/chromium/tools/resources/list_unused_grit_header.py
@@ -0,0 +1,233 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A tool to scan source files for unneeded grit includes.
+
+Example:
+ cd /work/chrome/src
+ tools/resources/list_unused_grit_header.py ui/strings/ui_strings.grd chrome ui
+"""
+
+import os
+import sys
+import xml.etree.ElementTree
+
+from find_unused_resources import GetBaseResourceId
+
+IF_ELSE_TAGS = ('if', 'else')
+
+
def Usage(prog_name):
  """Prints a one-line usage summary for this tool."""
  sys.stdout.write('%s GRD_FILE PATHS_TO_SCAN\n' % prog_name)
+
+
def FilterResourceIds(resource_id):
  """Maps IDR_ resources to their base resource id; others pass through."""
  if not resource_id.startswith('IDR_'):
    return resource_id
  return GetBaseResourceId(resource_id)
+
+
def GetResourcesForNode(node, parent_file, resource_tag):
  """Recursively iterate through a node and extract resource names.

  Args:
    node: The node to iterate through.
    parent_file: The file that contains node.
    resource_tag: The resource tag to extract names from.

  Returns:
    A list of resource names.

  Raises:
    Exception: if a child has a tag other than |resource_tag|, an if/else
        tag, or 'part'.
  """
  resources = []
  # Iterate the element directly: Element.getchildren() was deprecated and
  # is removed in Python 3.9.
  for child in node:
    if child.tag == resource_tag:
      resources.append(child.attrib['name'])
    elif child.tag in IF_ELSE_TAGS:
      resources.extend(GetResourcesForNode(child, parent_file, resource_tag))
    elif child.tag == 'part':
      # A <part> element pulls in a .grdp file relative to its parent file.
      parent_dir = os.path.dirname(parent_file)
      part_file = os.path.join(parent_dir, child.attrib['file'])
      part_tree = xml.etree.ElementTree.parse(part_file)
      part_root = part_tree.getroot()
      assert part_root.tag == 'grit-part'
      resources.extend(GetResourcesForNode(part_root, part_file, resource_tag))
    else:
      raise Exception('unknown tag:', child.tag)

  # Handle the special case for resources of type "FOO_{LEFT,RIGHT,TOP}".
  if resource_tag == 'structure':
    resources = [FilterResourceIds(resource_id) for resource_id in resources]
  return resources
+
+
def FindNodeWithTag(node, tag):
  """Look through a node's direct children for one with a given tag.

  Args:
    node: The node to examine.
    tag: The tag on a child node to look for.

  Returns:
    The unique child node with the given tag, or None.
  """
  result = None
  # Iterate the element directly: Element.getchildren() was deprecated and
  # is removed in Python 3.9.
  for child in node:
    if child.tag == tag:
      # Compare against None explicitly: a childless Element is falsy, so
      # `assert not result` would let a duplicate tag slip through.
      assert result is None
      result = child
  return result
+
+
def GetResourcesForGrdFile(tree, grd_file):
  """Find all the message, include and structure resources in a grit file.

  Args:
    tree: The parsed XML tree of the grd file.
    grd_file: The file that contains the XML tree.

  Returns:
    A set of resource names.
  """
  root = tree.getroot()
  assert root.tag == 'grit'
  release_node = FindNodeWithTag(root, 'release')
  assert release_node != None

  resources = set()
  # Resource containers are named by the plural of their child tag.
  for node_type in ('message', 'include', 'structure'):
    container = FindNodeWithTag(release_node, node_type + 's')
    if container != None:
      resources |= set(GetResourcesForNode(container, grd_file, node_type))
  return resources
+
+
def GetOutputFileForNode(node):
  """Find the rc_header output file declared beneath a given node.

  Args:
    node: The root node to scan from.

  Returns:
    A grit header file name, or None if no rc_header output is declared.

  Raises:
    Exception: if a child has a tag other than 'output' or an if/else tag.
  """
  output_file = None
  # Iterate the element directly: Element.getchildren() was deprecated and
  # is removed in Python 3.9.
  for child in node:
    if child.tag == 'output':
      if child.attrib['type'] == 'rc_header':
        # There must be at most one rc_header output.
        assert output_file is None
        output_file = child.attrib['filename']
    elif child.tag in IF_ELSE_TAGS:
      child_output_file = GetOutputFileForNode(child)
      if not child_output_file:
        continue
      assert output_file is None
      output_file = child_output_file
    else:
      raise Exception('unknown tag:', child.tag)
  return output_file
+
+
def GetOutputHeaderFile(tree):
  """Find the generated rc_header file name for a parsed grd tree.

  Args:
    tree: The tree to scan.

  Returns:
    A grit header file name.
  """
  root = tree.getroot()
  assert root.tag == 'grit'
  outputs_node = FindNodeWithTag(root, 'outputs')
  assert outputs_node != None
  return GetOutputFileForNode(outputs_node)
+
+
def ShouldScanFile(filename):
  """Returns whether |filename| has a C++/Objective-C++ source extension."""
  return os.path.splitext(filename)[1] in ('.cc', '.cpp', '.h', '.mm')
+
+
def NeedsGritInclude(grit_header, resources, filename):
  """Return whether a file needs a given grit header or not.

  Args:
    grit_header: The grit header file name.
    resources: The list of resource names in grit_header.
    filename: The file to scan.

  Returns:
    True if the file should include the grit header. Files that do not
    include the header at all trivially return True — only files that
    include it without using any resource should be flagged (False).
  """
  # A list of special keywords that implies the file needs grit headers.
  # To be more thorough, one would need to run a pre-processor.
  SPECIAL_KEYWORDS = (
      '#include "ui_localizer_table.h"', # ui_localizer.mm
      'DEFINE_RESOURCE_ID', # chrome/browser/android/resource_mapper.cc
  )
  # NOTE(review): the file is opened in binary mode but compared against str
  # literals — fine under Python 2; would need decoding under Python 3.
  with open(filename, 'rb') as f:
    # Match any #include whose path ends with the header name: the include
    # directories used by different callers may differ.
    grit_header_line = grit_header + '"\n'
    has_grit_header = False
    while True:
      line = f.readline()
      if not line:
        break
      if line.endswith(grit_header_line):
        has_grit_header = True
        break

    if not has_grit_header:
      return True
    # Only the remainder of the file (after the include) is scanned; a
    # resource or keyword appearing there justifies the include.
    rest_of_the_file = f.read()
    return (any(resource in rest_of_the_file for resource in resources) or
            any(keyword in rest_of_the_file for keyword in SPECIAL_KEYWORDS))
+
+
+def main(argv):
+ if len(argv) < 3:
+ Usage(argv[0])
+ return 1
+ grd_file = argv[1]
+ paths_to_scan = argv[2:]
+ for f in paths_to_scan:
+ if not os.path.exists(f):
+ print 'Error: %s does not exist' % f
+ return 1
+
+ tree = xml.etree.ElementTree.parse(grd_file)
+ grit_header = GetOutputHeaderFile(tree)
+ if not grit_header:
+ print 'Error: %s does not generate any output headers.' % grit_header
+ return 1
+ resources = GetResourcesForGrdFile(tree, grd_file)
+
+ files_with_unneeded_grit_includes = []
+ for path_to_scan in paths_to_scan:
+ if os.path.isdir(path_to_scan):
+ for root, dirs, files in os.walk(path_to_scan):
+ if '.git' in dirs:
+ dirs.remove('.git')
+ full_paths = [os.path.join(root, f) for f in files if ShouldScanFile(f)]
+ files_with_unneeded_grit_includes.extend(
+ [f for f in full_paths
+ if not NeedsGritInclude(grit_header, resources, f)])
+ elif os.path.isfile(path_to_scan):
+ if not NeedsGritInclude(grit_header, resources, path_to_scan):
+ files_with_unneeded_grit_includes.append(path_to_scan)
+ else:
+ print 'Warning: Skipping %s' % path_to_scan
+
+ if files_with_unneeded_grit_includes:
+ print '\n'.join(files_with_unneeded_grit_includes)
+ return 2
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main(sys.argv))
diff --git a/chromium/tools/resources/optimize-ico-files.py b/chromium/tools/resources/optimize-ico-files.py
new file mode 100755
index 00000000000..2635e9c509b
--- /dev/null
+++ b/chromium/tools/resources/optimize-ico-files.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Windows ICO file crusher.
+
+Optimizes the PNG images within a Windows ICO icon file. This extracts all of
+the sub-images within the file, runs any PNG-formatted images through
+optimize-png-files.sh, then packs them back into an ICO file.
+
+NOTE: ICO files can contain both raw uncompressed BMP files and PNG files. This
+script does not touch the BMP files, which means if you have a huge uncompressed
+image, it will not get smaller. 256x256 icons should be PNG-formatted first.
+(Smaller icons should be BMPs for compatibility with Windows XP.)
+"""
+
+import argparse
+import logging
+import os
+import StringIO
+import sys
+
+import ico_tools
+
def main(args=None):
  """Crushes the .ico files named on the command line, in place.

  Args:
    args: Argument list to parse; defaults to sys.argv[1:].
  """
  if args is None:
    args = sys.argv[1:]

  parser = argparse.ArgumentParser(description='Crush Windows ICO files.')
  parser.add_argument('files', metavar='ICO', type=argparse.FileType('r+b'),
                      nargs='+', help='.ico files to be crushed')
  parser.add_argument('-o', dest='optimization_level', metavar='OPT', type=int,
                      help='optimization level')
  parser.add_argument('-d', '--debug', dest='debug', action='store_true',
                      help='enable debug logging')

  # Parse the list we were given: the previous parse_args() call silently
  # ignored the |args| parameter and always read sys.argv.
  args = parser.parse_args(args)

  if args.debug:
    logging.getLogger().setLevel(logging.DEBUG)

  # |ico_file| avoids shadowing the |file| builtin.
  for ico_file in args.files:
    buf = StringIO.StringIO()
    ico_file.seek(0, os.SEEK_END)
    old_length = ico_file.tell()
    ico_file.seek(0, os.SEEK_SET)
    ico_tools.OptimizeIcoFile(ico_file, buf, args.optimization_level)

    new_length = len(buf.getvalue())

    # Always write (even if file size not reduced), because we make other fixes
    # such as regenerating the AND mask.
    ico_file.truncate(new_length)
    ico_file.seek(0)
    ico_file.write(buf.getvalue())

    if new_length >= old_length:
      logging.info('%s : Could not reduce file size.', ico_file.name)
    else:
      saving = old_length - new_length
      saving_percent = float(saving) / old_length
      logging.info('%s : %d => %d (%d bytes : %d %%)', ico_file.name,
                   old_length, new_length, saving, int(saving_percent * 100))

if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/resources/optimize-png-files.sh b/chromium/tools/resources/optimize-png-files.sh
new file mode 100755
index 00000000000..ecee0675d8a
--- /dev/null
+++ b/chromium/tools/resources/optimize-png-files.sh
@@ -0,0 +1,549 @@
+#!/bin/bash -i
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# The optimization code is based on pngslim (http://goo.gl/a0XHg)
# and executes a similar pipeline to optimize the png file size.
+# The steps that require pngoptimizercl/pngrewrite/deflopt are omitted,
+# but this runs all other processes, including:
+# 1) various color-dependent optimizations using optipng.
+# 2) optimize the number of huffman blocks.
+# 3) randomize the huffman table.
+# 4) Further optimize using optipng and advdef (zlib stream).
+# Due to the step 3), each run may produce slightly different results.
+#
+# Note(oshima): In my experiment, advdef didn't reduce much. I'm keeping it
+# for now as it does not take much time to run.
+
+readonly ALL_DIRS="
+ash/resources
+chrome/android/java/res
+chrome/app/theme
+chrome/browser/resources
+chrome/renderer/resources
+component/resources
+content/public/android/java/res
+content/app/resources
+content/renderer/resources
+content/shell/resources
+remoting/resources
+ui/android/java/res
+ui/resources
+ui/chromeos/resources
+ui/webui/resources/images
+win8/resources
+"
+
+# Files larger than this file size (in bytes) will
+# use the optimization parameters tailored for large files.
+LARGE_FILE_THRESHOLD=3000
+
+# Constants used for optimization
+readonly DEFAULT_MIN_BLOCK_SIZE=128
+readonly DEFAULT_LIMIT_BLOCKS=256
+readonly DEFAULT_RANDOM_TRIALS=100
+# Taken from the recommendation in the pngslim's readme.txt.
+readonly LARGE_MIN_BLOCK_SIZE=1
+readonly LARGE_LIMIT_BLOCKS=2
+readonly LARGE_RANDOM_TRIALS=1
+
+# Global variables for stats
+TOTAL_OLD_BYTES=0
+TOTAL_NEW_BYTES=0
+TOTAL_FILE=0
+CORRUPTED_FILE=0
+PROCESSED_FILE=0
+
+declare -a THROBBER_STR=('-' '\\' '|' '/')
+THROBBER_COUNT=0
+
+VERBOSE=false
+
+# Echo only if verbose option is set.
function info {
  # $VERBOSE holds the command name "true" or "false", so run it directly.
  if $VERBOSE ; then
    echo $@
  fi
}
+
+# Show throbber character at current cursor position.
function throbber {
  # Print the next spinner character over the previous one (\b backspaces).
  info -ne "${THROBBER_STR[$THROBBER_COUNT]}\b"
  let THROBBER_COUNT=$THROBBER_COUNT+1
  # Wrap around the four spinner characters.
  let THROBBER_COUNT=$THROBBER_COUNT%4
}
+
+# Usage: pngout_loop <file> <png_out_options> ...
+# Optimize the png file using pngout with the given options
+# using various block split thresholds and filter types.
function pngout_loop {
  local file=$1
  shift
  # All remaining arguments are passed straight through to pngout.
  local opts=$*
  if [ $OPTIMIZE_LEVEL == 1 ]; then
    # Level 1: sweep only the six filter types (-f) with default splitting.
    for j in $(eval echo {0..5}); do
      throbber
      pngout -q -k1 -s1 -f$j $opts $file
    done
  else
    # Level 2: additionally sweep the block split threshold (-b).
    for i in 0 128 256 512; do
      for j in $(eval echo {0..5}); do
        throbber
        pngout -q -k1 -s1 -b$i -f$j $opts $file
      done
    done
  fi
}
+
+# Usage: get_color_depth_list
+# Returns the list of color depth options for current optimization level.
function get_color_depth_list {
  if [ $OPTIMIZE_LEVEL == 1 ]; then
    # Level 1 keeps the original bit depth only.
    echo "-d0"
  else
    echo "-d1 -d2 -d4 -d8"
  fi
}
+
+# Usage: process_grayscale <file>
+# Optimize grayscale images for all color bit depths.
+#
+# TODO(oshima): Experiment with -d0 w/o -c0.
function process_grayscale {
  info -ne "\b\b\b\b\b\b\b\bgray...."
  # NOTE(review): $file comes from the caller's scope (bash dynamic scoping);
  # the <file> argument suggested by the usage comment is never read — confirm.
  for opt in $(get_color_depth_list); do
    pngout_loop $file -c0 $opt
  done
}
+
+# Usage: process_grayscale_alpha <file>
+# Optimize grayscale images with alpha for all color bit depths.
function process_grayscale_alpha {
  info -ne "\b\b\b\b\b\b\b\bgray-a.."
  # NOTE(review): relies on $file from the caller's scope, not on $1.
  pngout_loop $file -c4
  for opt in $(get_color_depth_list); do
    pngout_loop $file -c3 $opt
  done
}
+
+# Usage: process_rgb <file>
+# Optimize rgb images with or without alpha for all color bit depths.
function process_rgb {
  info -ne "\b\b\b\b\b\b\b\brgb....."
  # NOTE(review): relies on $file from the caller's scope, not on $1.
  # -c3 is palette, -c2 is RGB, -c6 is RGBA.
  for opt in $(get_color_depth_list); do
    pngout_loop $file -c3 $opt
  done
  pngout_loop $file -c2
  pngout_loop $file -c6
}
+
+# Usage: huffman_blocks <file>
+# Optimize the huffman blocks.
function huffman_blocks {
  info -ne "\b\b\b\b\b\b\b\bhuffman."
  local file=$1
  local size=$(stat -c%s $file)
  local min_block_size=$DEFAULT_MIN_BLOCK_SIZE
  local limit_blocks=$DEFAULT_LIMIT_BLOCKS

  # Large files use the pngslim-recommended constants instead.
  if [ $size -gt $LARGE_FILE_THRESHOLD ]; then
    min_block_size=$LARGE_MIN_BLOCK_SIZE
    limit_blocks=$LARGE_LIMIT_BLOCKS
  fi
  # Derive the block-count cap from the file size, clamped to limit_blocks.
  let max_blocks=$size/$min_block_size
  if [ $max_blocks -gt $limit_blocks ]; then
    max_blocks=$limit_blocks
  fi

  # Try every huffman block count (-n) from 2 up to the cap.
  for i in $(eval echo {2..$max_blocks}); do
    throbber
    pngout -q -k1 -ks -s1 -n$i $file
  done
}
+
+# Usage: random_huffman_table_trial <file>
+# Try compressing by randomizing the initial huffman table.
+#
+# TODO(oshima): Try adjusting different parameters for large files to
+# reduce runtime.
function random_huffman_table_trial {
  info -ne "\b\b\b\b\b\b\b\brandom.."
  local file=$1
  local old_size=$(stat -c%s $file)
  local trials_count=$DEFAULT_RANDOM_TRIALS

  # Large files get a single trial to keep the runtime bounded.
  if [ $old_size -gt $LARGE_FILE_THRESHOLD ]; then
    trials_count=$LARGE_RANDOM_TRIALS
  fi
  for i in $(eval echo {1..$trials_count}); do
    throbber
    # -r randomizes the initial huffman table, so results vary run to run.
    pngout -q -k1 -ks -s0 -r $file
  done
  local new_size=$(stat -c%s $file)
  # Keep recursing for as long as the trials keep shrinking the file.
  if [ $new_size -lt $old_size ]; then
    random_huffman_table_trial $file
  fi
}
+
# Usage: final_compression <file>
+# Further compress using optipng and advdef.
+# TODO(oshima): Experiment with 256.
function final_compression {
  info -ne "\b\b\b\b\b\b\b\bfinal..."
  local file=$1
  # Level 2 only: sweep optipng window sizes, strategies and filters.
  if [ $OPTIMIZE_LEVEL == 2 ]; then
    for i in 32k 16k 8k 4k 2k 1k 512; do
      throbber
      optipng -q -nb -nc -zw$i -zc1-9 -zm1-9 -zs0-3 -f0-5 $file
    done
  fi
  # Re-deflate the zlib stream with advdef at levels 1-4.
  for i in $(eval echo {1..4}); do
    throbber
    advdef -q -z -$i $file
  done

  # Clear the current line.
  if $VERBOSE ; then
    printf "\033[0G\033[K"
  fi
}
+
+# Usage: get_color_type <file>
+# Returns the color type name of the png file. Here is the list of names
+# for each color type codes.
+# 0: grayscale
+# 2: RGB
+# 3: colormap
+# 4: gray+alpha
+# 6: RGBA
+# See http://en.wikipedia.org/wiki/Portable_Network_Graphics#Color_depth
+# for details about the color type code.
function get_color_type {
  local file=$1
  # Parse the color type (e.g. "RGBA") out of file(1)'s description line.
  echo $(file $file | awk -F, '{print $3}' | awk '{print $2}')
}
+
+# Usage: optimize_size <file>
+# Performs png file optimization.
function optimize_size {
  # Print filename, trimmed to ensure it + status don't take more than 1 line
  # NOTE(review): at this point $file is still the caller's variable (the
  # original path); the local $1 is only assigned below — confirm intended.
  local filename_length=${#file}
  local -i allowed_length=$COLUMNS-11
  local -i trimmed_length=$filename_length-$COLUMNS+14
  if [ "$filename_length" -lt "$allowed_length" ]; then
    info -n "$file|........"
  else
    info -n "...${file:$trimmed_length}|........"
  fi

  local file=$1

  # Start from a freshly re-deflated zlib stream.
  advdef -q -z -4 $file

  # Probe the color type: if a forced grayscale (-c0) conversion succeeds the
  # image is grayscale; otherwise if gray+alpha (-c4) succeeds it has alpha.
  pngout -q -s4 -c0 -force $file $file.tmp.png
  if [ -f $file.tmp.png ]; then
    rm $file.tmp.png
    process_grayscale $file
    process_grayscale_alpha $file
  else
    pngout -q -s4 -c4 -force $file $file.tmp.png
    if [ -f $file.tmp.png ]; then
      rm $file.tmp.png
      process_grayscale_alpha $file
    else
      process_rgb $file
    fi
  fi

  info -ne "\b\b\b\b\b\b\b\bfilter.."
  local old_color_type=$(get_color_type $file)
  optipng -q -zc9 -zm8 -zs0-3 -f0-5 -out $file.tmp.png $file
  local new_color_type=$(get_color_type $file.tmp.png)
  # optipng may corrupt a png file when reducing the color type
  # to grayscale/grayscale+alpha. Just skip such cases until
  # the bug is fixed. See crbug.com/174505, crbug.com/174084.
  # The issue is reported in
  # https://sourceforge.net/tracker/?func=detail&aid=3603630&group_id=151404&atid=780913
  if [[ $old_color_type == "RGBA" && $new_color_type == gray* ]] ; then
    rm $file.tmp.png
  else
    mv $file.tmp.png $file
  fi
  pngout -q -k1 -s1 $file

  huffman_blocks $file

  # TODO(oshima): Experiment with strategy 1.
  info -ne "\b\b\b\b\b\b\b\bstrategy"
  if [ $OPTIMIZE_LEVEL == 2 ]; then
    for i in 3 2 0; do
      pngout -q -k1 -ks -s$i $file
    done
  else
    pngout -q -k1 -ks -s1 $file
  fi

  if [ $OPTIMIZE_LEVEL == 2 ]; then
    random_huffman_table_trial $file
  fi

  final_compression $file
}
+
+# Usage: process_file <file>
function process_file {
  local file=$1
  local name=$(basename $file)
  # -rem alla removes all ancillary chunks except for tRNS
  pngcrush -d $TMP_DIR -brute -reduce -rem alla $file > /dev/null 2>&1

  # Run the expensive pipeline only if pngcrush produced an output file and
  # the optimization level asks for more than pngcrush alone.
  if [ -f $TMP_DIR/$name -a $OPTIMIZE_LEVEL != 0 ]; then
    optimize_size $TMP_DIR/$name
  fi
}
+
+# Usage: optimize_file <file>
function optimize_file {
  local file=$1
  if $using_cygwin ; then
    file=$(cygpath -w $file)
  fi

  local name=$(basename $file)
  local old=$(stat -c%s $file)
  local tmp_file=$TMP_DIR/$name
  let TOTAL_FILE+=1

  process_file $file

  # pngcrush leaves no output for files it could not parse.
  if [ ! -e $tmp_file ] ; then
    let CORRUPTED_FILE+=1
    # NOTE(review): echo without -e prints the trailing \n literally — confirm.
    echo "$file may be corrupted; skipping\n"
    return
  fi

  local new=$(stat -c%s $tmp_file)
  # Integer percentage saved: diff * 100 / old.
  let diff=$old-$new
  let percent=$diff*100
  let percent=$percent/$old

  # Overwrite the original only when the result is strictly smaller.
  if [ $new -lt $old ]; then
    info "$file: $old => $new ($diff bytes: $percent%)"
    cp "$tmp_file" "$file"
    let TOTAL_OLD_BYTES+=$old
    let TOTAL_NEW_BYTES+=$new
    let PROCESSED_FILE+=1
  else
    if [ $OPTIMIZE_LEVEL == 0 ]; then
      info "$file: Skipped"
    else
      info "$file: Unable to reduce size"
    fi
    rm $tmp_file
  fi
}
+
function optimize_dir {
  local dir=$1
  if $using_cygwin ; then
    dir=$(cygpath -w $dir)
  fi

  # NOTE(review): the find output is word-split, so paths containing spaces
  # would break here — acceptable for Chromium resource trees.
  for f in $(find $dir -name "*.png"); do
    optimize_file $f
  done
}
+
# Usage: install_if_not_installed <program> <package>
# Ensures <program> is on PATH. On Linux, offers to apt-get install
# <package> (exits if the user declines). On cygwin, tells the user to
# install the package via setup.exe and exits with an error.
function install_if_not_installed {
  local program=$1
  local package=$2
  which $program > /dev/null 2>&1
  if [ "$?" != "0" ]; then
    if $using_cygwin ; then
      echo "Couldn't find $program. " \
          "Please run cygwin's setup.exe and install the $package package."
      exit 1
    else
      read -p "Couldn't find $program. Do you want to install? (y/n)"
      [ "$REPLY" == "y" ] && sudo apt-get install $package
      [ "$REPLY" == "y" ] || exit
    fi
  fi
}
+
# Usage: fail_if_not_installed <program> <url>
# Exits with an error pointing the user at <url> when <program> is not on
# PATH (used for tools that cannot be installed from a package manager).
function fail_if_not_installed {
  local program=$1
  local url=$2
  which $program > /dev/null 2>&1
  if [ $? != 0 ]; then
    echo "Couldn't find $program. Please download and install it from $url ."
    exit 1
  fi
}
+
# Check pngcrush version and exit if the version is in bad range.
# See crbug.com/404893.
function exit_if_bad_pngcrush_version {
  # The awk pattern only matches 1.7.x; "1.7.50" becomes 1750 below.
  local version=$(pngcrush -v 2>&1 | awk "/pngcrush 1.7./ {print \$3}")
  local version_num=$(echo $version | sed "s/\.//g")
  # NOTE(review): for non-1.7.x pngcrush $version_num is empty, so the
  # numeric comparison emits an "operand expected" warning (the condition is
  # false and the script proceeds); confirm that is acceptable.
  if [[ (1748 -lt $version_num && $version_num -lt 1773) ]] ; then
    echo "Your pngcrush ($version) has a bug that exists from " \
        "1.7.49 to 1.7.72 (see crbug.com/404893 for details)."
    echo "Please upgrade pngcrush and try again"
    exit 1;
  fi
}
+
# Prints usage information and exits with status 1.
function show_help {
  local program=$(basename $0)
  echo \
"Usage: $program [options] <dir> ...

$program is a utility to reduce the size of png files by removing
unnecessary chunks and compressing the image.

Options:
  -o<optimize_level>  Specify optimization level: (default is 1)
      0  Just run pngcrush. It removes unnecessary chunks and perform basic
         optimization on the encoded data.
      1  Optimize png files using pngout/optipng and advdef. This can further
         reduce addtional 5~30%. This is the default level.
      2  Aggressively optimize the size of png files. This may produce
         addtional 1%~5% reduction.  Warning: this is *VERY*
         slow and can take hours to process all files.
  -r<revision> If this is specified, the script processes only png files
               changed since this revision. The <dir> options will be used
               to narrow down the files under specific directories.
  -v  Shows optimization process for each file.
  -h  Print this help text."
  exit 1
}
+
# ---- Main script ----

# Must be run from the Chromium src/ directory (parent holds .gclient).
if [ ! -e ../.gclient ]; then
  echo "$0 must be run in src directory"
  exit 1
fi

if [ "$(expr substr $(uname -s) 1 6)" == "CYGWIN" ]; then
  using_cygwin=true
else
  using_cygwin=false
fi

# The -i in the shebang line should result in $COLUMNS being set on newer
# versions of bash. If it's not set yet, attempt to set it.
if [ -z $COLUMNS ]; then
  which tput > /dev/null 2>&1
  if [ "$?" == "0" ]; then
    COLUMNS=$(tput cols)
  else
    # No tput either... give up and just guess 80 columns.
    COLUMNS=80
  fi
  export COLUMNS
fi

OPTIMIZE_LEVEL=1
# Parse options
while getopts o:r:h:v opts
do
  case $opts in
    r)
      # Map the svn revision to a git commit; only files changed since this
      # commit will be processed.
      COMMIT=$(git svn find-rev r$OPTARG | tail -1) || exit
      if [ -z "$COMMIT" ] ; then
        echo "Revision $OPTARG not found"
        show_help
      fi
      ;;
    o)
      if [[ "$OPTARG" != 0 && "$OPTARG" != 1 && "$OPTARG" != 2 ]] ; then
        show_help
      fi
      OPTIMIZE_LEVEL=$OPTARG
      ;;
    v)
      VERBOSE=true
      ;;
    [h?])
      show_help;;
  esac
done

# Remove options from argument list.
shift $(($OPTIND -1))

# Make sure we have all necessary commands installed.
install_if_not_installed pngcrush pngcrush
exit_if_bad_pngcrush_version

if [ $OPTIMIZE_LEVEL -ge 1 ]; then
  install_if_not_installed optipng optipng

  if $using_cygwin ; then
    fail_if_not_installed advdef "http://advancemame.sourceforge.net/comp-readme.html"
  else
    install_if_not_installed advdef advancecomp
  fi

  if $using_cygwin ; then
    pngout_url="http://www.advsys.net/ken/utils.htm"
  else
    pngout_url="http://www.jonof.id.au/kenutils"
  fi
  fail_if_not_installed pngout $pngout_url
fi

# Create tmp directory for crushed png file.
TMP_DIR=$(mktemp -d)
if $using_cygwin ; then
  TMP_DIR=$(cygpath -w $TMP_DIR)
fi

# Make sure we cleanup temp dir
# NOTE(review): the cleanup trap is commented out, so $TMP_DIR is leaked on
# every run; confirm whether this was disabled on purpose (debugging?).
#trap "rm -rf $TMP_DIR" EXIT

# If no directories are specified, optimize all directories.
DIRS=$@
set ${DIRS:=$ALL_DIRS}

info "Optimize level=$OPTIMIZE_LEVEL"

if [ -n "$COMMIT" ] ; then
  # -r<revision> mode: only process png files touched since that revision.
  ALL_FILES=$(git diff --name-only $COMMIT HEAD $DIRS | grep "png$")
  ALL_FILES_LIST=( $ALL_FILES )
  echo "Processing ${#ALL_FILES_LIST[*]} files"
  for f in $ALL_FILES; do
    if [ -f $f ] ; then
      optimize_file $f
    else
      echo "Skipping deleted file: $f";
    fi
  done
else
  for d in $DIRS; do
    if [ -d $d ] ; then
      info "Optimizing png files in $d"
      optimize_dir $d
      info ""
    elif [ -f $d ] ; then
      optimize_file $d
    else
      echo "Not a file or directory: $d";
    fi
  done
fi

# Print the results.
# Note: "date -d" and this usage of %s/%T are GNU date extensions.
echo "Optimized $PROCESSED_FILE/$TOTAL_FILE files in" \
  "$(date -d "0 + $SECONDS sec" +%Ts)"
if [ $PROCESSED_FILE != 0 ]; then
  let diff=$TOTAL_OLD_BYTES-$TOTAL_NEW_BYTES
  let percent=$diff*100/$TOTAL_OLD_BYTES
  echo "Result: $TOTAL_OLD_BYTES => $TOTAL_NEW_BYTES bytes" \
    "($diff bytes: $percent%)"
fi
if [ $CORRUPTED_FILE != 0 ]; then
  echo "Warning: corrupted files found: $CORRUPTED_FILE"
  echo "Please contact the author of the CL that landed corrupted png files"
fi
diff --git a/chromium/tools/roll_angle.py b/chromium/tools/roll_angle.py
new file mode 100755
index 00000000000..6432cc4a093
--- /dev/null
+++ b/chromium/tools/roll_angle.py
@@ -0,0 +1,412 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import logging
+import os
+import re
+import subprocess
+import sys
+import time
+
+extra_cq_trybots = [
+ {
+ "mastername": "tryserver.chromium.win",
+ "buildernames": ["win_optional_gpu_tests_rel"]
+ },
+ {
+ "mastername": "tryserver.chromium.mac",
+ "buildernames": ["mac_optional_gpu_tests_rel"]
+ },
+ {
+ "mastername": "tryserver.chromium.linux",
+ "buildernames": ["linux_optional_gpu_tests_rel"]
+ }
+]
+extra_fyi_trybots = [
+ {
+ "mastername": "tryserver.chromium.win",
+ "buildernames": ["win_clang_dbg"]
+ }
+]
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+SRC_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
+sys.path.insert(0, os.path.join(SRC_DIR, 'build'))
+import find_depot_tools
+find_depot_tools.add_depot_tools_to_path()
+import roll_dep_svn
+from gclient import GClientKeywords
+from third_party import upload
+
+# Avoid depot_tools/third_party/upload.py print verbose messages.
+upload.verbosity = 0 # Errors only.
+
+CHROMIUM_GIT_URL = 'https://chromium.googlesource.com/chromium/src.git'
+CL_ISSUE_RE = re.compile('^Issue number: ([0-9]+) \((.*)\)$')
+RIETVELD_URL_RE = re.compile('^https?://(.*)/(.*)')
+ROLL_BRANCH_NAME = 'special_angle_roll_branch'
+TRYJOB_STATUS_SLEEP_SECONDS = 30
+
+# Use a shell for subcommands on Windows to get a PATH search.
+IS_WIN = sys.platform.startswith('win')
+ANGLE_PATH = os.path.join('third_party', 'angle')
+
+CommitInfo = collections.namedtuple('CommitInfo', ['git_commit',
+ 'git_repo_url'])
+CLInfo = collections.namedtuple('CLInfo', ['issue', 'url', 'rietveld_server'])
+
+def _PosixPath(path):
+ """Convert a possibly-Windows path to a posix-style path."""
+ (_, path) = os.path.splitdrive(path)
+ return path.replace(os.sep, '/')
+
+def _ParseGitCommitHash(description):
+ for line in description.splitlines():
+ if line.startswith('commit '):
+ return line.split()[1]
+ logging.error('Failed to parse git commit id from:\n%s\n', description)
+ sys.exit(-1)
+ return None
+
+
def _ParseDepsFile(filename):
  """Read the DEPS file at `filename` and return its parsed local scope."""
  with open(filename, 'rb') as deps_file:
    return _ParseDepsDict(deps_file.read())
+
+
def _ParseDepsDict(deps_content):
  """Evaluate DEPS file text and return the resulting local-variable dict.

  DEPS is executable Python; GClientKeywords supplies the File/From/Var
  helpers a DEPS file may call, and Var lookups resolve against the
  accumulating local scope.
  """
  local_scope = {}
  var = GClientKeywords.VarImpl({}, local_scope)
  global_scope = {
    'File': GClientKeywords.FileImpl,
    'From': GClientKeywords.FromImpl,
    'Var': var.Lookup,
    'deps_os': {},
  }
  # exec of repository-controlled content: DEPS comes from the local
  # checkout, not from untrusted input.
  exec(deps_content, global_scope, local_scope)
  return local_scope
+
+
def _GenerateCLDescriptionCommand(angle_current, angle_new, bugs, tbr):
  """Build the `git commit` '-m' argument list describing an ANGLE roll.

  Args:
    angle_current: CommitInfo of the currently pinned ANGLE revision.
    angle_new: CommitInfo of the revision being rolled to.
    bugs: iterable of bug numbers for the BUG= line.
    tbr: reviewer string for the TBR= line (falsy yields an empty TBR=).

  Returns:
    A flat list alternating '-m' and message strings.
  """
  def GetChangeString(current_hash, new_hash):
    return '%s..%s' % (current_hash[0:7], new_hash[0:7])

  def GetChangeLogURL(git_repo_url, change_string):
    return '%s/+log/%s' % (git_repo_url, change_string)

  def GetBugString(bugs):
    bug_str = 'BUG='
    for bug in bugs:
      bug_str += str(bug) + ','
    return bug_str.rstrip(',')

  def GetExtraCQTrybotString():
    s = ''
    for t in extra_cq_trybots:
      if s:
        s += ';'
      s += t['mastername'] + ':' + ','.join(t['buildernames'])
    return s

  def GetTBRString(tbr):
    if not tbr:
      return ''
    return 'TBR=' + tbr

  # Bug fix: compute these unconditionally. The original guarded the
  # assignments behind `angle_current.git_commit != angle_new.git_commit`,
  # so the unconditional references below raised NameError whenever the two
  # commits were identical.
  change_str = GetChangeString(angle_current.git_commit,
                               angle_new.git_commit)
  changelog_url = GetChangeLogURL(angle_current.git_repo_url, change_str)

  extra_trybot_args = []
  if extra_cq_trybots:
    extra_trybot_string = GetExtraCQTrybotString()
    extra_trybot_args = ['-m', 'CQ_INCLUDE_TRYBOTS=' + extra_trybot_string]

  return [
    '-m', 'Roll ANGLE ' + change_str,
    '-m', '%s' % changelog_url,
    '-m', GetBugString(bugs),
    '-m', GetTBRString(tbr),
    '-m', 'TEST=bots',
  ] + extra_trybot_args
+
+
class AutoRoller(object):
  """Prepares (or aborts) a Chromium CL that rolls the ANGLE DEPS entry."""

  def __init__(self, chromium_src):
    # Absolute path of the Chromium src/ checkout to operate on.
    self._chromium_src = chromium_src

  def _RunCommand(self, command, working_dir=None, ignore_exit_code=False,
                  extra_env=None):
    """Runs a command and returns the stdout from that command.

    If the command fails (exit code != 0), the function will exit the process.
    """
    working_dir = working_dir or self._chromium_src
    logging.debug('cmd: %s cwd: %s', ' '.join(command), working_dir)
    env = os.environ.copy()
    if extra_env:
      logging.debug('extra env: %s', extra_env)
      env.update(extra_env)
    # shell=IS_WIN so Windows resolves the command through PATH.
    p = subprocess.Popen(command, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE, shell=IS_WIN, env=env,
                         cwd=working_dir, universal_newlines=True)
    output = p.stdout.read()
    p.wait()
    p.stdout.close()
    p.stderr.close()

    if not ignore_exit_code and p.returncode != 0:
      logging.error('Command failed: %s\n%s', str(command), output)
      sys.exit(p.returncode)
    return output

  def _GetCommitInfo(self, path_below_src, git_hash=None, git_repo_url=None):
    """Fetches origin and returns CommitInfo for git_hash (or origin tip)."""
    working_dir = os.path.join(self._chromium_src, path_below_src)
    self._RunCommand(['git', 'fetch', 'origin'], working_dir=working_dir)
    revision_range = git_hash or 'origin'
    ret = self._RunCommand(
        ['git', '--no-pager', 'log', revision_range, '--pretty=full', '-1'],
        working_dir=working_dir)
    return CommitInfo(_ParseGitCommitHash(ret), git_repo_url)

  def _GetDepsCommitInfo(self, deps_dict, path_below_src):
    """Returns CommitInfo parsed from a '<url>@<hash>' DEPS entry."""
    entry = deps_dict['deps'][_PosixPath('src/%s' % path_below_src)]
    at_index = entry.find('@')
    git_repo_url = entry[:at_index]
    git_hash = entry[at_index + 1:]
    return self._GetCommitInfo(path_below_src, git_hash, git_repo_url)

  def _GetCLInfo(self):
    """Parses `git cl issue` output into a CLInfo; exits on parse failure."""
    cl_output = self._RunCommand(['git', 'cl', 'issue'])
    m = CL_ISSUE_RE.match(cl_output.strip())
    if not m:
      logging.error('Cannot find any CL info. Output was:\n%s', cl_output)
      sys.exit(-1)
    issue_number = int(m.group(1))
    url = m.group(2)

    # Parse the Rietveld host from the URL.
    m = RIETVELD_URL_RE.match(url)
    if not m:
      logging.error('Cannot parse Rietveld host from URL: %s', url)
      sys.exit(-1)
    rietveld_server = m.group(1)
    return CLInfo(issue_number, url, rietveld_server)

  def _GetCurrentBranchName(self):
    """Returns the name of the currently checked-out git branch."""
    return self._RunCommand(
        ['git', 'rev-parse', '--abbrev-ref', 'HEAD']).splitlines()[0]

  def _IsTreeClean(self):
    """True when `git status` reports no modified tracked files."""
    lines = self._RunCommand(
        ['git', 'status', '--porcelain', '-uno']).splitlines()
    if len(lines) == 0:
      return True

    logging.debug('Dirty/unversioned files:\n%s', '\n'.join(lines))
    return False

  def _GetBugList(self, path_below_src, angle_current, angle_new):
    """Collects numeric BUG= entries from commits in the rolled range."""
    working_dir = os.path.join(self._chromium_src, path_below_src)
    lines = self._RunCommand(
        ['git','log',
         '%s..%s' % (angle_current.git_commit, angle_new.git_commit)],
        working_dir=working_dir).split('\n')
    bugs = set()
    for line in lines:
      line = line.strip()
      bug_prefix = 'BUG='
      if line.startswith(bug_prefix):
        bugs_strings = line[len(bug_prefix):].split(',')
        for bug_string in bugs_strings:
          try:
            bugs.add(int(bug_string))
          except:
            # skip this, it may be a project specific bug such as
            # "angleproject:X" or an ill-formed BUG= message
            pass
    return bugs

  def _UpdateReadmeFile(self, readme_path, new_revision):
    """Rewrites the 'Revision:' line of a README.chromium file in place."""
    readme = open(os.path.join(self._chromium_src, readme_path), 'r+')
    txt = readme.read()
    m = re.sub(re.compile('.*^Revision\: ([0-9]*).*', re.MULTILINE),
        ('Revision: %s' % new_revision), txt)
    readme.seek(0)
    readme.write(m)
    readme.truncate()

  def _TriggerExtraTrybots(self, trybots):
    """Kicks off a `git cl try` job for every builder in `trybots`."""
    for trybot in trybots:
      for builder in trybot['buildernames']:
        self._RunCommand([
            'git', 'cl', 'try',
            '-m', trybot['mastername'],
            '-b', builder])

  def PrepareRoll(self, ignore_checks, tbr, should_commit):
    """Creates, uploads and (optionally) CQs the ANGLE roll CL.

    Args:
      ignore_checks: skip the branch/clean-tree/pull preconditions when True.
      tbr: reviewer string for the TBR= commit-message line.
      should_commit: mark the uploaded CL for the commit queue when True.

    Returns:
      0 on success, -1 when a precondition fails.
    """
    # TODO(kjellander): use os.path.normcase, os.path.join etc for all paths for
    # cross platform compatibility.

    if not ignore_checks:
      if self._GetCurrentBranchName() != 'master':
        logging.error('Please checkout the master branch.')
        return -1
      if not self._IsTreeClean():
        logging.error('Please make sure you don\'t have any modified files.')
        return -1

    # Always clean up any previous roll.
    self.Abort()

    logging.debug('Pulling latest changes')
    if not ignore_checks:
      self._RunCommand(['git', 'pull'])

    self._RunCommand(['git', 'checkout', '-b', ROLL_BRANCH_NAME])

    # Modify Chromium's DEPS file.

    # Parse current hashes.
    deps_filename = os.path.join(self._chromium_src, 'DEPS')
    deps = _ParseDepsFile(deps_filename)
    angle_current = self._GetDepsCommitInfo(deps, ANGLE_PATH)

    # Find ToT revisions.
    angle_latest = self._GetCommitInfo(ANGLE_PATH)

    if IS_WIN:
      # Make sure the roll script doesn't use windows line endings
      self._RunCommand(['git', 'config', 'core.autocrlf', 'true'])

    self._UpdateDep(deps_filename, ANGLE_PATH, angle_latest)

    if self._IsTreeClean():
      logging.debug('Tree is clean - no changes detected.')
      self._DeleteRollBranch()
    else:
      bugs = self._GetBugList(ANGLE_PATH, angle_current, angle_latest)
      description = _GenerateCLDescriptionCommand(
          angle_current, angle_latest, bugs, tbr)
      logging.debug('Committing changes locally.')
      self._RunCommand(['git', 'add', '--update', '.'])
      self._RunCommand(['git', 'commit'] + description)
      logging.debug('Uploading changes...')
      # EDITOR=true keeps `git cl upload` from opening an interactive editor.
      self._RunCommand(['git', 'cl', 'upload'],
                       extra_env={'EDITOR': 'true'})

      # Kick off tryjobs.
      base_try_cmd = ['git', 'cl', 'try']
      self._RunCommand(base_try_cmd)

      if extra_cq_trybots:
        # Run additional tryjobs.
        # TODO(kbr): this should not be necessary -- the
        # CQ_INCLUDE_TRYBOTS directive above should handle it.
        # http://crbug.com/585237
        self._TriggerExtraTrybots(extra_cq_trybots)

      if extra_fyi_trybots:
        self._TriggerExtraTrybots(extra_fyi_trybots)

      # Mark the CL to be committed if requested
      if should_commit:
        self._RunCommand(['git', 'cl', 'set-commit'])

      cl_info = self._GetCLInfo()
      print 'Issue: %d URL: %s' % (cl_info.issue, cl_info.url)

    # Checkout master again.
    self._RunCommand(['git', 'checkout', 'master'])
    print 'Roll branch left as ' + ROLL_BRANCH_NAME
    return 0

  def _UpdateDep(self, deps_filename, dep_relative_to_src, commit_info):
    """Points the DEPS entry for dep_relative_to_src at commit_info."""
    dep_name = _PosixPath(os.path.join('src', dep_relative_to_src))

    # roll_dep_svn.py relies on cwd being the Chromium checkout, so let's
    # temporarily change the working directory and then change back.
    cwd = os.getcwd()
    os.chdir(os.path.dirname(deps_filename))
    roll_dep_svn.update_deps(deps_filename, dep_relative_to_src, dep_name,
                             commit_info.git_commit, '')
    os.chdir(cwd)

  def _DeleteRollBranch(self):
    """Checks out master and deletes the local roll branch."""
    self._RunCommand(['git', 'checkout', 'master'])
    self._RunCommand(['git', 'branch', '-D', ROLL_BRANCH_NAME])
    logging.debug('Deleted the local roll branch (%s)', ROLL_BRANCH_NAME)


  def _GetBranches(self):
    """Returns a tuple of active,branches.

    The 'active' is the name of the currently active branch and 'branches' is a
    list of all branches.
    """
    lines = self._RunCommand(['git', 'branch']).split('\n')
    branches = []
    active = ''
    for l in lines:
      if '*' in l:
        # The assumption is that the first char will always be the '*'.
        active = l[1:].strip()
        branches.append(active)
      else:
        b = l.strip()
        if b:
          branches.append(b)
    return (active, branches)

  def Abort(self):
    """Closes any pending roll CL and deletes the roll branch. Returns 0."""
    active_branch, branches = self._GetBranches()
    if active_branch == ROLL_BRANCH_NAME:
      active_branch = 'master'
    if ROLL_BRANCH_NAME in branches:
      print 'Aborting pending roll.'
      self._RunCommand(['git', 'checkout', ROLL_BRANCH_NAME])
      # Ignore an error here in case an issue wasn't created for some reason.
      self._RunCommand(['git', 'cl', 'set_close'], ignore_exit_code=True)
      self._RunCommand(['git', 'checkout', active_branch])
      self._RunCommand(['git', 'branch', '-D', ROLL_BRANCH_NAME])
    return 0
+
+
def main():
  """Command-line entry point; returns a process exit status."""
  parser = argparse.ArgumentParser(
      description='Auto-generates a CL containing an ANGLE roll.')
  parser.add_argument('--abort',
    help=('Aborts a previously prepared roll. '
          'Closes any associated issues and deletes the roll branches'),
    action='store_true')
  parser.add_argument('--ignore-checks', action='store_true', default=False,
    help=('Skips checks for being on the master branch, dirty workspaces and '
          'the updating of the checkout. Will still delete and create local '
          'Git branches.'))
  parser.add_argument('--tbr', help='Add a TBR to the commit message.')
  parser.add_argument('--commit', action='store_true', default=False,
    help='Submit the roll to the CQ after uploading.')
  parser.add_argument('-v', '--verbose', action='store_true', default=False,
    help='Be extra verbose in printing of log messages.')
  args = parser.parse_args()

  log_level = logging.DEBUG if args.verbose else logging.ERROR
  logging.basicConfig(level=log_level)

  autoroller = AutoRoller(SRC_DIR)
  if args.abort:
    return autoroller.Abort()
  return autoroller.PrepareRoll(args.ignore_checks, args.tbr, args.commit)
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/roll_webgl_conformance.py b/chromium/tools/roll_webgl_conformance.py
new file mode 100755
index 00000000000..7ea056f1f50
--- /dev/null
+++ b/chromium/tools/roll_webgl_conformance.py
@@ -0,0 +1,389 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import logging
+import os
+import re
+import subprocess
+import sys
+import time
+
+extra_trybots = [
+ {
+ "mastername": "tryserver.chromium.win",
+ "buildernames": ["win_optional_gpu_tests_rel"]
+ },
+ {
+ "mastername": "tryserver.chromium.mac",
+ "buildernames": ["mac_optional_gpu_tests_rel"]
+ }
+]
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+SRC_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
+sys.path.insert(0, os.path.join(SRC_DIR, 'build'))
+import find_depot_tools
+find_depot_tools.add_depot_tools_to_path()
+import roll_dep_svn
+from gclient import GClientKeywords
+from third_party import upload
+
+# Avoid depot_tools/third_party/upload.py print verbose messages.
+upload.verbosity = 0 # Errors only.
+
+CHROMIUM_GIT_URL = 'https://chromium.googlesource.com/chromium/src.git'
+CL_ISSUE_RE = re.compile('^Issue number: ([0-9]+) \((.*)\)$')
+RIETVELD_URL_RE = re.compile('^https?://(.*)/(.*)')
+ROLL_BRANCH_NAME = 'special_webgl_roll_branch'
+TRYJOB_STATUS_SLEEP_SECONDS = 30
+
+# Use a shell for subcommands on Windows to get a PATH search.
+IS_WIN = sys.platform.startswith('win')
+WEBGL_PATH = os.path.join('third_party', 'webgl', 'src')
+
+CommitInfo = collections.namedtuple('CommitInfo', ['git_commit',
+ 'git_repo_url'])
+CLInfo = collections.namedtuple('CLInfo', ['issue', 'url', 'rietveld_server'])
+
+def _PosixPath(path):
+ """Convert a possibly-Windows path to a posix-style path."""
+ (_, path) = os.path.splitdrive(path)
+ return path.replace(os.sep, '/')
+
+def _ParseGitCommitHash(description):
+ for line in description.splitlines():
+ if line.startswith('commit '):
+ return line.split()[1]
+ logging.error('Failed to parse git commit id from:\n%s\n', description)
+ sys.exit(-1)
+ return None
+
+
def _ParseDepsFile(filename):
  """Load the DEPS file at `filename` and return its parsed local scope."""
  with open(filename, 'rb') as handle:
    contents = handle.read()
  return _ParseDepsDict(contents)
+
+
def _ParseDepsDict(deps_content):
  """Evaluate DEPS file text and return the resulting local-variable dict.

  DEPS is executable Python; GClientKeywords supplies the File/From/Var
  helpers a DEPS file may call, and Var lookups resolve against the
  accumulating local scope.
  """
  local_scope = {}
  var = GClientKeywords.VarImpl({}, local_scope)
  global_scope = {
    'File': GClientKeywords.FileImpl,
    'From': GClientKeywords.FromImpl,
    'Var': var.Lookup,
    'deps_os': {},
  }
  # exec of repository-controlled content: DEPS comes from the local
  # checkout, not from untrusted input.
  exec(deps_content, global_scope, local_scope)
  return local_scope
+
+
def _GenerateCLDescriptionCommand(webgl_current, webgl_new, bugs):
  """Build the `git commit` '-m' argument list describing a WebGL roll.

  Args:
    webgl_current: CommitInfo of the currently pinned WebGL revision.
    webgl_new: CommitInfo of the revision being rolled to.
    bugs: iterable of bug numbers for the BUG= line.

  Returns:
    A flat list alternating '-m' and message strings.
  """
  def GetChangeString(current_hash, new_hash):
    return '%s..%s' % (current_hash[0:7], new_hash[0:7])

  def GetChangeLogURL(git_repo_url, change_string):
    return '%s/+log/%s' % (git_repo_url, change_string)

  def GetBugString(bugs):
    bug_str = 'BUG='
    for bug in bugs:
      bug_str += str(bug) + ','
    return bug_str.rstrip(',')

  def GetExtraTrybotString():
    s = ''
    for t in extra_trybots:
      if s:
        s += ';'
      s += t['mastername'] + ':' + ','.join(t['buildernames'])
    return s

  # Bug fix: compute these unconditionally. The original guarded the
  # assignments behind `webgl_current.git_commit != webgl_new.git_commit`,
  # so the unconditional references below raised NameError whenever the two
  # commits were identical.
  change_str = GetChangeString(webgl_current.git_commit,
                               webgl_new.git_commit)
  changelog_url = GetChangeLogURL(webgl_current.git_repo_url, change_str)

  extra_trybot_args = []
  if extra_trybots:
    extra_trybot_string = GetExtraTrybotString()
    extra_trybot_args = ['-m', 'CQ_INCLUDE_TRYBOTS=' + extra_trybot_string]

  return [
    '-m', 'Roll WebGL ' + change_str,
    '-m', '%s' % changelog_url,
    '-m', GetBugString(bugs),
    '-m', 'TEST=bots',
  ] + extra_trybot_args
+
+
class AutoRoller(object):
  """Prepares (or aborts) a Chromium CL that rolls the WebGL DEPS entry."""

  def __init__(self, chromium_src):
    # Absolute path of the Chromium src/ checkout to operate on.
    self._chromium_src = chromium_src

  def _RunCommand(self, command, working_dir=None, ignore_exit_code=False,
                  extra_env=None):
    """Runs a command and returns the stdout from that command.

    If the command fails (exit code != 0), the function will exit the process.
    """
    working_dir = working_dir or self._chromium_src
    logging.debug('cmd: %s cwd: %s', ' '.join(command), working_dir)
    env = os.environ.copy()
    if extra_env:
      logging.debug('extra env: %s', extra_env)
      env.update(extra_env)
    # shell=IS_WIN so Windows resolves the command through PATH.
    p = subprocess.Popen(command, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE, shell=IS_WIN, env=env,
                         cwd=working_dir, universal_newlines=True)
    output = p.stdout.read()
    p.wait()
    p.stdout.close()
    p.stderr.close()

    if not ignore_exit_code and p.returncode != 0:
      logging.error('Command failed: %s\n%s', str(command), output)
      sys.exit(p.returncode)
    return output

  def _GetCommitInfo(self, path_below_src, git_hash=None, git_repo_url=None):
    """Fetches origin and returns CommitInfo for git_hash (or origin tip)."""
    working_dir = os.path.join(self._chromium_src, path_below_src)
    self._RunCommand(['git', 'fetch', 'origin'], working_dir=working_dir)
    revision_range = git_hash or 'origin'
    ret = self._RunCommand(
        ['git', '--no-pager', 'log', revision_range, '--pretty=full', '-1'],
        working_dir=working_dir)
    return CommitInfo(_ParseGitCommitHash(ret), git_repo_url)

  def _GetDepsCommitInfo(self, deps_dict, path_below_src):
    """Returns CommitInfo parsed from a '<url>@<hash>' DEPS entry."""
    entry = deps_dict['deps'][_PosixPath('src/%s' % path_below_src)]
    at_index = entry.find('@')
    git_repo_url = entry[:at_index]
    git_hash = entry[at_index + 1:]
    return self._GetCommitInfo(path_below_src, git_hash, git_repo_url)

  def _GetCLInfo(self):
    """Parses `git cl issue` output into a CLInfo; exits on parse failure."""
    cl_output = self._RunCommand(['git', 'cl', 'issue'])
    m = CL_ISSUE_RE.match(cl_output.strip())
    if not m:
      logging.error('Cannot find any CL info. Output was:\n%s', cl_output)
      sys.exit(-1)
    issue_number = int(m.group(1))
    url = m.group(2)

    # Parse the Rietveld host from the URL.
    m = RIETVELD_URL_RE.match(url)
    if not m:
      logging.error('Cannot parse Rietveld host from URL: %s', url)
      sys.exit(-1)
    rietveld_server = m.group(1)
    return CLInfo(issue_number, url, rietveld_server)

  def _GetCurrentBranchName(self):
    """Returns the name of the currently checked-out git branch."""
    return self._RunCommand(
        ['git', 'rev-parse', '--abbrev-ref', 'HEAD']).splitlines()[0]

  def _IsTreeClean(self):
    """True when `git status` reports no modified tracked files."""
    lines = self._RunCommand(
        ['git', 'status', '--porcelain', '-uno']).splitlines()
    if len(lines) == 0:
      return True

    logging.debug('Dirty/unversioned files:\n%s', '\n'.join(lines))
    return False

  def _GetBugList(self, path_below_src, webgl_current, webgl_new):
    """Collects numeric BUG= entries from commits in the rolled range."""
    # TODO(kbr): this isn't useful, at least not yet, when run against
    # the WebGL Github repository.
    working_dir = os.path.join(self._chromium_src, path_below_src)
    lines = self._RunCommand(
        ['git','log',
         '%s..%s' % (webgl_current.git_commit, webgl_new.git_commit)],
        working_dir=working_dir).split('\n')
    bugs = set()
    for line in lines:
      line = line.strip()
      bug_prefix = 'BUG='
      if line.startswith(bug_prefix):
        bugs_strings = line[len(bug_prefix):].split(',')
        for bug_string in bugs_strings:
          try:
            bugs.add(int(bug_string))
          except:
            # skip this, it may be a project specific bug such as
            # "angleproject:X" or an ill-formed BUG= message
            pass
    return bugs

  def _UpdateReadmeFile(self, readme_path, new_revision):
    """Rewrites the 'Revision:' line of a README.chromium file in place."""
    readme = open(os.path.join(self._chromium_src, readme_path), 'r+')
    txt = readme.read()
    m = re.sub(re.compile('.*^Revision\: ([0-9]*).*', re.MULTILINE),
        ('Revision: %s' % new_revision), txt)
    readme.seek(0)
    readme.write(m)
    readme.truncate()

  def PrepareRoll(self, ignore_checks, skip_tryjobs):
    """Creates and uploads the WebGL conformance roll CL.

    Args:
      ignore_checks: skip the branch/clean-tree/pull preconditions when True.
      skip_tryjobs: do not kick off dry-run tryjobs after uploading.

    Returns:
      0 on success, -1 when a precondition fails.
    """
    # TODO(kjellander): use os.path.normcase, os.path.join etc for all paths for
    # cross platform compatibility.

    if not ignore_checks:
      if self._GetCurrentBranchName() != 'master':
        logging.error('Please checkout the master branch.')
        return -1
      if not self._IsTreeClean():
        logging.error('Please make sure you don\'t have any modified files.')
        return -1

    # Always clean up any previous roll.
    self.Abort()

    logging.debug('Pulling latest changes')
    if not ignore_checks:
      self._RunCommand(['git', 'pull'])

    self._RunCommand(['git', 'checkout', '-b', ROLL_BRANCH_NAME])

    # Modify Chromium's DEPS file.

    # Parse current hashes.
    deps_filename = os.path.join(self._chromium_src, 'DEPS')
    deps = _ParseDepsFile(deps_filename)
    webgl_current = self._GetDepsCommitInfo(deps, WEBGL_PATH)

    # Find ToT revisions.
    webgl_latest = self._GetCommitInfo(WEBGL_PATH)

    if IS_WIN:
      # Make sure the roll script doesn't use windows line endings
      self._RunCommand(['git', 'config', 'core.autocrlf', 'true'])

    self._UpdateDep(deps_filename, WEBGL_PATH, webgl_latest)

    if self._IsTreeClean():
      logging.debug('Tree is clean - no changes detected.')
      self._DeleteRollBranch()
    else:
      bugs = self._GetBugList(WEBGL_PATH, webgl_current, webgl_latest)
      description = _GenerateCLDescriptionCommand(
          webgl_current, webgl_latest, bugs)
      logging.debug('Committing changes locally.')
      self._RunCommand(['git', 'add', '--update', '.'])
      self._RunCommand(['git', 'commit'] + description)
      logging.debug('Uploading changes...')
      # EDITOR=true keeps `git cl upload` from opening an interactive editor.
      self._RunCommand(['git', 'cl', 'upload'],
                       extra_env={'EDITOR': 'true'})

      if not skip_tryjobs:
        # Kick off tryjobs.
        base_try_cmd = ['git', 'cl', 'try']
        self._RunCommand(base_try_cmd)
        if extra_trybots:
          # Run additional tryjobs.
          # TODO(kbr): this should not be necessary -- the
          # CQ_INCLUDE_TRYBOTS directive above should handle it.
          # http://crbug.com/585237
          for trybot in extra_trybots:
            for builder in trybot['buildernames']:
              self._RunCommand(base_try_cmd + [
                  '-m', trybot['mastername'],
                  '-b', builder])

      cl_info = self._GetCLInfo()
      print 'Issue: %d URL: %s' % (cl_info.issue, cl_info.url)

    # Checkout master again.
    self._RunCommand(['git', 'checkout', 'master'])
    print 'Roll branch left as ' + ROLL_BRANCH_NAME
    return 0

  def _UpdateDep(self, deps_filename, dep_relative_to_src, commit_info):
    """Points the DEPS entry for dep_relative_to_src at commit_info."""
    dep_name = _PosixPath(os.path.join('src', dep_relative_to_src))

    # roll_dep_svn.py relies on cwd being the Chromium checkout, so let's
    # temporarily change the working directory and then change back.
    cwd = os.getcwd()
    os.chdir(os.path.dirname(deps_filename))
    roll_dep_svn.update_deps(deps_filename, dep_relative_to_src, dep_name,
                             commit_info.git_commit, '')
    os.chdir(cwd)

  def _DeleteRollBranch(self):
    """Checks out master and deletes the local roll branch."""
    self._RunCommand(['git', 'checkout', 'master'])
    self._RunCommand(['git', 'branch', '-D', ROLL_BRANCH_NAME])
    logging.debug('Deleted the local roll branch (%s)', ROLL_BRANCH_NAME)


  def _GetBranches(self):
    """Returns a tuple of active,branches.

    The 'active' is the name of the currently active branch and 'branches' is a
    list of all branches.
    """
    lines = self._RunCommand(['git', 'branch']).split('\n')
    branches = []
    active = ''
    for l in lines:
      if '*' in l:
        # The assumption is that the first char will always be the '*'.
        active = l[1:].strip()
        branches.append(active)
      else:
        b = l.strip()
        if b:
          branches.append(b)
    return (active, branches)

  def Abort(self):
    """Closes any pending roll CL and deletes the roll branch. Returns 0."""
    active_branch, branches = self._GetBranches()
    if active_branch == ROLL_BRANCH_NAME:
      active_branch = 'master'
    if ROLL_BRANCH_NAME in branches:
      print 'Aborting pending roll.'
      self._RunCommand(['git', 'checkout', ROLL_BRANCH_NAME])
      # Ignore an error here in case an issue wasn't created for some reason.
      self._RunCommand(['git', 'cl', 'set_close'], ignore_exit_code=True)
      self._RunCommand(['git', 'checkout', active_branch])
      self._RunCommand(['git', 'branch', '-D', ROLL_BRANCH_NAME])
    return 0
+
+
def main():
  """Command-line entry point; returns a process exit status."""
  parser = argparse.ArgumentParser(
      description='Auto-generates a CL containing a WebGL conformance roll.')
  parser.add_argument('--abort',
    help=('Aborts a previously prepared roll. '
          'Closes any associated issues and deletes the roll branches'),
    action='store_true')
  parser.add_argument('--ignore-checks', action='store_true', default=False,
    help=('Skips checks for being on the master branch, dirty workspaces and '
          'the updating of the checkout. Will still delete and create local '
          'Git branches.'))
  parser.add_argument('--skip-tryjobs', action='store_true', default=False,
    help=('Skip the dry-run tryjobs for the newly generated CL. Use this '
          'when you expect to have to make many changes to the WebGL '
          'conformance test expectations in the same CL and want to avoid '
          'wasted tryjobs.'))
  parser.add_argument('-v', '--verbose', action='store_true', default=False,
    help='Be extra verbose in printing of log messages.')
  args = parser.parse_args()

  log_level = logging.DEBUG if args.verbose else logging.ERROR
  logging.basicConfig(level=log_level)

  autoroller = AutoRoller(SRC_DIR)
  if args.abort:
    return autoroller.Abort()
  return autoroller.PrepareRoll(args.ignore_checks, args.skip_tryjobs)
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/roll_webrtc.py b/chromium/tools/roll_webrtc.py
new file mode 100755
index 00000000000..f016fb39c15
--- /dev/null
+++ b/chromium/tools/roll_webrtc.py
@@ -0,0 +1,442 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import collections
+import logging
+import os
+import re
+import subprocess
+import sys
+import time
+
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+SRC_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
+sys.path.insert(0, os.path.join(SRC_DIR, 'build'))
+import find_depot_tools
+find_depot_tools.add_depot_tools_to_path()
+import rietveld
+import roll_dep_svn
+from gclient import GClientKeywords
+from third_party import upload
+
+# Avoid depot_tools/third_party/upload.py print verbose messages.
+upload.verbosity = 0 # Errors only.
+
+CHROMIUM_GIT_URL = 'https://chromium.googlesource.com/chromium/src.git'
+COMMIT_POSITION_RE = re.compile('^Cr-Original-Commit-Position: .*#([0-9]+).*$')
+CL_ISSUE_RE = re.compile('^Issue number: ([0-9]+) \((.*)\)$')
+RIETVELD_URL_RE = re.compile('^https?://(.*)/(.*)')
+ROLL_BRANCH_NAME = 'special_webrtc_roll_branch'
+TRYJOB_STATUS_SLEEP_SECONDS = 30
+
+# Use a shell for subcommands on Windows to get a PATH search.
+IS_WIN = sys.platform.startswith('win')
+WEBRTC_PATH = os.path.join('third_party', 'webrtc')
+LIBJINGLE_PATH = os.path.join('third_party', 'libjingle', 'source', 'talk')
+LIBJINGLE_README = os.path.join('third_party', 'libjingle', 'README.chromium')
+
+# Result codes from build/third_party/buildbot_8_4p1/buildbot/status/results.py
+# plus the -1 code which is used when there's no result yet.
+TRYJOB_STATUS = {
+ -1: 'RUNNING',
+ 0: 'SUCCESS',
+ 1: 'WARNINGS',
+ 2: 'FAILURE',
+ 3: 'SKIPPED',
+ 4: 'EXCEPTION',
+ 5: 'RETRY',
+}
+SUCCESS_STATUS = (0, 1, 3)
+FAILURE_STATUS = (2, 4, 5)
+
+CommitInfo = collections.namedtuple('CommitInfo', ['commit_position',
+ 'git_commit',
+ 'git_repo_url'])
+CLInfo = collections.namedtuple('CLInfo', ['issue', 'url', 'rietveld_server'])
+
+
+def _PosixPath(path):
+ """Convert a possibly-Windows path to a posix-style path."""
+ (_, path) = os.path.splitdrive(path)
+ return path.replace(os.sep, '/')
+
+
+def _ParseGitCommitPosition(description):
+ for line in reversed(description.splitlines()):
+ m = COMMIT_POSITION_RE.match(line.strip())
+ if m:
+ return m.group(1)
+ logging.error('Failed to parse svn revision id from:\n%s\n', description)
+ sys.exit(-1)
+
+
+def _ParseGitCommitHash(description):
+ for line in description.splitlines():
+ if line.startswith('commit '):
+ return line.split()[1]
+ logging.error('Failed to parse git commit id from:\n%s\n', description)
+ sys.exit(-1)
+ return None
+
+
+def _ParseDepsFile(filename):
+ with open(filename, 'rb') as f:
+ deps_content = f.read()
+ return _ParseDepsDict(deps_content)
+
+
+def _ParseDepsDict(deps_content):
+ local_scope = {}
+ var = GClientKeywords.VarImpl({}, local_scope)
+ global_scope = {
+ 'File': GClientKeywords.FileImpl,
+ 'From': GClientKeywords.FromImpl,
+ 'Var': var.Lookup,
+ 'deps_os': {},
+ }
+ exec(deps_content, global_scope, local_scope)
+ return local_scope
+
+
+def _WaitForTrybots(issue, rietveld_server):
+ """Wait until all trybots have passed or at least one have failed.
+
+ Returns:
+ An exit code of 0 if all trybots passed or non-zero otherwise.
+ """
+ assert type(issue) is int
+ print 'Trybot status for https://%s/%d:' % (rietveld_server, issue)
+ remote = rietveld.Rietveld('https://' + rietveld_server, None, None)
+
+ attempt = 0
+ max_tries = 60*60/TRYJOB_STATUS_SLEEP_SECONDS # Max one hour
+ while attempt < max_tries:
+ # Get patches for the issue so we can use the latest one.
+ data = remote.get_issue_properties(issue, messages=False)
+ patchsets = data['patchsets']
+
+ # Get trybot status for the latest patch set.
+ data = remote.get_patchset_properties(issue, patchsets[-1])
+
+ tryjob_results = data['try_job_results']
+ if len(tryjob_results) == 0:
+ logging.debug('No trybots have yet been triggered for https://%s/%d' ,
+ rietveld_server, issue)
+ else:
+ _PrintTrybotsStatus(tryjob_results)
+ if any(r['result'] in FAILURE_STATUS for r in tryjob_results):
+ logging.error('Found failing tryjobs (see above)')
+ return 1
+ if all(r['result'] in SUCCESS_STATUS for r in tryjob_results):
+ return 0
+
+ logging.debug('Waiting for %d seconds before next check...',
+ TRYJOB_STATUS_SLEEP_SECONDS)
+ time.sleep(TRYJOB_STATUS_SLEEP_SECONDS)
+ attempt += 1
+
+
+def _PrintTrybotsStatus(tryjob_results):
+ status_to_name = {}
+ for trybot_result in tryjob_results:
+ status = TRYJOB_STATUS.get(trybot_result['result'], 'UNKNOWN')
+ status_to_name.setdefault(status, [])
+ status_to_name[status].append(trybot_result['builder'])
+
+ print '\n========== TRYJOBS STATUS =========='
+ for status,name_list in status_to_name.iteritems():
+ print '%s: %s' % (status, ','.join(sorted(name_list)))
+
+
+def _GenerateCLDescriptionCommand(webrtc_current, libjingle_current,
+ webrtc_new, libjingle_new):
+ delim = ''
+ webrtc_str = ''
+ def GetChangeLogURL(git_repo_url, current_hash, new_hash):
+ return '%s/+log/%s..%s' % (git_repo_url, current_hash[0:7], new_hash[0:7])
+
+ if webrtc_current.git_commit != webrtc_new.git_commit:
+ webrtc_str = 'WebRTC %s:%s' % (webrtc_current.commit_position,
+ webrtc_new.commit_position)
+ webrtc_changelog_url = GetChangeLogURL(webrtc_current.git_repo_url,
+ webrtc_current.git_commit,
+ webrtc_new.git_commit)
+
+ libjingle_str = ''
+ if libjingle_current.git_commit != libjingle_new.git_commit:
+ if webrtc_str:
+ delim += ', '
+ libjingle_str = 'Libjingle %s:%s' % (libjingle_current.commit_position,
+ libjingle_new.commit_position)
+ libjingle_changelog_url = GetChangeLogURL(libjingle_current.git_repo_url,
+ libjingle_current.git_commit,
+ libjingle_new.git_commit)
+
+ description = [ '-m', 'Roll ' + webrtc_str + delim + libjingle_str ]
+ if webrtc_str:
+ description.extend(['-m', webrtc_str])
+ description.extend(['-m', 'Changes: %s' % webrtc_changelog_url])
+ if libjingle_str:
+ description.extend(['-m', libjingle_str])
+ description.extend(['-m', 'Changes: %s' % libjingle_changelog_url])
+ description.extend(['-m', 'TBR='])
+ return description
+
+
+class AutoRoller(object):
+ def __init__(self, chromium_src):
+ self._chromium_src = chromium_src
+
+ def _RunCommand(self, command, working_dir=None, ignore_exit_code=False,
+ extra_env=None):
+ """Runs a command and returns the stdout from that command.
+
+ If the command fails (exit code != 0), the function will exit the process.
+ """
+ working_dir = working_dir or self._chromium_src
+ logging.debug('cmd: %s cwd: %s', ' '.join(command), working_dir)
+ env = os.environ.copy()
+ if extra_env:
+ logging.debug('extra env: %s', extra_env)
+ env.update(extra_env)
+ p = subprocess.Popen(command, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE, shell=IS_WIN, env=env,
+ cwd=working_dir, universal_newlines=True)
+ output = p.stdout.read()
+ p.wait()
+ p.stdout.close()
+ p.stderr.close()
+
+ if not ignore_exit_code and p.returncode != 0:
+ logging.error('Command failed: %s\n%s', str(command), output)
+ sys.exit(p.returncode)
+ return output
+
+ def _GetCommitInfo(self, path_below_src, git_hash=None, git_repo_url=None):
+ working_dir = os.path.join(self._chromium_src, path_below_src)
+ self._RunCommand(['git', 'fetch', 'origin'], working_dir=working_dir)
+ revision_range = git_hash or 'origin'
+ ret = self._RunCommand(
+ ['git', '--no-pager', 'log', revision_range, '--pretty=full', '-1'],
+ working_dir=working_dir)
+ return CommitInfo(_ParseGitCommitPosition(ret), _ParseGitCommitHash(ret),
+ git_repo_url)
+
+ def _GetDepsCommitInfo(self, deps_dict, path_below_src):
+ entry = deps_dict['deps'][_PosixPath('src/%s' % path_below_src)]
+ at_index = entry.find('@')
+ git_repo_url = entry[:at_index]
+ git_hash = entry[at_index + 1:]
+ return self._GetCommitInfo(path_below_src, git_hash, git_repo_url)
+
+ def _GetCLInfo(self):
+ cl_output = self._RunCommand(['git', 'cl', 'issue'])
+ m = CL_ISSUE_RE.match(cl_output.strip())
+ if not m:
+ logging.error('Cannot find any CL info. Output was:\n%s', cl_output)
+ sys.exit(-1)
+ issue_number = int(m.group(1))
+ url = m.group(2)
+
+ # Parse the Rietveld host from the URL.
+ m = RIETVELD_URL_RE.match(url)
+ if not m:
+ logging.error('Cannot parse Rietveld host from URL: %s', url)
+ sys.exit(-1)
+ rietveld_server = m.group(1)
+ return CLInfo(issue_number, url, rietveld_server)
+
+ def _GetCurrentBranchName(self):
+ return self._RunCommand(
+ ['git', 'rev-parse', '--abbrev-ref', 'HEAD']).splitlines()[0]
+
+ def _IsTreeClean(self):
+ lines = self._RunCommand(['git', 'status', '--porcelain']).splitlines()
+ if len(lines) == 0:
+ return True
+
+ logging.debug('Dirty/unversioned files:\n%s', '\n'.join(lines))
+ return False
+
+ def _UpdateReadmeFile(self, readme_path, new_revision):
+ readme = open(os.path.join(self._chromium_src, readme_path), 'r+')
+ txt = readme.read()
+ m = re.sub(re.compile('.*^Revision\: ([0-9]*).*', re.MULTILINE),
+ ('Revision: %s' % new_revision), txt)
+ readme.seek(0)
+ readme.write(m)
+ readme.truncate()
+
+ def PrepareRoll(self, dry_run, ignore_checks, no_commit, close_previous_roll):
+ # TODO(kjellander): use os.path.normcase, os.path.join etc for all paths for
+ # cross platform compatibility.
+
+ if not ignore_checks:
+ if self._GetCurrentBranchName() != 'master':
+ logging.error('Please checkout the master branch.')
+ return -1
+ if not self._IsTreeClean():
+ logging.error('Please make sure you don\'t have any modified files.')
+ return -1
+
+ logging.debug('Checking for a previous roll branch.')
+ if close_previous_roll:
+ self.Abort()
+
+ logging.debug('Pulling latest changes')
+ if not ignore_checks:
+ self._RunCommand(['git', 'pull'])
+
+ self._RunCommand(['git', 'checkout', '-b', ROLL_BRANCH_NAME])
+
+ # Modify Chromium's DEPS file.
+
+ # Parse current hashes.
+ deps_filename = os.path.join(self._chromium_src, 'DEPS')
+ deps = _ParseDepsFile(deps_filename)
+ webrtc_current = self._GetDepsCommitInfo(deps, WEBRTC_PATH)
+ libjingle_current = self._GetDepsCommitInfo(deps, LIBJINGLE_PATH)
+
+ # Find ToT revisions.
+ webrtc_latest = self._GetCommitInfo(WEBRTC_PATH)
+ libjingle_latest = self._GetCommitInfo(LIBJINGLE_PATH)
+
+ if IS_WIN:
+ # Make sure the roll script doesn't use Windows line endings.
+ self._RunCommand(['git', 'config', 'core.autocrlf', 'true'])
+
+ self._UpdateDep(deps_filename, WEBRTC_PATH, webrtc_latest)
+ self._UpdateDep(deps_filename, LIBJINGLE_PATH, libjingle_latest)
+
+ if self._IsTreeClean():
+ print 'The latest revision is already rolled for WebRTC and libjingle.'
+ self._DeleteRollBranch()
+ else:
+ self._UpdateReadmeFile(LIBJINGLE_README, libjingle_latest.commit_position)
+ description = _GenerateCLDescriptionCommand(
+ webrtc_current, libjingle_current, webrtc_latest, libjingle_latest)
+ logging.debug('Committing changes locally.')
+ self._RunCommand(['git', 'add', '--update', '.'])
+ self._RunCommand(['git', 'commit'] + description)
+ logging.debug('Uploading changes...')
+ self._RunCommand(['git', 'cl', 'upload'],
+ extra_env={'EDITOR': 'true'})
+ cl_info = self._GetCLInfo()
+ logging.debug('Issue: %d URL: %s', cl_info.issue, cl_info.url)
+
+ if not dry_run and not no_commit:
+ logging.debug('Sending the CL to the CQ...')
+ self._RunCommand(['git', 'cl', 'set_commit'])
+ logging.debug('Sent the CL to the CQ. Monitor here: %s', cl_info.url)
+
+ # TODO(kjellander): Checkout masters/previous branches again.
+ return 0
+
+ def _UpdateDep(self, deps_filename, dep_relative_to_src, commit_info):
+ dep_name = os.path.join('src', dep_relative_to_src)
+ comment = 'commit position %s' % commit_info.commit_position
+
+ # roll_dep_svn.py relies on cwd being the Chromium checkout, so let's
+ # temporarily change the working directory and then change back.
+ cwd = os.getcwd()
+ os.chdir(os.path.dirname(deps_filename))
+ roll_dep_svn.update_deps(deps_filename, dep_relative_to_src, dep_name,
+ commit_info.git_commit, comment)
+ os.chdir(cwd)
+
+ def _DeleteRollBranch(self):
+ self._RunCommand(['git', 'checkout', 'master'])
+ self._RunCommand(['git', 'branch', '-D', ROLL_BRANCH_NAME])
+ logging.debug('Deleted the local roll branch (%s)', ROLL_BRANCH_NAME)
+
+
+ def _GetBranches(self):
+ """Returns a tuple of active,branches.
+
+ The 'active' is the name of the currently active branch and 'branches' is a
+ list of all branches.
+ """
+ lines = self._RunCommand(['git', 'branch']).split('\n')
+ branches = []
+ active = ''
+ for l in lines:
+ if '*' in l:
+ # The assumption is that the first char will always be the '*'.
+ active = l[1:].strip()
+ branches.append(active)
+ else:
+ b = l.strip()
+ if b:
+ branches.append(b)
+ return (active, branches)
+
+ def Abort(self):
+ active_branch, branches = self._GetBranches()
+ if active_branch == ROLL_BRANCH_NAME:
+ active_branch = 'master'
+ if ROLL_BRANCH_NAME in branches:
+ print 'Aborting pending roll.'
+ self._RunCommand(['git', 'checkout', ROLL_BRANCH_NAME])
+ # Ignore an error here in case an issue wasn't created for some reason.
+ self._RunCommand(['git', 'cl', 'set_close'], ignore_exit_code=True)
+ self._RunCommand(['git', 'checkout', active_branch])
+ self._RunCommand(['git', 'branch', '-D', ROLL_BRANCH_NAME])
+ return 0
+
+ def WaitForTrybots(self):
+ active_branch, _ = self._GetBranches()
+ if active_branch != ROLL_BRANCH_NAME:
+ self._RunCommand(['git', 'checkout', ROLL_BRANCH_NAME])
+ cl_info = self._GetCLInfo()
+ return _WaitForTrybots(cl_info.issue, cl_info.rietveld_server)
+
+
+def main():
+ parser = argparse.ArgumentParser(
+ description='Find webrtc and libjingle revisions for roll.')
+ parser.add_argument('--abort',
+ help=('Aborts a previously prepared roll. '
+ 'Closes any associated issues and deletes the roll branches'),
+ action='store_true')
+ parser.add_argument('--no-commit',
+ help=('Don\'t send the CL to the CQ. This is useful if additional changes '
+ 'are needed to the CL (like for API changes).'),
+ action='store_true')
+ parser.add_argument('--wait-for-trybots',
+ help=('Waits until all trybots from a previously created roll are either '
+ 'successful or at least one has failed. This is useful to be able to '
+ 'continuously run this script but not initiating new rolls until a '
+ 'previous one is known to have passed or failed.'),
+ action='store_true')
+ parser.add_argument('--close-previous-roll', action='store_true',
+ help='Abort a previous roll if one exists.')
+ parser.add_argument('--dry-run', action='store_true', default=False,
+ help='Create branches and CLs but doesn\'t send tryjobs or commit.')
+ parser.add_argument('--ignore-checks', action='store_true', default=False,
+ help=('Skips checks for being on the master branch, dirty workspaces and '
+ 'the updating of the checkout. Will still delete and create local '
+ 'Git branches.'))
+ parser.add_argument('-v', '--verbose', action='store_true', default=False,
+ help='Be extra verbose in printing of log messages.')
+ args = parser.parse_args()
+
+ if args.verbose:
+ logging.basicConfig(level=logging.DEBUG)
+ else:
+ logging.basicConfig(level=logging.ERROR)
+
+ autoroller = AutoRoller(SRC_DIR)
+ if args.abort:
+ return autoroller.Abort()
+ elif args.wait_for_trybots:
+ return autoroller.WaitForTrybots()
+ else:
+ return autoroller.PrepareRoll(args.dry_run, args.ignore_checks,
+ args.no_commit, args.close_previous_roll)
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/run-bisect-manual-test.py b/chromium/tools/run-bisect-manual-test.py
new file mode 100755
index 00000000000..e1e6aeb6121
--- /dev/null
+++ b/chromium/tools/run-bisect-manual-test.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Run Manual Test Bisect Tool
+
+An example usage:
+tools/run-bisect-manual-test.py -g 201281 -b 201290
+
+On Linux platform, follow the instructions in this document
+https://chromium.googlesource.com/chromium/src/+/master/docs/linux_suid_sandbox_development.md
+to setup the sandbox manually before running the script. Otherwise the script
+fails to launch Chrome and exits with an error.
+
+This script serves a similar function to bisect-builds.py, except it uses
+the bisect_perf_regression.py. This means that it can obtain builds of
+Chromium for revisions where builds aren't available in cloud storage.
+"""
+
+import os
+import subprocess
+import sys
+
+CROS_BOARD_ENV = 'BISECT_CROS_BOARD'
+CROS_IP_ENV = 'BISECT_CROS_IP'
+_TOOLS_DIR = os.path.abspath(os.path.dirname(__file__))
+_BISECT_SCRIPT_PATH = os.path.join(
+ _TOOLS_DIR, 'auto_bisect', 'bisect_perf_regression.py')
+
+sys.path.append(os.path.join(_TOOLS_DIR, 'perf'))
+from chrome_telemetry_build import chromium_config
+sys.path.append(chromium_config.GetTelemetryDir())
+from telemetry.internal.browser import browser_options
+
+
+def _RunBisectionScript(options):
+ """Attempts to execute the bisect script (bisect_perf_regression.py).
+
+ Args:
+ options: The configuration options to pass to the bisect script.
+
+ Returns:
+ An exit code; 0 for success, 1 for failure.
+ """
+ script_path = os.path.join(options.working_directory,
+ 'bisect', 'src', 'tools','bisect-manual-test.py')
+ abs_script_path = os.path.abspath(script_path)
+
+ test_command = ('python %s --browser=%s --chrome-root=.' %
+ (abs_script_path, options.browser_type))
+
+ cmd = ['python', _BISECT_SCRIPT_PATH,
+ '-c', test_command,
+ '-g', options.good_revision,
+ '-b', options.bad_revision,
+ '-m', 'manual_test/manual_test',
+ '-r', '1',
+ '--working_directory', options.working_directory,
+ '--build_preference', 'ninja',
+ '--no_custom_deps',
+ '--builder_type', options.builder_type]
+
+ if options.extra_src:
+ cmd.extend(['--extra_src', options.extra_src])
+
+ if 'cros' in options.browser_type:
+ cmd.extend(['--target_platform', 'cros'])
+
+ if os.environ[CROS_BOARD_ENV] and os.environ[CROS_IP_ENV]:
+ cmd.extend(['--cros_board', os.environ[CROS_BOARD_ENV]])
+ cmd.extend(['--cros_remote_ip', os.environ[CROS_IP_ENV]])
+ else:
+ print ('Error: Cros build selected, but BISECT_CROS_IP or'
+ 'BISECT_CROS_BOARD undefined.\n')
+ return 1
+ elif 'android-chrome' == options.browser_type:
+ if not options.extra_src:
+ print 'Error: Missing --extra_src to run bisect for android-chrome.'
+ sys.exit(-1)
+ cmd.extend(['--target_platform', 'android-chrome'])
+ elif 'android' in options.browser_type:
+ cmd.extend(['--target_platform', 'android'])
+ elif not options.target_build_type:
+ cmd.extend(['--target_build_type', options.browser_type.title()])
+
+ if options.target_build_type:
+ cmd.extend(['--target_build_type', options.target_build_type])
+
+ if options.goma_threads:
+ cmd.extend(['--use_goma', '--goma_threads', options.goma_threads])
+
+ cmd = [str(c) for c in cmd]
+
+ return_code = subprocess.call(cmd)
+
+ if return_code:
+ print 'Error: bisect_perf_regression.py had exit code %d.' % return_code
+ print
+
+ return return_code
+
+
+def main():
+ """Does a bisect based on the command-line arguments passed in.
+
+ The user will be prompted to classify each revision as good or bad.
+ """
+ usage = ('%prog [options]\n'
+ 'Used to run the bisection script with a manual test.')
+
+ options = browser_options.BrowserFinderOptions('release')
+ parser = options.CreateParser(usage)
+
+ parser.add_option('-b', '--bad_revision',
+ type='str',
+ help='A bad revision to start bisection. ' +
+ 'Must be later than good revision. May be either a git' +
+ ' or svn revision.')
+ parser.add_option('-g', '--good_revision',
+ type='str',
+ help='A revision to start bisection where performance' +
+ ' test is known to pass. Must be earlier than the ' +
+ 'bad revision. May be either a git or svn revision.')
+ parser.add_option('-w', '--working_directory',
+ type='str',
+ default='..',
+ help='A working directory to supply to the bisection '
+ 'script, which will use it as the location to checkout '
+ 'a copy of the chromium depot.')
+ parser.add_option('--extra_src',
+ type='str',
+ help='Path to extra source file. If this is supplied, '
+ 'bisect script will use this to override default behavior.')
+ parser.add_option('--target_build_type',
+ type='choice',
+ choices=['Release', 'Debug'],
+ help='The target build type. Choices are "Release" '
+ 'or "Debug".')
+ parser.add_option('--goma_threads', default=64,
+ type='int',
+ help='Number of goma threads to use. 0 will disable goma.')
+ parser.add_option('--builder_type', default='',
+ choices=['perf',
+ 'full',
+ 'android-chrome-perf', ''],
+ help='Type of builder to get build from. This allows '
+ 'script to use cached builds. By default (empty), binaries '
+ 'are built locally.')
+ options, _ = parser.parse_args()
+ error_msg = ''
+ if not options.good_revision:
+ error_msg += 'Error: missing required parameter: --good_revision\n'
+ if not options.bad_revision:
+ error_msg += 'Error: missing required parameter: --bad_revision\n'
+
+ if error_msg:
+ print error_msg
+ parser.print_help()
+ return 1
+
+ if 'android' not in options.browser_type and sys.platform.startswith('linux'):
+ if not os.environ.get('CHROME_DEVEL_SANDBOX'):
+ print 'SUID sandbox has not been setup.'\
+ ' See https://chromium.googlesource.com/chromium/src/'\
+ '+/master/docs/linux_suid_sandbox_development.md.'
+ return 1
+
+ return _RunBisectionScript(options)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/run-bisect-perf-regression.py b/chromium/tools/run-bisect-perf-regression.py
new file mode 100755
index 00000000000..52019db9b68
--- /dev/null
+++ b/chromium/tools/run-bisect-perf-regression.py
@@ -0,0 +1,886 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Run Performance Test Bisect Tool
+
+This script is used by a try bot to run the bisect script with the parameters
+specified in the bisect config file. It checks out a copy of the depot in
+a subdirectory 'bisect' of the working directory provided, and runs the
+bisect script there.
+"""
+
+import json
+import optparse
+import os
+import platform
+import re
+import shlex
+import subprocess
+import sys
+import traceback
+
+from auto_bisect import bisect_perf_regression
+from auto_bisect import bisect_utils
+from auto_bisect import math_utils
+from auto_bisect import source_control
+
+CROS_BOARD_ENV = 'BISECT_CROS_BOARD'
+CROS_IP_ENV = 'BISECT_CROS_IP'
+SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
+SRC_DIR = os.path.join(SCRIPT_DIR, os.path.pardir)
+BISECT_CONFIG_PATH = os.path.join(SCRIPT_DIR, 'auto_bisect', 'bisect.cfg')
+RUN_TEST_CONFIG_PATH = os.path.join(SCRIPT_DIR, 'run-perf-test.cfg')
+WEBKIT_RUN_TEST_CONFIG_PATH = os.path.join(
+ SRC_DIR, 'third_party', 'WebKit', 'Tools', 'run-perf-test.cfg')
+BISECT_SCRIPT_DIR = os.path.join(SCRIPT_DIR, 'auto_bisect')
+
+PERF_BENCHMARKS_PATH = 'tools/perf/benchmarks'
+PERF_MEASUREMENTS_PATH = 'tools/perf/measurements'
+BUILDBOT_BUILDERNAME = 'BUILDBOT_BUILDERNAME'
+BENCHMARKS_JSON_FILE = 'benchmarks.json'
+
+# This is used to identify tryjobs triggered by the commit queue.
+_COMMIT_QUEUE_USERS = [
+ '5071639625-1lppvbtck1morgivc6sq4dul7klu27sd@developer.gserviceaccount.com',
+ 'commit-bot@chromium.org']
+
+class Goma(object):
+
+ def __init__(self, path_to_goma):
+ self._abs_path_to_goma = None
+ self._abs_path_to_goma_file = None
+ if not path_to_goma:
+ return
+ self._abs_path_to_goma = os.path.abspath(path_to_goma)
+ filename = 'goma_ctl.bat' if os.name == 'nt' else 'goma_ctl.sh'
+ self._abs_path_to_goma_file = os.path.join(self._abs_path_to_goma, filename)
+
+ def __enter__(self):
+ if self._HasGomaPath():
+ self._SetupAndStart()
+ return self
+
+ def __exit__(self, *_):
+ if self._HasGomaPath():
+ self._Stop()
+
+ def _HasGomaPath(self):
+ return bool(self._abs_path_to_goma)
+
+ def _SetupEnvVars(self):
+ if os.name == 'nt':
+ os.environ['CC'] = (os.path.join(self._abs_path_to_goma, 'gomacc.exe') +
+ ' cl.exe')
+ os.environ['CXX'] = (os.path.join(self._abs_path_to_goma, 'gomacc.exe') +
+ ' cl.exe')
+ else:
+ os.environ['PATH'] = os.pathsep.join([self._abs_path_to_goma,
+ os.environ['PATH']])
+
+ def _SetupAndStart(self):
+ """Sets up goma and launches it.
+
+ Args:
+ path_to_goma: Path to goma directory.
+
+ Returns:
+ True if successful."""
+ self._SetupEnvVars()
+
+ # Sometimes goma is lingering around if something went bad on a previous
+ # run. Stop it before starting a new process. Can ignore the return code
+ # since it will return an error if it wasn't running.
+ self._Stop()
+
+ if subprocess.call([self._abs_path_to_goma_file, 'start']):
+ raise RuntimeError('Goma failed to start.')
+
+ def _Stop(self):
+ subprocess.call([self._abs_path_to_goma_file, 'stop'])
+
+
+def _LoadConfigFile(config_file_path):
+ """Attempts to load the specified config file as a module
+ and grab the global config dict.
+
+ Args:
+ config_file_path: Path to the config file.
+
+ Returns:
+ If successful, returns the config dict loaded from the file. If no
+ such dictionary could be loaded, returns the empty dictionary.
+ """
+ try:
+ local_vars = {}
+ execfile(config_file_path, local_vars)
+ return local_vars['config']
+ except Exception:
+ print
+ traceback.print_exc()
+ print
+ return {}
+
+
+def _ValidateConfigFile(config_contents, required_parameters):
+ """Validates the config file contents, checking whether all values are
+ non-empty.
+
+ Args:
+ config_contents: A config dictionary.
+ required_parameters: A list of parameters to check for.
+
+ Returns:
+ True if valid.
+ """
+ for parameter in required_parameters:
+ if parameter not in config_contents:
+ return False
+ value = config_contents[parameter]
+ if not value or type(value) is not str:
+ return False
+ return True
+
+
+def _ValidatePerfConfigFile(config_contents):
+ """Validates the perf config file contents.
+
+ This is used when we're doing a perf try job, rather than a bisect.
+ The config file is called run-perf-test.cfg by default.
+
+ The parameters checked are the required parameters; any additional optional
+ parameters won't be checked and validation will still pass.
+
+ Args:
+ config_contents: A config dictionary.
+
+ Returns:
+ True if valid.
+ """
+ return _ValidateConfigFile(config_contents, required_parameters=['command'])
+
+
+def _ValidateBisectConfigFile(config_contents):
+ """Validates the bisect config file contents.
+
+ The parameters checked are the required parameters; any additional optional
+ parameters won't be checked and validation will still pass.
+
+ Args:
+ config_contents: A config dictionary.
+
+ Returns:
+ True if valid.
+ """
+ return _ValidateConfigFile(
+ config_contents,
+ required_parameters=['command', 'good_revision', 'bad_revision'])
+
+
+def _OutputFailedResults(text_to_print):
+ bisect_utils.OutputAnnotationStepStart('Results - Failed')
+ print
+ print text_to_print
+ print
+ bisect_utils.OutputAnnotationStepClosed()
+
+
+def _CreateBisectOptionsFromConfig(config):
+ print config['command']
+ opts_dict = {}
+ opts_dict['command'] = config['command']
+ opts_dict['metric'] = config.get('metric')
+
+ if config['repeat_count']:
+ opts_dict['repeat_test_count'] = int(config['repeat_count'])
+
+ if config['truncate_percent']:
+ opts_dict['truncate_percent'] = int(config['truncate_percent'])
+
+ if config['max_time_minutes']:
+ opts_dict['max_time_minutes'] = _Clamp(
+ int(config['max_time_minutes']), low=1, high=60)
+
+ if config.has_key('use_goma'):
+ opts_dict['use_goma'] = config['use_goma']
+ if config.has_key('goma_dir'):
+ opts_dict['goma_dir'] = config['goma_dir']
+
+ if config.has_key('improvement_direction'):
+ opts_dict['improvement_direction'] = int(config['improvement_direction'])
+
+ if config.has_key('required_initial_confidence'):
+ opts_dict['required_initial_confidence'] = float(
+ config['required_initial_confidence'])
+
+ if config.has_key('target_arch'):
+ opts_dict['target_arch'] = config['target_arch']
+
+ if config.has_key('bug_id') and str(config['bug_id']).isdigit():
+ opts_dict['bug_id'] = config['bug_id']
+
+ if config.has_key('try_job_id'):
+ opts_dict['try_job_id'] = config['try_job_id']
+
+ opts_dict['build_preference'] = 'ninja'
+ opts_dict['output_buildbot_annotations'] = True
+
+ if '--browser=cros' in config['command']:
+ opts_dict['target_platform'] = 'cros'
+
+ if os.environ[CROS_BOARD_ENV] and os.environ[CROS_IP_ENV]:
+ opts_dict['cros_board'] = os.environ[CROS_BOARD_ENV]
+ opts_dict['cros_remote_ip'] = os.environ[CROS_IP_ENV]
+ else:
+ raise RuntimeError('CrOS build selected, but BISECT_CROS_IP or'
+ 'BISECT_CROS_BOARD undefined.')
+ elif 'android' in config['command']:
+ if 'android-chromium' in config['command']:
+ opts_dict['target_platform'] = 'android'
+ elif 'android-chrome' in config['command']:
+ opts_dict['target_platform'] = 'android-chrome'
+ else:
+ opts_dict['target_platform'] = 'android'
+
+ return bisect_perf_regression.BisectOptions.FromDict(opts_dict)
+
+
+def _Clamp(n, low, high):
+ """Clamps a value to a range."""
+ return min(high, max(low, n))
+
+
+def _ParseCloudLinksFromOutput(output):
+ html_results_pattern = re.compile(
+ r'\s(?P<VALUES>http://storage.googleapis.com/' +
+ 'chromium-telemetry/html-results/results-[a-z0-9-_]+)\s',
+ re.MULTILINE)
+ profiler_pattern = re.compile(
+ r'\s(?P<VALUES>https://console.developers.google.com/' +
+ 'm/cloudstorage/b/[a-z-]+/o/profiler-[a-z0-9-_.]+)\s',
+ re.MULTILINE)
+
+ results = {
+ 'html-results': html_results_pattern.findall(output),
+ 'profiler': profiler_pattern.findall(output),
+ }
+
+ return results
+
+
+def _ParseAndOutputCloudLinks(
+ results_without_patch, results_with_patch, annotations_dict):
+ cloud_links_without_patch = _ParseCloudLinksFromOutput(
+ results_without_patch[2])
+ cloud_links_with_patch = _ParseCloudLinksFromOutput(
+ results_with_patch[2])
+
+ cloud_file_link = (cloud_links_without_patch['html-results'][0]
+ if cloud_links_without_patch['html-results'] else '')
+
+ profiler_file_links_with_patch = cloud_links_with_patch['profiler']
+ profiler_file_links_without_patch = cloud_links_without_patch['profiler']
+
+ # Calculate the % difference in the means of the 2 runs.
+ percent_diff_in_means = None
+ std_err = None
+ if (results_with_patch[0].has_key('mean') and
+ results_with_patch[0].has_key('values')):
+ percent_diff_in_means = (results_with_patch[0]['mean'] /
+ max(0.0001, results_without_patch[0]['mean'])) * 100.0 - 100.0
+ std_err = math_utils.PooledStandardError(
+ [results_with_patch[0]['values'], results_without_patch[0]['values']])
+
+ if percent_diff_in_means is not None and std_err is not None:
+ bisect_utils.OutputAnnotationStepStart('Results - %.02f +- %0.02f delta' %
+ (percent_diff_in_means, std_err))
+ print ' %s %s %s' % (''.center(10, ' '), 'Mean'.center(20, ' '),
+ 'Std. Error'.center(20, ' '))
+ print ' %s %s %s' % ('Patch'.center(10, ' '),
+ ('%.02f' % results_with_patch[0]['mean']).center(20, ' '),
+ ('%.02f' % results_with_patch[0]['std_err']).center(20, ' '))
+ print ' %s %s %s' % ('No Patch'.center(10, ' '),
+ ('%.02f' % results_without_patch[0]['mean']).center(20, ' '),
+ ('%.02f' % results_without_patch[0]['std_err']).center(20, ' '))
+ if cloud_file_link:
+ bisect_utils.OutputAnnotationStepLink('HTML Results', cloud_file_link)
+ bisect_utils.OutputAnnotationStepClosed()
+ elif cloud_file_link:
+ bisect_utils.OutputAnnotationStepLink('HTML Results', cloud_file_link)
+
+ if profiler_file_links_with_patch and profiler_file_links_without_patch:
+ for i in xrange(len(profiler_file_links_with_patch)):
+ bisect_utils.OutputAnnotationStepLink(
+ '%s[%d]' % (annotations_dict.get('profiler_link1'), i),
+ profiler_file_links_with_patch[i])
+ for i in xrange(len(profiler_file_links_without_patch)):
+ bisect_utils.OutputAnnotationStepLink(
+ '%s[%d]' % (annotations_dict.get('profiler_link2'), i),
+ profiler_file_links_without_patch[i])
+
+
+def _ResolveRevisionsFromConfig(config):
+ if not 'good_revision' in config and not 'bad_revision' in config:
+ return (None, None)
+
+ bad_revision = source_control.ResolveToRevision(
+ config['bad_revision'], 'chromium', bisect_utils.DEPOT_DEPS_NAME, 100)
+ if not bad_revision:
+ raise RuntimeError('Failed to resolve [%s] to git hash.',
+ config['bad_revision'])
+ good_revision = source_control.ResolveToRevision(
+ config['good_revision'], 'chromium', bisect_utils.DEPOT_DEPS_NAME, -100)
+ if not good_revision:
+ raise RuntimeError('Failed to resolve [%s] to git hash.',
+ config['good_revision'])
+
+ return (good_revision, bad_revision)
+
+
+def _GetStepAnnotationStringsDict(config):
+ if 'good_revision' in config and 'bad_revision' in config:
+ return {
+ 'build1': 'Building [%s]' % config['good_revision'],
+ 'build2': 'Building [%s]' % config['bad_revision'],
+ 'run1': 'Running [%s]' % config['good_revision'],
+ 'run2': 'Running [%s]' % config['bad_revision'],
+ 'sync1': 'Syncing [%s]' % config['good_revision'],
+ 'sync2': 'Syncing [%s]' % config['bad_revision'],
+ 'results_label1': config['good_revision'],
+ 'results_label2': config['bad_revision'],
+ 'profiler_link1': 'Profiler Data - %s' % config['good_revision'],
+ 'profiler_link2': 'Profiler Data - %s' % config['bad_revision'],
+ }
+ else:
+ return {
+ 'build1': 'Building With Patch',
+ 'build2': 'Building Without Patch',
+ 'run1': 'Running With Patch',
+ 'run2': 'Running Without Patch',
+ 'results_label1': 'Patch',
+ 'results_label2': 'ToT',
+ 'profiler_link1': 'With Patch - Profiler Data',
+ 'profiler_link2': 'Without Patch - Profiler Data',
+ }
+
+
+def _RunBuildStepForPerformanceTest(bisect_instance,
+ build_string,
+ sync_string,
+ revision):
+ if revision:
+ bisect_utils.OutputAnnotationStepStart(sync_string)
+ if not source_control.SyncToRevision(revision, 'gclient'):
+ raise RuntimeError('Failed [%s].' % sync_string)
+ bisect_utils.OutputAnnotationStepClosed()
+
+ bisect_utils.OutputAnnotationStepStart(build_string)
+
+ if bisect_utils.RunGClient(['runhooks']):
+ raise RuntimeError('Failed to run gclient runhooks')
+
+ if not bisect_instance.ObtainBuild('chromium'):
+ raise RuntimeError('Patched version failed to build.')
+
+ bisect_utils.OutputAnnotationStepClosed()
+
+
+def _RunCommandStepForPerformanceTest(bisect_instance,
+ opts,
+ reset_on_first_run,
+ upload_on_last_run,
+ results_label,
+ run_string):
+ bisect_utils.OutputAnnotationStepStart(run_string)
+
+ results = bisect_instance.RunPerformanceTestAndParseResults(
+ opts.command,
+ opts.metric,
+ reset_on_first_run=reset_on_first_run,
+ upload_on_last_run=upload_on_last_run,
+ results_label=results_label,
+ allow_flakes=False)
+
+ if results[1]:
+ raise RuntimeError('Patched version failed to run performance test.')
+
+ bisect_utils.OutputAnnotationStepClosed()
+
+ return results
+
+
+def _RunPerformanceTest(config):
+ """Runs a performance test with and without the current patch.
+
+ Args:
+ config: Contents of the config file, a dictionary.
+
+ Attempts to build and run the current revision with and without the
+ current patch, with the parameters passed in.
+ """
+ # Bisect script expects to be run from the src directory
+ os.chdir(SRC_DIR)
+
+ opts = _CreateBisectOptionsFromConfig(config)
+ revisions = _ResolveRevisionsFromConfig(config)
+ annotations_dict = _GetStepAnnotationStringsDict(config)
+ b = bisect_perf_regression.BisectPerformanceMetrics(opts, os.getcwd())
+
+ _RunBuildStepForPerformanceTest(b,
+ annotations_dict.get('build1'),
+ annotations_dict.get('sync1'),
+ revisions[0])
+
+ results_with_patch = _RunCommandStepForPerformanceTest(
+ b, opts, True, True, annotations_dict['results_label1'],
+ annotations_dict['run1'])
+
+ bisect_utils.OutputAnnotationStepStart('Reverting Patch')
+ # TODO: When this is re-written to recipes, this should use bot_update's
+ # revert mechanism to fully revert the client. But for now, since we know that
+ # the perf try bot currently only supports src/ and src/third_party/WebKit, we
+ # simply reset those two directories.
+ bisect_utils.CheckRunGit(['reset', '--hard'])
+ bisect_utils.CheckRunGit(['reset', '--hard'],
+ os.path.join('third_party', 'WebKit'))
+ bisect_utils.OutputAnnotationStepClosed()
+
+ _RunBuildStepForPerformanceTest(b,
+ annotations_dict.get('build2'),
+ annotations_dict.get('sync2'),
+ revisions[1])
+
+ results_without_patch = _RunCommandStepForPerformanceTest(
+ b, opts, False, True, annotations_dict['results_label2'],
+ annotations_dict['run2'])
+
+ # Find the link to the cloud stored results file.
+ _ParseAndOutputCloudLinks(
+ results_without_patch, results_with_patch, annotations_dict)
+
+
+def _SetupAndRunPerformanceTest(config, path_to_goma, is_cq_tryjob=False):
+ """Attempts to build and run the current revision with and without the
+ current patch, with the parameters passed in.
+
+ Args:
+ config: The config read from run-perf-test.cfg.
+ path_to_goma: Path to goma directory.
+ is_cq_tryjob: Whether or not the try job was initiated by commit queue.
+
+ Returns:
+ An exit code: 0 on success, otherwise 1.
+ """
+ if platform.release() == 'XP':
+ print 'Windows XP is not supported for perf try jobs because it lacks '
+ print 'goma support. Please refer to crbug.com/330900.'
+ return 1
+ try:
+ with Goma(path_to_goma) as _:
+ config['use_goma'] = bool(path_to_goma)
+ if config['use_goma']:
+ config['goma_dir'] = os.path.abspath(path_to_goma)
+ if not is_cq_tryjob:
+ _RunPerformanceTest(config)
+ else:
+ return _RunBenchmarksForCommitQueue(config)
+ return 0
+ except RuntimeError, e:
+ bisect_utils.OutputAnnotationStepFailure()
+ bisect_utils.OutputAnnotationStepClosed()
+ _OutputFailedResults('Error: %s' % e.message)
+ return 1
+
+
+def _RunBisectionScript(
+ config, working_directory, path_to_goma, path_to_extra_src, dry_run):
+ """Attempts to execute the bisect script with the given parameters.
+
+ Args:
+ config: A dict containing the parameters to pass to the script.
+ working_directory: A working directory to provide to the bisect script,
+ where it will store its own copy of the depot.
+ path_to_goma: Path to goma directory.
+ path_to_extra_src: Path to extra source file.
+ dry_run: Do a dry run, skipping sync, build, and performance testing steps.
+
+ Returns:
+ An exit status code: 0 on success, otherwise 1.
+ """
+ _PrintConfigStep(config)
+
+ # Construct the basic command with all necessary arguments.
+ cmd = [
+ 'python',
+ os.path.join(BISECT_SCRIPT_DIR, 'bisect_perf_regression.py'),
+ '--command', config['command'],
+ '--good_revision', config['good_revision'],
+ '--bad_revision', config['bad_revision'],
+ '--working_directory', working_directory,
+ '--output_buildbot_annotations'
+ ]
+
+ # Add flags for any optional config parameters if given in the config.
+ options = [
+ ('metric', '--metric'),
+ ('repeat_count', '--repeat_test_count'),
+ ('truncate_percent', '--truncate_percent'),
+ ('max_time_minutes', '--max_time_minutes'),
+ ('bisect_mode', '--bisect_mode'),
+ ('improvement_direction', '--improvement_direction'),
+ ('bug_id', '--bug_id'),
+ ('try_job_id', '--try_job_id'),
+ ('builder_type', '--builder_type'),
+ ('target_arch', '--target_arch'),
+ ('required_initial_confidence', '--required_initial_confidence'),
+ ]
+ for config_key, flag in options:
+ if config.has_key(config_key):
+ cmd.extend([flag, config[config_key]])
+
+ cmd.extend(['--build_preference', 'ninja'])
+
+ # Possibly set the target platform name based on the browser name in a
+ # Telemetry command.
+ if 'android-chromium' in config['command']:
+ cmd.extend(['--target_platform', 'android'])
+ elif 'android-chrome' in config['command']:
+ cmd.extend(['--target_platform', 'android-chrome'])
+ elif 'android' in config['command']:
+ cmd.extend(['--target_platform', 'android'])
+
+ if path_to_goma:
+ # For Windows XP platforms, goma service is not supported.
+ # Moreover, we don't compile Chrome when the gs_bucket flag is set; build
+ # archives are used instead, so the goma service is ignored on Windows XP.
+ # See http://crbug.com/330900.
+ if platform.release() == 'XP':
+ print ('Goma doesn\'t have a win32 binary, therefore it is not supported '
+ 'on Windows XP platform. Please refer to crbug.com/330900.')
+ path_to_goma = None
+ cmd.append('--use_goma')
+ cmd.append('--goma_dir')
+ cmd.append(os.path.abspath(path_to_goma))
+
+ if path_to_extra_src:
+ cmd.extend(['--extra_src', path_to_extra_src])
+
+ if dry_run:
+ cmd.extend([
+ '--debug_ignore_build',
+ '--debug_ignore_sync',
+ '--debug_ignore_perf_test'
+ ])
+
+ cmd = [str(c) for c in cmd]
+
+ with Goma(path_to_goma) as _:
+ return_code = subprocess.call(cmd)
+
+ if return_code:
+ print ('Error: bisect_perf_regression.py returned with error %d\n'
+ % return_code)
+
+ return return_code
+
+
+def _PrintConfigStep(config):
+ """Prints out the given config, along with Buildbot annotations."""
+ bisect_utils.OutputAnnotationStepStart('Config')
+ print
+ for k, v in config.iteritems():
+ print ' %s : %s' % (k, v)
+ print
+ bisect_utils.OutputAnnotationStepClosed()
+
+
+def _GetBrowserType(bot_platform):
+ """Gets the browser type to be used in the run benchmark command."""
+ if bot_platform == 'android':
+ return 'android-chromium'
+ elif 'x64' in bot_platform:
+ return 'release_x64'
+
+ return 'release'
+
+
+
+def _GuessTelemetryTestCommand(bot_platform, test_name=None):
+ """Creates a Telemetry benchmark command based on bot and test name."""
+ command = []
+ # On Windows, Python scripts should be prefixed with the python command.
+ if bot_platform == 'win':
+ command.append('python')
+ command.append('tools/perf/run_benchmark')
+ command.append('-v')
+ command.append('--browser=%s' % _GetBrowserType(bot_platform))
+ if test_name:
+ command.append(test_name)
+
+ return ' '.join(command)
+
+
+def _GetConfigBasedOnPlatform(config, bot_name, test_name):
+ """Generates required options to create BisectPerformanceMetrics instance."""
+ opts_dict = {
+ 'command': _GuessTelemetryTestCommand(bot_name, test_name),
+ 'target_arch': 'x64' if 'x64' in bot_name else 'ia32',
+ 'build_preference': 'ninja',
+ 'output_buildbot_annotations': True,
+ 'repeat_test_count': 1,
+ 'bisect_mode': bisect_utils.BISECT_MODE_RETURN_CODE,
+ }
+
+ if 'use_goma' in config:
+ opts_dict['use_goma'] = config['use_goma']
+ if 'goma_dir' in config:
+ opts_dict['goma_dir'] = config['goma_dir']
+ if 'android-chromium' in opts_dict['command']:
+ opts_dict['target_platform'] = 'android'
+
+ return bisect_perf_regression.BisectOptions.FromDict(opts_dict)
+
+
+def _GetModifiedFilesFromPatch(cwd=None):
+ """Gets list of files modified in the current patch."""
+ log_output = bisect_utils.CheckRunGit(
+ ['diff', '--no-ext-diff', '--name-only', 'HEAD~1'], cwd=cwd)
+ modified_files = log_output.split()
+ return modified_files
+
+
+def _GetAffectedBenchmarkModuleNames():
+ """Gets list of modified benchmark files under tools/perf/benchmarks."""
+ all_affected_files = _GetModifiedFilesFromPatch()
+ modified_benchmarks = []
+ for affected_file in all_affected_files:
+ if (affected_file.startswith(PERF_BENCHMARKS_PATH) or
+ affected_file.startswith(PERF_MEASUREMENTS_PATH)):
+ benchmark = os.path.basename(os.path.splitext(affected_file)[0])
+ modified_benchmarks.append(benchmark)
+ return modified_benchmarks
+
+
+def _ListAvailableBenchmarks(bot_platform):
+ """Gets all available benchmarks names as a list."""
+ browser_type = _GetBrowserType(bot_platform)
+ if os.path.exists(BENCHMARKS_JSON_FILE):
+ os.remove(BENCHMARKS_JSON_FILE)
+ command = []
+ if 'win' in bot_platform:
+ command.append('python')
+ command.append('tools/perf/run_benchmark')
+ command.extend([
+ 'list',
+ '--browser',
+ browser_type,
+ '--json-output',
+ BENCHMARKS_JSON_FILE])
+ try:
+ output, return_code = bisect_utils.RunProcessAndRetrieveOutput(
+ command=command, cwd=SRC_DIR)
+ if return_code:
+ raise RuntimeError('Something went wrong while listing benchmarks. '
+ 'Please review the command line: %s.\nERROR: [%s]' %
+ (' '.join(command), output))
+ with open(BENCHMARKS_JSON_FILE) as tests_json:
+ tests_data = json.load(tests_json)
+ if tests_data.get('steps'):
+ return tests_data.get('steps').keys()
+ finally:
+ try:
+ if os.path.exists(BENCHMARKS_JSON_FILE):
+ os.remove(BENCHMARKS_JSON_FILE)
+ except OSError as e:
+ if e.errno != errno.ENOENT:
+ raise
+ return None
+
+
+def _OutputOverallResults(results):
+ """Creates results step and prints results on buildbot job."""
+ test_status = all(current_value == True for current_value in results.values())
+ bisect_utils.OutputAnnotationStepStart(
+ 'Results - %s' % ('Passed' if test_status else 'Failed'))
+ print
+ print 'Results of benchmarks:'
+ print
+ for benchmark, result in results.iteritems():
+ print '%s: %s' % (benchmark, 'Passed' if result else 'Failed')
+ if not test_status:
+ bisect_utils.OutputAnnotationStepFailure()
+ bisect_utils.OutputAnnotationStepClosed()
+ # Returns 0 for success and 1 for failure.
+ return 0 if test_status else 1
+
+
+def _RunBenchmark(bisect_instance, opts, bot_name, benchmark_name):
+ """Runs a Telemetry benchmark."""
+ bisect_utils.OutputAnnotationStepStart(benchmark_name)
+ command_to_run = _GuessTelemetryTestCommand(bot_name, benchmark_name)
+ args = shlex.split(command_to_run, posix=not bisect_utils.IsWindowsHost())
+ output, return_code = bisect_utils.RunProcessAndRetrieveOutput(args, SRC_DIR)
+ # A value other than 0 indicates that the test couldn't be run, and results
+ # should also include an error message.
+ if return_code:
+ print ('Error: Something went wrong running the benchmark: %s.'
+ 'Please review the command line:%s\n\n%s' %
+ (benchmark_name, command_to_run, output))
+ bisect_utils.OutputAnnotationStepFailure()
+ print output
+ bisect_utils.OutputAnnotationStepClosed()
+ # return_code holds the exit status of the subprocess that executed the test
+ # command: 0 on a successful run, any non-zero value otherwise.
+ return return_code == 0
+
+
+def _RunBenchmarksForCommitQueue(config):
+ """Runs Telemetry benchmark for the commit queue."""
+ os.chdir(SRC_DIR)
+ # Determine the bot platform by reading the buildbot name from the
+ # environment variable.
+ bot_name = os.environ.get(BUILDBOT_BUILDERNAME)
+ if not bot_name:
+ bot_name = sys.platform
+ bot_name = bot_name.split('_')[0]
+
+ affected_benchmarks = _GetAffectedBenchmarkModuleNames()
+ # Abort if the patch does not modify any existing benchmark files.
+ if not affected_benchmarks:
+ bisect_utils.OutputAnnotationStepStart('Results')
+ print
+ print ('There are no modification to Telemetry benchmarks,'
+ ' aborting the try job.')
+ bisect_utils.OutputAnnotationStepClosed()
+ return 0
+
+ # Bisect script expects to be run from the src directory
+ # Gets required options in order to create a BisectPerformanceMetrics instance.
+ # Since command is a required arg in BisectPerformanceMetrics, we just create
+ # a dummy command for now.
+ opts = _GetConfigBasedOnPlatform(config, bot_name, test_name='')
+ annotations_dict = _GetStepAnnotationStringsDict(config)
+ b = bisect_perf_regression.BisectPerformanceMetrics(opts, os.getcwd())
+ _RunBuildStepForPerformanceTest(b,
+ annotations_dict.get('build1'),
+ annotations_dict.get('sync1'),
+ None)
+ available_benchmarks = _ListAvailableBenchmarks(bot_name)
+ overall_results = {}
+ for affected_benchmark in affected_benchmarks:
+ for benchmark in available_benchmarks:
+ if (benchmark.startswith(affected_benchmark) and
+ not benchmark.endswith('reference')):
+ overall_results[benchmark] = _RunBenchmark(b, opts, bot_name, benchmark)
+
+ return _OutputOverallResults(overall_results)
+
+
+def _OptionParser():
+ """Returns the options parser for run-bisect-perf-regression.py."""
+
+ def ConvertJson(option, _, value, parser):
+ """Provides an OptionParser callback to unmarshal a JSON string."""
+ setattr(parser.values, option.dest, json.loads(value))
+
+ usage = ('%prog [options] [-- chromium-options]\n'
+ 'Used by a try bot to run the bisection script using the parameters'
+ ' provided in the auto_bisect/bisect.cfg file.')
+ parser = optparse.OptionParser(usage=usage)
+ parser.add_option('-w', '--working_directory',
+ type='str',
+ help='A working directory to supply to the bisection '
+ 'script, which will use it as the location to checkout '
+ 'a copy of the chromium depot.')
+ parser.add_option('-p', '--path_to_goma',
+ type='str',
+ help='Path to goma directory. If this is supplied, goma '
+ 'builds will be enabled.')
+ parser.add_option('--path_to_config',
+ type='str',
+ help='Path to the config file to use. If this is supplied, '
+ 'the bisect script will use this to override the default '
+ 'config file path. The script will attempt to load it '
+ 'as a bisect config first, then a perf config.')
+ parser.add_option('--extra_src',
+ type='str',
+ help='Path to extra source file. If this is supplied, '
+ 'bisect script will use this to override default behavior.')
+ parser.add_option('--dry_run',
+ action="store_true",
+ help='The script will perform the full bisect, but '
+ 'without syncing, building, or running the performance '
+ 'tests.')
+ # This argument is passed by buildbot to supply build properties to the bisect
+ # script. Note: Don't change "--build-properties" property name.
+ parser.add_option('--build-properties', action='callback',
+ dest='build_properties',
+ callback=ConvertJson, type='string',
+ nargs=1, default={},
+ help='build properties in JSON format')
+
+ return parser
+
+
+def main():
+ """Entry point for run-bisect-perf-regression.py.
+
+ Reads the config file, and then tries to either bisect a regression or
+ just run a performance test, depending on the particular config parameters
+ specified in the config file.
+ """
+ parser = _OptionParser()
+ opts, _ = parser.parse_args()
+
+ # Use the default config file path unless one was specified.
+ config_path = BISECT_CONFIG_PATH
+ if opts.path_to_config:
+ config_path = opts.path_to_config
+ config = _LoadConfigFile(config_path)
+
+ # Check if the config is valid for running bisect job.
+ config_is_valid = _ValidateBisectConfigFile(config)
+
+ if config and config_is_valid:
+ if not opts.working_directory:
+ print 'Error: missing required parameter: --working_directory\n'
+ parser.print_help()
+ return 1
+
+ return _RunBisectionScript(
+ config, opts.working_directory, opts.path_to_goma, opts.extra_src,
+ opts.dry_run)
+
+ # If it wasn't valid for running a bisect, then maybe the user wanted
+ # to run a perf test instead of a bisect job. Try reading any possible
+ # perf test config files.
+ perf_cfg_files = [RUN_TEST_CONFIG_PATH, WEBKIT_RUN_TEST_CONFIG_PATH]
+ for current_perf_cfg_file in perf_cfg_files:
+ if opts.path_to_config:
+ path_to_perf_cfg = opts.path_to_config
+ else:
+ path_to_perf_cfg = os.path.join(
+ os.path.abspath(os.path.dirname(sys.argv[0])),
+ current_perf_cfg_file)
+
+ config = _LoadConfigFile(path_to_perf_cfg)
+ config_is_valid = _ValidatePerfConfigFile(config)
+
+ if config and config_is_valid:
+ return _SetupAndRunPerformanceTest(config, opts.path_to_goma)
+
+ # If there are no changes to the config file, then check if the request is
+ # from the commit queue, if so then run the modified Telemetry benchmarks for
+ # the patch.
+ if opts.build_properties.get('requester') in _COMMIT_QUEUE_USERS:
+ return _SetupAndRunPerformanceTest(
+ config={}, path_to_goma=opts.path_to_goma, is_cq_tryjob=True)
+
+ print ('Error: Could not load config file. Double check your changes to '
+ 'auto_bisect/bisect.cfg or run-perf-test.cfg for syntax errors.\n')
+ return 1
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/run-perf-test.cfg b/chromium/tools/run-perf-test.cfg
new file mode 100644
index 00000000000..2529fca92f3
--- /dev/null
+++ b/chromium/tools/run-perf-test.cfg
@@ -0,0 +1,77 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Config file for Run Performance Test Bot
+
+ This script is intended for use by anyone who wants to run a remote performance
+test. Modify the config below and add the command to run the performance test,
+the metric you're interested in, and repeat/discard parameters. You can then
+run a git try <bot>.
+
+Changes to this file should never be submitted.
+
+Args:
+ 'command': This is the full command line to pass to the
+ bisect_perf_regression.py script in order to execute the test.
+ 'metric': The name of the metric to parse out from the results of the
+ performance test. You can retrieve the metric by looking at the stdio of
+ the performance test. Look for lines of the format:
+
+ RESULT <graph>: <trace>= <value> <units>
+
+ The metric name is "<graph>/<trace>".
+ 'repeat_count': The number of times to repeat the performance test.
+ 'max_time_minutes': The script will attempt to run the performance test
+ "repeat_count" times, unless it exceeds "max_time_minutes".
+ 'truncate_percent': Discard the highest/lowest % values from performance test.
+
+Sample config:
+
+config = {
+ 'command': './tools/perf/run_benchmark --browser=release smoothness.key_mobile_sites',
+ 'metric': 'mean_frame_time/mean_frame_time',
+ 'repeat_count': '20',
+ 'max_time_minutes': '20',
+ 'truncate_percent': '25',
+}
+
+On Windows:
+ - If you're calling a python script you will need to add "python" to
+the command:
+
+config = {
+ 'command': 'python tools/perf/run_benchmark -v --browser=release smoothness.key_mobile_sites',
+ 'metric': 'mean_frame_time/mean_frame_time',
+ 'repeat_count': '20',
+ 'max_time_minutes': '20',
+ 'truncate_percent': '25',
+}
+
+
+On ChromeOS:
+ - Script accepts either ChromeOS versions, or unix timestamps as revisions.
+ - You don't need to specify --identity and --remote, they will be added to
+ the command using the bot's BISECT_CROS_IP and BISECT_CROS_BOARD values.
+
+config = {
+ 'command': './tools/perf/run_benchmark -v '\
+ '--browser=cros-chrome-guest '\
+ 'smoothness.key_mobile_sites',
+ 'metric': 'mean_frame_time/mean_frame_time',
+ 'repeat_count': '20',
+ 'max_time_minutes': '20',
+ 'truncate_percent': '25',
+}
+
+"""
+
+config = {
+ 'command': '',
+ 'metric': '',
+ 'repeat_count': '',
+ 'max_time_minutes': '',
+ 'truncate_percent': '',
+}
+
+# Workaround git try issue, see crbug.com/257689
diff --git a/chromium/tools/safely-roll-deps.py b/chromium/tools/safely-roll-deps.py
new file mode 100755
index 00000000000..73941238796
--- /dev/null
+++ b/chromium/tools/safely-roll-deps.py
@@ -0,0 +1,164 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate a CL to roll a DEPS entry to the specified revision number and post
+it to Rietveld so that the CL will land automatically if it passes the
+commit-queue's checks.
+"""
+
+import logging
+import optparse
+import os
+import re
+import sys
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+SRC_DIR = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
+sys.path.insert(0, os.path.join(SRC_DIR, 'build'))
+import find_depot_tools
+import scm
+import subprocess2
+
+
+def die_with_error(msg):
+ print >> sys.stderr, msg
+ sys.exit(1)
+
+
+def process_deps(path, project, new_rev, is_dry_run):
+ """Update the '<project>_revision' entry in DEPS to |new_rev|.
+
+ A bit hacky, could it be made better?
+ """
+ content = open(path).read()
+ # Hack for Blink to get the AutoRollBot running again.
+ if project == "blink":
+ project = "webkit"
+ old_line = r"(\s+)'%s_revision': '([0-9a-f]{2,40})'," % project
+ new_line = r"\1'%s_revision': '%s'," % (project, new_rev)
+ new_content = re.sub(old_line, new_line, content, 1)
+ old_rev = re.search(old_line, content).group(2)
+ if not old_rev or new_content == content:
+ die_with_error('Failed to update the DEPS file')
+
+ if not is_dry_run:
+ open(path, 'w').write(new_content)
+ return old_rev
+
+
+class PrintSubprocess(object):
+ """Wrapper for subprocess2 which prints out every command."""
+ def __getattr__(self, attr):
+ def _run_subprocess2(cmd, *args, **kwargs):
+ print cmd
+ sys.stdout.flush()
+ return getattr(subprocess2, attr)(cmd, *args, **kwargs)
+ return _run_subprocess2
+
+prnt_subprocess = PrintSubprocess()
+
+
+def main():
+ tool_dir = os.path.dirname(os.path.abspath(__file__))
+ parser = optparse.OptionParser(usage='%prog [options] <project> <new rev>',
+ description=sys.modules[__name__].__doc__)
+ parser.add_option('-v', '--verbose', action='count', default=0)
+ parser.add_option('--dry-run', action='store_true')
+ parser.add_option('-f', '--force', action='store_true',
+ help='Make destructive changes to the local checkout if '
+ 'necessary.')
+ parser.add_option('--commit', action='store_true', default=True,
+ help='(default) Put change in commit queue on upload.')
+ parser.add_option('--no-commit', action='store_false', dest='commit',
+ help='Don\'t put change in commit queue on upload.')
+ parser.add_option('-r', '--reviewers', default='',
+ help='Add given users as either reviewers or TBR as'
+ ' appropriate.')
+ parser.add_option('--upstream', default='origin/master',
+ help='(default "%default") Use given start point for change'
+ ' to upload. For instance, if you use the old git workflow,'
+ ' you might set it to "origin/trunk".')
+ parser.add_option('--cc', help='CC email addresses for issue.')
+ parser.add_option('-m', '--message', help='Custom commit message.')
+
+ options, args = parser.parse_args()
+ logging.basicConfig(
+ level=
+ [logging.WARNING, logging.INFO, logging.DEBUG][
+ min(2, options.verbose)])
+ if len(args) != 2:
+ parser.print_help()
+ exit(0)
+
+ root_dir = os.path.dirname(tool_dir)
+ os.chdir(root_dir)
+
+ project = args[0]
+ new_rev = args[1]
+
+ # Silence the editor.
+ os.environ['EDITOR'] = 'true'
+
+ if options.force and not options.dry_run:
+ prnt_subprocess.check_call(['git', 'clean', '-d', '-f'])
+ prnt_subprocess.call(['git', 'rebase', '--abort'])
+
+ old_branch = scm.GIT.GetBranch(root_dir)
+ new_branch = '%s_roll' % project
+
+ if options.upstream == new_branch:
+ parser.error('Cannot set %s as its own upstream.' % new_branch)
+
+ if old_branch == new_branch:
+ if options.force:
+ if not options.dry_run:
+ prnt_subprocess.check_call(['git', 'checkout', options.upstream, '-f'])
+ prnt_subprocess.call(['git', 'branch', '-D', old_branch])
+ else:
+ parser.error('Please delete the branch %s and move to a different branch'
+ % new_branch)
+
+ if not options.dry_run:
+ prnt_subprocess.check_call(['git', 'fetch', 'origin'])
+ prnt_subprocess.call(['git', 'svn', 'fetch'])
+ branch_cmd = ['git', 'checkout', '-b', new_branch, options.upstream]
+ if options.force:
+ branch_cmd.append('-f')
+ prnt_subprocess.check_output(branch_cmd)
+
+ try:
+ old_rev = process_deps(os.path.join(root_dir, 'DEPS'), project, new_rev,
+ options.dry_run)
+ print '%s roll %s:%s' % (project.title(), old_rev, new_rev)
+
+ review_field = 'TBR' if options.commit else 'R'
+ commit_msg = options.message or '%s roll %s:%s\n' % (project.title(),
+ old_rev, new_rev)
+ commit_msg += '\n%s=%s\n' % (review_field, options.reviewers)
+
+ if options.dry_run:
+ print 'Commit message: ' + commit_msg
+ return 0
+
+ prnt_subprocess.check_output(['git', 'commit', '-m', commit_msg, 'DEPS'])
+ prnt_subprocess.check_call(['git', 'diff', '--no-ext-diff',
+ options.upstream])
+ upload_cmd = ['git', 'cl', 'upload', '--bypass-hooks']
+ if options.commit:
+ upload_cmd.append('--use-commit-queue')
+ if options.reviewers:
+ upload_cmd.append('--send-mail')
+ if options.cc:
+ upload_cmd.extend(['--cc', options.cc])
+ prnt_subprocess.check_call(upload_cmd)
+ finally:
+ if not options.dry_run:
+ prnt_subprocess.check_output(['git', 'checkout', old_branch])
+ prnt_subprocess.check_output(['git', 'branch', '-D', new_branch])
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/screenshot_testing/update_golden_screenshots.py b/chromium/tools/screenshot_testing/update_golden_screenshots.py
new file mode 100644
index 00000000000..6e14b8519ae
--- /dev/null
+++ b/chromium/tools/screenshot_testing/update_golden_screenshots.py
@@ -0,0 +1,99 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import sys
+import getopt
+import os
+
+here = os.path.realpath(__file__)
+src_path = (os.path.normpath(os.path.join(here, '..', '..', '..')))
+sys.path.append(os.path.normpath(os.path.join(src_path, '..', 'depot_tools')))
+
+USAGE = 'The utility uploads .png files to ' \
+ 'chrome-os-oobe-ui-screenshot-testing Google Storage bucket.\n' \
+ '-i:\n\tdirectory with .png files which have to be uploaded\n' \
+ '-o (optional):\n\tdirectory to store generated .sha1 files. ' \
+ 'Is set to chrome/browser/chromeos/login/screenshot_testing' \
+ '/golden_screenshots by default\n--help:\n\thelp'
+
+
+import upload_to_google_storage
+import download_from_google_storage
+
+def upload(png_path):
+
+ # Creating a list of files which need to be uploaded to Google Storage:
+ # all .png files from the directory containing golden screenshots.
+ target = []
+ for file in os.listdir(png_path):
+ if file.endswith('.png'):
+ target.append(os.path.join(png_path, file))
+
+ # Creating a standard gsutil object, assuming there are depot_tools
+ # and everything related is set up already.
+ gsutil_path = os.path.abspath(os.path.join(src_path, '..', 'depot_tools',
+ 'third_party', 'gsutil',
+ 'gsutil'))
+ gsutil = download_from_google_storage.Gsutil(gsutil_path,
+ boto_path=None,
+ bypass_prodaccess=True)
+
+ # URL of the bucket used for storing screenshots.
+ bucket_url = 'gs://chrome-os-oobe-ui-screenshot-testing'
+
+ # Uploading using the most simple way,
+ # see depot_tools/upload_to_google_storage.py to have better understanding
+ # of this False and 1 arguments.
+ upload_to_google_storage.upload_to_google_storage(target, bucket_url, gsutil,
+ False, False, 1, False)
+
+ print 'All images are uploaded to Google Storage.'
+
+def move_sha1(from_path, to_path):
+ from shutil import move
+ for file in os.listdir(from_path):
+ if (file.endswith('.sha1')):
+ old_place = os.path.join(from_path, file)
+ new_place = os.path.join(to_path, file)
+ if not os.path.exists(os.path.dirname(new_place)):
+ os.makedirs(os.path.dirname(new_place))
+ move(old_place, new_place)
+
+def main(argv):
+ png_path = ''
+ sha1_path = os.path.join(src_path,
+ 'chrome', 'browser', 'chromeos', 'login',
+ 'screenshot_testing', 'golden_screenshots')
+ try:
+ opts, args = getopt.getopt(argv,'i:o:', ['--help'])
+ except getopt.GetoptError:
+ print USAGE
+ sys.exit(1)
+ for opt, arg in opts:
+ if opt == '--help':
+ print USAGE
+ sys.exit()
+ elif opt == '-i':
+ png_path = arg
+ elif opt =='-o':
+ sha1_path = arg
+
+ if png_path == '':
+ print USAGE
+ sys.exit(1)
+
+ png_path = os.path.abspath(png_path)
+ sha1_path = os.path.abspath(sha1_path)
+
+ upload(png_path)
+ move_sha1(png_path, sha1_path)
+
+ # TODO(elizavetai): Can this git stuff be done automatically?
+ print 'Please add new .sha1 files from ' \
+ + str(sha1_path) + \
+ ' to git manually.'
+
+if __name__ == "__main__":
+ main(sys.argv[1:]) \ No newline at end of file
diff --git a/chromium/tools/security/OWNERS b/chromium/tools/security/OWNERS
new file mode 100644
index 00000000000..0102bf5c5d5
--- /dev/null
+++ b/chromium/tools/security/OWNERS
@@ -0,0 +1,2 @@
+jschuh@chromium.org
+tsepez@chromium.org
diff --git a/chromium/tools/security/check_message_owners.py b/chromium/tools/security/check_message_owners.py
new file mode 100755
index 00000000000..b3793f6fb34
--- /dev/null
+++ b/chromium/tools/security/check_message_owners.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Make sure all of the per-file *_messages.h OWNERS are consistent"""
+
+import os
+import re
+import sys
+
def main():
  """Collects per-file *_messages.h owners and reports inconsistencies.

  Walks the tree two levels above this script, gathers the owner sets of
  every OWNERS file, and prints any owner missing from some file.
  Always returns 0.
  """
  script_dir = os.path.dirname(__file__)
  root_dir = os.path.abspath(os.path.join(script_dir, '..', '..'))
  owners = collect_owners(root_dir)
  print_missing_owners(owners, get_all_owners(owners))
  return 0
+
def collect_owners(root_dir):
  """Maps each OWNERS file path under root_dir to its message-owner set.

  Directories without an OWNERS file, and OWNERS files that yield an
  empty owner set, are omitted from the result.
  """
  result = {}
  for root, _dirs, files in os.walk(root_dir):
    if "OWNERS" not in files:
      continue
    owner_file_path = os.path.join(root, "OWNERS")
    owner_set = extract_owners_from_file(owner_file_path)
    if owner_set:
      result[owner_file_path] = owner_set
  return result
+
def extract_owners_from_file(owner_file_path):
  """Returns the set of owner names from per-file *_messages rules.

  Matches lines of the form
    per-file *_messages*.h=someone@chromium.org
  and collects the (stripped) part before the '@'.

  Args:
    owner_file_path: path to an OWNERS file.
  """
  result = set()
  # Raw string so '\s' is a regex escape rather than a (deprecated)
  # string-literal escape.
  regexp = re.compile(r'^per-file.*_messages[^=]*=\s*(.*)@([^#]*)')
  with open(owner_file_path) as f:
    for line in f:
      match = regexp.match(line)
      if match:
        result.add(match.group(1).strip())
  return result
+
def get_all_owners(owner_dict):
  """Unions every owner set in owner_dict into a single set."""
  # set().union(*...) handles the empty-dict case (returns an empty set).
  return set().union(*owner_dict.values())
+
+def print_missing_owners(owner_dict, owner_set):
+ for key in owner_dict:
+ for owner in owner_set:
+ if not owner in owner_dict[key]:
+ print key + " is missing " + owner
+
+if '__main__' == __name__:
+ sys.exit(main())
diff --git a/chromium/tools/set_default_handler/DEPS b/chromium/tools/set_default_handler/DEPS
new file mode 100644
index 00000000000..12a2df7a3fd
--- /dev/null
+++ b/chromium/tools/set_default_handler/DEPS
@@ -0,0 +1,4 @@
+include_rules = [
+ "+ui",
+ "+win8/test",
+]
diff --git a/chromium/tools/set_default_handler/set_default_handler_main.cc b/chromium/tools/set_default_handler/set_default_handler_main.cc
new file mode 100644
index 00000000000..92edb6c1182
--- /dev/null
+++ b/chromium/tools/set_default_handler/set_default_handler_main.cc
@@ -0,0 +1,65 @@
+// Copyright (c) 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Makes a given program ("Google Chrome" by default) the default handler for
+// some URL protocol ("http" by default) on Windows 8. These defaults can be
+// overridden via the --program and --protocol command line switches.
+
+#include <windows.h>
+
+#include "base/at_exit.h"
+#include "base/command_line.h"
+#include "base/logging.h"
+#include "base/strings/string16.h"
+#include "base/strings/string_util.h"
+#include "ui/base/win/atl_module.h"
+#include "win8/test/open_with_dialog_controller.h"
+
+namespace {
+
+const char kSwitchProgram[] = "program";
+const char kSwitchProtocol[] = "protocol";
+const wchar_t kDefaultProgram[] = L"Google Chrome";
+const wchar_t kDefaultProtocol[] = L"http";
+
+} // namespace
+
+extern "C"
+int wmain(int argc, wchar_t* argv[]) {
+ // Initialize the commandline singleton from the environment.
+ base::CommandLine::Init(0, NULL);
+ // The exit manager is in charge of calling the dtors of singletons.
+ base::AtExitManager exit_manager;
+ logging::LoggingSettings settings;
+ settings.logging_dest = logging::LOG_TO_SYSTEM_DEBUG_LOG;
+ logging::InitLogging(settings);
+ logging::SetMinLogLevel(logging::LOG_VERBOSE);
+
+ ui::win::CreateATLModuleIfNeeded();
+
+ base::CommandLine* command_line = base::CommandLine::ForCurrentProcess();
+ base::string16 protocol(command_line->GetSwitchValueNative(kSwitchProtocol));
+ if (protocol.empty())
+ protocol = kDefaultProtocol;
+
+ base::string16 program(command_line->GetSwitchValueNative(kSwitchProgram));
+ if (program.empty())
+ program = kDefaultProgram;
+
+ std::vector<base::string16> choices;
+ HRESULT result = S_OK;
+ win8::OpenWithDialogController controller;
+ result = controller.RunSynchronously(NULL, protocol, program, &choices);
+
+ if (SUCCEEDED(result)) {
+ printf("success\n");
+ } else if (!choices.empty()) {
+ printf("failed to set program. possible choices: %ls\n",
+ base::JoinString(choices, L", ").c_str());
+ } else {
+ printf("failed with HRESULT: %0x08X\n", result);
+ }
+
+ return FAILED(result);
+}
diff --git a/chromium/tools/site_compare/command_line.py b/chromium/tools/site_compare/command_line.py
new file mode 100755
index 00000000000..de93d18ce0a
--- /dev/null
+++ b/chromium/tools/site_compare/command_line.py
@@ -0,0 +1,802 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Parse a command line, retrieving a command and its arguments.
+
+Supports the concept of command line commands, each with its own set
+of arguments. Supports dependent arguments and mutually exclusive arguments.
+Basically, a better optparse. I took heed of epg's WHINE() in gvn.cmdline
+and dumped optparse in favor of something better.
+"""
+
+import os.path
+import re
+import string
+import sys
+import textwrap
+import types
+
+
def IsString(var):
  """True when var's exact type is one of the string types (str/unicode)."""
  var_type = type(var)
  return var_type in types.StringTypes
+
+
class ParseError(Exception):
  """Raised on command-line parsing errors; args[0] carries a description."""
+
+
class Command(object):
  """Implements a single command.

  A Command owns a set of Argument descriptions, parses argument strings
  handed to it by its owning CommandLine (via self.cmdline.rargs), and
  exposes parsed values through dict-style access, e.g. cmd['--flag'].
  """

  def __init__(self, names, helptext, validator=None, impl=None):
    """Initializes Command from names and helptext, plus optional callables.

    Args:
      names: command name, or list of synonyms
      helptext: brief string description of the command
      validator: callable for custom argument validation
        Should raise ParseError if it wants
      impl: callable to be invoked when command is called
    """
    self.names = names
    self.validator = validator
    self.helptext = helptext
    self.impl = impl
    self.args = []              # Arguments, in the order they were added.
    self.required_groups = []   # Lists of Arguments; one of each must appear.
    self.arg_dict = {}          # Maps each (lowercased) synonym to its Argument.
    self.positional_args = []
    self.cmdline = None         # Owning CommandLine; set by AddCommand.

  class Argument(object):
    """Encapsulates an argument to a command."""
    # All recognized argument types, and the subset that consumes a value.
    VALID_TYPES = ['string', 'readfile', 'int', 'flag', 'coords']
    TYPES_WITH_VALUES = ['string', 'readfile', 'int', 'coords']

    def __init__(self, names, helptext, type, metaname,
                 required, default, positional):
      """Command-line argument to a command.

      Args:
        names: argument name, or list of synonyms
        helptext: brief description of the argument
        type: type of the argument. Valid values include:
          string - a string
          readfile - a file which must exist and be available
            for reading
          int - an integer
          flag - an optional flag (bool)
          coords - (x,y) where x and y are ints
        metaname: Name to display for value in help, inferred if not
          specified
        required: True if argument must be specified
        default: Default value if not specified
        positional: Argument specified by location, not name

      Raises:
        ValueError: the argument name is invalid for some reason
      """
      if type not in Command.Argument.VALID_TYPES:
        raise ValueError("Invalid type: %r" % type)

      if required and default is not None:
        raise ValueError("required and default are mutually exclusive")

      if required and type == 'flag':
        raise ValueError("A required flag? Give me a break.")

      if metaname and type not in Command.Argument.TYPES_WITH_VALUES:
        raise ValueError("Type %r can't have a metaname" % type)

      # If no metaname is provided, infer it: use the alphabetical characters
      # of the last provided name
      if not metaname and type in Command.Argument.TYPES_WITH_VALUES:
        metaname = (
            names[-1].lstrip(string.punctuation + string.whitespace).upper())

      self.names = names
      self.helptext = helptext
      self.type = type
      self.required = required
      self.default = default
      self.positional = positional
      self.metaname = metaname

      self.mutex = []       # arguments that are mutually exclusive with
                            # this one
      self.depends = []     # arguments that must be present for this
                            # one to be valid
      self.present = False  # has this argument been specified?
      # NOTE: self.value and self.argstr are attached dynamically during
      # Command.ParseArguments / ParseNextArgument.

    def AddDependency(self, arg):
      """Makes this argument dependent on another argument.

      Args:
        arg: name of the argument this one depends on
      """
      if arg not in self.depends:
        self.depends.append(arg)

    def AddMutualExclusion(self, arg):
      """Makes this argument invalid if another is specified.

      Args:
        arg: name of the mutually exclusive argument.
      """
      if arg not in self.mutex:
        self.mutex.append(arg)

    def GetUsageString(self):
      """Returns a brief string describing the argument's usage."""
      # NOTE: the local 'string' below shadows the string module, but only
      # within this method.
      if not self.positional:
        string = self.names[0]
        if self.type in Command.Argument.TYPES_WITH_VALUES:
          string += "="+self.metaname
      else:
        string = self.metaname

      if not self.required:
        string = "["+string+"]"

      return string

    def GetNames(self):
      """Returns a string containing a list of the arg's names."""
      if self.positional:
        return self.metaname
      else:
        return ", ".join(self.names)

    def GetHelpString(self, width=80, indent=5, names_width=20, gutter=2):
      """Returns a help string including help for all the arguments."""
      names = [" "*indent + line +" "*(names_width-len(line)) for line in
               textwrap.wrap(self.GetNames(), names_width)]

      helpstring = textwrap.wrap(self.helptext, width-indent-names_width-gutter)

      # Pad whichever column is shorter so the two zip together line-by-line.
      if len(names) < len(helpstring):
        names += [" "*(indent+names_width)]*(len(helpstring)-len(names))

      if len(helpstring) < len(names):
        helpstring += [""]*(len(names)-len(helpstring))

      return "\n".join([name_line + " "*gutter + help_line for
                        name_line, help_line in zip(names, helpstring)])

    def __repr__(self):
      """Debug representation; shows the parsed value when present."""
      if self.present:
        string = '= %r' % self.value
      else:
        string = "(absent)"

      return "Argument %s '%s'%s" % (self.type, self.names[0], string)

  # end of nested class Argument

  def AddArgument(self, names, helptext, type="string", metaname=None,
                  required=False, default=None, positional=False):
    """Command-line argument to a command.

    Args:
      names: argument name, or list of synonyms
      helptext: brief description of the argument
      type: type of the argument
      metaname: Name to display for value in help, inferred if not
        specified
      required: True if argument must be specified
      default: Default value if not specified
      positional: Argument specified by location, not name

    Raises:
      ValueError: the argument already exists or is invalid

    Returns:
      The newly-created argument
    """
    if IsString(names): names = [names]

    names = [name.lower() for name in names]

    for name in names:
      if name in self.arg_dict:
        raise ValueError("%s is already an argument"%name)

    # A required positional arg may not follow an optional positional arg,
    # because there would be no way to tell which one a value belongs to.
    if (positional and required and
        [arg for arg in self.args if arg.positional] and
        not [arg for arg in self.args if arg.positional][-1].required):
      raise ValueError(
          "A required positional argument may not follow an optional one.")

    arg = Command.Argument(names, helptext, type, metaname,
                           required, default, positional)

    self.args.append(arg)

    for name in names:
      self.arg_dict[name] = arg

    return arg

  def GetArgument(self, name):
    """Return an argument from a name."""
    return self.arg_dict[name.lower()]

  def AddMutualExclusion(self, args):
    """Specifies that a list of arguments are mutually exclusive."""
    if len(args) < 2:
      raise ValueError("At least two arguments must be specified.")

    args = [arg.lower() for arg in args]

    # Register the exclusion pairwise, in both directions.
    for index in xrange(len(args)-1):
      for index2 in xrange(index+1, len(args)):
        self.arg_dict[args[index]].AddMutualExclusion(self.arg_dict[args[index2]])

  def AddDependency(self, dependent, depends_on):
    """Specifies that one argument may only be present if another is.

    Args:
      dependent: the name of the dependent argument
      depends_on: the name of the argument on which it depends
    """
    self.arg_dict[dependent.lower()].AddDependency(
        self.arg_dict[depends_on.lower()])

  def AddMutualDependency(self, args):
    """Specifies that a list of arguments are all mutually dependent."""
    if len(args) < 2:
      raise ValueError("At least two arguments must be specified.")

    args = [arg.lower() for arg in args]

    # Every argument depends on every other argument (full cross product).
    for (arg1, arg2) in [(arg1, arg2) for arg1 in args for arg2 in args]:
      if arg1 == arg2: continue
      self.arg_dict[arg1].AddDependency(self.arg_dict[arg2])

  def AddRequiredGroup(self, args):
    """Specifies that at least one of the named arguments must be present."""
    if len(args) < 2:
      raise ValueError("At least two arguments must be in a required group.")

    args = [self.arg_dict[arg.lower()] for arg in args]

    self.required_groups.append(args)

  def ParseArguments(self):
    """Given a command line, parse and validate the arguments.

    Errors are accumulated in self.parse_errors (a list of strings)
    rather than raised; callers inspect it after parsing.
    """

    # reset all the arguments before we parse
    for arg in self.args:
      arg.present = False
      arg.value = None

    self.parse_errors = []

    # look for arguments remaining on the command line
    while len(self.cmdline.rargs):
      try:
        self.ParseNextArgument()
      except ParseError, e:
        self.parse_errors.append(e.args[0])

    # after all the arguments are parsed, check for problems
    for arg in self.args:
      if not arg.present and arg.required:
        self.parse_errors.append("'%s': required parameter was missing"
                                 % arg.names[0])

      if not arg.present and arg.default:
        arg.present = True
        arg.value = arg.default

      if arg.present:
        for mutex in arg.mutex:
          if mutex.present:
            self.parse_errors.append(
                "'%s', '%s': arguments are mutually exclusive" %
                (arg.argstr, mutex.argstr))

        for depend in arg.depends:
          if not depend.present:
            self.parse_errors.append("'%s': '%s' must be specified as well" %
                                     (arg.argstr, depend.names[0]))

    # check for required groups
    for group in self.required_groups:
      if not [arg for arg in group if arg.present]:
        self.parse_errors.append("%s: at least one must be present" %
            (", ".join(["'%s'" % arg.names[-1] for arg in group])))

    # if we have any validators, invoke them
    if not self.parse_errors and self.validator:
      try:
        self.validator(self)
      except ParseError, e:
        self.parse_errors.append(e.args[0])

  # Helper methods so you can treat the command like a dict
  def __getitem__(self, key):
    """Returns the parsed value for key; for flags, returns presence."""
    arg = self.arg_dict[key.lower()]

    if arg.type == 'flag':
      return arg.present
    else:
      return arg.value

  def __iter__(self):
    """Iterates over the arguments that were actually specified."""
    return [arg for arg in self.args if arg.present].__iter__()

  def ArgumentPresent(self, key):
    """Tests if an argument exists and has been specified."""
    return key.lower() in self.arg_dict and self.arg_dict[key.lower()].present

  def __contains__(self, key):
    return self.ArgumentPresent(key)

  def ParseNextArgument(self):
    """Find the next argument in the command line and parse it.

    Pops from self.cmdline.rargs and appends the recognized pieces to
    self.cmdline.largs.  Raises ParseError on an unknown argument or a
    missing/unconvertible value.
    """
    arg = None
    value = None
    argstr = self.cmdline.rargs.pop(0)

    # First check: is this a literal argument?
    if argstr.lower() in self.arg_dict:
      arg = self.arg_dict[argstr.lower()]
      if arg.type in Command.Argument.TYPES_WITH_VALUES:
        if len(self.cmdline.rargs):
          value = self.cmdline.rargs.pop(0)

    # Second check: is this of the form "arg=val" or "arg:val"?
    if arg is None:
      delimiter_pos = -1

      # Use whichever delimiter occurs first in the string.
      for delimiter in [':', '=']:
        pos = argstr.find(delimiter)
        if pos >= 0:
          if delimiter_pos < 0 or pos < delimiter_pos:
            delimiter_pos = pos

      if delimiter_pos >= 0:
        testarg = argstr[:delimiter_pos]
        testval = argstr[delimiter_pos+1:]

        if testarg.lower() in self.arg_dict:
          arg = self.arg_dict[testarg.lower()]
          argstr = testarg
          value = testval

    # Third check: does this begin an argument?
    if arg is None:
      for key in self.arg_dict.iterkeys():
        if (len(key) < len(argstr) and
            self.arg_dict[key].type in Command.Argument.TYPES_WITH_VALUES and
            argstr[:len(key)].lower() == key):
          value = argstr[len(key):]
          argstr = argstr[:len(key)]
          arg = self.arg_dict[argstr]

    # Fourth check: do we have any positional arguments available?
    if arg is None:
      for positional_arg in [
          testarg for testarg in self.args if testarg.positional]:
        if not positional_arg.present:
          arg = positional_arg
          value = argstr
          argstr = positional_arg.names[0]
          break

    # Push the retrieved argument/value onto the largs stack
    if argstr: self.cmdline.largs.append(argstr)
    if value: self.cmdline.largs.append(value)

    # If we've made it this far and haven't found an arg, give up
    if arg is None:
      raise ParseError("Unknown argument: '%s'" % argstr)

    # Convert the value, if necessary
    if arg.type in Command.Argument.TYPES_WITH_VALUES and value is None:
      raise ParseError("Argument '%s' requires a value" % argstr)

    if value is not None:
      value = self.StringToValue(value, arg.type, argstr)

    arg.argstr = argstr
    arg.value = value
    arg.present = True

  # end method ParseNextArgument

  def StringToValue(self, value, type, argstr):
    """Convert a string from the command line to a value type.

    Raises ParseError (with a descriptive message) when conversion or
    validation fails; argstr is used only for error messages.
    """
    try:
      if type == 'string':
        pass  # leave it be

      elif type == 'int':
        try:
          value = int(value)
        except ValueError:
          raise ParseError

      elif type == 'readfile':
        if not os.path.isfile(value):
          raise ParseError("'%s': '%s' does not exist" % (argstr, value))

      elif type == 'coords':
        try:
          value = [int(val) for val in
                   re.match("\(\s*(\d+)\s*\,\s*(\d+)\s*\)\s*\Z", value).
                   groups()]
        except AttributeError:
          raise ParseError

      else:
        raise ValueError("Unknown type: '%s'" % type)

    except ParseError, e:
      # The bare exception is raised in the generic case; more specific errors
      # will arrive with arguments and should just be reraised
      if not e.args:
        e = ParseError("'%s': unable to convert '%s' to type '%s'" %
                       (argstr, value, type))
      raise e

    return value

  def SortArgs(self):
    """Returns a method that can be passed to sort() to sort arguments."""

    def ArgSorter(arg1, arg2):
      """Helper for sorting arguments in the usage string.

      Positional arguments come first, then required arguments,
      then optional arguments. Pylint demands this trivial function
      have both Args: and Returns: sections, sigh.

      Args:
        arg1: the first argument to compare
        arg2: the second argument to compare

      Returns:
        -1 if arg1 should be sorted first, +1 if it should be sorted second,
        and 0 if arg1 and arg2 have the same sort level.
      """
      # Booleans subtract as ints: positional outranks required.
      return ((arg2.positional-arg1.positional)*2 +
              (arg2.required-arg1.required))
    return ArgSorter

  def GetUsageString(self, width=80, name=None):
    """Gets a string describing how the command is used."""
    if name is None: name = self.names[0]

    initial_indent = "Usage: %s %s " % (self.cmdline.prog, name)
    subsequent_indent = " " * len(initial_indent)

    sorted_args = self.args[:]
    sorted_args.sort(self.SortArgs())

    return textwrap.fill(
        " ".join([arg.GetUsageString() for arg in sorted_args]), width,
        initial_indent=initial_indent,
        subsequent_indent=subsequent_indent)

  def GetHelpString(self, width=80):
    """Returns a list of help strings for all this command's arguments."""
    sorted_args = self.args[:]
    sorted_args.sort(self.SortArgs())

    return "\n".join([arg.GetHelpString(width) for arg in sorted_args])

  # end class Command
+
+
class CommandLine(object):
  """Parse a command line, extracting a command and its arguments.

  Owns a set of Command objects; ParseCommandLine picks the command named
  by argv[0] and delegates argument parsing to it via the shared
  rargs/largs lists stored on this object.
  """

  def __init__(self):
    self.commands = []   # Commands, in registration order (used for help).
    self.cmd_dict = {}   # Maps each lowercased synonym to its Command.

    # Add the help command to the parser
    help_cmd = self.AddCommand(["help", "--help", "-?", "-h"],
                               "Displays help text for a command",
                               ValidateHelpCommand,
                               DoHelpCommand)

    help_cmd.AddArgument(
        "command", "Command to retrieve help for", positional=True)
    help_cmd.AddArgument(
        "--width", "Width of the output", type='int', default=80)

    self.Exit = sys.exit  # override this if you don't want the script to halt
                          # on error or on display of help

    self.out = sys.stdout  # override these if you want to redirect
    self.err = sys.stderr  # output or error messages

  def AddCommand(self, names, helptext, validator=None, impl=None):
    """Add a new command to the parser.

    Args:
      names: command name, or list of synonyms
      helptext: brief string description of the command
      validator: method to validate a command's arguments
      impl: callable to be invoked when command is called

    Raises:
      ValueError: raised if command already added

    Returns:
      The new command
    """
    if IsString(names): names = [names]

    # NOTE(review): this duplicate check uses the raw name, but cmd_dict
    # keys are lowercased below — a mixed-case synonym could slip past it.
    for name in names:
      if name in self.cmd_dict:
        raise ValueError("%s is already a command"%name)

    cmd = Command(names, helptext, validator, impl)
    cmd.cmdline = self

    self.commands.append(cmd)
    for name in names:
      self.cmd_dict[name.lower()] = cmd

    return cmd

  def GetUsageString(self):
    """Returns simple usage instructions."""
    return "Type '%s help' for usage." % self.prog

  def ParseCommandLine(self, argv=None, prog=None, execute=True):
    """Does the work of parsing a command line.

    Args:
      argv: list of arguments, defaults to sys.args[1:]
      prog: name of the command, defaults to the base name of the script
      execute: if false, just parse, don't invoke the 'impl' member

    Returns:
      The command that was executed
    """
    if argv is None: argv = sys.argv[1:]
    if prog is None: prog = os.path.basename(sys.argv[0]).split('.')[0]

    # Store off our parameters, we may need them someday
    self.argv = argv
    self.prog = prog

    # We shouldn't be invoked without arguments, that's just lame
    if not len(argv):
      self.out.writelines(self.GetUsageString())
      self.Exit()
      return None  # in case the client overrides Exit

    # Is it a valid command?
    self.command_string = argv[0].lower()
    if not self.command_string in self.cmd_dict:
      self.err.write("Unknown command: '%s'\n\n" % self.command_string)
      self.out.write(self.GetUsageString())
      self.Exit()
      return None  # in case the client overrides Exit

    self.command = self.cmd_dict[self.command_string]

    # "rargs" = remaining (unparsed) arguments
    # "largs" = already parsed, "left" of the read head
    self.rargs = argv[1:]
    self.largs = []

    # let the command object do the parsing
    self.command.ParseArguments()

    if self.command.parse_errors:
      # there were errors, output the usage string and exit
      self.err.write(self.command.GetUsageString()+"\n\n")
      self.err.write("\n".join(self.command.parse_errors))
      self.err.write("\n\n")

      self.Exit()

    elif execute and self.command.impl:
      self.command.impl(self.command)

    return self.command

  def __getitem__(self, key):
    return self.cmd_dict[key]

  def __iter__(self):
    return self.cmd_dict.__iter__()
+
+
def ValidateHelpCommand(command):
  """Checks to make sure an argument to 'help' is a valid command."""
  if 'command' not in command:
    return
  if command['command'] in command.cmdline:
    return
  raise ParseError("'%s': unknown command" % command['command'])
+
+
def DoHelpCommand(command):
  """Executed when the command is 'help'.

  With no argument, prints the overall usage plus a one-line summary of
  every registered command; with a command argument, prints that
  command's full usage and argument help.  Always exits via cmdline.Exit.
  """
  out = command.cmdline.out
  width = command['--width']

  if 'command' not in command:
    # No target command: summarize everything.
    out.write(command.GetUsageString())
    out.write("\n\n")

    indent = 5
    gutter = 2

    # Width of the command-name column, sized to the longest name.
    command_width = (
        max([len(cmd.names[0]) for cmd in command.cmdline.commands]) + gutter)

    for cmd in command.cmdline.commands:
      cmd_name = cmd.names[0]

      initial_indent = (" "*indent + cmd_name + " "*
                        (command_width+gutter-len(cmd_name)))
      subsequent_indent = " "*(indent+command_width+gutter)

      out.write(textwrap.fill(cmd.helptext, width,
                              initial_indent=initial_indent,
                              subsequent_indent=subsequent_indent))
      out.write("\n")

    out.write("\n")

  else:
    # Detailed help for one specific command.
    help_cmd = command.cmdline[command['command']]

    out.write(textwrap.fill(help_cmd.helptext, width))
    out.write("\n\n")
    out.write(help_cmd.GetUsageString(width=width))
    out.write("\n\n")
    out.write(help_cmd.GetHelpString(width=width))
    out.write("\n")

  command.cmdline.Exit()
+
+
def main():
  """Self-test driver: exercises CommandLine/Command when run as a script.

  Builds a synthetic 'test' command covering every argument feature,
  exec's a batch of expected-failure registrations, then parses a table
  of command lines and compares the error counts against expectations.
  """
  # If we're invoked rather than imported, run some tests
  cmdline = CommandLine()

  # Since we're testing, override Exit()
  def TestExit():
    pass
  cmdline.Exit = TestExit

  # Actually, while we're at it, let's override error output too
  cmdline.err = open(os.path.devnull, "w")

  test = cmdline.AddCommand(["test", "testa", "testb"], "test command")
  test.AddArgument(["-i", "--int", "--integer", "--optint", "--optionalint"],
                   "optional integer parameter", type='int')
  test.AddArgument("--reqint", "required integer parameter", type='int',
                   required=True)
  test.AddArgument("pos1", "required positional argument", positional=True,
                   required=True)
  test.AddArgument("pos2", "optional positional argument", positional=True)
  test.AddArgument("pos3", "another optional positional arg",
                   positional=True)

  # mutually dependent arguments
  test.AddArgument("--mutdep1", "mutually dependent parameter 1")
  test.AddArgument("--mutdep2", "mutually dependent parameter 2")
  test.AddArgument("--mutdep3", "mutually dependent parameter 3")
  test.AddMutualDependency(["--mutdep1", "--mutdep2", "--mutdep3"])

  # mutually exclusive arguments
  test.AddArgument("--mutex1", "mutually exclusive parameter 1")
  test.AddArgument("--mutex2", "mutually exclusive parameter 2")
  test.AddArgument("--mutex3", "mutually exclusive parameter 3")
  test.AddMutualExclusion(["--mutex1", "--mutex2", "--mutex3"])

  # dependent argument
  test.AddArgument("--dependent", "dependent argument")
  test.AddDependency("--dependent", "--int")

  # other argument types
  test.AddArgument("--file", "filename argument", type='readfile')
  test.AddArgument("--coords", "coordinate argument", type='coords')
  test.AddArgument("--flag", "flag argument", type='flag')

  test.AddArgument("--req1", "part of a required group", type='flag')
  test.AddArgument("--req2", "part 2 of a required group", type='flag')

  test.AddRequiredGroup(["--req1", "--req2"])

  # a few failure cases
  # (each "+++"-separated chunk is exec'd below and must raise)
  exception_cases = """
test.AddArgument("failpos", "can't have req'd pos arg after opt",
positional=True, required=True)
+++
test.AddArgument("--int", "this argument already exists")
+++
test.AddDependency("--int", "--doesntexist")
+++
test.AddMutualDependency(["--doesntexist", "--mutdep2"])
+++
test.AddMutualExclusion(["--doesntexist", "--mutex2"])
+++
test.AddArgument("--reqflag", "required flag", required=True, type='flag')
+++
test.AddRequiredGroup(["--req1", "--doesntexist"])
"""
  for exception_case in exception_cases.split("+++"):
    try:
      exception_case = exception_case.strip()
      exec exception_case  # yes, I'm using exec, it's just for a test.
    except ValueError:
      # this is expected
      pass
    except KeyError:
      # ...and so is this
      pass
    else:
      print ("FAILURE: expected an exception for '%s'"
             " and didn't get it" % exception_case)

  # Let's do some parsing! first, the minimal success line:
  MIN = "test --reqint 123 param1 --req1 "

  # tuples of (command line, expected error count)
  test_lines = [
      ("test --int 3 foo --req1", 1),   # missing required named parameter
      ("test --reqint 3 --req1", 1),    # missing required positional parameter
      (MIN, 0),                         # success!
      ("test param1 --reqint 123 --req1", 0),  # success, order shouldn't matter
      ("test param1 --reqint 123 --req2", 0),  # success, any of required group ok
      (MIN+"param2", 0),                # another positional parameter is okay
      (MIN+"param2 param3", 0),         # and so are three
      (MIN+"param2 param3 param4", 1),  # but four are just too many
      (MIN+"--int", 1),                 # where's the value?
      (MIN+"--int 456", 0),             # this is fine
      (MIN+"--int456", 0),              # as is this
      (MIN+"--int:456", 0),             # and this
      (MIN+"--int=456", 0),             # and this
      (MIN+"--file c:\\windows\\system32\\kernel32.dll", 0),  # yup
      (MIN+"--file c:\\thisdoesntexist", 1),  # nope
      (MIN+"--mutdep1 a", 2),           # no!
      (MIN+"--mutdep2 b", 2),           # also no!
      (MIN+"--mutdep3 c", 2),           # dream on!
      (MIN+"--mutdep1 a --mutdep2 b", 2),  # almost!
      (MIN+"--mutdep1 a --mutdep2 b --mutdep3 c", 0),  # yes
      (MIN+"--mutex1 a", 0),            # yes
      (MIN+"--mutex2 b", 0),            # yes
      (MIN+"--mutex3 c", 0),            # fine
      (MIN+"--mutex1 a --mutex2 b", 1),  # not fine
      (MIN+"--mutex1 a --mutex2 b --mutex3 c", 3),  # even worse
      (MIN+"--dependent 1", 1),         # no
      (MIN+"--dependent 1 --int 2", 0),  # ok
      (MIN+"--int abc", 1),             # bad type
      (MIN+"--coords abc", 1),          # also bad
      (MIN+"--coords (abc)", 1),        # getting warmer
      (MIN+"--coords (abc,def)", 1),    # missing something
      (MIN+"--coords (123)", 1),        # ooh, so close
      (MIN+"--coords (123,def)", 1),    # just a little farther
      (MIN+"--coords (123,456)", 0),    # finally!
      ("test --int 123 --reqint=456 foo bar --coords(42,88) baz --req1", 0)
      ]

  badtests = 0

  for (test, expected_failures) in test_lines:
    cmdline.ParseCommandLine([x.strip() for x in test.strip().split(" ")])

    if not len(cmdline.command.parse_errors) == expected_failures:
      print "FAILED:\n issued: '%s'\n expected: %d\n received: %d\n\n" % (
          test, expected_failures, len(cmdline.command.parse_errors))
      badtests += 1

  print "%d failed out of %d tests" % (badtests, len(test_lines))

  cmdline.ParseCommandLine(["help", "test"])
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/chromium/tools/site_compare/commands/__init__.py b/chromium/tools/site_compare/commands/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/chromium/tools/site_compare/commands/__init__.py
diff --git a/chromium/tools/site_compare/commands/compare2.py b/chromium/tools/site_compare/commands/compare2.py
new file mode 100644
index 00000000000..7e15559badb
--- /dev/null
+++ b/chromium/tools/site_compare/commands/compare2.py
@@ -0,0 +1,170 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""SiteCompare command to invoke the same page in two versions of a browser.
+
+Does the easiest compatibility test: equality comparison between two different
+versions of the same browser. Invoked with a series of command line options
+that specify which URLs to check, which browser to use, where to store results,
+etc.
+"""
+
+import os # Functions for walking the directory tree
+import tempfile # Get a temporary directory to hold intermediates
+
+import command_line
+import drivers # Functions for driving keyboard/mouse/windows, OS-specific
+import operators # Functions that, given two bitmaps as input, produce
+ # output depending on the performance of an operation
+import scrapers # Functions that know how to capture a render from
+ # particular browsers
+
+
+def CreateCommand(cmdline):
+ """Inserts the command and arguments into a command line for parsing."""
+ cmd = cmdline.AddCommand(
+ ["compare2"],
+ "Compares the output of two browsers on the same URL or list of URLs",
+ ValidateCompare2,
+ ExecuteCompare2)
+
+ cmd.AddArgument(
+ ["-b1", "--browser1"], "Full path to first browser's executable",
+ type="readfile", metaname="PATH", required=True)
+ cmd.AddArgument(
+ ["-b2", "--browser2"], "Full path to second browser's executable",
+ type="readfile", metaname="PATH", required=True)
+ cmd.AddArgument(
+ ["-b", "--browser"], "Which browser to use", type="string",
+ default="chrome")
+ cmd.AddArgument(
+ ["-b1v", "--browser1ver"], "Version of first browser", metaname="VERSION")
+ cmd.AddArgument(
+ ["-b2v", "--browser2ver"], "Version of second browser", metaname="VERSION")
+ cmd.AddArgument(
+ ["-b1n", "--browser1name"], "Optional name for first browser (used in "
+ "directory to hold intermediate files)", metaname="NAME")
+ cmd.AddArgument(
+ ["-b2n", "--browser2name"], "Optional name for second browser (used in "
+ "directory to hold intermediate files)", metaname="NAME")
+ cmd.AddArgument(
+ ["-o", "--outdir"], "Directory to store scrape files", metaname="DIR")
+ cmd.AddArgument(
+ ["-u", "--url"], "URL to compare")
+ cmd.AddArgument(
+ ["-l", "--list"], "List of URLs to compare", type="readfile")
+ cmd.AddMutualExclusion(["--url", "--list"])
+ cmd.AddArgument(
+ ["-s", "--startline"], "First line of URL list", type="int")
+ cmd.AddArgument(
+ ["-e", "--endline"], "Last line of URL list (exclusive)", type="int")
+ cmd.AddArgument(
+ ["-c", "--count"], "Number of lines of URL file to use", type="int")
+ cmd.AddDependency("--startline", "--list")
+ cmd.AddRequiredGroup(["--url", "--list"])
+ cmd.AddDependency("--endline", "--list")
+ cmd.AddDependency("--count", "--list")
+ cmd.AddMutualExclusion(["--count", "--endline"])
+ cmd.AddDependency("--count", "--startline")
+ cmd.AddArgument(
+ ["-t", "--timeout"], "Amount of time (seconds) to wait for browser to "
+ "finish loading",
+ type="int", default=60)
+ cmd.AddArgument(
+ ["-log", "--logfile"], "File to write output", type="string", required=True)
+ cmd.AddArgument(
+ ["-sz", "--size"], "Browser window size", default=(800, 600), type="coords")
+ cmd.AddArgument(
+ ["-m", "--maskdir"], "Path that holds masks to use for comparison")
+ cmd.AddArgument(
+ ["-d", "--diffdir"], "Path to hold the difference of comparisons that fail")
+
+
+def ValidateCompare2(command):
+ """Validate the arguments to compare2. Raises ParseError if failed."""
+ executables = [".exe", ".com", ".bat"]
+ if (os.path.splitext(command["--browser1"])[1].lower() not in executables or
+ os.path.splitext(command["--browser2"])[1].lower() not in executables):
+ raise command_line.ParseError("Browser filename must be an executable")
+
+
+def ExecuteCompare2(command):
+ """Executes the Compare2 command."""
+ if command["--url"]:
+ url_list = [command["--url"]]
+ else:
+ startline = command["--startline"]
+ if command["--count"]:
+ endline = startline+command["--count"]
+ else:
+ endline = command["--endline"]
+ url_list = [url.strip() for url in
+ open(command["--list"], "r").readlines()[startline:endline]]
+
+ log_file = open(command["--logfile"], "w")
+
+ outdir = command["--outdir"]
+ if not outdir: outdir = tempfile.gettempdir()
+
+ scrape_info_list = []
+
+ class ScrapeInfo(object):
+ """Helper class to hold information about a scrape."""
+ __slots__ = ["browser_path", "scraper", "outdir", "result"]
+
+ for index in xrange(1, 3):
+ scrape_info = ScrapeInfo()
+ scrape_info.browser_path = command["--browser%d" % index]
+ scrape_info.scraper = scrapers.GetScraper(
+ (command["--browser"], command["--browser%dver" % index]))
+
+ if command["--browser%dname" % index]:
+ scrape_info.outdir = os.path.join(outdir,
+ command["--browser%dname" % index])
+ else:
+ scrape_info.outdir = os.path.join(outdir, str(index))
+
+ drivers.windowing.PreparePath(scrape_info.outdir)
+ scrape_info_list.append(scrape_info)
+
+ compare = operators.GetOperator("equals_with_mask")
+
+ for url in url_list:
+ success = True
+
+ for scrape_info in scrape_info_list:
+ scrape_info.result = scrape_info.scraper.Scrape(
+ [url], scrape_info.outdir, command["--size"], (0, 0),
+ command["--timeout"], path=scrape_info.browser_path)
+
+ if not scrape_info.result:
+ scrape_info.result = "success"
+ else:
+ success = False
+
+ result = "unknown"
+
+ if success:
+ result = "equal"
+
+ file1 = drivers.windowing.URLtoFilename(
+ url, scrape_info_list[0].outdir, ".bmp")
+ file2 = drivers.windowing.URLtoFilename(
+ url, scrape_info_list[1].outdir, ".bmp")
+
+ comparison_result = compare.Compare(file1, file2,
+ maskdir=command["--maskdir"])
+
+ if comparison_result is not None:
+ result = "not-equal"
+
+ if command["--diffdir"]:
+ comparison_result[1].save(
+ drivers.windowing.URLtoFilename(url, command["--diffdir"], ".bmp"))
+
+ # TODO(jhaas): maybe use the logging module rather than raw file writes
+ log_file.write("%s %s %s %s\n" % (url,
+ scrape_info_list[0].result,
+ scrape_info_list[1].result,
+ result))
diff --git a/chromium/tools/site_compare/commands/maskmaker.py b/chromium/tools/site_compare/commands/maskmaker.py
new file mode 100644
index 00000000000..8aeefcbf005
--- /dev/null
+++ b/chromium/tools/site_compare/commands/maskmaker.py
@@ -0,0 +1,272 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Component for automatically creating masks of changing areas of a website.
+
+Works by repeated invokation of a browser and scraping of the resulting page.
+Areas that differ will be added to the auto-generated mask. The mask generator
+considers the mask complete when further scrapes fail to produce any differences
+in the mask.
+"""
+
+import os # Functions for walking the directory tree
+import tempfile # Get a temporary directory to hold intermediates
+import time # Used for sleep() and naming masks by time
+
+import command_line
+import drivers
+from PIL import Image
+from PIL import ImageChops
+import scrapers
+
+
+def CreateCommand(cmdline):
+ """Inserts the command and arguments into a command line for parsing."""
+ cmd = cmdline.AddCommand(
+ ["maskmaker"],
+ "Automatically generates a mask from a list of URLs",
+ ValidateMaskmaker,
+ ExecuteMaskmaker)
+
+ cmd.AddArgument(
+ ["-bp", "--browserpath"], "Full path to browser's executable",
+ type="readfile", metaname="PATH")
+ cmd.AddArgument(
+ ["-b", "--browser"], "Which browser to use", type="string",
+ default="chrome")
+ cmd.AddArgument(
+ ["-bv", "--browserver"], "Version of the browser", metaname="VERSION")
+ cmd.AddArgument(
+ ["-o", "--outdir"], "Directory to store generated masks", metaname="DIR",
+ required=True)
+ cmd.AddArgument(
+ ["-u", "--url"], "URL to compare")
+ cmd.AddArgument(
+ ["-l", "--list"], "List of URLs to compare", type="readfile")
+ cmd.AddMutualExclusion(["--url", "--list"])
+ cmd.AddArgument(
+ ["-s", "--startline"], "First line of URL list", type="int")
+ cmd.AddArgument(
+ ["-e", "--endline"], "Last line of URL list (exclusive)", type="int")
+ cmd.AddArgument(
+ ["-c", "--count"], "Number of lines of URL file to use", type="int")
+ cmd.AddDependency("--startline", "--list")
+ cmd.AddRequiredGroup(["--url", "--list"])
+ cmd.AddDependency("--endline", "--list")
+ cmd.AddDependency("--count", "--list")
+ cmd.AddMutualExclusion(["--count", "--endline"])
+ cmd.AddDependency("--count", "--startline")
+ cmd.AddArgument(
+ ["-t", "--timeout"], "Amount of time (seconds) to wait for browser to "
+ "finish loading",
+ type="int", default=60)
+ cmd.AddArgument(
+ ["-w", "--wait"],
+ "Amount of time (in seconds) to wait between successive scrapes",
+ type="int", default=60)
+ cmd.AddArgument(
+ ["-sc", "--scrapes"],
+ "Number of successive scrapes which must result in no change to a mask "
+ "before mask creation is considered complete", type="int", default=10)
+ cmd.AddArgument(
+ ["-sz", "--size"], "Browser window size", default=(800, 600), type="coords")
+ cmd.AddArgument(["-sd", "--scrapedir"], "Directory to store scrapes")
+ cmd.AddArgument(
+ ["-gu", "--giveup"],
+ "Number of times to scrape before giving up", type="int", default=50)
+ cmd.AddArgument(
+ ["-th", "--threshhold"],
+ "Percentage of different pixels (0-100) above which the scrape will be"
+ "discarded and the mask not updated.", type="int", default=100)
+ cmd.AddArgument(
+ ["--er", "--errors"],
+ "Number of times a scrape can fail before giving up on the URL.",
+ type="int", default=1)
+
+
+def ValidateMaskmaker(command):
+ """Validate the arguments to maskmaker. Raises ParseError if failed."""
+ executables = [".exe", ".com", ".bat"]
+ if command["--browserpath"]:
+ if os.path.splitext(command["--browserpath"])[1].lower() not in executables:
+ raise command_line.ParseError("Browser filename must be an executable")
+
+
+def ExecuteMaskmaker(command):
+ """Performs automatic mask generation."""
+
+ # Get the list of URLs to generate masks for
+ class MaskmakerURL(object):
+ """Helper class for holding information about a URL passed to maskmaker."""
+ __slots__ = ['url', 'consecutive_successes', 'errors']
+ def __init__(self, url):
+ self.url = url
+ self.consecutive_successes = 0
+ self.errors = 0
+
+ if command["--url"]:
+ url_list = [MaskmakerURL(command["--url"])]
+ else:
+ startline = command["--startline"]
+ if command["--count"]:
+ endline = startline+command["--count"]
+ else:
+ endline = command["--endline"]
+ url_list = [MaskmakerURL(url.strip()) for url in
+ open(command["--list"], "r").readlines()[startline:endline]]
+
+ complete_list = []
+ error_list = []
+
+ outdir = command["--outdir"]
+ scrapes = command["--scrapes"]
+ errors = command["--errors"]
+ size = command["--size"]
+ scrape_pass = 0
+
+ scrapedir = command["--scrapedir"]
+ if not scrapedir: scrapedir = tempfile.gettempdir()
+
+ # Get the scraper
+ scraper = scrapers.GetScraper((command["--browser"], command["--browserver"]))
+
+ # Repeatedly iterate through the list of URLs until either every URL has
+ # a successful mask or too many errors, or we've exceeded the giveup limit
+ while url_list and scrape_pass < command["--giveup"]:
+ # Scrape each URL
+ for url in url_list:
+ print "Processing %r..." % url.url
+ mask_filename = drivers.windowing.URLtoFilename(url.url, outdir, ".bmp")
+
+ # Load the existing mask. This is in a loop so we can try to recover
+ # from error conditions
+ while True:
+ try:
+ mask = Image.open(mask_filename)
+ if mask.size != size:
+ print " %r already exists and is the wrong size! (%r vs %r)" % (
+ mask_filename, mask.size, size)
+ mask_filename = "%s_%r%s" % (
+ mask_filename[:-4], size, mask_filename[-4:])
+ print " Trying again as %r..." % mask_filename
+ continue
+ break
+ except IOError:
+ print " %r does not exist, creating" % mask_filename
+ mask = Image.new("1", size, 1)
+ mask.save(mask_filename)
+
+ # Find the stored scrape path
+ mask_scrape_dir = os.path.join(
+ scrapedir, os.path.splitext(os.path.basename(mask_filename))[0])
+ drivers.windowing.PreparePath(mask_scrape_dir)
+
+ # Find the baseline image
+ mask_scrapes = os.listdir(mask_scrape_dir)
+ mask_scrapes.sort()
+
+ if not mask_scrapes:
+ print " No baseline image found, mask will not be updated"
+ baseline = None
+ else:
+ baseline = Image.open(os.path.join(mask_scrape_dir, mask_scrapes[0]))
+
+ mask_scrape_filename = os.path.join(mask_scrape_dir,
+ time.strftime("%y%m%d-%H%M%S.bmp"))
+
+ # Do the scrape
+ result = scraper.Scrape(
+ [url.url], mask_scrape_dir, size, (0, 0),
+ command["--timeout"], path=command["--browserpath"],
+ filename=mask_scrape_filename)
+
+ if result:
+ # Return value other than None means an error
+ print " Scrape failed with error '%r'" % result
+ url.errors += 1
+ if url.errors >= errors:
+ print " ** Exceeded maximum error count for this URL, giving up"
+ continue
+
+ # Load the new scrape
+ scrape = Image.open(mask_scrape_filename)
+
+ # Calculate the difference between the new scrape and the baseline,
+ # subject to the current mask
+ if baseline:
+ diff = ImageChops.multiply(ImageChops.difference(scrape, baseline),
+ mask.convert(scrape.mode))
+
+ # If the difference is none, there's nothing to update
+ if max(diff.getextrema()) == (0, 0):
+ print " Scrape identical to baseline, no change in mask"
+ url.consecutive_successes += 1
+ if url.consecutive_successes >= scrapes:
+ print " ** No change for %r scrapes, done!" % scrapes
+ else:
+ # convert the difference to black and white, then change all
+ # black pixels (where the scrape and the baseline were identical)
+ # to white, all others (where the scrape and the baseline differed)
+ # to black.
+ #
+ # Since the below command is a little unclear, here's how it works.
+ # 1. convert("L") converts the RGB image to grayscale
+ # 2. point() maps grayscale values (or the individual channels)
+ # of an RGB image) to different ones. Because it operates on
+ # individual channels, the grayscale conversion from step 1
+ # is necessary.
+ # 3. The "1" second parameter to point() outputs the result as
+ # a monochrome bitmap. If the original RGB image were converted
+ # directly to monochrome, PIL would dither it.
+ diff = diff.convert("L").point([255]+[0]*255, "1")
+
+ # count the number of different pixels
+ diff_pixels = diff.getcolors()[0][0]
+
+ # is this too much?
+ diff_pixel_percent = diff_pixels * 100.0 / (mask.size[0]*mask.size[1])
+ if diff_pixel_percent > command["--threshhold"]:
+ print (" Scrape differed from baseline by %.2f percent, ignoring"
+ % diff_pixel_percent)
+ else:
+ print " Scrape differed in %d pixels, updating mask" % diff_pixels
+ mask = ImageChops.multiply(mask, diff)
+ mask.save(mask_filename)
+
+ # reset the number of consecutive "good" scrapes
+ url.consecutive_successes = 0
+
+ # Remove URLs whose mask is deemed done
+ complete_list.extend(
+ [url for url in url_list if url.consecutive_successes >= scrapes])
+ error_list.extend(
+ [url for url in url_list if url.errors >= errors])
+ url_list = [
+ url for url in url_list if
+ url.consecutive_successes < scrapes and
+ url.errors < errors]
+
+ scrape_pass += 1
+ print "**Done with scrape pass %d\n" % scrape_pass
+
+ if scrape_pass >= command["--giveup"]:
+ print "**Exceeded giveup threshhold. Giving up."
+ else:
+ print "Waiting %d seconds..." % command["--wait"]
+ time.sleep(command["--wait"])
+
+ print
+ print "*** MASKMAKER COMPLETE ***"
+ print "Summary report:"
+ print " %d masks successfully generated" % len(complete_list)
+ for url in complete_list:
+ print " ", url.url
+ print " %d masks failed with too many errors" % len(error_list)
+ for url in error_list:
+ print " ", url.url
+ if scrape_pass >= command["--giveup"]:
+ print (" %d masks were not completed before "
+ "reaching the giveup threshhold" % len(url_list))
+ for url in url_list:
+ print " ", url.url
diff --git a/chromium/tools/site_compare/commands/measure.py b/chromium/tools/site_compare/commands/measure.py
new file mode 100644
index 00000000000..2bd71f56034
--- /dev/null
+++ b/chromium/tools/site_compare/commands/measure.py
@@ -0,0 +1,52 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Command for measuring how long pages take to load in a browser.
+
+Prerequisites:
+ 1. The command_line package from tools/site_compare
+ 2. Either the IE BHO or Firefox extension (or both)
+
+Installation:
+ 1. Build the IE BHO, or call regsvr32 on a prebuilt binary
+ 2. Add a file called "measurepageloadtimeextension@google.com" to
+ the default Firefox profile directory under extensions, containing
+ the path to the Firefox extension root
+
+Invoke with the command line arguments as documented within
+the command line.
+"""
+
+import command_line
+import win32process
+
+from drivers import windowing
+from utils import browser_iterate
+
+def CreateCommand(cmdline):
+ """Inserts the command and arguments into a command line for parsing."""
+ cmd = cmdline.AddCommand(
+ ["measure"],
+ "Measures how long a series of URLs takes to load in one or more browsers.",
+ None,
+ ExecuteMeasure)
+
+ browser_iterate.SetupIterationCommandLine(cmd)
+ cmd.AddArgument(
+ ["-log", "--logfile"], "File to write output", type="string", required=True)
+
+
+def ExecuteMeasure(command):
+ """Executes the Measure command."""
+
+ def LogResult(url, proc, wnd, result):
+ """Write the result of the browse to the log file."""
+ log_file.write(result)
+
+ log_file = open(command["--logfile"], "w")
+
+ browser_iterate.Iterate(command, LogResult)
+
+ # Close the log file and return. We're done.
+ log_file.close()
diff --git a/chromium/tools/site_compare/commands/scrape.py b/chromium/tools/site_compare/commands/scrape.py
new file mode 100644
index 00000000000..8fee5a3b3ba
--- /dev/null
+++ b/chromium/tools/site_compare/commands/scrape.py
@@ -0,0 +1,59 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Command for scraping images from a URL or list of URLs.
+
+Prerequisites:
+ 1. The command_line package from tools/site_compare
+ 2. Either the IE BHO or Firefox extension (or both)
+
+Installation:
+ 1. Build the IE BHO, or call regsvr32 on a prebuilt binary
+ 2. Add a file called "measurepageloadtimeextension@google.com" to
+ the default Firefox profile directory under extensions, containing
+ the path to the Firefox extension root
+
+Invoke with the command line arguments as documented within
+the command line.
+"""
+
+import command_line
+
+from drivers import windowing
+from utils import browser_iterate
+
+def CreateCommand(cmdline):
+ """Inserts the command and arguments into a command line for parsing."""
+ cmd = cmdline.AddCommand(
+ ["scrape"],
+ "Scrapes an image from a URL or series of URLs.",
+ None,
+ ExecuteScrape)
+
+ browser_iterate.SetupIterationCommandLine(cmd)
+ cmd.AddArgument(
+ ["-log", "--logfile"], "File to write text output", type="string")
+ cmd.AddArgument(
+ ["-out", "--outdir"], "Directory to store scrapes", type="string", required=True)
+
+
+def ExecuteScrape(command):
+ """Executes the Scrape command."""
+
+ def ScrapeResult(url, proc, wnd, result):
+ """Capture and save the scrape."""
+ if log_file: log_file.write(result)
+
+ # Scrape the page
+ image = windowing.ScrapeWindow(wnd)
+ filename = windowing.URLtoFilename(url, command["--outdir"], ".bmp")
+ image.save(filename)
+
+ if command["--logfile"]: log_file = open(command["--logfile"], "w")
+ else: log_file = None
+
+ browser_iterate.Iterate(command, ScrapeResult)
+
+ # Close the log file and return. We're done.
+ if log_file: log_file.close()
diff --git a/chromium/tools/site_compare/commands/timeload.py b/chromium/tools/site_compare/commands/timeload.py
new file mode 100644
index 00000000000..f34ee1dfeb5
--- /dev/null
+++ b/chromium/tools/site_compare/commands/timeload.py
@@ -0,0 +1,144 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""SiteCompare command to time page loads
+
+Loads a series of URLs in a series of browsers (and browser versions)
+and measures how long the page takes to load in each. Outputs a
+comma-delimited file. The first line is "URL,[browser names", each
+additional line is a URL follored by comma-delimited times (in seconds),
+or the string "timeout" or "crashed".
+
+"""
+
+import os # Functions for walking the directory tree
+import tempfile # Get a temporary directory to hold intermediates
+
+import command_line
+import drivers # Functions for driving keyboard/mouse/windows, OS-specific
+import operators # Functions that, given two bitmaps as input, produce
+ # output depending on the performance of an operation
+import scrapers # Functions that know how to capture a render from
+ # particular browsers
+
+
+def CreateCommand(cmdline):
+ """Inserts the command and arguments into a command line for parsing."""
+ cmd = cmdline.AddCommand(
+ ["timeload"],
+ "Measures how long a series of URLs takes to load in one or more browsers.",
+ None,
+ ExecuteTimeLoad)
+
+ cmd.AddArgument(
+ ["-b", "--browsers"], "List of browsers to use. Comma-separated",
+ type="string", required=True)
+ cmd.AddArgument(
+ ["-bp", "--browserpaths"], "List of paths to browsers. Comma-separated",
+ type="string", required=False)
+ cmd.AddArgument(
+ ["-bv", "--browserversions"],
+ "List of versions of browsers. Comma-separated",
+ type="string", required=False)
+ cmd.AddArgument(
+ ["-u", "--url"], "URL to time")
+ cmd.AddArgument(
+ ["-l", "--list"], "List of URLs to time", type="readfile")
+ cmd.AddMutualExclusion(["--url", "--list"])
+ cmd.AddArgument(
+ ["-s", "--startline"], "First line of URL list", type="int")
+ cmd.AddArgument(
+ ["-e", "--endline"], "Last line of URL list (exclusive)", type="int")
+ cmd.AddArgument(
+ ["-c", "--count"], "Number of lines of URL file to use", type="int")
+ cmd.AddDependency("--startline", "--list")
+ cmd.AddRequiredGroup(["--url", "--list"])
+ cmd.AddDependency("--endline", "--list")
+ cmd.AddDependency("--count", "--list")
+ cmd.AddMutualExclusion(["--count", "--endline"])
+ cmd.AddDependency("--count", "--startline")
+ cmd.AddArgument(
+ ["-t", "--timeout"], "Amount of time (seconds) to wait for browser to "
+ "finish loading",
+ type="int", default=60)
+ cmd.AddArgument(
+ ["-log", "--logfile"], "File to write output", type="string", required=True)
+ cmd.AddArgument(
+ ["-sz", "--size"], "Browser window size", default=(800, 600), type="coords")
+
+
+def ExecuteTimeLoad(command):
+ """Executes the TimeLoad command."""
+ browsers = command["--browsers"].split(",")
+ num_browsers = len(browsers)
+
+ if command["--browserversions"]:
+ browser_versions = command["--browserversions"].split(",")
+ else:
+ browser_versions = [None] * num_browsers
+
+ if command["--browserpaths"]:
+ browser_paths = command["--browserpaths"].split(",")
+ else:
+ browser_paths = [None] * num_browsers
+
+ if len(browser_versions) != num_browsers:
+ raise ValueError(
+ "--browserversions must be same length as --browser_paths")
+ if len(browser_paths) != num_browsers:
+ raise ValueError(
+ "--browserversions must be same length as --browser_paths")
+
+ if [b for b in browsers if b not in ["chrome", "ie", "firefox"]]:
+ raise ValueError("unknown browsers: %r" % b)
+
+ scraper_list = []
+
+ for b in xrange(num_browsers):
+ version = browser_versions[b]
+ if not version: version = None
+
+ scraper = scrapers.GetScraper( (browsers[b], version) )
+ if not scraper:
+ raise ValueError("could not find scraper for (%r, %r)" %
+ (browsers[b], version))
+ scraper_list.append(scraper)
+
+ if command["--url"]:
+ url_list = [command["--url"]]
+ else:
+ startline = command["--startline"]
+ if command["--count"]:
+ endline = startline+command["--count"]
+ else:
+ endline = command["--endline"]
+ url_list = [url.strip() for url in
+ open(command["--list"], "r").readlines()[startline:endline]]
+
+ log_file = open(command["--logfile"], "w")
+
+ log_file.write("URL")
+ for b in xrange(num_browsers):
+ log_file.write(",%s" % browsers[b])
+
+ if browser_versions[b]: log_file.write(" %s" % browser_versions[b])
+ log_file.write("\n")
+
+ results = {}
+ for url in url_list:
+ results[url] = [None] * num_browsers
+
+ for b in xrange(num_browsers):
+ result = scraper_list[b].Time(url_list, command["--size"],
+ command["--timeout"],
+ path=browser_paths[b])
+
+ for (url, time) in result:
+ results[url][b] = time
+
+ # output the results
+ for url in url_list:
+ log_file.write(url)
+ for b in xrange(num_browsers):
+ log_file.write(",%r" % results[url][b])
diff --git a/chromium/tools/site_compare/drivers/__init__.py b/chromium/tools/site_compare/drivers/__init__.py
new file mode 100644
index 00000000000..9b46261b3bb
--- /dev/null
+++ b/chromium/tools/site_compare/drivers/__init__.py
@@ -0,0 +1,13 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Imports a set of drivers appropriate to the current OS."""
+
+import sys
+
+platform_dir = sys.platform
+
+keyboard = __import__(platform_dir+".keyboard", globals(), locals(), [''])
+mouse = __import__(platform_dir+".mouse", globals(), locals(), [''])
+windowing = __import__(platform_dir+".windowing", globals(), locals(), [''])
diff --git a/chromium/tools/site_compare/drivers/win32/__init__.py b/chromium/tools/site_compare/drivers/win32/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/chromium/tools/site_compare/drivers/win32/__init__.py
diff --git a/chromium/tools/site_compare/drivers/win32/keyboard.py b/chromium/tools/site_compare/drivers/win32/keyboard.py
new file mode 100755
index 00000000000..e3410e1ab77
--- /dev/null
+++ b/chromium/tools/site_compare/drivers/win32/keyboard.py
@@ -0,0 +1,201 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""SiteCompare module for simulating keyboard input.
+
+This module contains functions that can be used to simulate a user
+pressing keys on a keyboard. Support is provided for formatted strings
+including special characters to represent modifier keys like CTRL and ALT
+"""
+
+import time # for sleep
+import win32api # for keybd_event and VkKeyCode
+import win32con # Windows constants
+
+# TODO(jhaas): Ask the readability guys if this would be acceptable:
+#
+# from win32con import VK_SHIFT, VK_CONTROL, VK_MENU, VK_LWIN, KEYEVENTF_KEYUP
+#
+# This is a violation of the style guide but having win32con. everywhere
+# is just plain ugly, and win32con is a huge import for just a handful of
+# constants
+
+
+def PressKey(down, key):
+ """Presses or unpresses a key.
+
+ Uses keybd_event to simulate either depressing or releasing
+ a key
+
+ Args:
+ down: Whether the key is to be pressed or released
+ key: Virtual key code of key to press or release
+ """
+
+ # keybd_event injects key events at a very low level (it's the
+ # Windows API keyboard device drivers call) so this is a very
+ # reliable way of simulating user input
+ win32api.keybd_event(key, 0, (not down) * win32con.KEYEVENTF_KEYUP)
+
+
+def TypeKey(key, keystroke_time=0):
+ """Simulate a keypress of a virtual key.
+
+ Args:
+ key: which key to press
+ keystroke_time: length of time (in seconds) to "hold down" the key
+ Note that zero works just fine
+
+ Returns:
+ None
+ """
+
+ # This just wraps a pair of PressKey calls with an intervening delay
+ PressKey(True, key)
+ time.sleep(keystroke_time)
+ PressKey(False, key)
+
+
+def TypeString(string_to_type,
+ use_modifiers=False,
+ keystroke_time=0,
+ time_between_keystrokes=0):
+ """Simulate typing a string on the keyboard.
+
+ Args:
+ string_to_type: the string to print
+ use_modifiers: specifies whether the following modifier characters
+ should be active:
+ {abc}: type characters with ALT held down
+ [abc]: type characters with CTRL held down
+ \ escapes {}[] and treats these values as literal
+ standard escape sequences are valid even if use_modifiers is false
+ \p is "pause" for one second, useful when driving menus
+ \1-\9 is F-key, \0 is F10
+
+ TODO(jhaas): support for explicit control of SHIFT, support for
+ nonprintable keys (F-keys, ESC, arrow keys, etc),
+ support for explicit control of left vs. right ALT or SHIFT,
+ support for Windows key
+
+ keystroke_time: length of time (in secondes) to "hold down" the key
+ time_between_keystrokes: length of time (seconds) to pause between keys
+
+ Returns:
+ None
+ """
+
+ shift_held = win32api.GetAsyncKeyState(win32con.VK_SHIFT ) < 0
+ ctrl_held = win32api.GetAsyncKeyState(win32con.VK_CONTROL) < 0
+ alt_held = win32api.GetAsyncKeyState(win32con.VK_MENU ) < 0
+
+ next_escaped = False
+ escape_chars = {
+ 'a': '\a', 'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r', 't': '\t', 'v': '\v'}
+
+ for char in string_to_type:
+ vk = None
+ handled = False
+
+ # Check to see if this is the start or end of a modified block (that is,
+ # {abc} for ALT-modified keys or [abc] for CTRL-modified keys
+ if use_modifiers and not next_escaped:
+ handled = True
+ if char == "{" and not alt_held:
+ alt_held = True
+ PressKey(True, win32con.VK_MENU)
+ elif char == "}" and alt_held:
+ alt_held = False
+ PressKey(False, win32con.VK_MENU)
+ elif char == "[" and not ctrl_held:
+ ctrl_held = True
+ PressKey(True, win32con.VK_CONTROL)
+ elif char == "]" and ctrl_held:
+ ctrl_held = False
+ PressKey(False, win32con.VK_CONTROL)
+ else:
+ handled = False
+
+ # If this is an explicitly-escaped character, replace it with the
+ # appropriate code
+ if next_escaped and char in escape_chars: char = escape_chars[char]
+
+ # If this is \p, pause for one second.
+ if next_escaped and char == 'p':
+ time.sleep(1)
+ next_escaped = False
+ handled = True
+
+ # If this is \(d), press F key
+ if next_escaped and char.isdigit():
+ fkey = int(char)
+ if not fkey: fkey = 10
+ next_escaped = False
+ vk = win32con.VK_F1 + fkey - 1
+
+ # If this is the backslash, the next character is escaped
+ if not next_escaped and char == "\\":
+ next_escaped = True
+ handled = True
+
+ # If we make it here, it's not a special character, or it's an
+ # escaped special character which should be treated as a literal
+ if not handled:
+ next_escaped = False
+ if not vk: vk = win32api.VkKeyScan(char)
+
+ # VkKeyScan() returns the scan code in the low byte. The upper
+ # byte specifies modifiers necessary to produce the given character
+ # from the given scan code. The only one we're concerned with at the
+ # moment is Shift. Determine the shift state and compare it to the
+ # current state... if it differs, press or release the shift key.
+ new_shift_held = bool(vk & (1<<8))
+
+ if new_shift_held != shift_held:
+ PressKey(new_shift_held, win32con.VK_SHIFT)
+ shift_held = new_shift_held
+
+ # Type the key with the specified length, then wait the specified delay
+ TypeKey(vk & 0xFF, keystroke_time)
+ time.sleep(time_between_keystrokes)
+
+ # Release the modifier keys, if held
+ if shift_held: PressKey(False, win32con.VK_SHIFT)
+ if ctrl_held: PressKey(False, win32con.VK_CONTROL)
+ if alt_held: PressKey(False, win32con.VK_MENU)
+
+
+def main():
+ # We're being invoked rather than imported. Let's do some tests
+
+ # Press command-R to bring up the Run dialog
+ PressKey(True, win32con.VK_LWIN)
+ TypeKey(ord('R'))
+ PressKey(False, win32con.VK_LWIN)
+
+ # Wait a sec to make sure it comes up
+ time.sleep(1)
+
+ # Invoke Notepad through the Run dialog
+ TypeString("wordpad\n")
+
+ # Wait another sec, then start typing
+ time.sleep(1)
+ TypeString("This is a test of SiteCompare's Keyboard.py module.\n\n")
+ TypeString("There should be a blank line above and below this one.\n\n")
+ TypeString("This line has control characters to make "
+ "[b]boldface text[b] and [i]italic text[i] and normal text.\n\n",
+ use_modifiers=True)
+ TypeString(r"This line should be typed with a visible delay between "
+ "characters. When it ends, there should be a 3-second pause, "
+ "then the menu will select File/Exit, then another 3-second "
+ "pause, then No to exit without saving. Ready?\p\p\p{f}x\p\p\pn",
+ use_modifiers=True,
+ keystroke_time=0.05,
+ time_between_keystrokes=0.05)
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/chromium/tools/site_compare/drivers/win32/mouse.py b/chromium/tools/site_compare/drivers/win32/mouse.py
new file mode 100755
index 00000000000..0096af987ca
--- /dev/null
+++ b/chromium/tools/site_compare/drivers/win32/mouse.py
@@ -0,0 +1,222 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""SiteCompare module for simulating mouse input.
+
+This module contains functions that can be used to simulate a user
+navigating using a pointing device. This includes mouse movement,
+clicking with any button, and dragging.
+"""
+
+import time # for sleep
+
+import win32api # for mouse_event
+import win32con # Windows constants
+import win32gui # for window functions
+
+
+def ScreenToMouse(pt):
+  """Convert a value in screen coordinates to mouse coordinates.
+
+  Mouse coordinates are specified as a percentage of screen dimensions,
+  normalized to 16 bits. 0 represents the far left/top of the screen,
+  65535 represents the far right/bottom. This function assumes that
+  the size of the screen is fixed at module load time and does not change
+
+  Args:
+    pt: the (x, y) point in screen coordinates to convert
+
+  Returns:
+    the converted (x, y) point in 0..65535 mouse coordinates
+  """
+
+  # Initialize the screen dimensions on first execution. Note that this
+  # function assumes that the screen dimensions do not change during run.
+  # The cached value lives on the function object itself (seeded below).
+  if not ScreenToMouse._SCREEN_DIMENSIONS:
+    desktop = win32gui.GetClientRect(win32gui.GetDesktopWindow())
+    ScreenToMouse._SCREEN_DIMENSIONS = (desktop[2], desktop[3])
+
+  # Under Python 2 this `/` is integer division, so the components are ints.
+  return ((65535 * pt[0]) / ScreenToMouse._SCREEN_DIMENSIONS[0],
+          (65535 * pt[1]) / ScreenToMouse._SCREEN_DIMENSIONS[1])
+
+# Lazily-filled cache slot for the desktop's (width, height); see above.
+ScreenToMouse._SCREEN_DIMENSIONS = None
+
+
+def PressButton(down, button='left'):
+ """Simulate a mouse button press or release at the current mouse location.
+
+ Args:
+ down: whether the button is pressed or released
+ button: which button is pressed
+
+ Returns:
+ None
+ """
+
+ # Put the mouse_event flags in a convenient dictionary by button
+ flags = {
+ 'left': (win32con.MOUSEEVENTF_LEFTUP, win32con.MOUSEEVENTF_LEFTDOWN),
+ 'middle': (win32con.MOUSEEVENTF_MIDDLEUP, win32con.MOUSEEVENTF_MIDDLEDOWN),
+ 'right': (win32con.MOUSEEVENTF_RIGHTUP, win32con.MOUSEEVENTF_RIGHTDOWN)
+ }
+
+ # hit the button
+ win32api.mouse_event(flags[button][down], 0, 0)
+
+
+def ClickButton(button='left', click_time=0):
+ """Press and release a mouse button at the current mouse location.
+
+ Args:
+ button: which button to click
+ click_time: duration between press and release
+
+ Returns:
+ None
+ """
+ PressButton(True, button)
+ time.sleep(click_time)
+ PressButton(False, button)
+
+
+def DoubleClickButton(button='left', click_time=0, time_between_clicks=0):
+ """Double-click a mouse button at the current mouse location.
+
+ Args:
+ button: which button to click
+ click_time: duration between press and release
+ time_between_clicks: time to pause between clicks
+
+ Returns:
+ None
+ """
+ ClickButton(button, click_time)
+ time.sleep(time_between_clicks)
+ ClickButton(button, click_time)
+
+
+def MoveToLocation(pos, duration=0, tick=0.01):
+  """Move the mouse cursor to a specified location, taking the specified time.
+
+  The move is animated as a series of small absolute-position mouse
+  events, one every `tick` seconds, so the pointer glides rather than
+  jumps when duration > 0.
+
+  Args:
+    pos: position (in screen coordinates) to move to
+    duration: amount of time the move should take
+    tick: amount of time between successive moves of the mouse
+
+  Returns:
+    None
+  """
+  # calculate the number of moves to reach the destination
+  num_steps = (duration/tick)+1
+
+  # get the current and final mouse position in mouse coords
+  current_location = ScreenToMouse(win32gui.GetCursorPos())
+  end_location = ScreenToMouse(pos)
+
+  # Calculate the per-tick step size along each axis
+  step_size = ((end_location[0]-current_location[0])/num_steps,
+               (end_location[1]-current_location[1])/num_steps)
+  step = 0
+
+  while step < num_steps:
+    # Move the mouse one step
+    current_location = (current_location[0]+step_size[0],
+                        current_location[1]+step_size[1])
+
+    # Coerce the coords to int to avoid a warning from pywin32
+    win32api.mouse_event(
+      win32con.MOUSEEVENTF_MOVE|win32con.MOUSEEVENTF_ABSOLUTE,
+      int(current_location[0]), int(current_location[1]))
+
+    step += 1
+    time.sleep(tick)
+
+
+def ClickAtLocation(pos, button='left', click_time=0):
+ """Simulate a mouse click in a particular location, in screen coordinates.
+
+ Args:
+ pos: position in screen coordinates (x,y)
+ button: which button to click
+ click_time: duration of the click
+
+ Returns:
+ None
+ """
+ MoveToLocation(pos)
+ ClickButton(button, click_time)
+
+
+def ClickInWindow(hwnd, offset=None, button='left', click_time=0):
+ """Simulate a user mouse click in the center of a window.
+
+ Args:
+ hwnd: handle of the window to click in
+ offset: where to click, defaults to dead center
+ button: which button to click
+ click_time: duration of the click
+
+ Returns:
+ Nothing
+ """
+
+ rect = win32gui.GetClientRect(hwnd)
+ if offset is None: offset = (rect[2]/2, rect[3]/2)
+
+ # get the screen coordinates of the window's center
+ pos = win32gui.ClientToScreen(hwnd, offset)
+
+ ClickAtLocation(pos, button, click_time)
+
+
+def DoubleClickInWindow(
+ hwnd, offset=None, button='left', click_time=0, time_between_clicks=0.1):
+ """Simulate a user mouse double click in the center of a window.
+
+ Args:
+ hwnd: handle of the window to click in
+ offset: where to click, defaults to dead center
+ button: which button to click
+ click_time: duration of the clicks
+ time_between_clicks: length of time to pause between clicks
+
+ Returns:
+ Nothing
+ """
+ ClickInWindow(hwnd, offset, button, click_time)
+ time.sleep(time_between_clicks)
+ ClickInWindow(hwnd, offset, button, click_time)
+
+
+def main():
+ # We're being invoked rather than imported. Let's do some tests
+
+ screen_size = win32gui.GetClientRect(win32gui.GetDesktopWindow())
+ screen_size = (screen_size[2], screen_size[3])
+
+ # move the mouse (instantly) to the upper right corner
+ MoveToLocation((screen_size[0], 0))
+
+ # move the mouse (over five seconds) to the lower left corner
+ MoveToLocation((0, screen_size[1]), 5)
+
+ # click the left mouse button. This will open up the Start menu
+ # if the taskbar is at the bottom
+
+ ClickButton()
+
+ # wait a bit, then click the right button to open the context menu
+ time.sleep(3)
+ ClickButton('right')
+
+ # move the mouse away and then click the left button to dismiss the
+ # context menu
+ MoveToLocation((screen_size[0]/2, screen_size[1]/2), 3)
+ MoveToLocation((0, 0), 3)
+ ClickButton()
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/chromium/tools/site_compare/drivers/win32/windowing.py b/chromium/tools/site_compare/drivers/win32/windowing.py
new file mode 100755
index 00000000000..47d63f02272
--- /dev/null
+++ b/chromium/tools/site_compare/drivers/win32/windowing.py
@@ -0,0 +1,366 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""SiteCompare module for invoking, locating, and manipulating windows.
+
+This module is a catch-all wrapper for operating system UI functionality
+that doesn't belong in other modules. It contains functions for finding
+particular windows, scraping their contents, and invoking processes to
+create them.
+"""
+
+import os
+import string
+import time
+
+import PIL.ImageGrab
+import pywintypes
+import win32event
+import win32gui
+import win32process
+
+
+def FindChildWindows(hwnd, path):
+  """Find a set of windows through a path specification.
+
+  Args:
+    hwnd: Handle of the parent window
+    path: Path to the window to find. Has the following form:
+      "foo/bar/baz|foobar/|foobarbaz"
+      The slashes specify the "path" to the child window.
+      The text is the window class, a pipe (if present) is a title.
+      * is a wildcard and will find all child windows at that level
+
+  Returns:
+    A list of the windows that were found (matches for the final
+    path segment only)
+  """
+  windows_to_check = [hwnd]
+
+  # The strategy will be to take windows_to_check and use it
+  # to find a list of windows that match the next specification
+  # in the path, then repeat with the list of found windows as the
+  # new list of windows to check
+  for segment in path.split("/"):
+    windows_found = []
+    check_values = segment.split("|")
+
+    # check_values is now a list with the first element being
+    # the window class, the second being the window caption.
+    # If the class is absent (or wildcarded) set it to None,
+    # which FindWindowEx treats as "match any class"
+    if check_values[0] == "*" or not check_values[0]: check_values[0] = None
+
+    # If the window caption is also absent, force it to None as well
+    if len(check_values) == 1: check_values.append(None)
+
+    # Loop through the list of windows to check
+    for window_check in windows_to_check:
+      window_found = None
+      while window_found != 0: # lint complains, but 0 != None
+        # First FindWindowEx call passes 0 (no previous sibling)
+        if window_found is None: window_found = 0
+        try:
+          # Look for the next sibling (or first sibling if window_found is 0)
+          # of window_check with the specified caption and/or class
+          window_found = win32gui.FindWindowEx(
+            window_check, window_found, check_values[0], check_values[1])
+        except pywintypes.error, e:
+          # FindWindowEx() raises error 2 if not found; treat that as
+          # end-of-enumeration rather than a failure
+          if e[0] == 2:
+            window_found = 0
+          else:
+            raise e
+
+        # If FindWindowEx struck gold, add to our list of windows found
+        if window_found: windows_found.append(window_found)
+
+    # The windows we found become the windows to check for the next segment
+    windows_to_check = windows_found
+
+  return windows_found
+
+
+def FindChildWindow(hwnd, path):
+ """Find a window through a path specification.
+
+ This method is a simple wrapper for FindChildWindows() for the
+ case (the majority case) where you expect to find a single window
+
+ Args:
+ hwnd: Handle of the parent window
+ path: Path to the window to find. See FindChildWindows()
+
+ Returns:
+ The window that was found
+ """
+ return FindChildWindows(hwnd, path)[0]
+
+
+def ScrapeWindow(hwnd, rect=None):
+ """Scrape a visible window and return its contents as a bitmap.
+
+ Args:
+ hwnd: handle of the window to scrape
+ rect: rectangle to scrape in client coords, defaults to the whole thing
+ If specified, it's a 4-tuple of (left, top, right, bottom)
+
+ Returns:
+ An Image containing the scraped data
+ """
+ # Activate the window
+ SetForegroundWindow(hwnd)
+
+ # If no rectangle was specified, use the fill client rectangle
+ if not rect: rect = win32gui.GetClientRect(hwnd)
+
+ upper_left = win32gui.ClientToScreen(hwnd, (rect[0], rect[1]))
+ lower_right = win32gui.ClientToScreen(hwnd, (rect[2], rect[3]))
+ rect = upper_left+lower_right
+
+ return PIL.ImageGrab.grab(rect)
+
+
+def SetForegroundWindow(hwnd):
+  """Bring a window to the foreground (wraps win32gui.SetForegroundWindow)."""
+  win32gui.SetForegroundWindow(hwnd)
+
+
+def InvokeAndWait(path, cmdline="", timeout=10, tick=1.):
+  """Invoke an application and wait for it to bring up a window.
+
+  Args:
+    path: full path to the executable to invoke
+    cmdline: command line to pass to executable
+    timeout: how long (in seconds) to wait before giving up
+    tick: length of time to wait between checks
+
+  Returns:
+    A tuple of handles to the process and the application's window,
+    or (None, None) if it timed out waiting for the process
+  """
+
+  def EnumWindowProc(hwnd, ret):
+    """Internal enumeration func, checks for visibility and proper PID."""
+    if win32gui.IsWindowVisible(hwnd): # don't bother even checking hidden wnds
+      pid = win32process.GetWindowThreadProcessId(hwnd)[1]
+      if pid == ret[0]:
+        ret[1] = hwnd
+        return 0 # 0 means stop enumeration
+    return 1 # 1 means continue enumeration
+
+  # We don't need to change anything about the startupinfo structure
+  # (the default is quite sufficient) but we need to create it just the
+  # same.
+  sinfo = win32process.STARTUPINFO()
+
+  proc = win32process.CreateProcess(
+    path, # path to new process's executable
+    cmdline, # application's command line
+    None, # process security attributes (default)
+    None, # thread security attributes (default)
+    False, # inherit parent's handles
+    0, # creation flags
+    None, # environment variables
+    None, # directory
+    sinfo) # default startup info
+
+  # The code below uses proc[2] as the PID and proc[0] as the process
+  # handle; that matches pywin32's (hProcess, hThread, pid, tid) return
+  # ordering -- NOTE(review): the original comment claimed (prochandle,
+  # pid, threadhandle, tid); verify against the pywin32 docs.
+  pid = proc[2]
+
+  # Enumeration APIs can take an arbitrary integer, usually a pointer,
+  # to be passed to the enumeration function. We'll pass a pointer to
+  # a structure containing the PID we're looking for, and an empty out
+  # parameter to hold the found window ID
+  ret = [pid, None]
+
+  tries_until_timeout = timeout/tick
+  num_tries = 0
+
+  # Enumerate top-level windows, look for one with our PID
+  while num_tries < tries_until_timeout and ret[1] is None:
+    try:
+      win32gui.EnumWindows(EnumWindowProc, ret)
+    except pywintypes.error, e:
+      # error 0 isn't an error, it just meant the enumeration was
+      # terminated early (our callback returned 0 on a match)
+      if e[0]: raise e
+
+    time.sleep(tick)
+    num_tries += 1
+
+  # TODO(jhaas): Should we throw an exception if we timeout? Or is returning
+  # a window ID of None sufficient?
+  return (proc[0], ret[1])
+
+
+def WaitForProcessExit(proc, timeout=None):
+ """Waits for a given process to terminate.
+
+ Args:
+ proc: handle to process
+ timeout: timeout (in seconds). None = wait indefinitely
+
+ Returns:
+ True if process ended, False if timed out
+ """
+ if timeout is None:
+ timeout = win32event.INFINITE
+ else:
+ # convert sec to msec
+ timeout *= 1000
+
+ return (win32event.WaitForSingleObject(proc, timeout) ==
+ win32event.WAIT_OBJECT_0)
+
+
+def WaitForThrobber(hwnd, rect=None, timeout=20, tick=0.1, done=10):
+  """Wait for a browser's "throbber" (loading animation) to complete.
+
+  Polls a screen region every `tick` seconds and considers loading done
+  once the region's pixels have been unchanged for `done` seconds.
+
+  Args:
+    hwnd: window containing the throbber
+    rect: rectangle of the throbber, in client coords. If None, whole window
+    timeout: if the throbber is still throbbing after this long, give up
+    done: how long the throbber must be unmoving to be considered done
+    tick: how often to check the throbber
+
+  Returns:
+    Number of seconds waited, -1 if timed out
+  """
+  if not rect: rect = win32gui.GetClientRect(hwnd)
+
+  # last_throbber will hold the results of the preceding scrape;
+  # we'll compare it against the current scrape to see if we're throbbing
+  last_throbber = ScrapeWindow(hwnd, rect)
+  # NOTE(review): time.clock() is wall-clock on Windows under Python 2;
+  # it was removed in Python 3 (use time.monotonic() there).
+  start_clock = time.clock()
+  timeout_clock = start_clock + timeout
+  last_changed_clock = start_clock;
+
+  while time.clock() < timeout_clock:
+    time.sleep(tick)
+
+    current_throbber = ScrapeWindow(hwnd, rect)
+    if current_throbber.tostring() != last_throbber.tostring():
+      # Pixels changed: still animating, restart the quiet period
+      last_throbber = current_throbber
+      last_changed_clock = time.clock()
+    else:
+      # Unchanged: loaded once it has been quiet for `done` seconds
+      if time.clock() - last_changed_clock > done:
+        return last_changed_clock - start_clock
+
+  return -1
+
+
+def MoveAndSizeWindow(wnd, position=None, size=None, child=None):
+  """Moves and/or resizes a window.
+
+  Repositions and resizes a window. If a child window is provided,
+  the parent window is resized so the child window has the given size
+
+  Args:
+    wnd: handle of the frame window
+    position: new (x, y) for the frame window; None keeps the current one
+    size: new (width, height) for the frame window (or the child window)
+    child: handle of the child window
+
+  Returns:
+    None
+  """
+  rect = win32gui.GetWindowRect(wnd)
+
+  # Preserve the current position/size when none was requested
+  if position is None: position = (rect[0], rect[1])
+  if size is None:
+    size = (rect[2]-rect[0], rect[3]-rect[1])
+  elif child is not None:
+    # The "slop" is the frame chrome surrounding the child (borders,
+    # toolbars, ...); grow the frame by it so the child ends up with
+    # exactly the requested size.
+    child_rect = win32gui.GetWindowRect(child)
+    slop = (rect[2]-rect[0]-child_rect[2]+child_rect[0],
+            rect[3]-rect[1]-child_rect[3]+child_rect[1])
+    size = (size[0]+slop[0], size[1]+slop[1])
+
+  win32gui.MoveWindow(wnd,         # window to move
+                      position[0], # new x coord
+                      position[1], # new y coord
+                      size[0],     # new width
+                      size[1],     # new height
+                      True)        # repaint?
+
+
+def EndProcess(proc, code=0):
+  """Ends a process.
+
+  Wraps the OS TerminateProcess call for platform-independence
+
+  Args:
+    proc: process handle (as returned by InvokeAndWait), not a bare PID
+    code: process exit code
+
+  Returns:
+    None
+  """
+  win32process.TerminateProcess(proc, code)
+
+
+def URLtoFilename(url, path=None, extension=None):
+ """Converts a URL to a filename, given a path.
+
+ This in theory could cause collisions if two URLs differ only
+ in unprintable characters (eg. http://www.foo.com/?bar and
+ http://www.foo.com/:bar. In practice this shouldn't be a problem.
+
+ Args:
+ url: The URL to convert
+ path: path to the directory to store the file
+ extension: string to append to filename
+
+ Returns:
+ filename
+ """
+ trans = string.maketrans(r'\/:*?"<>|', '_________')
+
+ if path is None: path = ""
+ if extension is None: extension = ""
+ if len(path) > 0 and path[-1] != '\\': path += '\\'
+ url = url.translate(trans)
+ return "%s%s%s" % (path, url, extension)
+
+
+def PreparePath(path):
+ """Ensures that a given path exists, making subdirectories if necessary.
+
+ Args:
+ path: fully-qualified path of directory to ensure exists
+
+ Returns:
+ None
+ """
+ try:
+ os.makedirs(path)
+ except OSError, e:
+ if e[0] != 17: raise e # error 17: path already exists
+
+
+def main():
+ PreparePath(r"c:\sitecompare\scrapes\ie7")
+ # We're being invoked rather than imported. Let's do some tests
+
+ # Hardcode IE's location for the purpose of this test
+ (proc, wnd) = InvokeAndWait(
+ r"c:\program files\internet explorer\iexplore.exe")
+
+ # Find the browser pane in the IE window
+ browser = FindChildWindow(
+ wnd, "TabWindowClass/Shell DocObject View/Internet Explorer_Server")
+
+ # Move and size the window
+ MoveAndSizeWindow(wnd, (0, 0), (1024, 768), browser)
+
+ # Take a screenshot
+ i = ScrapeWindow(browser)
+
+ i.show()
+
+ EndProcess(proc, 0)
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/chromium/tools/site_compare/operators/__init__.py b/chromium/tools/site_compare/operators/__init__.py
new file mode 100644
index 00000000000..5d6ffd7555a
--- /dev/null
+++ b/chromium/tools/site_compare/operators/__init__.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Selects the appropriate operator."""
+
+
+def GetOperator(operator):
+ """Given an operator by name, returns its module.
+
+ Args:
+ operator: string describing the comparison
+
+ Returns:
+ module
+ """
+
+ # TODO(jhaas): come up with a happy way of integrating multiple operators
+ # with different, possibly divergent and possibly convergent, operators.
+
+ module = __import__(operator, globals(), locals(), [''])
+
+ return module
diff --git a/chromium/tools/site_compare/operators/equals.py b/chromium/tools/site_compare/operators/equals.py
new file mode 100644
index 00000000000..311f530a059
--- /dev/null
+++ b/chromium/tools/site_compare/operators/equals.py
@@ -0,0 +1,37 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compare two images for equality."""
+
+from PIL import Image
+from PIL import ImageChops
+
+
+def Compare(file1, file2, **kwargs):
+ """Compares two images to see if they're identical.
+
+ Args:
+ file1: path to first image to compare
+ file2: path to second image to compare
+ kwargs: unused for this operator
+
+ Returns:
+ None if the images are identical
+ A tuple of (errorstring, image) if they're not
+ """
+ kwargs = kwargs # unused parameter
+
+ im1 = Image.open(file1)
+ im2 = Image.open(file2)
+
+ if im1.size != im2.size:
+ return ("The images are of different size (%s vs %s)" %
+ (im1.size, im2.size), im1)
+
+ diff = ImageChops.difference(im1, im2)
+
+ if max(diff.getextrema()) != (0, 0):
+ return ("The images differ", diff)
+ else:
+ return None
diff --git a/chromium/tools/site_compare/operators/equals_with_mask.py b/chromium/tools/site_compare/operators/equals_with_mask.py
new file mode 100644
index 00000000000..e42f7e16a91
--- /dev/null
+++ b/chromium/tools/site_compare/operators/equals_with_mask.py
@@ -0,0 +1,57 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compare two images for equality, subject to a mask."""
+
+from PIL import Image
+from PIL import ImageChops
+
+import os.path
+
+
+def Compare(file1, file2, **kwargs):
+ """Compares two images to see if they're identical subject to a mask.
+
+ An optional directory containing masks is supplied. If a mask exists
+ which matches file1's name, areas under the mask where it's black
+ are ignored.
+
+ Args:
+ file1: path to first image to compare
+ file2: path to second image to compare
+ kwargs: ["maskdir"] contains the directory holding the masks
+
+ Returns:
+ None if the images are identical
+ A tuple of (errorstring, image) if they're not
+ """
+
+ maskdir = None
+ if "maskdir" in kwargs:
+ maskdir = kwargs["maskdir"]
+
+ im1 = Image.open(file1)
+ im2 = Image.open(file2)
+
+ if im1.size != im2.size:
+ return ("The images are of different size (%r vs %r)" %
+ (im1.size, im2.size), im1)
+
+ diff = ImageChops.difference(im1, im2)
+
+ if maskdir:
+ maskfile = os.path.join(maskdir, os.path.basename(file1))
+ if os.path.exists(maskfile):
+ mask = Image.open(maskfile)
+
+ if mask.size != im1.size:
+ return ("The mask is of a different size than the images (%r vs %r)" %
+ (mask.size, im1.size), mask)
+
+ diff = ImageChops.multiply(diff, mask.convert(diff.mode))
+
+ if max(diff.getextrema()) != (0, 0):
+ return ("The images differ", diff)
+ else:
+ return None
diff --git a/chromium/tools/site_compare/scrapers/__init__.py b/chromium/tools/site_compare/scrapers/__init__.py
new file mode 100755
index 00000000000..5f6d77885b7
--- /dev/null
+++ b/chromium/tools/site_compare/scrapers/__init__.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Selects the appropriate scraper for a given browser and version."""
+
+import types
+
+# TODO(jhaas): unify all optional scraper parameters into kwargs
+
+def GetScraper(browser):
+ """Given a browser and an optional version, returns the scraper module.
+
+ Args:
+ browser: either a string (browser name) or a tuple (name, version)
+
+ Returns:
+ module
+ """
+
+ if type(browser) == types.StringType: browser = (browser, None)
+
+ package = __import__(browser[0], globals(), locals(), [''])
+ module = package.GetScraper(browser[1])
+ if browser[1] is not None: module.version = browser[1]
+
+ return module
+
+
+# if invoked rather than imported, do some tests
+if __name__ == "__main__":
+ print GetScraper("IE")
diff --git a/chromium/tools/site_compare/scrapers/chrome/__init__.py b/chromium/tools/site_compare/scrapers/chrome/__init__.py
new file mode 100755
index 00000000000..587a50db735
--- /dev/null
+++ b/chromium/tools/site_compare/scrapers/chrome/__init__.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Selects the appropriate scraper for Chrome."""
+
+
+def GetScraper(version):
+ """Returns the scraper module for the given version.
+
+ Args:
+ version: version string of Chrome, or None for most recent
+
+ Returns:
+ scrape module for given version
+ """
+ if version is None:
+ version = "0.1.101.0"
+
+ parsed_version = [int(x) for x in version.split(".")]
+
+ if (parsed_version[0] > 0 or
+ parsed_version[1] > 1 or
+ parsed_version[2] > 97 or
+ parsed_version[3] > 0):
+ scraper_version = "chrome011010"
+ else:
+ scraper_version = "chrome01970"
+
+ return __import__(scraper_version, globals(), locals(), [''])
+
+
+# if invoked rather than imported, test
+if __name__ == "__main__":
+ print GetScraper("0.1.101.0").version
diff --git a/chromium/tools/site_compare/scrapers/chrome/chrome011010.py b/chromium/tools/site_compare/scrapers/chrome/chrome011010.py
new file mode 100644
index 00000000000..6f0dfb4bf7c
--- /dev/null
+++ b/chromium/tools/site_compare/scrapers/chrome/chrome011010.py
@@ -0,0 +1,42 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Does scraping for versions of Chrome from 0.1.101.0 up."""
+
+from drivers import windowing
+
+import chromebase
+
+# Default version
+version = "0.1.101.0"
+
+
+def GetChromeRenderPane(wnd):
+  """Return the render-pane child of a Chrome (0.1.101.0+) frame window."""
+  return windowing.FindChildWindow(wnd, "Chrome_TabContents")
+
+
+def Scrape(urls, outdir, size, pos, timeout=20, **kwargs):
+  """Invoke a browser, send it to a series of URLs, and save its output.
+
+  Args:
+    urls: list of URLs to scrape
+    outdir: directory to place output
+    size: size of browser window to use
+    pos: position of browser window
+    timeout: amount of time to wait for page to load
+    kwargs: miscellaneous keyword args (see chromebase.Scrape)
+
+  Returns:
+    None if succeeded, else an error code
+  """
+  # Inject this version's render-pane lookup into the shared driver
+  chromebase.GetChromeRenderPane = GetChromeRenderPane
+
+  # kwargs is forwarded as a positional dict, matching chromebase.Scrape's
+  # plain `kwargs` parameter.
+  return chromebase.Scrape(urls, outdir, size, pos, timeout, kwargs)
+
+
+def Time(urls, size, timeout, **kwargs):
+  """Forwards the Time command to chromebase using this version's pane.
+
+  See chromebase.Time for argument details; kwargs is forwarded as a
+  positional dict.
+  """
+  chromebase.GetChromeRenderPane = GetChromeRenderPane
+
+  return chromebase.Time(urls, size, timeout, kwargs)
diff --git a/chromium/tools/site_compare/scrapers/chrome/chrome01970.py b/chromium/tools/site_compare/scrapers/chrome/chrome01970.py
new file mode 100644
index 00000000000..2f237fa4df4
--- /dev/null
+++ b/chromium/tools/site_compare/scrapers/chrome/chrome01970.py
@@ -0,0 +1,42 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Does scraping for versions of Chrome up to 0.1.97.0."""
+
+from drivers import windowing
+
+import chromebase
+
+# Default version
+version = "0.1.97.0"
+
+
+def GetChromeRenderPane(wnd):
+  """Return the render-pane child of a Chrome (<= 0.1.97.0) frame window."""
+  return windowing.FindChildWindow(wnd, "Chrome_BrowserWindow")
+
+
+def Scrape(urls, outdir, size, pos, timeout=20, **kwargs):
+  """Invoke a browser, send it to a series of URLs, and save its output.
+
+  Args:
+    urls: list of URLs to scrape
+    outdir: directory to place output
+    size: size of browser window to use
+    pos: position of browser window
+    timeout: amount of time to wait for page to load
+    kwargs: miscellaneous keyword args (see chromebase.Scrape)
+
+  Returns:
+    None if succeeded, else an error code
+  """
+  # Inject this version's render-pane lookup into the shared driver
+  chromebase.GetChromeRenderPane = GetChromeRenderPane
+
+  # kwargs is forwarded as a positional dict, matching chromebase.Scrape's
+  # plain `kwargs` parameter.
+  return chromebase.Scrape(urls, outdir, size, pos, timeout, kwargs)
+
+
+def Time(urls, size, timeout, **kwargs):
+  """Forwards the Time command to chromebase using this version's pane.
+
+  See chromebase.Time for argument details; kwargs is forwarded as a
+  positional dict.
+  """
+  chromebase.GetChromeRenderPane = GetChromeRenderPane
+
+  return chromebase.Time(urls, size, timeout, kwargs)
diff --git a/chromium/tools/site_compare/scrapers/chrome/chromebase.py b/chromium/tools/site_compare/scrapers/chrome/chromebase.py
new file mode 100755
index 00000000000..2b8f1774cc4
--- /dev/null
+++ b/chromium/tools/site_compare/scrapers/chrome/chromebase.py
@@ -0,0 +1,199 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Does scraping for all currently-known versions of Chrome"""
+
+import pywintypes
+import types
+
+from drivers import keyboard
+from drivers import mouse
+from drivers import windowing
+
+
+# TODO: this has moved, use some logic to find it. For now,
+# expects a subst k:.
+DEFAULT_PATH = r"k:\chrome.exe"
+
+
+def InvokeBrowser(path):
+  """Invoke the Chrome browser, or attach to an already-running instance.
+
+  Args:
+    path: full path to browser
+
+  Returns:
+    A tuple of (main window, process handle, address bar, render pane);
+    the process handle is None when an existing browser was reused.
+  """
+
+  # Reuse an existing instance of the browser if we can find one. This
+  # may not work correctly, especially if the window is behind other windows.
+
+  # TODO(jhaas): make this work with Vista
+  wnds = windowing.FindChildWindows(0, "Chrome_XPFrame")
+  if len(wnds):
+    wnd = wnds[0]
+    proc = None
+  else:
+    # Invoke Chrome
+    (proc, wnd) = windowing.InvokeAndWait(path)
+
+  # Get windows we'll need. GetChromeRenderPane is injected into this
+  # module by the version-specific scrapers (chrome011010 / chrome01970)
+  # before this function is called.
+  address_bar = windowing.FindChildWindow(wnd, "Chrome_AutocompleteEdit")
+  render_pane = GetChromeRenderPane(wnd)
+
+  return (wnd, proc, address_bar, render_pane)
+
+
+def Scrape(urls, outdir, size, pos, timeout, kwargs):
+  """Invoke a browser, send it to a series of URLs, and save its output.
+
+  Args:
+    urls: single URL or list of URLs to scrape
+    outdir: directory to place output
+    size: size of browser window to use
+    pos: position of browser window
+    timeout: amount of time to wait for page to load
+    kwargs: plain dict of options; recognizes "path" (browser
+      executable) and "filename" (output filename or callable url->name)
+
+  Returns:
+    None if success, else an error string ("timeout" or "crashed")
+  """
+  if "path" in kwargs and kwargs["path"]: path = kwargs["path"]
+  else: path = DEFAULT_PATH
+
+  (wnd, proc, address_bar, render_pane) = InvokeBrowser(path)
+
+  # Resize and reposition the frame
+  windowing.MoveAndSizeWindow(wnd, pos, size, render_pane)
+
+  # Visit each URL we're given; promote a bare string to a 1-element list
+  if type(urls) in types.StringTypes: urls = [urls]
+
+  timedout = False
+
+  for url in urls:
+    # Double-click in the address bar, type the name, and press Enter
+    mouse.ClickInWindow(address_bar)
+    keyboard.TypeString(url, 0.1)
+    keyboard.TypeString("\n")
+
+    # Wait for the page to finish loading (throbber region is hardcoded)
+    load_time = windowing.WaitForThrobber(wnd, (20, 16, 36, 32), timeout)
+    timedout = load_time < 0
+
+    if timedout:
+      break
+
+    # Scrape the page
+    image = windowing.ScrapeWindow(render_pane)
+
+    # Save to disk
+    if "filename" in kwargs:
+      if callable(kwargs["filename"]):
+        filename = kwargs["filename"](url)
+      else:
+        filename = kwargs["filename"]
+    else:
+      filename = windowing.URLtoFilename(url, outdir, ".bmp")
+    image.save(filename)
+
+  if proc:
+    windowing.SetForegroundWindow(wnd)
+
+    # Send Alt-F4, then wait for process to end; a hang here is treated
+    # as a crash and the process is killed
+    keyboard.TypeString(r"{\4}", use_modifiers=True)
+    if not windowing.WaitForProcessExit(proc, timeout):
+      windowing.EndProcess(proc)
+      return "crashed"
+
+  if timedout:
+    return "timeout"
+
+  return None
+
+
+def Time(urls, size, timeout, kwargs):
+  """Measure how long it takes to load each of a series of URLs
+
+  Args:
+    urls: single URL or list of URLs to time
+    size: size of browser window to use
+    timeout: amount of time to wait for page to load
+    kwargs: plain dict of options; recognizes "path" (browser executable)
+
+  Returns:
+    A list of tuples (url, time). "time" can be "crashed" or "timeout"
+  """
+  if "path" in kwargs and kwargs["path"]: path = kwargs["path"]
+  else: path = DEFAULT_PATH
+  proc = None
+
+  # Visit each URL we're given; promote a bare string to a 1-element list
+  if type(urls) in types.StringTypes: urls = [urls]
+
+  ret = []
+  for url in urls:
+    try:
+      # Invoke the browser if necessary (the browser is closed after
+      # each successful measurement, and dropped on a crash)
+      if not proc:
+        (wnd, proc, address_bar, render_pane) = InvokeBrowser(path)
+
+        # Resize and reposition the frame
+        windowing.MoveAndSizeWindow(wnd, (0,0), size, render_pane)
+
+      # Double-click in the address bar, type the name, and press Enter
+      mouse.ClickInWindow(address_bar)
+      keyboard.TypeString(url, 0.1)
+      keyboard.TypeString("\n")
+
+      # Wait for the page to finish loading
+      load_time = windowing.WaitForThrobber(wnd, (20, 16, 36, 32), timeout)
+
+      timedout = load_time < 0
+
+      if timedout:
+        load_time = "timeout"
+
+      # Send an alt-F4 to make the browser close; if this times out,
+      # we've probably got a crash
+      windowing.SetForegroundWindow(wnd)
+
+      keyboard.TypeString(r"{\4}", use_modifiers=True)
+      if not windowing.WaitForProcessExit(proc, timeout):
+        windowing.EndProcess(proc)
+        load_time = "crashed"
+      proc = None
+    except pywintypes.error:
+      # Any win32 failure is recorded as a crash for this URL
+      proc = None
+      load_time = "crashed"
+
+    ret.append( (url, load_time) )
+
+  # If a browser instance is still running, close it down
+  if proc:
+    windowing.SetForegroundWindow(wnd)
+    keyboard.TypeString(r"{\4}", use_modifiers=True)
+    if not windowing.WaitForProcessExit(proc, timeout):
+      windowing.EndProcess(proc)
+
+  return ret
+
+
+def main():
+ # We're being invoked rather than imported, so run some tests
+ path = r"c:\sitecompare\scrapes\chrome\0.1.97.0"
+ windowing.PreparePath(path)
+
+ # Scrape three sites and save the results
+ Scrape([
+ "http://www.microsoft.com",
+ "http://www.google.com",
+ "http://www.sun.com"],
+ path, (1024, 768), (0, 0))
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/chromium/tools/site_compare/scrapers/firefox/__init__.py b/chromium/tools/site_compare/scrapers/firefox/__init__.py
new file mode 100755
index 00000000000..34c0699f072
--- /dev/null
+++ b/chromium/tools/site_compare/scrapers/firefox/__init__.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Selects the appropriate scraper for Firefox."""
+
+
def GetScraper(version):
  """Return the scraper module appropriate for the requested version.

  Args:
    version: version string of Firefox, or None for most recent

  Returns:
    scrape module for the given version
  """
  # Only a single Firefox scraper exists today, so the requested version
  # is deliberately ignored (pychecker flags the parameter as unused).
  del version  # unused
  return __import__("firefox2", globals(), locals(), [''])
+
+
# if invoked rather than imported, test
if __name__ == "__main__":
  # Python 2 print statement: prints the selected scraper's version string.
  print GetScraper("2.0.0.6").version
diff --git a/chromium/tools/site_compare/scrapers/firefox/firefox2.py b/chromium/tools/site_compare/scrapers/firefox/firefox2.py
new file mode 100755
index 00000000000..2181f588f81
--- /dev/null
+++ b/chromium/tools/site_compare/scrapers/firefox/firefox2.py
@@ -0,0 +1,249 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Does scraping for Firefox 2.0."""
+
+import pywintypes
+import time
+import types
+
+from drivers import keyboard
+from drivers import mouse
+from drivers import windowing
+
# Default version; exposed to callers (e.g. site_compare reads
# scraper.version to label the output directory).
version = "2.0.0.6"

# Fallback browser binary used when the caller does not supply a path.
DEFAULT_PATH = r"c:\program files\mozilla firefox\firefox.exe"

# TODO(jhaas): the Firefox scraper is a bit rickety at the moment. Known
# issues: 1) won't work if the default profile puts toolbars in different
# locations, 2) uses sleep() statements rather than more robust checks,
# 3) fails badly if an existing Firefox window is open when the scrape
# is invoked. This needs to be fortified at some point.
+
def GetBrowser(path):
  """Launch Firefox and locate its window handles.

  Args:
    path: full path to the browser binary, or a false value for the default

  Returns:
    A tuple of (process handle, main window, render pane)
  """
  browser_path = path or DEFAULT_PATH

  # Start the browser and block until its main window exists.
  (proc, wnd) = windowing.InvokeAndWait(browser_path)

  # The content area is nested three MozillaWindowClass panes deep.
  content_pane = windowing.FindChildWindow(
      wnd,
      "MozillaWindowClass/MozillaWindowClass/MozillaWindowClass")

  return (proc, wnd, content_pane)
+
+
def InvokeBrowser(path):
  """Start (or reuse) a Firefox instance.

  Args:
    path: full path to browser

  Returns:
    A tuple of (main window, process handle, render pane); the process
    handle is None when an already-running instance was reused.
  """
  # Prefer an already-open top-level Firefox window if one exists. This
  # may not work correctly, especially if the window is behind other windows.
  existing = windowing.FindChildWindows(0, "MozillaUIWindowClass")
  if existing:
    wnd = existing[0]
    proc = None
  else:
    # No reusable window: launch a fresh browser process.
    (proc, wnd) = windowing.InvokeAndWait(path)

  # The content area is nested three MozillaWindowClass panes deep.
  render_pane = windowing.FindChildWindow(
      wnd,
      "MozillaWindowClass/MozillaWindowClass/MozillaWindowClass")

  return (wnd, proc, render_pane)
+
+
def Scrape(urls, outdir, size, pos, timeout=20, **kwargs):
  """Invoke a browser, send it to a series of URLs, and save its output.

  Args:
    urls: list of URLs to scrape, or a single URL string
    outdir: directory to place output
    size: size of browser window to use
    pos: position of browser window
    timeout: amount of time (seconds) to wait for each page to load
    kwargs: miscellaneous keyword args; recognized keys are "path" (browser
      binary to launch) and "filename" (output filename, or a callable
      mapping url -> filename)

  Returns:
    None if success, else an error string ("timeout")
  """
  if "path" in kwargs and kwargs["path"]: path = kwargs["path"]
  else: path = DEFAULT_PATH

  # NOTE(review): proc may be None here when an existing window was reused;
  # it is not referenced again in this function.
  (wnd, proc, render_pane) = InvokeBrowser(path)

  # Resize and reposition the frame
  windowing.MoveAndSizeWindow(wnd, pos, size, render_pane)

  # Fixed sleeps rather than readiness checks -- see the module TODO.
  time.sleep(3)

  # Firefox is a bit of a pain: it doesn't use standard edit controls,
  # and it doesn't display a throbber when there's no tab. Let's make
  # sure there's at least one tab, then select the first one

  # "[t]" / "{d}" below are chord encodings interpreted by drivers.keyboard
  # (presumably Ctrl+T / Alt+D -- confirm against the keyboard driver).
  mouse.ClickInWindow(wnd)
  keyboard.TypeString("[t]", True)
  mouse.ClickInWindow(wnd, (30, 115))
  time.sleep(2)

  timedout = False

  # Visit each URL we're given
  if type(urls) in types.StringTypes: urls = [urls]

  for url in urls:

    # Use keyboard shortcuts
    keyboard.TypeString("{d}", True)
    keyboard.TypeString(url)
    keyboard.TypeString("\n")

    # Wait for the page to finish loading; a negative result means the
    # throbber never stopped within the timeout.
    load_time = windowing.WaitForThrobber(wnd, (10, 96, 26, 112), timeout)
    timedout = load_time < 0

    if timedout:
      break

    # Scrape the page
    image = windowing.ScrapeWindow(render_pane)

    # Save to disk
    if "filename" in kwargs:
      if callable(kwargs["filename"]):
        filename = kwargs["filename"](url)
      else:
        filename = kwargs["filename"]
    else:
      filename = windowing.URLtoFilename(url, outdir, ".bmp")
    image.save(filename)

  # Close all the tabs, cheesily
  mouse.ClickInWindow(wnd)

  # Keep sending close-tab chords until no Firefox window remains.
  while len(windowing.FindChildWindows(0, "MozillaUIWindowClass")):
    keyboard.TypeString("[w]", True)
    time.sleep(1)

  if timedout:
    return "timeout"
+
+
def Time(urls, size, timeout, **kwargs):
  """Measure how long it takes to load each of a series of URLs

  Args:
    urls: list of URLs to time, or a single URL string
    size: size of browser window to use
    timeout: amount of time to wait for page to load
    kwargs: miscellaneous keyword args; the recognized key is "path"
      (browser binary to launch)

  Returns:
    A list of tuples (url, time). "time" can be "crashed" or "timeout"
  """
  if "path" in kwargs and kwargs["path"]: path = kwargs["path"]
  else: path = DEFAULT_PATH
  proc = None

  # Visit each URL we're given
  if type(urls) in types.StringTypes: urls = [urls]

  ret = []
  for url in urls:
    try:
      # Invoke the browser if necessary; this block also runs again after a
      # crash/close reset proc to None on a previous iteration.
      if not proc:
        (wnd, proc, render_pane) = InvokeBrowser(path)

        # Resize and reposition the frame
        windowing.MoveAndSizeWindow(wnd, (0,0), size, render_pane)

        time.sleep(3)

        # Firefox is a bit of a pain: it doesn't use standard edit controls,
        # and it doesn't display a throbber when there's no tab. Let's make
        # sure there's at least one tab, then select the first one

        mouse.ClickInWindow(wnd)
        keyboard.TypeString("[t]", True)
        mouse.ClickInWindow(wnd, (30, 115))
        time.sleep(2)

      # Use keyboard shortcuts ("{d}" presumably focuses the address bar --
      # confirm against drivers.keyboard).
      keyboard.TypeString("{d}", True)
      keyboard.TypeString(url)
      keyboard.TypeString("\n")

      # Wait for the page to finish loading; negative means it never
      # finished within the timeout.
      load_time = windowing.WaitForThrobber(wnd, (10, 96, 26, 112), timeout)
      timedout = load_time < 0

      if timedout:
        load_time = "timeout"

      # Try to close the browser; if this fails it's probably a crash
      mouse.ClickInWindow(wnd)

      # Send up to 5 close-tab chords; if windows still remain afterwards,
      # assume the browser is wedged and kill it.
      count = 0
      while (len(windowing.FindChildWindows(0, "MozillaUIWindowClass"))
             and count < 5):
        keyboard.TypeString("[w]", True)
        time.sleep(1)
        count = count + 1

      if len(windowing.FindChildWindows(0, "MozillaUIWindowClass")):
        windowing.EndProcess(proc)
        load_time = "crashed"

      # Force a relaunch for the next URL.
      proc = None
    except pywintypes.error:
      # Any win32 API failure is treated as a browser crash.
      proc = None
      load_time = "crashed"

    ret.append( (url, load_time) )

  # Best-effort cleanup if a browser instance is still around.
  if proc:
    count = 0
    while (len(windowing.FindChildWindows(0, "MozillaUIWindowClass"))
           and count < 5):
      keyboard.TypeString("[w]", True)
      time.sleep(1)
      count = count + 1
  return ret
+
+
def main():
  # Smoke test when run as a script: scrape a few well-known sites into a
  # fixed output directory.
  out_root = r"c:\sitecompare\scrapes\Firefox\2.0.0.6"
  windowing.PreparePath(out_root)

  # Scrape three sites and save the results.
  test_urls = ["http://www.microsoft.com",
               "http://www.google.com",
               "http://www.sun.com"]
  Scrape(test_urls, out_root, (1024, 768), (0, 0))
  return 0
+
+
if __name__ == "__main__":
  # Bug fix: this module never imports sys at module scope (only
  # pywintypes/time/types and the drivers package), so the original
  # sys.exit(main()) raised NameError when the file was run directly.
  import sys
  sys.exit(main())
diff --git a/chromium/tools/site_compare/scrapers/ie/__init__.py b/chromium/tools/site_compare/scrapers/ie/__init__.py
new file mode 100755
index 00000000000..b4dab09e4c2
--- /dev/null
+++ b/chromium/tools/site_compare/scrapers/ie/__init__.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Selects the appropriate scraper for Internet Explorer."""
+
+
def GetScraper(version):
  """Return the scraper module for the requested IE version.

  Args:
    version: version string of IE, or None for most recent

  Returns:
    scrape module for the given version
  """
  # Only the IE7 scraper exists at present, so every requested version maps
  # to it (pychecker flags the parameter as unused).
  del version  # unused
  return __import__("ie7", globals(), locals(), [''])
+
+
# if invoked rather than imported, test
if __name__ == "__main__":
  # Python 2 print statement: prints the selected scraper's version string.
  # NOTE(review): "7.0.5370.1" differs from ie7.py's version constant
  # "7.0.5730.1" (digits transposed?) -- confirm which is intended.
  print GetScraper("7.0.5370.1").version
diff --git a/chromium/tools/site_compare/scrapers/ie/ie7.py b/chromium/tools/site_compare/scrapers/ie/ie7.py
new file mode 100755
index 00000000000..dcb83ca238f
--- /dev/null
+++ b/chromium/tools/site_compare/scrapers/ie/ie7.py
@@ -0,0 +1,210 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Does scraping for all known versions of IE."""
+
+import pywintypes
+import time
+import types
+
+from drivers import keyboard
+from drivers import mouse
+from drivers import windowing
+
# Default version; exposed to callers (e.g. site_compare reads
# scraper.version to label the output directory).
version = "7.0.5730.1"

# Fallback browser binary used when the caller does not supply a path.
DEFAULT_PATH = r"c:\program files\internet explorer\iexplore.exe"
+
def GetBrowser(path):
  """Invoke the IE browser and return the process, frame, and content window.

  Args:
    path: full path to browser, or a false value for the default

  Returns:
    A tuple of (process handle, frame window, render pane)
  """
  browser_path = path or DEFAULT_PATH

  # InvokeBrowser also locates the address bar and tab band, neither of
  # which this entry point needs.
  (iewnd, ieproc, _address_bar, render_pane, _tab_window) = InvokeBrowser(
      browser_path)
  return (ieproc, iewnd, render_pane)
+
+
def InvokeBrowser(path):
  """Invoke the IE browser.

  Args:
    path: full path to browser

  Returns:
    A tuple of (main window, process handle, address bar,
    render_pane, tab_window)
  """
  # Invoke IE
  (ieproc, iewnd) = windowing.InvokeAndWait(path)

  # Get windows we'll need. IE creates these child windows asynchronously,
  # so retry for up to ~10 seconds: FindChildWindow raises IndexError until
  # the window exists.
  for tries in xrange(10):
    try:
      address_bar = windowing.FindChildWindow(
        iewnd, "WorkerW|Navigation Bar/ReBarWindow32/"
        "Address Band Root/ComboBoxEx32/ComboBox/Edit")
      render_pane = windowing.FindChildWindow(
        iewnd, "TabWindowClass/Shell DocObject View")
      tab_window = windowing.FindChildWindow(
        iewnd, "CommandBarClass/ReBarWindow32/TabBandClass/DirectUIHWND")
    except IndexError:
      time.sleep(1)
      continue
    break

  # NOTE(review): if all 10 attempts fail, address_bar et al. are never
  # bound and the return below raises NameError -- consider raising a
  # clearer error after the retry loop.
  return (iewnd, ieproc, address_bar, render_pane, tab_window)
+
+
def Scrape(urls, outdir, size, pos, timeout=20, **kwargs):
  """Invoke a browser, send it to a series of URLs, and save its output.

  Args:
    urls: list of URLs to scrape, or a single URL string
    outdir: directory to place output
    size: size of browser window to use
    pos: position of browser window
    timeout: amount of time (seconds) to wait for each page to load
    kwargs: miscellaneous keyword args; recognized keys are "path" (browser
      binary to launch) and "filename" (output filename, or a callable
      mapping url -> filename)

  Returns:
    None if success, else an error string ("timeout")
  """
  # Consistency fix: use the module-level DEFAULT_PATH instead of the
  # previously duplicated (identical) literal, matching Time() above.
  if "path" in kwargs and kwargs["path"]: path = kwargs["path"]
  else: path = DEFAULT_PATH

  (iewnd, ieproc, address_bar, render_pane, tab_window) = (
    InvokeBrowser(path) )

  # Resize and reposition the frame
  windowing.MoveAndSizeWindow(iewnd, pos, size, render_pane)

  # Visit each URL we're given
  if type(urls) in types.StringTypes: urls = [urls]

  timedout = False

  for url in urls:

    # Double-click in the address bar, type the name, and press Enter
    mouse.DoubleClickInWindow(address_bar)
    keyboard.TypeString(url)
    keyboard.TypeString("\n")

    # Wait for the page to finish loading; negative means it never finished
    # within the timeout.
    load_time = windowing.WaitForThrobber(
      tab_window, (6, 8, 22, 24), timeout)
    timedout = load_time < 0

    if timedout:
      break

    # Scrape the page
    image = windowing.ScrapeWindow(render_pane)

    # Save to disk
    if "filename" in kwargs:
      if callable(kwargs["filename"]):
        filename = kwargs["filename"](url)
      else:
        filename = kwargs["filename"]
    else:
      filename = windowing.URLtoFilename(url, outdir, ".bmp")
    image.save(filename)

  windowing.EndProcess(ieproc)

  if timedout:
    return "timeout"
+
+
def Time(urls, size, timeout, **kwargs):
  """Measure how long it takes to load each of a series of URLs

  Args:
    urls: list of URLs to time, or a single URL string
    size: size of browser window to use
    timeout: amount of time to wait for page to load
    kwargs: miscellaneous keyword args; the recognized key is "path"
      (browser binary to launch)

  Returns:
    A list of tuples (url, time). "time" can be "crashed" or "timeout"
  """
  if "path" in kwargs and kwargs["path"]: path = kwargs["path"]
  else: path = DEFAULT_PATH
  proc = None

  # Visit each URL we're given
  if type(urls) in types.StringTypes: urls = [urls]

  ret = []
  for url in urls:
    try:
      # Invoke the browser if necessary; this also runs again after a
      # crash/close reset proc to None on a previous iteration.
      if not proc:
        (wnd, proc, address_bar, render_pane, tab_window) = InvokeBrowser(path)

        # Resize and reposition the frame
        windowing.MoveAndSizeWindow(wnd, (0,0), size, render_pane)

      # Double-click in the address bar, type the name, and press Enter
      mouse.DoubleClickInWindow(address_bar)
      keyboard.TypeString(url)
      keyboard.TypeString("\n")

      # Wait for the page to finish loading; negative means it never
      # finished within the timeout.
      load_time = windowing.WaitForThrobber(
        tab_window, (6, 8, 22, 24), timeout)
      timedout = load_time < 0

      if timedout:
        load_time = "timeout"

      # Send an alt-F4 to make the browser close; if this times out,
      # we've probably got a crash
      keyboard.TypeString(r"{\4}", use_modifiers=True)
      if not windowing.WaitForProcessExit(proc, timeout):
        windowing.EndProcess(proc)
        load_time = "crashed"
      proc = None
    except pywintypes.error:
      # Any win32 API failure is treated as a browser crash.
      load_time = "crashed"
      proc = None

    ret.append( (url, load_time) )

  # Send an alt-F4 to make the browser close; if this times out,
  # we've probably got a crash
  if proc:
    keyboard.TypeString(r"{\4}", use_modifiers=True)
    if not windowing.WaitForProcessExit(proc, timeout):
      windowing.EndProcess(proc)

  return ret
+
+
def main():
  # Smoke test when run as a script: scrape a few well-known sites into a
  # fixed output directory.
  out_root = r"c:\sitecompare\scrapes\ie7\7.0.5380.11"
  windowing.PreparePath(out_root)

  # Scrape three sites and save the results.
  test_urls = ["http://www.microsoft.com",
               "http://www.google.com",
               "http://www.sun.com"]
  Scrape(test_urls, out_root, (1024, 768), (0, 0))
  return 0
+
+
if __name__ == "__main__":
  # Bug fix: this module never imports sys at module scope (only
  # pywintypes/time/types and the drivers package), so the original
  # sys.exit(main()) raised NameError when the file was run directly.
  import sys
  sys.exit(main())
diff --git a/chromium/tools/site_compare/site_compare.py b/chromium/tools/site_compare/site_compare.py
new file mode 100755
index 00000000000..db9216f2d0c
--- /dev/null
+++ b/chromium/tools/site_compare/site_compare.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""SiteCompare component to handle bulk scrapes.
+
+Invokes a list of browsers and sends them to a list of URLs,
+saving the rendered results to a specified directory, then
+performs comparison operations on the resulting bitmaps and
+saves the results
+"""
+
+
+# This line is necessary to work around a QEMU bug
+import _imaging
+
+import os # Functions for walking the directory tree
+import types # Runtime type-checking
+
+import command_line # command-line parsing
+import drivers # Functions for driving keyboard/mouse/windows, OS-specific
+import operators # Functions that, given two bitmaps as input, produce
+ # output depending on the performance of an operation
+import scrapers # Functions that know how to capture a render from
+ # particular browsers
+
+import commands.compare2 # compare one page in two versions of same browser
+import commands.maskmaker # generate a mask based on repeated scrapes
+import commands.measure # measure length of time a page takes to load
+import commands.scrape # scrape a URL or series of URLs to a bitmap
+
+# The timeload command is obsolete (too flaky); it may be reinstated
+# later but for now it's been superseded by "measure"
+# import commands.timeload # measure length of time a page takes to load
+
def Scrape(browsers, urls, window_size=(1024, 768),
           window_pos=(0, 0), timeout=20, save_path=None, **kwargs):
  """Invoke one or more browsers over one or more URLs, scraping renders.

  Args:
    browsers: browsers to invoke with optional version strings
    urls: URLs to visit
    window_size: size of the browser window to display
    window_pos: location of browser window
    timeout: time (in seconds) to wait for page to load
    save_path: root of save path, automatically appended with browser and
      version
    kwargs: miscellaneous keyword args, passed through to each scraper
  Returns:
    None

  @TODO(jhaas): more parameters, or perhaps an indefinite dictionary
  parameter, for things like length of time to wait for timeout, speed
  of mouse clicks, etc. Possibly on a per-browser, per-URL, or
  per-browser-per-URL basis
  """

  if type(browsers) in types.StringTypes: browsers = [browsers]

  if save_path is None:
    # default save path is "scrapes" off the current root
    save_path = os.path.join(os.path.split(__file__)[0], "Scrapes")

  for browser in browsers:
    # Browsers should be tuples of (browser, version)
    if type(browser) in types.StringTypes: browser = (browser, None)
    scraper = scrapers.GetScraper(browser)

    full_path = os.path.join(save_path, browser[0], scraper.version)
    drivers.windowing.PreparePath(full_path)

    # Bug fix: kwargs was previously passed as a sixth *positional*
    # argument, which raised TypeError in every scraper (their signatures
    # are (urls, outdir, size, pos, timeout=20, **kwargs)). Expand it as
    # keyword arguments instead.
    scraper.Scrape(urls, full_path, window_size, window_pos, timeout,
                   **kwargs)
+
+
def Compare(base, compare, ops, root_path=None, out_path=None):
  """Compares a series of scrapes using a series of operators.

  Args:
    base: (browser, version) tuple of version to consider the baseline
    compare: (browser, version) tuple of version to compare to
    ops: list of operators plus operator arguments
    root_path: root of the scrapes
    out_path: place to put any output from the operators

  Returns:
    None

  @TODO(jhaas): this method will likely change, to provide a robust and
  well-defined way of chaining operators, applying operators conditionally,
  and full-featured scripting of the operator chain. There also needs
  to be better definition of the output; right now it's to stdout and
  a log.txt file, with operator-dependent images saved for error output
  """
  if root_path is None:
    # default save path is "scrapes" off the current root
    root_path = os.path.join(os.path.split(__file__)[0], "Scrapes")

  if out_path is None:
    # default output path is "Compares" off the current root
    out_path = os.path.join(os.path.split(__file__)[0], "Compares")

  # Bare strings are promoted to (name, version-unspecified) tuples.
  if type(base) in types.StringTypes: base = (base, None)
  if type(compare) in types.StringTypes: compare = (compare, None)
  if type(ops) in types.StringTypes: ops = [ops]

  base_dir = os.path.join(root_path, base[0])
  compare_dir = os.path.join(root_path, compare[0])

  if base[1] is None:
    # NOTE(review): the original comment said "earliest capture", but max()
    # of the directory listing selects the lexically *latest* capture --
    # confirm which was intended.
    base = (base[0], max(os.listdir(base_dir)))

  if compare[1] is None:
    # NOTE(review): conversely, min() selects the lexically *earliest*
    # capture despite the original "latest capture" comment -- confirm.
    compare = (compare[0], min(os.listdir(compare_dir)))

  out_path = os.path.join(out_path, base[0], base[1], compare[0], compare[1])
  drivers.windowing.PreparePath(out_path)

  # TODO(jhaas): right now we're just dumping output to a log file
  # (and the console), which works as far as it goes but isn't nearly
  # robust enough. Change this after deciding exactly what we want to
  # change it to.
  out_file = open(os.path.join(out_path, "log.txt"), "w")
  description_string = ("Comparing %s %s to %s %s" %
                        (base[0], base[1], compare[0], compare[1]))
  # NOTE(review): no trailing newline is written here, so the first
  # per-file "%s: " entry below is appended to the description line.
  out_file.write(description_string)
  print description_string

  base_dir = os.path.join(base_dir, base[1])
  compare_dir = os.path.join(compare_dir, compare[1])

  # Run every operator over every file present in the baseline capture.
  for filename in os.listdir(base_dir):
    out_file.write("%s: " % filename)

    if not os.path.isfile(os.path.join(compare_dir, filename)):
      out_file.write("Does not exist in target directory\n")
      print "File %s does not exist in target directory" % filename
      continue

    base_filename = os.path.join(base_dir, filename)
    compare_filename = os.path.join(compare_dir, filename)

    for op in ops:
      if type(op) in types.StringTypes: op = (op, None)

      module = operators.GetOperator(op[0])

      # Compare returns None on success, else (message, error image).
      ret = module.Compare(base_filename, compare_filename)
      if ret is None:
        print "%s: OK" % (filename,)
        out_file.write("OK\n")
      else:
        print "%s: %s" % (filename, ret[0])
        out_file.write("%s\n" % (ret[0]))
        ret[1].save(os.path.join(out_path, filename))

  out_file.close()
+
+
def main():
  """Main executable. Parse the command line and invoke the command."""
  parser = command_line.CommandLine()

  # compare2 and maskmaker are currently unstable, so they stay disabled:
  # commands.compare2.CreateCommand(parser)
  # commands.maskmaker.CreateCommand(parser)
  commands.measure.CreateCommand(parser)
  commands.scrape.CreateCommand(parser)

  parser.ParseCommandLine()
  return 0
+
+
if __name__ == "__main__":
  # Bug fix: this module never imports sys at module scope, so the original
  # sys.exit(main()) raised NameError when the file was run directly.
  import sys
  sys.exit(main())
diff --git a/chromium/tools/site_compare/utils/__init__.py b/chromium/tools/site_compare/utils/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/chromium/tools/site_compare/utils/__init__.py
diff --git a/chromium/tools/site_compare/utils/browser_iterate.py b/chromium/tools/site_compare/utils/browser_iterate.py
new file mode 100644
index 00000000000..596b475cdcb
--- /dev/null
+++ b/chromium/tools/site_compare/utils/browser_iterate.py
@@ -0,0 +1,199 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility to use a browser to visit multiple URLs.
+
+Prerequisites:
+ 1. The command_line package from tools/site_compare
+ 2. Either the IE BHO or Firefox extension (or both)
+
+Installation:
+ 1. Build the IE BHO, or call regsvr32 on a prebuilt binary
+ 2. Add a file called "measurepageloadtimeextension@google.com" to
+ the default Firefox profile directory under extensions, containing
+ the path to the Firefox extension root
+
+Invoke with the command line arguments as documented within
+the command line.
+"""
+
+import command_line
+import scrapers
+import socket
+import time
+
+from drivers import windowing
+
# Constants
MAX_URL = 1024  # bytes read from the socket per recv() call below
PORT = 42492    # localhost TCP port the browser-side helper is expected on
+
def SetupIterationCommandLine(cmd):
  """Adds the necessary flags for iteration to a command.

  Args:
    cmd: an object created by cmdline.AddCommand
  """
  # Which browser to drive, optionally pinned to a version and binary path.
  cmd.AddArgument(
    ["-b", "--browser"], "Browser to use (ie, firefox, chrome)",
    type="string", required=True)
  cmd.AddArgument(
    ["-b1v", "--browserver"], "Version of browser", metaname="VERSION")
  cmd.AddArgument(
    ["-p", "--browserpath"], "Path to browser.",
    type="string", required=False)
  # Exactly one URL source is required: a single --url, or a --list file
  # (enforced by the mutual exclusion + required group below).
  cmd.AddArgument(
    ["-u", "--url"], "URL to visit")
  cmd.AddArgument(
    ["-l", "--list"], "File containing list of URLs to visit", type="readfile")
  cmd.AddMutualExclusion(["--url", "--list"])
  # Optional slicing of the --list file; --count and --endline are mutually
  # exclusive, and all three require --list (--count also needs --startline).
  cmd.AddArgument(
    ["-s", "--startline"], "First line of URL list", type="int")
  cmd.AddArgument(
    ["-e", "--endline"], "Last line of URL list (exclusive)", type="int")
  cmd.AddArgument(
    ["-c", "--count"], "Number of lines of URL file to use", type="int")
  cmd.AddDependency("--startline", "--list")
  cmd.AddRequiredGroup(["--url", "--list"])
  cmd.AddDependency("--endline", "--list")
  cmd.AddDependency("--count", "--list")
  cmd.AddMutualExclusion(["--count", "--endline"])
  cmd.AddDependency("--count", "--startline")
  cmd.AddArgument(
    ["-t", "--timeout"], "Amount of time (seconds) to wait for browser to "
    "finish loading",
    type="int", default=300)
  cmd.AddArgument(
    ["-sz", "--size"], "Browser window size", default=(800, 600), type="coords")
+
+
def Iterate(command, iteration_func):
  """Iterates over a list of URLs, calling a function on each.

  State (the socket, process handle, and window) is stashed on function
  attributes (Iterate.s / Iterate.proc / Iterate.wnd) shared by the nested
  helpers below.

  Args:
    command: the command line containing the iteration flags
    iteration_func: called for each URL as
      iteration_func(url, proc, wnd, response)
  """

  # Retrieve the browser scraper to use to invoke the browser
  scraper = scrapers.GetScraper((command["--browser"], command["--browserver"]))

  def AttachToBrowser(path, timeout):
    """Invoke the browser process and connect to the socket."""
    (proc, frame, wnd) = scraper.GetBrowser(path)

    if not wnd: raise ValueError("Could not invoke browser.")

    # Try to connect the socket. If it fails, wait and try
    # again. Do this for ten seconds
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)

    for attempt in xrange(10):
      try:
        s.connect(("localhost", PORT))
      except socket.error:
        time.sleep(1)
        continue
      break

    # Verify the connection actually succeeded (getpeername raises if not).
    try:
      s.getpeername()
    except socket.error:
      raise ValueError("Could not connect to browser")

    if command["--size"]:
      # Resize and reposition the frame
      windowing.MoveAndSizeWindow(frame, (0, 0), command["--size"], wnd)

    s.settimeout(timeout)

    # Publish the connection state for the outer loop / DetachFromBrowser.
    Iterate.proc = proc
    Iterate.wnd = wnd
    Iterate.s = s

  def DetachFromBrowser():
    """Close the socket and kill the process if necessary."""
    if Iterate.s:
      Iterate.s.close()
      Iterate.s = None

    if Iterate.proc:
      if not windowing.WaitForProcessExit(Iterate.proc, 0):
        try:
          windowing.EndProcess(Iterate.proc)
          windowing.WaitForProcessExit(Iterate.proc, 0)
        # NOTE(review): pywintypes is never imported in this module, so
        # reaching this handler raises NameError -- add `import pywintypes`
        # alongside the other imports.
        except pywintypes.error:
          # Exception here most likely means the process died on its own
          pass
      Iterate.proc = None

  if command["--browserpath"]:
    browser = command["--browserpath"]
  else:
    browser = None

  # Read the URLs from the file
  if command["--url"]:
    url_list = [command["--url"]]
  else:
    # Compute the half-open [startline, endline) range to read.
    startline = command["--startline"]
    if command["--count"]:
      endline = startline+command["--count"]
    else:
      endline = command["--endline"]

    url_list = []
    # NOTE(review): `file` shadows the builtin of the same name and the
    # handle is never closed -- prefer a different name and a with-block.
    file = open(command["--list"], "r")

    # Skip lines before startline (1-based), then collect the range.
    for line in xrange(startline-1):
      file.readline()

    for line in xrange(endline-startline):
      url_list.append(file.readline().strip())

  timeout = command["--timeout"]

  # Loop through the URLs and send them through the socket
  Iterate.s = None
  Iterate.proc = None
  Iterate.wnd = None

  for url in url_list:
    # Invoke the browser if necessary
    if not Iterate.proc:
      AttachToBrowser(browser, timeout)
    # Send the URL and wait for a response
    Iterate.s.send(url + "\n")

    # Accumulate until the helper terminates its reply with a newline.
    response = ""

    while (response.find("\n") < 0):

      try:
        recv = Iterate.s.recv(MAX_URL)
        response = response + recv

        # Workaround for an oddity: when Firefox closes
        # gracefully, somehow Python doesn't detect it.
        # (Telnet does)
        if not recv:
          raise socket.error

      except socket.timeout:
        response = url + ",hang\n"
        DetachFromBrowser()
      except socket.error:
        # If there was a socket error, it's probably a crash
        response = url + ",crash\n"
        DetachFromBrowser()

    # If we received a timeout response, restart the browser
    if response[-9:] == ",timeout\n":
      DetachFromBrowser()

    # Invoke the iteration function
    iteration_func(url, Iterate.proc, Iterate.wnd, response)

  # We're done
  DetachFromBrowser()
diff --git a/chromium/tools/sort-headers.py b/chromium/tools/sort-headers.py
new file mode 100755
index 00000000000..88bbd555c39
--- /dev/null
+++ b/chromium/tools/sort-headers.py
@@ -0,0 +1,187 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Given a filename as an argument, sort the #include/#imports in that file.
+
+Shows a diff and prompts for confirmation before doing the deed.
+Works great with tools/git/for-all-touched-files.py.
+"""
+
+import optparse
+import os
+import sys
+
+from yes_no import YesNo
+
+
def IsInclude(line):
  """Returns True if the line is an #include/#import/import line."""
  # str.startswith accepts a tuple of prefixes, replacing the any([...]).
  return line.startswith(('#include ', '#import ', 'import '))
+
+
def IncludeCompareKey(line, for_blink):
  """Sorting key for #include/#import lines.

  Returns a string whose first character is a coarse category digit; the
  remainder (when present) fine-sorts headers within that category.
  """
  # Strip the include/import marker so only the header spec is compared.
  for marker in ('#include ', '#import ', 'import '):
    if line.startswith(marker):
      line = line[len(marker):]
      break

  if for_blink:
    # Blink likes to have its "config.h" include first.
    if line.startswith('"config.h"'):
      return '0'
    # All remaining headers (system ones included) sort alphabetically.
    return '1' + line

  # The win32 api has all sorts of implicit include order dependencies :-/
  # Pin a few headers ahead of everything else.
  if line.startswith('<windows.h>'):  # Must be before e.g. shellapi.h
    return '0'
  # atlbase.h before atlapp.h; ole2.h/unknwn.h before e.g. intshcut.h.
  if line.startswith(('<atlbase.h>', '<ole2.h>', '<unknwn.h>')):
    return '1' + line

  if line.startswith('<'):
    # C system headers (<foo.h>) come before C++ system headers (<foo>).
    if '.h>' in line:
      return '2' + line.lower()
    return '3' + line.lower()

  # Project headers come last.
  return '4' + line
+
+
def SortHeader(infile, outfile, for_blink):
  """Sorts the headers in infile, writing the sorted file to outfile.

  Consecutive runs of include/import lines are buffered, sorted with
  IncludeCompareKey, and written out; every other line passes through
  unchanged.
  """
  def CompareKey(line):
    return IncludeCompareKey(line, for_blink)

  for line in infile:
    if IsInclude(line):
      headerblock = []
      while IsInclude(line):
        infile_ended_on_include_line = False
        headerblock.append(line)
        # Ensure we don't die due to trying to read beyond the end of the
        # file. Use the next() builtin (Python 2.6+ and 3.x) rather than
        # the Python-2-only file.next() method.
        try:
          line = next(infile)
        except StopIteration:
          infile_ended_on_include_line = True
          break
      for header in sorted(headerblock, key=CompareKey):
        outfile.write(header)
      if infile_ended_on_include_line:
        # We already wrote the last line above; exit to ensure it isn't
        # written again.
        return
    # Intentionally fall through, to write the line that caused
    # the above while loop to exit.
    outfile.write(line)
+
+
def FixFileWithConfirmFunction(filename, confirm_function,
                               perform_safety_checks, for_blink=False):
  """Creates a fixed version of the file, invokes |confirm_function|
  to decide whether to use the new file, and cleans up.

  |confirm_function| takes two parameters, the original filename and
  the fixed-up filename, and returns True to use the fixed-up file,
  false to not use it.

  If |perform_safety_checks| is True, then the function checks whether it is
  unsafe to reorder headers in this file and skips the reorder with a warning
  message in that case.
  """
  if perform_safety_checks and IsUnsafeToReorderHeaders(filename):
    print ('Not reordering headers in %s as the script thinks that the '
           'order of headers in this file is semantically significant.'
           % (filename))
    return
  fixfilename = filename + '.new'
  # Robustness fix: with-blocks guarantee both handles are closed even if
  # SortHeader raises. Closing outfile before the confirm step also matters
  # so the diff run by the caller sees the updated contents.
  with open(filename, 'rb') as infile:
    with open(fixfilename, 'wb') as outfile:
      SortHeader(infile, outfile, for_blink)

  try:
    if confirm_function(filename, fixfilename):
      # On Windows, rename() fails if the destination exists.
      if sys.platform == 'win32':
        os.unlink(filename)
      os.rename(fixfilename, filename)
  finally:
    try:
      os.remove(fixfilename)
    except OSError:
      # If the file isn't there, we don't care.
      pass
+
+
def DiffAndConfirm(filename, should_confirm, perform_safety_checks, for_blink):
  """Shows a diff of what the tool would change the file named
  filename to. Shows a confirmation prompt if should_confirm is true.
  Saves the resulting file if should_confirm is false or the user
  answers Y to the confirmation prompt.
  """
  def ConfirmFunction(filename, fixfilename):
    # NOTE(review): filenames are interpolated unquoted into a shell
    # command; names containing spaces or shell metacharacters will break.
    diff = os.system('diff -u %s %s' % (filename, fixfilename))
    if sys.platform != 'win32':
      # On POSIX, os.system() returns a wait status; the child's exit code
      # lives in the high byte. On win32 the exit code is returned directly.
      diff >>= 8
    if diff == 0:  # Check exit code. diff exits 0 when files are identical.
      print '%s: no change' % filename
      return False

    # A nonzero diff means there is something to apply; ask unless forced.
    return (not should_confirm or YesNo('Use new file (y/N)?'))

  FixFileWithConfirmFunction(filename, ConfirmFunction, perform_safety_checks,
                             for_blink)
+
def IsUnsafeToReorderHeaders(filename):
  """Returns True for files whose include order is semantically significant.

  *_message_generator.cc files are almost certainly files that generate IPC
  definitions; changes in include order there can break the build.
  """
  return "message_generator.cc" in filename
+
def main():
  """Parses the command line and sorts headers in each named file.

  Returns 1 (error) when no filenames are given; otherwise returns None,
  which sys.exit() treats as exit status 0.
  """
  parser = optparse.OptionParser(usage='%prog filename1 filename2 ...')
  parser.add_option('-f', '--force', action='store_false', default=True,
                    dest='should_confirm',
                    help='Turn off confirmation prompt.')
  parser.add_option('--no_safety_checks',
                    action='store_false', default=True,
                    dest='perform_safety_checks',
                    help='Do not perform the safety checks via which this '
                    'script refuses to operate on files for which it thinks '
                    'the include ordering is semantically significant.')
  parser.add_option('--for_blink', action='store_true', default=False,
                    dest='for_blink', help='Whether the blink header sorting '
                    'rules should be applied.')
  opts, filenames = parser.parse_args()

  if len(filenames) < 1:
    parser.print_help()
    return 1

  # Process each file independently; DiffAndConfirm handles prompting.
  for filename in filenames:
    DiffAndConfirm(filename, opts.should_confirm, opts.perform_safety_checks,
                   opts.for_blink)
+
+
if __name__ == '__main__':
  # sys is imported at module scope here; main() returns 1 on usage error
  # and None (exit status 0) otherwise.
  sys.exit(main())
diff --git a/chromium/tools/sort_sources.py b/chromium/tools/sort_sources.py
new file mode 100755
index 00000000000..bcbdbf67d92
--- /dev/null
+++ b/chromium/tools/sort_sources.py
@@ -0,0 +1,187 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Given a GYP/GN filename, sort C-ish source files in that file.
+
+Shows a diff and prompts for confirmation before doing the deed.
+Works great with tools/git/for-all-touched-files.py.
+
+Limitations:
+
+1) Comments used as section headers
+
+If a comment (1+ lines starting with #) appears in a source list without a
+preceding blank line, the tool assumes that the comment is about the next
+line. For example, given the following source list,
+
+ sources = [
+ "b.cc",
+ # Comment.
+ "a.cc",
+ "c.cc",
+ ]
+
+the tool will produce the following output:
+
+ sources = [
+ # Comment.
+ "a.cc",
+ "b.cc",
+ "c.cc",
+ ]
+
+This is not correct if the comment is for starting a new section like:
+
+ sources = [
+ "b.cc",
+ # These are for Linux.
+ "a.cc",
+ "c.cc",
+ ]
+
+The tool cannot disambiguate the two types of comments. The problem can be
+worked around by inserting a blank line before the comment because the tool
+interprets a blank line as the end of a source list.
+
+2) Sources commented out
+
+Sometimes sources are commented out with their positions kept in the
+alphabetical order, but what if the list is not sorted correctly? For
+example, given the following source list,
+
+ sources = [
+ "a.cc",
+ # "b.cc",
+ "d.cc",
+ "c.cc",
+ ]
+
+the tool will produce the following output:
+
+ sources = [
+ "a.cc",
+ "c.cc",
+ # "b.cc",
+ "d.cc",
+ ]
+
+This is because the tool assumes that the comment (# "b.cc",) is about the
+next line ("d.cc",). This kind of errors should be fixed manually, or the
+commented-out code should be deleted.
+
+3) " and ' are used both used in the same source list (GYP only problem)
+
+If both " and ' are used in the same source list, sources quoted with " will
+appear first in the output. The problem is rare enough so the tool does not
+attempt to normalize them. Hence this kind of errors should be fixed
+manually.
+
+4) Spaces and tabs used in the same source list
+
+Similarly, if spaces and tabs are both used in the same source list, sources
+indented with tabs will appear first in the output. This kind of errors
+should be fixed manually.
+
+"""
+
+import difflib
+import optparse
+import re
+import sys
+
+from yes_no import YesNo
+
+SUFFIXES = ['c', 'cc', 'cpp', 'h', 'mm', 'rc', 'rc.version', 'ico', 'def',
+ 'release']
+SOURCE_PATTERN = re.compile(r'^\s+[\'"].*\.(%s)[\'"],$' %
+ '|'.join([re.escape(x) for x in SUFFIXES]))
+COMMENT_PATTERN = re.compile(r'^\s+#')
+
+
+def SortSources(original_lines):
+ """Sort source file names in |original_lines|.
+
+ Args:
+ original_lines: Lines of the original content as a list of strings.
+
+ Returns:
+ Lines of the sorted content as a list of strings.
+
+ The algorithm is fairly naive. The code tries to find a list of C-ish
+ source file names by a simple regex, then sort them. The code does not try
+ to understand the syntax of the build files. See the file comment above for
+ details.
+ """
+
+ output_lines = []
+ comments = []
+ sources = []
+ for line in original_lines:
+ if re.search(COMMENT_PATTERN, line):
+ comments.append(line)
+ elif re.search(SOURCE_PATTERN, line):
+ # Associate the line with the preceding comments.
+ sources.append([line, comments])
+ comments = []
+ else:
+ # |sources| should be flushed first, to handle comments at the end of a
+ # source list correctly.
+ if sources:
+ for source_line, source_comments in sorted(sources):
+ output_lines.extend(source_comments)
+ output_lines.append(source_line)
+ sources = []
+ if comments:
+ output_lines.extend(comments)
+ comments = []
+ output_lines.append(line)
+ return output_lines
+
+
+def ProcessFile(filename, should_confirm):
+ """Process the input file and rewrite if needed.
+
+ Args:
+ filename: Path to the input file.
+ should_confirm: If true, diff and confirmation prompt are shown.
+ """
+
+ original_lines = []
+ with open(filename, 'r') as input_file:
+ for line in input_file:
+ original_lines.append(line)
+
+ new_lines = SortSources(original_lines)
+ if original_lines == new_lines:
+ print '%s: no change' % filename
+ return
+
+ if should_confirm:
+ diff = difflib.unified_diff(original_lines, new_lines)
+ sys.stdout.writelines(diff)
+ if not YesNo('Use new file (y/N)'):
+ return
+
+ with open(filename, 'w') as output_file:
+ output_file.writelines(new_lines)
+
+
+def main():
+ parser = optparse.OptionParser(usage='%prog filename1 filename2 ...')
+ parser.add_option('-f', '--force', action='store_false', default=True,
+ dest='should_confirm',
+ help='Turn off confirmation prompt.')
+ opts, filenames = parser.parse_args()
+
+ if len(filenames) < 1:
+ parser.print_help()
+ return 1
+
+ for filename in filenames:
+ ProcessFile(filename, opts.should_confirm)
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/stats_viewer/OpenDialog.Designer.cs b/chromium/tools/stats_viewer/OpenDialog.Designer.cs
new file mode 100644
index 00000000000..b4f7def8702
--- /dev/null
+++ b/chromium/tools/stats_viewer/OpenDialog.Designer.cs
@@ -0,0 +1,88 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+namespace StatsViewer
+{
+ partial class OpenDialog
+ {
+ /// <summary>
+ /// Required designer variable.
+ /// </summary>
+ private System.ComponentModel.IContainer components = null;
+
+ /// <summary>
+ /// Clean up any resources being used.
+ /// </summary>
+ /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
+ protected override void Dispose(bool disposing)
+ {
+ if (disposing && (components != null))
+ {
+ components.Dispose();
+ }
+ base.Dispose(disposing);
+ }
+
+ #region Windows Form Designer generated code
+
+ /// <summary>
+ /// Required method for Designer support - do not modify
+ /// the contents of this method with the code editor.
+ /// </summary>
+ private void InitializeComponent()
+ {
+ this.name_box_ = new System.Windows.Forms.TextBox();
+ this.label1 = new System.Windows.Forms.Label();
+ this.button1 = new System.Windows.Forms.Button();
+ this.SuspendLayout();
+ //
+ // name_box_
+ //
+ this.name_box_.Location = new System.Drawing.Point(108, 25);
+ this.name_box_.Name = "name_box_";
+ this.name_box_.Size = new System.Drawing.Size(180, 20);
+ this.name_box_.TabIndex = 0;
+ this.name_box_.KeyUp += new System.Windows.Forms.KeyEventHandler(this.OnKeyUp);
+ //
+ // label1
+ //
+ this.label1.AutoSize = true;
+ this.label1.Location = new System.Drawing.Point(12, 28);
+ this.label1.Name = "label1";
+ this.label1.Size = new System.Drawing.Size(91, 13);
+ this.label1.TabIndex = 1;
+ this.label1.Text = "Stats File to Open";
+ //
+ // button1
+ //
+ this.button1.Location = new System.Drawing.Point(108, 61);
+ this.button1.Name = "button1";
+ this.button1.Size = new System.Drawing.Size(75, 23);
+ this.button1.TabIndex = 2;
+ this.button1.Text = "OK";
+ this.button1.UseVisualStyleBackColor = true;
+ this.button1.Click += new System.EventHandler(this.button1_Click);
+ //
+ // OpenDialog
+ //
+ this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
+ this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
+ this.ClientSize = new System.Drawing.Size(303, 94);
+ this.Controls.Add(this.button1);
+ this.Controls.Add(this.label1);
+ this.Controls.Add(this.name_box_);
+ this.Name = "OpenDialog";
+ this.Text = "OpenDialog";
+ this.ResumeLayout(false);
+ this.PerformLayout();
+
+ }
+
+ #endregion
+
+ private System.Windows.Forms.TextBox name_box_;
+ private System.Windows.Forms.Label label1;
+ private System.Windows.Forms.Button button1;
+ }
+}
diff --git a/chromium/tools/stats_viewer/OpenDialog.cs b/chromium/tools/stats_viewer/OpenDialog.cs
new file mode 100644
index 00000000000..39d93d8ddde
--- /dev/null
+++ b/chromium/tools/stats_viewer/OpenDialog.cs
@@ -0,0 +1,45 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+using System;
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.Data;
+using System.Drawing;
+using System.Text;
+using System.Windows.Forms;
+
+namespace StatsViewer
+{
+ public partial class OpenDialog : Form
+ {
+ public OpenDialog()
+ {
+ InitializeComponent();
+ }
+
+ /// <summary>
+ /// Get the user selected filename
+ /// </summary>
+ public string FileName
+ {
+ get {
+ return this.name_box_.Text;
+ }
+ }
+
+ private void button1_Click(object sender, EventArgs e)
+ {
+ this.Close();
+ }
+
+ private void OnKeyUp(object sender, KeyEventArgs e)
+ {
+ if (e.KeyCode == Keys.Enter)
+ {
+ this.Close();
+ }
+ }
+ }
+}
diff --git a/chromium/tools/stats_viewer/OpenDialog.resx b/chromium/tools/stats_viewer/OpenDialog.resx
new file mode 100644
index 00000000000..19dc0dd8b39
--- /dev/null
+++ b/chromium/tools/stats_viewer/OpenDialog.resx
@@ -0,0 +1,120 @@
+<?xml version="1.0" encoding="utf-8"?>
+<root>
+ <!--
+ Microsoft ResX Schema
+
+ Version 2.0
+
+ The primary goals of this format is to allow a simple XML format
+ that is mostly human readable. The generation and parsing of the
+ various data types are done through the TypeConverter classes
+ associated with the data types.
+
+ Example:
+
+ ... ado.net/XML headers & schema ...
+ <resheader name="resmimetype">text/microsoft-resx</resheader>
+ <resheader name="version">2.0</resheader>
+ <resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
+ <resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
+ <data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
+ <data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
+ <data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
+ <value>[base64 mime encoded serialized .NET Framework object]</value>
+ </data>
+ <data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
+ <value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
+ <comment>This is a comment</comment>
+ </data>
+
+ There are any number of "resheader" rows that contain simple
+ name/value pairs.
+
+ Each data row contains a name, and value. The row also contains a
+ type or mimetype. Type corresponds to a .NET class that support
+ text/value conversion through the TypeConverter architecture.
+ Classes that don't support this are serialized and stored with the
+ mimetype set.
+
+ The mimetype is used for serialized objects, and tells the
+ ResXResourceReader how to depersist the object. This is currently not
+ extensible. For a given mimetype the value must be set accordingly:
+
+ Note - application/x-microsoft.net.object.binary.base64 is the format
+ that the ResXResourceWriter will generate, however the reader can
+ read any of the formats listed below.
+
+ mimetype: application/x-microsoft.net.object.binary.base64
+ value : The object must be serialized with
+ : System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.soap.base64
+ value : The object must be serialized with
+ : System.Runtime.Serialization.Formatters.Soap.SoapFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.bytearray.base64
+ value : The object must be serialized into a byte array
+ : using a System.ComponentModel.TypeConverter
+ : and then encoded with base64 encoding.
+ -->
+ <xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
+ <xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
+ <xsd:element name="root" msdata:IsDataSet="true">
+ <xsd:complexType>
+ <xsd:choice maxOccurs="unbounded">
+ <xsd:element name="metadata">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" />
+ </xsd:sequence>
+ <xsd:attribute name="name" use="required" type="xsd:string" />
+ <xsd:attribute name="type" type="xsd:string" />
+ <xsd:attribute name="mimetype" type="xsd:string" />
+ <xsd:attribute ref="xml:space" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="assembly">
+ <xsd:complexType>
+ <xsd:attribute name="alias" type="xsd:string" />
+ <xsd:attribute name="name" type="xsd:string" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="data">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ <xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
+ <xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
+ <xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
+ <xsd:attribute ref="xml:space" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="resheader">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" />
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:choice>
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:schema>
+ <resheader name="resmimetype">
+ <value>text/microsoft-resx</value>
+ </resheader>
+ <resheader name="version">
+ <value>2.0</value>
+ </resheader>
+ <resheader name="reader">
+ <value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+ <resheader name="writer">
+ <value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+</root> \ No newline at end of file
diff --git a/chromium/tools/stats_viewer/Properties/AssemblyInfo.cs b/chromium/tools/stats_viewer/Properties/AssemblyInfo.cs
new file mode 100644
index 00000000000..bf1e57de929
--- /dev/null
+++ b/chromium/tools/stats_viewer/Properties/AssemblyInfo.cs
@@ -0,0 +1,33 @@
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("StatsViewer")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("Google")]
+[assembly: AssemblyProduct("StatsViewer")]
+[assembly: AssemblyCopyright("Copyright © Google 2007")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible
+// to COM components. If you need to access a type in this assembly from
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("0c5760e1-3ada-48e2-800d-5b104bef4a95")]
+
+// Version information for an assembly consists of the following four values:
+//
+// Major Version
+// Minor Version
+// Build Number
+// Revision
+//
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/chromium/tools/stats_viewer/Properties/Resources.Designer.cs b/chromium/tools/stats_viewer/Properties/Resources.Designer.cs
new file mode 100644
index 00000000000..d3789b7d69e
--- /dev/null
+++ b/chromium/tools/stats_viewer/Properties/Resources.Designer.cs
@@ -0,0 +1,71 @@
+//------------------------------------------------------------------------------
+// <auto-generated>
+// This code was generated by a tool.
+// Runtime Version:2.0.50727.832
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+// </auto-generated>
+//------------------------------------------------------------------------------
+
+namespace StatsViewer.Properties
+{
+
+
+ /// <summary>
+ /// A strongly-typed resource class, for looking up localized strings, etc.
+ /// </summary>
+ // This class was auto-generated by the StronglyTypedResourceBuilder
+ // class via a tool like ResGen or Visual Studio.
+ // To add or remove a member, edit your .ResX file then rerun ResGen
+ // with the /str option, or rebuild your VS project.
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "2.0.0.0")]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ internal class Resources
+ {
+
+ private static global::System.Resources.ResourceManager resourceMan;
+
+ private static global::System.Globalization.CultureInfo resourceCulture;
+
+ [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
+ internal Resources()
+ {
+ }
+
+ /// <summary>
+ /// Returns the cached ResourceManager instance used by this class.
+ /// </summary>
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Resources.ResourceManager ResourceManager
+ {
+ get
+ {
+ if ((resourceMan == null))
+ {
+ global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("StatsViewer.Properties.Resources", typeof(Resources).Assembly);
+ resourceMan = temp;
+ }
+ return resourceMan;
+ }
+ }
+
+ /// <summary>
+ /// Overrides the current thread's CurrentUICulture property for all
+ /// resource lookups using this strongly typed resource class.
+ /// </summary>
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Globalization.CultureInfo Culture
+ {
+ get
+ {
+ return resourceCulture;
+ }
+ set
+ {
+ resourceCulture = value;
+ }
+ }
+ }
+}
diff --git a/chromium/tools/stats_viewer/Properties/Resources.resx b/chromium/tools/stats_viewer/Properties/Resources.resx
new file mode 100644
index 00000000000..af7dbebbace
--- /dev/null
+++ b/chromium/tools/stats_viewer/Properties/Resources.resx
@@ -0,0 +1,117 @@
+<?xml version="1.0" encoding="utf-8"?>
+<root>
+ <!--
+ Microsoft ResX Schema
+
+ Version 2.0
+
+ The primary goals of this format is to allow a simple XML format
+ that is mostly human readable. The generation and parsing of the
+ various data types are done through the TypeConverter classes
+ associated with the data types.
+
+ Example:
+
+ ... ado.net/XML headers & schema ...
+ <resheader name="resmimetype">text/microsoft-resx</resheader>
+ <resheader name="version">2.0</resheader>
+ <resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
+ <resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
+ <data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
+ <data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
+ <data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
+ <value>[base64 mime encoded serialized .NET Framework object]</value>
+ </data>
+ <data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
+ <value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
+ <comment>This is a comment</comment>
+ </data>
+
+ There are any number of "resheader" rows that contain simple
+ name/value pairs.
+
+ Each data row contains a name, and value. The row also contains a
+ type or mimetype. Type corresponds to a .NET class that support
+ text/value conversion through the TypeConverter architecture.
+ Classes that don't support this are serialized and stored with the
+ mimetype set.
+
+ The mimetype is used for serialized objects, and tells the
+ ResXResourceReader how to depersist the object. This is currently not
+ extensible. For a given mimetype the value must be set accordingly:
+
+ Note - application/x-microsoft.net.object.binary.base64 is the format
+ that the ResXResourceWriter will generate, however the reader can
+ read any of the formats listed below.
+
+ mimetype: application/x-microsoft.net.object.binary.base64
+ value : The object must be serialized with
+ : System.Serialization.Formatters.Binary.BinaryFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.soap.base64
+ value : The object must be serialized with
+ : System.Runtime.Serialization.Formatters.Soap.SoapFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.bytearray.base64
+ value : The object must be serialized into a byte array
+ : using a System.ComponentModel.TypeConverter
+ : and then encoded with base64 encoding.
+ -->
+ <xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
+ <xsd:element name="root" msdata:IsDataSet="true">
+ <xsd:complexType>
+ <xsd:choice maxOccurs="unbounded">
+ <xsd:element name="metadata">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" />
+ <xsd:attribute name="type" type="xsd:string" />
+ <xsd:attribute name="mimetype" type="xsd:string" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="assembly">
+ <xsd:complexType>
+ <xsd:attribute name="alias" type="xsd:string" />
+ <xsd:attribute name="name" type="xsd:string" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="data">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ <xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" msdata:Ordinal="1" />
+ <xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
+ <xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="resheader">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" />
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:choice>
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:schema>
+ <resheader name="resmimetype">
+ <value>text/microsoft-resx</value>
+ </resheader>
+ <resheader name="version">
+ <value>2.0</value>
+ </resheader>
+ <resheader name="reader">
+ <value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+ <resheader name="writer">
+ <value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+</root> \ No newline at end of file
diff --git a/chromium/tools/stats_viewer/Properties/Settings.Designer.cs b/chromium/tools/stats_viewer/Properties/Settings.Designer.cs
new file mode 100644
index 00000000000..f59b4284a67
--- /dev/null
+++ b/chromium/tools/stats_viewer/Properties/Settings.Designer.cs
@@ -0,0 +1,30 @@
+//------------------------------------------------------------------------------
+// <auto-generated>
+// This code was generated by a tool.
+// Runtime Version:2.0.50727.832
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+// </auto-generated>
+//------------------------------------------------------------------------------
+
+namespace StatsViewer.Properties
+{
+
+
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "8.0.0.0")]
+ internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase
+ {
+
+ private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
+
+ public static Settings Default
+ {
+ get
+ {
+ return defaultInstance;
+ }
+ }
+ }
+}
diff --git a/chromium/tools/stats_viewer/Properties/Settings.settings b/chromium/tools/stats_viewer/Properties/Settings.settings
new file mode 100644
index 00000000000..39645652af6
--- /dev/null
+++ b/chromium/tools/stats_viewer/Properties/Settings.settings
@@ -0,0 +1,7 @@
+<?xml version='1.0' encoding='utf-8'?>
+<SettingsFile xmlns="http://schemas.microsoft.com/VisualStudio/2004/01/settings" CurrentProfile="(Default)">
+ <Profiles>
+ <Profile Name="(Default)" />
+ </Profiles>
+ <Settings />
+</SettingsFile>
diff --git a/chromium/tools/stats_viewer/Resources.Designer.cs b/chromium/tools/stats_viewer/Resources.Designer.cs
new file mode 100644
index 00000000000..94f8c72018e
--- /dev/null
+++ b/chromium/tools/stats_viewer/Resources.Designer.cs
@@ -0,0 +1,77 @@
+//------------------------------------------------------------------------------
+// <auto-generated>
+// This code was generated by a tool.
+// Runtime Version:2.0.50727.832
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+// </auto-generated>
+//------------------------------------------------------------------------------
+
+namespace StatsViewer {
+ using System;
+
+
+ /// <summary>
+ /// A strongly-typed resource class, for looking up localized strings, etc.
+ /// </summary>
+ // This class was auto-generated by the StronglyTypedResourceBuilder
+ // class via a tool like ResGen or Visual Studio.
+ // To add or remove a member, edit your .ResX file then rerun ResGen
+ // with the /str option, or rebuild your VS project.
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "2.0.0.0")]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ internal class Resources {
+
+ private static global::System.Resources.ResourceManager resourceMan;
+
+ private static global::System.Globalization.CultureInfo resourceCulture;
+
+ [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
+ internal Resources() {
+ }
+
+ /// <summary>
+ /// Returns the cached ResourceManager instance used by this class.
+ /// </summary>
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Resources.ResourceManager ResourceManager {
+ get {
+ if (object.ReferenceEquals(resourceMan, null)) {
+ global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("StatsViewer.Resources", typeof(Resources).Assembly);
+ resourceMan = temp;
+ }
+ return resourceMan;
+ }
+ }
+
+ /// <summary>
+ /// Overrides the current thread's CurrentUICulture property for all
+ /// resource lookups using this strongly typed resource class.
+ /// </summary>
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Globalization.CultureInfo Culture {
+ get {
+ return resourceCulture;
+ }
+ set {
+ resourceCulture = value;
+ }
+ }
+
+ internal static System.Drawing.Bitmap kitten {
+ get {
+ object obj = ResourceManager.GetObject("kitten", resourceCulture);
+ return ((System.Drawing.Bitmap)(obj));
+ }
+ }
+
+ internal static System.Drawing.Bitmap kittenbackground {
+ get {
+ object obj = ResourceManager.GetObject("kittenbackground", resourceCulture);
+ return ((System.Drawing.Bitmap)(obj));
+ }
+ }
+ }
+}
diff --git a/chromium/tools/stats_viewer/Resources.resx b/chromium/tools/stats_viewer/Resources.resx
new file mode 100644
index 00000000000..0a76e3a8b03
--- /dev/null
+++ b/chromium/tools/stats_viewer/Resources.resx
@@ -0,0 +1,127 @@
+<?xml version="1.0" encoding="utf-8"?>
+<root>
+ <!--
+ Microsoft ResX Schema
+
+ Version 2.0
+
+ The primary goals of this format is to allow a simple XML format
+ that is mostly human readable. The generation and parsing of the
+ various data types are done through the TypeConverter classes
+ associated with the data types.
+
+ Example:
+
+ ... ado.net/XML headers & schema ...
+ <resheader name="resmimetype">text/microsoft-resx</resheader>
+ <resheader name="version">2.0</resheader>
+ <resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
+ <resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
+ <data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
+ <data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
+ <data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
+ <value>[base64 mime encoded serialized .NET Framework object]</value>
+ </data>
+ <data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
+ <value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
+ <comment>This is a comment</comment>
+ </data>
+
+ There are any number of "resheader" rows that contain simple
+ name/value pairs.
+
+ Each data row contains a name, and value. The row also contains a
+ type or mimetype. Type corresponds to a .NET class that support
+ text/value conversion through the TypeConverter architecture.
+ Classes that don't support this are serialized and stored with the
+ mimetype set.
+
+ The mimetype is used for serialized objects, and tells the
+ ResXResourceReader how to depersist the object. This is currently not
+ extensible. For a given mimetype the value must be set accordingly:
+
+ Note - application/x-microsoft.net.object.binary.base64 is the format
+ that the ResXResourceWriter will generate, however the reader can
+ read any of the formats listed below.
+
+ mimetype: application/x-microsoft.net.object.binary.base64
+ value : The object must be serialized with
+ : System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.soap.base64
+ value : The object must be serialized with
+ : System.Runtime.Serialization.Formatters.Soap.SoapFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.bytearray.base64
+ value : The object must be serialized into a byte array
+ : using a System.ComponentModel.TypeConverter
+ : and then encoded with base64 encoding.
+ -->
+ <xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
+ <xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
+ <xsd:element name="root" msdata:IsDataSet="true">
+ <xsd:complexType>
+ <xsd:choice maxOccurs="unbounded">
+ <xsd:element name="metadata">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" />
+ </xsd:sequence>
+ <xsd:attribute name="name" use="required" type="xsd:string" />
+ <xsd:attribute name="type" type="xsd:string" />
+ <xsd:attribute name="mimetype" type="xsd:string" />
+ <xsd:attribute ref="xml:space" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="assembly">
+ <xsd:complexType>
+ <xsd:attribute name="alias" type="xsd:string" />
+ <xsd:attribute name="name" type="xsd:string" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="data">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ <xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
+ <xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
+ <xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
+ <xsd:attribute ref="xml:space" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="resheader">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" />
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:choice>
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:schema>
+ <resheader name="resmimetype">
+ <value>text/microsoft-resx</value>
+ </resheader>
+ <resheader name="version">
+ <value>2.0</value>
+ </resheader>
+ <resheader name="reader">
+ <value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+ <resheader name="writer">
+ <value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+ <assembly alias="System.Windows.Forms" name="System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" />
+ <data name="kitten" type="System.Resources.ResXFileRef, System.Windows.Forms">
+ <value>Resources\kitten.png;System.Drawing.Bitmap, System.Drawing, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a</value>
+ </data>
+ <data name="kittenbackground" type="System.Resources.ResXFileRef, System.Windows.Forms">
+ <value>Resources\kittenbackground.png;System.Drawing.Bitmap, System.Drawing, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a</value>
+ </data>
+</root> \ No newline at end of file
diff --git a/chromium/tools/stats_viewer/Resources/kitten.png b/chromium/tools/stats_viewer/Resources/kitten.png
new file mode 100644
index 00000000000..51cc9b8f51e
--- /dev/null
+++ b/chromium/tools/stats_viewer/Resources/kitten.png
Binary files differ
diff --git a/chromium/tools/stats_viewer/Resources/kittenbackground.png b/chromium/tools/stats_viewer/Resources/kittenbackground.png
new file mode 100644
index 00000000000..09a72687fd6
--- /dev/null
+++ b/chromium/tools/stats_viewer/Resources/kittenbackground.png
Binary files differ
diff --git a/chromium/tools/stats_viewer/program.cs b/chromium/tools/stats_viewer/program.cs
new file mode 100644
index 00000000000..8af1e416eb9
--- /dev/null
+++ b/chromium/tools/stats_viewer/program.cs
@@ -0,0 +1,23 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+using System;
+using System.Windows.Forms;
+
+namespace StatsViewer
+{
+ static class Program
+ {
+ /// <summary>
+ /// The main entry point for the application.
+ /// </summary>
+ [STAThread]
+ static void Main()
+ {
+ Application.EnableVisualStyles();
+ Application.SetCompatibleTextRenderingDefault(false);
+ Application.Run(new StatsViewer());
+ }
+ }
+}
diff --git a/chromium/tools/stats_viewer/stats_table.cs b/chromium/tools/stats_viewer/stats_table.cs
new file mode 100644
index 00000000000..1ba0e213aa5
--- /dev/null
+++ b/chromium/tools/stats_viewer/stats_table.cs
@@ -0,0 +1,546 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Diagnostics;
+using System.Runtime.InteropServices;
+using System.Text;
+
+namespace StatsViewer
+{
+ /// <summary>
+ /// The stats table shared memory segment contains this
+ /// header structure.
+ /// </summary>
+ [StructLayout(LayoutKind.Sequential)]
+ internal struct StatsFileHeader {
+ public int version;
+ public int size;
+ public int max_counters;
+ public int max_threads;
+ };
+
+ /// <summary>
+ /// An entry in the StatsTable.
+ /// </summary>
+ class StatsTableEntry {
+ public StatsTableEntry(int id, string name, StatsTable table) {
+ id_ = id;
+ name_ = name;
+ table_ = table;
+ }
+
+ /// <summary>
+ /// The unique id for this entry
+ /// </summary>
+ public int id { get { return id_; } }
+
+ /// <summary>
+ /// The name for this entry.
+ /// </summary>
+ public string name { get { return name_; } }
+
+ /// <summary>
+ /// The value of this entry now.
+ /// </summary>
+ public int GetValue(int filter_pid) {
+ return table_.GetValue(id_, filter_pid);
+ }
+
+ private int id_;
+ private string name_;
+ private StatsTable table_;
+ }
+
+ // An interface for StatsCounters
+ interface IStatsCounter {
+ // The name of the counter
+ string name { get; }
+ }
+
+ // A counter.
+ class StatsCounter : IStatsCounter {
+ public StatsCounter(StatsTableEntry entry) {
+ entry_ = entry;
+ }
+
+ public string name {
+ get {
+ return entry_.name;
+ }
+ }
+
+ public int GetValue(int filter_pid) {
+ return entry_.GetValue(filter_pid);
+ }
+
+ private StatsTableEntry entry_;
+ }
+
+ // A timer.
+ class StatsTimer : IStatsCounter {
+ public StatsTimer(StatsTableEntry entry)
+ {
+ entry_ = entry;
+ }
+
+ public string name {
+ get {
+ return entry_.name;
+ }
+ }
+
+ public int GetValue(int filter_pid) {
+ return entry_.GetValue(filter_pid);
+ }
+
+ private StatsTableEntry entry_;
+ }
+
+ // A rate.
+ class StatsCounterRate : IStatsCounter
+ {
+ public StatsCounterRate(StatsCounter counter, StatsTimer timer) {
+ counter_ = counter;
+ timer_ = timer;
+ }
+
+ public string name { get { return counter_.name; } }
+
+ public int GetCount(int filter_pid) {
+ return counter_.GetValue(filter_pid);
+ }
+
+ public int GetTime(int filter_pid) {
+ return timer_.GetValue(filter_pid);
+ }
+
+ private StatsCounter counter_;
+ private StatsTimer timer_;
+ }
+
+ /// <summary>
+ /// This is a C# reader for the chrome stats_table.
+ /// </summary>
+ class StatsTable {
+ internal const int kMaxThreadNameLength = 32;
+ internal const int kMaxCounterNameLength = 32;
+
+ /// <summary>
+ /// Open a StatsTable
+ /// </summary>
+ public StatsTable() {
+ }
+
+ #region Public Properties
+ /// <summary>
+ /// Get access to the counters in the table.
+ /// </summary>
+ public StatsTableCounters Counters() {
+ return new StatsTableCounters(this);
+ }
+
+ /// <summary>
+ /// Get access to the processes in the table
+ /// </summary>
+ public ICollection Processes {
+ get {
+ return new StatsTableProcesses(this);
+ }
+ }
+ #endregion
+
+ #region Internal Properties
+ //
+ // The internal methods are accessible to the enumerators
+ // and helper classes below.
+ //
+
+ /// <summary>
+ /// Access to the table header
+ /// </summary>
+ internal StatsFileHeader Header {
+ get { return header_; }
+ }
+
+ /// <summary>
+ /// Get the offset of the ThreadName table
+ /// </summary>
+ internal long ThreadNamesOffset {
+ get {
+ return memory_.ToInt64() + Marshal.SizeOf(typeof(StatsFileHeader));
+ }
+ }
+
+ /// <summary>
+ /// Get the offset of the PIDs table
+ /// </summary>
+ internal long PidsOffset {
+ get {
+ long offset = ThreadNamesOffset;
+ // Thread names table
+ offset += AlignedSize(header_.max_threads * kMaxThreadNameLength * 2);
+ // Thread TID table
+ offset += AlignedSize(header_.max_threads *
+ Marshal.SizeOf(typeof(int)));
+ return offset;
+ }
+ }
+
+ /// <summary>
+ /// Get the offset of the CounterName table
+ /// </summary>
+ internal long CounterNamesOffset {
+ get {
+ long offset = PidsOffset;
+ // Thread PID table
+ offset += AlignedSize(header_.max_threads *
+ Marshal.SizeOf(typeof(int)));
+ return offset;
+ }
+ }
+
+ /// <summary>
+ /// Get the offset of the Data table
+ /// </summary>
+ internal long DataOffset {
+ get {
+ long offset = CounterNamesOffset;
+ // Counter names table
+ offset += AlignedSize(header_.max_counters *
+ kMaxCounterNameLength * 2);
+ return offset;
+ }
+ }
+ #endregion
+
+ #region Public Methods
+ /// <summary>
+ /// Opens the memory map
+ /// </summary>
+ /// <returns></returns>
+ /// <param name="name">The name of the file to open</param>
+ public bool Open(string name) {
+ map_handle_ =
+ Win32.OpenFileMapping((int)Win32.MapAccess.FILE_MAP_WRITE, false,
+ name);
+ if (map_handle_ == IntPtr.Zero)
+ return false;
+
+ memory_ =
+ Win32.MapViewOfFile(map_handle_, (int)Win32.MapAccess.FILE_MAP_WRITE,
+ 0,0, 0);
+ if (memory_ == IntPtr.Zero) {
+ Win32.CloseHandle(map_handle_);
+ return false;
+ }
+
+ header_ = (StatsFileHeader)Marshal.PtrToStructure(memory_, header_.GetType());
+ return true;
+ }
+
+ /// <summary>
+ /// Close the mapped file.
+ /// </summary>
+ public void Close() {
+ Win32.UnmapViewOfFile(memory_);
+ Win32.CloseHandle(map_handle_);
+ }
+
+ /// <summary>
+ /// Zero out the stats file.
+ /// </summary>
+ public void Zero() {
+ long offset = DataOffset;
+ for (int threads = 0; threads < header_.max_threads; threads++) {
+ for (int counters = 0; counters < header_.max_counters; counters++) {
+ Marshal.WriteInt32((IntPtr) offset, 0);
+ offset += Marshal.SizeOf(typeof(int));
+ }
+ }
+ }
+
+ /// <summary>
+ /// Get the value for a StatsCounterEntry now.
+ /// </summary>
+ /// <returns></returns>
+ /// <param name="filter_pid">If a specific PID is being queried, filter to this PID. 0 means use all data.</param>
+ /// <param name="id">The id of the CounterEntry to get the value for.</param>
+ public int GetValue(int id, int filter_pid) {
+ long pid_offset = PidsOffset;
+ long data_offset = DataOffset;
+ data_offset += id * (Header.max_threads *
+ Marshal.SizeOf(typeof(int)));
+ int rv = 0;
+ for (int cols = 0; cols < Header.max_threads; cols++)
+ {
+ int pid = Marshal.ReadInt32((IntPtr)pid_offset);
+ if (filter_pid == 0 || filter_pid == pid)
+ {
+ rv += Marshal.ReadInt32((IntPtr)data_offset);
+ }
+ data_offset += Marshal.SizeOf(typeof(int));
+ pid_offset += Marshal.SizeOf(typeof(int));
+ }
+ return rv;
+ }
+ #endregion
+
+ #region Private Methods
+ /// <summary>
+ /// Align to 4-byte boundaries
+ /// </summary>
+ /// <param name="size"></param>
+ /// <returns></returns>
+ private long AlignedSize(long size) {
+ Debug.Assert(sizeof(int) == 4);
+ return size + (sizeof(int) - (size % sizeof(int))) % sizeof(int);
+ }
+ #endregion
+
+ #region Private Members
+ private IntPtr memory_;
+ private IntPtr map_handle_;
+ private StatsFileHeader header_;
+ #endregion
+ }
+
+ /// <summary>
+ /// Enumerable list of Counters in the StatsTable
+ /// </summary>
+ class StatsTableCounters : ICollection {
+ /// <summary>
+ /// Create the list of counters
+ /// </summary>
+ /// <param name="table"></param>
+ /// pid</param>
+ public StatsTableCounters(StatsTable table) {
+ table_ = table;
+ counter_hi_water_mark_ = -1;
+ counters_ = new List<IStatsCounter>();
+ FindCounters();
+ }
+
+ /// <summary>
+ /// Scans the table for new entries.
+ /// </summary>
+ public void Update() {
+ FindCounters();
+ }
+
+ #region IEnumerable Members
+ public IEnumerator GetEnumerator() {
+ return counters_.GetEnumerator();
+ }
+ #endregion
+
+ #region ICollection Members
+ public void CopyTo(Array array, int index) {
+ throw new Exception("The method or operation is not implemented.");
+ }
+
+ public int Count {
+ get {
+ return counters_.Count;
+ }
+ }
+
+ public bool IsSynchronized {
+ get {
+ throw new Exception("The method or operation is not implemented.");
+ }
+ }
+
+ public object SyncRoot {
+ get {
+ throw new Exception("The method or operation is not implemented.");
+ }
+ }
+ #endregion
+
+ #region Private Methods
+ /// <summary>
+ /// Create a counter based on an entry
+ /// </summary>
+ /// <param name="id"></param>
+ /// <param name="name"></param>
+ /// <returns></returns>
+ private IStatsCounter NameToCounter(int id, string name)
+ {
+ IStatsCounter rv = null;
+
+ // check if the name has a type encoded
+ if (name.Length > 2 && name[1] == ':')
+ {
+ StatsTableEntry entry = new StatsTableEntry(id, name.Substring(2), table_);
+ switch (name[0])
+ {
+ case 't':
+ rv = new StatsTimer(entry);
+ break;
+ case 'c':
+ rv = new StatsCounter(entry);
+ break;
+ }
+ }
+ else
+ {
+ StatsTableEntry entry = new StatsTableEntry(id, name, table_);
+ rv = new StatsCounter(entry);
+ }
+
+ return rv;
+ }
+
+ // If we have two StatsTableEntries with the same name,
+ // attempt to upgrade them to a higher level type.
+ // Example: A counter + a timer == a rate!
+ private void UpgradeCounter(IStatsCounter old_counter, IStatsCounter counter)
+ {
+ if (old_counter is StatsCounter && counter is StatsTimer)
+ {
+ StatsCounterRate rate = new StatsCounterRate(old_counter as StatsCounter,
+ counter as StatsTimer);
+ counters_.Remove(old_counter);
+ counters_.Add(rate);
+ }
+ else if (old_counter is StatsTimer && counter is StatsCounter)
+ {
+ StatsCounterRate rate = new StatsCounterRate(counter as StatsCounter,
+ old_counter as StatsTimer);
+ counters_.Remove(old_counter);
+ counters_.Add(rate);
+ }
+ }
+
+ /// <summary>
+ /// Find the counters in the table and insert into the counters_
+ /// hash table.
+ /// </summary>
+ private void FindCounters()
+ {
+ Debug.Assert(table_.Header.max_counters > 0);
+
+ int index = counter_hi_water_mark_;
+
+ do
+ {
+ // Find an entry in the table.
+ index++;
+ long offset = table_.CounterNamesOffset +
+ (index * StatsTable.kMaxCounterNameLength * 2);
+ string name = Marshal.PtrToStringUni((IntPtr)offset);
+ if (name.Length == 0)
+ continue;
+
+ // Record that we've already looked at this StatsTableEntry.
+ counter_hi_water_mark_ = index;
+
+ IStatsCounter counter = NameToCounter(index, name);
+
+ if (counter != null)
+ {
+ IStatsCounter old_counter = FindExistingCounter(counter.name);
+ if (old_counter != null)
+ UpgradeCounter(old_counter, counter);
+ else
+ counters_.Add(counter);
+ }
+ } while (index < table_.Header.max_counters - 1);
+ }
+
+ /// <summary>
+ /// Find an existing counter in our table
+ /// </summary>
+ /// <param name="name"></param>
+ private IStatsCounter FindExistingCounter(string name) {
+ foreach (IStatsCounter ctr in counters_)
+ {
+ if (ctr.name == name)
+ return ctr;
+ }
+ return null;
+ }
+ #endregion
+
+ #region Private Members
+ private StatsTable table_;
+ private List<IStatsCounter> counters_;
+ // Highest index of counters processed.
+ private int counter_hi_water_mark_;
+ #endregion
+ }
+
+ /// <summary>
+ /// A collection of processes
+ /// </summary>
+ class StatsTableProcesses : ICollection
+ {
+ /// <summary>
+ /// Constructor
+ /// </summary>
+ /// <param name="table"></param>
+ public StatsTableProcesses(StatsTable table) {
+ table_ = table;
+ pids_ = new List<int>();
+ Initialize();
+ }
+
+ #region ICollection Members
+ public void CopyTo(Array array, int index) {
+ throw new Exception("The method or operation is not implemented.");
+ }
+
+ public int Count {
+ get {
+ return pids_.Count;
+ }
+ }
+
+ public bool IsSynchronized {
+ get {
+ throw new Exception("The method or operation is not implemented.");
+ }
+ }
+
+ public object SyncRoot {
+ get {
+ throw new Exception("The method or operation is not implemented.");
+ }
+ }
+ #endregion
+
+ #region IEnumerable Members
+ public IEnumerator GetEnumerator() {
+ return pids_.GetEnumerator();
+ }
+ #endregion
+
+ /// <summary>
+ /// Initialize the pid list.
+ /// </summary>
+ private void Initialize() {
+ long offset = table_.ThreadNamesOffset;
+
+ for (int index = 0; index < table_.Header.max_threads; index++) {
+ string thread_name = Marshal.PtrToStringUni((IntPtr)offset);
+ if (thread_name.Length > 0) {
+ long pidOffset = table_.PidsOffset + index *
+ Marshal.SizeOf(typeof(int));
+ int pid = Marshal.ReadInt32((IntPtr)pidOffset);
+ if (!pids_.Contains(pid))
+ pids_.Add(pid);
+ }
+ offset += StatsTable.kMaxThreadNameLength * 2;
+ }
+ }
+
+ #region Private Members
+ private StatsTable table_;
+ private List<int> pids_;
+ #endregion
+ }
+}
diff --git a/chromium/tools/stats_viewer/stats_viewer.Designer.cs b/chromium/tools/stats_viewer/stats_viewer.Designer.cs
new file mode 100644
index 00000000000..1e6c038f082
--- /dev/null
+++ b/chromium/tools/stats_viewer/stats_viewer.Designer.cs
@@ -0,0 +1,392 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+namespace StatsViewer
+{
+ partial class StatsViewer
+ {
+ /// <summary>
+ /// Required designer variable.
+ /// </summary>
+ private System.ComponentModel.IContainer components = null;
+
+ /// <summary>
+ /// Clean up any resources being used.
+ /// </summary>
+ /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
+ protected override void Dispose(bool disposing)
+ {
+ if (disposing && (components != null))
+ {
+ components.Dispose();
+ }
+ base.Dispose(disposing);
+ }
+
+ #region Windows Form Designer generated code
+
+ /// <summary>
+ /// Required method for Designer support - do not modify
+ /// the contents of this method with the code editor.
+ /// </summary>
+ private void InitializeComponent()
+ {
+ System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(StatsViewer));
+ this.listViewCounters = new System.Windows.Forms.ListView();
+ this.columnHeaderName = new System.Windows.Forms.ColumnHeader();
+ this.columnHeaderValue = new System.Windows.Forms.ColumnHeader();
+ this.columnHeaderDelta = new System.Windows.Forms.ColumnHeader();
+ this.pictureBoxTitle = new System.Windows.Forms.PictureBox();
+ this.panelHeader = new System.Windows.Forms.Panel();
+ this.labelKills = new System.Windows.Forms.Label();
+ this.label1 = new System.Windows.Forms.Label();
+ this.labelInterval = new System.Windows.Forms.Label();
+ this.comboBoxFilter = new System.Windows.Forms.ComboBox();
+ this.panelControls = new System.Windows.Forms.Panel();
+ this.buttonExport = new System.Windows.Forms.Button();
+ this.buttonZero = new System.Windows.Forms.Button();
+ this.comboBoxInterval = new System.Windows.Forms.ComboBox();
+ this.labelFilter = new System.Windows.Forms.Label();
+ this.saveFileDialogExport = new System.Windows.Forms.SaveFileDialog();
+ this.menuStrip1 = new System.Windows.Forms.MenuStrip();
+ this.fileToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem();
+ this.openToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem();
+ this.closeToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem();
+ this.quitToolStripMenuItem = new System.Windows.Forms.ToolStripMenuItem();
+ this.listViewRates = new System.Windows.Forms.ListView();
+ this.columnHeaderRateName = new System.Windows.Forms.ColumnHeader();
+ this.columnHeaderRateCount = new System.Windows.Forms.ColumnHeader();
+ this.columnHeaderRateTotaltime = new System.Windows.Forms.ColumnHeader();
+ this.columnHeaderRateAvgTime = new System.Windows.Forms.ColumnHeader();
+ this.splitContainer1 = new System.Windows.Forms.SplitContainer();
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBoxTitle)).BeginInit();
+ this.panelHeader.SuspendLayout();
+ this.panelControls.SuspendLayout();
+ this.menuStrip1.SuspendLayout();
+ this.splitContainer1.Panel1.SuspendLayout();
+ this.splitContainer1.Panel2.SuspendLayout();
+ this.splitContainer1.SuspendLayout();
+ this.SuspendLayout();
+ //
+ // listViewCounters
+ //
+ this.listViewCounters.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] {
+ this.columnHeaderName,
+ this.columnHeaderValue,
+ this.columnHeaderDelta});
+ this.listViewCounters.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.listViewCounters.FullRowSelect = true;
+ this.listViewCounters.Location = new System.Drawing.Point(0, 0);
+ this.listViewCounters.Name = "listViewCounters";
+ this.listViewCounters.Size = new System.Drawing.Size(505, 221);
+ this.listViewCounters.Sorting = System.Windows.Forms.SortOrder.Descending;
+ this.listViewCounters.TabIndex = 0;
+ this.listViewCounters.UseCompatibleStateImageBehavior = false;
+ this.listViewCounters.View = System.Windows.Forms.View.Details;
+ this.listViewCounters.ColumnClick += new System.Windows.Forms.ColumnClickEventHandler(this.column_Click);
+ //
+ // columnHeaderName
+ //
+ this.columnHeaderName.Text = "Counter Name";
+ this.columnHeaderName.Width = 203;
+ //
+ // columnHeaderValue
+ //
+ this.columnHeaderValue.Text = "Value";
+ this.columnHeaderValue.Width = 69;
+ //
+ // columnHeaderDelta
+ //
+ this.columnHeaderDelta.Text = "Delta";
+ this.columnHeaderDelta.Width = 86;
+ //
+ // pictureBoxTitle
+ //
+ this.pictureBoxTitle.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
+ this.pictureBoxTitle.BackColor = System.Drawing.Color.Transparent;
+ this.pictureBoxTitle.Image = ((System.Drawing.Image)(resources.GetObject("pictureBoxTitle.Image")));
+ this.pictureBoxTitle.Location = new System.Drawing.Point(257, 0);
+ this.pictureBoxTitle.Name = "pictureBoxTitle";
+ this.pictureBoxTitle.Size = new System.Drawing.Size(248, 86);
+ this.pictureBoxTitle.TabIndex = 1;
+ this.pictureBoxTitle.TabStop = false;
+ //
+ // panelHeader
+ //
+ this.panelHeader.BackgroundImage = ((System.Drawing.Image)(resources.GetObject("panelHeader.BackgroundImage")));
+ this.panelHeader.BackgroundImageLayout = System.Windows.Forms.ImageLayout.Stretch;
+ this.panelHeader.Controls.Add(this.labelKills);
+ this.panelHeader.Controls.Add(this.label1);
+ this.panelHeader.Controls.Add(this.pictureBoxTitle);
+ this.panelHeader.Dock = System.Windows.Forms.DockStyle.Top;
+ this.panelHeader.Location = new System.Drawing.Point(0, 24);
+ this.panelHeader.Name = "panelHeader";
+ this.panelHeader.Size = new System.Drawing.Size(505, 86);
+ this.panelHeader.TabIndex = 2;
+ //
+ // labelKills
+ //
+ this.labelKills.AutoSize = true;
+ this.labelKills.BackColor = System.Drawing.Color.Transparent;
+ this.labelKills.Font = new System.Drawing.Font("Arial", 9.75F, System.Drawing.FontStyle.Italic, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
+ this.labelKills.Location = new System.Drawing.Point(12, 33);
+ this.labelKills.Name = "labelKills";
+ this.labelKills.Size = new System.Drawing.Size(280, 16);
+ this.labelKills.TabIndex = 3;
+ this.labelKills.Text = "During the World Wide Wait, God Kills Kittens.";
+ //
+ // label1
+ //
+ this.label1.AutoSize = true;
+ this.label1.BackColor = System.Drawing.Color.Transparent;
+ this.label1.Font = new System.Drawing.Font("Arial", 15.75F, System.Drawing.FontStyle.Bold, System.Drawing.GraphicsUnit.Point, ((byte)(0)));
+ this.label1.Location = new System.Drawing.Point(12, 9);
+ this.label1.Name = "label1";
+ this.label1.Size = new System.Drawing.Size(140, 24);
+ this.label1.TabIndex = 2;
+ this.label1.Text = "Chrome Varz";
+ //
+ // labelInterval
+ //
+ this.labelInterval.AutoSize = true;
+ this.labelInterval.Location = new System.Drawing.Point(11, 9);
+ this.labelInterval.Name = "labelInterval";
+ this.labelInterval.Size = new System.Drawing.Size(73, 13);
+ this.labelInterval.TabIndex = 3;
+ this.labelInterval.Text = "Interval (secs)";
+ //
+ // comboBoxFilter
+ //
+ this.comboBoxFilter.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
+ this.comboBoxFilter.FormattingEnabled = true;
+ this.comboBoxFilter.Location = new System.Drawing.Point(302, 5);
+ this.comboBoxFilter.Name = "comboBoxFilter";
+ this.comboBoxFilter.Size = new System.Drawing.Size(121, 21);
+ this.comboBoxFilter.TabIndex = 5;
+ this.comboBoxFilter.SelectedIndexChanged += new System.EventHandler(this.filter_changed);
+ this.comboBoxFilter.DropDownClosed += new System.EventHandler(this.mouse_Leave);
+ this.comboBoxFilter.DropDown += new System.EventHandler(this.mouse_Enter);
+ //
+ // panelControls
+ //
+ this.panelControls.Controls.Add(this.buttonExport);
+ this.panelControls.Controls.Add(this.buttonZero);
+ this.panelControls.Controls.Add(this.comboBoxInterval);
+ this.panelControls.Controls.Add(this.labelFilter);
+ this.panelControls.Controls.Add(this.comboBoxFilter);
+ this.panelControls.Controls.Add(this.labelInterval);
+ this.panelControls.Dock = System.Windows.Forms.DockStyle.Top;
+ this.panelControls.Location = new System.Drawing.Point(0, 110);
+ this.panelControls.Name = "panelControls";
+ this.panelControls.Size = new System.Drawing.Size(505, 32);
+ this.panelControls.TabIndex = 6;
+ //
+ // buttonExport
+ //
+ this.buttonExport.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
+ this.buttonExport.Location = new System.Drawing.Point(187, 4);
+ this.buttonExport.Name = "buttonExport";
+ this.buttonExport.Size = new System.Drawing.Size(75, 23);
+ this.buttonExport.TabIndex = 9;
+ this.buttonExport.Text = "Export";
+ this.buttonExport.UseVisualStyleBackColor = true;
+ this.buttonExport.Click += new System.EventHandler(this.buttonExport_Click);
+ //
+ // buttonZero
+ //
+ this.buttonZero.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
+ this.buttonZero.Location = new System.Drawing.Point(427, 4);
+ this.buttonZero.Name = "buttonZero";
+ this.buttonZero.Size = new System.Drawing.Size(75, 23);
+ this.buttonZero.TabIndex = 8;
+ this.buttonZero.Text = "Clear All";
+ this.buttonZero.UseVisualStyleBackColor = true;
+ this.buttonZero.Click += new System.EventHandler(this.buttonZero_Click);
+ //
+ // comboBoxInterval
+ //
+ this.comboBoxInterval.FormattingEnabled = true;
+ this.comboBoxInterval.Items.AddRange(new object[] {
+ "1",
+ "2",
+ "5",
+ "10",
+ "30",
+ "60"});
+ this.comboBoxInterval.Location = new System.Drawing.Point(84, 6);
+ this.comboBoxInterval.Name = "comboBoxInterval";
+ this.comboBoxInterval.Size = new System.Drawing.Size(55, 21);
+ this.comboBoxInterval.TabIndex = 7;
+ this.comboBoxInterval.Text = "1";
+ this.comboBoxInterval.SelectedIndexChanged += new System.EventHandler(this.interval_changed);
+ this.comboBoxInterval.DropDownClosed += new System.EventHandler(this.mouse_Leave);
+ this.comboBoxInterval.DropDown += new System.EventHandler(this.mouse_Enter);
+ //
+ // labelFilter
+ //
+ this.labelFilter.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
+ this.labelFilter.AutoSize = true;
+ this.labelFilter.Location = new System.Drawing.Point(268, 9);
+ this.labelFilter.Name = "labelFilter";
+ this.labelFilter.Size = new System.Drawing.Size(29, 13);
+ this.labelFilter.TabIndex = 6;
+ this.labelFilter.Text = "Filter";
+ //
+ // saveFileDialogExport
+ //
+ this.saveFileDialogExport.FileName = "results.txt";
+ //
+ // menuStrip1
+ //
+ this.menuStrip1.Items.AddRange(new System.Windows.Forms.ToolStripItem[] {
+ this.fileToolStripMenuItem});
+ this.menuStrip1.Location = new System.Drawing.Point(0, 0);
+ this.menuStrip1.Name = "menuStrip1";
+ this.menuStrip1.Size = new System.Drawing.Size(505, 24);
+ this.menuStrip1.TabIndex = 7;
+ this.menuStrip1.Text = "menuStrip1";
+ //
+ // fileToolStripMenuItem
+ //
+ this.fileToolStripMenuItem.DropDownItems.AddRange(new System.Windows.Forms.ToolStripItem[] {
+ this.openToolStripMenuItem,
+ this.closeToolStripMenuItem,
+ this.quitToolStripMenuItem});
+ this.fileToolStripMenuItem.Name = "fileToolStripMenuItem";
+ this.fileToolStripMenuItem.Size = new System.Drawing.Size(35, 20);
+ this.fileToolStripMenuItem.Text = "File";
+ //
+ // openToolStripMenuItem
+ //
+ this.openToolStripMenuItem.Name = "openToolStripMenuItem";
+ this.openToolStripMenuItem.Size = new System.Drawing.Size(111, 22);
+ this.openToolStripMenuItem.Text = "Open";
+ this.openToolStripMenuItem.Click += new System.EventHandler(this.openToolStripMenuItem_Click);
+ //
+ // closeToolStripMenuItem
+ //
+ this.closeToolStripMenuItem.Name = "closeToolStripMenuItem";
+ this.closeToolStripMenuItem.Size = new System.Drawing.Size(111, 22);
+ this.closeToolStripMenuItem.Text = "Close";
+ this.closeToolStripMenuItem.Click += new System.EventHandler(this.closeToolStripMenuItem_Click);
+ //
+ // quitToolStripMenuItem
+ //
+ this.quitToolStripMenuItem.Name = "quitToolStripMenuItem";
+ this.quitToolStripMenuItem.Size = new System.Drawing.Size(111, 22);
+ this.quitToolStripMenuItem.Text = "Quit";
+ this.quitToolStripMenuItem.Click += new System.EventHandler(this.quitToolStripMenuItem_Click);
+ //
+ // listViewRates
+ //
+ this.listViewRates.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] {
+ this.columnHeaderRateName,
+ this.columnHeaderRateCount,
+ this.columnHeaderRateTotaltime,
+ this.columnHeaderRateAvgTime});
+ this.listViewRates.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.listViewRates.FullRowSelect = true;
+ this.listViewRates.Location = new System.Drawing.Point(0, 0);
+ this.listViewRates.Name = "listViewRates";
+ this.listViewRates.Size = new System.Drawing.Size(505, 270);
+ this.listViewRates.Sorting = System.Windows.Forms.SortOrder.Descending;
+ this.listViewRates.TabIndex = 8;
+ this.listViewRates.UseCompatibleStateImageBehavior = false;
+ this.listViewRates.View = System.Windows.Forms.View.Details;
+ //
+ // columnHeaderRateName
+ //
+ this.columnHeaderRateName.Text = "Rate Name";
+ this.columnHeaderRateName.Width = 205;
+ //
+ // columnHeaderRateCount
+ //
+ this.columnHeaderRateCount.Text = "Count";
+ //
+ // columnHeaderRateTotaltime
+ //
+ this.columnHeaderRateTotaltime.Text = "Total Time (ms)";
+ this.columnHeaderRateTotaltime.Width = 100;
+ //
+ // columnHeaderRateAvgTime
+ //
+ this.columnHeaderRateAvgTime.Text = "Average Time (ms)";
+ this.columnHeaderRateAvgTime.Width = 110;
+ //
+ // splitContainer1
+ //
+ this.splitContainer1.Dock = System.Windows.Forms.DockStyle.Fill;
+ this.splitContainer1.Location = new System.Drawing.Point(0, 142);
+ this.splitContainer1.Name = "splitContainer1";
+ this.splitContainer1.Orientation = System.Windows.Forms.Orientation.Horizontal;
+ //
+ // splitContainer1.Panel1
+ //
+ this.splitContainer1.Panel1.Controls.Add(this.listViewCounters);
+ //
+ // splitContainer1.Panel2
+ //
+ this.splitContainer1.Panel2.Controls.Add(this.listViewRates);
+ this.splitContainer1.Size = new System.Drawing.Size(505, 495);
+ this.splitContainer1.SplitterDistance = 221;
+ this.splitContainer1.TabIndex = 9;
+ //
+ // StatsViewer
+ //
+ this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
+ this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
+ this.ClientSize = new System.Drawing.Size(505, 637);
+ this.Controls.Add(this.splitContainer1);
+ this.Controls.Add(this.panelControls);
+ this.Controls.Add(this.panelHeader);
+ this.Controls.Add(this.menuStrip1);
+ this.DoubleBuffered = true;
+ this.Name = "StatsViewer";
+ this.Text = "Chrome Varz";
+ ((System.ComponentModel.ISupportInitialize)(this.pictureBoxTitle)).EndInit();
+ this.panelHeader.ResumeLayout(false);
+ this.panelHeader.PerformLayout();
+ this.panelControls.ResumeLayout(false);
+ this.panelControls.PerformLayout();
+ this.menuStrip1.ResumeLayout(false);
+ this.menuStrip1.PerformLayout();
+ this.splitContainer1.Panel1.ResumeLayout(false);
+ this.splitContainer1.Panel2.ResumeLayout(false);
+ this.splitContainer1.ResumeLayout(false);
+ this.ResumeLayout(false);
+ this.PerformLayout();
+
+ }
+
+ #endregion
+
+ private System.Windows.Forms.ListView listViewCounters;
+ private System.Windows.Forms.ColumnHeader columnHeaderName;
+ private System.Windows.Forms.ColumnHeader columnHeaderValue;
+ private System.Windows.Forms.ColumnHeader columnHeaderDelta;
+ private System.Windows.Forms.PictureBox pictureBoxTitle;
+ private System.Windows.Forms.Panel panelHeader;
+ private System.Windows.Forms.Label label1;
+ private System.Windows.Forms.Label labelInterval;
+ private System.Windows.Forms.ComboBox comboBoxFilter;
+ private System.Windows.Forms.Panel panelControls;
+ private System.Windows.Forms.Label labelFilter;
+ private System.Windows.Forms.ComboBox comboBoxInterval;
+ private System.Windows.Forms.Label labelKills;
+ private System.Windows.Forms.Button buttonZero;
+ private System.Windows.Forms.Button buttonExport;
+ private System.Windows.Forms.SaveFileDialog saveFileDialogExport;
+ private System.Windows.Forms.MenuStrip menuStrip1;
+ private System.Windows.Forms.ToolStripMenuItem fileToolStripMenuItem;
+ private System.Windows.Forms.ToolStripMenuItem openToolStripMenuItem;
+ private System.Windows.Forms.ToolStripMenuItem closeToolStripMenuItem;
+ private System.Windows.Forms.ToolStripMenuItem quitToolStripMenuItem;
+ private System.Windows.Forms.ListView listViewRates;
+ private System.Windows.Forms.ColumnHeader columnHeaderRateName;
+ private System.Windows.Forms.ColumnHeader columnHeaderRateCount;
+ private System.Windows.Forms.ColumnHeader columnHeaderRateTotaltime;
+ private System.Windows.Forms.ColumnHeader columnHeaderRateAvgTime;
+ private System.Windows.Forms.SplitContainer splitContainer1;
+ }
+}
diff --git a/chromium/tools/stats_viewer/stats_viewer.cs b/chromium/tools/stats_viewer/stats_viewer.cs
new file mode 100644
index 00000000000..0270f070b4e
--- /dev/null
+++ b/chromium/tools/stats_viewer/stats_viewer.cs
@@ -0,0 +1,510 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.Data;
+using System.Diagnostics;
+using System.Drawing;
+using System.Text;
+using System.Windows.Forms;
+using System.IO;
+
+namespace StatsViewer {
+ public partial class StatsViewer : Form {
+ /// <summary>
+ /// Create a StatsViewer.
+ /// </summary>
+ public StatsViewer() {
+ InitializeComponent();
+ }
+
+ #region Protected Methods
+ /// <summary>
+ /// Callback when the form loads.
+ /// </summary>
+ /// <param name="e"></param>
+ protected override void OnLoad(EventArgs e) {
+ base.OnLoad(e);
+
+ timer_ = new Timer();
+ timer_.Interval = kPollInterval;
+ timer_.Tick += new EventHandler(PollTimerTicked);
+ timer_.Start();
+ }
+ #endregion
+
+ #region Private Methods
+ /// <summary>
+ /// Attempt to open the stats file.
+ /// Return true on success, false otherwise.
+ /// </summary>
+ private bool OpenStatsFile() {
+ StatsTable table = new StatsTable();
+ if (table.Open(kStatsTableName)) {
+ stats_table_ = table;
+ return true;
+ }
+ return false;
+ }
+
+ /// <summary>
+ /// Close the open stats file.
+ /// </summary>
+ private void CloseStatsFile() {
+ if (this.stats_table_ != null)
+ {
+ this.stats_table_.Close();
+ this.stats_table_ = null;
+ this.listViewCounters.Items.Clear();
+ }
+ }
+
+ /// <summary>
+ /// Updates the process list in the UI.
+ /// </summary>
+ private void UpdateProcessList() {
+ int current_pids = comboBoxFilter.Items.Count;
+ int table_pids = stats_table_.Processes.Count;
+ if (current_pids != table_pids + 1) // Add one because of the "all" entry.
+ {
+ int selected_index = this.comboBoxFilter.SelectedIndex;
+ this.comboBoxFilter.Items.Clear();
+ this.comboBoxFilter.Items.Add(kStringAllProcesses);
+ foreach (int pid in stats_table_.Processes)
+ this.comboBoxFilter.Items.Add(kStringProcess + pid.ToString());
+ this.comboBoxFilter.SelectedIndex = selected_index;
+ }
+ }
+
+ /// <summary>
+ /// Updates the UI for a counter.
+ /// </summary>
+ /// <param name="counter"></param>
+ private void UpdateCounter(IStatsCounter counter) {
+ ListView view;
+
+ // Figure out which list this counter goes into.
+ if (counter is StatsCounterRate)
+ view = listViewRates;
+ else if (counter is StatsCounter || counter is StatsTimer)
+ view = listViewCounters;
+ else
+ return; // Counter type not supported yet.
+
+ // See if the counter is already in the list.
+ ListViewItem item = view.Items[counter.name];
+ if (item != null)
+ {
+ // Update an existing counter.
+ Debug.Assert(item is StatsCounterListViewItem);
+ StatsCounterListViewItem counter_item = item as StatsCounterListViewItem;
+ counter_item.Update(counter, filter_pid_);
+ }
+ else
+ {
+ // Create a new counter
+ StatsCounterListViewItem new_item = null;
+ if (counter is StatsCounterRate)
+ new_item = new RateListViewItem(counter, filter_pid_);
+ else if (counter is StatsCounter || counter is StatsTimer)
+ new_item = new CounterListViewItem(counter, filter_pid_);
+ Debug.Assert(new_item != null);
+ view.Items.Add(new_item);
+ }
+ }
+
+ /// <summary>
+ /// Sample the data and update the UI
+ /// </summary>
+ private void SampleData() {
+ // If the table isn't open, try to open it again.
+ if (stats_table_ == null)
+ if (!OpenStatsFile())
+ return;
+
+ if (stats_counters_ == null)
+ stats_counters_ = stats_table_.Counters();
+
+ if (pause_updates_)
+ return;
+
+ stats_counters_.Update();
+
+ UpdateProcessList();
+
+ foreach (IStatsCounter counter in stats_counters_)
+ UpdateCounter(counter);
+ }
+
+ /// <summary>
+ /// Set the background color based on the value
+ /// </summary>
+ /// <param name="item"></param>
+ /// <param name="value"></param>
+ private void ColorItem(ListViewItem item, int value)
+ {
+ if (value < 0)
+ item.ForeColor = Color.Red;
+ else if (value > 0)
+ item.ForeColor = Color.DarkGreen;
+ else
+ item.ForeColor = Color.Black;
+ }
+
+ /// <summary>
+ /// Called when the timer fires.
+ /// </summary>
+ /// <param name="sender"></param>
+ /// <param name="e"></param>
+ void PollTimerTicked(object sender, EventArgs e) {
+ SampleData();
+ }
+
+ /// <summary>
+ /// Called when the interval is changed by the user.
+ /// </summary>
+ /// <param name="sender"></param>
+ /// <param name="e"></param>
+ private void interval_changed(object sender, EventArgs e) {
+ int interval = 1;
+ if (int.TryParse(comboBoxInterval.Text, out interval)) {
+ if (timer_ != null) {
+ timer_.Stop();
+ timer_.Interval = interval * 1000;
+ timer_.Start();
+ }
+ } else {
+ comboBoxInterval.Text = timer_.Interval.ToString();
+ }
+ }
+
+ /// <summary>
+ /// Called when the user changes the filter
+ /// </summary>
+ /// <param name="sender"></param>
+ /// <param name="e"></param>
+ private void filter_changed(object sender, EventArgs e) {
+ // While in this event handler, don't allow recursive events!
+ this.comboBoxFilter.SelectedIndexChanged -= new System.EventHandler(this.filter_changed);
+ if (this.comboBoxFilter.Text == kStringAllProcesses)
+ filter_pid_ = 0;
+ else
+ int.TryParse(comboBoxFilter.Text.Substring(kStringProcess.Length), out filter_pid_);
+ SampleData();
+ this.comboBoxFilter.SelectedIndexChanged += new System.EventHandler(this.filter_changed);
+ }
+
+ /// <summary>
+ /// Callback when the mouse enters a control
+ /// </summary>
+ /// <param name="sender"></param>
+ /// <param name="e"></param>
+ private void mouse_Enter(object sender, EventArgs e) {
+ // When the dropdown is expanded, we pause
+ // updates, as it messes with the UI.
+ pause_updates_ = true;
+ }
+
+ /// <summary>
+ /// Callback when the mouse leaves a control
+ /// </summary>
+ /// <param name="sender"></param>
+ /// <param name="e"></param>
+ private void mouse_Leave(object sender, EventArgs e) {
+ pause_updates_ = false;
+ }
+
+ /// <summary>
+ /// Called when the user clicks the zero-stats button.
+ /// </summary>
+ /// <param name="sender"></param>
+ /// <param name="e"></param>
+ private void buttonZero_Click(object sender, EventArgs e) {
+ this.stats_table_.Zero();
+ SampleData();
+ }
+
+ /// <summary>
+ /// Called when the user clicks a column heading.
+ /// </summary>
+ /// <param name="sender"></param>
+ /// <param name="e"></param>
+ private void column_Click(object sender, ColumnClickEventArgs e) {
+ if (e.Column != sort_column_) {
+ sort_column_ = e.Column;
+ this.listViewCounters.Sorting = SortOrder.Ascending;
+ } else {
+ if (this.listViewCounters.Sorting == SortOrder.Ascending)
+ this.listViewCounters.Sorting = SortOrder.Descending;
+ else
+ this.listViewCounters.Sorting = SortOrder.Ascending;
+ }
+
+ this.listViewCounters.ListViewItemSorter =
+ new ListViewItemComparer(e.Column, this.listViewCounters.Sorting);
+ this.listViewCounters.Sort();
+ }
+
+ /// <summary>
+ /// Called when the user clicks the button "Export".
+ /// </summary>
+ /// <param name="sender"></param>
+ /// <param name="e"></param>
+ private void buttonExport_Click(object sender, EventArgs e) {
+ //Have to pick a textfile to export to.
+ //Saves what is shown in listViewStats in the format: function value
+ //(with a tab in between), so that it is easy to copy paste into a spreadsheet.
+ //(Does not save the delta values.)
+ TextWriter tw = null;
+ try {
+ saveFileDialogExport.CheckFileExists = false;
+ saveFileDialogExport.ShowDialog();
+ tw = new StreamWriter(saveFileDialogExport.FileName);
+
+ for (int i = 0; i < listViewCounters.Items.Count; i++) {
+ tw.Write(listViewCounters.Items[i].SubItems[0].Text + "\t");
+ tw.WriteLine(listViewCounters.Items[i].SubItems[1].Text);
+ }
+ }
+ catch (IOException ex) {
+ MessageBox.Show(string.Format("There was an error while saving your results file. The results might not have been saved correctly.: {0}", ex.Message));
+ }
+ finally{
+ if (tw != null) tw.Close();
+ }
+ }
+
+ #endregion
+
+ class ListViewItemComparer : IComparer {
+ public ListViewItemComparer() {
+ this.col_ = 0;
+ this.order_ = SortOrder.Ascending;
+ }
+
+ public ListViewItemComparer(int column, SortOrder order) {
+ this.col_ = column;
+ this.order_ = order;
+ }
+
+ public int Compare(object x, object y) {
+ int return_value = -1;
+
+ object x_tag = ((ListViewItem)x).SubItems[col_].Tag;
+ object y_tag = ((ListViewItem)y).SubItems[col_].Tag;
+
+ if (Comparable(x_tag, y_tag))
+ return_value = ((IComparable)x_tag).CompareTo(y_tag);
+ else
+ return_value = String.Compare(((ListViewItem)x).SubItems[col_].Text,
+ ((ListViewItem)y).SubItems[col_].Text);
+
+ if (order_ == SortOrder.Descending)
+ return_value *= -1;
+
+ return return_value;
+ }
+
+ #region Private Methods
+ private bool Comparable(object x, object y) {
+ if (x == null || y == null)
+ return false;
+
+ return x is IComparable && y is IComparable;
+ }
+ #endregion
+
+ #region Private Members
+ private int col_;
+ private SortOrder order_;
+ #endregion
+ }
+
+ #region Private Members
+ private const string kStringAllProcesses = "All Processes";
+ private const string kStringProcess = "Process ";
+ private const int kPollInterval = 1000; // 1 second
+ private const string kStatsTableName = "ChromeStats";
+ private StatsTable stats_table_;
+ private StatsTableCounters stats_counters_;
+ private Timer timer_;
+ private int filter_pid_;
+ private bool pause_updates_;
+ private int sort_column_ = -1;
+ #endregion
+
+ #region Private Event Callbacks
+ private void openToolStripMenuItem_Click(object sender, EventArgs e)
+ {
+ OpenDialog dialog = new OpenDialog();
+ dialog.ShowDialog();
+
+ CloseStatsFile();
+
+ StatsTable table = new StatsTable();
+ bool rv = table.Open(dialog.FileName);
+ if (!rv)
+ {
+ MessageBox.Show("Could not open statsfile: " + dialog.FileName);
+ }
+ else
+ {
+ stats_table_ = table;
+ }
+ }
+
+ private void closeToolStripMenuItem_Click(object sender, EventArgs e)
+ {
+ CloseStatsFile();
+ }
+
+ private void quitToolStripMenuItem_Click(object sender, EventArgs e)
+ {
+ Application.Exit();
+ }
+ #endregion
+ }
+
+ /// <summary>
+ /// Base class for counter list view items.
+ /// </summary>
+ internal class StatsCounterListViewItem : ListViewItem
+ {
+ /// <summary>
+ /// Create the ListViewItem
+ /// </summary>
+ /// <param name="text"></param>
+ public StatsCounterListViewItem(string text) : base(text) { }
+
+ /// <summary>
+ /// Update the ListViewItem given a new counter value.
+ /// </summary>
+ /// <param name="counter"></param>
+ /// <param name="filter_pid"></param>
+ public virtual void Update(IStatsCounter counter, int filter_pid) { }
+
+ /// <summary>
+ /// Set the background color based on the value
+ /// </summary>
+ /// <param name="value"></param>
+ protected void ColorItem(int value)
+ {
+ if (value < 0)
+ ForeColor = Color.Red;
+ else if (value > 0)
+ ForeColor = Color.DarkGreen;
+ else
+ ForeColor = Color.Black;
+ }
+
+ /// <summary>
+ /// Create a new subitem with a zeroed Tag.
+ /// </summary>
+ /// <returns></returns>
+ protected ListViewSubItem NewSubItem()
+ {
+ ListViewSubItem item = new ListViewSubItem();
+ item.Tag = -1; // Arbitrarily initialize to -1.
+ return item;
+ }
+
+ /// <summary>
+ /// Set the value for a subitem.
+ /// </summary>
+ /// <param name="item"></param>
+ /// <param name="val"></param>
+ /// <returns>True if the value changed, false otherwise</returns>
+ protected bool SetSubItem(ListViewSubItem item, int val)
+ {
+ // The reason for doing this extra compare is because
+ // we introduce flicker if we unnecessarily update the
+ // subitems. The UI is much less likely to cause you
+ // a seizure when we do this.
+ if (val != (int)item.Tag)
+ {
+ item.Text = val.ToString();
+ item.Tag = val;
+ return true;
+ }
+ return false;
+ }
+ }
+
+ /// <summary>
+ /// A listview item which contains a rate.
+ /// </summary>
+ internal class RateListViewItem : StatsCounterListViewItem
+ {
+ public RateListViewItem(IStatsCounter ctr, int filter_pid) :
+ base(ctr.name)
+ {
+ StatsCounterRate rate = ctr as StatsCounterRate;
+ Name = rate.name;
+ SubItems.Add(NewSubItem());
+ SubItems.Add(NewSubItem());
+ SubItems.Add(NewSubItem());
+ Update(ctr, filter_pid);
+ }
+
+ public override void Update(IStatsCounter counter, int filter_pid)
+ {
+ Debug.Assert(counter is StatsCounterRate);
+
+ StatsCounterRate new_rate = counter as StatsCounterRate;
+ int new_count = new_rate.GetCount(filter_pid);
+ int new_time = new_rate.GetTime(filter_pid);
+ int old_avg = Tag != null ? (int)Tag : 0;
+ int new_avg = new_count > 0 ? (new_time / new_count) : 0;
+ int delta = new_avg - old_avg;
+
+ SetSubItem(SubItems[column_count_index], new_count);
+ SetSubItem(SubItems[column_time_index], new_time);
+ if (SetSubItem(SubItems[column_avg_index], new_avg))
+ ColorItem(delta);
+ Tag = new_avg;
+ }
+
+ private const int column_count_index = 1;
+ private const int column_time_index = 2;
+ private const int column_avg_index = 3;
+ }
+
+ /// <summary>
+ /// A listview item which contains a counter.
+ /// </summary>
+ internal class CounterListViewItem : StatsCounterListViewItem
+ {
+ public CounterListViewItem(IStatsCounter ctr, int filter_pid) :
+ base(ctr.name)
+ {
+ Name = ctr.name;
+ SubItems.Add(NewSubItem());
+ SubItems.Add(NewSubItem());
+ Update(ctr, filter_pid);
+ }
+
+ public override void Update(IStatsCounter counter, int filter_pid) {
+ Debug.Assert(counter is StatsCounter || counter is StatsTimer);
+
+ int new_value = 0;
+ if (counter is StatsCounter)
+ new_value = ((StatsCounter)counter).GetValue(filter_pid);
+ else if (counter is StatsTimer)
+ new_value = ((StatsTimer)counter).GetValue(filter_pid);
+
+ int old_value = Tag != null ? (int)Tag : 0;
+ int delta = new_value - old_value;
+ SetSubItem(SubItems[column_value_index], new_value);
+ if (SetSubItem(SubItems[column_delta_index], delta))
+ ColorItem(delta);
+ Tag = new_value;
+ }
+
+ private const int column_value_index = 1;
+ private const int column_delta_index = 2;
+ }
+}
diff --git a/chromium/tools/stats_viewer/stats_viewer.csproj b/chromium/tools/stats_viewer/stats_viewer.csproj
new file mode 100644
index 00000000000..4a262ea71ee
--- /dev/null
+++ b/chromium/tools/stats_viewer/stats_viewer.csproj
@@ -0,0 +1,107 @@
+<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <PropertyGroup>
+ <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+ <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+ <ProductVersion>8.0.50727</ProductVersion>
+ <SchemaVersion>2.0</SchemaVersion>
+ <ProjectGuid>{41735CD9-3E35-47F7-9FD1-4A9950B6B131}</ProjectGuid>
+ <OutputType>WinExe</OutputType>
+ <AppDesignerFolder>Properties</AppDesignerFolder>
+ <RootNamespace>StatsViewer</RootNamespace>
+ <AssemblyName>stats_viewer</AssemblyName>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+ <DebugSymbols>true</DebugSymbols>
+ <DebugType>full</DebugType>
+ <Optimize>false</Optimize>
+ <OutputPath>$(SolutionDir)$(Configuration)</OutputPath>
+ <DefineConstants>DEBUG;TRACE</DefineConstants>
+ <ErrorReport>prompt</ErrorReport>
+ <WarningLevel>4</WarningLevel>
+ <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+ <DebugType>pdbonly</DebugType>
+ <Optimize>true</Optimize>
+ <OutputPath>$(SolutionDir)$(Configuration)</OutputPath>
+ <DefineConstants>TRACE</DefineConstants>
+ <ErrorReport>prompt</ErrorReport>
+ <WarningLevel>4</WarningLevel>
+ </PropertyGroup>
+ <ItemGroup>
+ <Reference Include="System" />
+ <Reference Include="System.Data" />
+ <Reference Include="System.Deployment" />
+ <Reference Include="System.Drawing" />
+ <Reference Include="System.Windows.Forms" />
+ <Reference Include="System.Xml" />
+ </ItemGroup>
+ <ItemGroup>
+ <Compile Include="OpenDialog.cs">
+ <SubType>Form</SubType>
+ </Compile>
+ <Compile Include="OpenDialog.Designer.cs">
+ <DependentUpon>OpenDialog.cs</DependentUpon>
+ </Compile>
+ <Compile Include="Resources.Designer.cs">
+ <AutoGen>True</AutoGen>
+ <DesignTime>True</DesignTime>
+ <DependentUpon>Resources.resx</DependentUpon>
+ </Compile>
+ <Compile Include="stats_viewer.cs">
+ <SubType>Form</SubType>
+ </Compile>
+ <Compile Include="stats_viewer.Designer.cs">
+ <DependentUpon>stats_viewer.cs</DependentUpon>
+ </Compile>
+ <Compile Include="Program.cs" />
+ <Compile Include="Properties\AssemblyInfo.cs" />
+ <EmbeddedResource Include="OpenDialog.resx">
+ <SubType>Designer</SubType>
+ <DependentUpon>OpenDialog.cs</DependentUpon>
+ </EmbeddedResource>
+ <EmbeddedResource Include="Resources.resx">
+ <SubType>Designer</SubType>
+ <Generator>ResXFileCodeGenerator</Generator>
+ <LastGenOutput>Resources.Designer.cs</LastGenOutput>
+ </EmbeddedResource>
+ <EmbeddedResource Include="stats_viewer.resx">
+ <SubType>Designer</SubType>
+ <DependentUpon>stats_viewer.cs</DependentUpon>
+ </EmbeddedResource>
+ <EmbeddedResource Include="Properties\Resources.resx">
+ <Generator>ResXFileCodeGenerator</Generator>
+ <LastGenOutput>Resources.Designer.cs</LastGenOutput>
+ <SubType>Designer</SubType>
+ </EmbeddedResource>
+ <Compile Include="Properties\Resources.Designer.cs">
+ <AutoGen>True</AutoGen>
+ <DependentUpon>Resources.resx</DependentUpon>
+ </Compile>
+ <None Include="Properties\Settings.settings">
+ <Generator>SettingsSingleFileGenerator</Generator>
+ <LastGenOutput>Settings.Designer.cs</LastGenOutput>
+ </None>
+ <Compile Include="Properties\Settings.Designer.cs">
+ <AutoGen>True</AutoGen>
+ <DependentUpon>Settings.settings</DependentUpon>
+ <DesignTimeSharedInput>True</DesignTimeSharedInput>
+ </Compile>
+ <Compile Include="stats_table.cs" />
+ <Compile Include="win32.cs" />
+ </ItemGroup>
+ <ItemGroup>
+ <None Include="Resources\kitten.png" />
+ </ItemGroup>
+ <ItemGroup>
+ <None Include="Resources\kittenbackground.png" />
+ </ItemGroup>
+ <Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
+ <!-- To modify your build process, add your task inside one of the targets below and uncomment it.
+ Other similar extension points exist, see Microsoft.Common.targets.
+ <Target Name="BeforeBuild">
+ </Target>
+ <Target Name="AfterBuild">
+ </Target>
+ -->
+</Project> \ No newline at end of file
diff --git a/chromium/tools/stats_viewer/stats_viewer.resx b/chromium/tools/stats_viewer/stats_viewer.resx
new file mode 100644
index 00000000000..73e7e5963e4
--- /dev/null
+++ b/chromium/tools/stats_viewer/stats_viewer.resx
@@ -0,0 +1,1129 @@
+<?xml version="1.0" encoding="utf-8"?>
+<root>
+ <!--
+ Microsoft ResX Schema
+
+ Version 2.0
+
+ The primary goals of this format is to allow a simple XML format
+ that is mostly human readable. The generation and parsing of the
+ various data types are done through the TypeConverter classes
+ associated with the data types.
+
+ Example:
+
+ ... ado.net/XML headers & schema ...
+ <resheader name="resmimetype">text/microsoft-resx</resheader>
+ <resheader name="version">2.0</resheader>
+ <resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
+ <resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
+ <data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
+ <data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
+ <data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
+ <value>[base64 mime encoded serialized .NET Framework object]</value>
+ </data>
+ <data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
+ <value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
+ <comment>This is a comment</comment>
+ </data>
+
+ There are any number of "resheader" rows that contain simple
+ name/value pairs.
+
+ Each data row contains a name, and value. The row also contains a
+ type or mimetype. Type corresponds to a .NET class that support
+ text/value conversion through the TypeConverter architecture.
+ Classes that don't support this are serialized and stored with the
+ mimetype set.
+
+ The mimetype is used for serialized objects, and tells the
+ ResXResourceReader how to depersist the object. This is currently not
+ extensible. For a given mimetype the value must be set accordingly:
+
+ Note - application/x-microsoft.net.object.binary.base64 is the format
+ that the ResXResourceWriter will generate, however the reader can
+ read any of the formats listed below.
+
+ mimetype: application/x-microsoft.net.object.binary.base64
+ value : The object must be serialized with
+ : System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.soap.base64
+ value : The object must be serialized with
+ : System.Runtime.Serialization.Formatters.Soap.SoapFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.bytearray.base64
+ value : The object must be serialized into a byte array
+ : using a System.ComponentModel.TypeConverter
+ : and then encoded with base64 encoding.
+ -->
+ <xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
+ <xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
+ <xsd:element name="root" msdata:IsDataSet="true">
+ <xsd:complexType>
+ <xsd:choice maxOccurs="unbounded">
+ <xsd:element name="metadata">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" />
+ </xsd:sequence>
+ <xsd:attribute name="name" use="required" type="xsd:string" />
+ <xsd:attribute name="type" type="xsd:string" />
+ <xsd:attribute name="mimetype" type="xsd:string" />
+ <xsd:attribute ref="xml:space" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="assembly">
+ <xsd:complexType>
+ <xsd:attribute name="alias" type="xsd:string" />
+ <xsd:attribute name="name" type="xsd:string" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="data">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ <xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
+ <xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
+ <xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
+ <xsd:attribute ref="xml:space" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="resheader">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" />
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:choice>
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:schema>
+ <resheader name="resmimetype">
+ <value>text/microsoft-resx</value>
+ </resheader>
+ <resheader name="version">
+ <value>2.0</value>
+ </resheader>
+ <resheader name="reader">
+ <value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+ <resheader name="writer">
+ <value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+ <assembly alias="System.Drawing" name="System.Drawing, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a" />
+ <data name="pictureBoxTitle.Image" type="System.Drawing.Bitmap, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
+ <value>
+ iVBORw0KGgoAAAANSUhEUgAAAPgAAABWCAIAAABtpczCAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8
+ YQUAAAAgY0hSTQAAeiYAAICEAAD6AAAAgOgAAHUwAADqYAAAOpgAABdwnLpRPAAAAAlwSFlzAAAOxAAA
+ DsQBlSsOGwAA4PdJREFUeF5k/QVAXNf2/4GmdnutvZV4gjPu7oN70KBBEggJcScBgru7u7u7u2sggQTi
+ JLgmaa/1tnnrDGl///de++3umTMDDMPnfGfttdde81nzg+B3vz3/+edf9n3++W+f7fvss88+//zzfb99
+ /O3jr3C07+PHj8g5yR2f/fb5Z59JTnzc93Ef/PPZ55J/4d59+3777TfkzGdwz8fPP/8CTsMDkX8+R87v
+ g9OS7yF5DIzwGORbwQ+AB38BP0jyGPhayXeAx8AT2PspyH/wUyRPA77H3j/IwyR3Sb7hp/8kj5b8VHi4
+ 5CnDEdyUPAz53faeMzJ8hGe79+sgPwueC/JbID9xHzxbyZcgD5c8FeSLkV9Y8v0k5yRPDHk94IuQZ/L7
+ k/r03H5/Wnsvx96vtfddfn8FkG+GvFx7J+CZSJ6G5Bf6DXkCezf++CLJzb1fcu8l3btr74X69KrBr7f3
+ 6/8G3wN+H+R3/PRg+LWQ7yn5Q8Idn++9QvDCS37ZvdcE+Ta//iZ5QSS/oOQvgbzMkh8gOUBuw13Ic4Xv
+ uPc67/0PeSmQZ4P8ADiBfIu9v7Hkzr0fIXll954sPDP4ayLfU/JtADXJS4N829/gGcIvAH8deMrwf+Rb
+ 7n255IF7Z/aQkPxR4FXc+xGffl/kYb/uIbv35fCH3vfZF5/99uvHfakdJ2MGyQFteL9Ogm8nyb+bEtBL
+ Deym+HcTA3pIAd1k5EwPJbCHGtxHCRmkhgyQQwaIIQMkRIPk0CFK2DA1bIgiOaCEjyCKGKNHTjAixkH0
+ yEl61BSIGjFNi5qhg2IegWhxcyBqwiw1ZZ6ZscBOf8pJf8pKf8rMeM7KeM5Mf0FPe0ZLe05LfUZNXqCk
+ PqOlv2CkP2ekP6NlPKNmvKBmvqBmvaJlv6LnLIIYuYv03EVaziI1RzLmvqHkviXnviXlL5Hyl4mgvE83
+ KfnL5PwVcv4yKX+FULJOLt4gF6+TSzZoJZuM4i0acnODVLJJKtkil21RyjYpZRtUicifhJyhlK6Ty7eo
+ lds0UPUOtWaHujfW7tD+UN07Wu07hkT0PdW9p9f/xATBQcMOo36b0bjDaH7Han7PbHxHb3xPh7HhHa3h
+ HbXxPa3hPbXuHaVuV6J3lPr31IYPyMkGyV3I+IGGnJScb/qZ3vgTPIZY/wFf/wFb/wHXgIhQjwhuous/
+ YGrfYepA7/HIw96R6t6R63ZItdvEmm1i3TtS7S6hegdX+45Q+55QvYuv2sFVvsNX7hAqdwmVO8SKbVLF
+ NgXRFrVskwq/fskqsXiVhGiFXLwKrwmtZI0KByUrlOIVSukqtWSFWrRMLl6BA3rJMr1wiZq3TM1foecv
+ M/KXmHlvmXlv2Dmv2Vkv4C/OyHjBTH/KSF2gpy0wUhYYSXP0xFl6/ENK3Aw5bpoc+4AU84AYOYGPGMdG
+ TeAix0FYuBk+hgsZwoQM40KHCcFDhOBBfFA/IbCXENiHC+jDBvRi/bqxfl24gF6ibydhX3aPTXAL1r0S
+ 5VaDca3DuNZi79fjPOpw9+sw9+sxHg0Ej0aiRyPBswnv3ULwbsX7tOK8WzFeLYjgwKcN69eO92nD+bbj
+ /ToI/p1E/05CQDcpoIcMo38XXC3kwD5iUB8+cIAYNEgKHSKFj5BBEaPksCFC+DA+ZpwcP0GNm6AkTCGK
+ nSLFTBJiQVOE2GlC7ANi9CQ+ZgIXN0WMBU0S4idxCQ/widOIkmbwKY+IEhFSHuGTH+JSHkk0iwWlPsal
+ PcamPcakP4ERhMuYJ6Q/wafP49KeYDIWMNnPcVnPEGU/J2S/IGY9J2S9wGU+hfOY3Jf43JeEvJfE/Jek
+ /BfEvBf4vJcSvcDnvyLkwk04eEnIf4kvfIUreo0vfI2HsfgVvmRPrwkli4TiRbxEcEAofg0ilr4hl70l
+ ly6SS16R9lS+SC1/A6KUL5LL4d5FYvlbUsVbMowlyDfEgeCg9A2h7C2hdBFXuoj/fYQDfBnoDaFiiVD+
+ Fu5Cl75Blb5RLHuLKl9Cly9hyt5iyt6gy94iZ+A8cu8inMGCSt/gSl/DN8cUvcSUvMYWv0IVvFIsWkQX
+ vkYXvlIseKmQD2deo/NfofNeYnJfYHKf40A58Io9x8MrBq9exgI+Yx6fuUDIeorPfkbKfkbOWiBlzRMz
+ nxBgzHhMTHuMz3hCSn9CTp0jpsziUx6TUh9TUh9TU2apSbO0xIeMhGla3BQ1dpIaM0GLHCWHDxMjhknh
+ Q+TgPmJIPymoFx/QjduTfxfWp13Ru03Bt0MRDnzaFbzbFL1aFTya5T2aFT2aFD0a0e6NCLFuwG0dyr0O
+ eFZ0q1J0QcBGu1Zh9uX02oa24z1rMO71kscB3I0470a8ZyPWoxHr1UzwbiEB5V4I6DivFiyQ7d2G8W5D
+ +7Zj/DvhGcDzIAT2EGEEBfUR4ZIK6iUF91IQ9ZFD+smhg/DsEb4jJIoao0SNkSJHiDFjhPgJcuw4MWGK
+ nDRNTZgixU0SEmdICaBpQsJDYvwMPu4BHo4TZ4hJM6Skh6TkhyTAOn2WlDEHryAIXk1C6hw+ZRaBO3UO
+ yAbh058A0IQMeNEX4C8B7GIzFrAZ8wA6PuspIec5CcjOegZAY4H1nBf4nBcEEMCd9wqPUP4CU/CaACp8
+ RSx4RYKx6BUeEQD9+wgHCOKv/sAOOAN0sKWvsABQ2Wtc+Rt8+VtsxRts+Vt8xRsQoRyIBIiXyJVvyBWL
+ VFDlW3r1Eq16CRkr31Ar31CQB7whVL4lgsoXCeWLeBAcVy2RqpfJ1ctEUJVk/ENVS4TKJULVErFyCVvx
+ Fl25hNkbK5exlZInADd/F6b8DRzDGTiPA5W9xoDKF+E5IxdJGXJtoIvfoIpeK5bAmUVMySKu8DW24BW2
+ 4CU2H7ns4eKHF4qQ/RxeKEL2MyIIeT2fEbOekhDoF/BAeTYQ/5SUPk/InCenz5OTHxOS5wipQPwTaupj
+ WsocLekRNWGGnjBNj38AYsRNMqNGqQBJBAQFg8AMKbiPFNJHDgSvBN/sIvh3433a0d7tKJ92lHebvFer
+ PIDu2Yxyb5T3aEJ5NiOsArfuDXi3OpxrDcatGuNaiXatxLhUYO7XEl0r8fty+2zDOvFedRjPBuTR7o04
+ gNu3mejdDJTjfFqJXs14j0bwbzByrGczxrcd69eJCK6zwB5AHBFcgkG9YNvE0CEksAkeIIUOUCQihw3C
+ sydFjVKiEb4RgYXHjpMkfJMSp8gJk8SkaQCdnDBFiJvAA+iJj0iJDwlJswA6Lv4BBmw7aYaQAog/JKbM
+ EJIf4lMf4tJmcRlA8xw+bRabPodD9BhBGcjOWgC+4UVHXmvAOusp4tnAetZTbPYzfM5zct4rSu5LIvCN
+ OPQrQv4rIggx71fg1rh8+KO+hr8uYtIFr/AFL8G5cUUv0EUv0TAWvkAVvgRfxJW9QRAHDiQGKRkXJfQs
+ 4irf4Kvf4muWCDXLWERL+JplXN0ysQZgfUMEVb0hVb2lggDxulVG7Qq9eplWtUSpekupfEsG0CVCWN8T
+ UF61RK5ZIdWsEiUiwFiLiFS7BifhB+FrVgjVKzjguwr4BsqXsNUr+KolXOVbOAln9gQ3kbv2BPdKcEeu
+ h/I3mKoVXNUqvmIZW74Mro8qhysEfq+3WPD7olc45Ap/RYALO/8FIQ95iyPmviAVvKSC8p6TEWsHE4FX
+ Hv4EwPcCUfLmCXCTUh6Tk+dISWDqjyXHs5SkR5SEGQrYeTwCOlDOiB5jRo7QwgaBFiowE9JPCewmBnaT
+ A7pIvu0QLODAXr1a0J4t4OLA9ycjd29E3a8H1sGaEZt2q4cwBO9Wi3OrxrtVEO6V4p2LsbcL0a4VRJdy
+ 4r78AbuIbqJPA9a3Ge/djJO4OMQnYOQYwBoOfJCIBX4S8vMgPgHzDgLP7gHbxgf3AeIgfEg/IWyQiNj2
+ MGBNBsEb0J4kFk6OmaDGTlBiwb8nwLaJ8ZOkxGmwZ0rSDDnxAbg1OQk8exoOgGN4myOkPSalPZa83z3E
+ AtNpc3h4KwSlPsKlPsSmPpLADazP4jLnsJmPsZlPEGXN47Ln8dkL8E6KOE32U0LuM0Ie6Dkh5xkW3nbz
+ XkCwQSl4SUZ8+jWxAFCGkGORWLRIKgL/XgQLlwQhb/DFb/GlICRUwJYgAm9Dlb0GoUHli+CaYNVgkMAN
+ kISrAG7eYqvfSsYlXO0yoW4FX7eCrl1B1S6japYxcLN+lVC7BALiidUS1S6T61aptSuUmmVyzTIJVP2W
+ WP2WUA0O/RZfvYSvAoKXgWMQPBhfu0aoXSPWrQPfxD0B5bXrJDhTBwcr+Opl4BsHrMNYs4pHWF/GIZcB
+ IlzNiuTCQ87gqiWgS94K4A0Ewb0KuSDhJlwe+Oo3+MpFQpXkkoN3p+KX2OKXuJIX+JIXhKKXpMKX1MKX
+ tKKXjOJXrKKXrLyn1KwnpOwnxJwFcuY8CQwI/nZpc3t/RErqLC31ETXlERxQkKDlES3hIT1umh47SYsb
+ Z8SN0aNH6JEjjPAhKvAd0gdRAFBO9u8AL4eR7NuGBMwQSniB8wKTTWjAGuRWh3apRrnWIvG2cxXqXhXa
+ pRLjVkW4X0lwLcPfLcY7F+Ju56Nv5CneKcLcLcLvKxiyjeol+DVifMG/94ITCMTbEP8GAdlw81P83YUP
+ 6AIXxwHZiPpxoQP40AFC2BAR+IZoBBQxQoweB8+mIuY9ivh39DhQToqdBKAhBCclTBERsqdJqchvToY4
+ ZM+wkx9CkE1InSWmzhIgDoGABCIQiEPS5zCIVQPKEs/IeAKGAc5ByF4g5s6Tc+fhxSVkz2OzFzCAct5T
+ XC4Ct8RvnuOB8oIX5MKXYDxwgC98AX8wYjFExq/JJa+JEEMj8S6Eqm/wZcD0G0LJW4mQm7jyJVz5Mhbw
+ rUBiAAzic0hIgKl8g6l+C0JwAar+QAogQ0CUMFS7BP6Na1jFNayj69dQ9WvoulVs/Rq+YY0Iql8FkepW
+ ENWvkhvWKKC947pVcu0yqX55716wbUS1q3DBIKqHEb52jdSwToJxT7UrBDjZACdXCXAhwcNqJV8FAtYl
+ uMPJvasOB08DeSYwrsCliNsDHd4ogHUIruBaqlsh1rxFVPuWXPOWUg1vL29J8MoUvoIgHignFj8nFb2g
+ FL1mlLxhFb9i5z9n5D1nwFjwggljzgIta4EGQXnaHIiS8ZiW8YSRNktPfQis09Me0mFMesiIf8iMfcCM
+ mWDGjjJiRxjRw/SIIXpoPzUIpnPdJEiEBHRS/Tsofu1U/3aabxvFq5mIBM/NIIIkwJaEKLU4l2qwcIJL
+ Le5OJfZOOfZeGe5+BdmtjHi3AHcrFwu6k4+9U4C7mYe6mqW4r3DILroX79eA8mnCeDfvzS//j/IAyMN0
+ EiAhgwTivbjAXmxgLyaoDxPcjw0dxIcPw2ySGDFCiBwFvkkQk0SNEWPgYIIMN4H76AlyzCQpbooUP00G
+ C0fi7BkiePZeqA1Yg3nDJFLCNzF1Dt7siMj73Rzu92gbmwFWPf9JWQtYCAFznoE9IzTnv6DkQYD4FJ/z
+ FKZKGAgw8p7j8yA4gXvh7RX4RsJrECH/Ba7wBRbmiGUwHUREKH0N0zjEkmGWVraIhvfuqhVC1Rqxco0I
+ iAPflcu4yhVkBBqqIR5YkTC9jK0FLUmEAI0YZO0qrm4N3BovQZkAONav4OpXsAA6IF6/pli3CiMcI/cC
+ c3A9wAhY71HeuA6gwzECOsL6CqkBoEe4J9aBea+RYES+5yqhYQ/o/xNyEx4GF0/jBtAP3xNXt4KgXLuG
+ q13/XXCdAOur8G6Aq13F1Kyia1ZAmOrlvfci8G8ChPu1qxAakashOgLQ3xBrl8hVSzCFoJQvUUqXyMib
+ 3hty8WsKUF4I3vGaVvKWWbTIArjzXzDznjH2lPuMmf2UlfGEnv6Ymj5Hz3zCyHjCTJtlpj1kpjwCMZJh
+ AjrDiHvAiJ6gR48zIodpgHj4IFBOgxkdBCoBncA3xb+DCnx7t1C8m6ieTRSPRtL9eoJbHZImAb5da3D3
+ qjDOFWjnCuzdCtzNMsz1YtTNYsytQrRzEe5eMf52Pv5mNv5mDgh3Ixt3LRt9KR0B3TaqBy9xdKwEdCwk
+ T8DCfTvAv5F5QCAyy8QH9eLAwoP70cH9qNBBdOggJnwYGzGKjxzFRY7hosZx0RP46EkCkiGZxEcjNyFz
+ QkLyJFP4eJhZQmQi8ey99IgkMYLfAxq0N3dE8iGI4CY2/QmkSjAZ85gsyI08xUmEBSERCIIycib3OQJ0
+ 7jNczjNMznN0/kts3gsITrCQEoEQvOA1Gf42EJAgs6hnqKLnMOVC3oglMzwcTL8k0zJU2Vv58jcKlcvo
+ qlV09Rq2Zg1cEMJcdDWcWcIA4ojeoqqR8ANVs6QI0Uj9CkYioBkBq34N07CObVhDnBLgbgSt4WFsWMPU
+ rQLlChC9wAMa1+E8Yrp7yDauI8bcuEZCDiQ2j9wEre7pE9O/P4wIX9u0ToARrpY9waW1d9C0RmzeBNbh
+ R8AzwTRuYOpXUXXwTrKBrV/H1K1h9qCvWcPUrMHviJIcYKtXkSAeYn2gHEy9bo1SAxEUaIlS85YMXi6Z
+ GVMrlully1TklVykFr2iQcRS+JKS/5Kc/4KW95Ke94KZ+4yV+5SV+5wNB9kLrMwn7IzHADct5SE1BTFy
+ ZspDVso0K3mGlfSQlTDDigMvn6RHjlIjhmlhQ7SQfmpILy24hwYu7ttG9muj+bZSvZop3ohoHvUUd0RU
+ 1xqCcwXmbiUIB+Z9sxh1owh1vRB9tQB1vRhztRB9OVfxSrbitRzU9Sz0jSzcrRzi9Sz8lXTM5TTU5Qz0
+ 5XTMvsJh29gBUmArHlLpvhCIQ0K9g4AkCiFQgbRON04Si+NC+nEhA5iQQcXQIcWwYVTEKDpiFBMFec1x
+ GBFFT2Bjp3CxU9iYSWzMBCbuASH+ASFuCpcwjUucgSkmLvkRZi8x8in3NwfHIAyMvyMO6SokPQIJPkme
+ BJ25gMp+is59DgTjc18g6S04Bklw38MaDy6OJL9eYPJewjxyTxB5kwoXScVvSUVIXg9b/AJV8hJdsYhk
+ P6reIuEvRNKVS6iqZUVQ5ZI8omWF6hXFmlVU7aoksF5ByK5Fgmx07ZJi7YoCUFu/gmpcQTetYprXME2r
+ 2EYIA1YQyiWQIZw1rmGa17EtG/iWDVzTOqZpHQ1qXMO2bOJbNolN6ySAEgR8N20galxHiEcE5+HeDVIz
+ CO5FHkluWqc0bZDh3qZ1YvMGoWWD0LwOWOOb1/BNq3g4aFpHxuY1uAvftIFp2kA3rqP34qWGdeQaq1tT
+ rF9HS0CXUA52voauWUeuZ8klDaAjoQuwXgkTg2UI/Wm1a/TqFZgZw1yZXvWWUblEL12kFL4kFr2kFL+k
+ Fz+nF72g578CxEGMvBec3GecnAU2KHuelfmYnfmYmzHHSXvETJ6hJ88wUmaAcnbyA07SNDdxmhf3gBMz
+ yYoeZ0YgU09aWD89pJce0EUP6KAD4j6tVJ8Wuk8Lw6Oe6tFA9Wpk3K+lu9XQXaupzuWEWyXY26U4GG8U
+ Ya4XYq4VYK7kKl7IVrxagLuci3bKVLicibqShbmSgbqagb6WibmUqngpReEiKE3BKUV+X/GIXfwQOagN
+ F9iG9+/AI5lvMHIkYwjhOFZi5PgQiMUHscEDqNAhhdBhhbBhBSA7cgwNY8wkIA4jNvYBpAKxcVOo+Ck0
+ KOEBVpLnxiXP4JIeYhNnUEkPFVNmMZL0H5LVTp1D/57hRvLcv+cBMVnPIOUHggNU9nP07xADx9g9gXlL
+ sMb+jvUffOMKIBH2GkYcZEsKJZnskje4kjeY0tcYyWQL0hcwBUSmepJZI6pqSbF6WaHqrXzlW/maFVTd
+ KgrxQuTNHQ7QDYgw9YiRy1cvy9SvKjSto5pXFFvXMO0buNZ1YBpMGtgCwjDNm0AzrmUT27aFb93Ete1p
+ C9+2CTcJbVvE1g1iyzqhZY2IaJ3cuiXRJqKWTRIIUAa1bpLaNkitG5SWDXLzOqV5A0RGvhAunnX4oYja
+ Nwjt64S2dXzbOg4ZN/Dtm4SWTXgOmJYtbDOCO0wMgHIFGBs20HXr2Dqw9k+CwAaZqsKkYi80ql2lAOhI
+ ngden1Vq1Qq1cpla/ZZW84ZZvcgof00tfkGWiFr6gl7ynFH0glkAE9BnzLyn7LynPFDOPCf7CSfrMRsQ
+ z5jjp8ywk6clLv6AlTjJTpzgJoxzY8e4MeO8mDFO9Cg7eoQVMcQM62cEddMDu1iBnayADpZvKxMQ92pk
+ ejYw3esYrtU0lyqqSxX9bjnjdin5egH2ah7qWgEWUT72Si72Ujb6QibKKQN1MQtzPhN9Hg7SFC+moi6n
+ oa+koy6lyjslyZxPlAVdSJU/nyyzr2jENm4QH9CCDmjHBiJzTcioECGpgqwt9WACesHFCaHDxJAhWIVC
+ h42gwsfAzhWjxlCRYygJ4ujoKVTsA0zcNDYGKH+gmPAAnTgNOUFcChKrwCIONvEhOvkROmUWlToH+oNv
+ dPo8Gll3mEfWbiRGjpXktveEzgGf/p3mPcT3yP6Dckkme08wW0IEee4iWKB5gytCEn/IsghMJSExXPoW
+ VQGh9gqEoZSqZTKoGtILMJtcgT85GolbVsDwMODiQHkdBLJwAKwvo+uWUaDaZfmaVan6NdnGdYWmVbmW
+ NVQbWPWqYhP4t8RHAfSWDTByRK1bgB2+YxPftU3o2CJ0bBKBSxiRg01S+8YntW0SgM52OLlNbNsiwE1E
+ W8jDQG2b5LYNSusmrWWT2roJB3AG37GB21PnJr5zU3L8x8lNuKLgp8PTQHBHnhJY+5oiEA8RVMM6vmEd
+ ZsaIGjdw9es4JOxBgiUygF6zCuE4JPgJFfDKrFArliiwblX5llb1hlHxmlb6iloMXv4KQnNK4XNq0TNa
+ ISD+jJO7wM1d4Oc9FeYtCHOe8EHZT3hg56kPuUnA9xQzaZKVOMGOH+PGjfDiRvjRw4LIIV54PzuslxnW
+ Sw/toYd2MwPbmf5tLL92tm8r27uJ5dXIcq9jutcy3aoZd8upd8oot0upN4to1wtIV/Nwl3PAwhHEr+Ti
+ LmRizqejHFMVz6bIO6YpOqQqOCTLOybJn0uUO58ofyEZKJc9lyB9Nk7aIUbqXKLs2XhpCF1s4oYIga2Y
+ gDbMJ9AhI46so2ID+zBB/VhYWQ0dIoQNE8JGcOEQkU9ARA5ejihmCowcRnTcNA5Aj55UiH+gAOYNcEsC
+ cXwyZAPnYJESghY0QvljRVDaExSQvUe5JETZAx0sHBZuYJESsXPEy19iQX8gvnewRzkCPbKqRyhBRIT1
+ xeKXsMpIhFxK6SIBMtxlr5GsdjkSgsOioGL5MrpiBV+5SqoEx1qmwvtyDdgYhKeQgFvFISO8j69BvgJJ
+ wNVCQLKOBxeEGAYicjB1IKZ2DUCXalyTbViVbVlTaN1EIdCvoVoRvP5PretYBMQtAqhrm9i5BSJJ2CV1
+ bZM7QVsgCjIisOK74GF7ktxEwN0EwwbBOwC1bYvWtklt26R0IBcAAnfnJlYyfhJ8IfJVG9iOLRw8k+Z1
+ 9B7ryLNCrB3TtAnEQwSFxPf/j5DYCea7tSBJgrIKEi/L5MoVsHZaxRK1DFh/Sy97Qy9ZpBW/pUAQWLhI
+ zn9FzX1BzQM9YwLiufOC3CfC3CeinMfCrFl+1iwva5aTMQuxODPxATNhihE/wYob48QMc2OGBDFD4qhB
+ cXi/MKKHF9bFCumgB4PamQEtTJ8mpk8zx7uJ49nA8mxg30coZ7pUMu+U0m4WUa7nk67mEq/mEq7mES5l
+ Y4H1i9mYCxmYcymKDklyoNMJMqeTZO0SZc4kyDgkyJyNlzkbJ+OISNohVto+WtY+WsYhVvZMtPS+omGb
+ eCgV6MQHdmBhoTUQmXdKcuT92BBAfBApIUAOgPURHDL7HIdJJzZ6Ah09CXxj46Yxv9s5OmpSIWFaMfkh
+ BviG1XhkQX4WUoRIljD1MQQtKED8/x90YH1PMIPMf02C5UkAHew8X5Lk3tOekYMgtQLZbmTZ8iXkCiWg
+ I4gjKgMh6+cgWLKR5M6WYfVbofSNfDlkCVeIVTDTWmXWrbBr37LqV1h1y3TIoMHaTfVbUu0SBVS/RKlb
+ JtcvExtWyY1rZJgRQijcDHHwJrZxU6ZpU6YZtCHXsqnQto1u2UQ1b6CQ4EQSn0hCFBx4+R7lEhG7NkGk
+ TqB8i9S9Te7eQg6Q4y1S7xZxTz1/aJPQs0XoRi4P+FpS+xa1Y4vWvgmigPfDBdC5iekGbWH31LOF7d3G
+ 92zhujfRnRuY9i2IlHDt2/B8cIiv7wnimQ0gHkL8T5JE/zDZJTesQDYTWW9CTH2NVAHRC2RaVmgVK7Ty
+ ZWrxG0rRIhnh+y05Z5GQ84qY85qS84qWAxPQBXb+nFLBnFLeQ3HujChnRpg1zc+a5mbNcDOmOckPICLn
+ Jk5y4sbY4OVRA9zIfkFkn1J4ryi8RxDeww/tZAe10YJa6UEtTP9mllcd06eR59XIvV/DvF/DcqlkOJfR
+ 7pTQbhSQr+QQL2cTLsGcModwOQcHUYpTOupcqoJjsvzZZHn7BDn7BPnT8bK2CTK2cdK2sVKn46TsY6XO
+ REudiTxmF3HMJvyYbZiUXYTs6Sh52wjZfYUjVnEj2BAEcUxAD9YfwpUeTFAvNqgPEizYkAEcpBGhKCV0
+ EBc6hGRaosYJUZNg6sA6zDhxSGg+BeE41J9g4qdQiTMQnaOTZrAgSeUJHlk4gOVfZHEeyaWkPUZL0imS
+ ueZTcHEwdWQEIwc7z4MVylewRI/MPoteE4tfwzoOTCgheQJL8TigHPiG4BsJwSFd+BJX+pJYArHjS2r5
+ a1LlK1wlrGYjC+8I4rWQD1km1L4hVy9S616zml4K2p6Lu1+odS5odcxpdsxqtk+rtU2pto4rt4wptU0o
+ tY+Lu8ZEnQ9E3XMqPfOqfaCnot5XvK4lZhuE1JuYtm1U67Z867ZM67Zsx65i+za2cxt8mgLO3QWOu0Vs
+ g9AZiNwm7glCl27QFhEZt/F7Y88OAdS7S+jfJQzsEPq3CX2fROzbJvbsEHt2ST3bFFD3Nq1ri9aDjHBt
+ wPsDrnsbBw/uBW3he7eRxw/skvp34GrBd2/CvciPg+8AY9s6FiYSrWt4ZGIAQiJ4eItAZgItm5TmdTLM
+ dBvXKPWwVgWzz1UK+HoVsrRErl6lA+UlS5RiJJkIaURy/htqLrjPC1LeAqPwibB4RrVsUqdyxKhi2LB0
+ WL94SLdoSLdgULtoWLtoSCtnUDN1QD21XyOlTzWpWzmhWymmSxTZJYroVgrtEYaAuoWB7Vz/FqZ3I9W3
+ kerXzPauY3nV89xreeDi9yoYt4qpEKhcyyNfziZdSCc6pRHPp+LPpWIdU9AOyQpnkxXsE+XOJMrZJyqe
+ SVCwjZW1i5a2jZGxjZK2jjhuHX7MJuKoTcQx2/CjdqFHbEOO2IBCj9lGSJ0KObqvaPRU7DAmpAsd2IOB
+ cCWgF+3fgwroQQX2ooL7P+XLJSlzKLiBZCIe8oaRE0iyJWYSB6mVOMgeTuEkoAPuGBDgvsf6XvQCC0Bp
+ c8g62e95Q2y6JCgHC98LxyWTTrBwJGiRVJsgNSeQ+QbDBhWCkOBbsmaJ1JkggTgsXkKpUxnkwl/B0g+9
+ +BWt7DVBspQDGXHEuSEfXL9EbX7Nbnkuan6i2jKt0T6m09Gv09mp096k21yr1Vip2VSq2Vyi1VSk2VSg
+ 3lKk0Vyk3lKo1lyq0lGv1d2s292s1duu0T+k1jut1LXA71pkdq/SumAKuKHQtiXXtgMjqmOH1L1D3iOs
+ e5uEOPE2oXOX2P2OhGiX2LtLAiH4ItpDHE4iGkREGtwhDe2QJAfk/h1y3w4ZAR1GhHUqqHcHRrLk+8CX
+ 4/t3gGx4NyDsHYAAd7jZs0f5Flw2yM0uiKCQOSsEPJT2LRKE/ogkE1+I+CGx07SOvGXVr8BCFRVWqWrW
+ kFR6JRThLNHLlhglb+nFizTIrhS+YBS95BY85xbPC0oeqJYN6ZV3m1S3Wtc3nKmrt6tpsK2st65oOFVW
+ b1HRYFnRaFXaaFnYaFXYaJnfYJpbb5TZcCK1SSe+WTO6WS28RSW0WTm4SSmgQeTfyPOuZ3jWUj3rYd7J
+ 8gDKKzh3Shg3i6jX8iBQIV3OJl/KpDilURyTiWeTcA6JWPsE9Ol48G95gNs2VsYuVt4mWu5UpLRNJFAu
+ axclax0uZRV61DL4qFXIUZuwY3ZhQPlh65Ajp2AMO2wZdHhf6aRt/CguqBMqtFCAOEI5eHkfgK64tzC0
+ Z+oAeuQoIWqMEIWELjigHBLkUHGFzEcnINmCiZv85OWJ04id71H+/w06ZA+hQAUqUjAZkgSiJBxHJqAg
+ AB0pCXxFKFgEUwfzxha9wBU9xxUgaz0QjuP3qv9KFpEVTajmQwr63kI4DssZUAVKK4HpFCwEwnLgMrV2
+ mdv0RqnlmWrbjHrroHZLm25DlV5DoV595onaOO3KYLWKAOVKf9Uqf40aP81qH41qX80aP40qP80qX60K
+ f/WqUPW6CM3GKI3mePXWLI2Ocq3OFs2eMZWheZXhN6LeVXonEiEotmzKtW0rdm5jwGjBZft2CH27hN4d
+ fA8Q+Y6IaBfgllC7uycSMiJniD1gxtvkwW3y0A55eIcytEMd3KHAmf5tYJ0kEaVvG0Tu3Sb1wsldOEPs
+ 3SEM7ML1gMD9ifItxNcRuBGB00tA38T3bkA8g0MCpy2I7yltyFwW3nBILeskSTIHyV02rhDrl/B1S8ga
+ Vs0KEVZAyxappW8YpW9YJYus4kVWwQte0TOlsqdaZY9OVI4bV3WbVdVZ15Sebci71Jp1rSXzSmv21eac
+ K825VxrzLjfmXarPu1ib41SXd7Gx4FJd4fmKgjMlhacKSi2zy81Sy42SyvTjSvWiS3TCy7SCKlX9qoUe
+ lZJApQJiFdbtIsaNAtrVXMTIL2dTJF5OOp9KcUgknInHnIlH2cWCf8vZxcnZxMhYR0oD4lYRMlYR0tYS
+ 0AF3i5Cj5iFHLIIR1k+FHgW+rYIPWQUfsQw6YhF00Dzg4L7SqdPxIxCjQ50WGDkaKA/qBy9H9EfcIqmn
+ hRVQxNEjxwDu/wM9agITO4kFyuMmETvf0x9xC9j5nqNDpSEU+kB1G+Lr83uUw7IOVA5CHdwnL8+H+ATm
+ PW9IBYtIYSACN1JNBSO+5CWsZRLLFpEoHKabEuFhrb4USl6B8mVy6SqhbAUHqYPaRWbTS5X2af3GLt3a
+ crXqNNXycNUSH41SV62y2zolV7VKnVTLHFXKz6tXOmnVOOlUOGpVnNOucNKuvKBXdcm46rJh5RXd6qva
+ dTd06m9rN7rrNPrrNkVrtxVq9LfrjD/QHX2t0r/O6NrGtu3IdbyT7dqV793B9u/iB97hBnbhQKJ3+D7Q
+ LqBPAvUijAKvyNi3C9SS+3cBcdrQNnV4mzK8TYZxaIcCpj4A2v1/RQGbB8HJPiB+C2IViuQYgXvP2vfc
+ Hbk24IxECOgSdW/AHJcM6lgnIBlJmOCug0gta+RmCeh1klIFWKmFDHr5ErlkkVLyllayyCh6xSh8xS54
+ Ji55ol31yLRm2Kau9Uxj+fnmrGsdSfd6Yu/3Rd3vjbzfH+PeH+veH+fel3C/O96tPc6lI/Zud9y9rljn
+ joRbrYnXm1Iu1aU5VWaeK892LMs+U5Rlk5N9Ki3XKjbfKLhIw6tIcK+QdbuAeT2fcS2XfiWbeimTfDGD
+ DIifSwEjJ5xNJNnH4+1iUbYx8qci5WyiZa2jZKwipazCpS3CpD4p9LhF6HGzoCOmAYfMgemQo8C6ZAS+
+ D5sHHbYIPGIZeNjM7+C+4onTMUO4oA6MfxfGrxsFoYtk+VNRErpgkYIWSYz+B+jg6LD2idg51ItPIY4O
+ oMdPYiB02eP7/4fyPdAznpAlFYVAPNTKgoUD3EjBt6SKEBkhOv9/QCdCcILYNtD8e2oFWbpfJJa9+T/Q
+ IRwvg0qMt6TiN/iCRagoxNa+YrQ8FTePatSV6JREK+V7cAqu83LP8bJPi/Lt1AptNIpOaRRYKOWfFBSY
+ ikosVMos1QtNVfJNVQvM1YottcosT5Rbnai00a2y0ym306k4o13jpFt9Savqhmq1j0pzkkZfve7QhObI
+ a9HABrVnF931TrrnvSyQPbCLG3iHGdzFDEnGwXdYhPt3eLgAJNcAAdEuYEocfE9C9I488o4yvEsd3aGO
+ bJNHgfVd0hDQ/44Mdw2+ow69p0qIB6wRwZl+xOPhCz+Bvsf3H6BDpP7/A3rPhgT0DWLnOqLfk5vEtrXf
+ QYfKlk+gQ8oFybSUSF7M0jeUkpfU4mfcoseq5dP6VcOWdS32LeWXujJv9sa5Dkf6DYf6D4Z4DYd6j4SD
+ fIbDvQbCPHpD3XtC3fvC3QfC7w9EuPWHu/SEO/dEOXfH3umIv92RcLs94WZL3NX6hMvlKRezUu1i0439
+ 09Xc0nl3MjjXM5mXMhkX0qnnU0iOyXiHRNzZRIJDAvFMHNEuBmcbgzoVKW8eKmUZLmUZJmUOZMMYJmUW
+ chxkGnzsZNBR44BDpoFHzAKOmAUeNQ86ZhZ89GTQYZOAgyb+B0z8DpgFHDT1PbCvZNwueggb2ImsDfl3
+ IdF5UB8qpE8hqO9T6PL/gA41LeDokGGE5U8kdEEWQcHOIUZ/AHzD8hAC+h/ai1v2xvTHJKSubR5qxHFQ
+ N7tXBf475aQ8KPV8CaWzJMi67Jk6xOVg5ODf4OKS0pRPiMN2hDJIIMIyPpTXvYVlDqjeRs5AuWzNa1rL
+ glLLiHpFgSjPhZvhSEu1Iqeb0DONOZkmomxTlXQjcbohP8OYk2HIAmUZc7KN+ekGnBR9dqoBL91QkKUv
+ zj4hzDUSZxkrpxkppZkoZ1uoFp4Sl50WlV0Qld8T1McotVerDT1QH3krHtqk9W4r9u4o9O2Al0Pogh7a
+ lWgHNQi4S1jvf4fph4P3mMH3ONAe/Yh28UO7xJFd0sgOaXQHGUfekYbfkYfek4c+UP6gfOg9IL5n8Aju
+ QPb/G7oA9H9YO4COGD84PXj8JrZ3E9e7iZh9F0yOIYv/KZFPhiw+TKzBzptWSODojQjrUAIABS0wASUX
+ vyVCTVvZa2r5c1bFE+XKqRPVg2bVzbb1Jec6sm/1JroORniMhPgNBfv2h3oMhHoMhtwfDL4/EHy/J9Cl
+ K/BeZ6Bzd/C93tC7QHl/6L3+kHsDIa6DYfeHwu4Ph7sPR7r3h7l0h99ribxTEX0xK9YmMk7PK07pbjzv
+ SiLzfAr9fArVMYmMxCoJuNNxuNOxBJB1JPpUBMoqQl4C9BHT4KMSuI9Kbh4/GXTMJBAoP2Lod8jE/8jJ
+ gCMn/cHaj5oGHjUKOGzoe9DI54CRz35g3cRn/76icZuIAZR/O0xGocQcKMeEDGBDwdQBdziQ2PmeIHRB
+ YvQxyDBCygWRZD6KZF0kM9FPXr4H+qeIBQo155CCxPQ5qN6EqmUoU4atD7BFBQnNP3n5C0jQ0vYEleL5
+ rykFi5QiJDWO5MiLXkIZNJREwz6aPUcnSOoNoU4a1jjIVYvkmjdQxk2G2u7G1/z2ae3aCtVUd0rsSVy8
+ Lj5OkxijTo3VYMfpCaJPCCK0OZG6jHh9avwJSrwuOVGflmzIiD9Bi9GlxurS4/WYSTrMBC16uBrFW0S5
+ r0S7r0T3UaHG6DLyzAXFtqJ8e06RM782TrW3SWd63mhmXWtsi923BhNBiBzIfVu4gR3MMCI04usI6IA4
+ GjT4ATP0AS8B/Xd3f4cfekcYlmjkHXn0PWXkA23kAx2c+5OL74C7U8H74a1g8D1i85IEy/9NQPeg3wtg
+ EPrhMpBcCRC6wDMBQeaxewvy9JCYR5alICnUDjNRJEyHlIuklmaF2CCpcqlYphS/pRa+IcDSBGwxKXnG
+ LplVKh/Xq+owr6i2Kc85W5VwuT7yTlPQ3VZf13af+21+ri0Bd1oDbrf73en0d+7wu9Pqc7PZ52aT363W
+ wDttwbc7Q+72hbkOhroPhXiMhnmPh3hPhPpOhPuOhnoNhnj2BLk1Bd4oCbFPCjMOjFC/Hym+GstxTKKf
+ TaTaxxNPx+JPx+FtozE2UZhTEWiLUEXLMEWLMLmTQVImQUD5MdMQCd8Bx4wDjhkFHDX2P2LoewhAN/Q7
+ DDLyPWTse9jE/6iR31FDn0NGPgcNvQ8Y+R4w8gbQJ20ih1B+bQr+nYoB3ejgXmz4AD6sH9KLSOgCi/97
+ lP/fZBRAn8DC+v8e6JLJ6N5MFMm6/GHnn0BH8i17+gN0ZOcVgJ75FL23DSLnOfUP5b4gg/JekgtfEIue
+ E4pfwL4HZG8LbEUreYPMRKFktPgtDtY+YfZZukQqh1JSycYcWMZrfK5U26mWEkjzM5cNUJIJV8aGKpP9
+ RRRPIc1VTL0nIrsokf20mFF6jGgdWrg6KVqLGqfHjNVhRGjSwjXoUZqMWB1ahBbtDht9Gi/vwKSd57Ev
+ sUjeYmq6kSjfQpRnxc93UipxV69O0Gip0xyZ059c1R5aF3RuQm6EhoTX27iRHezwDqp/BzUAAcwnCwcv
+ Jwx9gKAFyQZK7JkED4asy9B70sgudWSXNvIOEKcNvWMMf2APvGMMbFGG39FGP9CHweDhYe9IoEHw7O3/
+ Yx2YBkf/HX1JukZCOTymbwffu4Xr2sB0bGLaN7HtSKYfybq0IJQTm5FKGyiHJELuFWp3q1cIZcvkwjfk
+ 3EV8DpK0ZZU+Vi8Y1c9rMM1Jt0gLs0r1dshwuZxz51rWtUuZV51yblzMu32pwPlSyd3LFc5Xq+9eq3W5
+ Xn3vWo3LjWq3W/Wedxp9bjf7OncF3e8L9ewP8RwJ952I9J+KDJyICRiJ8B0O8e73d2/3vl3j75QTYhUd
+ ru8drnktimcfTz0TTzkdS7SNxgHl1pEoq3BEFqEoixAFs2AZ0yAp02CIVaSAcmP/Y0Z+xwx9jxj4HzX0
+ O3rC+yAcG/oeNfA5ou992NDnsLHPESNvyehzWN/zgIH3fgOvH/YVTVgD6P7tCn5dKP8uAB0X3o8PR2rN
+ gfK9Wlxi+AgUmpMg6yIRODom+pOpwwEmdgoJ0EHxD9AJ0yBMImRdJNNQSR4dAT39MZTkU/ccPROqbRFH
+ h/kolB9SQDnPqDnP6DDC2hso/xU4OqX4BQnWPvfyiUA5sF70Gtb2sYWLGGRf4yK2aIkIb7iwzFG2SK54
+ yayaEeUWcYMuK7qqH/USyAaIsF48/F024RwRZY2TtyUpOtCxzmJ6sAY7UosTqkoPVaJGqTKi1VnhyoxQ
+ JVaEGidSix6oznAio3WlpI0J1DN8ZUcm8w6bGqPFzzTiZZ3k59qpFtzQyvFQTguhxsdLV7bTRpY0+zY4
+ PZv0vi0KJMWHd3HDu+gBRBjJ3BQsHIAmD3+gSsJrEHg2BNzgwSQIUUY+MAZ3GcPv2EPvuEPvhaP/VB58
+ LxjYZgy/h/MQw8BjSMMfIKohDkJW5/f04h7fiKNDmmULMfU90GHW27ODgyWkLkB8A90OuX9J7Q1SabOF
+ FIRBWSXUh4GdS6qFkYL18hVcMewTB9CRpToIzcVFDwySG00i441DnA28HPU8rU/6mFv7m9r4nbT0MTP1
+ tjTxszYNs7OMOWObaH865cyZdHv7DAeHbKfzeVcul9y6UX77RvmdG/Xud5u8Xdp83Xohmo/wHY8KGIsL
+ HIzxHwjzAdA7vG41+F4uDjqdEG7qG6p9JYx3JppsG0O0icKfisRYRYCFy5uHyFuEKsB4MlD2ZJA0gG4S
+ cNwkQGLVCNYQrhzR8zmo531Q3/ewvvehPel5HTTwOmjkfcTA84i+5yGJDhh47dfzBNDHTkUPogPbMX7t
+ GF+I1LtxIT3YsD4slCvCTFTi6PjIYULUMDF6hBg1QgLFjMKOZgKiMXzsGC4OHH0KYhh07LRi3Aw6bgaW
+ S/GwHS55lpw0C7unSCmwZoRs39wLXWBfLfr3zZpQNQ6g7zk6DVjPfU6H4s+C14yC1zBSkCLE18RCSMIs
+ wvoormBxr2wLNnTCHl4srGggk6dl2DVMqnrKrehVSolm+lrLuyhJuQkVvUUEdzb+KhFtdPyI+rFjxnj8
+ GTrtCovmKqQGaHBC1FmBInKAkBSoRA5WpoUoMSJUWZEazEAV9jUq+cQRKZWDx4xRuNME3C06MVyVmaJH
+ TzdgZ1qJ0pxEMZfIkdeOR7v8kBFzfHhSY3ZLZ2Rd1LUMKzvEvg8QroCdK/bvKEJE3vMO1wvhygfSwAck
+ 0zIIgoDkHQ0gBoceekfs/0Dpf88a+6f62Dvttpfiuse8nhXVoV3R6C4LJqY97/A9PwHrlFHEy3Hd77CQ
+ yUFmtO/gy+G6IiMLq8giFL7nHZK77NqF5Sp8xza4OKZ1Cym/Qcq8NnHNG3hQ0wbUukC1OlKejmw8XSKW
+ LRGB8qK31ILXUHNLzXvKKJlTz+kyCEzUdrmh7Wypd+WExiUtzavq2jfUdG6oaV9V14Sbl7U1buhoOOtq
+ u+npeZw44amn7613ws/IMMjMLOyUVaSNTezp00ln7dOdHHOuOJXcvlrnfqfNz609yLMz3Ae47/R3bfa5
+ 3eh7o8rvfH6oTUS4gXOo8Hw4zTaSYBWJtY5AnQpDmYYonAyRMwcjDzxuFHTMEGJukP8RA7/D+n5HDIBs
+ 38MwnvA6qOuxX9fzgI77j3Cg53kAxhOeB/S9EOKRuzwO6CHjDzoeP+wrHjsVC6C3YfzasL5QBdCJCexE
+ BfegYYMFrB8hFV2DuPAhHLJaBLuHRsgxyL4hUgxsqoDNFiP4mFFcNBLGYGIeoGJmUDEPMXEzuNgZQsIj
+ ctIcJXEW2S8IG2PTnsA+QpiM7oEOK0TIOigSoyOxCuVT6IKAzsh/ycx/xUAKQV9T8l6R9jbkwmb7vYou
+ SXEiwnrha1zpKwri9EsQwROr5/llLUpxPhQPQ1kXgYybAO0lJLqziOfQ8io//sj68YAWCm1LZV6kM5x5
+ FG8VZoAqK0CJ6i0keArxfsrkQBVasBI1WJkapMJ25TLtFFEGR48ZHjvmgJHz4dPjNNgpuowcI262hSDx
+ NCP2HCbx0vGMWwfT7h8caFF9+Ex/5KX64Ipy9waz9z2x/z26fxfVt4OFzDeQ2gv5lg/kvvdInnEvdAHK
+ IWU+tEmE9HnvNmvsJ63JHZOSXrpb5N9u+f05t5Uwsq46scUb2KZ2vCe0Q8CzRR7foAxtk5Dvhpg6rKdK
+ QhSgHFwc1qd28d07+K5dYscOlIXh27axrRIB2U1QPLyG1McjdV0bSGlXzSpsK4GVY1jwJ8GbZOFbqNqn
+ FYC5vGAUzPPyxtVii1WdXTgXLYWXdbWdNDQcVVXPqaheUNG4oKxxXqzmqKLmqKbqqKJ0TlnpgpLKJSXl
+ K8qqV5RUrquo3VRTv6mheUtT+66OrtsJfS9DAz8To2BLszh728xL5/NvXyt3d27wvNfo7dzg69zgc7vB
+ 91pFyPmkSAuPcNULoVSrUIx5mOKpcNSpEMWTQXJGQTInA46Z+B82CDx6IuCofgBEKQD6kRM+R/W8D+t5
+ H9L1PKjneUjHfb+m6w/abgA64L4fpAfnvQ7pehzUcYczgP4BPS/kAthXNmadMIgJgdLFTmSnHIAe1AlZ
+ F4jRoagLSnMRR4c9RNCXAvYTRUgaEsBN5Aw4/SA2YgTqdSFkR0VNKkZOKUZNo6OhugtqYKAGfZaUOEtM
+ miMkPSakwJ78T6DjJfVbku33yDZyUs5zAF3COoQu4Ogv6DArzX9Jhc2dEKwj3SYQ7bEOVS571S/I5uWS
+ l0RItxcuomEjY9UjXnGNMOouyVVN6g7l2F2mgicPf59DtMfIKu3/nvb9d+Ljxy0I5EsMmiuH5MkHkX3E
+ FC8x0UOI9xTgvYUkPyHRX0z0V6Z6iRl3ubSrDNIVCsaNRYxX5WVo8TP0OHnG/BwLXpodI+8qvcaNXn+f
+ UOAsleEtNTZ4cv71mdEXJ4bXVPrWaX1beIhhBnepfbv03neUXpgpvqeCkPQ5ZAl3KbAAhETkEJ9sC6d2
+ DaY3bLLKKV7e/7h34yuXy1/FRR0eeqzyYE1teIvb9Z7W+YHav0UdWacNb9KRVdJdYB0H601/qAtBHN/5
+ Dg+UQ6ELUsu1CRWUuNZtKNEhNG1AuIJs2kC2q6592ksKTQRgeaj8DYCObJaF7UKwiwI2B+U/FmX0qgQl
+ CK5doF8w5F1S13RU07BX0zitom6vrO4oVndUUnNUVnNUVXNQVT2rrOIoVj4nVDrLE5/li51EKhfEqheU
+ 1C4qa1xS0bisqn5VDaR6XUPdWUfLw8gg2MYq4eK5gtvXK1xvV7k713rerve+WRl0OSPawS9K9yKAHoIx
+ hXAlWNYyWM44SE4/UBos3AgB/diJgOMQjgPlBhCR+xzT8zqi53lY1+OwjvtBLbcDWm77YdS+f0DT9UcQ
+ HOh6AOh/6LCE+0MAum3iEC60AxfcRQiGfhXduOBudCgSuuCDYL8FUomOA8rD4DFQ7oKUeUGBF3IGEeL0
+ 2PBRTNgolO8qhk+iIh9goqYRRU+j4x7i4qBGdw6b/ASfOg/dVJCd4RCjI50nJKxL2oPA/nxi5jNSJnQF
+ eUbOBtzhRX8J6ZdPAtx/Zx3KXSC/LglmECEpyIKXEMagSl+Qqh/wC4v4oZdwLjyp25jjt8iyrmzUPRbm
+ AgVlgpLWkD2mryh/lkp1ZjN8uWQfNs6ViXJhodx4GC8RwUuA9xLg/MTEIDEpQEz0E5N9RBQfFVqQGjNC
+ jZWkxsrU4KbpsHONBXkW/FRLcr4TudWF3e3Kar5Hy7+LL4kTPRhxnJyzHn+lP/RWMLhKH9hk9G9x+3cE
+ fTucnm1m9xazd4fds8Pq3mb27LB7d9kDO9zhTeHouu7kW+u6Nl3vu4cCrxxKuSWdfv1YTRRh5aXN8xXj
+ kRVx7zte93tG3zZtYAOuH0b3Dq1jB9e+hYWl2c4tPFJusA0jrn0H375LQLQjAX0L1wT7PCAo3yQ1bpIb
+ Nsj1kj2mdetEmHpWLkM5LkziySVvYIUIiuHIhVCW+IyeNc/OnlFOalLzCuBfOcW4rC26qKJ1RknTWlXT
+ Wk3LRkXTXlnTUVXzvLL6WWVVexVVB2UVeyVlB5HSab6SPU/sKFJ1UtJwUtZ0UtW6qKbtpKZ5QU3zvIrq
+ eWXl80riCypK17S1PMxNE5zO5ty8WuRys9Ltdp3HrUq/K1lhjn5huk6BJIsglGmwnFmgtFmgjEGQzIlA
+ KcOA40b+xwwDpPR9j+v7HAMv1/c+putxRPv+IZ37APoRbbdDmi4A+kFN1wPaMEqO4QAZ7x+CA8kjD2m7
+ H9S5Dyujo7ZJQ/iwdmxwBy6oC4QJ7saE9GBC+5BN0AA6TEmDB6AWAI3MTaG6CxFi82GDOKAfWA8bwoQO
+ o8KB9UnMH6DHzKBjH6JjZxQToEZ3/g/Q91iHcheo6PpUtAhJmIynRETQGwRYf0HJfknNeQmFcrTcV2Dt
+ e6Ajo2TrPlJptKcCCeiFr1EVL8j106LCfEHIeZwbV9YFJ3eTLHWLIXOHIXeDhb7IwjnQcedpxLtsuheb
+ GsQlBnAJHmwMXAn3eWgfEc5XhPMRYf3F+EABIURIChOTQ1TJIRrUcC1GtCYjXo2eoslKPcHNNORlmXET
+ zQiptpiqS5SuO4JeN7UaN+UQJ2xmmPDtK9epecux51rjb1WHl5X7VtUGN3X61jV619RBPevqvZvq3Rsq
+ XesQ4ah0rygNLWk/XD9T06rjdv2Ar8PB7Kvo5nvEfndKrz/+bZ/J7tr50Zeavevi7m1O9xa9CyoZdyit
+ u5Q2KE7cxkMiBSiHcBwqFsHFwbz3LLxlG9e0hWncxDTAjqdNAmwkrYeK83UoZZH0yVghVq6QkLh8kVj8
+ BkrlKAXPSYXPiQXPqdkL9Iw5XtqoalSxuus9wWVj7hU1NUeRpjlHyYCrZChSM1PSsFbSsFfWcBSrAtxn
+ xOIzIkSnhcLTArG9UPmsUMVRpHZeSd1RWf2cioajiuZZFQ0HseoZoZIdl2vL5doJBZe01f0sTePOn8m8
+ eakY0jX3b1T5XM4MsPf21z7rQzD1ByOXMg04bhoopR94XC8IAd3Q75iB7/ET3uDiR3U9j+p5HAV2tVwP
+ /iGNe/vV7/4II5yBEdAHSe6FUXLgcgi+BK6BT6CHtEHEggnuwgZ3oUNhMgpJxj5cGDSxGEBidITyfgyQ
+ HQ55RqSSUZJZH8BDIjIc0jII8RjYXxc+jomYQkU9gOgFoTxmWiFuRjHpMZg6JhnZCQqUQ6MVctZTABrp
+ GQQLRrA4ivQPQvY7Qx8cpBVOLggC9+eU3OdQ/UzLgx3QLxDQC16BIJIBxCl7Qvb2w+701+iql5SGaXFe
+ DjfQAePKlbtHVLxBlb7FPH6XLX2XLX+HjbnFxt9h4iFk92MR/JkYbw7Wkw8ujvfm47zYaD8Ed4jp0f4C
+ YpCAEiqihqlQwzQYYZqMCC1GjBYzUYcLGcYsY1G6KTfekhxjhco5Q6i+yK6+Iiq8phxzmRt8hVKcrNnf
+ bT49Zzn72vLBG4vJJeuRt9Y9LyxAA4tWnc+Mu1+ZDq5YDq5a9Lw1GVg2f7Bk3zVq6e5y/J7tj1lXCa13
+ qJNutNcB3Hl/2kQCe3PO6clb+8G3hn3r6l2bvJZtav07Yv0uoRmi8B0iCAlUdpANTZBOadnCN28C4p/U
+ sIWt38bXbZKgDYYEcXIVUogPQTm+dAl2pRBgp3Mh7I57TYXd5fkvSDlPyZmPaZmPhCn9moHpajcvCa7o
+ K19S0bJiK2sQWAIsXUzl6HCUTvKVrQXKAO5ZschBLDgjFJwW8O34fFs+HCvbC1UcBCoOItXTIhUboYqN
+ QNmKKz7FU7bmK1mxeafYXCse96yq0l0D3UBb86QrZwvvXK5yu1LlcynT387DR83eG2cWIA+gnww8ZhYk
+ rR8opRd43CAAKD+m74OArut5RMf9sK474uJ7KAO7GvcOqN/dr+b8I4yaLgD6AYAb7tW5f0Tn/jFdd9Bx
+ 5MDjqJbb4X3FwzYJULbVig5uxwR3YkI60eHdmIgebHgPLmKAGNGPjxzARQ7iIkADgD4IGzYANwmRg8TI
+ QZJkJCC4I3ulMZHjKGQfxgwGMjAxD+TiHyokz6GTZlHJyCY6KOqCdivI7BNJlkMJ6GvSXq3iXv+33w/g
+ GpC0zkJS7Ajl0LtCYuef4vU/QIfCxoKX6OJFTNUrSt2UIDOD7mUn7cyRvktWvMkAvmXvMaXcWLJubJQb
+ C+vGxHmwcL48oicbf4+NdRUQIUHuL6IF8ilBYkqAOtVHg+YP2RhNQZS2OEZbFK0jCtXkhmgyI7TZUZrM
+ BB12hqEg1ZQfd4oRboWLMkfFGCtEmWLDrWkhZ9ne9mS3s/LjnZfnJq90thvPzF6cenK1oEYnp1y7oFov
+ v1o7s1ytoF6ncciqbdK2esC4bdK6d8zey1Pxut3BpBv02jvsgdvUl16cD2HKSwGCkWDBu1nP5c2gJ5v3
+ prfO9a/rNW5wqnfINbuExm1S8zapZZvQukNELBy27W2TWrdITZvEhi0CqHGb0LBNqN8i1m4Qa9YJ1cD6
+ OgVAr1iFZQdk/UGSwoLKOdhIQcmBN88XFKQuA1pTzIhSuvV8E9SvnhNf0FE9I1DXpwkFGBpFjkCVJ4oI
+ TEO2yJIntuUL7UUggZ2AZ8vn2ggE1gLhaXBusfppoRrIiq9szBLpUXhaBLYOiW1EF5gxBBYsviVXcFqk
+ dFVD3cPUIPKcTeb1syXOTuXeFzL87O77qJ7xxZsC6MFSJ4OPWQTLGARK6QYc10eS5ccNfKUN/aT1vYDa
+ I7ruR2EEyhGa3Q5ruhzSuAd8A/Tg2YiFa7vBY46d8Dx+wlNK30vawFsGxhNex/U8j8KeUdt4SJkD6G2o
+ 4A7FsE50RDcmshsb3o2L6MWH92AjejERYOe94PRocHrE7BH6iVFA+QApauAT63AlhA3Djmk0bK6Le4CK
+ nZKPBdBn5JJmFGD7HOww2gMdci/AumQOiti5BG4csmnoxafNRHvHSFcWsPZn5L2mUL+D/sfcFAJ02MUI
+ 3Vowe6DXPuBn5tB9HWWdxVLONPlbNEVnurwrTcabqejHxnowwcLJfhrsAG2+v66Sv6F6iKlOjKl+uoVx
+ ka1lmZNd8bXTOTfO5N46V+p8qc7lRtPdm023rxY52sYaqYVrciPVGAlqtMwTvDRTQbQlK8iS4Gck76V1
+ zF1P5p6B4n1Lkrc9654t/lGX98KwT2IgLzlCnBijHuQnCPTkezhTPe5RAnw4Pl700DBecCgrIpqflaMT
+ GEC9fvZ4+A1Ozg1R2TlC/yX86/ucDwFqT+6wR4JO/Hsx95//rd/5X8mbnyNG1xwalpWrN+i1W8T6TVLD
+ FrkR9llvwUYKyBjim7fIsIEa6fSygUThtRv42g1czTpsmMJVruEr14lVG+SKVWLZMq7oDbrgFfRSxEJx
+ KKSzcl6RM16QUp4S06Cr1hwtY0qU1Kp7P1zN0VZor6VmLdTQpgu5OAZRnkCQxnBQ5BMMnjlXYMXl2CB8
+ 8234/FN8gZVQfEqsaiNWtxFp2Ai1rISahhwVDTJfhGFx5Wk8Oao6gWtAF5mxlC05yjZcZZizOp/Q9bc1
+ T7x4Ov+WY5H7+TT/Mx7+Wvb+ZOMAOaNgaVj4NAs6bhQoDWG6of9xI18pI39pY39ZA/B1r6P6XkDwMV2g
+ 3PUg6PcABvg+DOjDNQCxjYG3lKGPjKGPrJGvHAgOjP1glEFAj4PirWZUcKticJtieAc6qgsb3YWL7MaF
+ d2LCOlHh3ajQbsWwHlRYNzrsU1QjAb2fCFdCZC8+oo8QAXNWJIhXCBtRjBxXjJ5UjJ2Ui5uUjXsgkzAt
+ B7tFJfugJc0QoU0c0iyOiKwWfQIdyMbkS7S3DpoP0CMdWoh5zyCS2WvnCdG5JFx59X/oF72AanVorIOu
+ fEmufyTMLWYFXFFwVpNyZincpqLuUORdKXLedJQ3C1aOyGEGapkXrUpcLjYGunfFhgwmR0+lJCykpjyN
+ iZmPiXydn7bUVPy2veJVZf7r/Kx3ZWU/1dYspiWXOdiEKLGiVVnp6uxcfRGUvsRY8n1NSW4n5O5pSTnr
+ yV7Xk719Eu9hz/U8y+kuurc0mRR5T3zJ7Pg1W3RqiHnADVXPC7wQZ9WUQGN/Z8FFG+nbF7Aet2jBnsLb
+ FzDuTpTwC7wEG3KuqVynPXr2Km3hMqPfgfos5+ZvO52/fZz578fR3V8qHm24tb01rF8T1G3Sajcp9VuU
+ hi1S4xaxCeaa0DJgk9q4QYFAvG6TXLsB/k1AEIcc4hq2YhWPtKmBus5laMwEqxCwOxGV8wqd8wqTDdu4
+ XhLTn5MSnxKTF4jQnSJtTBTfqOMapH7aSminoXZKSVObJebjWQwUlY2mqZA4hkyBGYdvwWZZ8tiWPL6l
+ QGQuVDIXKVuK1ayUtKyVdK1EOid5WjosFRGBz1RgUWToNGmaGMvToYmNmCqmTJVTLJVzfJWbmtoe5saR
+ DtYZV+2z751N9bH3CtRzCKKeDFI0CZWGmi3zECmTUFnjUHmTIBnjAGkjkJ80Yu0+iGA+qu959AS4+/3D
+ MNGUCAlpQHANwJVg6CNt5Atwy5v4K5wMUASZBaFM/OX3lY7YJsAGiyb5kFb5sHbFqE5sfA8pvpsc00OM
+ 6MKGQyTTBR6PjurFRPfho/oQssP7CRDSIOojhIN6CRDShA4oBg/Kho7IQ/QSNQn7phXipuTip2STZuTT
+ 5hQy5tAZj/GZT4jZ86Qc0FMitNSC9izQwQL2WBQ8wxS+QJQP7VlgVyi0+US6tUCrLUnDreeQW0RCl73o
+ BWnnAmcgA4OU8mJKXmIrX1JqHvFyKhh+N2VvqB68TpW6TpW5Q5N1oyjcpyrcY2IDdZVr7t98XpK91Vq/
+ 29b4U1fbL+NDvz4Y/62raz0m4ekd13cxMR9bGz72df2UnfX89t0dD9+PmVn/io9ptrUIZZFjhMw0NX62
+ vlK2hUaCtbKPOeOGnuJFdamLasev6CpcNSW6OvCDb2rmR54dawhJ9DZxO8t1OcPxdlK5bycKvKDlfVYU
+ cEnZ/5L4mgXuojnG44ooxOWEq6PA11GQeFE525ZWZKLQYoXutEK1W8i3nmdudsR8fD/18ZcXv/z67J//
+ G3zzLql/0b5pRbN+Q9CwzqzfoNZDIgV8fZvauElr2qQ2bVIk2RVa3TpVwjqxcpVQvoovByNfIZctU5CC
+ ROj88ZaQDYi/xmQvYgH0nBekzOeUlAVyyhNyykNG6ohabI2hi6+Og4WKrZqqlVjdgK+sQeerEnmaVLEe
+ S9mYq2TGEVpxOJZsjgWPb8EXmgnEZkIlM6GqhUjDSknXQqB1kq91gq0uJApoaA5ZgUmVo4twHC2qQI/G
+ g3gGTP20UOWimpqL0YkQW/M4J5vk22eSPc74+J84H8I4FaxoHnrULOywefgxs1DZk8EKxsHyJiFykIo5
+ GShjEiAFK/+GvrD4fxxGA29w96Pg8RIdQ+iXIC6hHBLwcib+IHmg3DQQkjko0yD0vvJhm+R+TGizXGir
+ fOge6L3EhB5ydDcBHD2iCxPViwXKI7pQENIA69G9QLkE9D5cVB8hEny9jwTohw0ohgzKhQ3LIzn1CVT0
+ uELMBJi6JHSZVUybVcyYxWc9JmZDk755aCKHz0a6JaKhyVb+c1zRM2SbRcFzdN5zVB5si36GlrQrgi5F
+ cDEgoEP0skc5HMB5OAldFCV2DsJVvCRXz7Fza2n+zjLXVA5cIh6+TD5+kyrrQgVTV7hHw0SeUJsK9//Y
+ 1faxq/N9UupKcNj/Sss/tnf8lpn77NS5XopgVtPg57tuv/gGL546209gTRBZb7UN5vUNKtnMGBw6jk1J
+ UOEk6whTTVXjrFVcDMkO6tJnNKRtxUfOays4n+a5X1CN8TIvT7tZn+tSmnTN+6q68xneDSvWZWO61znN
+ aydp10zJbg788HsGd+y5QfcMEgJOB9464WnPD7Vj5tgzq+woJQbSBVoHu85S+9y0dkezP75/+PG/b3/9
+ beV/H+fe/7fy0frdjmXjhhWl+jVW3RpCM/h33QalFoEeKKc0SUCvX6PWrJOq1wiV0CsUYZ0EoJcuU0uR
+ bluUoiVyzms8KOsVIeslZLdomc/pqc/IafP09IeCzBG92LKT99z0HE4q2ygrWYpUTETKBhyxPl1syFQx
+ 5Kgac5VN2HxzFseSxTXnCMy5IjO+khlfxZSnasZXM+Wqm3HUzfgaBmxVZQqfQ+Sy8FwuhqOC5+mQ+fpU
+ 7kmmyJytZCNQOa+qfldfP8DaKvr86bgbpxPdzwQEG12O4NiFoa0ijllEH7WIOm4ZIWsRLm8WLmseLmsR
+ CrUAcuYhslDxchKSj8FypkGyxv6IzUuiGhkjfxkYIbw5GQAuLmcaqGAWhLi4RBjzYIxlGNYiBLevYsQ2
+ bQAX3qIQ1qYQ1o6K6EBHd2NjIXSR2LnEy7HRfVg4COuCYxy09YJ5aiQQ30+I7iNE9xKj+ylR/RCvY8MG
+ 5RHQR1FRY4rRY/Ix4wjoiQ8UUx8ppj1CZTzCZT0mZAPrj/HZjzHZj1HZT1C5C5jC57hiYB3xdXQeCAEd
+ GikiNP8Beu5zJEyXdFRE0EdAf06Ewi+kmhfabkELtWfMojZmiLvCDbVDl0nHLpGPXwHWiVL3SLKeFFQ4
+ h9Jrb/ufqMhf/IIeaxr3ELkvjaz+dfXOO4szs/LU0T8fHv/u2AKWvEjiPTqCGvvL/omvf5j+9vDg/mNV
+ R49nKcimsEhxquwEPUG8iTjCUnxDB2clOmKhfMz9nEqCp3Woi6nXdb1o71PRftZ+dw1crmoYqUvZm1LO
+ nqSYKEnfPqPkfkHL55rOVRv2ZRuWy2U150tqgW4nwz1M75/lwUQ2y4lXepaRbSCdpnmw3oE+FGS2Opr2
+ 67vJX/75+tf/rX387flvH3vW/x0zsnq6aVG1fpVTu0qvXafUbFCqNwBrcHEaeHzjJnUP9No1sgR0AsTl
+ Fauk8mUokSDDOj/sTYGlfqgMzXtNzX5FyXpBy3rBTn/BSn1By3jKzXqglt1jHJtlce+WnoOBsrVIbCkU
+ nxSKTbhiE7aSMVPZkCmGyagJi2fO5JgzuaYsoRlHyYIHlCubwAXAUTblqJixVU6ylQ3YYngfUKIJlKgi
+ FSJfiygwoAiN6XwTmtCMqWwrVD+vpnNTx8jD1DLEwS7ymm2Mm51/qPHVGJ59JN46WsYqTsoathHFKljH
+ yJ2KkbKEbRYRsGVOHip1AXeofkHKvEIVTYOgBgYhHsY/ZB6iaB6CsghGI2MIxioUZx1OBNlEEq3C8fsq
+ hqzT+rERLYrhCOgKYZ2KEd2oqG50ZBdMSdHRfZiYfkw0xC092OheHHRpjOohRPbgYwaI0PYofoAU10eO
+ 7aPEDJCiB3GRA4oRQ3JRo0C5Ysy4QvyEYvykfNKUYtpDDCjrES57Fpczh8+ZhQN0zhwq7wk6/ym24Cm2
+ 8Bmu8Dm+8Dk2H1ooIgG6pDUXwjRxD+u9cGVvVvopdIGOuC8haicWvkEXQ9fPl9SKfk5UIPam5sEr1GNX
+ KDI3qHIwK/Wko0Np2HisXA2ZNCtUeUEXTO6XH/jLocn90nNSqPnDcs+//uHt59+9/tPfn/3pry+/+Obl
+ l9+++urvb7785uXn30x/9W3b9/sL5Y+lsYlxGpx4fUGciTjwpOCSBuYk/7Ch6Og9J/Xc2Jup4ZfuXdK6
+ dFpw+azIzBCvrnyUgv9aT0ORTf5WmbXfQhdnpU+6eV5dR+mYuuDwGSuWrQX91iX1YHfjGHejpDtaiY7s
+ fEd2oS01zVAhxUihzd9061H+L9sjv/z86tf/rH383+uPH4d/+jXz4daVtrc6jeuChg1Wwxa9bptRu82q
+ 3WLWbTDrN+l1G1TE6ZE2pbDTGfY4k6rWoJ8HBTpYIKCvUgqXaIWwdWiRWfyGkQ+fCPKanvWSk/6Cm/qc
+ m/ZInNajlVxmEhJqcuustr222FogMOPxjbhcxMLZIjOm2IQuMGbwjRkcUwbLjAn2LDDliMz5yic5ImO4
+ GHhKJ3liM67YmCXQZ/G0mFx1pkCdIdaii09AgE4VnqSDBBZsJWsBrK3qXNYwdNY39bKxCrxoGeZi5Rds
+ eD2ady6GZBenaJ0gZ5uIsotH28LeuTgZ6xhp2BtqG6NoE614KgKEsgxDW4ZhrMLBp4F44B7qeOEkcnwq
+ AnsqAncqHJF1BB74tosi2UbB7g1kBNBPJfeiw5sVwlohTJcNbpMO65QN75IL75CP7EbF9gPo6KgeVHQP
+ Jr6PGNtHAEcH1mP68HH9hIQBUsIA4E4G6GMHcVEA+qBc1Ihi3Dg6YQKbALW7k4rJD1CpM5iMR9ish/ic
+ R7jcR8iY/QiTM4fOm8fmLWDy5jEFC3ug4wB0CNCh3ZwkNAfKIVABX5fE67+z/snRIZh5DcXrxLzXirBm
+ VP2SVj0siI8k3tY7dIV2+DpZ5hZN3oWp6MvAJjOIZVhU04FDo3/7YfZP3z/+6vv5r75//OU3j7746/yX
+ f1n57Ovdz75e//zL5c+/2Nj31drnf1r76uvVz79a2ff108//2v33bwql9qcwMHFqrBhtzh7odnxpDco/
+ bA0BX41b5zWvOapePCMy1EWfc1A2MaaQyd9gcX/W1MQQCN8I2Af1tbFGJ0hWpkwy/q+qIiljfbz5SfIF
+ e4H3be2cSPvqaMesW5p5F8U5pzmRJxQ8xQfybmn+d7nx40+Tv354+uvPbyBS//hxaOd/mQ+2r7Yt6TZt
+ CJu3eI077MZddsMOp26bU7/Frt9k1kJ0Dlv6V6n160A8dPVA+m9BcznYJ162QilZpRctM4tgM+hbdulb
+ eglUn7+GOgtu9oI4c049pV8rqkgrIFTH/eaJG5baDmpiax7XjMsx4nJM2dxTHJEdR+UMX81BrGHHV7Fh
+ C62R3AukGkVmPJEJh2/M5ZvwBSY8vgmXb8zmGjBZOky2NpuvBUlGprIBQ2xIE5owhKYMgTlbCMnHM0pa
+ TuonbpwwcTE3dT9r4n/H1C9I/06M8HIc1SEBY5eCPpNKsE8mOySRYKsRbPWHDdGwZ/R0LMomCoS1jsTa
+ ROGgztE6EgrWYfPRXkGvItSv20Yjd9lG4+2iiadjSGdiKTDaxRDt48n2cZR9lSPWKX2K4c1yIa1ywW0y
+ Qa3HgzukQtqPh7ZLR/UoxPYrxvShIrsVIroUYvvwsX3EiE4IabBREN70YRIHcImDxIRBUsIQKX4IGzWs
+ GDkgHzWkkDCOTZrEJk5gkqYwiVOo5AfojEeErIe4nBl83kNC7iw+G3Cfw+U+wQLlwHr+Aq7gKR6UD14O
+ O6ahx8sn0AlAOTTR3YN7T59idKQJDAjplFvyjNT4XFQ/qJYSyXAzkb7FOX6HInOLLO1MlfWnodOouHpF
+ hYHvfpz/6m9Ln/996ctvlj//29Jnf3rz+ddLX/xp+7Ovft735e5nX21+/uXO519vf/n16pdfAPebn339
+ 6vO/Df3t24qjBzMpigkqjBhNVoyRMMhCbME7JiT81VQH63iKb2lIUeEfNjEg2lrzHR01beyUeUIZedSf
+ lVRRLN5RJuugtjbO+pRYUwPP4xzXUscYnoBHck5bMS5a03yuqGT7W9WGnKl0O5lwiumtIXtX+Uium8F/
+ lxp/We/5ZWvq47v5X39+9M//dLx4HzOwdr5lSad5DUDnN+9wW3Y5Ddvsuk2gHAEdCWBWIXShIwHMOoJ7
+ PRLVMGpWadCqpWyFUbbCKV3hlS2zkHgddpS/ZJcsKBXM6KR26oXmarp6K107J75hrX3DRO+cuoodn3dK
+ KDATCKw4/NNM0XUl3QATu4SzN2JOX/I3POVpYO5ldsrT7NRdw5MXNXTOIqUvamdUVGyVxNZ8oQVHYMjk
+ 6jC4J1gifZbYgCHSp/MNGXwTJt+MLbDgia1FqvbKWpe1DW4aGzrbGrhfNvTx0b8fp3onhXcpneSYiT+X
+ TXXKZF1IZzul0s+l4hxToJELyiEJtkhjTsciO4/OwOaMWJxdLAZ22cGOUkQxqDNxuDPxcB4LB7CBwyGB
+ Cjs5QI6JNKc0hlMqA0C3SumTC2uSDm6WCWiWDmg+FtR6NLT9WHgngC77u+QBdCR6gQC9AxPegYrqQccN
+ oBMG0fEDmLgBXOwANnYIFTOsCGPsMCp+FJM4jk0YRydOoBMn0SnT2PSH+OxpYu5DUt5DUs4jAoCeDbjP
+ YbMfYwH3/Hl83jwODD73KQamp5I20IiL72H9/w/63ty0ADpfPiNXPOfUPRY3P9CpbdBO8mHfN5R34cu6
+ MuRvQRUAWdqPgYDeoCA/+v2Pz77668rnf1n7/M8A8da+r9Y//2rji6/fffb1T/v+tPvZnzY+//PGvj+v
+ f/7n1T99vf7VX1a//NvTL7/p+8u3VYcP5pPRaWqsRD1+pLHQx1Jor4c31UXbWDANdDCayjKaajIn9HBm
+ ZqxTNkqnz2pr6FLkMH9ncqV0YcVFJCsQyIqV0ATCfl1t2gldqqEe8ZyD2M6CdsYEd0ZfwcNRUOBrU+Bq
+ 6n+SfEP5mAPnh/g7urtPq9+/bPrPyuDHdw9//Wly50PD9HJAxxv75hW91jVR6yavdZvbvMNp2GJBGNO0
+ xWnaZjduMpvWGQ3A+jq1AShfgyYWIKCfAZ+oUb3KqFrjVqzyy9ZY0J+ocpFV/Uy1Ysogp9U4NFn3lrPo
+ jCXbTld0WV/3qp6Oo7KSLZ9jIxSc4otsGPxzFKGHyKDkvMtkcPqgV0yp/a2yc3f6A2JGwpOa3AOzLtxI
+ drySfuV26uWb0Q5Ofma2rgaWN3RNTitrm/PUTrKUDGl8AxpPH8IeltCULTzJEVgKlOyUNM6r617Q0b1i
+ rHvnjIGni5FPtK5nhurtXMblPMKFfJJTDu1yFvNKFuNSFhFaK17MxDql4c4l4/c2TSObSpPwjsk4x2Ss
+ ZNwT4VwKAbacOibBAflcChUZkylOqTTYjQqdM/ZVjlqk9MuGNh0PapIKaDoe0HQksPlwSNvRiC7pyG6Z
+ 8E4pUGS3bHinPEQvMb2EyE5IsWNi+tBx/YpxAyAUKGZAMXpIIWZIPm5QMRY0oAAHCSOoRIhhxhWTJtGp
+ M7jsaVLeDDn3ESXnESkbDP4RPmsOn/UE0i/4vMcEwD0Hpqfz6OwFNLSBzoauRpKIRdIT/f/L0fc+QAsW
+ jIpekMsXGHWzStX9asUVKumRvEAH3F1lqXssGVem7E2a1E1YMGKhUxm4OkX5wR9/fPz1X15/+ae3X3wJ
+ iG9+9tXaZ18uf/blBkL53zbB6b/45s3n37z98u9v//S3V1/97emf/j7z9x9av/2+6OD+XCImQ4WdoMsP
+ NuL62avdv6F79ZqGmSVVU0PG3IRsZ8szNqFo6eBO2ala22mctFTiCBWk5f/GYEsRiT+IRCgU+gcMZr+p
+ icDhtMalcxq25nR7S9ql0+zzluQrlpTQa5qJt/TvGeLPCg9bcr+PcTV5t9j+01LP+zfdPy/1/bTZu7JZ
+ Pv7aq3PRoXPVsH1d3L7Ba93iNG+xGjcBdHbTFrd5m928xWzaYALoiNapjev0xjUQo3GN2bTGbFxn1q5x
+ gPWydWYlHC+Jauf085vNIuMN7t1QtTVkGvHolnyVC1oGl7R0HZTEdgKOLdg5U2hH4l1E89wIqgWGFybv
+ RPVf9CvVP9d+5t5OUsWvxR1PgtKaLnv2uIa9Sq9Yya+fisxsuBdUeS+wwiMk+dr9O0Y21hw1YwrXmMoz
+ pPGMGEIjupIhQ2AKpQFQLCDWPi1SP62ieslEz+3GyYAIE98MTbci7t0K6o0K0rUSCrRcvFVEu1FIvJYP
+ /eigZRcJOr1czIBmL6RzqYTzaQSndPyFdDyMTum482k46JBxMYN0ARoewV1p5AvpSFsY0MUM2kWkRQwJ
+ 0oumib1SwQ1HA+qP+jccDWw6GtB0OLj5cFjnsfBuqZCOowA9uHtYh2xkFzqmBx/dDTE6JqpPMbZfIW5I
+ MW5QIW5AIWZQIWpQPnJANnpQIXpAIbJPJqpPFohPGEUnjCpCDJPygJA1Tc6ZoeQ+pGY/ImfPkrJmiZkA
+ OiRh5om5SB4GmzWHyXqCzgLQod5rHp31BAsfWYHE6OD0kHGH9v5P4UNaAHH43BwyfMBI+VN67SNhZac4
+ I4MVdp/gZiN7RXjoEvHALfLRe3Spmwzpm2x5Tx46kYMvx8h3Htw/8dc/z//5q+dffvnmM9BXL7/60/O/
+ /PXln/725otvXn357bO//vD6B6nXB6Qf//3H6b99//i43FMqvQuPz5I9nohVSBTSYrU54ebi0GsGrs76
+ TtfUDSzIxhBtX1C7cFHT6CRd34R+9aaFiblYU4dhbq0uh/qHlOyfFdF/JZMPMJnSp6w0zE2Vrl8+ee/G
+ yfO2MG0V3r6seuOc0NGMdNWUHHnzRJCTyjUjvI3KkfQwx1/+Nfuv3cnV561PJopePC5+vZz3YNF/4O3l
+ 3nXTjhVh5wavfYvdssFoBjtfZzVtgoBywJrZuEprXKM1roPgXmbzOlNykoJ0cVln1WxwKtc5NSvC2mfa
+ RV1G0ckGztdUzxnyjThUTTzNhC46p6YHoch5VRV7pJRFdIaldI4guCzNvnuME4nTLOBZ5tBNUjDa1SK7
+ 59ci1zwz+qzd8lTtWu1cd5JqfivpfRqY3XTBu88j7mVW7URSUcI5ZyeBriVFcBIBnWNAExhQlfWpIiOW
+ EGrFLDmq5gwxRO32mtq3zxr5+xoHJOr4FIk8ahl3q/G3K/F3yknOZeRbJcSbxaRbRdTreZSrudDeiH4l
+ h3olm3Ilm4Q0rMuBUXKQTbyaQ76eT72WS72aS7maQ72aQ5eIcT2PdaOAAWf2Fffpx7YdDKo7FgBqPAaU
+ BzYfCm45HNC2P6jjUFDH4aD2Y6EdMqEdcpBKj+0lQKQe3asg4Vgufkg+YVg+fkguFhpJDypEDsjBZBQU
+ PiAb2S8X0Y9wHz+GSX5ATHtIznxEzZ1j5s8yc2dpObPUnFlKNrQGmCNkPgZBYyNM2hPoVofKeAKmjs2B
+ SOYJvmAeOqGR8+cJhQu4oqeEwqdQcMcoekovnCeUL5Ab50R1nao5iZyw6xgXYxknrpQd+qgD6sAN0gE3
+ 2vHb9OPX2dJuQnS0kJRPxjTJSI8dP/ZSXv7FwaPP/vLDk28PL+BJL5XEzynMme+Pjv39x2cE6k9GVu/M
+ bGeFqoMs3vJp+397uj92csji0iLICnHKsIGaFWkpjr5tcvOKhrkt+6Qd395J+8pNk7NXdTWMKCIt/C03
+ B11DPp0j5+x6VkuPgcF/iyH+DYX/i72j5vXrZkyG9I0rloHeTrcuG9y6rO1yXcv1qsYVW85ZQ4L/VZ30
+ gNPJPqfi3E3bK/3/+5+Zd9sTa4t9M2O5C7PZm9vlz9YThpddepZt+lZU+9b5PZvMrnVaJ7RSWoV2eXSk
+ yxw03V1ntW0wkP5EiOCA3bLOal6lNq1B70Vc0xqrcUXUvKTW8vREVZ95fLKhxx21i6aCU0K+HpWmSiAa
+ cBkO2spOmmrnVJQdhKKzAuXzHJWLeMFNKfb9/ayg79lx+wVJ3/OSv+XmHFapp1i0sO0KUSeSj6sU08xm
+ zvgt3orvMr2TybWoM7/51D9jyjslxezyVaaGNVl4EqIXBt+AzjeiCA0oXCMm7yRXpM8Q6JE5JkyBvZqW
+ s8WJkLtGkZEnQ7N1/Mr57jUk+GQ5tzqSaw3NtZp+r4LiXEa5VUQCd4eGdbeKabcLqXcKabcKocsuBVo0
+ wmUA3o+omHq7hHobRuSA7lzKdC5l3ynh3C1jQ4+kfUU9+jGtBwOqjvjXgpcfDWg+FNhyKLj1cGD7Qf/2
+ gwHth4Pajga1HQ/rQEIXJNMySIgfRsUC3AMy8UOySaMKCcNyMUMyYOdAdlifdGifdDg4er8cKH4YnTRB
+ SJ4ipc1QsmcZBU84hU84+Y9ZEtYp2XMI6Olz8PktmIwniqlz8qmP5dOfyIOp5y9gC+ZxBfOE/AUi4F4w
+ jy9YgNkq1JSSixZo5U9p9fPsukF+dgbTx0nuls7hi9yDp7EHreWO2CsevU485sqQBdCv0Y85M+WC2fg0
+ OqmKRl3Q1//v5Ss7p07PctUeKult33H7X0Lyvzz859QMhxmizYs3Pmbmfyyp2I2KWfIL+KW48GNXy7uc
+ pEpboygxNVaNBh0Ewi1F/hc1HWzYBmbk05e1brjZ3HY/fcnV3Payrv4p0W1PJ0t7XRUdWmDk7Ys3TNR1
+ cWzBIRb/YFDohYsX9Gjkw8G+133uO9pZCK9f0Az0tIzyt/O+pX/3vIr7BbW0oDOVqbfaSrzmRtN/+e/4
+ Pz9Mba0OPX5Q9GQ6c3O94s1OzvR60MjG5ZFNw75VUe8Gqw/aMq5Re9bones0SYd1Sscmq2OL3grN1Deh
+ tzqzZZ2DaIMOe6KhsWjLsqBzSbsNGhJ1a6Vn6nncUr1ozrPRYJrwGHockqEy+bSR4KKZ+nldNQdl5dMC
+ 4Rm+2JGtfAEvuC3F8fqRFfotJ/4bfsrfeGl/4Wb9hZv7V37uXwVZf+Wn/ZWb+Z2oGm1cT7bMk9GJPihK
+ xxvUaJ4v0DjrT9e7hBHaEHimNL4hEyJ1vjGVb0SFGIZnxOLrUDlaJIYxU+Cooe1mpR99yyQ+2Dw6VT+8
+ TCWwnuHbSPRqIns00t0bmG610CidcqeMdKeMeK+Cerec5lxGvVvGuFfOdC6jQ9NdGPf6NkJ3UrgLRmg5
+ fa+C5lrFcaviulZyXKtB3H0lfUax7Uf9q4/61R72azjo37Q/sGl/cPOBoLZDfi0HA1uPAOWQignrkIvu
+ QcX1Y2CXRuKoYvyITOzg8YQRmaQxoFkmevB49LBs1KAsIB7WKx3eIx3ZKwuxTcooPm2SnDJBSp+kZD2g
+ 5j9iFs6xCsDUJQGMJHqBj72FShj59Dm5tFmZtDnpjHmZrCdyeQuKBQvoQknmseApCQR5mGxYMX2mWLRA
+ qpnn1o7y8goIAXekr2gevMA47ID/0UL676ZH/2ErdeQKVvY2TeYGODr1+G2SlC8VE0mnlGprrgT6f6yt
+ /a2yajc+dScp47f2ro8PZj52D36IzVj2CP5PfunHicmPc3Mfxyd+7ev5daT/p46Gx4mhBQ4no7V5sRqM
+ 6BOsEAvBNRO6yQm0ngnJ4doJt4AL93yd7vifveHtcMfvgn+s6xWXM2evmQTH3LnuYm5oQdeGOastPyPT
+ /cZ1E2MDTlKcp4eL/eVzuq7OpjFhTplJt6L9T4e4nrx3Tux5TS3ez7Imx/nJZNr/fhl+vz20s9L//FH5
+ k8mspZelWx+qXn5Im1y/N7FtPbih0b3O6tkk926Q+tZoPev0jg1a+wa1ewv6hyGtd1u3aK0brOY1AJ3d
+ tsFq3aQ3rzE73mh2z5+saddNSxb7OQsvm/OtVZkGXMIJMeaUCdH5lqrPfeO7Fw3PG2raKinZQaG5WOkM
+ R+yE598+yvb5jhX1DT/pr4LUPwvSvuJlfc4p2Mcu+oxb8Dkv/0te3he83K/5mV/z0r7iJP6ZE/+tMP6A
+ SshBoesR7lUFvh2eZ0rhGTKQMN2Ewjeh8owZXH06T4vE1ibQYXn1mqFx4BmL+GtmqX4WGSknU0q0ous4
+ Ya3UgHaqZzvNvYUGH31xv57iWk12raFKPgCDIfkMDBiZrlXQYJrhVoW04YVmvG5VDNcquIvmVk2DPuue
+ 9VzPBp5HLYxcz3revpIBk5i2Y/7VR/zqDvs27A9o+iGw8fvg5h+Dmg/CrDSo5Xhwi3Rwi0xIK0xM5WL7
+ UYnD6IQx2biR43HDx+KGjsYNHYuH4xHphAmFWCh0QYxcNrJHJqZXPmkAkzFOzpth5k4zciZp2VOU3AfU
+ nAeU3GmI1CnZj4hZyEcoolJn5dMfyWTOIcp6IpM9L4uMEtbz51EFsHT6lJj3lAIfWJz+VC5rQa58nlwz
+ zs8poYa6St02/OEa/9hlguxZhaMWx78zPvh388M/OKKO36TL3mHK3aEr3CXLeVDRARxqroXJfHzUf7rb
+ Pk4MfBzu+Tjc93Fu8n9Ppv4zPPBTRcVqatpifu5Cfc1CQ/2LhvqFipKH6SmdPm55DhZxhurQBSBBi5lg
+ xA024V7Uwaty9/OVjp2/aXTxjpWZg7a6GY+piedqUyydjFwCbwTFuwXG3LrkbGJgTrO04dxzs0hKdQ2P
+ vBUZ4ZKfG52RHlSQH5aT7VNU4NtUF1WQfi/K3/rGWbbTKaLrFVG4p9Fge8j//ju09qZ9823fq7maJ+PZ
+ zx7mbe/Urf4z/9GGz+TmuaENg951HrRl7N8kDGyQ+9eBdVrXJoBOhhbsEtDhswOYwHr7BqNzk9G2QW9d
+ EbTPG9R1mGXE6/hfF1w349qpsU8KyAZKGFsboneQOKfcKivf3tfd3MlM10ZVxU4ZEiPKpzliCF3uHeEG
+ /IMb+2dB0pf89C+FmV8K8j4XlH4uKvtMWPa5qPRzYclnwoLP+NmfcTM/52R9JUz9nJ/wOTv8TyzP79k3
+ ZLj2WL45GDmTawSgk3gwNz3J5BvS+bpkrh6RZSNSdbU6FXvRPv2aTa6ndVG8VUGhfka9OKaDGdRL9e6j
+ +nTT4MOMvJpI8KlGHo0gqnsD/X497X4dHT4Vw70WPvsFPgcG+YQM9zq6Rx1y7FEPIx0+FgZ6rvu28Hyb
+ eYEdAv82AYB+Mqb9OIDuX3/It/HHwOYfglu+C275Ibj1UAjk1EEt0mFtciFtshCpQ3YlYRSVMCaTMC6V
+ NCaVOAqUH00ck06eUoCFodhRCMpl4yDJ2C+fOIDNHqcWzLCLZ7mls/zSOV7RI2buFDV7kpzzgJzzkJQ9
+ S4DZZ/qcQuojmYyH0llA+Zxs9hOZ3HnZ7MfAulyuxNRBeQv43AXYVArbMtAF8+jyCWpBNT3MS97N7NhN
+ 0SFnltx1nMI5eTlLqUOGR/5mcvyv54iH7jDlnanyt4hy9+io+2yMt4gUbaxWcf38cLjvbEbEQnb005zY
+ ucL4yazIoUivHs+bTVCge/VMzrXz2dec8q9dyHFySLU2izPQCtcQRqrzYlTZ8er0WB16pAnPWZ+mQvmG
+ iP8zVyyroksxtlHH8eSPEg8cIRxUZMkpGwjCkn0iUj2uuViaWvPOXtTyDjqXkuWdlO7n6GRuYKxqdFLV
+ /ryRT+ClmLjbpcWBrQ1RiVGOV88ynWyJN84x7l7hD3aE//rv8fdrwz9vTL1727v4qOzJRObyYtHm+7wX
+ OyFjKxcGlo0GtsRDu9CXFDe0iR9cJ/Wtk3vWyd2b0HYdQIfG0ERoDN2+QemGXmKbjJ4Nbttrtao+g6RE
+ A59rGjdOCu01OFZqNCs93HlHim+USmGrQfPkqaIGm4AAswtWOrYaKnYqynZiZXum6DKG73qEF/QNN/Zr
+ QdLn/IwvhNlfifO+EJV8ISr7QglAL/tcDGPx56KiL0RwARR8Kcr9jJe5j534FTfoO+4dKZ4jTmBJExgz
+ eSZ0rgmJa0LlQDbdhAF1AXxDKu+0WM3dyirxyrnCO+dLvBzKo2wrCkxz6tVjOjjB/VT/EVrgAN2vleLT
+ TPRtJQPuno0kD+RD6ijuDfAJXmDb4Pd0ryamRwMNPtDLu5nu28L0bWX5trH9/k+coG5+UBd/X/GAWUyH
+ dEDtMb+6A951/who+geAHtr6Y2j70VBk5UgqpE0uvFMxpF0uuEMappuxo/Ipk/LpM6i0BwppU/KpU3Jp
+ Uwqp06iESYXYUZnYYbmEYVTiICZthJQ3ySp7JKiYFZU9FFQ+EVY94RfOMHKmKDkPSLkzpJw5AhQCZD6S
+ zZyVzXwknfVQKvuRVPacVO6cdM5j6ZwnsjkL8rlPFfLnFSG/nj9Pyn9KKlwgls/QSproMaEoV+vDd5Tk
+ nNkyd6hSV1DS9nIypjIHDBT/bMv55rrqobscqdsU6ZtEmbsMlAsH5SZAeSkRwnUEySc1Mmx0s+30888a
+ 5547mXnWOMVaO8FEJUZfGKrNCtVghWlyIrWFYWq8ICVmkJgeqgL9XtiRyowYZXK0KilSnx18SuyggxMy
+ f6TS99uf17vt4YjhyCowZGVpsnI0WVmS1C2vyzGpvm7e5x0v6V+/axkef7ewKv6Gi8P+49+gCDJEJkpV
+ n6usSzlpwb1yVTcz7XZ22lX3O6pXz1JtT8oEe5/YXGr477vRf21Nf/z3SyjtWpmvnhlKer2Qtb2TtfQu
+ fGLpyuCS6eCWMnQkHdnFjGxihtbx/esEpGGY5OMGoCMXfB4B8sEb68S+DebwOn/gjWrdiGZShobLNeWL
+ JqLTQLk61cGCfPceNSpDubTXuHX+RNu8XkmHaUCwsZOV2mlNpdMqamcEqueZSlcxvLuHWd5/YYR/zYn/
+ kyDxS17ql/zsLwWFXwhKvwTEBRVfiCo+E5WDvhQXfQHWzsvbx8nex0n6kh38D85dGcFZgtACCl0YvJN0
+ vilNYM4UQG7RkqdiDKUvDCFsMPWzsU65er7Q+UK5l1NNlGNVrlVurU5CtyBsiB48Rg0Zood0UQLbSUHt
+ JP9WENkPPtCr9dMnM/o007ybQFTfFqpfKw3k38aAT0QK7uKGdCMK7mIHdbLD+rmhvdx9hQMnozuPB9Qe
+ 8as/4NvwXVDrP0JavwdHD2k/Etp5PKhNOqhNLrhdIahdNgiSjP0y0cNyKZPojBlc2gN0+gNFYD1tShFA
+ jx+XixmViRlCQE8bI2ZP0AumuOWPxBUPxSVTvLIZbuVjXskjdt4DGoCe84iIVLw8ks96KAOIZz48mjlz
+ OOvh4ZzZI3lzR3OfHM+dl8p6IpU5L53/FIle8h9jCmbxZdP0qn5+djrd3+nYbbUDd9goFyb6DknqouIx
+ K7nDBqg/W4q+9rmCCr1CuKd29Br14A2y1E2SrDND3oUr58aV9+bhAoSUQBV6kCozWJ0drsOP0OWH6/BC
+ NdkBKjQ/JUKwEj5EiRCiTAlWpgRCDwxlSog6I0iDDmeQrIsKOVybGmGj5HNO44Kd0P6s6sXrhq7+54lC
+ +e+k/3wcf+h7mW+/OfpXC3ujqEQf/6Dr125aePg6ZRYEx6X6ssX4/VLfMkVkRYqMQJtK5B6/cM3gyvUT
+ erqyVuZYQ50jBpo/nFD7tqLozsf/Tf53d/S/7x799q/nH3+eXZ6vmR5IWlzI2drKXP8pemH73tSm7TCA
+ vk0a3cWObaGH17FDG7jBTTx0v4DO0V3IdmmkGR1MVYdXlYYXtdtHdXOztf3uKV+2FNrpsKx0iGetia5e
+ 9MQiXvWYVucL/c43wtYX4pIuo6AQw4unlOy1BA6w51+oeZGjehnLu3WMef87RsC3rPBvuWF/YUZ9zUz6
+ EzvzSw5E50Vf8Eu/FFR8KSz/QljylbDgS17WF+z0L1hpn7Pi/8QI+pFzR17gQBZYQgUYW2jGEkKtixVP
+ 2VasbivWtOFr2As07ugahp2xS710Lu/G+fL7F+rCnaoybApq9FN7VGJHODHj9KhhanQfLaKHEtZNDe0C
+ 0YI7qP5tEMHTAiUKaKUFwudndNCD2uEuRlgPO6qfG9HHj+jjRfbxIvrgGD5bhh81wN9X0G8c1XE0oA5m
+ ogcCW/aHdx0I7zwQ3PpjcPuhkM7jge1SAa2ygW3ygeDosH40CDQrJE9i06eJaZOY1AnFlEmFzBlM5iwu
+ cVIhekQ6alAGQpf0cVLOBLPwAb90WlQ+LSqb5JY+4JTP8Upm2QUPAXRi1gw25xE6d1Y+55FM9syRrIcH
+ sh7uz5r5IefRj7lzB/KfHsl7djRz/nDG/NFcBHRU0RNU+SyxapSTV8qKvI/2MD7szD98B9yagblNkHdC
+ yVig95txvrrrdKgkW60wUcPjlOwl7vcwGb1FlL9Nlr3HkoMIxxXZKkrxUaaBfIFmdUaoGogZog59i6jQ
+ BcBfieiLdLygBKhQA9WoAaoUP2Wijwo+QIkQrUyNU2dG6DJCzPlBl7TdbujdczMzPyM0tOEZ2SpRxHKK
+ tCNS+APHsQcNzdX8Am/5+lx1dz8bFnEzuyD46i1LWdR3f//hc3ncEXni4WPYb4Ta2KzCwMwcT3t7trmp
+ 4ilTBauTUvduCF4tlH787cE/t/r+uT3167+effz3439u9r9+VPRkInH5Tcr7/yav/tf3yQeHkU3loW3y
+ 2C52fBsztoEd3cQMb2MGdnDQ7rR7G4v0a98kDa7yR9/odz0wKi06EXtP19Va7cIJvt0J0vkzBK8AdmqF
+ cvWkSveiWteSoGOF2LHIL+02DA4xuGItdNTknFPSPC/Uu8BVuYDjXpNh3pfmRuG0Ugknoo6KQv7BiP2O
+ m/6jUtZ34uy/C7K+5ub9hZ//F0Hm15yMP7FT/8JJ/Asj8U/06L/R/Q9xbqB4p6lCS47YggMpc7EVX8Va
+ pALLog5qeufVDK9rmHiftI5zdMy+dqns7vXq+1erA85VpFgXVBmld6snD/OSRxkJQ7T4AXpsPz2qlxGJ
+ iBnWTQ/uBDHgs77gs5BCOuAyYIV3M8O6GFH97OhP4kf18qP6+NH93OgBbjzyaWECcHSTmK7jIU0Qjh8O
+ bPkxtPNAeMeB0Lb9IV1HQnukg7tkA8HOO1AhsMmoTyFiAGhGJU3g0x6Q0iZwKePo1El09kNC5iwBidHH
+ IEYH0FGpE6TcKVbJQ1HptLhyRlT9UFT5SFg2yy+aZRXOMvMeUrKmMJkP5LNmpHNmjuc8PJQ9+332o28R
+ zf4jb25/wbODuc8PZC78mLlwOHNeLmdOvuwJtvoBtbiBHh2M8bKVvq9y/D5b1pmucIescBMv74g+Zkn5
+ xtHkbwlx+K5+w5pqbb+r8peV/nGTfhz2j94hKdxlKN6gyd1ioO9xSO5CgrsID+1coLNFmDI1WIkSICYh
+ jS5EJF9lKrQX9VOi+YkpkqYXRG8h1k8ZF6JKiFKhxGqxIg3ZAebc8NtGAV6W3iH2Z2+dIIsOm9qLnX0c
+ bro7XLhprWss4omwV66a+3g5Bfo6xUTfjE9wsbASaenQOAK0PHa/NOZbEvfwbQ+rnBJ/ZxfDgACLwrzr
+ mclnwv1ODHSG/PqvoY//Gvmw0fVudfB/H+Y+/ufxx38/WJ6v7Kr3mpsO+9d/M1f/6TO3dWZqW2N8lzG2
+ gxvfwYxto8e2FIe3FQclPXv7oV/SDnFwkzH8VrVn2rC8Rj/eXzvo9AkXE80rJtyb55mhkaL8Bs3GGS2A
+ u3eN3b1G6VrDdL3hVvaeCAvTuWrLO6fJdRJrOwlPnOcrnSNxL2O4EaKTbWfvj1wOKFQ/HaaolEEzaNG5
+ 2KBknymvGfMdO+OIShnGIF9eO/5HXtg3jIjvmTHfs0P+QXc/zLyC4doyRJZcJZAFV/mUUBX23dkpazlp
+ Gl7VNnYzsgi1PZPodC77xqUS56vl9y5VeJ+rSLArLjPL6dTOGOKnjTCSB6lJQ4zEYWb8IDNukBEzyIzu
+ Z0b1siL62FGgXmZEDz28ixbeTYvuY8UMsGMHWbEDzOg+TkyvIK5fEDfAjxvgJY0JEkf4+4oGT8b1SgPo
+ Ia1HwdGD234M7TgQ1n4oFOjvlgrqlAuEAL0bFd6HDh9Ah/ejogawcSOE5AlIGpJTJwhpE4QMWA+axidP
+ YePH5WOHZBJH0WkTkGzhFj8UlUwD4kp1j5Vr58Rlj7gFD1l5DxlZU8S0CVTqpHTm9NGshwezH32fNff3
+ zCd/yZn/W/bsd3mPD+fM789+/l3mi+8ynx/OXpAtnMPVznDLmzlp0QT/s8c8NaV8eRg/FsmVoXCLLHMF
+ f/g08e/2Wn/29jxe1sbtWVCv7WNFeErdUv/HLcqx23i5mxRAHB6pcIeCdmHiXdlYFzbmPhvryyUGCamB
+ fLI/nxwIferENB8x1UeA3ITzEOR4C3BeQmywKjlMlRSqig/XpoUYcoLtNSI9bdy9re1v6lpd0dS34Yi1
+ FJ2uGoRF3Y2OvW93WofDlL54Ts/H/XSgr0Nk2CUfrzPqqlgXlzMFhVFRsS7uPmdcvazOXVG2O0dzvqea
+ l32pofJebpJNdf6lD2t1H/83/tvPE+9X+j6sDvz6fvrjP2H7xaN3y629Td7Tw/4/76SvbvnOr519uKUz
+ scMY3sUMv1MY2ZUb3ZEZ2ZUeeCcHzU0Ht/Bj2/TJNcHAQ9XKKs24YA1vJ1UXC1XnUwLvm/z4RJXKLt2u
+ Z9p9q4LudfjkI+Sjv7pW8b1v+XWDarFxStftOWc1RU7KJ86LdB0EMB8V3ORrFF+8+yaj/H1xy+D98AyT
+ s82X7q/GFz0PSC3Rc4giqJdrnH54OWDY/n46Gzb38CMImil0w1C06m1pzgU835atYs5TsRSoQU2vpVAF
+ tqJCGcwFzRM3dHW8LY0iz1nFOFnFOVklXbLOvG6f7+pYGHa6tMCirMMgd1gpdZSZMEJOHCElj1GSxmiJ
+ w9T4QVr8ID1hkJUwzI4fBLLB46nh3WSIbaL76bFD8Hg2KH6QkzDATxriJ40IQKnjopRR4b6SYbPEfrlQ
+ yCG2Hg1uO4hQ3nkgqudoeO/x0G6p4A6YgyKgh/WiwvpQoT2ocNiHMUhIHCXDj08ZI6WNE9OmCKlTeIhn
+ 4kZhiVQWQE+BGH2SWTAtKJ4WQYxe80hYNcsvmeUWPEJAT5vAJo7Kpk1JZT08kvXoR4A7a+7b7Cd/zVv4
+ W87jH3IeH86a/xFAz3nxY86zYyVPCfWPBTXdStnx9IhLir76x7zF0j5sBW822oUte4Mu5Ujaf07pH+43
+ jhTWMboWVHoWRY3jzJQoRRf9H26QDl3HHr9Glr5Olb1NUnAmoe/S0XeZaFcW1pWJ9eGRg0T0ICE9AMGa
+ 5i+m+4ppvkIqwr2IEiim+oqIPkJcqDIpXJUUooYP1qb6GXIjr50M83O87mKmZ8fRs+NanFfhqcmINRTO
+ OGpeuWpibMQ5aci+4qR39+bJAO8zEUGO7nfNdNWxVy8YlJdEFhcFJSZev+Osa2wqd/E6s6jkRkeLd3Xx
+ lcpcx9nR8J/Wan7799BvP4//a33kP1tj/4PddP+a/fi/Jz9vdPU1eT8aCfzl54Kf/h33Zuf6ow3dyV36
+ 2AfU0AfZoV3pYURSA7vS/ZvyIwD6BmP8laC5i58YzfS4zLlhwb55iul7h5edqdk6qD/wQntoTTy4w4SW
+ 013Qa3cD3bVG7FsStkxoJKer3D7PO6sldlLWO6ekc1osPisUu50wKrvr/jS7eKOqaSYxq9Mv/El60X9b
+ h95Xdgx6RFTZ35zyiv5fWftWRmXDOeckPesKhzt9t4KKza/eZ2mdp4ptuAjl1kLIwcKmOxUbsaa9ig5U
+ 1Dgb6QY5mMRet4q6bBRgr+VroxVibxB96WSKp2le+snSdsO8IdX0MW7KOB3BbJwGSh6jQzCTPMpMHWen
+ jnOSR1mJw3RAP26AGjdASxhiJo7ABcCKHwL7ZyUOciD4SR0TpI4J08eFcACObp7QrxDRLg85lojuY5E9
+ hyN6DsYPHo8ZlInolQkB0JGZqGIgPAB2SfcgXV+iYdfFINTlEpNHyRlTlPRJYjJUn4/KxwxJxw7JQi1X
+ 0gguY4xW9FBY8UStdkG1bl6p+rGw6BErZ5qWOUWExdT44eMpE0cyZw5mzEDQ8o/sx99lP/l77pNvsud+
+ yHp8MOfZgdznP+Q/O1z2DFs3C5SL89J4MdeIQYYofyV5b66MO+PofajF5R6/CEtF3B9czklnF9G6F0R9
+ K9zuZW7bnCAvm+hlfeA67YeruCPXKdI3qDK3SfK3CQq3KQrOTBSY+j0m2oNDCBDRA8V0PwHFh0/yEVD8
+ hTQ/IfBN9oMARkQJUCIHignhyqQodWqwBtFXhxZkoxF577TbPdsr96zOuZraXdezuaRhcpqrb8k4e0X7
+ 2m3TS1cMoSjX1kp4xUnX9Y6ph7Ox5x1T56uGNy7oet41c7IXWJpiLM0VLl2iJyRZl5Rey844Ex2s29vs
+ +nIuaett6W//6v/t35O//fTw44fp394/QED/7+x7cPRG74WJiI//rfrvr1kr7+8+3jZ48I429pPc8M/H
+ hz9IDb+XBUcf3j46uCE9vk2cWucMPOYWlZN83eRv2ZFu2tC979KzMlW6hk9MLUOfRx7Sq/onOrSohqZ5
+ 8AGlnWsEcPT2aZ30XLW7l3jndAUXVLUdVbRsxCJ7ZfFNXd1gG7vsm3cqPXwqvf2qfQM6o5MmM4sfZJX2
+ hCe1+IQ/TM5/39SzVds+HJFU7xk4kZq7WFLXGxAbYnz6PF/DGuJyoboNX82MIzaFzjBizbNq2ld0dFzM
+ T4RdNk2+b5Xobhx4Te2uDfe2OdfdTinojnpignZh84m8QdXMEU7GBDNjkpk+AeYIYqZNsEAZkxxQ+gQ7
+ bZyVOsZMGWWkjDLTxrlpY5yUEU7SECthgJU0wE4e5KQMc1OGeJkTgoxxwb6CPtO4bvnIDlR4l2xE3/Ho
+ /iOxA0eShqUSRuSSRjAJg1CDToiEDl6tChCsB3YA6/joPmLcADlhiJwyRsmYoqWOExKQAl3Z2CHpOPgq
+ cPQRfM4ko2pepeW1XueSQfuiduNz1aI5eIpwbaDih6Uh+548cTD9wY+ZD78D0DNnv8t+/G3e/Hc5T/Zn
+ LxzKfnYgf+FA2YJ89QN6VRM/E7z8Bt7XCO0P80I+wY+L8WHLe3DlrrGPOnJ+uGqyPyGW2Dwh7Fliwbyq
+ c4Xe80pUWccMvX7stvDHK8RDV8hHrxCP3STI3iJA7a4cgH4XkeJ9Lg5aLgLW3gIiNGr0EZDA2n1FFC8R
+ yQu8XESCrEuwEilSmQqt0321yf7mwmyfS1E+V+1O6xrbadhcNTx7y+iah4VLiL2z/+krLqbnr524dddS
+ W5vAYhw6oU00M6CcMqZcP6eek+wSFXguKtghyNfishP7khMtOso0IdH8vhs/LtJwoMPj6XT0aLfvyqui
+ j7+M/PbLzP/ezfxvZ+ojhC4/P/z470fvl9r6G/2m+wL/tVX408/pK7v3F3ZMpnapw++ODL7/cfinI8Mf
+ ZAd3jw/tHBzcPDq6iZraYPZBTVE52t9LyuM6IcCDmV3A7xjRnHyjPr3LGnuPGfsZM/4v2sh7zsB7Vtc7
+ UtcGqXeZ1zKlnpat5HKFfU6P46QKDRY1bJUhQ6Jkrwy7PNUua2pf19N3NjF1tbD0Ou0QfOFa9HXnmCu3
+ Yy/fyr7rWR0U3hod3xQZ2xAd05eVNZ5f0BASEWzj6CjWtBao2IjUrflq5lxlc4GqnZL6eXWtmyd0PG1O
+ RNw0Tg8yz0u0SI028HcROTsy7jjQ799mRcaJcps0ioaVCyd4eVOc7GlO1gN25iTAwwLiQYA+HGdOsLIn
+ 2NkTnKwJdtY4jJzMMW7GGCdjlJM+zEkdYqcNsVMH2SmDcC8vc4y3r6DXNKZTNqIdQId1TZm4EanEkWOp
+ o9Lp44rZk6SCGV7+tDh9hBPZhQ9oVvBtAneHLXbE+H5K0hA1EXbTDeJi+hSje2Uie6XihuWSxtEp47js
+ SUrZnKDuqVrzM62O17rtL7UaFsTg6OnjuIRBmfjBo4mjRySgf58z+0PO7HdpD7/NnvtH/vwPeQsHcp4d
+ znp2sODxkYpJbFEdJT2YGHpBweukjIuqrLsQ48mFJkQkfw7Zk0W4zDx+Qe0foR6y9V28vlfC7lV6xzKx
+ 8y2p6zWjdZifEUJw1zlylXrwEunwZfyRGzjpWwTYXKdwjwU9GSG5jnbn47zFJD9lqq+Y7MUn+PCJ/jyS
+ j5DoKSJ6KZF8xEhH6VAVSqQ6M0iT7mHETrvvUJMZ4uFy3tRGW8NEaHRGw9xR/fR1XddwJ/+EG9c9T111
+ MfMNuXTmrKa83F+OH/lSTupPZNzfr1/SLi3wDQ2wj4m60NQYXFfrXVZ2p6z8Rmq6TXnp5fYm16mBwKaK
+ qzkp1lPD0f/7z8jH3578d3fmX+tj4Oi//TTz8d8PP6x0DDT699TcXXwc99//FO78M2huzXRiiz767vjw
+ ++9Hfzow/P5o3/bB/t0f+ncODm1KjW3gBl6SGgcIBeWE3Fx2aRWv64HwwZLKzDZ38h1h/ANq4p/YiX9R
+ xn5iwkcP9EJz6k1qH4A+qZKWI3K9yjp/gnVeVXxOVcteVdNGDG23YKOnpOEWD2lgZMUX26ponFHXPauh
+ e17rxEUdg6snjO6cNHe3sfV3dAy5dDHq+tWYa9cCzjjc0DWwg35dfKVTQrVTADpE6iK1M8rqlzTUXUx0
+ A88axrga5ySaVZZbllSapGbphIQIvbyYfsGMuBxeboeocJRXNMkpnObkPeTlTHOzp1hZU6yMCXrGBC1z
+ kpE5xcieYEBmL3eSnTPBzhnn5I5zc8Z5OWO8rFFuxiAnbYANSh/kpA6ws0Z5mSMI6Cax4OXtqNBOuahB
+ ueRx+ZQxmewpxcKH5KIZZtkjpao5nYJJ1eQ+Vngb3q8J5dOICmmFxi8EYD2mlxjaLhvaLgXF69H9suDr
+ yWP4jEly8SN2/YJq84JG02O1jmeaoBb4TJzHvLxpctqoQtLw8eSxw+kPDmY9OpD/5ADE6GmPvs15/I+8
+ J9/lLuzPeX4k7/mx0kdyJe245ChUqK2sp/YxNw2puyqyLsoKHkrQJ5HizaK5UPHXhdIe9kdLCkkD87yB
+ ZRbSoXyJ0r0CFSCU/nmlsnROgJnsHc6xW0zYVidzhyTnTFGALrsubLQrB+PGxXoICD5KkECk+ojJ3nyC
+ Nxfvw4EmdQQvJbKvKjVQlR6sQgtVp4dqM/0MOH7nTiQGXg30vmRpq61mKGCqECkiBZqSLE8HY+CgfNXb
+ 9l6w470AB7+Iy2HRN41MOAf3f3Zg/2fm5ryiIv+cXI/snPtFpd7jD3KfPC/tH41vbPEpKLjU0eoz1BMc
+ FaB73lrG7SanrvzeTzt9H39d+OXdo39tTfwGM9H/PPrtnxPLCxV9db4tJTemhwL+90v5f35Lfb5xbmpV
+ NLGDGn1/aOTdj8O7+4d2f+zb/b5v98eB7cMj2zIj65jBV8SBeXr/HH9wgTu1ynywSZ/aok7tkh78RHrw
+ M3nqZ8rUz7TR97ShD7ThXcbomrDroVpOvtL968zz+qzzasoX1XXPq+rZK2nZCSH8ULXiKkOK0IwtQips
+ +ciKD9y04gH9kFERW3KF1gIxspgKTaWh9a6Sqp1QbCtQOiWAZAtsyFK24CvDNmoLoaqDitp1bQ1P8xPh
+ F0ySfE4W5JiWNxoWtWsUdGjn1mullqiklAuymnn5A+yCMXrxFKPwASsfWH/AyZ2CZXVGNhSSTFJzpiBD
+ TcudokPUkDvBzB5n5oyzcsYAdEHOiCB9kJvax07pZaX2cdL7eTAC93BmX1G/YWyPdChsi+6Sh80TyeOK
+ qWPyBQ+J5bOMqllR3ZxW7aMTJaPqmf38mHZKcAvOvxEd1IiKaMPG9hKhTYA/7NhoOR7WLh3fj4rrU4jr
+ Q2dN0mDNv2lBvffliYGX+mOLRhOLRiOv9dtfqdfO84pnyNmTChmTxzKmDmbPHsiDWOXJ99nz3+fOf5f7
+ +B+Qb8l5fiz/qXTJqGJmDirw0lFvzeOeIhlPFcX7yoruKoreahhvZcCRDF2K3IyP58ZQByfEEyucwTVK
+ 9xKpe4WC5BCg2ukpvzSDHmghc48n7cyWvwtrRlTUXZr8HYY8YucS1t2h/aKQCJT7CEnePLwvl+DLwXtx
+ 8eDofioUoDxQDD11Ke7qJD9b9ZSI2/Fxbnfunz1/08rC0UDHTKRIPXgU85djxL/9gPmT+CTFOejMbV9b
+ Z1/b+35nHC/qUBgHBEqy971OxSZdj066Ghl/KSX31sBkev9ESnLOFVcf3WvX+R5uGpHBJ/NTHCN8T+Qm
+ n337tOzX/0x//OXpx38/+/VnyC0++vjL7L93BqcH0uqL7jYUXH82nfDr/xp/+1i+/sHv4fLJyVXm5K7C
+ 2O7B8d0fx94fGHq3f2Dnh8Ht/cM7R0e3ZUY35ad2sDPvSNM7+Jkd3PQ2bmaX8vAdDdF7xsMP9Ec/06d/
+ Zkz+xJr+wJnZVhmY084vUvG4wUZAV1W5oKpzXlkXtkfYCTQsIepgI4lwc4Cbq2wN3swWWyALQCJzKC5n
+ CUyhiRcLtoQKzJg8CzhgwZZqZIeoJH0O1edCKN4y5ypZCVTPKave0dXyszSIuXoyI8S8qORkaYdOXj8/
+ Z4RfOKRSPKBSPCTMH2bkT1CKpqklMwA6M38aWGflP2DmPaDnTgHfNDgAwZn8SWbOGC17lJ41zMga5mSP
+ 8LKG+Km9nJQedjKMvdy0fl5aHz99kJfcy9pXMmgU13s8GNb5e1HRg6iEEcWkIbnscWzJFK1iUlD3SKd2
+ Wr9iTDd/SDW+nRHWTApuwgU3osJboKEXDvrA+DVJ+TUcC22TSehDR0Pw0yGXOkSsmlPqeXVi/K3pzJLF
+ 3Krl4xWr2RWLkRWDjteq9Quc0jli3kO5rOnD2Y8O5D4+gLj4s/05C//Iffz/aeotoNpOu7fd/nXmnZlO
+ FXeXBAhxwb20FChaSmmhSt3djVLaUtzd3d2dACEkgUCE4E4I7uR3Hub9znfOWs/KIim0a9E7O/ez5dqH
+ kwZEE7nSySz51HqVsB8KH1ylXxnIvyaqfNCHvSepf9BT+2yi9t5E6bWx0ptTimEvNFtqLJgTlt18dOu8
+ etOsRv0MrHJSpRponYPPikV8cpR+iJbcD+cI+afaSo8BFwCtAGjRz7GqQO5A6yCoA5sObqJvcRpA5R+w
+ AESq9hLcAfRhnwAxnQR/a6zzzFo39JVXUWFUbPqv97+ePvp44+5Lr5sP3WycSQgDOTnUkSNq/yOl8y9z
+ F6077xzvvXO4ct/ilLM2HH2IZCrjedPg4evTLz85+gV7ZZd+TC988+WX+7X7JAtbyRMnJV2dVFwdFF89
+ MM6Ov1uS8XykPxva7Nufht4aFG72QztMSMhcnq1vrQ6I+eWZHHpxdCAO2muAoIqN3TjuzC3KqAl1Dk4V
+ SFEXj3UtHe9YEm3lH21bOE5ZkupakukUyFCWpXrWJGkrMvQF5V6Bev8ynLmszVxC9C7q9i4h+ldRfWsY
+ xhqudxnXyzdu77dKTjV+fRd7zQZ32cT4irGlp6GFh74ZILGAsA3QRaC6CZR9Fqfvjjc8hwPoUAAkMtjX
+ Ona/JHQWb+iMNwB0AAeMngMYmcMbAHG7YA1AWdQFBXQPRA8QAMbexiavbE9+93AIve8S/8M1Lcc+s8ki
+ gYyK7IQldCHTqLgUKjKxB5bI0Ejp1U6loVOoIHeHSqWDR3Qy0Df1/zspQOgUJBB6Yicqvh0b04KObcHG
+ NOEj6tGhtdjwekJEAymyUQ8coHXwdF/owU1SXyplvtaDUQkwRaEU3CgfWq+Y1ITIJRtUMuzKe+yLqXYZ
+ HZaR9fhfVYhv5Ro+JQq+ZYrfq1V8qxR9KmQ+lUr4VsoEADJjpcKPKqVYsm4ew7CRZ0OfdGNNuXOnz/Om
+ 3Qdn3ajTDk2jVpWDegUc3Uww/L/f0CKRxBZP5IgkgRwL53By/+HkgeNJXIl0tmJarVrgR9k3ZyRf6Mm/
+ xqmCnPdnHMyHoPnVSO2juexba+mwu1o1WQa8QUcO/xR1AU8GSyDmQUpYtWJSqXpKo3EAmxGn9d5J7B5K
+ 5L6O7ENwB9VSeopUeoZRfopWAecJRvU5Xh0E9TdEOPD9r9Cqb9Cqb9FqL1DKL7CqH4gwH3A3BZdUY50X
+ tthE/4eFFQkxuSEfgl88eH/V87bdaSeCnYu+pQNWnSRxRO2/DyockNH5XwM7Ra8HRvfenrp4W8/xIsrG
+ DebgqX3jmem3kItZJe+Scp55PzCwcVK0c1U97aBsbi5uaiRibSLifELyspPKm7vGKRH3J7iVkHBUuDUk
+ 3GJBe0DrvYKZekpzRHTApfDvZ+ktATur9YB+IYQqpxe/MUad6JNoBl+JsShJXZTsFEi2zR/t4B+jLkt1
+ L8l0L0lTlsS6V0Roy1K9C6oDi5qsRThnRZezguxf0u1b1B1YxvStYBmrWMYSto9v1DlwIjXF+M1d3PXT
+ +EuAGW18AnBDz+ubuJGA8TBwxJMAtAgMPp/F6rkBWeP0XTD7Egcx2xG8TjAA3+NCNHTe514Y7tNgCIaO
+ BPAJoO8Kvg1JdEGBRgBDD6IpWA7zycku5Kpb7ONzqYHnsvLt0xuNYtrhoPIYS4HFUeDxVNj++HwvLBEM
+ XnYjk7pBRNdNoSNT6ciUHiQQ/f970Kl0DIjoiV2oxE5sHBkb2YwCdiW8HhNcg/pVgQqpxofX64WBHGst
+ Mbxh//FAVod9QJPExzJJn3q5Hy0KYNYzuEElsEw1rgJTTLZuYrg001xqehxyyJYxTYSQBuTPGs2vpXK+
+ ZfL71ADw9qgS86k6/rVa8nu18tcSZWBp4snYHKpB/cDJ3gkgcS/e5IWRafexOXfurDt1wr5h2Lh0GJsN
+ NuhyFBPY0ols0STOkbTBw+mcY6kDx5PZx5OHRHN4SnkApOEr98b52Btjmff6Kp+IGj54rR9GiG9m6p9O
+ SgdeVqmNMx/jOCysnB0W2FDnSO1zyKYpjdop1app5dpJ9WYWOiVR5ZXboVvIQ3fgUvfgAAqg8ASpAjLo
+ z3DqgKb7BKv2gqgBhP4Sr/4So/ISpfwKHLTKC7TKa6zGJwLcj6Tlb6T70VT3tS0+5vv9+OyArxEfH364
+ deH2aduzWMtTCBMLhJk1AmUkd1D+P/+UPSCP/BfBWsrGQ/X6M9Ljzyfe/XJ59cP59S8X36gLHwIdnn60
+ uuiNOuOqbntGze6MuqmplDbsvwnI386Yi9w9r/7YC/7urp7/O2dObx4EjQi3R/Y2OdDOvtBnRsorCr8G
+ fHUP++beXPpFMFEG7TIhiLGxXTgleMqZNh2YhfXxVWgLql182Y6FY52CI50LYpRF2Z5lmZ5FSdqiNGNR
+ mSmADSzA2QIEbwk9uIRlC9DMBWTfEpq2iKIt6/YtYQf4JhSmdUaK2Zu7+KunCV6mppeMTlzUM/MgmgBZ
+ 73eqACuCA84E2JL9+O2K1XdEEcGwsxPgdeH1HUHwJhr+I3QDJ3CIQOWG4MX9p+DH0fpOSD1XlIEnASzA
+ sPjoYhty7Wzcs3PZYecLis9kNAFImGZkp2pMlyYQOmjkTqIhk+ioZODCu3USuhH/CF03la6bTkemgy96
+ dFOp4KDSqci0LmQiGRnbho5sQUY2IyOaUGH16KBq9K9yTGAFPriKEFyND64hhNYRQmrwB3K6zoB84scK
+ Cd9GWf82ENFVgupU/EtUo0uxeU1WVR22jT1ONT1nsvdtOj6yGR3SpO1XCYQu/aUMNDZK/6iX/l4n/rVK
+ wqdE+kuxgn8VLLoFk9WpX8WwpnBdB0YvsscucoHip9wHZ9wZE/YtwybVo4QCMNrMAW2J0iBxHs/8O5l9
+ OJUtksoSSwX5Fp5oFle2EHTzJqj73BJ/d1LqvaniB0NVHwPYN1P4F0sFv/PyBb8Mx6hX1pYuLW64ja3Y
+ 0vmGHbPopilY3ZRazZxq/ZxGMxeVmqz8/Nyf11B/emuI3VCXugWXuYdQfIhUfYxVfYJTBVkXsO7iFVH9
+ BfgarfQUpfQMpfQcqfQcowrSjh8Imt/04P6myI/mup/PGmeFv0nOCw1I+PHc54HHjVMXrpmdv2h20gZn
+ 44CzOKMjofGbmMZ/GToqOXnr2l1ROu0l6/EAWHarxz4nn361ufPG2OW6+ik3GXt3lbMXtJxcNM1MxEm4
+ v62Mxb+8OlmYfKsi/WZp0tWytLttld/441XCnQHhHkAXDYDi/7qgqYccGx95x+etffj38xXZL8aY6Xur
+ PcLdfghqXdsOGuO7cOcwrH0Xrt4tUKQsi1NWRDoXJLsXFWnLCrRFGZpAniFQYy5qshe1BpeRw8uYoSUM
+ G6icrwOCOl2A6FnUZS7hWHxTCvNkRqrp6zv4qzZELxOLy0bWnkDoJHANNQSR23HfeQPUFngEhgTEacBp
+ Ie7zuv4tdODC9wFG+1+DL8AB4RzMy4HXHYCDRxNB0+JZrMFFkvFdS4sPbvbBN88lvruYG3uhoMI+s40U
+ 3wGL6lQDET2Bqg16/lIY2NReTBoDxGzUvsQZuml03TSaTgYdkUnTzWSgshiYLAYuh4bNoqKS2pExLcio
+ 5n+E3owKqUUGVaKCqvCBFYSgSkJQFeFXBfZXJSqgCgMWAdgGNIp8KBP1qZP60bI/FvQD9G/lKwYXIOPL
+ 9NMrjYtbrAvbT6Q0G0Q1Y2LaURFtWj8A5Khyv0kdTEz71YD5I9HP5aKfikQ+FkgGVMESWgnZnYbFneaV
+ ndYN3fbkPmcaz405do47da5/0r5t2Lh6BFM4rJ3BVUliSsQzQIYR3EGPJLNEU1mSacC3DEukc6Xy+tTz
+ 6nUiApW/XpV/Yyf11krqk6Wij7XCd1e5lI/IvsZzm4uPd3a8BZtuUxsO3GWLXgGhY067cVq9jq8OthC2
+ DWMz0tWee+wL/ZqG2BU1iUtqElc1ZLzhcre1wDip/L/vo+AAs/5AV+Gejtx98DbQVX6AVn2AUnkOyEd4
+ 1Y966m9MtV+dIUb53kvICg6I//n4450bT1zcLhna2OranMacu2jk6kVUw/6JsxK999nyyhuMwy2Z01fF
+ nG4qetzTcr+pc+GWrvsNuPNVVWs3ydNucm5eMHcPmJO93M1LiJyEm8tT2cKVgq25lNXppLW5nJ2V2p2V
+ tt3Nvr3dwa016gq/brA/pabCNz76jt8nJ//PTqkRV4B72VpoEW4B+07d2c2cXbrHnTVhLWgzlzV6llWB
+ KaesiVOWZLsEit0CeapAtge48wWN/iUN7qr28BpydA0zvILmLugOzGsPLOj0LugwljH9SwQQ0aks65ws
+ s7cPwGWUcMnE8pIBELrp+X2hGwGPvp88AYEcWBHQtQJsNyB1oYhAxP9/cf9b8fsHr/9vlZ/BEO2ReHtd
+ vBOKCDw92BFw94TVx/NOofc8Er545qR45NfaZoJsCRUW3wNPpIEshTaonWcM4DP7cVn/52Cy+zFZTFQm
+ UDkDkduLBpDNwn5C8QAJ1MvzevFpnZj4VuDO0RFNuiH1iIBK7cAKVHAVCOfE0Bq94Griz3KMfwXKvxwF
+ aLo2P2sPvy8+6lMr+b1FDkz+fymR+pwr/6sAFVmsF1esn1xulFprHN9IDGvQiWjXjiTDg5tUgpvVwtq1
+ AgFqtE7Jt0r2SzkYUBL9WiIT2YTM6jYp6DEv7DAtaDYtabWuptg19Tl2j7gNTLsyZ+xbx4zLeLp5XM0M
+ jhJoyk1gioCyfzz7WBJHLJUjlcaVTB+SACeLJZtH08iu0UmORga/VvG5IvHVXTL4imKeL55R6bQ0fWtn
+ ++7WztWVzfPzm45jq5bcRRJ1RrtlSrWBDxZ8apPHCNnZmi8v/n0VffCapsRVdZnLGjJXNeSuqslcURX1
+ 1pS8qwPErXRHW+mWlqK3pvxVddkr6nKXNBSvaClfhSve1JJ9jlF8jJS9hZa/Yqj5/fW19MKY4IRf9155
+ O3iY2DggrE9qOTgSLlw2druEMbYRufOWFFvq+SaS6PFGwuHuUdd7cm631S/e1rl6D3n5AcLthpqNh5TD
+ RXmPKxqXLmm8eIiLD3ai1DwfofsujoXuLMRDq1nQRvl+WXSHIdzlCoW83Q3q1Gg+tTOkvORTTOStHz7n
+ Pr+2DfSxbyh5uTpdBa337V9bhU3rOyHD8+f6ZzHMJa2eFY3OZemuVYnOZZmuRUWKQL57QZa+qNq3qNm/
+ pMZZhvOA1ld1h1aQg0sI9oIWOEy+du8SqncBw5zT6+GZF5aYfHyOvG6PvWxq6WVwwhOgufRMgMpBBzkQ
+ +r7hBpkT0KEFhP7viA4yKv/E7/8b1x2wAFW3H+b3YzmWBM4ZMAqNwoOczDmi0SUT80c2p30unAUQ3YRv
+ F/OzLpa0nsmnEdMY2qm9iPQ+VFovKnsAn8cl5bHBIeSx8HksXD4bm8/C5PWh8vuQhUxMETh92BImvqSf
+ WNhHyuzGJ5FxsS0gmYgKr0eF1aLDqnEhVYSQalJojX5IDSmkhhhaRwyswgKPbhfcIvGpTPRrvZRfsyxo
+ zf1UIu1TqPKjEB1apBdXYZJcZZZcZxJejflRphbcqBHVqRVB1oroQMR0o0LJCADu8qtS8SmT+1IsE9YA
+ z2EYlbNPVrJOllGtClrNC5otc5vMC8iWdSx7ysQZysypujH9fC4YoVBJ5ijFMqVimWIxAyLR7GPxXLFE
+ tngKkDtPNHVIJHNYOpenXNivU9pOKiolpkRrJX3XrIzW5zW5bYzfEm7c2t29urPjtb59XrDhML1uNbps
+ 0DuDaBlXrZ9WAdM3HaOEnGz4q4tHrmMOe8NlvGGKN3RUbumo3oQpXVGXuqwqcV1DzltT4aq6vKey3Dl5
+ GScpCTuxY/bSom4aCu4wuUta0k+JqvfQMhd0JG7ZEONDPuRXpHwN/uh+zemsl/l5LyMnF7zbOYPLN8y9
+ buIu3FKJzHFo4N5Ob7d5Favi8fLwuQeSZ28pX7qvc+sZ0uu+xtkbig6XZR0uyly9DXv0QOfTC0xeghOb
+ /Hii7+0s9+u2IH5nOVu4WSfcogn3RiDhFASNC3d7B/oSigtfpKc9jgy78+PrxddPT318YZafcm1uMOef
+ 7oABaJe+JyycX3/DmT3ZNw+yhHDqqjxlXaprRZaypEhdluteku1ZVO5dVO8VAJuuxlqC8dZ0h9dQvGUk
+ d1GXvaADhA7cC30B0Qt+fNywqtng2xetm67aly2MvMBl1MgcXEZBqsSNaALS5yCJvm9aQOIF2PT9rOL+
+ TRRo2uEfTe8HbzBGBGSNIQFK0b8V74DZH7lwwhDd8AYX9E2vm1s/t3fwv+QR+fhC0g+P4sKLlZ0O5Uyj
+ vH5kFlM3dwCTz8YXcojFXL1iLqmYSwCnhIsr4WJLONgyDq6cjavk4KvYhMp+XFkfpqyfWMQk5dKJqZ34
+ +FZMbBMqphEd00iIqidF1OiFVpFCqojhtaSIuv3ES0gNDkT0M2HtciAqg5RLYIeKX4PC53L5ryUaX/N0
+ f+YRoitMEmssYqqN/UuQvsVqYS06caD3oAtUYtExVCB07e91ar5lKj8q1EMaEIndpPw+k5rBU02jZxo5
+ Z6rotgXtVmkNxmktxrndpqX9JmWDBvk8dBpHI3ZAIXpALqxXMowhGsEUixwQieWIx3PEkgfFknkiqcOi
+ WWOymSOKWRz1kgFsLdOwvsuI3GI5TD+7Mn5ld+mqcMNzZ/Pczvb51Q3n2ZWTY4vGPAGRMaNDnlRvmlZr
+ mdcmDxNzcrTeeB0FXek3dGRu6ijcRinfQqrcRChd1Za+DJO8oil9Ha5wWUPeQ0n2jKSo+d9/6f/xv6ai
+ fzjBpVwRkucRxx8aKdwhSV01VAx5d7UwNzQuK+D2y2tOnjZnL1u4eJCcXHAuZ/W871rffGLw8huuuMOj
+ e/ZK+4xzVpf+uxgFrxciDtdEvR6o33uL9Lyv5Hxd0vW67AVvpYePETHhduTae9yup1N9T5eGP2/Ohws3
+ c4XbtXs7VGhvFBIKoL1FCJpbXe0qLnz11cfW39/j+/dLr185Pbpn/vqZcXrMxaGeqM25RtCqDu2yIGHb
+ ljB+ZvlO37QZdUGHvqrasyZHXZHvXlagrsh2L0t3L8nTllToC6o9c8r0eXX2CmJwFc1dwQyuYNhLKOYC
+ gr6IoC8h6AJd0CHTyCCFR2o9vq51zYZ0CWxdBHUfQ9PzIMNoYO5KNAF9tgA4ChyI2z9yB6IHYR6kXPb1
+ jSHaofC2KLw9inBmnz5H+PeL+7pHE10whHM4PVAl9TY78drBIfCye+Qjl/ifDkVlZ2vpjvVci0ousewf
+ NQOyQ8UgqWbEsHoIzIIQKwfxVYM4cGp4+LphPdDZ0Thq2DJq3DRkUM3CVbDwIK4XguG1Hr00UBNtQcc0
+ oGIacLENetF1+hGVxNAKXFSdXkyDflyzPkgYHshsdw5rVflWpfQdGJIuTVA2+lKm+LlY810m7FMG4kcO
+ 9nsm2jcL+aMEHVCNDGtGRu830KCiuhERXZohZMCVVvIpVfpephHWiEmmGObRTGs5dh0T5zpGz1XR7Uqp
+ 9mW9ToV99vn9p3L7jbPZhFSOLqBZRPTLhzNlg+mSgT2iIQyxiD4wYyEVx5FKGBRP4omnDEqmcGVTuEpp
+ bFjBAKaGa9gxYt03ZDc84jg/47q24ra15bq15bS16SpYsR3hG7PmCH1z6O4ZbfKUJsi9tM4iWodxWXmw
+ t1ePXSceuqItclVbzBstdQMjcwMj7U2QuEGQvIGRuo1TuI1R9kYqnlWVsBT90+DIfxlL/5cj+qiHoYiX
+ 4aEnpyQfWIvfd1bPSnxeUBIQnPjp1stL52862LnrO5zDO7riz3mY3H/mcuOJ4csfuHKqW++Cew/fijxl
+ UUoz8UvWvvFO5uZ7tfs+upefKbndlDh/Q+bxS2R2ltdgny8Q9+qoz+rY5/Wpn7vLqdB2LbRH2xMOC6EF
+ IbQBCdcgiL+y3JmVde/RY9zjJ6b3H1jduml2/57Jp/fWhZl3xwZidgRV0Cb1H6H3QlDVxk4Qb/Z8zwyu
+ dwnOWFWhrah0LykBp74vdBDXBSo9C2o9fDXagiZzBTmwimWtYdlruIEVbO8imiZAdi/odM3DO+cQLRxc
+ RgHu0yv0LWfCFUuTi8Ym5w3AqITJOcD5J5ruC/0f0wLCORD6fqkIpw8iN9A0eARCP62LBYBFB7QeEPd+
+ gEfhz6DAKDT+H6GT3Il6NyzNP7ieCbruGP7sVEKIVXGdfT3Lvp5nVs0hVA8CKZMaRvSbx4xaxo2aRvXr
+ eYR6Hq5hCA9O8wixddSgZcSwZcyobcy4dRQkrwm1HFw1m1QxYFjWb5JPN0wl45JaMMnNxKQm/fhavehq
+ UlQVIbqGEFNHSGgmxDXiDmSR3cPAjsVK0Iur9L1Z2Qe0K1apfS6GvcnQeJ2i+Toe9ipG/V0y/Fsxxr8S
+ HVSnGwWyOR2I8C5YRDeYklb4Xi/1pUzWp1g5sAaR1GGQSzGvYzlQJi+2cFyr6I7NQ1dap25Uj3kVDzvn
+ 8ixT2fgElnZUv3IIQzqEJh1Clw7qFg/tkYjqlY0ZUIjnKSQMySbywPicAhiCTuPopPWj8miEqn6zlkGr
+ Do4ZbdBsePrkwoqdYO3k8sbJlXW7aYFV/zShe0qXPA1vndJomdJsmFBvnkM0DqMzi+Ef7opcMzp4Sffw
+ Zd0j1/DHrxFEvEkid8xEH1lJPTCVfGwm98xM5ZGxymW0lJPKYWuF/z2l/R8eZodvOog9dhP9fFnmzQWJ
+ Tw908wsfZ1V8Dkl5f/u116UHLm7XLM644Z3OkryuWN996nzpnoHHfbnQHDxr2a1v0Ygxb8CYt61k2ITm
+ El6GaD0JRN/z1b71Wu3VF1xJ+Z3VpQxoK2tXELY75789/WtrLhraLId2qXvCkV2Ivwet70Fb4BGC5tfX
+ qVnZd69eV3M9p3z2HOLmbdO3b0798DuTm/WA1x++Op+9u94A7fYLhWwI6oCEuQurz/tnTelzIDZr0pZg
+ Pcsa1GWFfwudIlCmLqjTBHD6MoKxgmauE/rXicw1fO8aFvR4dc9jO8Fa6hmNzjntzgliVbt5eJDpk0tG
+ 3qfNL5mbexiZnDMwddMzc8ED97If0cE11BVDArdSQMcFqRjgT/6/iI7E/Z+IjtqP6KBc6rgf0XFA6O44
+ vfP6pJunTD+62wTdson+YJmebFnafqqWa13DM6rnEZuHiO3jBuRx47YRw7ZRg7YRUtsQoX0E3zqKbxnG
+ twwR2kb0W4fBHxm1jBi0jOq3juo1DOLruXq1bKPKAeNihlEGGZvSgk5tJiTXE+NriLHgVBNiawhxtbiE
+ RmxcPRZEdLegRmWfcmXQi/seVH+qFb7Vwb+Uwj/mab3PgL9N0vTNRIVVmgZXGfiVIH5VaUW2ISP2e9AB
+ 71wpolv6e73op1KpN3myviXqkQ24jHazOo5r5+TlSoZjLet8x/TdxokbZcMXi4ZcsrgW8X2YqF4Y2GoU
+ SJUK7JYK7pYJ6pQK7JQK61GIYirHcVTiuIpJQ8opPFXAlY6nA14AOouiV8owL+sxqWYYtrGNacPGnCmj
+ Eb7+1JLR1KI5e4bUNqJVPaRaOapSPaFeP61dN6NVO6NdMaiTXq7x6YWo98k/rxgd9rYUvW593NP0jxun
+ /n5+QfTdFamX58VenpUAHIE7ViKepL/OYv5wxv2Ph/V/P7oi8u6+5K83CnG+GmEflJLCjCqrH8RmPQhJ
+ efvmx/1L913Oe5+4cN383EVTzyvWN+7bnr2MNHc++PqXxsia5+SW9fiG4ciKFXPWsa7PLqJI72Mc5sUv
+ 3UcfteNSzwoEadBWIZ8X1FPr3dd8a4H3U7heKNxqFgo5QmhmD1regzaFwi2hEAh9mr9Ym5l/7fINGTvn
+ Yx6XYU9fWn74aPv1q31C3LX66rc9ZL+JobSt9U4IGhAKu/drpXu/hhddaFO4nnkt+rJW7zqCvgbrWVGi
+ ArMuUO7ha/YsaNMWEbRFJH0JQ1vCdi+iu5dQFD66axZNnkc0zmk2zWu2TGOraObRsZbPb5hdtzW9bGlx
+ wRSsFzU7a2DuQjB1JZiAdYr7SxXx+mdB9hBYlP37qD4ohe6j09EgqAOvQnJAkc4gcWdQ+9P+oPgPRkWB
+ p/cg6IOt6ndszT9csA57dioz4nRFjW01w6qKY1g5TGoa1ydPGJDH9NuGSe3DJPIIsWP0nzNGII8DuRPa
+ ePhWHrF1iEQeNmgeJDUPkVqGiQ0cXDPPoJFrVNVvXNZnlNOJT2lGpzThk+sIibWk5Hq9lAa9pHpiYi0u
+ qQGTUI8+kN5xJrAJJMXlP1cpvS+V+QAQLgAjWof4Xo74VqgVWILJJJ+q414q7XeNbzEIrFIPbgEYaLVY
+ ukICTTKy87hv1ZH3xZIv8+ReZyt8K4IltpoW9DqWc9yLel0rBz3rJq6V8jwK2GfzOU7pLNNYOiaCBg+i
+ Kv6kyP7skv7VIePfJve9STawUzGCoR7dr7Evd7YKIC3GMjViehExoD+zg5DQTkpuIeV1GVb3mjX167ey
+ 0V3D2rQxbeqoTusIrISrmN4vkcGRLhhTKxrXKJ6AFY5qFgzAkytUPn86dsv99xvux+7fkLl187iX5389
+ uPvHJ5/jfj/EfX1EfN6LvH8qcvvibxcd/uOiw39e9/jtyZODfj/FY2MVC3IQDRXE+jIiucmpo+VRaITn
+ z6iHH389uvHY49Yj10s3T7pfNPO4ZHnjnvXFa2ijE78FROqt7Dxc2TmzuGWwsGU0tWrLnnVrYTnlNlvH
+ 5p3yC7ZoankLQbULY7G9TW87y2+0Fl9itL7d3QBZ826hcHgP4guhFQjEcuE2tLsICfv5S1m17d5v/NQ8
+ bx+9clf2wXPU01dmHz+fiYq6lJF6vyT3RQ85cHY8b2u1RbhDhqCaHSh2avVm77QpnY/oWdLoWdGirSBB
+ CKcvqtAESowFOEOgwxAgaAu61HlE1zzYua7TNodom9ECH4PNsxrVcyrls6DWhsqj6PkG4a6cR7qZYd0N
+ 9d0MjV0NwD4ji7Ogs5wA2gFM3PX392G46QGDDkRMcgGlIlAbAnLHGpxB69uj9PfdCwprj0bbYwF6ztiJ
+ YH4Wb3pJz+SOmdXTMzafrtlE+dqVlzi39Tk2AdPCAygOXMOEQduEUdsoEDqRPIwnD2M7RnDto7jWMSx5
+ FN8BXuHhWjiYdh6+a0SvlUvYP4PEFi54Vxg3cw3rBwwrew2yydjUJmxaMzGxHptQi0+t00ttIKXU45Nr
+ sakNqOQ63QMZnacDmsQ+lUp/rlT4UCb7qkD8Y5l8cAsmqA4TVIlJI5u3DF/om7vdM+tdzXOMbNL+WScb
+ RlGIp8sndItHtBz9VnbkfZHkmwLFF5myb7MVgqqw4bX6kQ0miWTbApZHyeDFApZbAcctj+2Q0msY24OO
+ 6UOE09V+dEj7tkn4tUl9bwZwL4XgLrUIOtiNBgunK4OFR0k87bRhbOqgXkKffkQ7MagGHVymCz6Dctr1
+ yumE2gHd2gHlaqZ0zYBMOUc2my2R0HcMpCnTuLJpHDnwmMqWS6Mrx1XIfwk48ujJn89eib3xk3v1U/y5
+ 718+YUdDs6USypRSy1SSCpWj0xR9A0Wefzr40uewb6h0eLpyWplmfSexl2PNG7IbH3GanrjJY72Njrnw
+ 5tuFl1+u3X527tp9e09vkHgxvuxtfcnb5Pod0mmXYzEplpt7z9d2nJY2DJd3TGc3To8uuXEFl1j828zx
+ tyGRdj9+nOyj+i+MpSzw4oX8lFnur+byJysLpTvb3cC37EFLwLQIoc19g743KtxrnFsMHhi9V9J80i9C
+ 8+5LsWv3pW491H71zvy7v2NkuFdO+sPmuk8sWujwQOrKQhUkrBdCqQsbL1lztvR5VPeCWhdfjbKg07N/
+ 0dTsWdz3LYwFLTpfi8rX6ZrT6prTbp2GN03CmibVmiaU6qeUy8flSydUysF2gFrUg5cqNpayVih1W6Qu
+ APvb4vUcwE2UAFptzcFilvNGpu6GYN+isTuA7hqTzhnhXA0wznpYJ+J+f4stWv80GhDncLZYzGk8wY5o
+ 7Gxodd4QjHFY3La0fOxo9cHbMi7YtrndnTbq0j5hUTtKrB4l1o8bto0bkceN2kdAzAbK/rfQ8a1jOBDO
+ wSttPFwzFwvieseIQdsgsZ23H9rJI8YdY6atw8YNbP2KXhAHsZltQOj4hDpsYi3QNzGlnphaT0iuxaU1
+ oveFDjjZoW2iPuWSH8uk3pVJv8wXe50vEdCICK7HRtTiM9qN2gZdeYu32Ys36wbto5u1/SokA5rEI9pE
+ o1uOpXbLJ3Vpfq9UfJMj8yZP/mWWzIs0mVdpiq/TNb+V6EW2nE6hOmf1uWYxHVJolmDWFQxqxDFRkQy4
+ P1nep0nCt1n2B9hBAJrJyGpBVM2IXq2IXgBhVM+YABhvg8xRkzSORWKPZXi9vn+x7s98eEQFLK1dq4Cu
+ VsSUKWeLlw2K5oAWduaR+L5jcX3HQYtYElMsBeieKRbXLRVRK+WfIe4TJ/49Xd6/SMm/RDqgSCSqWiyB
+ LJ5Bky3qVynqU82lqCTUy4VWSAdVycQ2q2Z2wEoYOh2j+pzZE2Pzp6dnHfhz11cE38srn7z8cvbRuwt3
+ Xpy9dMfK85bp+StGnlctPK+ZXLyGuXJbPafYeXX76dKG48qO5eKOxeSq1dDiGa7g3Mz209F5n0fPEEaG
+ v3ueU4oPvdRZ78vu/jEzGDk9lLI4V7q327OzMwrsyp5QuAdt7EGzwIrs7CRP85+N8C+xpt0aek7G5aJ9
+ gtXf+iLe+Rp89LX8FeSalHStpPBhU82b5qov49x0aLcOEmZv7PmNCNx7ptDdc6oUvkr3Apy2pNO9oN4x
+ p0yZ06DyYdR5GE2gQ11AUOZ12mdg4NbeNKPWPKvcOKVUPixfzlMvYiCD0uCXvGXNDWUNtVRM1TWMNWAG
+ GjpmOngblJE9WKGIM3EkGrvombobmV06YXzL2ej+ef277sTLZ/AuplhbAsEaTTyBJpzCEU7h8dY4gjVY
+ 9qJv7mpg7m5gAtZGAwzBy2t6SdGn2qlnaRNn2ifN6sf068YMGieM2oHKR4F2/6/QsW0j+OZhXMswrnkI
+ 0zSIbuSgmwexwMC08Ihtw3odo4aUKQvKlCV5zKRxkFQ9QKgA85ZUfFozNrmJkFRPSKjBJtUBiRNTGvCp
+ TZjEOsSBUuaZRLrMj1rp90Vi74uk3hRIvM4T96/X+lWtG1qtm95KbOg7wZ69wOF71bCsQkBDS6mYf+2x
+ sIajuQw12uIp5qpbQZ+JT77Cx0LFt/nyT9PEn6XJPM9Q/ViE/w6cfcOJ2E6bpG7rGLJBeKtuZIduAhMb
+ 34+OoGvtQ3ebQb8k/Fu92rcWlZ8Ae0SDRTA0ADE9ZxKfO2WQMWyYxjFLZ9rEdloEVGP9imG/ylWjW1RS
+ aXI5A2JFQyL5g4fSmH/E0f6I7v47rvtoCk0svU88pV8kue94AkMqmiwV06YY0aYc1qkUDnad0qQiKSJx
+ PccT+45msI+XTshWTiqUDMvnchRT+xXi+mTi++TSmapFPKAAdO+80YjAem7ZaW39FiQMY/T6v/p8/t7L
+ 83dfnL9408rD28Tjqtk5T+DUjV3OI67eUiupOr+y/Wxx03Vx5+TMuvnYiiVvyYYjcJjdfjgj+HHthipC
+ +4CZ4dHXj09FBl5Lir5KafZbmSlfX6wX7tJ2tof3dtcg4S4ELQoh9q6wdGX949TCxbElO57gFHf+TCfP
+ przTqrDBIaPsfGi8y49gR/+AM6HBdjHhTrkptwYZsdA26PQq2hUGz65eZ04Te2aVuvmgLKpCW4RRF9Qo
+ 86o9ApAv1+nhawHr0s3X7piBk2fgLTMwQIMhCzRaZjUqB5WLaOpptTqfAtQveCqfMIUZ6MAN1DRwSmpI
+ RTWsqraeBtpQA2cCJ5rrGliijECY9zil9/SGyc9P1qE/7d4/NXOx0TJGq+tr6Rhqoc0QODNdnIE2xhCB
+ M0frnUIDP6PnSiRdssa9uaOXlnKyjX6mZ/xkx4Rxy7g+yLG0jhu3jukD2902TCCPEsEdFJyWYWz9IKZ+
+ EF3HRdZydOs4qIZBTOMgtnmI0DpMAlrvnDTtmDAHd9P6QXw1B2QbcflUdHobOKQUoPU6TGoDPr2ZmNyI
+ TWpCxtVpH6gccErrU/CvlflYLPWuQPpltsTLHPHARu3vFbAfxWpJDcjCdnw1xaBlwKqSaRpUIf8p73BA
+ 7fEsuhpVcIIPXV+FHvcveEZUaH7Ok/lULPsmX+J5lviTTPkPJdivFfrfKg3DWs2TqCeSuk0TqHoJNGIC
+ A580QIyk6X5vVP7eoPazSftLrcqXevkfAKZOUY+ggU0v8IJJbOGUXgaPmMrSTx+wTKSbhbbgghoQwc2a
+ UV2KCQyJlIFDmUN/Zg/+kdb/RyL1r3jykYJe5eYJbPOkbhlPoYArk8mWS2bKJferxLFUIthyoSzxMNbx
+ cOaxWObRVNbRwhGxxgVF8opG6yK8ZgZWOKaRATbNshRSOeq5w/DKCWTXvNHYmsPq3uXdvWcQFMfsD3n4
+ yvnW4/PX77udu2rldevkhWunznpYXbhk7XmFdMb1SEK6xbLwpWD3/OyOzX6ZVmDZy7dgzFmPrt6YFPh5
+ XlfD4f/l7oa8dsn03ctzfr4XUuLuzQ+V7K627m50QbvDwp2Ff6w5Twg1bexG81duTCzaji6ZDgqMeEuW
+ nKXTfbPOwAWx5z/Udj/9GWn38Dn6yXPdN69xUSFnGR1B0HotJCzdE0YINu5y541p04qMBdneRSX6ggp7
+ HcHdwHDXSdw10sASjikAExgIIPT2ac22GVj7PLydDwwMvIShnFGvEpuj8/Yz/JInws4KY4xE6Gtp6+lo
+ Y+CaMCVFLSVlhJImUlkHrYZFq6NJWlp2llrPHhFj42yyc51/+hu7OMoZEqSI2ip4VU19DYQ+DEmCofS0
+ MUYIrAWAo6OIrkSDKzakD4+MMjNOtjNsKWPmHRP6beN67WOGLSP6DcOEpiFi8wihZQTfNkZoHsHVDaKr
+ OchKFqJsQLuCjajiIGu4qFoepmEEJBz3047t4H0yZlQ/TKgbwtUMYkr7ASwfk0PFpbVj0lpxqa24tBZc
+ Wis+qQmV2KQT2wA/UEJzTOiS/VEp97lYEXiPu3FHHiYdBdt0/WvgH7OkwipUM5p181sxJV16hT2gzqTy
+ Me9v/6ojBQOI7oXTMztXN6EnvJkLeU3o6BqNn2Uyb3IPPc86/Cxb8lOlzucqjG8VIaLNNJlqmUazyGSa
+ Z7IsEuj6sTR8NBWwCuB+9Zq+NbD3pUrvAYOgVh5sEAjvVEzuVy+eQJdMEnJ4YM8OIYNjnNxvGNODi+xG
+ RgLyEVM5gSWWNHgwY+SPLO7vKbR/FbJk28aJQysO87vnJ7fP9C3oN43DS4dVC0bUc0ZhqSPqMYOyof1H
+ g/sORfQfix84nsMWqZ2S716BMzdQA+vEniX95jm96mlS9iAyqV87m6dbMoprm7YcWXNb3vXeFb6CoEQK
+ xe/uU/s7Ty/cfHTxgret5007Zw8LeyfjU7YYI1MxktGBmBT9DejZ/J7r6JZVv8CEOmvUMWPUNWPOXLgw
+ tvI1q/S2lzfaxELK0lLN1ZXo7W356b0roz12dri4ozGUP9kI7Q5BewMQ1Ly1l8xffz27dn5i7QRvSY+7
+ qMdZMhpYMusTnGAK3NnLj2sZd3zDza/cU716R+neA5jvF8vCzAcTnIS97UIIStjYezOycGpgVou7rM5b
+ hY9uoCZ3iBObhlNbVjO7Jye3LEfWjVmLhJ5ZXfIUDAT1znntdkCuG4MXd6sll6sGx8CevYRfuYB0sESf
+ IKIsiSgbC5yFiTYSIaEDE0PC5BDqqnAVuKaSBlpb4dQJ+RdvdPPK7erIrlGJerfuyJ89q3jSVFUfrkxS
+ VyJoqOprwwzROsYYHXO0zikM+qwB6ao94cNTvYwsi9beE+3Dhq0jhMYhTCMPU81Bl3NQVVxMFRdVztau
+ 4OiUs3TKBhBFTK2CXlh+r1bxAKKcjawE0ueiKtg6Zf1a5WyduiFM/Qi+ZhhTwwM/i6rg7BebipnEjA5k
+ OhmZ0YFJa0cntejGN2jFNmpG16sdyG2zj2iQ8i2U8ylU/1qCeJ2l/CDx2LcKlbAW5Ndi+Z/F8omN2sDm
+ F/YY5dP1Yls0flaJ/6oTSelWL2EQKBybmdnLXK4TjXWSOnoqs0PLp/jQp5Ij+6vtGmA+NVq+VboRbQap
+ PRZZvdYZvZaZA1apfWaRHdjoLlxIG+prtcaHMtV3xcqfy5W/1agGNqpFtCnFUwAvV7N4BFU8Qiwc1c/i
+ EVPYuCQWLpGNje3XjmMpx7JE4zh/JnJ/S+3/PYt5tHkSO7Fzfgt6sAs92ISuzW+7MPkGILTXTCLLJpEF
+ E1qpPPmYvmORjEOx/ccS+48VcEVb55QH1nXHt0nTu5bj27asVXvqol3VmHlKLzaThS8e1GsZPzm0cn5+
+ /cq28DUExbe0vPG+b3r1rv2lW45eN+0ueNucdjU4aY/FEkVUYQfMTv5HaaONALo2sm05sErqmMM2TuEb
+ pvUap4xoAmfO6iva6M9vYa5wzH/iDcT0DOUMTeRcXBGBvp6FWe9uXyXmZb6BIKpQWL22FTu38mZ8+crU
+ luPkjhVv1YC9qN+/ZNC/ot8jIJGnTTqnXXNand4HE7zuK1+8KX/lhtLzF6Rgf6eqoierixkQlL0L+U8v
+ XhieNxpfIsyukxZ3zPgbFkvbp9cg11Whq2DXfmbr5Oiq2YCAQJ3T7ZrV6ZyDt02pNw6pl/ZoJJWpfwtW
+ ffRY9fJ5uIO55ik9DScL7WsX9S556jo5y7m7q3t6YJ3tiUQsXENFCoMSdXGR+xVGIDPODoy5Vzebh0Yg
+ XrxU83CVNydK6CNEidri+hhZUyMVc1NVCwPlEyQ1VwuEtxvy/WvdxGxcNV0flHuquEig17IBeEm/dtEA
+ ohAoe0CrsB+Wx9TM6dXI64Pn9MKy6LBsOrygX7eEg67gYSuHMCUD8CKmeiFTrYwNqxlC1A6jKrn7b4Oq
+ IWLNiH45h5jbg8ihIvLpILqjk9uBytVjGlWiGpUOgC0fgWWSX/MUf5ajk8GARb8TGCP6XCQf1AD/WaX6
+ uUAysFItrgmVTTXKZxql94BxD614mmpcp0p6q05dp/EY79zskNv8zDnBzmXqrHVyt0pgk1hQq7w/oB01
+ gdW7OkEgi9lhlNN7MrnbLJFqmkI3C2zQDWgAg9VY/waEbzX8SyXMt1Lbv0YnuA4e3aIW1SwVT5bO7lMv
+ 4mLyuLgUlk7igE4SC5XIQceztaMGFCKZxyOZ/4pi/k/OiGjzEoq9ZS+AbkMQUOQ7CHq0Dl2e2LDrAbzZ
+ GUCiIlVPY8qntHIHZRLA/DXjYGLfwfzBw218Bdaa1tQ2aQk6LYDOjm64sdfc22ccc0GNjW1cNmjSOmrD
+ E7jPrnourt+HoIiC4iv65oc9rhvdeOR49cEZrzsn3a6YuFzE65kewxn9x8efcPqYJ2vJmraMoKwAtKd2
+ 5f57DFM+gWudP8lYvl/LfPI57KSe9UGimQhM9y9Ftf82MpO8epng6YFyc9FsafaHoMaljWjO1P2BaQ+u
+ wGFo3WpwzZC9QurlE3vm8MxVPHlOq35Mq3HcLKXB7FUQ0uuR6sXbKp7Xle89xHz+ZBkd4TLMC4egMvCe
+ XN16PrfsOr98cnn95NqG7ca6496uuxDy2hZ6rO+dXdw6Ay7KvBXwKUGi8tGgY6JlVKGGJV9IUU4sU/se
+ rv7iJczLTcXWSNLWQPq6K/zbB6vgIKuIKPP0DJfM9Mv+39zPOevhUJLGBqIPHyGKK+0GJy9M8F36uTbV
+ tUaRYfC7NySszQ9aGB4yMzpmYipqbSdrf1bZzlHJ5oSc82nF614q77+ox+TB8ik6hf3wvAF4Th8sp08j
+ p1czu1czk6EB/tOzgcpZ8EymRipDLZWmlk6DZfXqFrIwxSDq87DVo7hyHqKUAysaUCseUC3jqFcOalUO
+ IisGMTXDejXDpFIWuqBPu5CJKB7AFPZhMijaMc0qkQ3yUY1yB9LrnL9kinzKUPpehAmqMQZJksgmfb9S
+ Vd8K+c+lsu/zJH9WakY14+Ja8VkMowKWccGgYQoTFdUKS6lDVzeZj3M9l8eurMxeXN+5NrbuWjkE6ALy
+ YWRZv0bJL7UgqQL/Wasb2oBLp1lHtxqFNhCi20jhoNeMjI2m4EPb0CC986Nex7cM7lcKi2hApnZiE1s0
+ I2tkEtuUMnpgyVS1BIZaMgueykEARHpsv1p0r3QU/WgE/fdoxm8lE7LUTaPBHdfxtev8pXtrq0+3t+5v
+ 711d2HblCix6Z0275y3IcyZdi8YdAlLdlFbFmHIhT6Z0UIw8p8hcVR/a0p2DzGeF9sObzqw11/YZu1Ku
+ RfmQdc3IqdZR297pM0MLjuwx56HRu9V1164/1Pa6Rbj52Mbr7gm3G0bnb5rYnYMRTP/75mNZ5vCN0VVP
+ 2hy+U6AKWFkAk1I+iSgaRxaPoeumzdpnL+R1nXsdgne/rXj5LsraXlEX9S93D/ibdza/fl1sbw8Da0TX
+ NjJ50y8ZE+69fLu+RYv+ddP+NQPmMomxQKDMosiz8NoxxbpxzYZxk+RGsxfB2peeKl97CLtyS+PuA+Sb
+ 10a/fpyidH4T7lVDUO7O9s+VpXsrS17raxe31j13Ny7tbl/eE16FoCt70MX1XefpFcvhZSPWMpE8CW/g
+ yTVwJauZEkUdMmmVKtEZut9/YS55SNqa/O11RubLE0xuolNTnVc3xbuf8ay77WVOyvXn94ztrSQ8XMUj
+ Igx7es9Nz5+bX7AfGT5B6TTKToe9fnX8nNvvzk4H7Z2OnnQ85OApcumB8pV7aq7npV1cJG7ekfsaqhZT
+ oZrSpZzGUE4Dau7VSOtVT2eop9PBo2b6/lONtP79k0hTTaJpZPQhQLNXAQdXyMGUAqGP46vGUKWDmoVs
+ lUKWStGAeglbq5KHqx7Sr+DqlbBwRUxkGQdVxgWBHwWe5vdhEtrVI5rkIppkDuS3ef7Ik/2YpvwpS+tV
+ knpojVFkg+HPMtjnMplX+aDFXO57OSy8ARtWh4prI+SzrPM41gk0/dBq1K8kzaRkHKXJhd5iPz7otr19
+ a37Do3XCNIepE0OR+1Z/7G3psW/1YNk0yrcUHliN8StFfi/VDalDRbYCeDQ+loIKbdUKaoEHNmt/K1X/
+ Xgz6t2zIUxeq++2z2/UyO1DJHWox7TKJdOX0/Y0XsES2ahygS/eIx1KPhHb8bxTl9+Ih+Y5FfQr/VBPb
+ prTBoKaOxOacWl4/t7LpNDpnzpm14C7aDq47je6cm4Y8xoUu7A1rKl+PPArvmlalr6j0bqqyd1GsTQPm
+ mlUX36p21KhiyLScd6Jy8FTLhC119lT3uGkH06yl9dTg4LOxudBn74yMToidOqtu6qSIPyGprfff9u5/
+ 1DQ78te9p9Yd2CsE2rJGmwDwgMB1FlE2gSofx9ZOmTVN2Rb32n5N1L3wQNzrtrL3be1LXqpv35mEx3iQ
+ OwL3dluhvfrllYSRuefMORfKvCFliUhdM6KvGtAExO55ZPssrHZCuZSnWDUGrxs1S2ywfBagceGBxOV7
+ StfuaNx/iHz1Uu/Te+Pc7Nvz89kQBBoKEnZXv2+vvN3bfgPtvtrbfgL6mXf2vIV713Z3PFY27KaWjHlL
+ hD4+aAdSbOBKtwxJtQ7K1PbIVrRrljcZRCdhrl3728P5rw9PtdKjrbrqL42wHk8Mvh5jfRns+dpc/jD2
+ 18kPj9UCvqnX1pgPDTssLNgtzVlP8czpXaSiIrUfv449ePyX9+1DF24edbz6l/uDw/e/yD/xUb3yQPL8
+ 5aMPnssEJGskNGkk9Mgn9MmD1eHxdKX4HoVEcKjKiT1q/xyV+G75eKpCbJdSUg8sg4nK6kfngk5dNqZ4
+ CFc1pVc5iSnkaeSylXIHVIG9ye/TLWERytiABkfIpaFBH2/FIBFIv4SFLOXgCweIaVREHFk5vEX2QBHZ
+ C9w4P2cqvk1VepGk5FcMxjGw38EQdIXyhyKZwDrEz3JYUJVWYIVGQLlaOtU4hWoc2UoMLMN+DFd9/k7s
+ 0wfxkADZ4hIMk2vHnnZoHDYp6AdkArCDV+xjudinMgDS0P5RpuNfjvQp1PpWph1Qgwit106n66fRAUUS
+ GBjAh0H+LNYIKtGmzlwXCH34m58nV1/SZi4W0oiJZKVEhkIa2MbIUo7pl4+mS0dRxKKpIgFt/wrtPJg+
+ IFs+hqoeMc7pIAQmiYfGHW7vwcwt28ysWnH5BtxFq/Et56ldj9kdrxUI/M0XR9ftWAIT+iyia1YJzA6T
+ BdLkecWOaa3OKVLLmGHNoEH1kFk527So17CGY9I2YkodNWdwbZhMV/7cy10opqTy2inHg1izA9iTB7RN
+ D1x5olza5Dy9fGtuzX164/TwGom9odu3ju5axjTOI6umdSqndGumAW3Gon7cNqP1xItfmudvSly8Ku/p
+ Je91RenyDdWIqAtzs0XAtwiFORtC/4nN67R5064FfOciiTyPbZ3VaZ7RqJtWLpuQLRlTqJqC1YKI3mT6
+ +Kfi2VtHLt6WvX5H4+59nQcPdJ49QX7xMe2ift93L3vJwp0w4Q6wQz8h6Ack/ArtvRPuPRDu3Njevriw
+ ajMkIPULkLQ5DfKYQueYHH1GuXdalcpT6+FiKQNmOSWYtx+Of/4on51s1VJ9Yaj/4fz4G/7Ep7kRv9nB
+ n0PU95Rq76YSR3LjaTbLbnzSep5vtsQ3mxo2onfjKqo0ouIlP347/uS9iPfLY+cf/Xn5zaEXIQrvQlUf
+ fpa+9uDvpx9FQ7NVUsiqSQzpWIZENEM6hi4bA/ibPdLxNPl4muL+6ZGL7ZaK7hSP7ZZPpmum0hFpfahs
+ Fi6XjSsawldPGVZO4AuGtHLZatlMtX0Hz9DJZ+LzGficHiwQenE/vopHAm6+hIMqZeML+vHpNFR8h2pY
+ i9yBiu6rsfUaPjmyr9Ok3mWr/KzC/KrDfavS8avQ+FamHtEMyAFIvxKFj1kiX/MlIxu1w+oQvyrQfrmo
+ 91Ga996L3Xj8+6svhyLTVau6jOr6zHIpmCwaKp2hG94GdgfIvi+U/lioFFSNCa0n+pXr/qpF+lfCkjoJ
+ 7QtnG6cdMhkGUW3I2Hbsr0LNyGLM6BLw2dEgnQdBUYu7nzvHzhX3EXO4YG+6CmDqhvdJB1NEwyhiUXSp
+ n60HgzqPRNEkwjulASsmqV0nIOt4ZPbf5H7tEYE+m48BtCrqvB5TcGJoyXl69cLq7tXlLY/JFduhFRPG
+ knbrrHTjvFgTX6J2TLx+WJk8QeiYMOuYPNE5Y9cxfbpxxLSWRWrlGvWOn+aMOk9OXtzaAElGv/Utv4rG
+ s1FZZiFZRr4JKMqIN3/v4diiMyj4z2xYTWzoj2/heZt45hqBsoZtXoJXz6lVTmvVzeLaFiybJhwjS0hX
+ nom6XDns4nnc8dyRW/fVq2peb23VQVCdUJi5CwWtCp9NrJ/tE5i2zeKbJpENE5p1s8pVc3IlM9KlM4qV
+ 8/C6aYPMDoOXoXKut/865y1x8Zri5evK3jeV79xVufdAPb/42tZ2CiSMgYShQmHgHhQohIIgcIDWtx/t
+ rl9b3zg3tWI5sIACbYxUvnLPnEL/gurQMmx0UXtyETu1aDQ2a9kFQJllWuWV+J5OVx7z9tz4c8H069X5
+ r+vzAatT/osjPnzuq1nenYnh8+NjpyemjGbm9RaXjKenDei9qOpGjZR8Of9Y8XdBIve+/n310++3f/71
+ Nk7sY7zUi2CROx/+9/WvPyPLpNN65JKY4jHM41GgPbtPLIx+LKLnWAxdHBBn/zli0d3HIjqPxlKlkuhK
+ gAgAuFfZLEIeh1Q4pF85YVwxpl86gisYBMN16kmdaikUeAYNlU1D5/Sg/xE6rm7EoGoIV8zSLWHji1mk
+ zB50HFktokn+QCPjZgmDGF6l8TFbxqdYI6xVL7LDADAtwpsB6whMQyNCG7XeZR95mfrbtyLRiAZ1/1KV
+ 9+nK75I1PiZpvwpXfeIv/SFSJixPI6+dVEw1TG7FRDVp7QOW6ADgq+FbpfI8XTygGhXZZhTYiA9uQH8r
+ VspjGA9BN3vXPSu4J1IppMQ2QkwltrTDbnnzBwQlQVAaBCULofBB/oNqllXhMDJlUDW6Xz6IKva16a8f
+ bYeCuo9/afzta8vvfu1/fmk4GNwhF9WuFFl7PK3xSA1dqp2n2Daq3DipWT+m0zSEZ0ydHJp3nl9x5687
+ T22cGtk2Afz8wsGDpZNHqxckKiYla8fVKHxD+sIp5qITa+XcwKpz79IpyrRp97gVY9SOO+4qWLyyu3dn
+ T3h3R/hgRfhgdO0mY86TuXSlX+DBW3IZX7MfW7OY2jKb3TWe3zUa3ySywFz9JpK8rl4jkKuel29c0mxf
+ wZEXLUqZpu8iZD3u/+589bfL98XTc8+vreeCNhXhXikEpUNQyB70YXXvFk9g3z6q3zSMbRiG104qV83L
+ lS/IVvCVqgHnf55YxjIMKoBff3fcxfuoo6fI2UuSF69IXr8p+/qdVmEZaFr2E+79EAq/QJCPEPoKCX2h
+ 3W/Q5ivh8pV1gcvcoiUH5NHBGPWCHBg+Ygjkh1Y15jbRixv4zW2T7W2L1Q3Lab7p8KjxEM90ctiFP3Zr
+ afrx6uzrDb7f1kLwjiB8eyFwa953Y+Hl7JQnb9h6eMJgdBY/No/jTuM7WToVnaqp1TKh+eJ+6cdfx/z9
+ JPqPZ/G/v884+DX7yMeUQ69j/sc3/Y/YZtEUulgSSySOcyyKdTyk93AA/e9g2t+h1EORPUej6ceie49G
+ Uv8Kp/wV1S0S1SUVSVZKooKIrlfIMykdNS8dNSnk6RcMEnJYANOlHktWS+iAJXVpp/cgcxiYXDq6qB9b
+ P2ZYN0Ks5uLqeYa1PLMqjnFhHza9U+tAE927nm2U2oz2y1faxxp16Ee3E32KlOM79eI69AJqNL+Xy77K
+ /P1L0aHwFqWMXkJgjfqrFLkP6Zo+2chPGTpfMrX8C7SCSjQTGkHyUg+ANX5WwMOadVNZxuGdup8rFF/k
+ SP6oQUaQTcBb6Hul1qvk42ntOM7W1d4lj5rBk3m9JsnthJwOM9rEjbkVn9WVkLWFyL3NRJAVnlh6Vc08
+ lcvBZvJ0Y5lq/p1i39uOBFOPh/eKhPYeD+47+q7xPz81/SugSyyCIpnSI1Y8IFrJFqlkiZazpSpG1KpH
+ EbU8LHXqxOjK2ZkNt9F1G+6GKYD1lI9KJ/X+Dsi9WWPHCiakamY12xaIrdOGNIEtd819YPUMdcGMMmfa
+ M2NDHbFljTuubV3dhS6vbDksbNrObtt1TxiA9v/OeRvmmhNn2WFo1XZ4w3Jq12oeslqAzCa3iexVBH0d
+ 1rGu3Lgs07om17Wt0rGu1rEM+qgMSnv0gkDz8y/piDQ9Fg/kiLIgKAeCQGYwFoJ+CaFPmztPJvie/ZMO
+ AzO21Am95jH12mnZqnmZqnmFmln1VgGudc6kpN/kZybi+lupM5cPOnkdcvb86+od8ZRs65Gpuxs7L3f3
+ Hu1Bd/f27gih+9DeA2jrgXD16ta83TLfbJwPhv+VugQSbYvHyQsiPQKpkQ3Y0iZhY9NQuGUObZrvbpps
+ rBuvrZisCsxX5+zXprxWJ2+uTj9bm/HdEoRB64nCjfi9jUjh9s/1tacTU659g/oUjlb3qHbHqE4tR6OI
+ rpjeLh3bIB5Sfdyv5IhPyaGPRX/4lP75o/LvnxUHAyoPhjYfius5ltR/PGVQFOA1IwaO/qL//ZN2MJB+
+ MKT7j+DuP8Jof0bQ/4yg/RFG+TOk/XBg87HABpnodp30Pr2iYfPiEctCnmkuWy9rACzGAjAjrYRu7aRu
+ nZRuxL7Q6SDfgitkYisH9ZtGjGizp3rnHemzzn38s8wF565JmwN1lCuVvXpxNTo/C1QT2w2imvAxbfjg
+ OlhUCzq0QTuwFuCKJN/nH/atEI3q0MhmGYY3677NkH+brvwmTeV1qpJfkfbPUs1fZWpB5WphNYiQWuKP
+ cvT3Kt0oil7agGlAs9bbAtkvFfBoikVYq75PkeqbFNEMMr5nzq1z0rGCaZZHN4xu1E1v1+8c8+zheTe1
+ exUVOdN7n67thvIEz0rpJ/LZ+gVjBtG9moFdUvFspbQh5WSebPqEQvyw1Lu6Az4tfwVQxCLp4hks0SLe
+ kdKhIyWDogVsqcw+0NelkD+g3TF7Ymj73OiuW/ecUc2IVuWoej5XLoryt1/Lf/t3/Z7IEauYh9XPo8qH
+ dVpmjegCu65Zi6ZxYs0QoXHYFKDWKUPWM2vnFredxlf0mbMw2qxGw5hq9TiyVWDas3aaseRAm7ftXbYa
+ 3DoxsWs1um0wtIVhbQLgm1rbqmLzsmz7uhx1V4GyLt29okhf1mXwTVvYFuUdJi29ztOCV1u7Pzd3fmzu
+ +G7tft6DPu4K32xtvdjcerm283J999Hi9oWhFWMqH3SkyNTPyDbOqrUvojuXDVrmTuTRzL+lIa59EPV6
+ fujqi79ffpdrpjuv7Hkv71xY3nRe2bKfX7RcXjm9s+a0t+y4y7ddnTJdWdCfWtTqF0h1LB5q4P/ROHew
+ gy/KXladXcWsrhntbVhCGxZ7q/pbK4SNZdLmouEW/8TWnMvatBd/+AZ/7PWGIES4mSTcSRHuJQNjuQd9
+ XVm7xx492TqAqB/UrOCpFXKVsvtlM3plUmhS8d0SoWSRgNZjfk2H/Br+9G/6I4T8d3QPwD0AlYuCA/qR
+ kthS0UzxgO5DPykHAyl/hVD+COr6PZT6r32h0/8MpfwV0HrYv0kEjOpHk5HJdL08rnke1zSbZZA1QARo
+ gGQa2AMHYF3IZCoirQeZSUdl0pC5DExeL6Z4AExpWPTPOQ7Mn2PPewwJLo0uew0tnD9Q2nYhqx0ZWKT+
+ LVctoAwZUoOMaUMnUTDfimU+5Ij7lsq/zhb5WAhWp8v4lMtFtKCC6pAf85RfZyg8jBG9EXzwVYrUp2zZ
+ kCpYYKmGfwk8ttUqpNbkWzHqVxU6m3Minop/ny/7IksuGKQsq1DvsxX88pQqmJbdk851LPMsMiaZjPEv
+ UwooU44BcxvVpNgC0s8YnaRCs67hmy2889mdBoWDxmXT5vFMeBhVNm1IPWNYNYkjnT2plMST/VD7L58m
+ kbAeJVC9T2Mfyx78V+Ho36C8n8WRBtedmG7pJLpq5bgRdcWxd9WZPGNZyNDKpqnn9KnHdcm+Lv7tWcl/
+ BHYfyh1XrAYLaSdhZTx4BRezf2HnAuKkTjYdVUDDVvRi65hoyii+l49sn5RqmBCpmpQpn4HVLhKbl82b
+ pk81jts2T1u3zZp2zBOpoDN2A07bUCWvKjYK5Bv48u1LQOWylGXR3lVpzjocVGqY0xbcORdA/hid8xyZ
+ 8RgYcWBwTw5OOArWrm5sP9jdBd28fiC6QxCwH/dWheATA0WZl2mflyUvqJEFOuQVQuuiWe3EiRy6aWQN
+ KrxCO7oCltOGo0xYc1Ys+vmE/lkd+phKz6DS4ITO3Ize6qTx6qjxwrDhyqzB3DKCtSTTNv9X3dzvdfMH
+ G2eOkqel+uc1JpYIq6vmOxvmexuG2+v49WXcmoC0sWC6wbdZmXMVTF8VzDzdXvOH9mKApYSgFCB0IeS7
+ B72aWznPmDZuHNPN5SpmceUyWNIZbOkUlnTigFRsv1QoXfRHx2Hf5j++t/we3PVXVN/h2P6jgLaZ2CuV
+ yVZO61OMp0pFkEWDWo+Edx0L6/orjPJXRM9fUfSDEbQ/QygHA9pEAJ85olM7tgsX0wny0chYQOSiIOMo
+ OrFdsMQe7RQaMo0G1gcBlaNz+nFA6+mALQegnxzD1jEAz3Kmj7vRR90Gxj1G+V7gV30gs8EpvEr5Z5Hm
+ lxyND9lqwXUoQG2M74T5FIu9SD30PPXo4+Sjb/NlQGB+nScd1ID2LdP6Uqb5pVTjWbLEg6jDr5JFP6RL
+ BFVoRtWjA0oR8S3W4bUWAWWkgHJUYhdxP7XSjnqTKRNYi4voMHqXLf89X7F6wJI+51zHNslo1wmqVPhW
+ IuVbIuFfLhdYqhJejoipJEWWETLbzTM7jdIpuOx+bEqfVnSPUkSPDGi9SmHJgl9Wzrgi2Ob1qfL4zxaV
+ aLp2xjCseEa2ZOZQyfTfBVOiOWNyWcPKGUPqKRyNfND2ueLAWDnLXr1In3Upo5OyOrUS2tX8aiTflv3x
+ tfG31EGJ0mn54lGZHKZUUpdkcrdiKgOWRNdK6tHO6kOUczFNI4TOaSJIZrfMS9QtHCubl8ifVSxd1q3b
+ MKqds66bdKyetC3m6RcOagIAAXlDsXVNqmlJumZOtmpCrmUOzG7KM9YlBrdlpiGtuV2DuU2QFHKdFDhN
+ LNoMz5sxhtFtfSq0IcTEovXa9pV/al4gYRLxT87k8S7kxN/FDG0rM7cV6VsqXeuwzk1C64pB7Zxh5ZRx
+ 5YRB06xB+wKJzMeQBcjaKeXGGfnWKcnGoaN1/YfoI8p8gcHu8gkQ0bfmnbaXbBZWcGyBbPvcX/Vzv9Xx
+ /6ydO9Q0J9a9qDy0iuGvG21vm0GQKSQkbW5gNleJW2umK6tma2sOezu3drae721/3lz7srX2HdoNgaBg
+ CPoEQc+3oMsj61bgXy8cVcocks4akswalkoblk4dkU4Zl0sekw8bEP/RdfB7x+/+lN9C6L9H0v9M7BMr
+ HdchL5p2CMyap/RqR7GVw+jiEVg8QySqByz2AdSTY8DYJLBATkYhsgcWS0Mn0QhxFHRQo5p/rWJQk3J4
+ m0osRSO1X2c/89iHyWZicgYweWxcLqBi9KKyerGFHNAAY9055tA9eraL69I34jGzdG1i1uNAVrObf5HC
+ +1TlT5mw95nq30o0I1p1AmoVPhdJPE87/DT5yMsMcdCT+CJT5mWW4utMtceJCh8KYN/KEa8z5D9lK7xL
+ k3yTLPo9TzGqBhVUovstE/M1Ax9cZuxXgPhWoprWq5/LNv+UB4ZKAYbO7mO+8rsU8YxWbB3bIp+CSm6F
+ BVXL+ZVLgdbfoHqFgCp5/xKliFpsRA0hsh4X24EDNXlw7/5Wefhn4/HIHvmYHtlUllwiUxz0nQe1ir3O
+ FfWtAts1dDIHkVUL8BqBeOH47wXjoqUz8NIZdP4EGmzijWeo5XEAa9dxcvvFCvRtRPCIMnq2mG4c1aj+
+ vea4T/X/hHcezGJLgD6wnH7phC7xJKpiVr8uqFMkUtSA82meIA2snBzasqXOY+onpavmjhfOHk8bE8sY
+ Vyzl4xuX7Jv4Z8vGbHI5hAyQ1h2WLpuVqOCLlc9KlE7KFA3JVI/Ldy6q9q4qD22q8iH0OmS6Bzlv751f
+ 3nBa2LCYXiWyZjQoI9Kdw/LsedzitssedAeCQEeADyj0CqHr25DtGkSYgTSYmzLUNTnKBqx1BV09iyqe
+ QJVNYeoFuI41dNeGduOSXMmUSPbokeKpY2Xjh0rZv1f1/UUbUeEvWUC7HtD2LWjnCbR3c23Tjjuv1T52
+ tGb09/LR30pH/6qbkeha0GAv4WfXTLd3rCHIQrhDWltFrazg1zeNljdNBGs2S+sXBEs3Bgc9u9rPdLc7
+ jbG8N5efQ8JHQujmmtCdu2rSPAMvHJVLAVgetnjaoFTqsEzaqEzKiEzqmHIMV96/+9i39j/82n8LoPwe
+ 0f0XYN5TViwnII9Z6NLk7oXhddeRnXN9m6dKRjSTesWTB8QzhqSzxxTTeCopLHhCLyqOhomjYMLbdb7X
+ KvlUSvk3A/q+WlSHShwF1E11s3sJQOuZTGQaA5ECEF9dyOgWncQOTCHNpIxqnt1IyqkjNFKt2GMunCGH
+ /wd6M4989rNkxAAAAABJRU5ErkJggg==
+</value>
+ </data>
+ <data name="panelHeader.BackgroundImage" type="System.Drawing.Bitmap, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
+ <value>
+ iVBORw0KGgoAAAANSUhEUgAAAAcAAABWCAIAAACM4hvWAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8
+ YQUAAAAgY0hSTQAAeiYAAICEAAD6AAAAgOgAAHUwAADqYAAAOpgAABdwnLpRPAAAAAlwSFlzAAAOxAAA
+ DsQBlSsOGwAABnlJREFUSEstlFlvG9cVx6/0Bbxq4b7NcBZKceA4TRAkafoBnMiy5TayaCMFWqDoQ4H2
+ KZZkiZydiyQuWl03iR2L2i2JywxnOCuHHJKS7TZp2iBAP0Qf8hQgyJUT4GCA+5v//M85c++5fdI/+f//
+ +L/vv/8B9Pf/2Af6+vr6+/vBhnoz136NUUhKiyW1Udq4wlivg8/NKb6Ozxyg94+wT8vYp8f4dIUAX1jx
+ VIN8cITNVLDpCna/gs3WCPC4GU9r5FwZe1DFZ2v4TI2Yk2Lgy9bdrDGSqOJJiZyXCPhiTiLBUye+aMWo
+ GpaUiESdmKvjCZkEm87dJYukqmhCxOYlbF7GEgoBaXzRJF9p8VcUp9QY2OzE861RViZhyUmFSMLCId3q
+ 3i06r3EKwSokrZK0PsLoMVDqxgttkqlHmQbO6gRrxjhzBDpMFZwYK2OMgv1CrRFQ6kwVOzFeI1kVp3Wc
+ NUjOgtm6Hxe6uHC2xhgTp034xECpN5nvYIIeZU2MbeKMFaVNFOy8iBd7BKdhtIbCNQymGQU7L+8Vu9AX
+ pzSUMaMQcTYKtp7fyzkEp2K0jlEGCh14Owq2T+8uOTir4ZxF0DqUo1wTBaXTqWwLpRswG84YEGFCCwel
+ F1MLDkopEVpDGCPKW3imRYLS8zuQ0o0IpaO0DimRsSE9mVxqR9kGRjWwJHQ3CMHEwdbJZB5SBaMUPAmb
+ 1jBWQ8HuyZ3lNibAH6aRjE5Ayr2i8RWHSKkEr8d4IwYdeAP21ouvOmS6gfMqwekwMN7AfqGCAj/EeB3n
+ 9WjqLFtnarlFpOQo38B4DRO0aAZqt7rxYgsXIFVQXkXSWjT7My3YOC+hvIzwCpJRo4s6Dna68WUb48Ww
+ IIfTDWRRw4vmKNjrTK3ZWEoKpeRw6mcKd3O/G3/YIjL1SFqJpBtoVo0uGTjYd+48tPFsHcmc0UhaQ7IG
+ CunkmhXNSJG0DK2DvOJPa0Fw0L2z3kQyUkiQQ7wS4GQvr/og/Xi9GUqLfl4KMJKfkTyc7AYHvd+u28GU
+ 6OVEHyN6GdHFSsOwslsrlo+vupmKm666WdHNiMNgq3k9rwxyZQ8Do+aBiJWGQMm8npMHmWcu+hiq3Iw0
+ xNaHwHZzLN9w04du6niYqg7S4mVWvAy2W+M5xUMfuqjycLJ6mREvsrULkN7MNbyQ0pWhZO0SK13k6+fB
+ Vmsip/qZYw9VHpgvn2PEc2d0s3VzSfMyxy6qMpCsnufkc4J8ATy1byyqbqYMUw2w9csZfSCjDUDteE73
+ CqKXl4fZ+qWUNpBRB0CpfbNg+SEVZDfU8sqlFKTbnYkVO5Sq+3nZzSuDEKWhQ6l9e9mOZBvhlOrLGp4F
+ czhrDoKnzVsFI7ygohk9mG16l2xXvuUCT61bOS2YbUAaXrADha5vpeuBdDwPVQ00pYUW26G10/D6SQCU
+ 7I/ypj8F91EPLzmRtVNk4yQMtttjBcvLK6GMhS610eUusuqEzmix6WFlH2+Esna46ESKVhDs9m7kmi5K
+ dHNGINsKFdpIEc7x/uk4LIWquwTLv+hALVrQ4XT3PspZA0lxkNM92VYwf9ZRCJ7J6wva+UTlIqe5M61A
+ xvCxVQ/0HSu2XLQ4yBuetO0XNC9d9ULt+GonICh+WEC+h6bNECMFwU7n9mobTSmRTBMtnpKwF1YMg93u
+ 1GqLFGREMCIZG+FUeAbhZHUmCxbCSQjXQBJ1H90IpfQRsNUbzze9rBhklEii5kvWA4JGgO2TD3PNIbrm
+ ZeRQUvTPHA1TYhDsv/xwxRnkJDcleuZF7/Th0OyhC9T+M/74K19W8ybKQ4my58GRa/bZMJC/nSh9E1rU
+ fFTFM3/knd5zTe8Pg+q/bn1x6s/KAaYSnt71/eWzC397chEcODfWTY9wHOCOcb76+uwu8tfHl0BJH8uL
+ bv5ZeEG69uXLse3/TqSlUbCl32Z3BujtSKb8ZkH9ze43v9tovgcOnU+yz/xUCaF3R2ee4CvqBxvmr0G5
+ +/tVeH3vhOc2I/efRNKVN3LKr0D9xR//YRDcvn+25JnfQxeUN5f0t4D19Z+rX7+zphDUno+rEKvtdzd6
+ 74PmV38yvvtg076WPoxsNN961Hvv7513gP78D/K/3/1MvbJwFH3cef9h8+1Hztug5tzb7VzNl/HUAZYT
+ ry6rVx8518COObGmIAtlkt0nkntYUX/j89NrYNeeXCyHEpsIvRNL7OCpKrnevvITI0lytudA8kUAAAAA
+ SUVORK5CYII=
+</value>
+ </data>
+ <metadata name="saveFileDialogExport.TrayLocation" type="System.Drawing.Point, System.Drawing, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a">
+ <value>17, 17</value>
+ </metadata>
+ <metadata name="menuStrip1.TrayLocation" type="System.Drawing.Point, System.Drawing, Version=2.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a">
+ <value>171, 17</value>
+ </metadata>
+</root> \ No newline at end of file
diff --git a/chromium/tools/stats_viewer/win32.cs b/chromium/tools/stats_viewer/win32.cs
new file mode 100644
index 00000000000..5aaf231af22
--- /dev/null
+++ b/chromium/tools/stats_viewer/win32.cs
@@ -0,0 +1,50 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+using System;
+using System.Collections.Generic;
+using System.Runtime.InteropServices;
+using System.Text;
+
+namespace StatsViewer {
+ /// <summary>
+ /// Win32 API constants, structs, and wrappers for access via C#.
+ /// </summary>
+ class Win32 {
+ #region Constants
+ public enum MapAccess {
+ FILE_MAP_COPY = 0x0001,
+ FILE_MAP_WRITE = 0x0002,
+ FILE_MAP_READ = 0x0004,
+ FILE_MAP_ALL_ACCESS = 0x001f,
+ }
+
+ public const int GENERIC_READ = unchecked((int)0x80000000);
+ public const int GENERIC_WRITE = unchecked((int)0x40000000);
+ public const int OPEN_ALWAYS = 4;
+ public static readonly IntPtr INVALID_HANDLE_VALUE = new IntPtr(-1);
+ #endregion
+
+ [DllImport("kernel32", SetLastError=true, CharSet=CharSet.Auto)]
+ public static extern IntPtr CreateFile (
+ String lpFileName, int dwDesiredAccess, int dwShareMode,
+ IntPtr lpSecurityAttributes, int dwCreationDisposition,
+ int dwFlagsAndAttributes, IntPtr hTemplateFile);
+
+ [DllImport("kernel32", SetLastError=true)]
+ public static extern IntPtr MapViewOfFile (
+ IntPtr hFileMappingObject, int dwDesiredAccess, int dwFileOffsetHigh,
+ int dwFileOffsetLow, int dwNumBytesToMap);
+
+ [DllImport("kernel32", SetLastError=true, CharSet=CharSet.Auto)]
+ public static extern IntPtr OpenFileMapping (
+ int dwDesiredAccess, bool bInheritHandle, String lpName);
+
+ [DllImport("kernel32", SetLastError=true)]
+ public static extern bool UnmapViewOfFile (IntPtr lpBaseAddress);
+
+ [DllImport("kernel32", SetLastError = true)]
+ public static extern bool CloseHandle(IntPtr handle);
+ }
+}
diff --git a/chromium/tools/strict_enum_value_checker/OWNERS b/chromium/tools/strict_enum_value_checker/OWNERS
new file mode 100644
index 00000000000..0d6b2067616
--- /dev/null
+++ b/chromium/tools/strict_enum_value_checker/OWNERS
@@ -0,0 +1,3 @@
+rdevlin.cronin@chromium.org
+rockot@chromium.org
+
diff --git a/chromium/tools/strict_enum_value_checker/changed_file_1.h b/chromium/tools/strict_enum_value_checker/changed_file_1.h
new file mode 100644
index 00000000000..e74e4083ff5
--- /dev/null
+++ b/chromium/tools/strict_enum_value_checker/changed_file_1.h
@@ -0,0 +1,30 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MOCK_ENUM_H
+#define MOCK_ENUM_H
+
+// Here is our mock enum. Beyond testing it is completely meaningless.
+// MockEnum follows strict rules for valid modifications:
+// 1. NO reordering of entries
+// 2. NO deletions of entries
+// 3. New entries must be added just before mBoundary, never after
+//
+enum MockEnum {
+ mEntry1,
+ mEntry2,
+ mData1,
+ mData2,
+ mEntry3,
+ mInfo1,
+ mData3,
+ mError1,
+ mFunction1,
+ mInfo2,
+ mData4,
+ mValidInsertion1,
+ mBoundary // Do not add below here
+};
+
+#endif
diff --git a/chromium/tools/strict_enum_value_checker/changed_file_10.h b/chromium/tools/strict_enum_value_checker/changed_file_10.h
new file mode 100644
index 00000000000..d7b370d764d
--- /dev/null
+++ b/chromium/tools/strict_enum_value_checker/changed_file_10.h
@@ -0,0 +1,23 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MOCK_ENUM_H
+#define MOCK_ENUM_H
+
+enum MockEnum {
+ mEntry1,
+ mEntry2,
+ mData1,
+ mData2,
+ mEntry3,
+ mInfo1,
+ mData3,
+ mError1,
+ mFunction1,
+ mInfo2,
+ mData4,
+ mBoundary // Do not add below here
+};
+
+#endif
diff --git a/chromium/tools/strict_enum_value_checker/changed_file_2.h b/chromium/tools/strict_enum_value_checker/changed_file_2.h
new file mode 100644
index 00000000000..04a801bf5d8
--- /dev/null
+++ b/chromium/tools/strict_enum_value_checker/changed_file_2.h
@@ -0,0 +1,28 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MOCK_ENUM_H
+#define MOCK_ENUM_H
+
+// Here is our mock enum. Beyond testing it is completely meaningless.
+// MockEnum follows strict rules for valid modifications:
+// 1. NO reordering of entries
+// 2. NO deletions of entries
+// 3. New entries must be added just before mBoundary, never after
+//
+enum MockEnum {
+ mEntry1,
+ mEntry2,
+ mData1,
+ mData2,
+ mEntry3,
+ mInfo1,
+ mData3,
+ mFunction1,
+ mInfo2,
+ mData4,
+ mBoundary // Do not add below here
+};
+
+#endif
diff --git a/chromium/tools/strict_enum_value_checker/changed_file_3.h b/chromium/tools/strict_enum_value_checker/changed_file_3.h
new file mode 100644
index 00000000000..f839c142322
--- /dev/null
+++ b/chromium/tools/strict_enum_value_checker/changed_file_3.h
@@ -0,0 +1,29 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MOCK_ENUM_H
+#define MOCK_ENUM_H
+
+// Here is our mock enum. Beyond testing it is completely meaningless.
+// MockEnum follows strict rules for valid modifications:
+// 1. NO reordering of entries
+// 2. NO deletions of entries
+// 3. New entries must be added just before mBoundary, never after
+//
+enum MockEnum {
+ mEntry1,
+ mEntry2,
+ mData1,
+ mData2,
+ mEntry3,
+ mInfo1,
+ mData3,
+ mErrata1,
+ mFunction1,
+ mInfo2,
+ mData4,
+ mBoundary // Do not add below here
+};
+
+#endif
diff --git a/chromium/tools/strict_enum_value_checker/changed_file_4.h b/chromium/tools/strict_enum_value_checker/changed_file_4.h
new file mode 100644
index 00000000000..157153edf9a
--- /dev/null
+++ b/chromium/tools/strict_enum_value_checker/changed_file_4.h
@@ -0,0 +1,26 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MOCK_ENUM_H
+#define MOCK_ENUM_H
+
+// Here is our mock enum. Beyond testing it is completely meaningless.
+// MockEnum follows strict rules for valid modifications:
+// 1. NO reordering of entries
+// 2. NO deletions of entries
+// 3. New entries must be added just before mBoundary, never after
+//
+ mData1,
+ mData2,
+ mEntry3,
+ mInfo1,
+ mData3,
+ mError1,
+ mFunction1,
+ mInfo2,
+ mData4,
+ mBoundary // Do not add below here
+};
+
+#endif
diff --git a/chromium/tools/strict_enum_value_checker/changed_file_5.h b/chromium/tools/strict_enum_value_checker/changed_file_5.h
new file mode 100644
index 00000000000..b7882890452
--- /dev/null
+++ b/chromium/tools/strict_enum_value_checker/changed_file_5.h
@@ -0,0 +1,27 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MOCK_ENUM_H
+#define MOCK_ENUM_H
+
+// Here is our mock enum. Beyond testing it is completely meaningless.
+// MockEnum follows strict rules for valid modifications:
+// 1. NO reordering of entries
+// 2. NO deletions of entries
+// 3. New entries must be added just before mBoundary, never after
+//
+enum MockEnum {
+ mEntry1,
+ mEntry2,
+ mData1,
+ mData2,
+ mEntry3,
+ mInfo1,
+ mData3,
+ mError1,
+ mFunction1,
+ mInfo2,
+ mData4,
+
+#endif
diff --git a/chromium/tools/strict_enum_value_checker/changed_file_6.h b/chromium/tools/strict_enum_value_checker/changed_file_6.h
new file mode 100644
index 00000000000..7bbbf837520
--- /dev/null
+++ b/chromium/tools/strict_enum_value_checker/changed_file_6.h
@@ -0,0 +1,29 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MOCK_ENUM_H
+#define MOCK_ENUM_H
+
+// Here is our mock enum. Beyond testing it is completely meaningless.
+// MockEnum follows strict rules for valid modifications:
+// 1. NO reordering of entries
+// 2. NO deletions of entries
+// 3. New entries must be added just before mBoundary, never after
+//
+ mBoundary // Do not add below here
+enum MockEnum {
+ mEntry1,
+ mEntry2,
+ mData1,
+ mData2,
+ mEntry3,
+ mInfo1,
+ mData3,
+ mError1,
+ mFunction1,
+ mInfo2,
+ mData4,
+};
+
+#endif
diff --git a/chromium/tools/strict_enum_value_checker/changed_file_7.h b/chromium/tools/strict_enum_value_checker/changed_file_7.h
new file mode 100644
index 00000000000..61a2792e73c
--- /dev/null
+++ b/chromium/tools/strict_enum_value_checker/changed_file_7.h
@@ -0,0 +1,33 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MOCK_ENUM_H
+#define MOCK_ENUM_H
+
+// Here is our mock enum. Beyond testing it is completely meaningless.
+// MockEnum follows strict rules for valid modifications:
+// 1. NO reordering of entries
+// 2. NO deletions of entries
+// 3. New entries must be added just before mBoundary, never after
+//
+enum MockEnum {
+ mEntry1,
+ mEntry2,
+ mData1,
+ mData2,
+ mEntry3,
+ mInfo1,
+ mData3,
+ mInvalid1,
+ mError1,
+ mFunction1,
+ mInvalid2,
+ mInvalid3,
+ mInfo2,
+ mInvalid4,
+ mData4,
+ mBoundary // Do not add below here
+};
+
+#endif
diff --git a/chromium/tools/strict_enum_value_checker/changed_file_8.h b/chromium/tools/strict_enum_value_checker/changed_file_8.h
new file mode 100644
index 00000000000..56e9483d720
--- /dev/null
+++ b/chromium/tools/strict_enum_value_checker/changed_file_8.h
@@ -0,0 +1,30 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MOCK_ENUM_H
+#define MOCK_ENUM_H
+
+// Here is our mock enum. Beyond testing it is completely meaningless.
+// MockEnum follows strict rules for valid modifications:
+// 1. NO reordering of entries
+// 2. NO deletions of entries
+// 3. New entries must be added just before mBoundary, never after
+//
+enum MockEnum {
+ mEntry1,
+ mEntry2,
+ mData1,
+ mData2,
+ mInsertion,
+ mEntry3,
+ mInfo1,
+ mData3,
+ mError1,
+ mFunction1,
+ mInfo2,
+ mData4,
+ mBoundary // Do not add below here
+};
+
+#endif
diff --git a/chromium/tools/strict_enum_value_checker/changed_file_9.h b/chromium/tools/strict_enum_value_checker/changed_file_9.h
new file mode 100644
index 00000000000..bea98f0eecb
--- /dev/null
+++ b/chromium/tools/strict_enum_value_checker/changed_file_9.h
@@ -0,0 +1,32 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MOCK_ENUM_H
+#define MOCK_ENUM_H
+
+// Here is our mock enum. Beyond testing it is completely meaningless.
+// MockEnum follows strict rules for valid modifications:
+// 1. NO reordering of entries
+// 2. NO deletions of entries
+// 3. New entries must be added just before mBoundary, never after
+//
+enum MockEnum {
+ mEntry1,
+ mEntry2,
+ mData1,
+ mData2,
+ mEntry3,
+ mInfo1,
+ mData3,
+ mError1,
+ mFunction1,
+ mInfo2,
+ mData4,
+ mValidInsertion1,
+ mValidInsertion2,
+ mValidInsertion3,
+ mBoundary // Do not add below here
+};
+
+#endif
diff --git a/chromium/tools/strict_enum_value_checker/mock_enum.h b/chromium/tools/strict_enum_value_checker/mock_enum.h
new file mode 100644
index 00000000000..86e80b7f79d
--- /dev/null
+++ b/chromium/tools/strict_enum_value_checker/mock_enum.h
@@ -0,0 +1,29 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MOCK_ENUM_H
+#define MOCK_ENUM_H
+
+// Here is our mock enum. Beyond testing it is completely meaningless.
+// MockEnum follows strict rules for valid modifications:
+// 1. NO reordering of entries
+// 2. NO deletions of entries
+// 3. New entries must be added just before mBoundary, never after
+//
+enum MockEnum {
+ mEntry1,
+ mEntry2,
+ mData1,
+ mData2,
+ mEntry3,
+ mInfo1,
+ mData3,
+ mError1,
+ mFunction1,
+ mInfo2,
+ mData4,
+ mBoundary // Do not add below here
+};
+
+#endif
diff --git a/chromium/tools/strict_enum_value_checker/strict_enum_value_checker.py b/chromium/tools/strict_enum_value_checker/strict_enum_value_checker.py
new file mode 100644
index 00000000000..22a0276ee09
--- /dev/null
+++ b/chromium/tools/strict_enum_value_checker/strict_enum_value_checker.py
@@ -0,0 +1,284 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+class StrictEnumValueChecker(object):
+ """Verify that changes to enums are valid.
+
+ This class is used to check enums where reordering or deletion is not allowed,
+ and additions must be at the end of the enum, just prior to some "boundary"
+ entry. See comments at the top of the "extension_function_histogram_value.h"
+ file in chrome/browser/extensions for an example what are considered valid
+ changes. There are situations where this class gives false positive warnings,
+ i.e. it warns even though the edit is legitimate. Since the class warns using
+ prompt warnings, the user can always choose to continue. The main point is to
+ attract the attention to all (potentially or not) invalid edits.
+
+ """
+ def __init__(self, input_api, output_api, start_marker, end_marker, path):
+ self.input_api = input_api
+ self.output_api = output_api
+ self.start_marker = start_marker
+ self.end_marker = end_marker
+ self.path = path
+ self.results = []
+
+ class EnumRange(object):
+ """Represents a range of line numbers (1-based)"""
+ def __init__(self, first_line, last_line):
+ self.first_line = first_line
+ self.last_line = last_line
+
+ def Count(self):
+ return self.last_line - self.first_line + 1
+
+ def Contains(self, line_num):
+ return self.first_line <= line_num and line_num <= self.last_line
+
+ def LogInfo(self, message):
+ self.input_api.logging.info(message)
+ return
+
+ def LogDebug(self, message):
+ self.input_api.logging.debug(message)
+ return
+
+ def ComputeEnumRangeInContents(self, contents):
+ """Returns an |EnumRange| object representing the line extent of the
+ enum members in |contents|. The line numbers are 1-based,
+ compatible with line numbers returned by AffectedFile.ChangeContents().
+ |contents| is a list of strings reprenting the lines of a text file.
+
+ If either start_marker or end_marker cannot be found in
+ |contents|, returns None and emits detailed warnings about the problem.
+
+ """
+ first_enum_line = 0
+ last_enum_line = 0
+ line_num = 1 # Line numbers are 1-based
+ for line in contents:
+ if line.startswith(self.start_marker):
+ first_enum_line = line_num + 1
+ elif line.startswith(self.end_marker):
+ last_enum_line = line_num
+ line_num += 1
+
+ if first_enum_line == 0:
+ self.EmitWarning("The presubmit script could not find the start of the "
+ "enum definition (\"%s\"). Did the enum definition "
+ "change?" % self.start_marker)
+ return None
+
+ if last_enum_line == 0:
+ self.EmitWarning("The presubmit script could not find the end of the "
+ "enum definition (\"%s\"). Did the enum definition "
+ "change?" % self.end_marker)
+ return None
+
+ if first_enum_line >= last_enum_line:
+ self.EmitWarning("The presubmit script located the start of the enum "
+ "definition (\"%s\" at line %d) *after* its end "
+ "(\"%s\" at line %d). Something is not quite right."
+ % (self.start_marker, first_enum_line,
+ self.end_marker, last_enum_line))
+ return None
+
+ self.LogInfo("Line extent of (\"%s\") enum definition: "
+ "first_line=%d, last_line=%d."
+ % (self.start_marker, first_enum_line, last_enum_line))
+ return self.EnumRange(first_enum_line, last_enum_line)
+
+ def ComputeEnumRangeInNewFile(self, affected_file):
+ return self.ComputeEnumRangeInContents(affected_file.NewContents())
+
+ def GetLongMessage(self, local_path):
+ return str("The file \"%s\" contains the definition of the "
+ "(\"%s\") enum which should be edited in specific ways "
+ "only - *** read the comments at the top of the header file ***"
+ ". There are changes to the file that may be incorrect and "
+ "warrant manual confirmation after review. Note that this "
+ "presubmit script can not reliably report the nature of all "
+ "types of invalid changes, especially when the diffs are "
+ "complex. For example, an invalid deletion may be reported "
+ "whereas the change contains a valid rename."
+ % (local_path, self.start_marker))
+
+ def EmitWarning(self, message, line_number=None, line_text=None):
+ """Emits a presubmit prompt warning containing the short message
+ |message|. |item| is |LOCAL_PATH| with optional |line_number| and
+ |line_text|.
+
+ """
+ if line_number is not None and line_text is not None:
+ item = "%s(%d): %s" % (self.path, line_number, line_text)
+ elif line_number is not None:
+ item = "%s(%d)" % (self.path, line_number)
+ else:
+ item = self.path
+ long_message = self.GetLongMessage(self.path)
+ self.LogInfo(message)
+ self.results.append(
+ self.output_api.PresubmitPromptWarning(message, [item], long_message))
+
+ def CollectRangesInsideEnumDefinition(self, affected_file,
+ first_line, last_line):
+ """Returns a list of triplet (line_start, line_end, line_text) of ranges of
+ edits changes. The |line_text| part is the text at line |line_start|.
+ Since it used only for reporting purposes, we do not need all the text
+ lines in the range.
+
+ """
+ results = []
+ previous_line_number = 0
+ previous_range_start_line_number = 0
+ previous_range_start_text = ""
+
+ def addRange():
+ tuple = (previous_range_start_line_number,
+ previous_line_number,
+ previous_range_start_text)
+ results.append(tuple)
+
+ for line_number, line_text in affected_file.ChangedContents():
+ if first_line <= line_number and line_number <= last_line:
+ self.LogDebug("Line change at line number " + str(line_number) + ": " +
+ line_text)
+ # Start a new interval if none started
+ if previous_range_start_line_number == 0:
+ previous_range_start_line_number = line_number
+ previous_range_start_text = line_text
+ # Add new interval if we reached past the previous one
+ elif line_number != previous_line_number + 1:
+ addRange()
+ previous_range_start_line_number = line_number
+ previous_range_start_text = line_text
+ previous_line_number = line_number
+
+ # Add a last interval if needed
+ if previous_range_start_line_number != 0:
+ addRange()
+ return results
+
+ def CheckForFileDeletion(self, affected_file):
+ """Emits a warning notification if file has been deleted """
+ if not affected_file.NewContents():
+ self.EmitWarning("The file seems to be deleted in the changelist. If "
+ "your intent is to really delete the file, the code in "
+ "PRESUBMIT.py should be updated to remove the "
+ "|StrictEnumValueChecker| class.");
+ return False
+ return True
+
+ def GetDeletedLinesFromScmDiff(self, affected_file):
+ """Return a list of of line numbers (1-based) corresponding to lines
+ deleted from the new source file (if they had been present in it). Note
+ that if multiple contiguous lines have been deleted, the returned list will
+ contain contiguous line number entries. To prevent false positives, we
+ return deleted line numbers *only* from diff chunks which decrease the size
+ of the new file.
+
+ Note: We need this method because we have access to neither the old file
+ content nor the list of "delete" changes from the current presubmit script
+ API.
+
+ """
+ results = []
+ line_num = 0
+ deleting_lines = False
+ for line in affected_file.GenerateScmDiff().splitlines():
+ # Parse the unified diff chunk optional section heading, which looks like
+ # @@ -l,s +l,s @@ optional section heading
+ m = self.input_api.re.match(
+ r"^@@ \-([0-9]+)\,([0-9]+) \+([0-9]+)\,([0-9]+) @@", line)
+ if m:
+ old_line_num = int(m.group(1))
+ old_size = int(m.group(2))
+ new_line_num = int(m.group(3))
+ new_size = int(m.group(4))
+ line_num = new_line_num
+ # Return line numbers only from diff chunks decreasing the size of the
+ # new file
+ deleting_lines = old_size > new_size
+ continue
+ if not line.startswith("-"):
+ line_num += 1
+ if deleting_lines and line.startswith("-") and not line.startswith("--"):
+ results.append(line_num)
+ return results
+
+ def CheckForEnumEntryDeletions(self, affected_file):
+ """Look for deletions inside the enum definition. We currently use a
+ simple heuristics (not 100% accurate): if there are deleted lines inside
+ the enum definition, this might be a deletion.
+
+ """
+ range_new = self.ComputeEnumRangeInNewFile(affected_file)
+ if not range_new:
+ return False
+
+ is_ok = True
+ for line_num in self.GetDeletedLinesFromScmDiff(affected_file):
+ if range_new.Contains(line_num):
+ self.EmitWarning("It looks like you are deleting line(s) from the "
+ "enum definition. This should never happen.",
+ line_num)
+ is_ok = False
+ return is_ok
+
+ def CheckForEnumEntryInsertions(self, affected_file):
+ range = self.ComputeEnumRangeInNewFile(affected_file)
+ if not range:
+ return False
+
+ first_line = range.first_line
+ last_line = range.last_line
+
+ # Collect the range of changes inside the enum definition range.
+ is_ok = True
+ for line_start, line_end, line_text in \
+ self.CollectRangesInsideEnumDefinition(affected_file,
+ first_line,
+ last_line):
+ # The only edit we consider valid is adding 1 or more entries *exactly*
+ # at the end of the enum definition. Every other edit inside the enum
+ # definition will result in a "warning confirmation" message.
+ #
+ # TODO(rpaquay): We currently cannot detect "renames" of existing entries
+ # vs invalid insertions, so we sometimes will warn for valid edits.
+ is_valid_edit = (line_end == last_line - 1)
+
+ self.LogDebug("Edit range in new file at starting at line number %d and "
+ "ending at line number %d: valid=%s"
+ % (line_start, line_end, is_valid_edit))
+
+ if not is_valid_edit:
+ self.EmitWarning("The change starting at line %d and ending at line "
+ "%d is *not* located *exactly* at the end of the "
+ "enum definition. Unless you are renaming an "
+ "existing entry, this is not a valid change, as new "
+ "entries should *always* be added at the end of the "
+ "enum definition, right before the \"%s\" "
+ "entry." % (line_start, line_end, self.end_marker),
+ line_start,
+ line_text)
+ is_ok = False
+ return is_ok
+
+ def PerformChecks(self, affected_file):
+ if not self.CheckForFileDeletion(affected_file):
+ return
+ if not self.CheckForEnumEntryDeletions(affected_file):
+ return
+ if not self.CheckForEnumEntryInsertions(affected_file):
+ return
+
+ def ProcessHistogramValueFile(self, affected_file):
+ self.LogInfo("Start processing file \"%s\"" % affected_file.LocalPath())
+ self.PerformChecks(affected_file)
+ self.LogInfo("Done processing file \"%s\"" % affected_file.LocalPath())
+
+ def Run(self):
+ for file in self.input_api.AffectedFiles(include_deletes=True):
+ if file.LocalPath() == self.path:
+ self.ProcessHistogramValueFile(file)
+ return self.results
diff --git a/chromium/tools/strict_enum_value_checker/strict_enum_value_checker_test.py b/chromium/tools/strict_enum_value_checker/strict_enum_value_checker_test.py
new file mode 100755
index 00000000000..4f95efe0cfd
--- /dev/null
+++ b/chromium/tools/strict_enum_value_checker/strict_enum_value_checker_test.py
@@ -0,0 +1,235 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import difflib
+import os
+import re
+import unittest
+
+from strict_enum_value_checker import StrictEnumValueChecker
+
+class MockLogging(object):
+ def __init__(self):
+ self.lines = []
+
+ def info(self, message):
+ self.lines.append(message)
+
+ def debug(self, message):
+ self.lines.append(message)
+
+class MockInputApi(object):
+ def __init__(self):
+ self.re = re
+ self.os_path = os.path
+ self.files = []
+ self.is_committing = False
+ self.logging = MockLogging()
+
+ def AffectedFiles(self, include_deletes=None):
+ return self.files
+
+
+class MockOutputApi(object):
+ class PresubmitResult(object):
+ def __init__(self, message, items=None, long_text=""):
+ self.message = message
+ self.items = items
+ self.long_text = long_text
+
+ class PresubmitError(PresubmitResult):
+ def __init__(self, message, items, long_text=""):
+ MockOutputApi.PresubmitResult.__init__(self, message, items, long_text)
+ self.type = "error"
+
+ class PresubmitPromptWarning(PresubmitResult):
+ def __init__(self, message, items, long_text=""):
+ MockOutputApi.PresubmitResult.__init__(self, message, items, long_text)
+ self.type = "warning"
+
+ class PresubmitNotifyResult(PresubmitResult):
+ def __init__(self, message, items, long_text=""):
+ MockOutputApi.PresubmitResult.__init__(self, message, items, long_text)
+ self.type = "notify"
+
+
+class MockFile(object):
+ def __init__(self, local_path, old_contents, new_contents):
+ self._local_path = local_path
+ self._new_contents = new_contents
+ self._old_contents = old_contents
+ self._cached_changed_contents = None
+
+ def ChangedContents(self):
+ return self._changed_contents
+
+ def NewContents(self):
+ return self._new_contents
+
+ def LocalPath(self):
+ return self._local_path
+
+ def IsDirectory(self):
+ return False
+
+ def GenerateScmDiff(self):
+ result = ""
+ for line in difflib.unified_diff(self._old_contents, self._new_contents,
+ self._local_path, self._local_path):
+ result += line
+ return result
+
+ # NOTE: This method is a copy of ChangeContents method of AffectedFile in
+ # presubmit_support.py
+ def ChangedContents(self):
+ """Returns a list of tuples (line number, line text) of all new lines.
+
+ This relies on the scm diff output describing each changed code section
+ with a line of the form
+
+ ^@@ <old line num>,<old size> <new line num>,<new size> @@$
+ """
+ if self._cached_changed_contents is not None:
+ return self._cached_changed_contents[:]
+ self._cached_changed_contents = []
+ line_num = 0
+
+ if self.IsDirectory():
+ return []
+
+ for line in self.GenerateScmDiff().splitlines():
+ m = re.match(r"^@@ [0-9\,\+\-]+ \+([0-9]+)\,[0-9]+ @@", line)
+ if m:
+ line_num = int(m.groups(1)[0])
+ continue
+ if line.startswith("+") and not line.startswith("++"):
+ self._cached_changed_contents.append((line_num, line[1:]))
+ if not line.startswith("-"):
+ line_num += 1
+ return self._cached_changed_contents[:]
+
+
+class MockChange(object):
+ def __init__(self, changed_files):
+ self._changed_files = changed_files
+
+ def LocalPaths(self):
+ return self._changed_files
+
+
+class StrictEnumValueCheckerTest(unittest.TestCase):
+ TEST_FILE_PATTERN = "changed_file_%s.h"
+ MOCK_FILE_LOCAL_PATH = "mock_enum.h"
+ START_MARKER = "enum MockEnum {"
+ END_MARKER = " mBoundary"
+
+ def _ReadTextFileContents(self, path):
+ """Given a path, returns a list of strings corresponding to the text lines
+ in the file. Reads files in text format.
+
+ """
+ fo = open(path, "r")
+ try:
+ contents = fo.readlines()
+ finally:
+ fo.close()
+ return contents
+
+ def _ReadInputFile(self):
+ return self._ReadTextFileContents("mock_enum.h")
+
+ def _PrepareTest(self, new_file_path):
+ old_contents = self._ReadInputFile()
+ if not new_file_path:
+ new_contents = []
+ else:
+ new_contents = self._ReadTextFileContents(new_file_path)
+ input_api = MockInputApi()
+ mock_file = MockFile(self.MOCK_FILE_LOCAL_PATH,
+ old_contents,
+ new_contents)
+ input_api.files.append(mock_file)
+ output_api = MockOutputApi()
+ return input_api, output_api
+
+ def _RunTest(self, new_file_path):
+ input_api, output_api = self._PrepareTest(new_file_path)
+ checker = StrictEnumValueChecker(input_api, output_api, self.START_MARKER,
+ self.END_MARKER, self.MOCK_FILE_LOCAL_PATH)
+ results = checker.Run()
+ return results
+
+ def testDeleteFile(self):
+ results = self._RunTest(new_file_path=None)
+ # TODO(rpaquay) How to check it's the expected warning?'
+ self.assertEquals(1, len(results),
+ "We should get a single warning about file deletion.")
+
+ def testSimpleValidEdit(self):
+ results = self._RunTest(self.TEST_FILE_PATTERN % "1")
+ # TODO(rpaquay) How to check it's the expected warning?'
+ self.assertEquals(0, len(results),
+ "We should get no warning for simple edits.")
+
+ def testSingleDeletionOfEntry(self):
+ results = self._RunTest(self.TEST_FILE_PATTERN % "2")
+ # TODO(rpaquay) How to check it's the expected warning?'
+ self.assertEquals(1, len(results),
+ "We should get a warning for an entry deletion.")
+
+ def testSingleRenameOfEntry(self):
+ results = self._RunTest(self.TEST_FILE_PATTERN % "3")
+ # TODO(rpaquay) How to check it's the expected warning?'
+ self.assertEquals(1, len(results),
+ "We should get a warning for an entry rename, even "
+ "though it is not optimal.")
+
+ def testMissingEnumStartOfEntry(self):
+ results = self._RunTest(self.TEST_FILE_PATTERN % "4")
+ # TODO(rpaquay) How to check it's the expected warning?'
+ self.assertEquals(1, len(results),
+ "We should get a warning for a missing enum marker.")
+
+ def testMissingEnumEndOfEntry(self):
+ results = self._RunTest(self.TEST_FILE_PATTERN % "5")
+ # TODO(rpaquay) How to check it's the expected warning?'
+ self.assertEquals(1, len(results),
+ "We should get a warning for a missing enum marker.")
+
+ def testInvertedEnumMarkersOfEntry(self):
+ results = self._RunTest(self.TEST_FILE_PATTERN % "6")
+ # TODO(rpaquay) How to check it's the expected warning?'
+ self.assertEquals(1, len(results),
+ "We should get a warning for inverted enum markers.")
+
+ def testMultipleInvalidEdits(self):
+ results = self._RunTest(self.TEST_FILE_PATTERN % "7")
+ # TODO(rpaquay) How to check it's the expected warning?'
+ self.assertEquals(3, len(results),
+ "We should get 3 warnings (one per edit).")
+
+ def testSingleInvalidInserts(self):
+ results = self._RunTest(self.TEST_FILE_PATTERN % "8")
+ # TODO(rpaquay) How to check it's the expected warning?'
+ self.assertEquals(1, len(results),
+ "We should get a warning for a single invalid "
+ "insertion inside the enum.")
+
+ def testMulitpleValidInserts(self):
+ results = self._RunTest(self.TEST_FILE_PATTERN % "9")
+ # TODO(rpaquay) How to check it's the expected warning?'
+ self.assertEquals(0, len(results),
+ "We should not get a warning mulitple valid edits")
+
+ def testSingleValidDeleteOutsideOfEnum(self):
+ results = self._RunTest(self.TEST_FILE_PATTERN % "10")
+ # TODO(rpaquay) How to check it's the expected warning?'
+ self.assertEquals(0, len(results),
+ "We should not get a warning for a deletion outside of "
+ "the enum")
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/symsrc/COPYING-pefile b/chromium/tools/symsrc/COPYING-pefile
new file mode 100644
index 00000000000..b7105fcef61
--- /dev/null
+++ b/chromium/tools/symsrc/COPYING-pefile
@@ -0,0 +1,27 @@
+Copyright (c) 2004, 2005, 2006 Ero Carrera <ero@dkbza.org>. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+this list of conditions and the following disclaimer in the documentation
+and/or other materials provided with the distribution.
+
+3. The name of the author may not be used to endorse or promote products
+derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
+OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
+IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
+OF SUCH DAMAGE.
+
+
diff --git a/chromium/tools/symsrc/README.chromium b/chromium/tools/symsrc/README.chromium
new file mode 100644
index 00000000000..0398fe0c89f
--- /dev/null
+++ b/chromium/tools/symsrc/README.chromium
@@ -0,0 +1,14 @@
+These files are related to the symbol and source server.
+
+This requires some Microsoft utilities. From a 32-bit WinDbg installation with
+source server support, copy the following files to this directory:
+ dbghelp.dll
+ pdbstr.exe
+ srctool.exe
+
+The follow files were copied from the BSD licensed pefile-1.2.9.1:
+ COPYING-pefile
+ pefile.py
+
+pefile is available from:
+ http://code.google.com/p/pefile/
diff --git a/chromium/tools/symsrc/img_fingerprint.py b/chromium/tools/symsrc/img_fingerprint.py
new file mode 100755
index 00000000000..3d6708e2a14
--- /dev/null
+++ b/chromium/tools/symsrc/img_fingerprint.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Retrieves an image's "fingerprint".
+
+This is used when retrieving the image from the symbol server. The .dll (or cab
+compressed .dl_) or .exe is expected at a path like:
+ foo.dll/FINGERPRINT/foo.dll
+"""
+
+import sys
+import pefile
+
+
+def GetImgFingerprint(filename):
+ """Returns the fingerprint for an image file"""
+ pe = pefile.PE(filename)
+ return "%08X%x" % (
+ pe.FILE_HEADER.TimeDateStamp, pe.OPTIONAL_HEADER.SizeOfImage)
+
+
+def main():
+ if len(sys.argv) != 2:
+ print "usage: file.dll"
+ return 1
+
+ print GetImgFingerprint(sys.argv[1])
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/symsrc/pdb_fingerprint_from_img.py b/chromium/tools/symsrc/pdb_fingerprint_from_img.py
new file mode 100755
index 00000000000..e99447541a7
--- /dev/null
+++ b/chromium/tools/symsrc/pdb_fingerprint_from_img.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This will retrieve a PDBs "fingerprint" from it's corresponding executable
+image (.dll or .exe). This is used when retrieving the PDB from the symbol
+server. The .pdb (or cab compressed .pd_) is expected at a path like:
+ foo.pdb/FINGERPRINT/foo.pdb
+
+We can retrieve the same information from the .PDB file itself, but this file
+format is much more difficult and undocumented. Instead, we can look at the
+DLL's reference to the PDB, and use that to retrieve the information."""
+
+import sys
+import pefile
+
+
+__CV_INFO_PDB70_format__ = ('CV_INFO_PDB70',
+ ('4s,CvSignature', '16s,Signature', 'L,Age'))
+
+__GUID_format__ = ('GUID',
+ ('L,Data1', 'H,Data2', 'H,Data3', '8s,Data4'))
+
+
+def GetPDBInfoFromImg(filename):
+ """Returns the PDB fingerprint and the pdb filename given an image file"""
+
+ pe = pefile.PE(filename)
+
+ for dbg in pe.DIRECTORY_ENTRY_DEBUG:
+ if dbg.struct.Type == 2: # IMAGE_DEBUG_TYPE_CODEVIEW
+ off = dbg.struct.AddressOfRawData
+ size = dbg.struct.SizeOfData
+ data = pe.get_memory_mapped_image()[off:off+size]
+
+ cv = pefile.Structure(__CV_INFO_PDB70_format__)
+ cv.__unpack__(data)
+ cv.PdbFileName = data[cv.sizeof():]
+ guid = pefile.Structure(__GUID_format__)
+ guid.__unpack__(cv.Signature)
+ guid.Data4_0 = ''.join("%02X" % ord(x) for x in guid.Data4[0:2])
+ guid.Data4_1 = ''.join("%02X" % ord(x) for x in guid.Data4[2:])
+
+ return ("%08X%04X%04X%s%s%d" % (
+ guid.Data1, guid.Data2, guid.Data3,
+ guid.Data4_0, guid.Data4_1, cv.Age),
+ cv.PdbFileName.split('\x00', 1)[0])
+
+ break
+
+
+def main():
+ if len(sys.argv) != 2:
+ print "usage: file.dll"
+ return 1
+
+ (fingerprint, filename) = GetPDBInfoFromImg(sys.argv[1])
+ print "%s %s" % (fingerprint, filename)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/symsrc/pefile.py b/chromium/tools/symsrc/pefile.py
new file mode 100644
index 00000000000..e22fd1ae245
--- /dev/null
+++ b/chromium/tools/symsrc/pefile.py
@@ -0,0 +1,3729 @@
+# -*- coding: Latin-1 -*-
+"""pefile, Portable Executable reader module
+
+
+All the PE file basic structures are available with their default names
+as attributes of the instance returned.
+
+Processed elements such as the import table are made available with lowercase
+names, to differentiate them from the upper case basic structure names.
+
+pefile has been tested against the limits of valid PE headers, that is, malware.
+Lots of packed malware attempt to abuse the format way beyond its standard use.
+To the best of my knowledge most of the abuses are handled gracefully.
+
+Copyright (c) 2005, 2006, 2007, 2008 Ero Carrera <ero@dkbza.org>
+
+All rights reserved.
+
+For detailed copyright information see the file COPYING in
+the root of the distribution archive.
+"""
+
+__author__ = 'Ero Carrera'
+__version__ = '1.2.9.1'
+__contact__ = 'ero@dkbza.org'
+
+
+import os
+import struct
+import time
+import math
+import re
+import exceptions
+import string
+import array
+
+sha1, sha256, sha512, md5 = None, None, None, None
+
+try:
+ import hashlib
+ sha1 = hashlib.sha1
+ sha256 = hashlib.sha256
+ sha512 = hashlib.sha512
+ md5 = hashlib.md5
+except ImportError:
+ try:
+ import sha
+ sha1 = sha.new
+ except ImportError:
+ pass
+ try:
+ import md5
+ md5 = md5.new
+ except ImportError:
+ pass
+
+
+fast_load = False
+
+IMAGE_DOS_SIGNATURE = 0x5A4D
+IMAGE_OS2_SIGNATURE = 0x454E
+IMAGE_OS2_SIGNATURE_LE = 0x454C
+IMAGE_VXD_SIGNATURE = 0x454C
+IMAGE_NT_SIGNATURE = 0x00004550
+IMAGE_NUMBEROF_DIRECTORY_ENTRIES= 16
+IMAGE_ORDINAL_FLAG = 0x80000000L
+IMAGE_ORDINAL_FLAG64 = 0x8000000000000000L
+OPTIONAL_HEADER_MAGIC_PE = 0x10b
+OPTIONAL_HEADER_MAGIC_PE_PLUS = 0x20b
+
+
+directory_entry_types = [
+ ('IMAGE_DIRECTORY_ENTRY_EXPORT', 0),
+ ('IMAGE_DIRECTORY_ENTRY_IMPORT', 1),
+ ('IMAGE_DIRECTORY_ENTRY_RESOURCE', 2),
+ ('IMAGE_DIRECTORY_ENTRY_EXCEPTION', 3),
+ ('IMAGE_DIRECTORY_ENTRY_SECURITY', 4),
+ ('IMAGE_DIRECTORY_ENTRY_BASERELOC', 5),
+ ('IMAGE_DIRECTORY_ENTRY_DEBUG', 6),
+ ('IMAGE_DIRECTORY_ENTRY_COPYRIGHT', 7),
+ ('IMAGE_DIRECTORY_ENTRY_GLOBALPTR', 8),
+ ('IMAGE_DIRECTORY_ENTRY_TLS', 9),
+ ('IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG', 10),
+ ('IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT', 11),
+ ('IMAGE_DIRECTORY_ENTRY_IAT', 12),
+ ('IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT', 13),
+ ('IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR',14),
+ ('IMAGE_DIRECTORY_ENTRY_RESERVED', 15) ]
+
+DIRECTORY_ENTRY = dict([(e[1], e[0]) for e in directory_entry_types]+directory_entry_types)
+
+
+image_characteristics = [
+ ('IMAGE_FILE_RELOCS_STRIPPED', 0x0001),
+ ('IMAGE_FILE_EXECUTABLE_IMAGE', 0x0002),
+ ('IMAGE_FILE_LINE_NUMS_STRIPPED', 0x0004),
+ ('IMAGE_FILE_LOCAL_SYMS_STRIPPED', 0x0008),
+ ('IMAGE_FILE_AGGRESIVE_WS_TRIM', 0x0010),
+ ('IMAGE_FILE_LARGE_ADDRESS_AWARE', 0x0020),
+ ('IMAGE_FILE_16BIT_MACHINE', 0x0040),
+ ('IMAGE_FILE_BYTES_REVERSED_LO', 0x0080),
+ ('IMAGE_FILE_32BIT_MACHINE', 0x0100),
+ ('IMAGE_FILE_DEBUG_STRIPPED', 0x0200),
+ ('IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP', 0x0400),
+ ('IMAGE_FILE_NET_RUN_FROM_SWAP', 0x0800),
+ ('IMAGE_FILE_SYSTEM', 0x1000),
+ ('IMAGE_FILE_DLL', 0x2000),
+ ('IMAGE_FILE_UP_SYSTEM_ONLY', 0x4000),
+ ('IMAGE_FILE_BYTES_REVERSED_HI', 0x8000) ]
+
+IMAGE_CHARACTERISTICS = dict([(e[1], e[0]) for e in
+ image_characteristics]+image_characteristics)
+
+
+section_characteristics = [
+ ('IMAGE_SCN_CNT_CODE', 0x00000020),
+ ('IMAGE_SCN_CNT_INITIALIZED_DATA', 0x00000040),
+ ('IMAGE_SCN_CNT_UNINITIALIZED_DATA', 0x00000080),
+ ('IMAGE_SCN_LNK_OTHER', 0x00000100),
+ ('IMAGE_SCN_LNK_INFO', 0x00000200),
+ ('IMAGE_SCN_LNK_REMOVE', 0x00000800),
+ ('IMAGE_SCN_LNK_COMDAT', 0x00001000),
+ ('IMAGE_SCN_MEM_FARDATA', 0x00008000),
+ ('IMAGE_SCN_MEM_PURGEABLE', 0x00020000),
+ ('IMAGE_SCN_MEM_16BIT', 0x00020000),
+ ('IMAGE_SCN_MEM_LOCKED', 0x00040000),
+ ('IMAGE_SCN_MEM_PRELOAD', 0x00080000),
+ ('IMAGE_SCN_ALIGN_1BYTES', 0x00100000),
+ ('IMAGE_SCN_ALIGN_2BYTES', 0x00200000),
+ ('IMAGE_SCN_ALIGN_4BYTES', 0x00300000),
+ ('IMAGE_SCN_ALIGN_8BYTES', 0x00400000),
+ ('IMAGE_SCN_ALIGN_16BYTES', 0x00500000),
+ ('IMAGE_SCN_ALIGN_32BYTES', 0x00600000),
+ ('IMAGE_SCN_ALIGN_64BYTES', 0x00700000),
+ ('IMAGE_SCN_ALIGN_128BYTES', 0x00800000),
+ ('IMAGE_SCN_ALIGN_256BYTES', 0x00900000),
+ ('IMAGE_SCN_ALIGN_512BYTES', 0x00A00000),
+ ('IMAGE_SCN_ALIGN_1024BYTES', 0x00B00000),
+ ('IMAGE_SCN_ALIGN_2048BYTES', 0x00C00000),
+ ('IMAGE_SCN_ALIGN_4096BYTES', 0x00D00000),
+ ('IMAGE_SCN_ALIGN_8192BYTES', 0x00E00000),
+ ('IMAGE_SCN_ALIGN_MASK', 0x00F00000),
+ ('IMAGE_SCN_LNK_NRELOC_OVFL', 0x01000000),
+ ('IMAGE_SCN_MEM_DISCARDABLE', 0x02000000),
+ ('IMAGE_SCN_MEM_NOT_CACHED', 0x04000000),
+ ('IMAGE_SCN_MEM_NOT_PAGED', 0x08000000),
+ ('IMAGE_SCN_MEM_SHARED', 0x10000000),
+ ('IMAGE_SCN_MEM_EXECUTE', 0x20000000),
+ ('IMAGE_SCN_MEM_READ', 0x40000000),
+ ('IMAGE_SCN_MEM_WRITE', 0x80000000L) ]
+
+SECTION_CHARACTERISTICS = dict([(e[1], e[0]) for e in
+ section_characteristics]+section_characteristics)
+
+
+debug_types = [
+ ('IMAGE_DEBUG_TYPE_UNKNOWN', 0),
+ ('IMAGE_DEBUG_TYPE_COFF', 1),
+ ('IMAGE_DEBUG_TYPE_CODEVIEW', 2),
+ ('IMAGE_DEBUG_TYPE_FPO', 3),
+ ('IMAGE_DEBUG_TYPE_MISC', 4),
+ ('IMAGE_DEBUG_TYPE_EXCEPTION', 5),
+ ('IMAGE_DEBUG_TYPE_FIXUP', 6),
+ ('IMAGE_DEBUG_TYPE_OMAP_TO_SRC', 7),
+ ('IMAGE_DEBUG_TYPE_OMAP_FROM_SRC', 8),
+ ('IMAGE_DEBUG_TYPE_BORLAND', 9),
+ ('IMAGE_DEBUG_TYPE_RESERVED10', 10) ]
+
+DEBUG_TYPE = dict([(e[1], e[0]) for e in debug_types]+debug_types)
+
+
+subsystem_types = [
+ ('IMAGE_SUBSYSTEM_UNKNOWN', 0),
+ ('IMAGE_SUBSYSTEM_NATIVE', 1),
+ ('IMAGE_SUBSYSTEM_WINDOWS_GUI', 2),
+ ('IMAGE_SUBSYSTEM_WINDOWS_CUI', 3),
+ ('IMAGE_SUBSYSTEM_OS2_CUI', 5),
+ ('IMAGE_SUBSYSTEM_POSIX_CUI', 7),
+ ('IMAGE_SUBSYSTEM_WINDOWS_CE_GUI', 9),
+ ('IMAGE_SUBSYSTEM_EFI_APPLICATION', 10),
+ ('IMAGE_SUBSYSTEM_EFI_BOOT_SERVICE_DRIVER', 11),
+ ('IMAGE_SUBSYSTEM_EFI_RUNTIME_DRIVER', 12),
+ ('IMAGE_SUBSYSTEM_EFI_ROM', 13),
+ ('IMAGE_SUBSYSTEM_XBOX', 14)]
+
+SUBSYSTEM_TYPE = dict([(e[1], e[0]) for e in subsystem_types]+subsystem_types)
+
+
+machine_types = [
+ ('IMAGE_FILE_MACHINE_UNKNOWN', 0),
+ ('IMAGE_FILE_MACHINE_AM33', 0x1d3),
+ ('IMAGE_FILE_MACHINE_AMD64', 0x8664),
+ ('IMAGE_FILE_MACHINE_ARM', 0x1c0),
+ ('IMAGE_FILE_MACHINE_EBC', 0xebc),
+ ('IMAGE_FILE_MACHINE_I386', 0x14c),
+ ('IMAGE_FILE_MACHINE_IA64', 0x200),
+ ('IMAGE_FILE_MACHINE_MR32', 0x9041),
+ ('IMAGE_FILE_MACHINE_MIPS16', 0x266),
+ ('IMAGE_FILE_MACHINE_MIPSFPU', 0x366),
+ ('IMAGE_FILE_MACHINE_MIPSFPU16',0x466),
+ ('IMAGE_FILE_MACHINE_POWERPC', 0x1f0),
+ ('IMAGE_FILE_MACHINE_POWERPCFP',0x1f1),
+ ('IMAGE_FILE_MACHINE_R4000', 0x166),
+ ('IMAGE_FILE_MACHINE_SH3', 0x1a2),
+ ('IMAGE_FILE_MACHINE_SH3DSP', 0x1a3),
+ ('IMAGE_FILE_MACHINE_SH4', 0x1a6),
+ ('IMAGE_FILE_MACHINE_SH5', 0x1a8),
+ ('IMAGE_FILE_MACHINE_THUMB', 0x1c2),
+ ('IMAGE_FILE_MACHINE_WCEMIPSV2',0x169),
+ ]
+
+MACHINE_TYPE = dict([(e[1], e[0]) for e in machine_types]+machine_types)
+
+
+relocation_types = [
+ ('IMAGE_REL_BASED_ABSOLUTE', 0),
+ ('IMAGE_REL_BASED_HIGH', 1),
+ ('IMAGE_REL_BASED_LOW', 2),
+ ('IMAGE_REL_BASED_HIGHLOW', 3),
+ ('IMAGE_REL_BASED_HIGHADJ', 4),
+ ('IMAGE_REL_BASED_MIPS_JMPADDR', 5),
+ ('IMAGE_REL_BASED_SECTION', 6),
+ ('IMAGE_REL_BASED_REL', 7),
+ ('IMAGE_REL_BASED_MIPS_JMPADDR16', 9),
+ ('IMAGE_REL_BASED_IA64_IMM64', 9),
+ ('IMAGE_REL_BASED_DIR64', 10),
+ ('IMAGE_REL_BASED_HIGH3ADJ', 11) ]
+
+RELOCATION_TYPE = dict([(e[1], e[0]) for e in relocation_types]+relocation_types)
+
+
+dll_characteristics = [
+ ('IMAGE_DLL_CHARACTERISTICS_RESERVED_0x0001', 0x0001),
+ ('IMAGE_DLL_CHARACTERISTICS_RESERVED_0x0002', 0x0002),
+ ('IMAGE_DLL_CHARACTERISTICS_RESERVED_0x0004', 0x0004),
+ ('IMAGE_DLL_CHARACTERISTICS_RESERVED_0x0008', 0x0008),
+ ('IMAGE_DLL_CHARACTERISTICS_DYNAMIC_BASE', 0x0040),
+ ('IMAGE_DLL_CHARACTERISTICS_FORCE_INTEGRITY', 0x0080),
+ ('IMAGE_DLL_CHARACTERISTICS_NX_COMPAT', 0x0100),
+ ('IMAGE_DLL_CHARACTERISTICS_NO_ISOLATION', 0x0200),
+ ('IMAGE_DLL_CHARACTERISTICS_NO_SEH', 0x0400),
+ ('IMAGE_DLL_CHARACTERISTICS_NO_BIND', 0x0800),
+ ('IMAGE_DLL_CHARACTERISTICS_RESERVED_0x1000', 0x1000),
+ ('IMAGE_DLL_CHARACTERISTICS_WDM_DRIVER', 0x2000),
+ ('IMAGE_DLL_CHARACTERISTICS_TERMINAL_SERVER_AWARE', 0x8000) ]
+
+DLL_CHARACTERISTICS = dict([(e[1], e[0]) for e in dll_characteristics]+dll_characteristics)
+
+
+# Resource types
+resource_type = [
+ ('RT_CURSOR', 1),
+ ('RT_BITMAP', 2),
+ ('RT_ICON', 3),
+ ('RT_MENU', 4),
+ ('RT_DIALOG', 5),
+ ('RT_STRING', 6),
+ ('RT_FONTDIR', 7),
+ ('RT_FONT', 8),
+ ('RT_ACCELERATOR', 9),
+ ('RT_RCDATA', 10),
+ ('RT_MESSAGETABLE', 11),
+ ('RT_GROUP_CURSOR', 12),
+ ('RT_GROUP_ICON', 14),
+ ('RT_VERSION', 16),
+ ('RT_DLGINCLUDE', 17),
+ ('RT_PLUGPLAY', 19),
+ ('RT_VXD', 20),
+ ('RT_ANICURSOR', 21),
+ ('RT_ANIICON', 22),
+ ('RT_HTML', 23),
+ ('RT_MANIFEST', 24) ]
+
+RESOURCE_TYPE = dict([(e[1], e[0]) for e in resource_type]+resource_type)
+
+
+# Language definitions
+lang = [
+ ('LANG_NEUTRAL', 0x00),
+ ('LANG_INVARIANT', 0x7f),
+ ('LANG_AFRIKAANS', 0x36),
+ ('LANG_ALBANIAN', 0x1c),
+ ('LANG_ARABIC', 0x01),
+ ('LANG_ARMENIAN', 0x2b),
+ ('LANG_ASSAMESE', 0x4d),
+ ('LANG_AZERI', 0x2c),
+ ('LANG_BASQUE', 0x2d),
+ ('LANG_BELARUSIAN', 0x23),
+ ('LANG_BENGALI', 0x45),
+ ('LANG_BULGARIAN', 0x02),
+ ('LANG_CATALAN', 0x03),
+ ('LANG_CHINESE', 0x04),
+ ('LANG_CROATIAN', 0x1a),
+ ('LANG_CZECH', 0x05),
+ ('LANG_DANISH', 0x06),
+ ('LANG_DIVEHI', 0x65),
+ ('LANG_DUTCH', 0x13),
+ ('LANG_ENGLISH', 0x09),
+ ('LANG_ESTONIAN', 0x25),
+ ('LANG_FAEROESE', 0x38),
+ ('LANG_FARSI', 0x29),
+ ('LANG_FINNISH', 0x0b),
+ ('LANG_FRENCH', 0x0c),
+ ('LANG_GALICIAN', 0x56),
+ ('LANG_GEORGIAN', 0x37),
+ ('LANG_GERMAN', 0x07),
+ ('LANG_GREEK', 0x08),
+ ('LANG_GUJARATI', 0x47),
+ ('LANG_HEBREW', 0x0d),
+ ('LANG_HINDI', 0x39),
+ ('LANG_HUNGARIAN', 0x0e),
+ ('LANG_ICELANDIC', 0x0f),
+ ('LANG_INDONESIAN', 0x21),
+ ('LANG_ITALIAN', 0x10),
+ ('LANG_JAPANESE', 0x11),
+ ('LANG_KANNADA', 0x4b),
+ ('LANG_KASHMIRI', 0x60),
+ ('LANG_KAZAK', 0x3f),
+ ('LANG_KONKANI', 0x57),
+ ('LANG_KOREAN', 0x12),
+ ('LANG_KYRGYZ', 0x40),
+ ('LANG_LATVIAN', 0x26),
+ ('LANG_LITHUANIAN', 0x27),
+ ('LANG_MACEDONIAN', 0x2f),
+ ('LANG_MALAY', 0x3e),
+ ('LANG_MALAYALAM', 0x4c),
+ ('LANG_MANIPURI', 0x58),
+ ('LANG_MARATHI', 0x4e),
+ ('LANG_MONGOLIAN', 0x50),
+ ('LANG_NEPALI', 0x61),
+ ('LANG_NORWEGIAN', 0x14),
+ ('LANG_ORIYA', 0x48),
+ ('LANG_POLISH', 0x15),
+ ('LANG_PORTUGUESE', 0x16),
+ ('LANG_PUNJABI', 0x46),
+ ('LANG_ROMANIAN', 0x18),
+ ('LANG_RUSSIAN', 0x19),
+ ('LANG_SANSKRIT', 0x4f),
+ ('LANG_SERBIAN', 0x1a),
+ ('LANG_SINDHI', 0x59),
+ ('LANG_SLOVAK', 0x1b),
+ ('LANG_SLOVENIAN', 0x24),
+ ('LANG_SPANISH', 0x0a),
+ ('LANG_SWAHILI', 0x41),
+ ('LANG_SWEDISH', 0x1d),
+ ('LANG_SYRIAC', 0x5a),
+ ('LANG_TAMIL', 0x49),
+ ('LANG_TATAR', 0x44),
+ ('LANG_TELUGU', 0x4a),
+ ('LANG_THAI', 0x1e),
+ ('LANG_TURKISH', 0x1f),
+ ('LANG_UKRAINIAN', 0x22),
+ ('LANG_URDU', 0x20),
+ ('LANG_UZBEK', 0x43),
+ ('LANG_VIETNAMESE', 0x2a),
+ ('LANG_GAELIC', 0x3c),
+ ('LANG_MALTESE', 0x3a),
+ ('LANG_MAORI', 0x28),
+ ('LANG_RHAETO_ROMANCE',0x17),
+ ('LANG_SAAMI', 0x3b),
+ ('LANG_SORBIAN', 0x2e),
+ ('LANG_SUTU', 0x30),
+ ('LANG_TSONGA', 0x31),
+ ('LANG_TSWANA', 0x32),
+ ('LANG_VENDA', 0x33),
+ ('LANG_XHOSA', 0x34),
+ ('LANG_ZULU', 0x35),
+ ('LANG_ESPERANTO', 0x8f),
+ ('LANG_WALON', 0x90),
+ ('LANG_CORNISH', 0x91),
+ ('LANG_WELSH', 0x92),
+ ('LANG_BRETON', 0x93) ]
+
+LANG = dict(lang+[(e[1], e[0]) for e in lang])
+
+
+# Sublanguage definitions
+sublang = [
+ ('SUBLANG_NEUTRAL', 0x00),
+ ('SUBLANG_DEFAULT', 0x01),
+ ('SUBLANG_SYS_DEFAULT', 0x02),
+ ('SUBLANG_ARABIC_SAUDI_ARABIA', 0x01),
+ ('SUBLANG_ARABIC_IRAQ', 0x02),
+ ('SUBLANG_ARABIC_EGYPT', 0x03),
+ ('SUBLANG_ARABIC_LIBYA', 0x04),
+ ('SUBLANG_ARABIC_ALGERIA', 0x05),
+ ('SUBLANG_ARABIC_MOROCCO', 0x06),
+ ('SUBLANG_ARABIC_TUNISIA', 0x07),
+ ('SUBLANG_ARABIC_OMAN', 0x08),
+ ('SUBLANG_ARABIC_YEMEN', 0x09),
+ ('SUBLANG_ARABIC_SYRIA', 0x0a),
+ ('SUBLANG_ARABIC_JORDAN', 0x0b),
+ ('SUBLANG_ARABIC_LEBANON', 0x0c),
+ ('SUBLANG_ARABIC_KUWAIT', 0x0d),
+ ('SUBLANG_ARABIC_UAE', 0x0e),
+ ('SUBLANG_ARABIC_BAHRAIN', 0x0f),
+ ('SUBLANG_ARABIC_QATAR', 0x10),
+ ('SUBLANG_AZERI_LATIN', 0x01),
+ ('SUBLANG_AZERI_CYRILLIC', 0x02),
+ ('SUBLANG_CHINESE_TRADITIONAL', 0x01),
+ ('SUBLANG_CHINESE_SIMPLIFIED', 0x02),
+ ('SUBLANG_CHINESE_HONGKONG', 0x03),
+ ('SUBLANG_CHINESE_SINGAPORE', 0x04),
+ ('SUBLANG_CHINESE_MACAU', 0x05),
+ ('SUBLANG_DUTCH', 0x01),
+ ('SUBLANG_DUTCH_BELGIAN', 0x02),
+ ('SUBLANG_ENGLISH_US', 0x01),
+ ('SUBLANG_ENGLISH_UK', 0x02),
+ ('SUBLANG_ENGLISH_AUS', 0x03),
+ ('SUBLANG_ENGLISH_CAN', 0x04),
+ ('SUBLANG_ENGLISH_NZ', 0x05),
+ ('SUBLANG_ENGLISH_EIRE', 0x06),
+ ('SUBLANG_ENGLISH_SOUTH_AFRICA', 0x07),
+ ('SUBLANG_ENGLISH_JAMAICA', 0x08),
+ ('SUBLANG_ENGLISH_CARIBBEAN', 0x09),
+ ('SUBLANG_ENGLISH_BELIZE', 0x0a),
+ ('SUBLANG_ENGLISH_TRINIDAD', 0x0b),
+ ('SUBLANG_ENGLISH_ZIMBABWE', 0x0c),
+ ('SUBLANG_ENGLISH_PHILIPPINES', 0x0d),
+ ('SUBLANG_FRENCH', 0x01),
+ ('SUBLANG_FRENCH_BELGIAN', 0x02),
+ ('SUBLANG_FRENCH_CANADIAN', 0x03),
+ ('SUBLANG_FRENCH_SWISS', 0x04),
+ ('SUBLANG_FRENCH_LUXEMBOURG', 0x05),
+ ('SUBLANG_FRENCH_MONACO', 0x06),
+ ('SUBLANG_GERMAN', 0x01),
+ ('SUBLANG_GERMAN_SWISS', 0x02),
+ ('SUBLANG_GERMAN_AUSTRIAN', 0x03),
+ ('SUBLANG_GERMAN_LUXEMBOURG', 0x04),
+ ('SUBLANG_GERMAN_LIECHTENSTEIN', 0x05),
+ ('SUBLANG_ITALIAN', 0x01),
+ ('SUBLANG_ITALIAN_SWISS', 0x02),
+ ('SUBLANG_KASHMIRI_SASIA', 0x02),
+ ('SUBLANG_KASHMIRI_INDIA', 0x02),
+ ('SUBLANG_KOREAN', 0x01),
+ ('SUBLANG_LITHUANIAN', 0x01),
+ ('SUBLANG_MALAY_MALAYSIA', 0x01),
+ ('SUBLANG_MALAY_BRUNEI_DARUSSALAM', 0x02),
+ ('SUBLANG_NEPALI_INDIA', 0x02),
+ ('SUBLANG_NORWEGIAN_BOKMAL', 0x01),
+ ('SUBLANG_NORWEGIAN_NYNORSK', 0x02),
+ ('SUBLANG_PORTUGUESE', 0x02),
+ ('SUBLANG_PORTUGUESE_BRAZILIAN', 0x01),
+ ('SUBLANG_SERBIAN_LATIN', 0x02),
+ ('SUBLANG_SERBIAN_CYRILLIC', 0x03),
+ ('SUBLANG_SPANISH', 0x01),
+ ('SUBLANG_SPANISH_MEXICAN', 0x02),
+ ('SUBLANG_SPANISH_MODERN', 0x03),
+ ('SUBLANG_SPANISH_GUATEMALA', 0x04),
+ ('SUBLANG_SPANISH_COSTA_RICA', 0x05),
+ ('SUBLANG_SPANISH_PANAMA', 0x06),
+ ('SUBLANG_SPANISH_DOMINICAN_REPUBLIC', 0x07),
+ ('SUBLANG_SPANISH_VENEZUELA', 0x08),
+ ('SUBLANG_SPANISH_COLOMBIA', 0x09),
+ ('SUBLANG_SPANISH_PERU', 0x0a),
+ ('SUBLANG_SPANISH_ARGENTINA', 0x0b),
+ ('SUBLANG_SPANISH_ECUADOR', 0x0c),
+ ('SUBLANG_SPANISH_CHILE', 0x0d),
+ ('SUBLANG_SPANISH_URUGUAY', 0x0e),
+ ('SUBLANG_SPANISH_PARAGUAY', 0x0f),
+ ('SUBLANG_SPANISH_BOLIVIA', 0x10),
+ ('SUBLANG_SPANISH_EL_SALVADOR', 0x11),
+ ('SUBLANG_SPANISH_HONDURAS', 0x12),
+ ('SUBLANG_SPANISH_NICARAGUA', 0x13),
+ ('SUBLANG_SPANISH_PUERTO_RICO', 0x14),
+ ('SUBLANG_SWEDISH', 0x01),
+ ('SUBLANG_SWEDISH_FINLAND', 0x02),
+ ('SUBLANG_URDU_PAKISTAN', 0x01),
+ ('SUBLANG_URDU_INDIA', 0x02),
+ ('SUBLANG_UZBEK_LATIN', 0x01),
+ ('SUBLANG_UZBEK_CYRILLIC', 0x02),
+ ('SUBLANG_DUTCH_SURINAM', 0x03),
+ ('SUBLANG_ROMANIAN', 0x01),
+ ('SUBLANG_ROMANIAN_MOLDAVIA', 0x02),
+ ('SUBLANG_RUSSIAN', 0x01),
+ ('SUBLANG_RUSSIAN_MOLDAVIA', 0x02),
+ ('SUBLANG_CROATIAN', 0x01),
+ ('SUBLANG_LITHUANIAN_CLASSIC', 0x02),
+ ('SUBLANG_GAELIC', 0x01),
+ ('SUBLANG_GAELIC_SCOTTISH', 0x02),
+ ('SUBLANG_GAELIC_MANX', 0x03) ]
+
+SUBLANG = dict(sublang+[(e[1], e[0]) for e in sublang])
+
+
+class UnicodeStringWrapperPostProcessor:
+ """This class attemps to help the process of identifying strings
+ that might be plain Unicode or Pascal. A list of strings will be
+ wrapped on it with the hope the overlappings will help make the
+ decission about their type."""
+
+ def __init__(self, pe, rva_ptr):
+ self.pe = pe
+ self.rva_ptr = rva_ptr
+ self.string = None
+
+
+ def get_rva(self):
+ """Get the RVA of the string."""
+
+ return self.rva_ptr
+
+
+ def __str__(self):
+ """Return the escaped ASCII representation of the string."""
+
+ def convert_char(char):
+ if char in string.printable:
+ return char
+ else:
+ return r'\x%02x' % ord(char)
+
+ if self.string:
+ return ''.join([convert_char(c) for c in self.string])
+
+ return ''
+
+
+ def invalidate(self):
+ """Make this instance None, to express it's no known string type."""
+
+ self = None
+
+
+ def render_pascal_16(self):
+
+ self.string = self.pe.get_string_u_at_rva(
+ self.rva_ptr+2,
+ max_length=self.__get_pascal_16_length())
+
+
+ def ask_pascal_16(self, next_rva_ptr):
+ """The next RVA is taken to be the one immediately following this one.
+
+ Such RVA could indicate the natural end of the string and will be checked
+ with the possible length contained in the first word.
+ """
+
+ length = self.__get_pascal_16_length()
+
+ if length == (next_rva_ptr - (self.rva_ptr+2)) / 2:
+ self.length = length
+ return True
+
+ return False
+
+
+ def __get_pascal_16_length(self):
+
+ return self.__get_word_value_at_rva(self.rva_ptr)
+
+
+ def __get_word_value_at_rva(self, rva):
+
+ try:
+ data = self.pe.get_data(self.rva_ptr, 2)
+ except PEFormatError, e:
+ return False
+
+ if len(data)<2:
+ return False
+
+ return struct.unpack('<H', data)[0]
+
+
+ #def render_pascal_8(self):
+ # """"""
+
+
+ def ask_unicode_16(self, next_rva_ptr):
+ """The next RVA is taken to be the one immediately following this one.
+
+ Such RVA could indicate the natural end of the string and will be checked
+ to see if there's a Unicode NULL character there.
+ """
+
+ if self.__get_word_value_at_rva(next_rva_ptr-2) == 0:
+ self.length = next_rva_ptr - self.rva_ptr
+ return True
+
+ return False
+
+
+ def render_unicode_16(self):
+ """"""
+
+ self.string = self.pe.get_string_u_at_rva(self.rva_ptr)
+
+
+class PEFormatError(Exception):
+ """Generic PE format error exception."""
+
+ def __init__(self, value):
+ self.value = value
+
+ def __str__(self):
+ return repr(self.value)
+
+
+class Dump:
+ """Convenience class for dumping the PE information."""
+
+ def __init__(self):
+ self.text = ''
+
+
+ def add_lines(self, txt, indent=0):
+ """Adds a list of lines.
+
+ The list can be indented with the optional argument 'indent'.
+ """
+ for line in txt:
+ self.add_line(line, indent)
+
+
+ def add_line(self, txt, indent=0):
+ """Adds a line.
+
+ The line can be indented with the optional argument 'indent'.
+ """
+
+ self.add(txt+'\n', indent)
+
+
+ def add(self, txt, indent=0):
+ """Adds some text, no newline will be appended.
+
+ The text can be indented with the optional argument 'indent'.
+ """
+
+ if isinstance(txt, unicode):
+ s = []
+ for c in txt:
+ try:
+ s.append(str(c))
+ except UnicodeEncodeError, e:
+ s.append(repr(c))
+
+ txt = ''.join(s)
+
+ self.text += ' '*indent+txt
+
+
+ def add_header(self, txt):
+ """Adds a header element."""
+
+ self.add_line('-'*10+txt+'-'*10+'\n')
+
+
+ def add_newline(self):
+ """Adds a newline."""
+
+ self.text += '\n'
+
+
+ def get_text(self):
+ """Get the text in its current state."""
+
+ return self.text
+
+
+
class Structure:
    """Prepare structure object to extract members from data.

    Format is a list containing definitions for the elements
    of the structure.
    """


    def __init__(self, format, name=None, file_offset=None):
        # Format is forced little endian, for big endian non Intel platforms
        self.__format__ = '<'
        self.__keys__ = []
#        self.values = {}
        self.__format_length__ = 0
        self.__set_format__(format[1])
        self._all_zeroes = False
        self.__unpacked_data_elms__ = None
        self.__file_offset__ = file_offset
        if name:
            self.name = name
        else:
            # Default to the structure name given in the format tuple.
            self.name = format[0]


    def __get_format__(self):
        return self.__format__


    def get_file_offset(self):
        return self.__file_offset__

    def set_file_offset(self, offset):
        self.__file_offset__ = offset

    def all_zeroes(self):
        """Returns true if the unpacked data is all zeroes."""

        return self._all_zeroes


    def __set_format__(self, format):
        # Build the struct format string and the member-name lists from
        # element definitions of the form '<type>,<name>[,<alias>...]'.

        for elm in format:
            if ',' in elm:
                elm_type, elm_name = elm.split(',', 1)
                self.__format__ += elm_type

                elm_names = elm_name.split(',')
                names = []
                for elm_name in elm_names:
                    # NOTE(review): '__keys__' holds *lists* of names, so this
                    # membership test of a plain string appears to never match
                    # and the '_N' suffixing below looks like dead code --
                    # confirm before relying on it.
                    if elm_name in self.__keys__:
                        search_list = [x[:len(elm_name)] for x in self.__keys__]
                        occ_count = search_list.count(elm_name)
                        elm_name = elm_name+'_'+str(occ_count)
                    names.append(elm_name)
                # Some PE header structures have unions on them, so a certain
                # value might have different names, so each key has a list of
                # all the possible members referring to the data.
                self.__keys__.append(names)

        self.__format_length__ = struct.calcsize(self.__format__)


    def sizeof(self):
        """Return size of the structure."""

        return self.__format_length__


    def __unpack__(self, data):
        # Truncate overlong input to the structure size; raise on short input.

        if len(data)>self.__format_length__:
            data = data[:self.__format_length__]

        # OC Patch:
        # Some malware have incorrect header lengths.
        # Fail gracefully if this occurs
        # Buggy malware: a29b0118af8b7408444df81701ad5a7f
        #
        elif len(data)<self.__format_length__:
            raise PEFormatError('Data length less than expected header length.')


        if data.count(chr(0)) == len(data):
            self._all_zeroes = True

        self.__unpacked_data_elms__ = struct.unpack(self.__format__, data)
        for i in xrange(len(self.__unpacked_data_elms__)):
            for key in self.__keys__[i]:
#                self.values[key] = self.__unpacked_data_elms__[i]
                # Every alias of a union member gets the same value.
                setattr(self, key, self.__unpacked_data_elms__[i])


    def __pack__(self):
        # Re-serialize the structure, preferring any member value that was
        # modified after unpacking over the originally unpacked one.

        new_values = []

        for i in xrange(len(self.__unpacked_data_elms__)):

            for key in self.__keys__[i]:
                new_val = getattr(self, key)
                old_val = self.__unpacked_data_elms__[i]

                # In the case of Unions, when the first changed value
                # is picked the loop is exited
                if new_val != old_val:
                    break

            new_values.append(new_val)

        return struct.pack(self.__format__, *new_values)


    def __str__(self):
        return '\n'.join( self.dump() )

    def __repr__(self):
        return '<Structure: %s>' % (' '.join( [' '.join(s.split()) for s in self.dump()] ))


    def dump(self, indentation=0):
        """Returns a string representation of the structure."""

        dump = []

        dump.append('[%s]' % self.name)

        # Refer to the __set_format__ method for an explanation
        # of the following construct.
        for keys in self.__keys__:
            for key in keys:

                val = getattr(self, key)
                if isinstance(val, int) or isinstance(val, long):
                    val_str = '0x%-8X' % (val)
                    if key == 'TimeDateStamp' or key == 'dwTimeStamp':
                        try:
                            val_str += ' [%s UTC]' % time.asctime(time.gmtime(val))
                        except exceptions.ValueError, e:
                            val_str += ' [INVALID TIME]'
                else:
                    # Strip embedded NULs from fixed-width string members.
                    val_str = ''.join(filter(lambda c:c != '\0', str(val)))

                dump.append('%-30s %s' % (key+':', val_str))

        return dump
+
+
+
class SectionStructure(Structure):
    """Convenience section handling class."""

    def get_data(self, start, length=None):
        """Get data chunk from a section.

        Allows to query data from the section by passing the
        addresses where the PE file would be loaded by default.
        It is then possible to retrieve code and data by its real
        addresses as it would be if loaded.
        """

        offset = start - self.VirtualAddress

        if length:
            end = offset+length
        else:
            end = len(self.data)

        return self.data[offset:end]


    def get_rva_from_offset(self, offset):
        """Map a file offset within this section to an RVA."""
        return offset - self.PointerToRawData + self.VirtualAddress


    def get_offset_from_rva(self, rva):
        """Map an RVA within this section to a file offset."""
        return (rva - self.VirtualAddress) + self.PointerToRawData


    def contains_offset(self, offset):
        """Check whether the section contains the file offset provided."""

        if not self.PointerToRawData:
            # bss and other sections containing only uninitialized data must have 0
            # and do not take space in the file
            return False
        # Bug fix: the raw data spans PointerToRawData .. PointerToRawData +
        # SizeOfRawData (both file-space quantities). The previous code
        # compared against VirtualAddress + SizeOfRawData, mixing RVA space
        # with file-offset space.
        return self.PointerToRawData <= offset < self.PointerToRawData + self.SizeOfRawData


    def contains_rva(self, rva):
        """Check whether the section contains the address provided."""

        # PECOFF documentation v8 says:
        # The total size of the section when loaded into memory.
        # If this value is greater than SizeOfRawData, the section is zero-padded.
        # This field is valid only for executable images and should be set to zero
        # for object files.

        if len(self.data) < self.SizeOfRawData:
            size = self.Misc_VirtualSize
        else:
            size = max(self.SizeOfRawData, self.Misc_VirtualSize)

        return self.VirtualAddress <= rva < self.VirtualAddress + size

    def contains(self, rva):
        #print "DEPRECATION WARNING: you should use contains_rva() instead of contains()"
        return self.contains_rva(rva)


    def set_data(self, data):
        """Set the data belonging to the section."""

        self.data = data


    def get_entropy(self):
        """Calculate and return the entropy for the section."""

        return self.entropy_H( self.data )


    def get_hash_sha1(self):
        """Get the SHA-1 hex-digest of the section's data."""

        if sha1 is not None:
            return sha1( self.data ).hexdigest()


    def get_hash_sha256(self):
        """Get the SHA-256 hex-digest of the section's data."""

        if sha256 is not None:
            return sha256( self.data ).hexdigest()


    def get_hash_sha512(self):
        """Get the SHA-512 hex-digest of the section's data."""

        if sha512 is not None:
            return sha512( self.data ).hexdigest()


    def get_hash_md5(self):
        """Get the MD5 hex-digest of the section's data."""

        if md5 is not None:
            return md5( self.data ).hexdigest()


    def entropy_H(self, data):
        """Calculate the entropy of a chunk of data.

        Returns the Shannon entropy in bits per byte (0.0 for empty data).
        """

        if len(data) == 0:
            return 0.0

        occurences = array.array('L', [0]*256)

        for x in data:
            occurences[ord(x)] += 1

        entropy = 0
        for x in occurences:
            if x:
                p_x = float(x) / len(data)
                entropy -= p_x*math.log(p_x, 2)

        return entropy
+
+
+
class DataContainer:
    """Generic data container."""

    def __init__(self, **args):
        # Expose every keyword argument as an instance attribute.
        self.__dict__.update(args)
+
+
+
class ImportDescData(DataContainer):
    """Holds import descriptor information.

    dll: name of the imported DLL
    imports: list of imported symbols (ImportData instances)
    struct: IMAGE_IMPORT_DESCRIPTOR structure
    """

class ImportData(DataContainer):
    """Holds imported symbol's information.

    ordinal: Ordinal of the symbol
    name: Name of the symbol
    bound: If the symbol is bound, this contains
        the address.
    """

class ExportDirData(DataContainer):
    """Holds export directory information.

    struct: IMAGE_EXPORT_DIRECTORY structure
    symbols: list of exported symbols (ExportData instances)
    """

class ExportData(DataContainer):
    """Holds exported symbols' information.

    ordinal: ordinal of the symbol
    address: address of the symbol
    name: name of the symbol (None if the symbol is
        exported by ordinal only)
    forwarder: if the symbol is forwarded it will
        contain the name of the target symbol,
        None otherwise.
    """


class ResourceDirData(DataContainer):
    """Holds resource directory information.

    struct: IMAGE_RESOURCE_DIRECTORY structure
    entries: list of entries (ResourceDirEntryData instances)
    """

class ResourceDirEntryData(DataContainer):
    """Holds resource directory entry data.

    struct: IMAGE_RESOURCE_DIRECTORY_ENTRY structure
    name: If the resource is identified by name this
        attribute will contain the name string. None
        otherwise. If identified by id, the id is
        available at 'struct.Id'
    id: the id, also in struct.Id
    directory: If this entry has a lower level directory
        this attribute will point to the
        ResourceDirData instance representing it.
    data: If this entry has no further lower directories
        and points to the actual resource data, this
        attribute will reference the corresponding
        ResourceDataEntryData instance.
    (Either of the 'directory' or 'data' attribute will exist,
    but not both.)
    """

class ResourceDataEntryData(DataContainer):
    """Holds resource data entry information.

    struct: IMAGE_RESOURCE_DATA_ENTRY structure
    lang: Primary language ID
    sublang: Sublanguage ID
    """

class DebugData(DataContainer):
    """Holds debug information.

    struct: IMAGE_DEBUG_DIRECTORY structure
    """

class BaseRelocationData(DataContainer):
    """Holds base relocation information.

    struct: IMAGE_BASE_RELOCATION structure
    entries: list of relocation data (RelocationData instances)
    """

class RelocationData(DataContainer):
    """Holds relocation information.

    type: Type of relocation
        The type string can be obtained by
        RELOCATION_TYPE[type]
    rva: RVA of the relocation
    """

class TlsData(DataContainer):
    """Holds TLS information.

    struct: IMAGE_TLS_DIRECTORY structure
    """

class BoundImportDescData(DataContainer):
    """Holds bound import descriptor data.

    This directory entry will provide with information on the
    DLLs this PE file has been bound to (if bound at all).
    The structure will contain the name and timestamp of the
    DLL at the time of binding so that the loader can know
    whether it differs from the one currently present in the
    system and must, therefore, re-bind the PE's imports.

    struct: IMAGE_BOUND_IMPORT_DESCRIPTOR structure
    name: DLL name
    entries: list of entries (BoundImportRefData instances)
        the entries will exist if this DLL has forwarded
        symbols. If so, the destination DLL will have an
        entry in this list.
    """

class BoundImportRefData(DataContainer):
    """Holds bound import forwarder reference data.

    Contains the same information as the bound descriptor but
    for forwarded DLLs, if any.

    struct: IMAGE_BOUND_FORWARDER_REF structure
    name: dll name
    """
+
+
class PE:
    """A Portable Executable representation.

    This class provides access to most of the information in a PE file.

    It expects to be supplied the name of the file to load or PE data
    to process and an optional argument 'fast_load' (False by default)
    which controls whether to load all the directories information,
    which can be quite time consuming.

    pe = pefile.PE('module.dll')
    pe = pefile.PE(name='module.dll')

    would load 'module.dll' and process it. If the data would be already
    available in a buffer the same could be achieved with:

    pe = pefile.PE(data=module_dll_data)

    The "fast_load" can be set to a default by setting its value in the
    module itself by means, for instance, of a "pefile.fast_load = True".
    That will make all the subsequent instances not to load the
    whole PE structure. The "full_load" method can be used to parse
    the missing data at a later stage.

    Basic headers information will be available in the attributes:

    DOS_HEADER
    NT_HEADERS
    FILE_HEADER
    OPTIONAL_HEADER

    All of them will contain among their attributes the members of the
    corresponding structures as defined in WINNT.H

    The raw data corresponding to the header (from the beginning of the
    file up to the start of the first section) will be available in the
    instance's attribute 'header' as a string.

    The sections will be available as a list in the 'sections' attribute.
    Each entry will contain as attributes all the structure's members.

    Directory entries will be available as attributes (if they exist):
    (no other entries are processed at this point)

    DIRECTORY_ENTRY_IMPORT (list of ImportDescData instances)
    DIRECTORY_ENTRY_EXPORT (ExportDirData instance)
    DIRECTORY_ENTRY_RESOURCE (ResourceDirData instance)
    DIRECTORY_ENTRY_DEBUG (list of DebugData instances)
    DIRECTORY_ENTRY_BASERELOC (list of BaseRelocationData instances)
    DIRECTORY_ENTRY_TLS
    DIRECTORY_ENTRY_BOUND_IMPORT (list of BoundImportDescData instances)

    The following dictionary attributes provide ways of mapping different
    constants. They will accept the numeric value and return the string
    representation and the opposite, feed in the string and get the
    numeric constant:

    DIRECTORY_ENTRY
    IMAGE_CHARACTERISTICS
    SECTION_CHARACTERISTICS
    DEBUG_TYPE
    SUBSYSTEM_TYPE
    MACHINE_TYPE
    RELOCATION_TYPE
    RESOURCE_TYPE
    LANG
    SUBLANG
    """

    #
    # Format specifications for PE structures.
    #
    # Each entry is a (name, (member-definitions,)) tuple consumed by the
    # Structure class; member definitions are '<struct-code>,<name>[,alias...]'.
    #

    __IMAGE_DOS_HEADER_format__ = ('IMAGE_DOS_HEADER',
        ('H,e_magic', 'H,e_cblp', 'H,e_cp',
        'H,e_crlc', 'H,e_cparhdr', 'H,e_minalloc',
        'H,e_maxalloc', 'H,e_ss', 'H,e_sp', 'H,e_csum',
        'H,e_ip', 'H,e_cs', 'H,e_lfarlc', 'H,e_ovno', '8s,e_res',
        'H,e_oemid', 'H,e_oeminfo', '20s,e_res2',
        'L,e_lfanew'))

    __IMAGE_FILE_HEADER_format__ = ('IMAGE_FILE_HEADER',
        ('H,Machine', 'H,NumberOfSections',
        'L,TimeDateStamp', 'L,PointerToSymbolTable',
        'L,NumberOfSymbols', 'H,SizeOfOptionalHeader',
        'H,Characteristics'))

    __IMAGE_DATA_DIRECTORY_format__ = ('IMAGE_DATA_DIRECTORY',
        ('L,VirtualAddress', 'L,Size'))


    __IMAGE_OPTIONAL_HEADER_format__ = ('IMAGE_OPTIONAL_HEADER',
        ('H,Magic', 'B,MajorLinkerVersion',
        'B,MinorLinkerVersion', 'L,SizeOfCode',
        'L,SizeOfInitializedData', 'L,SizeOfUninitializedData',
        'L,AddressOfEntryPoint', 'L,BaseOfCode', 'L,BaseOfData',
        'L,ImageBase', 'L,SectionAlignment', 'L,FileAlignment',
        'H,MajorOperatingSystemVersion', 'H,MinorOperatingSystemVersion',
        'H,MajorImageVersion', 'H,MinorImageVersion',
        'H,MajorSubsystemVersion', 'H,MinorSubsystemVersion',
        'L,Reserved1', 'L,SizeOfImage', 'L,SizeOfHeaders',
        'L,CheckSum', 'H,Subsystem', 'H,DllCharacteristics',
        'L,SizeOfStackReserve', 'L,SizeOfStackCommit',
        'L,SizeOfHeapReserve', 'L,SizeOfHeapCommit',
        'L,LoaderFlags', 'L,NumberOfRvaAndSizes' ))


    # 64-bit variant: ImageBase and the stack/heap sizes widen to 'Q' and
    # the 32-bit-only BaseOfData member disappears.
    __IMAGE_OPTIONAL_HEADER64_format__ = ('IMAGE_OPTIONAL_HEADER64',
        ('H,Magic', 'B,MajorLinkerVersion',
        'B,MinorLinkerVersion', 'L,SizeOfCode',
        'L,SizeOfInitializedData', 'L,SizeOfUninitializedData',
        'L,AddressOfEntryPoint', 'L,BaseOfCode',
        'Q,ImageBase', 'L,SectionAlignment', 'L,FileAlignment',
        'H,MajorOperatingSystemVersion', 'H,MinorOperatingSystemVersion',
        'H,MajorImageVersion', 'H,MinorImageVersion',
        'H,MajorSubsystemVersion', 'H,MinorSubsystemVersion',
        'L,Reserved1', 'L,SizeOfImage', 'L,SizeOfHeaders',
        'L,CheckSum', 'H,Subsystem', 'H,DllCharacteristics',
        'Q,SizeOfStackReserve', 'Q,SizeOfStackCommit',
        'Q,SizeOfHeapReserve', 'Q,SizeOfHeapCommit',
        'L,LoaderFlags', 'L,NumberOfRvaAndSizes' ))


    __IMAGE_NT_HEADERS_format__ = ('IMAGE_NT_HEADERS', ('L,Signature',))

    __IMAGE_SECTION_HEADER_format__ = ('IMAGE_SECTION_HEADER',
        ('8s,Name', 'L,Misc,Misc_PhysicalAddress,Misc_VirtualSize',
        'L,VirtualAddress', 'L,SizeOfRawData', 'L,PointerToRawData',
        'L,PointerToRelocations', 'L,PointerToLinenumbers',
        'H,NumberOfRelocations', 'H,NumberOfLinenumbers',
        'L,Characteristics'))

    __IMAGE_DELAY_IMPORT_DESCRIPTOR_format__ = ('IMAGE_DELAY_IMPORT_DESCRIPTOR',
        ('L,grAttrs', 'L,szName', 'L,phmod', 'L,pIAT', 'L,pINT',
        'L,pBoundIAT', 'L,pUnloadIAT', 'L,dwTimeStamp'))

    __IMAGE_IMPORT_DESCRIPTOR_format__ = ('IMAGE_IMPORT_DESCRIPTOR',
        ('L,OriginalFirstThunk,Characteristics',
        'L,TimeDateStamp', 'L,ForwarderChain', 'L,Name', 'L,FirstThunk'))

    __IMAGE_EXPORT_DIRECTORY_format__ = ('IMAGE_EXPORT_DIRECTORY',
        ('L,Characteristics',
        'L,TimeDateStamp', 'H,MajorVersion', 'H,MinorVersion', 'L,Name',
        'L,Base', 'L,NumberOfFunctions', 'L,NumberOfNames',
        'L,AddressOfFunctions', 'L,AddressOfNames', 'L,AddressOfNameOrdinals'))

    __IMAGE_RESOURCE_DIRECTORY_format__ = ('IMAGE_RESOURCE_DIRECTORY',
        ('L,Characteristics',
        'L,TimeDateStamp', 'H,MajorVersion', 'H,MinorVersion',
        'H,NumberOfNamedEntries', 'H,NumberOfIdEntries'))

    __IMAGE_RESOURCE_DIRECTORY_ENTRY_format__ = ('IMAGE_RESOURCE_DIRECTORY_ENTRY',
        ('L,Name',
        'L,OffsetToData'))

    __IMAGE_RESOURCE_DATA_ENTRY_format__ = ('IMAGE_RESOURCE_DATA_ENTRY',
        ('L,OffsetToData', 'L,Size', 'L,CodePage', 'L,Reserved'))

    # Version-information resource structures.
    __VS_VERSIONINFO_format__ = ( 'VS_VERSIONINFO',
        ('H,Length', 'H,ValueLength', 'H,Type' ))

    __VS_FIXEDFILEINFO_format__ = ( 'VS_FIXEDFILEINFO',
        ('L,Signature', 'L,StrucVersion', 'L,FileVersionMS', 'L,FileVersionLS',
        'L,ProductVersionMS', 'L,ProductVersionLS', 'L,FileFlagsMask', 'L,FileFlags',
        'L,FileOS', 'L,FileType', 'L,FileSubtype', 'L,FileDateMS', 'L,FileDateLS'))

    __StringFileInfo_format__ = ( 'StringFileInfo',
        ('H,Length', 'H,ValueLength', 'H,Type' ))

    __StringTable_format__ = ( 'StringTable',
        ('H,Length', 'H,ValueLength', 'H,Type' ))

    __String_format__ = ( 'String',
        ('H,Length', 'H,ValueLength', 'H,Type' ))

    __Var_format__ = ( 'Var', ('H,Length', 'H,ValueLength', 'H,Type' ))

    __IMAGE_THUNK_DATA_format__ = ('IMAGE_THUNK_DATA',
        ('L,ForwarderString,Function,Ordinal,AddressOfData',))

    __IMAGE_THUNK_DATA64_format__ = ('IMAGE_THUNK_DATA',
        ('Q,ForwarderString,Function,Ordinal,AddressOfData',))

    __IMAGE_DEBUG_DIRECTORY_format__ = ('IMAGE_DEBUG_DIRECTORY',
        ('L,Characteristics', 'L,TimeDateStamp', 'H,MajorVersion',
        'H,MinorVersion', 'L,Type', 'L,SizeOfData', 'L,AddressOfRawData',
        'L,PointerToRawData'))

    __IMAGE_BASE_RELOCATION_format__ = ('IMAGE_BASE_RELOCATION',
        ('L,VirtualAddress', 'L,SizeOfBlock') )

    __IMAGE_TLS_DIRECTORY_format__ = ('IMAGE_TLS_DIRECTORY',
        ('L,StartAddressOfRawData', 'L,EndAddressOfRawData',
        'L,AddressOfIndex', 'L,AddressOfCallBacks',
        'L,SizeOfZeroFill', 'L,Characteristics' ) )

    __IMAGE_TLS_DIRECTORY64_format__ = ('IMAGE_TLS_DIRECTORY',
        ('Q,StartAddressOfRawData', 'Q,EndAddressOfRawData',
        'Q,AddressOfIndex', 'Q,AddressOfCallBacks',
        'L,SizeOfZeroFill', 'L,Characteristics' ) )

    __IMAGE_BOUND_IMPORT_DESCRIPTOR_format__ = ('IMAGE_BOUND_IMPORT_DESCRIPTOR',
        ('L,TimeDateStamp', 'H,OffsetModuleName', 'H,NumberOfModuleForwarderRefs'))

    __IMAGE_BOUND_FORWARDER_REF_format__ = ('IMAGE_BOUND_FORWARDER_REF',
        ('L,TimeDateStamp', 'H,OffsetModuleName', 'H,Reserved') )
+
    def __init__(self, name=None, data=None, fast_load=None):
        """Instantiate the PE class and optionally parse a file or buffer.

        name: path of the file to load.
        data: buffer with the file's data (alternative to 'name').
        fast_load: when true, skip parsing the data directories; when not
            given, falls back to the module-level 'fast_load' default.

        When neither 'name' nor 'data' is supplied, an empty, unparsed
        instance is returned.
        """

        self.sections = []

        self.__warnings = []

        self.PE_TYPE = None

        if not name and not data:
            return

        # This list will keep track of all the structures created.
        # That will allow for an easy iteration through the list
        # in order to save the modifications made
        self.__structures__ = []

        if not fast_load:
            fast_load = globals()['fast_load']
        self.__parse__(name, data, fast_load)
+
+
+
    def __unpack_data__(self, format, data, file_offset):
        """Apply structure format to raw data.

        Returns an unpacked structure object if successful, None otherwise.
        A failure is recorded as a warning rather than raised.
        """

        structure = Structure(format, file_offset=file_offset)
        #if len(data) < structure.sizeof():
        #    return None

        try:
            structure.__unpack__(data)
        except PEFormatError, err:
            self.__warnings.append(
                'Corrupt header "%s" at file offset %d. Exception: %s' % (
                format[0], file_offset, str(err)) )
            return None

        # Remember the structure so write() can serialize changes back.
        self.__structures__.append(structure)

        return structure
+
+
+
    def __parse__(self, fname, data, fast_load):
        """Parse a Portable Executable file.

        Loads a PE file, parsing all its structures and making them available
        through the instance's attributes.

        Raises PEFormatError when the DOS/NT headers or the Optional Header
        are missing or invalid.
        """

        if fname:
            fd = file(fname, 'rb')
            self.__data__ = fd.read()
            fd.close()
        elif data:
            self.__data__ = data


        self.DOS_HEADER = self.__unpack_data__(
            self.__IMAGE_DOS_HEADER_format__,
            self.__data__, file_offset=0)

        if not self.DOS_HEADER or self.DOS_HEADER.e_magic != IMAGE_DOS_SIGNATURE:
            raise PEFormatError('DOS Header magic not found.')

        # OC Patch:
        # Check for sane value in e_lfanew
        #
        if self.DOS_HEADER.e_lfanew > len(self.__data__):
            raise PEFormatError('Invalid e_lfanew value, probably not a PE file')

        nt_headers_offset = self.DOS_HEADER.e_lfanew

        self.NT_HEADERS = self.__unpack_data__(
            self.__IMAGE_NT_HEADERS_format__,
            self.__data__[nt_headers_offset:],
            file_offset = nt_headers_offset)

        # We better check the signature right here, before the file screws
        # around with sections:
        # OC Patch:
        # Some malware will cause the Signature value to not exist at all
        if not self.NT_HEADERS or not self.NT_HEADERS.Signature:
            raise PEFormatError('NT Headers not found.')

        if self.NT_HEADERS.Signature != IMAGE_NT_SIGNATURE:
            raise PEFormatError('Invalid NT Headers signature.')

        # The FILE_HEADER follows the 4-byte NT signature.
        self.FILE_HEADER = self.__unpack_data__(
            self.__IMAGE_FILE_HEADER_format__,
            self.__data__[nt_headers_offset+4:],
            file_offset = nt_headers_offset+4)
        image_flags = self.retrieve_flags(IMAGE_CHARACTERISTICS, 'IMAGE_FILE_')

        if not self.FILE_HEADER:
            raise PEFormatError('File Header missing')

        # Set the image's flags according the the Characteristics member
        self.set_flags(self.FILE_HEADER, self.FILE_HEADER.Characteristics, image_flags)

        optional_header_offset = \
            nt_headers_offset+4+self.FILE_HEADER.sizeof()

        # Note: location of sections can be controlled from PE header:
        sections_offset = optional_header_offset + self.FILE_HEADER.SizeOfOptionalHeader

        self.OPTIONAL_HEADER = self.__unpack_data__(
            self.__IMAGE_OPTIONAL_HEADER_format__,
            self.__data__[optional_header_offset:],
            file_offset = optional_header_offset)

        # According to solardesigner's findings for his
        # Tiny PE project, the optional header does not
        # need fields beyond "Subsystem" in order to be
        # loadable by the Windows loader (given that zeroes
        # are acceptable values and the header is loaded
        # in a zeroed memory page)
        # If trying to parse a full Optional Header fails
        # we try to parse it again with some 0 padding
        #
        MINIMUM_VALID_OPTIONAL_HEADER_RAW_SIZE = 69

        if ( self.OPTIONAL_HEADER is None and
            len(self.__data__[optional_header_offset:])
            >= MINIMUM_VALID_OPTIONAL_HEADER_RAW_SIZE ):

            # Add enough zeroes to make up for the unused fields
            #
            padding_length = 128

            # Create padding
            #
            padded_data = self.__data__[optional_header_offset:] + (
                '\0' * padding_length)

            self.OPTIONAL_HEADER = self.__unpack_data__(
                self.__IMAGE_OPTIONAL_HEADER_format__,
                padded_data,
                file_offset = optional_header_offset)


        # Check the Magic in the OPTIONAL_HEADER and set the PE file
        # type accordingly
        #
        if self.OPTIONAL_HEADER is not None:

            if self.OPTIONAL_HEADER.Magic == OPTIONAL_HEADER_MAGIC_PE:

                self.PE_TYPE = OPTIONAL_HEADER_MAGIC_PE

            elif self.OPTIONAL_HEADER.Magic == OPTIONAL_HEADER_MAGIC_PE_PLUS:

                self.PE_TYPE = OPTIONAL_HEADER_MAGIC_PE_PLUS

                # PE32+: re-unpack with the 64-bit optional header layout.
                self.OPTIONAL_HEADER = self.__unpack_data__(
                    self.__IMAGE_OPTIONAL_HEADER64_format__,
                    self.__data__[optional_header_offset:],
                    file_offset = optional_header_offset)

                # Again, as explained above, we try to parse
                # a reduced form of the Optional Header which
                # is still valid despite not including all
                # structure members
                #
                MINIMUM_VALID_OPTIONAL_HEADER_RAW_SIZE = 69+4

                if ( self.OPTIONAL_HEADER is None and
                    len(self.__data__[optional_header_offset:])
                    >= MINIMUM_VALID_OPTIONAL_HEADER_RAW_SIZE ):

                    padding_length = 128
                    padded_data = self.__data__[optional_header_offset:] + (
                        '\0' * padding_length)
                    self.OPTIONAL_HEADER = self.__unpack_data__(
                        self.__IMAGE_OPTIONAL_HEADER64_format__,
                        padded_data,
                        file_offset = optional_header_offset)


        if not self.FILE_HEADER:
            raise PEFormatError('File Header missing')


        # OC Patch:
        # Die gracefully if there is no OPTIONAL_HEADER field
        # 975440f5ad5e2e4a92c4d9a5f22f75c1
        if self.PE_TYPE is None or self.OPTIONAL_HEADER is None:
            raise PEFormatError("No Optional Header found, invalid PE32 or PE32+ file")

        dll_characteristics_flags = self.retrieve_flags(DLL_CHARACTERISTICS, 'IMAGE_DLL_CHARACTERISTICS_')

        # Set the Dll Characteristics flags according the the DllCharacteristics member
        self.set_flags(
            self.OPTIONAL_HEADER,
            self.OPTIONAL_HEADER.DllCharacteristics,
            dll_characteristics_flags)


        self.OPTIONAL_HEADER.DATA_DIRECTORY = []
        #offset = (optional_header_offset + self.FILE_HEADER.SizeOfOptionalHeader)
        offset = (optional_header_offset + self.OPTIONAL_HEADER.sizeof())


        self.NT_HEADERS.FILE_HEADER = self.FILE_HEADER
        self.NT_HEADERS.OPTIONAL_HEADER = self.OPTIONAL_HEADER


        # The NumberOfRvaAndSizes is sanitized to stay within
        # reasonable limits so can be casted to an int
        #
        if self.OPTIONAL_HEADER.NumberOfRvaAndSizes > 0x10:
            self.__warnings.append(
                'Suspicious NumberOfRvaAndSizes in the Optional Header. ' +
                'Normal values are never larger than 0x10, the value is: 0x%x' %
                self.OPTIONAL_HEADER.NumberOfRvaAndSizes )

        for i in xrange(int(0x7fffffffL & self.OPTIONAL_HEADER.NumberOfRvaAndSizes)):

            if len(self.__data__[offset:]) == 0:
                break

            # Zero-pad a truncated trailing entry so unpacking succeeds.
            if len(self.__data__[offset:]) < 8:
                data = self.__data__[offset:]+'\0'*8
            else:
                data = self.__data__[offset:]

            dir_entry = self.__unpack_data__(
                self.__IMAGE_DATA_DIRECTORY_format__,
                data,
                file_offset = offset)

            if dir_entry is None:
                break

            # Would fail if missing an entry
            # 1d4937b2fa4d84ad1bce0309857e70ca offending sample
            try:
                dir_entry.name = DIRECTORY_ENTRY[i]
            except (KeyError, AttributeError):
                break

            offset += dir_entry.sizeof()

            self.OPTIONAL_HEADER.DATA_DIRECTORY.append(dir_entry)

            # If the offset goes outside the optional header,
            # the loop is broken, regardless of how many directories
            # NumberOfRvaAndSizes says there are
            #
            # We assume a normally sized optional header, hence that we do
            # a sizeof() instead of reading SizeOfOptionalHeader.
            # Then we add a default number of directories times their size,
            # if we go beyond that, we assume the number of directories
            # is wrong and stop processing
            if offset >= (optional_header_offset +
                self.OPTIONAL_HEADER.sizeof() + 8*16) :

                break


        offset = self.parse_sections(sections_offset)

        # OC Patch:
        # There could be a problem if there are no raw data sections
        # greater than 0
        # fc91013eb72529da005110a3403541b6 example
        # Should this throw an exception in the minimum header offset
        # can't be found?
        #
        rawDataPointers = [
            s.PointerToRawData for s in self.sections if s.PointerToRawData>0]

        if len(rawDataPointers) > 0:
            lowest_section_offset = min(rawDataPointers)
        else:
            lowest_section_offset = None

        if not lowest_section_offset or lowest_section_offset<offset:
            self.header = self.__data__[:offset]
        else:
            self.header = self.__data__[:lowest_section_offset]


        # Check whether the entry point lies within a section
        #
        if self.get_section_by_rva(self.OPTIONAL_HEADER.AddressOfEntryPoint) is not None:

            # Check whether the entry point lies within the file
            #
            ep_offset = self.get_offset_from_rva(self.OPTIONAL_HEADER.AddressOfEntryPoint)
            if ep_offset > len(self.__data__):

                self.__warnings.append(
                    'Possibly corrupt file. AddressOfEntryPoint lies outside the file. ' +
                    'AddressOfEntryPoint: 0x%x' %
                    self.OPTIONAL_HEADER.AddressOfEntryPoint )

        else:

            self.__warnings.append(
                'AddressOfEntryPoint lies outside the sections\' boundaries. ' +
                'AddressOfEntryPoint: 0x%x' %
                self.OPTIONAL_HEADER.AddressOfEntryPoint )


        if not fast_load:
            self.parse_data_directories()
+
+
    def get_warnings(self):
        """Return the list of warnings.

        Non-critical problems found when parsing the PE file are
        appended to a list of warnings. This method returns the
        full list.
        """

        return self.__warnings
+
+
    def show_warnings(self):
        """Print the list of warnings.

        Non-critical problems found when parsing the PE file are
        appended to a list of warnings. This method prints the
        full list to standard output.
        """

        for warning in self.__warnings:
            print '>', warning
+
+
    def full_load(self):
        """Process the data directories.

        This method will load the data directories which might not have
        been loaded if the "fast_load" option was used.
        """

        self.parse_data_directories()
+
+
    def write(self, filename=None):
        """Write the PE file.

        This function will process all headers and components
        of the PE file and include all changes made (by just
        assigning to attributes in the PE objects) and write
        the changes back to a file whose name is provided as
        an argument. The filename is optional.
        The data to be written to the file will be returned
        as a 'str' object.
        """

        # Work on a mutable character list so structures can be patched
        # back in place at their recorded file offsets.
        file_data = list(self.__data__)
        for struct in self.__structures__:

            struct_data = list(struct.__pack__())
            offset = struct.get_file_offset()

            file_data[offset:offset+len(struct_data)] = struct_data

        # Version-information string tables are stored as UTF-16
        # (one data byte followed by a NUL byte per character), so edited
        # strings must be re-interleaved with NULs before splicing.
        if hasattr(self, 'VS_VERSIONINFO'):
            if hasattr(self, 'FileInfo'):
                for entry in self.FileInfo:
                    if hasattr(entry, 'StringTable'):
                        for st_entry in entry.StringTable:
                            for key, entry in st_entry.entries.items():

                                offsets = st_entry.entries_offsets[key]
                                lengths = st_entry.entries_lengths[key]

                                if len( entry ) > lengths[1]:

                                    # New value is longer than the slot:
                                    # truncate to the original length.
                                    uc = zip(
                                        list(entry[:lengths[1]]), ['\0'] * lengths[1] )
                                    l = list()
                                    map(l.extend, uc)

                                    file_data[
                                        offsets[1] : offsets[1] + lengths[1]*2 ] = l

                                else:

                                    uc = zip(
                                        list(entry), ['\0'] * len(entry) )
                                    l = list()
                                    map(l.extend, uc)

                                    file_data[
                                        offsets[1] : offsets[1] + len(entry)*2 ] = l

                                    # Zero-fill the remainder of the slot.
                                    remainder = lengths[1] - len(entry)
                                    file_data[
                                        offsets[1] + len(entry)*2 :
                                        offsets[1] + lengths[1]*2 ] = [
                                            u'\0' ] * remainder*2

        new_file_data = ''.join( [ chr(ord(c)) for c in file_data ] )

        if filename:
            f = file(filename, 'wb+')
            f.write(new_file_data)
            f.close()

        return new_file_data
+
+
+
    def parse_sections(self, offset):
        """Fetch the PE file sections.

        The sections will be readily available in the "sections" attribute.
        Its attributes will contain all the section information plus "data"
        a buffer containing the section's data.

        The "Characteristics" member will be processed and attributes
        representing the section characteristics (with the 'IMAGE_SCN_'
        string trimmed from the constant's names) will be added to the
        section instance.

        Refer to the SectionStructure class for additional info.

        Returns the file offset immediately past the section table.
        """

        self.sections = []

        for i in xrange(self.FILE_HEADER.NumberOfSections):
            section = SectionStructure(self.__IMAGE_SECTION_HEADER_format__)
            # NOTE(review): a freshly constructed Structure instance is
            # always truthy, so this guard can never trigger.
            if not section:
                break
            section_offset = offset + section.sizeof() * i
            section.set_file_offset(section_offset)
            section.__unpack__(self.__data__[section_offset:])
            self.__structures__.append(section)

            if section.SizeOfRawData > len(self.__data__):
                self.__warnings.append(
                    ('Error parsing section %d. ' % i) +
                    'SizeOfRawData is larger than file.')

            if section.PointerToRawData > len(self.__data__):
                self.__warnings.append(
                    ('Error parsing section %d. ' % i) +
                    'PointerToRawData points beyond the end of the file.')

            if section.Misc_VirtualSize > 0x10000000:
                self.__warnings.append(
                    ('Suspicious value found parsing section %d. ' % i) +
                    'VirtualSize is extremely large > 256MiB.')

            if section.VirtualAddress > 0x10000000:
                self.__warnings.append(
                    ('Suspicious value found parsing section %d. ' % i) +
                    'VirtualAddress is beyond 0x10000000.')

            #
            # Some packer used a non-aligned PointerToRawData in the sections,
            # which causes several common tools not to load the section data
            # properly as they blindly read from the indicated offset.
            # It seems that Windows will round the offset down to the largest
            # offset multiple of FileAlignment which is smaller than
            # PointerToRawData. The following code will do the same.
            #

            #alignment = self.OPTIONAL_HEADER.FileAlignment
            section_data_start = section.PointerToRawData

            if ( self.OPTIONAL_HEADER.FileAlignment != 0 and
                (section.PointerToRawData % self.OPTIONAL_HEADER.FileAlignment) != 0):
                self.__warnings.append(
                    ('Error parsing section %d. ' % i) +
                    'Suspicious value for FileAlignment in the Optional Header. ' +
                    'Normally the PointerToRawData entry of the sections\' structures ' +
                    'is a multiple of FileAlignment, this might imply the file ' +
                    'is trying to confuse tools which parse this incorrectly')

            section_data_end = section_data_start+section.SizeOfRawData
            section.set_data(self.__data__[section_data_start:section_data_end])

            section_flags = self.retrieve_flags(SECTION_CHARACTERISTICS, 'IMAGE_SCN_')

            # Set the section's flags according the the Characteristics member
            self.set_flags(section, section.Characteristics, section_flags)

            if ( section.__dict__.get('IMAGE_SCN_MEM_WRITE', False) and
                section.__dict__.get('IMAGE_SCN_MEM_EXECUTE', False) ):

                self.__warnings.append(
                    ('Suspicious flags set for section %d. ' % i) +
                    'Both IMAGE_SCN_MEM_WRITE and IMAGE_SCN_MEM_EXECUTE are set.' +
                    'This might indicate a packed executable.')

            self.sections.append(section)

        if self.FILE_HEADER.NumberOfSections > 0 and self.sections:
            return offset + self.sections[0].sizeof()*self.FILE_HEADER.NumberOfSections
        else:
            return offset
+
+
+ def retrieve_flags(self, flag_dict, flag_filter):
+ """Read the flags from a dictionary and return them in a usable form.
+
+ Will return a list of (flag, value) for all flags in "flag_dict"
+ matching the filter "flag_filter".
+ """
+
+ return [(f[0], f[1]) for f in flag_dict.items() if
+ isinstance(f[0], str) and f[0].startswith(flag_filter)]
+
+
+ def set_flags(self, obj, flag_field, flags):
+ """Will process the flags and set attributes in the object accordingly.
+
+ The object "obj" will gain attritutes named after the flags provided in
+ "flags" and valued True/False, matching the results of applyin each
+ flag value from "flags" to flag_field.
+ """
+
+ for flag in flags:
+ if flag[1] & flag_field:
+ setattr(obj, flag[0], True)
+ else:
+ setattr(obj, flag[0], False)
+
+
+
+ def parse_data_directories(self):
+ """Parse and process the PE file's data directories."""
+
+ directory_parsing = (
+ ('IMAGE_DIRECTORY_ENTRY_IMPORT', self.parse_import_directory),
+ ('IMAGE_DIRECTORY_ENTRY_EXPORT', self.parse_export_directory),
+ ('IMAGE_DIRECTORY_ENTRY_RESOURCE', self.parse_resources_directory),
+ ('IMAGE_DIRECTORY_ENTRY_DEBUG', self.parse_debug_directory),
+ ('IMAGE_DIRECTORY_ENTRY_BASERELOC', self.parse_relocations_directory),
+ ('IMAGE_DIRECTORY_ENTRY_TLS', self.parse_directory_tls),
+ ('IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT', self.parse_delay_import_directory),
+ ('IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT', self.parse_directory_bound_imports) )
+
+ for entry in directory_parsing:
+ # OC Patch:
+ #
+ try:
+ dir_entry = self.OPTIONAL_HEADER.DATA_DIRECTORY[
+ DIRECTORY_ENTRY[entry[0]]]
+ except IndexError:
+ break
+ if dir_entry.VirtualAddress:
+ value = entry[1](dir_entry.VirtualAddress, dir_entry.Size)
+ if value:
+ setattr(self, entry[0][6:], value)
+
+
    def parse_directory_bound_imports(self, rva, size):
        """Parse the bound import directory into BoundImportDescData entries."""

        bnd_descr = Structure(self.__IMAGE_BOUND_IMPORT_DESCRIPTOR_format__)
        bnd_descr_size = bnd_descr.sizeof()
        # Offsets of module names are relative to the directory start.
        start = rva

        bound_imports = []
        while True:

            bnd_descr = self.__unpack_data__(
                self.__IMAGE_BOUND_IMPORT_DESCRIPTOR_format__,
                self.__data__[rva:rva+bnd_descr_size],
                file_offset = rva)
            if bnd_descr is None:
                # If can't parse directory then silently return.
                # This directory does not necessarily have to be valid to
                # still have a valid PE file

                self.__warnings.append(
                    'The Bound Imports directory exists but can\'t be parsed.')

                return

            # An all-zero descriptor terminates the list.
            if bnd_descr.all_zeroes():
                break

            rva += bnd_descr.sizeof()

            forwarder_refs = []
            for idx in xrange(bnd_descr.NumberOfModuleForwarderRefs):
                # Both structures IMAGE_BOUND_IMPORT_DESCRIPTOR and
                # IMAGE_BOUND_FORWARDER_REF have the same size.
                bnd_frwd_ref = self.__unpack_data__(
                    self.__IMAGE_BOUND_FORWARDER_REF_format__,
                    self.__data__[rva:rva+bnd_descr_size],
                    file_offset = rva)
                # OC Patch:
                if not bnd_frwd_ref:
                    raise PEFormatError(
                        "IMAGE_BOUND_FORWARDER_REF cannot be read")
                rva += bnd_frwd_ref.sizeof()

                name_str = self.get_string_from_data(
                    start+bnd_frwd_ref.OffsetModuleName, self.__data__)

                if not name_str:
                    break
                forwarder_refs.append(BoundImportRefData(
                    struct = bnd_frwd_ref,
                    name = name_str))

            name_str = self.get_string_from_data(
                start+bnd_descr.OffsetModuleName, self.__data__)

            if not name_str:
                break
            bound_imports.append(
                BoundImportDescData(
                    struct = bnd_descr,
                    name = name_str,
                    entries = forwarder_refs))

        return bound_imports
+
+
+ def parse_directory_tls(self, rva, size):
+ """"""
+
+ if self.PE_TYPE == OPTIONAL_HEADER_MAGIC_PE:
+ format = self.__IMAGE_TLS_DIRECTORY_format__
+
+ elif self.PE_TYPE == OPTIONAL_HEADER_MAGIC_PE_PLUS:
+ format = self.__IMAGE_TLS_DIRECTORY64_format__
+
+ tls_struct = self.__unpack_data__(
+ format,
+ self.get_data(rva),
+ file_offset = self.get_offset_from_rva(rva))
+
+ if not tls_struct:
+ return None
+
+ return TlsData( struct = tls_struct )
+
+
    def parse_relocations_directory(self, rva, size):
        """Parse the base relocations directory into BaseRelocationData entries."""

        rlc = Structure(self.__IMAGE_BASE_RELOCATION_format__)
        rlc_size = rlc.sizeof()
        end = rva+size

        relocations = []
        while rva<end:

            # OC Patch:
            # Malware that has bad rva entries will cause an error.
            # Just continue on after an exception
            #
            try:
                rlc = self.__unpack_data__(
                    self.__IMAGE_BASE_RELOCATION_format__,
                    self.get_data(rva, rlc_size),
                    file_offset = self.get_offset_from_rva(rva) )
            except PEFormatError:
                self.__warnings.append(
                    'Invalid relocation information. Can\'t read ' +
                    'data at RVA: 0x%x' % rva)
                rlc = None

            if not rlc:
                break

            # The relocation entries follow the IMAGE_BASE_RELOCATION
            # header; SizeOfBlock includes the header itself.
            reloc_entries = self.parse_relocations(
                rva+rlc_size, rlc.VirtualAddress, rlc.SizeOfBlock-rlc_size)

            relocations.append(
                BaseRelocationData(
                    struct = rlc,
                    entries = reloc_entries))

            # A zero SizeOfBlock would never advance the cursor; bail out.
            if not rlc.SizeOfBlock:
                break
            rva += rlc.SizeOfBlock

        return relocations
+
+
+ def parse_relocations(self, data_rva, rva, size):
+ """"""
+
+ data = self.get_data(data_rva, size)
+
+ entries = []
+ for idx in xrange(len(data)/2):
+ word = struct.unpack('<H', data[idx*2:(idx+1)*2])[0]
+ reloc_type = (word>>12)
+ reloc_offset = (word&0x0fff)
+ entries.append(
+ RelocationData(
+ type = reloc_type,
+ rva = reloc_offset+rva))
+
+ return entries
+
+
    def parse_debug_directory(self, rva, size):
        """Parse the debug directory into a list of DebugData entries."""

        dbg = Structure(self.__IMAGE_DEBUG_DIRECTORY_format__)
        dbg_size = dbg.sizeof()

        debug = []
        # The directory is a contiguous array of IMAGE_DEBUG_DIRECTORY
        # structures, 'size' bytes in total.
        for idx in xrange(size/dbg_size):
            try:
                data = self.get_data(rva+dbg_size*idx, dbg_size)
            except PEFormatError, e:
                self.__warnings.append(
                    'Invalid debug information. Can\'t read ' +
                    'data at RVA: 0x%x' % rva)
                return None

            dbg = self.__unpack_data__(
                self.__IMAGE_DEBUG_DIRECTORY_format__,
                data, file_offset = self.get_offset_from_rva(rva+dbg_size*idx))

            if not dbg:
                return None

            debug.append(
                DebugData(
                    struct = dbg))

        return debug
+
+
    def parse_resources_directory(self, rva, size=0, base_rva = None, level = 0):
        """Parse the resources directory.

        Given the rva of the resources directory, it will process all
        its entries.

        The root will have the corresponding member of its structure,
        IMAGE_RESOURCE_DIRECTORY plus 'entries', a list of all the
        entries in the directory.

        Those entries will have, correspondingly, all the structure's
        members (IMAGE_RESOURCE_DIRECTORY_ENTRY) and an additional one,
        "directory", pointing to the IMAGE_RESOURCE_DIRECTORY structure
        representing upper layers of the tree. This one will also have
        an 'entries' attribute, pointing to the 3rd, and last, level.
        Another directory with more entries. Those last entries will
        have a new attribute (both 'leaf' or 'data_entry' can be used to
        access it). This structure finally points to the resource data.
        All the members of this structure, IMAGE_RESOURCE_DATA_ENTRY,
        are available as its attributes.
        """

        # OC Patch:
        original_rva = rva

        # Entry offsets within the resource tree are relative to the root
        # directory's RVA; remember it across the recursive calls.
        if base_rva is None:
            base_rva = rva

        resources_section = self.get_section_by_rva(rva)

        try:
            # If the RVA is invalid all would blow up. Some EXEs seem to be
            # specially nasty and have an invalid RVA.
            data = self.get_data(rva)
        except PEFormatError, e:
            self.__warnings.append(
                'Invalid resources directory. Can\'t read ' +
                'directory data at RVA: 0x%x' % rva)
            return None

        # Get the resource directory structure, that is, the header
        # of the table preceding the actual entries
        #
        resource_dir = self.__unpack_data__(
            self.__IMAGE_RESOURCE_DIRECTORY_format__, data,
            file_offset = self.get_offset_from_rva(rva) )
        if resource_dir is None:
            # If can't parse resources directory then silently return.
            # This directory does not necessarily have to be valid to
            # still have a valid PE file
            self.__warnings.append(
                'Invalid resources directory. Can\'t parse ' +
                'directory data at RVA: 0x%x' % rva)
            return None

        dir_entries = []

        # Advance the rva to the position immediately following the directory
        # table header and pointing to the first entry in the table
        #
        rva += resource_dir.sizeof()

        # Named entries come first in the table, followed by Id entries.
        number_of_entries = (
            resource_dir.NumberOfNamedEntries +
            resource_dir.NumberOfIdEntries )

        strings_to_postprocess = list()

        for idx in xrange(number_of_entries):

            res = self.parse_resource_entry(rva)
            if res is None:
                self.__warnings.append(
                    'Error parsing the resources directory, ' +
                    'Entry %d is invalid, RVA = 0x%x. ' %
                    (idx, rva) )
                break


            entry_name = None
            entry_id = None

            # If all named entries have been processed, only Id ones
            # remain

            if idx >= resource_dir.NumberOfNamedEntries:
                entry_id = res.Name
            else:
                ustr_offset = base_rva+res.NameOffset
                try:
                    #entry_name = self.get_string_u_at_rva(ustr_offset, max_length=16)
                    entry_name = UnicodeStringWrapperPostProcessor(self, ustr_offset)
                    strings_to_postprocess.append(entry_name)

                except PEFormatError, excp:
                    self.__warnings.append(
                        'Error parsing the resources directory, ' +
                        'attempting to read entry name. ' +
                        'Can\'t read unicode string at offset 0x%x' %
                        (ustr_offset) )


            if res.DataIsDirectory:
                # OC Patch:
                #
                # One trick malware can do is to recursively reference
                # the next directory. This causes hilarity to ensue when
                # trying to parse everything correctly.
                # If the original RVA given to this function is equal to
                # the next one to parse, we assume that it's a trick.
                # Instead of raising a PEFormatError this would skip some
                # reasonable data so we just break.
                #
                # 9ee4d0a0caf095314fd7041a3e4404dc is the offending sample
                if original_rva == (base_rva + res.OffsetToDirectory):

                    break

                else:
                    entry_directory = self.parse_resources_directory(
                        base_rva+res.OffsetToDirectory,
                        base_rva=base_rva, level = level+1)

                    if not entry_directory:
                        break
                    dir_entries.append(
                        ResourceDirEntryData(
                            struct = res,
                            name = entry_name,
                            id = entry_id,
                            directory = entry_directory))

            else:
                struct = self.parse_resource_data_entry(
                    base_rva + res.OffsetToDirectory)

                if struct:
                    # Leaf node: the low byte of Name encodes the language,
                    # the next byte the sub-language.
                    entry_data = ResourceDataEntryData(
                        struct = struct,
                        lang = res.Name & 0xff,
                        sublang = (res.Name>>8) & 0xff)

                    dir_entries.append(
                        ResourceDirEntryData(
                            struct = res,
                            name = entry_name,
                            id = entry_id,
                            data = entry_data))

                else:
                    break



            # Check if this entry contains version information
            #
            if level == 0 and res.Id == RESOURCE_TYPE['RT_VERSION']:
                if len(dir_entries)>0:
                    last_entry = dir_entries[-1]

                rt_version_struct = None
                try:
                    rt_version_struct = last_entry.directory.entries[0].directory.entries[0].data.struct
                except:
                    # Maybe a malformed directory structure...?
                    # Lets ignore it
                    pass

                if rt_version_struct is not None:
                    self.parse_version_information(rt_version_struct)

            rva += res.sizeof()


        string_rvas = [s.get_rva() for s in strings_to_postprocess]
        string_rvas.sort()

        # Resolve the deferred name strings now that all entries are known.
        for idx, s in enumerate(strings_to_postprocess):
            s.render_pascal_16()


        resource_directory_data = ResourceDirData(
            struct = resource_dir,
            entries = dir_entries)

        return resource_directory_data
+
+
    def parse_resource_data_entry(self, rva):
        """Parse a data entry from the resources directory.

        Returns the unpacked IMAGE_RESOURCE_DATA_ENTRY structure, or None
        when the RVA is invalid or the data cannot be unpacked.
        """

        try:
            # If the RVA is invalid all would blow up. Some EXEs seem to be
            # specially nasty and have an invalid RVA.
            data = self.get_data(rva)
        except PEFormatError, excp:
            self.__warnings.append(
                'Error parsing a resource directory data entry, ' +
                'the RVA is invalid: 0x%x' % ( rva ) )
            return None

        data_entry = self.__unpack_data__(
            self.__IMAGE_RESOURCE_DATA_ENTRY_format__, data,
            file_offset = self.get_offset_from_rva(rva) )

        return data_entry
+
+
    def parse_resource_entry(self, rva):
        """Parse a directory entry from the resources directory."""

        resource = self.__unpack_data__(
            self.__IMAGE_RESOURCE_DIRECTORY_ENTRY_format__, self.get_data(rva),
            file_offset = self.get_offset_from_rva(rva) )

        if resource is None:
            return None

        # 'Name' is either an offset to a unicode string (high bit set)
        # or a plain integer Id in the low word.
        #resource.NameIsString = (resource.Name & 0x80000000L) >> 31
        resource.NameOffset = resource.Name & 0x7FFFFFFFL

        resource.__pad = resource.Name & 0xFFFF0000L
        resource.Id = resource.Name & 0x0000FFFFL

        # The high bit of OffsetToData selects between a subdirectory and
        # a data (leaf) entry; the remaining bits are the offset itself.
        resource.DataIsDirectory = (resource.OffsetToData & 0x80000000L) >> 31
        resource.OffsetToDirectory = resource.OffsetToData & 0x7FFFFFFFL

        return resource
+
+
    def parse_version_information(self, version_struct):
        """Parse version information structure.

        The data will be made available in three attributes of the PE object.

        VS_VERSIONINFO will contain the first three fields of the main structure:
        'Length', 'ValueLength', and 'Type'

        VS_FIXEDFILEINFO will hold the rest of the fields, accessible as sub-attributes:
        'Signature', 'StrucVersion', 'FileVersionMS', 'FileVersionLS',
        'ProductVersionMS', 'ProductVersionLS', 'FileFlagsMask', 'FileFlags',
        'FileOS', 'FileType', 'FileSubtype', 'FileDateMS', 'FileDateLS'

        FileInfo is a list of all StringFileInfo and VarFileInfo structures.

        StringFileInfo structures will have a list as an attribute named 'StringTable'
        containing all the StringTable structures. Each of those structures contains a
        dictionary 'entries' with all the key/value version information string pairs.

        VarFileInfo structures will have a list as an attribute named 'Var' containing
        all Var structures. Each Var structure will have a dictionary as an attribute
        named 'entry' which will contain the name and value of the Var.
        """


        # Retrieve the data for the version info resource
        #
        start_offset = self.get_offset_from_rva( version_struct.OffsetToData )
        raw_data = self.__data__[ start_offset : start_offset+version_struct.Size ]


        # Map the main structure and the subsequent string
        #
        versioninfo_struct = self.__unpack_data__(
            self.__VS_VERSIONINFO_format__, raw_data,
            file_offset = start_offset )

        if versioninfo_struct is None:
            return

        # The VS_VERSIONINFO header is immediately followed by its
        # identifying UTF-16 name string.
        ustr_offset = version_struct.OffsetToData + versioninfo_struct.sizeof()
        try:
            versioninfo_string = self.get_string_u_at_rva( ustr_offset )
        except PEFormatError, excp:
            self.__warnings.append(
                'Error parsing the version information, ' +
                'attempting to read VS_VERSION_INFO string. Can\'t ' +
                'read unicode string at offset 0x%x' % (
                ustr_offset ) )

            versioninfo_string = None

        # If the structure does not contain the expected name, it's assumed to be invalid
        #
        if versioninfo_string != u'VS_VERSION_INFO':

            self.__warnings.append('Invalid VS_VERSION_INFO block')
            return


        # Set the PE object's VS_VERSIONINFO to this one
        #
        self.VS_VERSIONINFO = versioninfo_struct

        # Set the Key attribute to point to the unicode string identifying the structure
        #
        self.VS_VERSIONINFO.Key = versioninfo_string


        # Process the fixed version information, get the offset and structure
        #
        fixedfileinfo_offset = self.dword_align(
            versioninfo_struct.sizeof() + 2 * (len(versioninfo_string) + 1),
            version_struct.OffsetToData)
        fixedfileinfo_struct = self.__unpack_data__(
            self.__VS_FIXEDFILEINFO_format__,
            raw_data[fixedfileinfo_offset:],
            file_offset = start_offset+fixedfileinfo_offset )

        if not fixedfileinfo_struct:
            return


        # Set the PE object's VS_FIXEDFILEINFO to this one
        #
        self.VS_FIXEDFILEINFO = fixedfileinfo_struct


        # Start parsing all the StringFileInfo and VarFileInfo structures
        #

        # Get the first one
        #
        stringfileinfo_offset = self.dword_align(
            fixedfileinfo_offset + fixedfileinfo_struct.sizeof(),
            version_struct.OffsetToData)
        original_stringfileinfo_offset = stringfileinfo_offset


        # Set the PE object's attribute that will contain them all.
        #
        self.FileInfo = list()


        while True:

            # Process the StringFileInfo/VarFileInfo struct
            #
            stringfileinfo_struct = self.__unpack_data__(
                self.__StringFileInfo_format__,
                raw_data[stringfileinfo_offset:],
                file_offset = start_offset+stringfileinfo_offset )

            if stringfileinfo_struct is None:
                self.__warnings.append(
                    'Error parsing StringFileInfo/VarFileInfo struct' )
                return None

            # Get the subsequent string defining the structure.
            #
            ustr_offset = ( version_struct.OffsetToData +
                stringfileinfo_offset + versioninfo_struct.sizeof() )
            try:
                stringfileinfo_string = self.get_string_u_at_rva( ustr_offset )
            except PEFormatError, excp:
                self.__warnings.append(
                    'Error parsing the version information, ' +
                    'attempting to read StringFileInfo string. Can\'t ' +
                    'read unicode string at offset 0x%x' % ( ustr_offset ) )
                break

            # Set such string as the Key attribute
            #
            stringfileinfo_struct.Key = stringfileinfo_string


            # Append the structure to the PE object's list
            #
            self.FileInfo.append(stringfileinfo_struct)


            # Parse a StringFileInfo entry
            #
            if stringfileinfo_string == u'StringFileInfo':

                if stringfileinfo_struct.Type == 1 and stringfileinfo_struct.ValueLength == 0:

                    stringtable_offset = self.dword_align(
                        stringfileinfo_offset + stringfileinfo_struct.sizeof() +
                        2*(len(stringfileinfo_string)+1),
                        version_struct.OffsetToData)

                    stringfileinfo_struct.StringTable = list()

                    # Process the String Table entries
                    #
                    while True:
                        stringtable_struct = self.__unpack_data__(
                            self.__StringTable_format__,
                            raw_data[stringtable_offset:],
                            file_offset = start_offset+stringtable_offset )

                        if not stringtable_struct:
                            break

                        ustr_offset = ( version_struct.OffsetToData + stringtable_offset +
                            stringtable_struct.sizeof() )
                        try:
                            stringtable_string = self.get_string_u_at_rva( ustr_offset )
                        except PEFormatError, excp:
                            self.__warnings.append(
                                'Error parsing the version information, ' +
                                'attempting to read StringTable string. Can\'t ' +
                                'read unicode string at offset 0x%x' % ( ustr_offset ) )
                            break

                        stringtable_struct.LangID = stringtable_string
                        stringtable_struct.entries = dict()
                        stringtable_struct.entries_offsets = dict()
                        stringtable_struct.entries_lengths = dict()
                        stringfileinfo_struct.StringTable.append(stringtable_struct)

                        entry_offset = self.dword_align(
                            stringtable_offset + stringtable_struct.sizeof() +
                            2*(len(stringtable_string)+1),
                            version_struct.OffsetToData)

                        # Process all entries in the string table
                        #

                        while entry_offset < stringtable_offset + stringtable_struct.Length:

                            string_struct = self.__unpack_data__(
                                self.__String_format__, raw_data[entry_offset:],
                                file_offset = start_offset+entry_offset )

                            if not string_struct:
                                break

                            ustr_offset = ( version_struct.OffsetToData + entry_offset +
                                string_struct.sizeof() )
                            try:
                                key = self.get_string_u_at_rva( ustr_offset )
                                key_offset = self.get_offset_from_rva( ustr_offset )
                            except PEFormatError, excp:
                                self.__warnings.append(
                                    'Error parsing the version information, ' +
                                    'attempting to read StringTable Key string. Can\'t ' +
                                    'read unicode string at offset 0x%x' % ( ustr_offset ) )
                                break

                            value_offset = self.dword_align(
                                2*(len(key)+1) + entry_offset + string_struct.sizeof(),
                                version_struct.OffsetToData)

                            ustr_offset = version_struct.OffsetToData + value_offset
                            try:
                                value = self.get_string_u_at_rva( ustr_offset,
                                    max_length = string_struct.ValueLength )
                                value_offset = self.get_offset_from_rva( ustr_offset )
                            except PEFormatError, excp:
                                self.__warnings.append(
                                    'Error parsing the version information, ' +
                                    'attempting to read StringTable Value string. ' +
                                    'Can\'t read unicode string at offset 0x%x' % (
                                    ustr_offset ) )
                                break

                            # A zero-length entry would never advance the
                            # cursor; jump to the end of the table instead.
                            if string_struct.Length == 0:
                                entry_offset = stringtable_offset + stringtable_struct.Length
                            else:
                                entry_offset = self.dword_align(
                                    string_struct.Length+entry_offset, version_struct.OffsetToData)

                            # Escape non-ASCII characters so the key can also
                            # be used as a Python attribute name.
                            key_as_char = []
                            for c in key:
                                if ord(c)>128:
                                    key_as_char.append('\\x%02x' %ord(c))
                                else:
                                    key_as_char.append(c)

                            key_as_char = ''.join(key_as_char)

                            setattr(stringtable_struct, key_as_char, value)
                            stringtable_struct.entries[key] = value
                            stringtable_struct.entries_offsets[key] = (key_offset, value_offset)
                            stringtable_struct.entries_lengths[key] = (len(key), len(value))


                        stringtable_offset = self.dword_align(
                            stringtable_struct.Length + stringtable_offset,
                            version_struct.OffsetToData)
                        if stringtable_offset >= stringfileinfo_struct.Length:
                            break

            # Parse a VarFileInfo entry
            #
            elif stringfileinfo_string == u'VarFileInfo':

                varfileinfo_struct = stringfileinfo_struct
                varfileinfo_struct.name = 'VarFileInfo'

                if varfileinfo_struct.Type == 1 and varfileinfo_struct.ValueLength == 0:

                    var_offset = self.dword_align(
                        stringfileinfo_offset + varfileinfo_struct.sizeof() +
                        2*(len(stringfileinfo_string)+1),
                        version_struct.OffsetToData)

                    varfileinfo_struct.Var = list()

                    # Process all entries
                    #

                    while True:
                        var_struct = self.__unpack_data__(
                            self.__Var_format__,
                            raw_data[var_offset:],
                            file_offset = start_offset+var_offset )

                        if not var_struct:
                            break

                        ustr_offset = ( version_struct.OffsetToData + var_offset +
                            var_struct.sizeof() )
                        try:
                            var_string = self.get_string_u_at_rva( ustr_offset )
                        except PEFormatError, excp:
                            self.__warnings.append(
                                'Error parsing the version information, ' +
                                'attempting to read VarFileInfo Var string. ' +
                                'Can\'t read unicode string at offset 0x%x' % (ustr_offset))
                            break


                        varfileinfo_struct.Var.append(var_struct)

                        varword_offset = self.dword_align(
                            2*(len(var_string)+1) + var_offset + var_struct.sizeof(),
                            version_struct.OffsetToData)
                        orig_varword_offset = varword_offset

                        # The Var value is read as pairs of 16-bit words.
                        while varword_offset < orig_varword_offset + var_struct.ValueLength:
                            word1 = self.get_word_from_data(
                                raw_data[varword_offset:varword_offset+2], 0)
                            word2 = self.get_word_from_data(
                                raw_data[varword_offset+2:varword_offset+4], 0)
                            varword_offset += 4

                            var_struct.entry = {var_string: '0x%04x 0x%04x' % (word1, word2)}

                        var_offset = self.dword_align(
                            var_offset+var_struct.Length, version_struct.OffsetToData)

                        # NOTE(review): this condition is always true for a
                        # non-negative Length, so only the first Var structure
                        # is ever processed — looks suspicious; confirm intent.
                        if var_offset <= var_offset+var_struct.Length:
                            break



            # Increment and align the offset
            #
            stringfileinfo_offset = self.dword_align(
                stringfileinfo_struct.Length+stringfileinfo_offset,
                version_struct.OffsetToData)

            # Check if all the StringFileInfo and VarFileInfo items have been processed
            #
            if stringfileinfo_struct.Length == 0 or stringfileinfo_offset >= versioninfo_struct.Length:
                break
+
+
+
    def parse_export_directory(self, rva, size):
        """Parse the export directory.

        Given the rva of the export directory, it will process all
        its entries.

        The exports will be made available through a list "exports"
        containing a tuple with the following elements:

            (ordinal, symbol_address, symbol_name)

        And also through a dictionary "exports_by_ordinal" whose keys
        will be the ordinals and the values tuples of the form:

            (symbol_address, symbol_name)

        The symbol addresses are relative, not absolute.
        """

        try:
            export_dir = self.__unpack_data__(
                self.__IMAGE_EXPORT_DIRECTORY_format__, self.get_data(rva),
                file_offset = self.get_offset_from_rva(rva) )
        except PEFormatError:
            self.__warnings.append(
                'Error parsing export directory at RVA: 0x%x' % ( rva ) )
            return

        if not export_dir:
            return

        # Fetch the three parallel arrays referenced by the directory:
        # names, name ordinals and function addresses.
        try:
            address_of_names = self.get_data(
                export_dir.AddressOfNames, export_dir.NumberOfNames*4)
            address_of_name_ordinals = self.get_data(
                export_dir.AddressOfNameOrdinals, export_dir.NumberOfNames*4)
            address_of_functions = self.get_data(
                export_dir.AddressOfFunctions, export_dir.NumberOfFunctions*4)
        except PEFormatError:
            self.__warnings.append(
                'Error parsing export directory at RVA: 0x%x' % ( rva ) )
            return

        exports = []

        # First pass: exports that have a name entry.
        for i in xrange(export_dir.NumberOfNames):


            symbol_name = self.get_string_at_rva(
                self.get_dword_from_data(address_of_names, i))

            symbol_ordinal = self.get_word_from_data(
                address_of_name_ordinals, i)


            if symbol_ordinal*4<len(address_of_functions):
                symbol_address = self.get_dword_from_data(
                    address_of_functions, symbol_ordinal)
            else:
                # Corrupt? a bad pointer... we assume it's all
                # useless, no exports
                return None

            # If the function's rva points within the export directory
            # it will point to a string with the forwarded symbol's string
            # instead of pointing to the function start address.

            if symbol_address>=rva and symbol_address<rva+size:
                forwarder_str = self.get_string_at_rva(symbol_address)
            else:
                forwarder_str = None


            exports.append(
                ExportData(
                    ordinal = export_dir.Base+symbol_ordinal,
                    address = symbol_address,
                    name = symbol_name,
                    forwarder = forwarder_str))

        ordinals = [exp.ordinal for exp in exports]

        # Second pass: exports present only by ordinal (no name entry).
        for idx in xrange(export_dir.NumberOfFunctions):

            if not idx+export_dir.Base in ordinals:
                symbol_address = self.get_dword_from_data(
                    address_of_functions,
                    idx)

                #
                # Checking for forwarder again.
                #
                if symbol_address>=rva and symbol_address<rva+size:
                    forwarder_str = self.get_string_at_rva(symbol_address)
                else:
                    forwarder_str = None

                exports.append(
                    ExportData(
                        ordinal = export_dir.Base+idx,
                        address = symbol_address,
                        name = None,
                        forwarder = forwarder_str))

        return ExportDirData(
                struct = export_dir,
                symbols = exports)
+
+
+ def dword_align(self, offset, base):
+ offset += base
+ return (offset+3) - ((offset+3)%4) - base
+
+
+
    def parse_delay_import_directory(self, rva, size):
        """Walk and parse the delay import directory."""

        import_descs = []
        while True:
            try:
                # If the RVA is invalid all would blow up. Some PEs seem to be
                # specially nasty and have an invalid RVA.
                data = self.get_data(rva)
            except PEFormatError, e:
                self.__warnings.append(
                    'Error parsing the Delay import directory at RVA: 0x%x' % ( rva ) )
                break

            import_desc = self.__unpack_data__(
                self.__IMAGE_DELAY_IMPORT_DESCRIPTOR_format__,
                data, file_offset = self.get_offset_from_rva(rva) )


            # If the structure is all zeroes, we reached the end of the list
            if not import_desc or import_desc.all_zeroes():
                break


            rva += import_desc.sizeof()

            try:
                # pINT/pIAT are the delay-load import name and address tables.
                import_data = self.parse_imports(
                    import_desc.pINT,
                    import_desc.pIAT,
                    None)
            except PEFormatError, e:
                self.__warnings.append(
                    'Error parsing the Delay import directory. ' +
                    'Invalid import data at RVA: 0x%x' % ( rva ) )
                break

            # Descriptors that yield no usable imports are skipped.
            if not import_data:
                continue


            dll = self.get_string_at_rva(import_desc.szName)
            if dll:
                import_descs.append(
                    ImportDescData(
                        struct = import_desc,
                        imports = import_data,
                        dll = dll))

        return import_descs
+
+
+
    def parse_import_directory(self, rva, size):
        """Walk and parse the import directory."""

        import_descs = []
        while True:
            try:
                # If the RVA is invalid all would blow up. Some EXEs seem to be
                # specially nasty and have an invalid RVA.
                data = self.get_data(rva)
            except PEFormatError, e:
                self.__warnings.append(
                    'Error parsing the Import directory at RVA: 0x%x' % ( rva ) )
                break

            import_desc = self.__unpack_data__(
                self.__IMAGE_IMPORT_DESCRIPTOR_format__,
                data, file_offset = self.get_offset_from_rva(rva) )

            # If the structure is all zeroes, we reached the end of the list
            if not import_desc or import_desc.all_zeroes():
                break

            rva += import_desc.sizeof()

            try:
                import_data = self.parse_imports(
                    import_desc.OriginalFirstThunk,
                    import_desc.FirstThunk,
                    import_desc.ForwarderChain)
            except PEFormatError, excp:
                self.__warnings.append(
                    'Error parsing the Import directory. ' +
                    'Invalid Import data at RVA: 0x%x' % ( rva ) )
                break
                #raise excp

            # Descriptors that yield no usable imports are skipped.
            if not import_data:
                continue

            dll = self.get_string_at_rva(import_desc.Name)
            if dll:
                import_descs.append(
                    ImportDescData(
                        struct = import_desc,
                        imports = import_data,
                        dll = dll))

        return import_descs
+
+
+
    def parse_imports(self, original_first_thunk, first_thunk, forwarder_chain):
        """Parse the imported symbols.

        It will fill a list, which will be available as the dictionary
        attribute "imports". Its keys will be the DLL names and the values
        all the symbols imported from that object.
        """

        imported_symbols = []
        imports_section = self.get_section_by_rva(first_thunk)
        if not imports_section:
            raise PEFormatError, 'Invalid/corrupt imports.'


        # Import Lookup Table. Contains ordinals or pointers to strings.
        ilt = self.get_import_table(original_first_thunk)
        # Import Address Table. May have identical content to ILT if
        # PE file is not bounded, Will contain the address of the
        # imported symbols once the binary is loaded or if it is already
        # bound.
        iat = self.get_import_table(first_thunk)

        # OC Patch:
        # Would crash if iat or ilt had None type
        if not iat and not ilt:
            raise PEFormatError(
                'Invalid Import Table information. ' +
                'Both ILT and IAT appear to be broken.')

        # Prefer whichever table is usable; the ILT wins when both carry
        # the same number of entries.
        if not iat and ilt:
            table = ilt
        elif iat and not ilt:
            table = iat
        elif ilt and ((len(ilt) and len(iat)==0) or (len(ilt) == len(iat))):
            table = ilt
        elif (ilt and len(ilt))==0 and (iat and len(iat)):
            table = iat
        else:
            return None

        for idx in xrange(len(table)):

            imp_ord = None
            imp_hint = None
            imp_name = None
            hint_name_table_rva = None

            if table[idx].AddressOfData:

                # The ordinal flag is the thunk's high bit; its position
                # depends on the PE word size.
                if self.PE_TYPE == OPTIONAL_HEADER_MAGIC_PE:
                    ordinal_flag = IMAGE_ORDINAL_FLAG
                elif self.PE_TYPE == OPTIONAL_HEADER_MAGIC_PE_PLUS:
                    ordinal_flag = IMAGE_ORDINAL_FLAG64

                # If imported by ordinal, we will append the ordinal number
                #
                if table[idx].AddressOfData & ordinal_flag:
                    import_by_ordinal = True
                    imp_ord = table[idx].AddressOfData & 0xffff
                    imp_name = None
                else:
                    import_by_ordinal = False
                    try:
                        hint_name_table_rva = table[idx].AddressOfData & 0x7fffffff
                        data = self.get_data(hint_name_table_rva, 2)
                        # Get the Hint
                        imp_hint = self.get_word_from_data(data, 0)
                        imp_name = self.get_string_at_rva(table[idx].AddressOfData+2)
                    except PEFormatError, e:
                        pass

            # NOTE(review): thunk entries are 8 bytes wide on PE32+, so the
            # fixed 4-byte stride here looks 32-bit-specific — confirm.
            imp_address = first_thunk+self.OPTIONAL_HEADER.ImageBase+idx*4

            # A differing IAT entry means the import has been pre-bound.
            if iat and ilt and ilt[idx].AddressOfData != iat[idx].AddressOfData:
                imp_bound = iat[idx].AddressOfData
            else:
                imp_bound = None

            if imp_name != '' and (imp_ord or imp_name):
                imported_symbols.append(
                    ImportData(
                        import_by_ordinal = import_by_ordinal,
                        ordinal = imp_ord,
                        hint = imp_hint,
                        name = imp_name,
                        bound = imp_bound,
                        address = imp_address,
                        hint_name_table_rva = hint_name_table_rva))

        return imported_symbols
+
+
+
    def get_import_table(self, rva):
        """Walk the thunk array at 'rva' and return the thunk structures.

        Stops at the first all-zero thunk (the list terminator). Returns
        None when the data at 'rva' cannot be read.
        """

        table = []

        # 'True and rva' is equivalent to just 'rva': a zero/None RVA
        # yields an empty table.
        while True and rva:
            try:
                data = self.get_data(rva)
            except PEFormatError, e:
                self.__warnings.append(
                    'Error parsing the import table. ' +
                    'Invalid data at RVA: 0x%x' % ( rva ) )
                return None

            # The thunk layout differs between PE32 and PE32+.
            if self.PE_TYPE == OPTIONAL_HEADER_MAGIC_PE:
                format = self.__IMAGE_THUNK_DATA_format__
            elif self.PE_TYPE == OPTIONAL_HEADER_MAGIC_PE_PLUS:
                format = self.__IMAGE_THUNK_DATA64_format__

            thunk_data = self.__unpack_data__(
                format, data, file_offset=self.get_offset_from_rva(rva) )

            if not thunk_data or thunk_data.all_zeroes():
                break

            rva += thunk_data.sizeof()

            table.append(thunk_data)

        return table
+
+
def get_memory_mapped_image(self, max_virtual_address=0x10000000, ImageBase=None):
    """Returns the data corresponding to the memory layout of the PE file.

    The data includes the PE header and the sections loaded at offsets
    corresponding to their relative virtual addresses. (the VirtualAddress
    section header member).
    Any offset in this data corresponds to the absolute memory address
    ImageBase+offset.

    The optional argument 'max_virtual_address' provides with means of limiting
    which section are processed.
    Any section with their VirtualAddress beyond this value will be skipped.
    Normally, sections with values beyond this range are just there to confuse
    tools. It's a common trick to see in packed executables.

    If the 'ImageBase' optional argument is supplied, the file's relocations
    will be applied to the image by calling the 'relocate_image()' method.
    """

    # NOTE(review): the docstring mentions relocation via 'ImageBase', but the
    # visible body never reads that argument -- confirm against upstream.

    # Collect all sections in one code block
    data = self.header
    for section in self.sections:

        # Miscellanous integrity tests.
        # Some packer will set these to bogus values to
        # make tools go nuts.
        #
        if section.Misc_VirtualSize == 0 or section.SizeOfRawData == 0:
            continue

        if section.SizeOfRawData > len(self.__data__):
            continue

        if section.PointerToRawData > len(self.__data__):
            continue

        if section.VirtualAddress >= max_virtual_address:
            continue

        # Pad (or truncate the accumulated image) so this section's data
        # starts exactly at its VirtualAddress.
        padding_length = section.VirtualAddress - len(data)

        if padding_length>0:
            data += '\0'*padding_length
        elif padding_length<0:
            data = data[:padding_length]

        data += section.data

    return data
+
+
def get_data(self, rva, length=None):
    """Get data regardless of the section where it lies on.

    Given a rva and the size of the chunk to retrieve, this method
    will find the section where the data lies and return the data.
    """

    s = self.get_section_by_rva(rva)

    if not s:
        # No section covers the RVA; it may still fall inside the headers,
        # which precede the first section.
        if rva<len(self.header):
            if length:
                end = rva+length
            else:
                end = None
            return self.header[rva:end]

        raise PEFormatError, 'data at RVA can\'t be fetched. Corrupt header?'

    return s.get_data(rva, length)
+
+
def get_rva_from_offset(self, offset):
    """Map a raw file offset to its relative virtual address.

    Raises PEFormatError when no section contains the offset.
    """
    section = self.get_section_by_offset(offset)
    if section:
        return section.get_rva_from_offset(offset)
    raise PEFormatError("specified offset (0x%x) doesn't belong to any section." % offset)
+
def get_offset_from_rva(self, rva):
    """Get the file offset corresponding to this rva.

    Given a rva , this method will find the section where the
    data lies and return the offset within the file.
    """

    s = self.get_section_by_rva(rva)
    if not s:

        # Unlike get_data(), there is no fallback to the header area here:
        # an unmapped RVA is always an error.
        raise PEFormatError, 'data at RVA can\'t be fetched. Corrupt header?'

    return s.get_offset_from_rva(rva)
+
+
def get_string_at_rva(self, rva):
    """Fetch the NUL-terminated ASCII string starting at the given RVA."""

    section = self.get_section_by_rva(rva)
    if section:
        # Translate the RVA into an offset relative to the section start.
        return self.get_string_from_data(rva-section.VirtualAddress, section.data)

    # Not covered by any section: the string may live in the header area.
    if rva < len(self.header):
        return self.get_string_from_data(rva, self.header)
    return None
+
+
def get_string_from_data(self, offset, data):
    """Extract the NUL-terminated ASCII string found in 'data' at 'offset'.

    Returns the empty string when the offset is out of range.
    """

    # OC Patch: tolerate offsets outside the buffer.
    try:
        char = data[offset]
    except IndexError:
        return ''

    chars = []
    while ord(char):
        chars.append(char)
        offset += 1
        try:
            char = data[offset]
        except IndexError:
            break

    return ''.join(chars)
+
+
def get_string_u_at_rva(self, rva, max_length = 2**16):
    """Get an Unicode string located at the given address."""

    try:
        # If the RVA is invalid all would blow up. Some EXEs seem to be
        # specially nasty and have an invalid RVA.
        data = self.get_data(rva, 2)
    except PEFormatError, e:
        return None

    #length = struct.unpack('<H', data)[0]

    # Read little-endian 16-bit code units one at a time until a NUL
    # terminator, unreadable data, or 'max_length' characters.
    s = u''
    for idx in xrange(max_length):
        try:
            uchr = struct.unpack('<H', self.get_data(rva+2*idx, 2))[0]
        except struct.error:
            break

        if unichr(uchr) == u'\0':
            break
        s += unichr(uchr)

    return s
+
+
def get_section_by_offset(self, offset):
    """Return the first section containing the given file offset, or None."""

    for section in self.sections:
        if section.contains_offset(offset):
            return section
    return None
+
+
def get_section_by_rva(self, rva):
    """Return the first section containing the given RVA, or None."""

    return next(
        (section for section in self.sections if section.contains_rva(rva)),
        None)
+
def __str__(self):
    # Stringifying an instance yields the full human-readable dump.
    return self.dump_info()
+
+
def print_info(self):
    """Print all the PE header information in a human readable form."""
    print self.dump_info()
+
+
def dump_info(self, dump=None):
    """Dump all the PE header information into human readable string.

    Accepts an optional Dump accumulator; a fresh one is created when
    omitted.  Sections of the report are emitted only when the
    corresponding directory entry attribute exists on the instance.
    """


    if dump is None:
        dump = Dump()

    # Any warnings collected while parsing come first.
    warnings = self.get_warnings()
    if warnings:
        dump.add_header('Parsing Warnings')
        for warning in warnings:
            dump.add_line(warning)
        dump.add_newline()


    dump.add_header('DOS_HEADER')
    dump.add_lines(self.DOS_HEADER.dump())
    dump.add_newline()

    dump.add_header('NT_HEADERS')
    dump.add_lines(self.NT_HEADERS.dump())
    dump.add_newline()

    dump.add_header('FILE_HEADER')
    dump.add_lines(self.FILE_HEADER.dump())

    image_flags = self.retrieve_flags(IMAGE_CHARACTERISTICS, 'IMAGE_FILE_')

    # Decode the characteristics bitfield into symbolic flag names.
    dump.add('Flags: ')
    flags = []
    for flag in image_flags:
        if getattr(self.FILE_HEADER, flag[0]):
            flags.append(flag[0])
    dump.add_line(', '.join(flags))
    dump.add_newline()

    if hasattr(self, 'OPTIONAL_HEADER') and self.OPTIONAL_HEADER is not None:
        dump.add_header('OPTIONAL_HEADER')
        dump.add_lines(self.OPTIONAL_HEADER.dump())

    dll_characteristics_flags = self.retrieve_flags(DLL_CHARACTERISTICS, 'IMAGE_DLL_CHARACTERISTICS_')

    # NOTE(review): this block reads self.OPTIONAL_HEADER unconditionally,
    # unlike the guarded dump just above -- confirm OPTIONAL_HEADER is
    # always present when dump_info() is called.
    dump.add('DllCharacteristics: ')
    flags = []
    for flag in dll_characteristics_flags:
        if getattr(self.OPTIONAL_HEADER, flag[0]):
            flags.append(flag[0])
    dump.add_line(', '.join(flags))
    dump.add_newline()


    dump.add_header('PE Sections')

    section_flags = self.retrieve_flags(SECTION_CHARACTERISTICS, 'IMAGE_SCN_')

    for section in self.sections:
        dump.add_lines(section.dump())
        dump.add('Flags: ')
        flags = []
        for flag in section_flags:
            if getattr(section, flag[0]):
                flags.append(flag[0])
        dump.add_line(', '.join(flags))
        dump.add_line('Entropy: %f (Min=0.0, Max=8.0)' % section.get_entropy() )
        # Hashes are only emitted when the corresponding digest module was
        # successfully imported.
        if md5 is not None:
            dump.add_line('MD5 hash: %s' % section.get_hash_md5() )
        if sha1 is not None:
            dump.add_line('SHA-1 hash: %s' % section.get_hash_sha1() )
        if sha256 is not None:
            dump.add_line('SHA-256 hash: %s' % section.get_hash_sha256() )
        if sha512 is not None:
            dump.add_line('SHA-512 hash: %s' % section.get_hash_sha512() )
        dump.add_newline()



    if (hasattr(self, 'OPTIONAL_HEADER') and
        hasattr(self.OPTIONAL_HEADER, 'DATA_DIRECTORY') ):

        dump.add_header('Directories')
        for idx in xrange(len(self.OPTIONAL_HEADER.DATA_DIRECTORY)):
            directory = self.OPTIONAL_HEADER.DATA_DIRECTORY[idx]
            dump.add_lines(directory.dump())
        dump.add_newline()


    if hasattr(self, 'VS_VERSIONINFO'):
        dump.add_header('Version Information')
        dump.add_lines(self.VS_VERSIONINFO.dump())
        dump.add_newline()

        if hasattr(self, 'VS_FIXEDFILEINFO'):
            dump.add_lines(self.VS_FIXEDFILEINFO.dump())
            dump.add_newline()

        if hasattr(self, 'FileInfo'):
            for entry in self.FileInfo:
                dump.add_lines(entry.dump())
                dump.add_newline()

                # A FileInfo entry carries either a StringTable or a Var
                # structure.
                if hasattr(entry, 'StringTable'):
                    for st_entry in entry.StringTable:
                        [dump.add_line(' '+line) for line in st_entry.dump()]
                        dump.add_line(' LangID: '+st_entry.LangID)
                        dump.add_newline()
                        for str_entry in st_entry.entries.items():
                            dump.add_line(' '+str_entry[0]+': '+str_entry[1])
                    dump.add_newline()

                elif hasattr(entry, 'Var'):
                    for var_entry in entry.Var:
                        if hasattr(var_entry, 'entry'):
                            [dump.add_line(' '+line) for line in var_entry.dump()]
                            dump.add_line(
                                ' ' + var_entry.entry.keys()[0] +
                                ': ' + var_entry.entry.values()[0])

                    dump.add_newline()



    if hasattr(self, 'DIRECTORY_ENTRY_EXPORT'):
        dump.add_header('Exported symbols')
        dump.add_lines(self.DIRECTORY_ENTRY_EXPORT.struct.dump())
        dump.add_newline()
        dump.add_line('%-10s %-10s %s' % ('Ordinal', 'RVA', 'Name'))
        for export in self.DIRECTORY_ENTRY_EXPORT.symbols:
            dump.add('%-10d 0x%08Xh %s' % (
                export.ordinal, export.address, export.name))
            if export.forwarder:
                dump.add_line(' forwarder: %s' % export.forwarder)
            else:
                dump.add_newline()

        dump.add_newline()

    if hasattr(self, 'DIRECTORY_ENTRY_IMPORT'):
        dump.add_header('Imported symbols')
        for module in self.DIRECTORY_ENTRY_IMPORT:
            dump.add_lines(module.struct.dump())
            dump.add_newline()
            for symbol in module.imports:

                if symbol.import_by_ordinal is True:
                    dump.add('%s Ordinal[%s] (Imported by Ordinal)' % (
                        module.dll, str(symbol.ordinal)))
                else:
                    dump.add('%s.%s Hint[%s]' % (
                        module.dll, symbol.name, str(symbol.hint)))

                if symbol.bound:
                    dump.add_line(' Bound: 0x%08X' % (symbol.bound))
                else:
                    dump.add_newline()
            dump.add_newline()


    if hasattr(self, 'DIRECTORY_ENTRY_BOUND_IMPORT'):
        dump.add_header('Bound imports')
        for bound_imp_desc in self.DIRECTORY_ENTRY_BOUND_IMPORT:

            dump.add_lines(bound_imp_desc.struct.dump())
            dump.add_line('DLL: %s' % bound_imp_desc.name)
            dump.add_newline()

            for bound_imp_ref in bound_imp_desc.entries:
                dump.add_lines(bound_imp_ref.struct.dump(), 4)
                dump.add_line('DLL: %s' % bound_imp_ref.name, 4)
                dump.add_newline()


    if hasattr(self, 'DIRECTORY_ENTRY_DELAY_IMPORT'):
        dump.add_header('Delay Imported symbols')
        for module in self.DIRECTORY_ENTRY_DELAY_IMPORT:

            dump.add_lines(module.struct.dump())
            dump.add_newline()

            for symbol in module.imports:
                if symbol.import_by_ordinal is True:
                    dump.add('%s Ordinal[%s] (Imported by Ordinal)' % (
                        module.dll, str(symbol.ordinal)))
                else:
                    dump.add('%s.%s Hint[%s]' % (
                        module.dll, symbol.name, str(symbol.hint)))

                if symbol.bound:
                    dump.add_line(' Bound: 0x%08X' % (symbol.bound))
                else:
                    dump.add_newline()
            dump.add_newline()


    if hasattr(self, 'DIRECTORY_ENTRY_RESOURCE'):
        dump.add_header('Resource directory')

        dump.add_lines(self.DIRECTORY_ENTRY_RESOURCE.struct.dump())

        # Walk the three-level resource tree: type -> id -> language.
        for resource_type in self.DIRECTORY_ENTRY_RESOURCE.entries:

            if resource_type.name is not None:
                dump.add_line('Name: [%s]' % resource_type.name, 2)
            else:
                dump.add_line('Id: [0x%X] (%s)' % (
                    resource_type.struct.Id, RESOURCE_TYPE.get(
                        resource_type.struct.Id, '-')),
                    2)

            dump.add_lines(resource_type.struct.dump(), 2)

            if hasattr(resource_type, 'directory'):

                dump.add_lines(resource_type.directory.struct.dump(), 4)

                for resource_id in resource_type.directory.entries:

                    if resource_id.name is not None:
                        dump.add_line('Name: [%s]' % resource_id.name, 6)
                    else:
                        dump.add_line('Id: [0x%X]' % resource_id.struct.Id, 6)

                    dump.add_lines(resource_id.struct.dump(), 6)

                    if hasattr(resource_id, 'directory'):
                        dump.add_lines(resource_id.directory.struct.dump(), 8)

                        for resource_lang in resource_id.directory.entries:
                            # dump.add_line('\\--- LANG [%d,%d][%s]' % (
                            #     resource_lang.data.lang,
                            #     resource_lang.data.sublang,
                            #     LANG[resource_lang.data.lang]), 8)
                            dump.add_lines(resource_lang.struct.dump(), 10)
                            dump.add_lines(resource_lang.data.struct.dump(), 12)
            dump.add_newline()

        dump.add_newline()


    if ( hasattr(self, 'DIRECTORY_ENTRY_TLS') and
         self.DIRECTORY_ENTRY_TLS and
         self.DIRECTORY_ENTRY_TLS.struct ):

        dump.add_header('TLS')
        dump.add_lines(self.DIRECTORY_ENTRY_TLS.struct.dump())
        dump.add_newline()


    if hasattr(self, 'DIRECTORY_ENTRY_DEBUG'):
        dump.add_header('Debug information')
        for dbg in self.DIRECTORY_ENTRY_DEBUG:
            dump.add_lines(dbg.struct.dump())
            try:
                dump.add_line('Type: '+DEBUG_TYPE[dbg.struct.Type])
            except KeyError:
                dump.add_line('Type: 0x%x(Unknown)' % dbg.struct.Type)
            dump.add_newline()


    if hasattr(self, 'DIRECTORY_ENTRY_BASERELOC'):
        dump.add_header('Base relocations')
        for base_reloc in self.DIRECTORY_ENTRY_BASERELOC:
            dump.add_lines(base_reloc.struct.dump())
            for reloc in base_reloc.entries:
                try:
                    dump.add_line('%08Xh %s' % (
                        reloc.rva, RELOCATION_TYPE[reloc.type][16:]), 4)
                except KeyError:
                    dump.add_line('0x%08X 0x%x(Unknown)' % (
                        reloc.rva, reloc.type), 4)
            dump.add_newline()


    return dump.get_text()
+
# OC Patch
def get_physical_by_rva(self, rva):
    """Gets the physical address in the PE file from an RVA value."""
    # Broad catch: get_offset_from_rva raises PEFormatError for unmapped
    # RVAs; any failure is reported as None rather than propagated.
    try:
        return self.get_offset_from_rva(rva)
    except Exception:
        return None
+
+
+ ##
+ # Double-Word get/set
+ ##
+
def get_data_from_dword(self, dword):
    """Return a four byte string representing the double word value. (little endian)."""
    return struct.pack('<L', dword)
+
+
def get_dword_from_data(self, data, offset):
    """Read the little-endian double word at index 'offset' of a dword array.

    'offset' indexes 4-byte units, not bytes.  Returns None when the
    requested unit extends past the end of 'data'.
    """

    start = offset * 4
    end = start + 4
    if end > len(data):
        return None

    return struct.unpack('<L', data[start:end])[0]
+
+
def get_dword_at_rva(self, rva):
    """Return the double word value at the given RVA.

    Returns None if the value can't be read, i.e. the RVA can't be mapped
    to a file offset.
    """

    try:
        return self.get_dword_from_data(self.get_data(rva)[:4], 0)
    except PEFormatError:
        return None
+
+
def get_dword_from_offset(self, offset):
    """Return the double word value at the given file offset. (little endian)

    Returns None when fewer than four bytes remain at 'offset'.
    """

    if offset+4 > len(self.__data__):
        return None

    return self.get_dword_from_data(self.__data__[offset:offset+4], 0)
+
+
def set_dword_at_rva(self, rva, dword):
    """Set the double word value at the file offset corresponding to the given RVA."""
    return self.set_bytes_at_rva(rva, self.get_data_from_dword(dword))
+
+
def set_dword_at_offset(self, offset, dword):
    """Set the double word value at the given file offset."""
    return self.set_bytes_at_offset(offset, self.get_data_from_dword(dword))
+
+
+
+ ##
+ # Word get/set
+ ##
+
def get_data_from_word(self, word):
    """Return a two byte string representing the word value. (little endian)."""
    return struct.pack('<H', word)
+
+
def get_word_from_data(self, data, offset):
    """Read the little-endian word at index 'offset' of a word array.

    'offset' indexes 2-byte units, not bytes.  Returns None when the
    requested unit extends past the end of 'data'.
    """

    start = offset * 2
    end = start + 2
    if end > len(data):
        return None

    return struct.unpack('<H', data[start:end])[0]
+
+
def get_word_at_rva(self, rva):
    """Return the word value at the given RVA.

    Returns None if the value can't be read, i.e. the RVA can't be mapped
    to a file offset.
    """

    try:
        return self.get_word_from_data(self.get_data(rva)[:2], 0)
    except PEFormatError:
        return None
+
+
def get_word_from_offset(self, offset):
    """Return the word value at the given file offset. (little endian)

    Returns None when fewer than two bytes remain at 'offset'.
    """

    if offset+2 > len(self.__data__):
        return None

    return self.get_word_from_data(self.__data__[offset:offset+2], 0)
+
+
def set_word_at_rva(self, rva, word):
    """Set the word value at the file offset corresponding to the given RVA."""
    return self.set_bytes_at_rva(rva, self.get_data_from_word(word))
+
+
def set_word_at_offset(self, offset, word):
    """Set the word value at the given file offset."""
    return self.set_bytes_at_offset(offset, self.get_data_from_word(word))
+
+
+ ##
+ # Quad-Word get/set
+ ##
+
def get_data_from_qword(self, word):
    """Return an eight byte string representing the quad-word value. (little endian)."""
    return struct.pack('<Q', word)
+
+
def get_qword_from_data(self, data, offset):
    """Read the little-endian quad-word at index 'offset' of a qword array.

    'offset' indexes 8-byte units, not bytes.  Returns None when the
    requested unit extends past the end of 'data'.
    """

    start = offset * 8
    end = start + 8
    if end > len(data):
        return None

    return struct.unpack('<Q', data[start:end])[0]
+
+
def get_qword_at_rva(self, rva):
    """Return the quad-word value at the given RVA.

    Returns None if the value can't be read, i.e. the RVA can't be mapped
    to a file offset.
    """

    try:
        return self.get_qword_from_data(self.get_data(rva)[:8], 0)
    except PEFormatError:
        return None
+
+
def get_qword_from_offset(self, offset):
    """Return the quad-word value at the given file offset. (little endian)

    Returns None when fewer than eight bytes remain at 'offset'.
    """

    if offset+8 > len(self.__data__):
        return None

    return self.get_qword_from_data(self.__data__[offset:offset+8], 0)
+
+
def set_qword_at_rva(self, rva, qword):
    """Set the quad-word value at the file offset corresponding to the given RVA."""
    return self.set_bytes_at_rva(rva, self.get_data_from_qword(qword))
+
+
def set_qword_at_offset(self, offset, qword):
    """Set the quad-word value at the given file offset."""
    return self.set_bytes_at_offset(offset, self.get_data_from_qword(qword))
+
+
+
+ ##
+ # Set bytes
+ ##
+
+
def set_bytes_at_rva(self, rva, data):
    """Overwrite, with the given string, the bytes at the file offset corresponding to the given RVA.

    Return True if successful, False otherwise. It can fail if the
    offset is outside the file's boundaries.
    """

    offset = self.get_physical_by_rva(rva)
    # BUGFIX: this used to be 'raise False', which is not a valid raise
    # statement (it aborts with a TypeError instead of reporting failure).
    # Return False, as the docstring promises.
    # NOTE(review): an RVA that maps to file offset 0 is also treated as a
    # failure here ('not offset') -- confirm that is intended.
    if not offset:
        return False

    return self.set_bytes_at_offset(offset, data)
+
+
def set_bytes_at_offset(self, offset, data):
    """Overwrite the bytes at the given file offset with the given string.

    Return True if successful, False otherwise. It can fail if the
    offset is outside the file's boundaries.
    """

    if not isinstance(data, str):
        raise TypeError('data should be of type: str')

    # Rebuild the raw image with the patch spliced in.  A write that starts
    # in range but extends past the end is silently truncated by slicing.
    if offset >= 0 and offset < len(self.__data__):
        self.__data__ = ( self.__data__[:offset] +
            data +
            self.__data__[offset+len(data):] )
    else:
        return False

    # Refresh the section's data with the modified information
    #
    for section in self.sections:
        section_data_start = section.PointerToRawData
        section_data_end = section_data_start+section.SizeOfRawData
        section.data = self.__data__[section_data_start:section_data_end]

    return True
+
+
+
def relocate_image(self, new_ImageBase):
    """Apply the relocation information to the image using the provided new image base.

    This method will apply the relocation information to the image. Given the new base,
    all the relocations will be processed and both the raw data and the section's data
    will be fixed accordingly.
    The resulting image can be retrieved as well through the method:

        get_memory_mapped_image()

    In order to get something that would more closely match what could be found in memory
    once the Windows loader finished its work.
    """

    relocation_difference = new_ImageBase - self.OPTIONAL_HEADER.ImageBase


    for reloc in self.DIRECTORY_ENTRY_BASERELOC:

        # NOTE(review): these two block-header fields are read but never
        # used below.
        virtual_address = reloc.struct.VirtualAddress
        size_of_block = reloc.struct.SizeOfBlock

        # We iterate with an index because if the relocation is of type
        # IMAGE_REL_BASED_HIGHADJ we need to also process the next entry
        # at once and skip it for the next iteration
        #
        entry_idx = 0
        while entry_idx<len(reloc.entries):

            entry = reloc.entries[entry_idx]
            entry_idx += 1

            if entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_ABSOLUTE']:
                # Nothing to do for this type of relocation
                pass

            elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_HIGH']:
                # Fix the high 16bits of a relocation
                #
                # Add high 16bits of relocation_difference to the
                # 16bit value at RVA=entry.rva
                #
                # NOTE(review): '+' binds tighter than '>>', so this computes
                # (word + relocation_difference) >> 16 rather than word plus
                # the high half of the difference -- confirm against upstream
                # pefile before relying on HIGH relocations.

                self.set_word_at_rva(
                    entry.rva,
                    ( self.get_word_at_rva(entry.rva) + relocation_difference>>16)&0xffff )

            elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_LOW']:
                # Fix the low 16bits of a relocation
                #
                # Add low 16 bits of relocation_difference to the 16bit value
                # at RVA=entry.rva

                self.set_word_at_rva(
                    entry.rva,
                    ( self.get_word_at_rva(entry.rva) + relocation_difference)&0xffff)

            elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_HIGHLOW']:
                # Handle all high and low parts of a 32bit relocation
                #
                # Add relocation_difference to the value at RVA=entry.rva

                self.set_dword_at_rva(
                    entry.rva,
                    self.get_dword_at_rva(entry.rva)+relocation_difference)

            elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_HIGHADJ']:
                # Fix the high 16bits of a relocation and adjust
                #
                # Add high 16bits of relocation_difference to the 32bit value
                # composed from the (16bit value at RVA=entry.rva)<<16 plus
                # the 16bit value at the next relocation entry.
                #

                # If the next entry is beyond the array's limits,
                # abort... the table is corrupt
                #
                if entry_idx == len(reloc.entries):
                    break

                next_entry = reloc.entries[entry_idx]
                entry_idx += 1
                self.set_word_at_rva( entry.rva,
                    ((self.get_word_at_rva(entry.rva)<<16) + next_entry.rva +
                    relocation_difference & 0xffff0000) >> 16 )

            elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_DIR64']:
                # Apply the difference to the 64bit value at the offset
                # RVA=entry.rva

                self.set_qword_at_rva(
                    entry.rva,
                    self.get_qword_at_rva(entry.rva) + relocation_difference)
+
+
def verify_checksum(self):
    """Return True if the stored OptionalHeader CheckSum matches a fresh computation."""
    return self.OPTIONAL_HEADER.CheckSum == self.generate_checksum()
+
+
def generate_checksum(self):
    """Recompute the PE checksum over the raw file data.

    Implements the standard PE checksum: a folded one's-complement style
    sum of the file taken as little-endian dwords, skipping the CheckSum
    field itself, plus the file length.
    """

    # Get the offset to the CheckSum field in the OptionalHeader: the field
    # lives 0x40 bytes into the optional header.
    checksum_offset = self.OPTIONAL_HEADER.__file_offset__ + 0x40 # 64

    checksum = 0

    # NOTE(review): trailing bytes beyond the last whole dword are ignored;
    # PE files are normally padded to an aligned size.
    # ('//' keeps integer division semantics under true division too.)
    for i in range( len(self.__data__) // 4 ):

        # Skip the checksum field
        #
        if i == checksum_offset // 4:
            continue

        # BUGFIX: use an explicit little-endian fixed 4-byte format.  The
        # old native-size 'L' expects 8 bytes on LP64 platforms and raised
        # struct.error on the 4-byte slice; PE integers are little-endian.
        dword = struct.unpack('<L', self.__data__[ i*4 : i*4+4 ])[0]
        # Fold any carry above 32 bits back into the sum.
        checksum = (checksum & 0xffffffff) + dword + (checksum>>32)
        if checksum > 2**32:
            checksum = (checksum & 0xffffffff) + (checksum >> 32)

    # Fold the 32-bit sum down to 16 bits.
    checksum = (checksum & 0xffff) + (checksum >> 16)
    checksum = (checksum) + (checksum >> 16)
    checksum = checksum & 0xffff

    return checksum + len(self.__data__)
diff --git a/chromium/tools/symsrc/source_index.py b/chromium/tools/symsrc/source_index.py
new file mode 100755
index 00000000000..f780cd6bab9
--- /dev/null
+++ b/chromium/tools/symsrc/source_index.py
@@ -0,0 +1,550 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Usage: <win-path-to-pdb.pdb>
+This tool will take a PDB on the command line, extract the source files that
+were used in building the PDB, query the source server for which repository
+and revision these files are at, and then finally write this information back
+into the PDB in a format that the debugging tools understand. This allows for
+automatic source debugging, as all of the information is contained in the PDB,
+and the debugger can go out and fetch the source files.
+
+You most likely want to run these immediately after a build, since the source
+input files need to match the generated PDB, and we want the correct
+revision information for the exact files that were used for the build.
+
+The following files from a windbg + source server installation are expected
+to reside in the same directory as this python script:
+ dbghelp.dll
+ pdbstr.exe
+ srctool.exe
+
+NOTE: Expected to run under a native win32 python, NOT cygwin. All paths are
+dealt with as win32 paths, since we have to interact with the Microsoft tools.
+"""
+
+import os
+import optparse
+import sys
+import tempfile
+import time
+import subprocess
+import win32api
+
+from collections import namedtuple
+
# This serves two purposes. First, it acts as a whitelist, and only files
# from repositories listed here will be source indexed. Second, it allows us
# to map from one URL to another, so we can map to external source servers. It
# also indicates if the source for this project will be retrieved in a base64
# encoded format.
# TODO(sebmarchand): Initialize this variable in the main function and pass it
# to the sub functions instead of having a global variable.
REPO_MAP = {
  'http://src.chromium.org/svn': {
    'url': 'https://src.chromium.org/chrome/'
        '{file_path}?revision={revision}',
    'base64': False
  },
  'https://src.chromium.org/svn': {
    'url': 'https://src.chromium.org/chrome/'
        '{file_path}?revision={revision}',
    'base64': False
  }
}


# Project families; FillRepositoriesMap() expands these into REPO_MAP
# entries ('%s' is substituted with each project name).
PROJECT_GROUPS = [
  # Googlecode SVN projects
  {
    'projects': [
      'angleproject',
      'google-breakpad',
      'google-cache-invalidation-api',
      'google-url',
      'googletest',
      'leveldb',
      'libphonenumber',
      'libyuv',
      'open-vcdiff',
      'ots',
      'sawbuck',
      'sfntly',
      'smhasher',
      'v8',
      'v8-i18n',
      'webrtc',
    ],
    'public_url': 'https://%s.googlecode.com/svn-history/' \
        'r{revision}/{file_path}',
    'svn_urls': [
      'svn://svn-mirror.golo.chromium.org/%s',
      'http://src.chromium.org/%s',
      'https://src.chromium.org/%s',
      'http://%s.googlecode.com/svn',
      'https://%s.googlecode.com/svn',
    ],
  },
  # Googlecode Git projects
  {
    'projects': [
      'syzygy',
    ],
    'public_url': 'https://%s.googlecode.com/git-history/' \
        '{revision}/{file_path}',
    'svn_urls': [
      'https://code.google.com/p/%s/',
    ],
  },
  # Chrome projects
  {
    'projects': [
      'blink',
      'chrome',
      'multivm',
      'native_client',
    ],
    'public_url': 'https://src.chromium.org/%s/' \
        '{file_path}?revision={revision}',
    'svn_urls': [
      'svn://chrome-svn/%s',
      'svn://chrome-svn.corp.google.com/%s',
      'svn://svn-mirror.golo.chromium.org/%s',
      'svn://svn.chromium.org/%s',
    ],
  },
]
+
# A named tuple used to store the information about a repository.
#
# It contains the following members:
#     - repo: The URL of the repository;
#     - rev: The revision (or hash) of the current checkout.
#     - files: The list of files coming from this repository.
#     - root_path: The root path of this checkout.
#     - path_prefix: A prefix to apply to the filename of the files coming from
#       this repository.
RevisionInfo = namedtuple('RevisionInfo',
    ['repo', 'rev', 'files', 'root_path', 'path_prefix'])
+
+
def GetCasedFilePath(filename):
  """Return the correctly cased path for a given filename.

  Round-trips through the Windows short-path form so the on-disk casing is
  recovered; requires pywin32 and an existing path.
  """
  return win32api.GetLongPathName(win32api.GetShortPathName(unicode(filename)))
+
+
def FillRepositoriesMap():
  """ Fill the repositories map with the whitelisted projects. """
  for project_group in PROJECT_GROUPS:
    for project in project_group['projects']:
      for svn_url in project_group['svn_urls']:
        REPO_MAP[svn_url % project] = {
          'url': project_group['public_url'] % project,
          'base64': False
        }
      # NOTE(review): the public URL itself maps to None -- presumably to
      # whitelist already-public URLs without remapping them; confirm.
      REPO_MAP[project_group['public_url'] % project] = None

# Populate REPO_MAP once at import time.
FillRepositoriesMap()
+
+
def FindFile(filename):
  """Return the absolute path of 'filename' located next to this script."""
  script_dir = os.path.dirname(os.path.join(os.path.curdir, __file__))
  candidate = os.path.join(script_dir, filename)
  return os.path.abspath(candidate)
+
+
def RunCommand(*cmd, **kwargs):
  """Runs a command.

  Returns what have been printed to stdout by this command.

  kwargs:
    raise_on_failure: Indicates if an exception should be raised on failure, if
        set to false then the function will return None.
  """
  kwargs.setdefault('stdin', subprocess.PIPE)
  kwargs.setdefault('stdout', subprocess.PIPE)
  kwargs.setdefault('stderr', subprocess.PIPE)
  kwargs.setdefault('universal_newlines', True)
  raise_on_failure = kwargs.pop('raise_on_failure', True)

  proc = subprocess.Popen(cmd, **kwargs)
  # communicate() returns (stdout, stderr).
  ret, err = proc.communicate()
  if proc.returncode != 0:
    if raise_on_failure:
      print 'Error: %s' % err
      raise subprocess.CalledProcessError(proc.returncode, cmd)
    return

  # Strip the trailing newline so callers get a clean value.
  ret = (ret or '').rstrip('\n')
  return ret
+
+
def ExtractSourceFiles(pdb_filename):
  """Extract a list of local paths of the source files from a PDB."""
  src_files = RunCommand(FindFile('srctool.exe'), '-r', pdb_filename)
  # srctool prefixes its own error output with "srctool: ".
  if not src_files or src_files.startswith("srctool: "):
    raise Exception("srctool failed: " + src_files)
  # Lower-case everything for case-insensitive comparisons on Windows.
  return set(x.lower() for x in src_files.split('\n') if len(x) != 0)
+
+
def ReadSourceStream(pdb_filename):
  """Read the contents of the source information stream from a PDB."""
  srctool = subprocess.Popen([FindFile('pdbstr.exe'),
                              '-r', '-s:srcsrv',
                              '-p:%s' % pdb_filename],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  data, _ = srctool.communicate()

  # NOTE(review): returncode -1 is tolerated -- presumably pdbstr returns it
  # when the stream does not exist yet; confirm.
  if ((srctool.returncode != 0 and srctool.returncode != -1) or
      data.startswith("pdbstr: ")):
    raise Exception("pdbstr failed: " + data)
  return data
+
+
def WriteSourceStream(pdb_filename, data):
  """Write the contents of the source information stream to a PDB."""
  # Write out the data to a temporary filename that we can pass to pdbstr.
  (f, fname) = tempfile.mkstemp()
  f = os.fdopen(f, "wb")
  f.write(data)
  f.close()

  srctool = subprocess.Popen([FindFile('pdbstr.exe'),
                              '-w', '-s:srcsrv',
                              '-i:%s' % fname,
                              '-p:%s' % pdb_filename],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
  data, _ = srctool.communicate()

  if ((srctool.returncode != 0 and srctool.returncode != -1) or
      data.startswith("pdbstr: ")):
    # NOTE(review): 'fname' leaks here -- the exception skips os.unlink.
    raise Exception("pdbstr failed: " + data)

  os.unlink(fname)
+
+
def GetSVNRepoInfo(local_path):
  """Calls svn info to extract the SVN information about a path.

  Returns a dict of the 'field: value' lines printed by svn info, or None
  when the path is not under svn control.
  """
  # We call svn.bat to make sure and get the depot tools SVN and not cygwin.
  info = RunCommand('svn.bat', 'info', local_path, raise_on_failure=False)
  if not info:
    return
  # Hack up into a dictionary of the fields printed by svn info.
  # NOTE(review): split(': ', 2) allows up to three pieces; a value containing
  # ': ' would make dict() raise -- maxsplit=1 looks intended; confirm.
  vals = dict((y.split(': ', 2) for y in info.split('\n') if y))
  return vals
+
+
def ExtractSVNInfo(local_filename):
  """Checks if a file is coming from a svn repository and if so returns some
  information about it.

  Args:
    local_filename: The name of the file that we want to check.

  Returns:
    None if the file doesn't come from a svn repository, otherwise it returns a
    RevisionInfo tuple.
  """
  # Try to get the svn information about this file.
  vals = GetSVNRepoInfo(local_filename)
  if not vals:
    return

  repo = vals['Repository Root']
  if not vals['URL'].startswith(repo):
    raise Exception("URL is not inside of the repository root?!?")
  rev = vals['Revision']

  svn_local_root = os.path.split(local_filename)[0]

  # We need to look at the SVN URL of the current path to handle the case when
  # we do a partial SVN checkout inside another checkout of the same repository.
  # This happens in Chromium where we do some checkout of
  # '/trunk/deps/third_party' in 'src/third_party'.
  svn_root_url = os.path.dirname(vals['URL'])

  # Don't try to list all the files from this repository as this seem to slow
  # down the indexing, instead index one file at a time.
  file_list = [local_filename.replace(svn_local_root, '').lstrip(os.path.sep)]

  return RevisionInfo(repo=repo, rev=rev, files=file_list,
      root_path=svn_local_root, path_prefix=svn_root_url.replace(repo, ''))
+
+
def ExtractGitInfo(local_filename):
  """Checks if a file is coming from a git repository and if so returns some
  information about it.

  Args:
    local_filename: The name of the file that we want to check.

  Returns:
    None if the file doesn't come from a git repository, otherwise it returns a
    RevisionInfo tuple.
  """
  # Starts by checking if this file is coming from a git repository. For that
  # we'll start by calling 'git info' on this file; for this to work we need to
  # make sure that the current working directory is correctly cased. It turns
  # out that even on Windows the casing of the path passed in the |cwd| argument
  # of subprocess.Popen matters and if it's not correctly cased then 'git info'
  # will return None even if the file is coming from a git repository. This
  # is not the case if we're just interested in checking if the path containing
  # |local_filename| is coming from a git repository, in this case the casing
  # doesn't matter.
  local_filename = GetCasedFilePath(local_filename)
  local_file_basename = os.path.basename(local_filename)
  local_file_dir = os.path.dirname(local_filename)
  file_info = RunCommand('git.bat', 'log', '-n', '1', local_file_basename,
      cwd=local_file_dir, raise_on_failure=False)

  if not file_info:
    return

  # Get the revision of the master branch.
  rev = RunCommand('git.bat', 'rev-parse', 'HEAD', cwd=local_file_dir)

  # Get the url of the remote repository.
  repo = RunCommand('git.bat', 'config', '--get', 'remote.origin.url',
      cwd=local_file_dir)
  # If the repository point to a local directory then we need to run this
  # command one more time from this directory to get the repository url.
  if os.path.isdir(repo):
    repo = RunCommand('git.bat', 'config', '--get', 'remote.origin.url',
        cwd=repo)

  # Don't use the authenticated path.
  repo = repo.replace('googlesource.com/a/', 'googlesource.com/')

  # Get the relative file path for this file in the git repository.
  # (Forward slashes are converted to Windows separators.)
  git_path = RunCommand('git.bat', 'ls-tree', '--full-name', '--name-only',
      'HEAD', local_file_basename, cwd=local_file_dir).replace('/','\\')

  if not git_path:
    return

  git_root_path = local_filename.replace(git_path, '')

  if repo not in REPO_MAP:
    # Automatically adds the project coming from a git GoogleCode repository to
    # the repository map. The files from these repositories are accessible via
    # gitiles in a base64 encoded format.
    if 'chromium.googlesource.com' in repo:
      REPO_MAP[repo] = {
          'url': '%s/+/{revision}/{file_path}?format=TEXT' % repo,
          'base64': True
      }

  # Get the list of files coming from this repository.
  git_file_list = RunCommand('git.bat', 'ls-tree', '--full-name', '--name-only',
      'HEAD', '-r', cwd=git_root_path)

  file_list = [x for x in git_file_list.splitlines() if len(x) != 0]

  return RevisionInfo(repo=repo, rev=rev, files=file_list,
      root_path=git_root_path, path_prefix=None)
+
+
+def IndexFilesFromRepo(local_filename, file_list, output_lines):
+ """Checks if a given file is a part of a revision control repository (svn or
+ git) and index all the files from this repository if it's the case.
+
+ Args:
+ local_filename: The filename of the current file.
+ file_list: The list of files that should be indexed.
+ output_lines: The source indexing lines that will be appended to the PDB.
+
+ Returns the number of indexed files.
+ """
+ indexed_files = 0
+
+ # Try to extract the revision info for the current file.
+ info = ExtractGitInfo(local_filename)
+ if not info:
+ info = ExtractSVNInfo(local_filename)
+
+ repo = info.repo
+ rev = info.rev
+ files = info.files
+ root_path = info.root_path.lower()
+
+ # Checks if we should index this file and if the source that we'll retrieve
+ # will be base64 encoded.
+ should_index = False
+ base_64 = False
+ if repo in REPO_MAP:
+ should_index = True
+ base_64 = REPO_MAP[repo].get('base64')
+ else:
+ repo = None
+
+ # Iterates over the files from this repo and index them if needed.
+ for file_iter in files:
+ current_filename = file_iter.lower()
+ full_file_path = os.path.normpath(os.path.join(root_path, current_filename))
+ # Checks if the file is in the list of files to be indexed.
+ if full_file_path in file_list:
+ if should_index:
+ source_url = ''
+ current_file = file_iter
+ # Prefix the filename with the prefix for this repository if needed.
+ if info.path_prefix:
+ current_file = os.path.join(info.path_prefix, current_file)
+ source_url = REPO_MAP[repo].get('url').format(revision=rev,
+ file_path=os.path.normpath(current_file).replace('\\', '/'))
+ output_lines.append('%s*%s*%s*%s*%s' % (full_file_path, current_file,
+ rev, source_url, 'base64.b64decode' if base_64 else ''))
+ indexed_files += 1
+ file_list.remove(full_file_path)
+
+ # The input file should have been removed from the list of files to index.
+ if indexed_files and local_filename in file_list:
+ print '%s shouldn\'t be in the list of files to index anymore.' % \
+ local_filename
+ # TODO(sebmarchand): Turn this into an exception once I've confirmed that
+ # this doesn't happen on the official builder.
+ file_list.remove(local_filename)
+
+ return indexed_files
+
+
+def DirectoryIsUnderPublicVersionControl(local_dir):
+ # Checks if this directory is from a Git checkout.
+ info = RunCommand('git.bat', 'config', '--get', 'remote.origin.url',
+ cwd=local_dir, raise_on_failure=False)
+ if info:
+ return True
+
+ # If not checks if it's from a SVN checkout.
+ info = GetSVNRepoInfo(local_dir)
+ if info:
+ return True
+
+ return False
+
+
+def UpdatePDB(pdb_filename, verbose=True, build_dir=None, toolchain_dir=None):
+ """Update a pdb file with source information."""
+ dir_blacklist = { }
+
+ if build_dir:
+ # Blacklisting the build directory allows skipping the generated files, for
+ # Chromium this makes the indexing ~10x faster.
+ build_dir = (os.path.normpath(build_dir)).lower()
+ for directory, _, _ in os.walk(build_dir):
+ dir_blacklist[directory.lower()] = True
+ dir_blacklist[build_dir.lower()] = True
+
+ if toolchain_dir:
+ # Blacklisting the directories from the toolchain as we don't have revision
+ # info for them.
+ toolchain_dir = (os.path.normpath(toolchain_dir)).lower()
+ for directory, _, _ in os.walk(build_dir):
+ dir_blacklist[directory.lower()] = True
+ dir_blacklist[toolchain_dir.lower()] = True
+
+ # Writes the header of the source index stream.
+ #
+ # Here's the description of the variables used in the SRC_* macros (those
+ # variables have to be defined for every source file that we want to index):
+ # var1: The file path.
+ # var2: The name of the file without its path.
+ # var3: The revision or the hash of this file's repository.
+ # var4: The URL to this file.
+ # var5: (optional) The python method to call to decode this file, e.g. for
+ # a base64 encoded file this value should be 'base64.b64decode'.
+ lines = [
+ 'SRCSRV: ini ------------------------------------------------',
+ 'VERSION=1',
+ 'INDEXVERSION=2',
+ 'VERCTRL=Subversion',
+ 'DATETIME=%s' % time.asctime(),
+ 'SRCSRV: variables ------------------------------------------',
+ 'SRC_EXTRACT_TARGET_DIR=%targ%\%fnbksl%(%var2%)\%var3%',
+ 'SRC_EXTRACT_TARGET=%SRC_EXTRACT_TARGET_DIR%\%fnfile%(%var1%)',
+ 'SRC_EXTRACT_CMD=cmd /c "mkdir "%SRC_EXTRACT_TARGET_DIR%" & python -c '
+ '"import urllib2, base64;'
+ 'url = \\\"%var4%\\\";'
+ 'u = urllib2.urlopen(url);'
+ 'print %var5%(u.read());" > "%SRC_EXTRACT_TARGET%""',
+ 'SRCSRVTRG=%SRC_EXTRACT_TARGET%',
+ 'SRCSRVCMD=%SRC_EXTRACT_CMD%',
+ 'SRCSRV: source files ---------------------------------------',
+ ]
+
+ if ReadSourceStream(pdb_filename):
+ raise Exception("PDB already has source indexing information!")
+
+ filelist = ExtractSourceFiles(pdb_filename)
+ number_of_files = len(filelist)
+ indexed_files_total = 0
+ while filelist:
+ filename = next(iter(filelist))
+ filedir = os.path.dirname(filename)
+ if verbose:
+ print "[%d / %d] Processing: %s" % (number_of_files - len(filelist),
+ number_of_files, filename)
+
+ # This directory is blacklisted, either because it's not part of a
+ # repository, or from one we're not interested in indexing.
+ if dir_blacklist.get(filedir, False):
+ if verbose:
+ print " skipping, directory is blacklisted."
+ filelist.remove(filename)
+ continue
+
+ # Skip the files that don't exist on the current machine.
+ if not os.path.exists(filename):
+ filelist.remove(filename)
+ continue
+
+ # Try to index the current file and all the ones coming from the same
+ # repository.
+ indexed_files = IndexFilesFromRepo(filename, filelist, lines)
+ if not indexed_files:
+ if not DirectoryIsUnderPublicVersionControl(filedir):
+ dir_blacklist[filedir] = True
+ if verbose:
+ print "Adding %s to the blacklist." % filedir
+ filelist.remove(filename)
+ continue
+
+ indexed_files_total += indexed_files
+
+ if verbose:
+ print " %d files have been indexed." % indexed_files
+
+ lines.append('SRCSRV: end ------------------------------------------------')
+
+ WriteSourceStream(pdb_filename, '\r\n'.join(lines))
+
+ if verbose:
+ print "%d / %d files have been indexed." % (indexed_files_total,
+ number_of_files)
+
+
+def main():
+ parser = optparse.OptionParser()
+ parser.add_option('-v', '--verbose', action='store_true', default=False)
+ parser.add_option('--build-dir', help='The original build directory, if set '
+ 'all the files present in this directory (or one of its subdirectories) '
+ 'will be skipped.')
+ parser.add_option('--toolchain-dir', help='The directory containing the '
+ 'toolchain that has been used for this build. If set all the files '
+ 'present in this directory (or one of its subdirectories) will be '
+ 'skipped.')
+ options, args = parser.parse_args()
+
+ if not args:
+ parser.error('Specify a pdb')
+
+ for pdb in args:
+ UpdatePDB(pdb, options.verbose, options.build_dir)
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/tcmalloc/print-live-objects.py b/chromium/tools/tcmalloc/print-live-objects.py
new file mode 100755
index 00000000000..dfed9c60999
--- /dev/null
+++ b/chromium/tools/tcmalloc/print-live-objects.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Symbolizes and prints live objects as recorded by tcmalloc's
+HeapProfilerDumpLiveObjects.
+"""
+
+import os
+import re
+import subprocess
+import sys
+import tempfile
+
+def usage():
+ print """\
+Usage:
+ tools/tcmalloc/print-live-objects.py out/Debug/chrome leaks.dmp
+"""
+
+def LoadDump(dump_file):
+ result = []
+ leakfmt = re.compile(
+ r"^\s*1:\s*(\d+)\s*\[\s*1:\s*\d+\]\s*@(0x[a-f0-9]+)((\s+0x[a-f0-9]+)*)$")
+ line_no = 0
+ with open(dump_file) as f:
+ for line in f:
+ line_no = line_no + 1
+ matches = leakfmt.match(line)
+ if not matches:
+ print "%s: could not parse line %d, skipping" % (dump_file, line_no)
+ else:
+ trace = { "size": int(matches.group(1)),
+ "address": matches.group(2),
+ "frames": matches.group(3).strip().split(" ")}
+ result.append(trace)
+ return result
+
+
+def Symbolize(binary, traces):
+  """Resolves the raw frame addresses of |traces| to names and locations.
+
+  Runs a single addr2line invocation over every distinct address and replaces
+  each trace's "frames" list of hex addresses with {"name", "location"}
+  dicts, in place.
+
+  Args:
+    binary: Path of the binary the dump was taken from.
+    traces: The list of traces returned by LoadDump(); mutated in place.
+  """
+  # Collect the set of distinct addresses referenced by any trace.
+  addresses = set()
+  for trace in traces:
+    for frame in trace["frames"]:
+      addresses.add(frame)
+  # Write the addresses to a temp file and pass it to addr2line via its
+  # '@file' response-file syntax. NOTE(review): the temp file is never
+  # deleted.
+  addr_file, addr_filename = tempfile.mkstemp()
+  for addr in addresses:
+    os.write(addr_file, "%s\n" % addr)
+  os.close(addr_file)
+  # With -f addr2line prints two lines per address (function name, then
+  # file:line); -C demangles C++ names.
+  syms = subprocess.Popen([
+      "addr2line", "-f", "-C", "-e", binary, "@%s" % addr_filename],
+      stdout=subprocess.PIPE).communicate()[0].strip().split("\n")
+  table = {}
+  cwd = os.getcwd()
+  # Zipping |addresses| against the output relies on the set iterating in the
+  # same order here as when the response file was written above -- true for
+  # an unmodified set within a single run.
+  for address, symbol, location in zip(addresses, syms[::2], syms[1::2]):
+    if location != "??:0":
+      # Known location: make the path relative to the current directory.
+      filename, line = location.split(":")
+      filename = os.path.realpath(filename)[len(cwd)+1:]
+      location = "%s:%s" % (filename, line)
+    table[address] = { "name": symbol, "location": location }
+  # Swap every raw address for its symbolized entry.
+  for trace in traces:
+    frames = []
+    for frame in trace["frames"]:
+      frames.append(table[frame])
+    trace["frames"] = frames
+
+
+def Main(argv):
+ if sys.platform != 'linux2':
+ print 'print-live-objects.py requires addr2line only present on Linux.'
+ sys.exit(1)
+
+ if len(argv) != 3:
+ usage()
+ sys.exit(1)
+
+ traces = LoadDump(argv[2])
+ Symbolize(argv[1], traces)
+
+ if not traces:
+ print "No leaks found!"
+
+ for trace in sorted(traces, key=lambda x: -x["size"]):
+ print "Leak of %d bytes at address %s" % (trace["size"], trace["address"])
+ for frame in trace["frames"]:
+ print " %s (%s)" % (frame["name"], frame["location"])
+ print ""
+
+
+if __name__ == '__main__':
+ Main(sys.argv)
diff --git a/chromium/tools/telemetry/telemetry.gyp b/chromium/tools/telemetry/telemetry.gyp
deleted file mode 100644
index 02ecd8d6aee..00000000000
--- a/chromium/tools/telemetry/telemetry.gyp
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-{
- 'targets': [
- {
- 'target_name': 'bitmaptools',
- 'type': 'executable',
- 'sources': [
- 'telemetry/internal/image_processing/bitmaptools.cc',
- ],
- 'toolsets': ['host'],
- },
- ],
-}
diff --git a/chromium/tools/telemetry/telemetry.isolate b/chromium/tools/telemetry/telemetry.isolate
deleted file mode 100644
index aa9fc95f292..00000000000
--- a/chromium/tools/telemetry/telemetry.isolate
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-{
- 'conditions': [
- ['OS=="android" or OS=="linux" or OS=="mac" or OS=="win"', {
- 'variables': {
- 'files': [
- '../../build/android/devil/',
- '../../third_party/catapult/',
- './',
- # For Telemetry's screenshot support.
- '<(PRODUCT_DIR)/bitmaptools<(EXECUTABLE_SUFFIX)',
- ],
- },
- }],
- ]
-}
diff --git a/chromium/tools/trace/trace.html b/chromium/tools/trace/trace.html
new file mode 100644
index 00000000000..e14bbaef686
--- /dev/null
+++ b/chromium/tools/trace/trace.html
@@ -0,0 +1,287 @@
+<html>
+<head>
+<title>
+Trace Events
+</title>
+<style>
+body {
+ font-family: "Courier New";
+ font-size: 9pt;
+}
+
+#header {
+ position: absolute;
+ top: 0px;
+ left: 0px;
+ border-bottom: 1px dashed black;
+ background-color: #F0F0F0;
+ z-index: 3;
+}
+
+#outer {
+ position: relative;
+ height: 200px;
+}
+
+#time_scale {
+ height: 15px;
+ width: 100%;
+}
+
+#tooltip {
+ position: absolute;
+ background-color: #FFFFCC;
+ display: none;
+ font-family: "Courier New";
+ font-size: 9pt;
+ padding: 5px;
+ border: 1px solid #CCCC88;
+ z-index: 3;
+}
+
+#legend {
+ position: fixed;
+ left: 10px;
+ bottom: 10px;
+ padding: 5px;
+ border: 1px solid silver;
+ z-index: 10;
+ background-color: #f0f0f0;
+}
+
+h2 {
+ margin: 5px;
+}
+
+#instructions {
+  position: absolute;
+  /* A dangling "top:" declaration with no value used to sit here; it was
+     invalid CSS and caused the following property to be dropped too. */
+  float: right;
+  display: none;
+}
+
+li.time_tick {
+ background-color: #FFFFCC;
+ height: 15px;
+}
+
+li {
+ background: pink;
+ position: absolute;
+ height: 10px;
+ list-style: none;
+ margin: 0px;
+ padding: 0px;
+ z-index: 2;
+}
+
+li:hover {
+ border: 1px solid red;
+}
+
+.url {
+ background-color: green;
+}
+
+.http {
+ background-color: blue;
+}
+
+.socket {
+ background-color: black;
+}
+
+.v8 {
+ background-color: orange;
+}
+
+</style>
+
+<script src='trace_data.js'></script>
+<script>
+var scale = 100000;
+var row_height = 15;
+var trace_initial_time = 0;
+var trace_threads = {};
+var heartbeats = [];
+var trace_total_time = 0;
+
+function process_raw_events() {
+ trace_initial_time = raw_trace_events[0].usec_begin;
+ var stack = [];
+ var e;
+ for (var i in raw_trace_events) {
+ e = raw_trace_events[i];
+ var trace_events = trace_threads["e.tid"];
+ if (!trace_events) {
+ trace_events = [];
+ trace_threads["e.tid"] = trace_events;
+ }
+ if (e.name.indexOf("heartbeat.") == 0) {
+ heartbeats.push(e);
+ } else if (e.type == "BEGIN") {
+ trace_events.push(e);
+ stack.unshift(e);
+ } else if (e.type == "END") {
+ for (var s in stack) {
+ var begin = stack[s];
+ if ((begin.id == e.id) && (begin.name == e.name) &&
+ (begin.pid == e.pid) && (begin.tid == e.tid)) {
+ begin.usec_end = e.usec_begin;
+ begin.duration = begin.usec_end - begin.usec_begin;
+ stack.splice(s, 1);
+ break;
+ }
+ }
+ } else if (e.type == "INSTANT") {
+ trace_events.push(e);
+ e.duration = 0;
+ }
+ }
+ if (e.usec_end)
+ trace_total_time = e.usec_end - trace_initial_time;
+ else
+ trace_total_time = e.usec_begin - trace_initial_time;
+}
+
+function compute_scale() {
+ var outer = document.getElementById("outer");
+ scale = Math.floor(trace_total_time / (outer.offsetWidth - (row_height * 2)));
+};
+
+function show_details(tid, i, event) {
+ var trace_events = trace_threads["e.tid"];
+ var inner = trace_events[i].name + " " +
+ trace_events[i].duration / 1000 + "ms<br />" +
+ trace_events[i].id + "<br />" +
+ trace_events[i].extra + "<br />";
+ var tooltip = document.getElementById("tooltip");
+ tooltip.innerHTML = inner;
+ if (window.event)
+ event = window.event;
+ tooltip.style.top = event.pageY + 3;
+ tooltip.style.left = event.pageX + 3;
+ tooltip.style.display = "block";
+};
+
+function generate_time_scale() {
+ var view_size = window.clientWidth;
+ var body_size = document.body.scrollWidth;
+ var inner = "";
+
+ var step_ms = Math.floor(scale / 10); // ms per 100px
+ var pow10 = Math.pow(10, Math.floor(Math.log(step_ms) / Math.log(10)));
+ var round = .5 * pow10;
+ step_ms = round * (Math.floor(step_ms / round)); // round to a multiple of round
+ for (var i = step_ms; i < trace_total_time / 1000; i += step_ms) {
+ var x = Math.floor(i * 1000 / scale);
+ inner += "<li class='time_tick' style='left: " + x + "px'>" + i + "</li>";
+ }
+ var time_scale = document.getElementById("time_scale");
+ time_scale.innerHTML = inner;
+ time_scale.style.width = document.body.scrollWidth;
+}
+
+function generate_subchart(trace_events, top) {
+  // Renders one thread's events as absolutely-positioned <li> bars inside a
+  // new 'subchart' <div>, starting at vertical offset |top|. Events that
+  // start before the previous one ended are drawn one row lower. Returns
+  // the final |top| offset so the next subchart can stack below this one.
+  var inner = "";
+  var last_max_time = 0;
+  var last_max_x = 0;
+  for (var i in trace_events) {
+    var e = trace_events[i];
+    // Horizontal position and size are the event times divided by |scale|.
+    var start_time = e.usec_begin - trace_initial_time;
+    var left = row_height + Math.floor(start_time / scale);
+    var width = Math.floor(e.duration / scale);
+    if (width == 0)
+      width = 1;
+    // Overlaps the previous event: push this bar down one row.
+    if (start_time < last_max_time)
+      top += row_height;
+    var style = "top: " + top + "px; left: " + left + "px; width: " + width + "px;";
+    var js = 'javascript:show_details("' + e.tid + '", ' + i + ', event);';
+    // The CSS class (and so the bar color) is the event name's first
+    // dot-separated component, e.g. 'url', 'http', 'socket', 'v8'.
+    var cls = e.name.split('.')[0];
+    inner += "<li class='" + cls + "' onmouseover='" + js + "' id='li-" + i + "' style='" + style + "'></li>\n";
+    last_max_time = start_time + e.duration;
+    last_max_x = left + width;
+  }
+  var subchart = document.createElement('div');
+  subchart.setAttribute("class", "subchart");
+  subchart.setAttribute("id", trace_events[0].tid);
+  subchart.innerHTML = inner;
+  // NOTE(review): bare numbers assigned to style.height/style.width are
+  // ignored in standards mode; a 'px' suffix looks intended -- confirm.
+  subchart.style.height = top + row_height;
+  subchart.style.width = row_height + last_max_x;
+  var chart = document.getElementById("chart");
+  chart.appendChild(subchart);
+
+  return top;
+};
+
+function generate_chart() {
+ var chart = document.getElementById("chart");
+ chart.innerHTML = "";
+ var top = 60;
+ for (var t in trace_threads) {
+ top = generate_subchart(trace_threads[t], top);
+ }
+ generate_time_scale();
+}
+
+function change_scale(event) {
+ if (!event)
+ event = window.event;
+ if (!event.shiftKey)
+ return;
+ var delta = 0;
+ if (event.wheelDelta) {
+ delta = event.wheelDelta / 120;
+ } else if (event.detail) {
+ delta = - event.detail / 3;
+ }
+ if (delta) {
+ var tooltip = document.getElementById("tooltip");
+ tooltip.style.display = "none";
+ var factor = 1.1;
+ if (delta < 0)
+ scale = Math.floor(scale * factor);
+ else
+ scale = Math.floor(scale / factor);
+ if (scale > 300000)
+ scale = 300000;
+ generate_chart();
+ if (event.preventDefault)
+ event.preventDefault();
+ }
+ event.returnValue = false;
+};
+
+function initial_load() {
+  // Entry point, invoked from <body onload>. Wires up the zoom handlers
+  // (Gecko uses the DOMMouseScroll event, others onmousewheel), then
+  // processes and renders the trace. The order of the last three calls
+  // matters: the scale depends on the processed events, and the chart on
+  // the scale.
+  if (window.addEventListener)
+    window.addEventListener('DOMMouseScroll', change_scale, false);
+  window.onmousewheel = document.onmousewheel = change_scale;
+
+  process_raw_events();
+  compute_scale();
+  generate_chart();
+};
+
+</script>
+</head>
+<body onload='initial_load();'>
+<div id="header">
+<h2>Trace Events</h2>
+<div id="instructions">
+Use shift+mouse-wheel to zoom in and out.
+</div>
+<div id="time_scale"></div>
+</div>
+<div id="legend">
+<span class="url">&nbsp;</span> URL<br />
+<span class="http">&nbsp;</span> HTTP<br />
+<span class="socket">&nbsp;</span> Socket<br />
+<span class="v8">&nbsp;</span> V8<br />
+</div>
+<div id="chart">
+<div id="outer">
+</div>
+</div>
+<div id="tooltip" ondblclick="this.style.display = 'none';"></div>
+</body>
+</html>
diff --git a/chromium/tools/trace/trace_data.js b/chromium/tools/trace/trace_data.js
new file mode 100644
index 00000000000..61e0297ca9c
--- /dev/null
+++ b/chromium/tools/trace/trace_data.js
@@ -0,0 +1,1050 @@
+var raw_trace_events = [
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x16e8260', 'extra':'http://mail.google.com', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_shell.cc', 'line_number':'825', 'usec_begin': 246537},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x1', 'extra':'http://mail.google.com/', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 250373},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 300584},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 301820},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 301844},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 302652},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'frame.load', 'id':'0x16e8488', 'extra':'http://mail.google.com/', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'310', 'usec_begin': 302786},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x1', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 302866},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x2', 'extra':'http://mail.google.com/mail/', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 303348},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x17b8e60', 'extra':'http://mail.google.com/mail/', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 304497},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x17b8e60', 'extra':'http://mail.google.com/mail/', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 304548},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x17b8e60', 'extra':'http://mail.google.com/mail/', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 304569},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x17b8e60', 'extra':'http://mail.google.com/mail/', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 304627},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_host', 'id':'0x17b8e60', 'extra':'http://mail.google.com/mail/', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'270', 'usec_begin': 304640},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_host', 'id':'0x17b8e60', 'extra':'http://mail.google.com/mail/', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'275', 'usec_begin': 306405},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.connect', 'id':'0x17b8e60', 'extra':'http://mail.google.com/mail/', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'279', 'usec_begin': 306425},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.connect', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'73', 'usec_begin': 306443},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.connect', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'238', 'usec_begin': 308431},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.connect', 'id':'0x17b8e60', 'extra':'http://mail.google.com/mail/', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'284', 'usec_begin': 308477},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x17b8e60', 'extra':'http://mail.google.com/mail/', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 308560},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 308606},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x1808ae8', 'extra':'439 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 308637},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x17b8e60', 'extra':'http://mail.google.com/mail/', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 308651},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x17b8e60', 'extra':'http://mail.google.com/mail/', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 308663},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 308675},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x1808ae8', 'extra':'1279 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 367678},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x17b8e60', 'extra':'http://mail.google.com/mail/', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 368149},
+{'pid':'0x854', 'tid':'0xa20', 'type':'INSTANT', 'name':'socket.disconnect', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'113', 'usec_begin': 368763},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x2', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 368799},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 368991},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 369014},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 369028},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 369063},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_host', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'270', 'usec_begin': 369130},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_host', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'275', 'usec_begin': 370934},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.connect', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'279', 'usec_begin': 370964},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.connect', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'73', 'usec_begin': 370996},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.connect', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'238', 'usec_begin': 372273},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 373127},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x1808ae8', 'extra':'70 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 373160},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 373185},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x1808ae8', 'extra':'1718 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 379657},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 387342},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x1808ae8', 'extra':'186 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 387405},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 387424},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x1808ae8', 'extra':'47 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 391679},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 423698},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.connect', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'284', 'usec_begin': 433561},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 433582},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 433661},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x1808ae8', 'extra':'628 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 433705},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 433719},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 433732},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 433748},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x1808ae8', 'extra':'1425 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 466568},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 467724},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 467913},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 467931},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 470106},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 470143},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x1808ae8', 'extra':'4233 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'169', 'usec_begin': 470177},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 470231},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x3', 'extra':'https://mail.google.com/mail?view=page&name=browser&ver=1k96igf4806cy', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 473833},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 474818},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 474867},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 477174},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 477224},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 477615},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x1828c00', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 477662},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 480476},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 480728},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 480744},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 488153},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 488720},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 489077},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 489092},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 489249},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 489309},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 489346},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 489360},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 489384},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 489544},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 489664},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 489678},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x4', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 490180},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 490442},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x3', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 490470},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 490597},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 490622},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 490636},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 490674},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_host', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'270', 'usec_begin': 490688},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x2', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 490772},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_host', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'275', 'usec_begin': 492500},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.connect', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'279', 'usec_begin': 492527},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.connect', 'id':'0x17c4d58', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'73', 'usec_begin': 492546},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.connect', 'id':'0x17c4d58', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'238', 'usec_begin': 513216},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x17c4d58', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 513450},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x17c4d58', 'extra':'70 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 513480},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x17c4d58', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 513509},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x17c4d58', 'extra':'1171 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 543672},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x17c4d58', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 545326},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x17c4d58', 'extra':'314 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 545366},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x17c4d58', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 545386},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x17c4d58', 'extra':'47 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 579036},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.connect', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'284', 'usec_begin': 579232},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 579251},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x17c4d58', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 579307},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x17c4d58', 'extra':'611 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 579341},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 579354},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 579367},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x17c4d58', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 579394},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x17c4d58', 'extra':'1425 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 604199},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 604781},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 605139},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 605157},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 605470},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x17c4d58', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 605487},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x17c4d58', 'extra':'1234 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'169', 'usec_begin': 605508},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x17b8e60', 'extra':'https://ssl.google-analytics.com/siteopt.js?v=1&utmxkey=1206330561&utmx=&utmxx=&utmxtime=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 605550},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 605881},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 606445},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 606461},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 608174},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 612014},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 612070},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 612084},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 612669},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x5', 'extra':'https://mail.google.com/mail/help/images/logo.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 613155},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x6', 'extra':'https://mail.google.com/mail/images/corner_tl.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 614484},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 614782},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 614828},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 614842},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 614872},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x7', 'extra':'https://mail.google.com/mail/images/corner_tr.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 615312},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x8', 'extra':'https://mail.google.com/mail/images/corner_bl.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 615792},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x9', 'extra':'https://mail.google.com/mail/images/corner_br.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 616264},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 616610},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 616653},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 616667},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 616688},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0xa', 'extra':'https://mail.google.com/mail/help/images/icons/spam_new.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 617692},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0xb', 'extra':'https://mail.google.com/mail/help/images/icons/cell.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 618471},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0xc', 'extra':'https://mail.google.com/mail/help/images/icons/storage.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 619165},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 619695},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 619744},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 619758},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 619784},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 619894},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 619930},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 619944},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 619962},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 620183},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 620277},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 620292},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 620313},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0xd', 'extra':'https://www.google.com/accounts/google_transparent.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 625447},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 626010},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 626071},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 626085},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 626104},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x1844b80', 'extra':'https://www.google.com/accounts/google_transparent.gif', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 626259},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x1844b80', 'extra':'https://www.google.com/accounts/google_transparent.gif', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 626283},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x1844b80', 'extra':'https://www.google.com/accounts/google_transparent.gif', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 626296},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x1844b80', 'extra':'https://www.google.com/accounts/google_transparent.gif', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 626420},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x1844b80', 'extra':'https://www.google.com/accounts/google_transparent.gif', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 626436},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 626507},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x1808ae8', 'extra':'668 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 626554},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x1844b80', 'extra':'https://www.google.com/accounts/google_transparent.gif', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 626569},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x1844b80', 'extra':'https://www.google.com/accounts/google_transparent.gif', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 626581},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 626604},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 631563},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 631620},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 631634},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0xe', 'extra':'https://mail.google.com/mail/help/images/button_1_1.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 632318},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0xf', 'extra':'https://www.google.com/mail/help/images/clear.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 632749},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x10', 'extra':'https://mail.google.com/mail/help/images/button_1_2.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 633222},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x11', 'extra':'https://mail.google.com/mail/help/images/button_1_3.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 633704},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x12', 'extra':'https://mail.google.com/mail/help/images/button_2_1.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 634280},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x13', 'extra':'https://mail.google.com/mail/help/images/button_2_2.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 634918},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x14', 'extra':'https://mail.google.com/mail/help/images/button_2_3.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 635476},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x15', 'extra':'https://mail.google.com/mail/help/images/button_3_1.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 635969},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x16', 'extra':'https://mail.google.com/mail/help/images/button_3_2.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 636515},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x17', 'extra':'https://mail.google.com/mail/help/images/button_3_3.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 636987},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 637536},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 637742},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 637796},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 637812},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 637838},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 637905},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 637940},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 637954},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 640514},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x18', 'extra':'https://ssl.google-analytics.com/urchin.js', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 642073},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x4', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 642324},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x17c35e8', 'extra':'https://ssl.google-analytics.com/urchin.js', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 643004},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x17c35e8', 'extra':'https://ssl.google-analytics.com/urchin.js', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 643029},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x17c35e8', 'extra':'https://ssl.google-analytics.com/urchin.js', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 643042},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x17c35e8', 'extra':'https://ssl.google-analytics.com/urchin.js', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 643078},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x17c35e8', 'extra':'https://ssl.google-analytics.com/urchin.js', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 643092},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x17c4d58', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 643146},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x17c4d58', 'extra':'601 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 643196},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x17c35e8', 'extra':'https://ssl.google-analytics.com/urchin.js', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 643215},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x17c35e8', 'extra':'https://ssl.google-analytics.com/urchin.js', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 643228},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x17c4d58', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 643248},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x1808ae8', 'extra':'406 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 664360},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x1844b80', 'extra':'https://www.google.com/accounts/google_transparent.gif', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 664656},
+{'pid':'0x854', 'tid':'0xa20', 'type':'INSTANT', 'name':'socket.disconnect', 'id':'0x1808ae8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'113', 'usec_begin': 665032},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x17c4d58', 'extra':'390 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 667425},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x17c35e8', 'extra':'https://ssl.google-analytics.com/urchin.js', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 667652},
+{'pid':'0x854', 'tid':'0xa20', 'type':'INSTANT', 'name':'socket.disconnect', 'id':'0x17c4d58', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'113', 'usec_begin': 668023},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'25', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 674638},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x5', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 685812},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x6', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 692814},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x7', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 693201},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x8', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 698519},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x9', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 698776},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0xa', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 703813},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0xb', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 704060},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0xc', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 704264},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0xe', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 704483},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0xf', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 704744},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x10', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 705404},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x11', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 705684},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x12', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 705929},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x13', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 706155},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x14', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 706620},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x15', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 706805},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x16', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 707344},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x17', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 707631},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0xd', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 707885},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 708860},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 710721},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 710737},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 711495},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 711663},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 711730},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 711744},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 724816},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 724869},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 724883},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 724905},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x19', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1300046089&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmcn=1&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/mail/gaia/homepage&utmac=UA-992684-1&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 725598},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x184aed0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1300046089&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmcn=1&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/mail/gaia/homepage&utmac=UA-992684-1&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 726084},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x184aed0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1300046089&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmcn=1&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/mail/gaia/homepage&utmac=UA-992684-1&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 726112},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x184aed0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1300046089&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmcn=1&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/mail/gaia/homepage&utmac=UA-992684-1&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 726128},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x184aed0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1300046089&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmcn=1&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/mail/gaia/homepage&utmac=UA-992684-1&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 726168},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_host', 'id':'0x184aed0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1300046089&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmcn=1&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/mail/gaia/homepage&utmac=UA-992684-1&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'270', 'usec_begin': 726185},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_host', 'id':'0x184aed0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1300046089&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmcn=1&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/mail/gaia/homepage&utmac=UA-992684-1&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'275', 'usec_begin': 726504},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.connect', 'id':'0x184aed0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1300046089&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmcn=1&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/mail/gaia/homepage&utmac=UA-992684-1&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'279', 'usec_begin': 726523},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.connect', 'id':'0x265a568', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'73', 'usec_begin': 726544},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x1a', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1449512545&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/1206330561/test&utmac=UA-1923148-3&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 728532},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x26b99b0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1449512545&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/1206330561/test&utmac=UA-1923148-3&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 728968},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x26b99b0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1449512545&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/1206330561/test&utmac=UA-1923148-3&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 728992},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x26b99b0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1449512545&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/1206330561/test&utmac=UA-1923148-3&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 729008},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x26b99b0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1449512545&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/1206330561/test&utmac=UA-1923148-3&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 729096},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_host', 'id':'0x26b99b0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1449512545&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/1206330561/test&utmac=UA-1923148-3&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'270', 'usec_begin': 729113},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_host', 'id':'0x26b99b0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1449512545&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/1206330561/test&utmac=UA-1923148-3&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'275', 'usec_begin': 729407},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.connect', 'id':'0x26b99b0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1449512545&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/1206330561/test&utmac=UA-1923148-3&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'279', 'usec_begin': 729426},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.connect', 'id':'0x265aeb0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'73', 'usec_begin': 729446},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 730189},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x18', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 730637},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.connect', 'id':'0x265a568', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'238', 'usec_begin': 746070},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x265a568', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 746305},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x265a568', 'extra':'70 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 746339},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x265a568', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 746360},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.connect', 'id':'0x265aeb0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'238', 'usec_begin': 749792},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x265aeb0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 750021},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x265aeb0', 'extra':'70 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 750051},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x265aeb0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 750073},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x265a568', 'extra':'1171 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 771135},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x265a568', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 772294},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x265a568', 'extra':'314 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 772351},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x265a568', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 772369},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x265aeb0', 'extra':'1171 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 775944},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x265aeb0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 776955},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x265aeb0', 'extra':'314 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 776990},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x265aeb0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 777007},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x265a568', 'extra':'47 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 804626},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.connect', 'id':'0x184aed0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1300046089&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmcn=1&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/mail/gaia/homepage&utmac=UA-992684-1&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'284', 'usec_begin': 804813},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x184aed0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1300046089&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmcn=1&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/mail/gaia/homepage&utmac=UA-992684-1&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 804835},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x265a568', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 804905},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x265a568', 'extra':'1003 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 804939},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x184aed0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1300046089&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmcn=1&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/mail/gaia/homepage&utmac=UA-992684-1&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 804952},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x184aed0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1300046089&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmcn=1&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/mail/gaia/homepage&utmac=UA-992684-1&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 804968},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x265a568', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 804989},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x265aeb0', 'extra':'47 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 823698},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.connect', 'id':'0x26b99b0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1449512545&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/1206330561/test&utmac=UA-1923148-3&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'284', 'usec_begin': 823868},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x26b99b0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1449512545&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/1206330561/test&utmac=UA-1923148-3&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 823902},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x265aeb0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 823964},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x265aeb0', 'extra':'993 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 823993},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x26b99b0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1449512545&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/1206330561/test&utmac=UA-1923148-3&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 824006},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x26b99b0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1449512545&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/1206330561/test&utmac=UA-1923148-3&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 824022},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x265aeb0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 824039},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x265a568', 'extra':'554 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 826462},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x184aed0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1300046089&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmcn=1&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/mail/gaia/homepage&utmac=UA-992684-1&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 827124},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x184aed0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1300046089&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmcn=1&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/mail/gaia/homepage&utmac=UA-992684-1&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 827368},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x184aed0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1300046089&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmcn=1&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/mail/gaia/homepage&utmac=UA-992684-1&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 827398},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x19', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 827721},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x265aeb0', 'extra':'551 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 847745},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x26b99b0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1449512545&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/1206330561/test&utmac=UA-1923148-3&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 848279},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x26b99b0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1449512545&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/1206330561/test&utmac=UA-1923148-3&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 848496},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x26b99b0', 'extra':'https://ssl.google-analytics.com/__utm.gif?utmwv=1.3&utmn=1449512545&utmcs=UTF-8&utmsr=1920x1200&utmsc=32-bit&utmul=en-us&utmje=1&utmfl=9.0%20%20r115&utmdt=%0A%20%20Gmail%3A%20Email%20from%20Google%0A&utmhn=www.google.com&utmhid=1875579123&utmr=-&utmp=/1206330561/test&utmac=UA-1923148-3&utmcc=__utma%3D173272373.1300046089.1221584154.1221584154.1221584154.1%3B%2B__utmx%3D173272373.%3B%2B__utmz%3D173272373.1221584154.1.1.utmccn%3D(direct)%7Cutmcsr%3D(direct)%7Cutmcmd%3D(none)%3B%2B', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 848542},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x1b', 'extra':'https://mail.google.com/mail/images/c.gif?t=1221584153332', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 850184},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail/images/c.gif?t=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 850670},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail/images/c.gif?t=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 850693},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail/images/c.gif?t=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 850706},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail/images/c.gif?t=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 850740},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_host', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail/images/c.gif?t=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'270', 'usec_begin': 850759},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_host', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail/images/c.gif?t=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'275', 'usec_begin': 851028},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.connect', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail/images/c.gif?t=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'279', 'usec_begin': 851047},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.connect', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'73', 'usec_begin': 851062},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x1a', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 851484},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.connect', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'238', 'usec_begin': 853022},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 853209},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x26d7910', 'extra':'70 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 853244},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 853286},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x26d7910', 'extra':'1719 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 855598},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 856827},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x26d7910', 'extra':'186 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 856864},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 856882},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x26d7910', 'extra':'47 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 861150},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.connect', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail/images/c.gif?t=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'284', 'usec_begin': 861377},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail/images/c.gif?t=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 861394},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 861449},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x26d7910', 'extra':'582 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 861482},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail/images/c.gif?t=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 861495},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail/images/c.gif?t=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 861515},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 861534},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x26d7910', 'extra':'565 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 892717},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail/images/c.gif?t=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 893171},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail/images/c.gif?t=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 893394},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail/images/c.gif?t=1221584153332', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 893421},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'frame.load', 'id':'0x16e8488', 'extra':'https://www.google.com/accounts/ServiceLogin?service=mail&passive=true&rm=false&continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3Dhtml%26zy%3Dl&bsv=1k96igf4806cy&ltmpl=default&ltmplcache=2', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'310', 'usec_begin': 893699},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x1b', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 893847},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'12', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 923735},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 1173749},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 1423773},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 1673792},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'3', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 1924790},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 2174817},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 2424843},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x1c', 'extra':'https://mail.google.com/mail?gxlu=erikkay&zx=1221584155500', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 2657744},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail?gxlu=erikkay&zx=1221584155500', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 2658189},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail?gxlu=erikkay&zx=1221584155500', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 2658239},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail?gxlu=erikkay&zx=1221584155500', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 2658254},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail?gxlu=erikkay&zx=1221584155500', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 2658292},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail?gxlu=erikkay&zx=1221584155500', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 2658306},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 2658367},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x26d7910', 'extra':'597 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 2658414},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail?gxlu=erikkay&zx=1221584155500', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 2658428},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail?gxlu=erikkay&zx=1221584155500', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 2658440},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 2658455},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 2674849},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x26d7910', 'extra':'498 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 2712401},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail?gxlu=erikkay&zx=1221584155500', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 2712952},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x26b99b0', 'extra':'https://mail.google.com/mail?gxlu=erikkay&zx=1221584155500', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 2713157},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 2713174},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 2924878},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 3175861},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 3425898},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 3675905},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 3925923},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 4175951},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 4425962},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 4675987},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 4926016},
+{'pid':'0x854', 'tid':'0xa20', 'type':'INSTANT', 'name':'socket.disconnect', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'113', 'usec_begin': 5034304},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x1d', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 5034619},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 5035008},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 5035035},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 5035049},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 5035082},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_host', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'270', 'usec_begin': 5035096},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_host', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'275', 'usec_begin': 5035375},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.connect', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'279', 'usec_begin': 5035392},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.connect', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'73', 'usec_begin': 5035409},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.connect', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'238', 'usec_begin': 5036730},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 5036969},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x26d7910', 'extra':'70 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 5037014},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 5037064},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x26d7910', 'extra':'1718 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 5039568},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 5040730},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x26d7910', 'extra':'186 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 5040764},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 5040782},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x26d7910', 'extra':'47 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 5044663},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.connect', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'284', 'usec_begin': 5044830},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 5044855},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 5044964},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x26d7910', 'extra':'1067 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 5044992},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 5045007},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_body', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'306', 'usec_begin': 5045020},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 5045043},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x26d7910', 'extra':'219 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 5045061},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_body', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'311', 'usec_begin': 5045074},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 5045087},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 5045100},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x26d7910', 'extra':'1425 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 5136971},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 5137031},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 5137045},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 5137059},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x26d7910', 'extra':'547 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'169', 'usec_begin': 5137078},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x26b99b0', 'extra':'https://www.google.com/accounts/ServiceLoginAuth?service=mail', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 5137821},
+{'pid':'0x854', 'tid':'0xa20', 'type':'INSTANT', 'name':'socket.disconnect', 'id':'0x26d7910', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'113', 'usec_begin': 5138262},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x1d', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 5138291},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 5138722},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 5138744},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 5138757},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 5138779},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_host', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'270', 'usec_begin': 5138793},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_host', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'275', 'usec_begin': 5139038},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.connect', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'279', 'usec_begin': 5139055},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.connect', 'id':'0x26c29a8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'73', 'usec_begin': 5139075},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.connect', 'id':'0x26c29a8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'238', 'usec_begin': 5140266},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x26c29a8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 5140480},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x26c29a8', 'extra':'70 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 5140522},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x26c29a8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 5140563},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x26c29a8', 'extra':'1718 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 5142351},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x26c29a8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 5143531},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x26c29a8', 'extra':'186 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 5143565},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x26c29a8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 5143582},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x26c29a8', 'extra':'47 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 5147388},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.connect', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'284', 'usec_begin': 5147552},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 5147586},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x26c29a8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 5147647},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x26c29a8', 'extra':'1572 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 5147681},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 5147695},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 5147709},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x26c29a8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 5147723},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 5176025},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x26c29a8', 'extra':'1425 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 5179942},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 5180371},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 5180747},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 5180766},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 5180999},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x26c29a8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 5181037},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x26c29a8', 'extra':'114 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'169', 'usec_begin': 5181057},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x184aed0', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 5181094},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5184752},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5184865},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5184880},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5185121},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'frame.load', 'id':'0x16e8488', 'extra':'https://www.google.com/accounts/CheckCookie?continue=http%3A%2F%2Fmail.google.com%2Fmail%2F%3F&service=mail&ltmpl=default&chtml=LoginDoneHtml', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'310', 'usec_begin': 5185249},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x1d', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 5185368},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x1e', 'extra':'http://mail.google.com/mail/?auth=DQAAAIcAAADbe-apndwa7Xx20a4Fl-4priInNZo_bihPly3fHKKz17I0Sgr02B1EXCHhvGL6Ifi3JHFN9Z08Jsq7_3ZNuqdau5F6rcqTxLAHgQuLykwgkwch36z_ge541j7ef1hfCkal7F4ThC42jFkBiXD5Ia9K09lkrSl7uQlDfcnH-Qzz18TuHBIlaTTo60x3wF13E4w&gausr=erikkay%40gmail.com', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 5186094},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x184aed0', 'extra':'http://mail.google.com/mail/?auth=DQAAAIcAAADbe-apndwa7Xx20a4Fl-4priInNZo_bihPly3fHKKz17I0Sgr02B1EXCHhvGL6Ifi3JHFN9Z08Jsq7_3ZNuqdau5F6rcqTxLAHgQuLykwgkwch36z_ge541j7ef1hfCkal7F4ThC42jFkBiXD5Ia9K09lkrSl7uQlDfcnH-Qzz18TuHBIlaTTo60x3wF13E4w&gausr=erikkay%40gmail.com', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 5186490},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x184aed0', 'extra':'http://mail.google.com/mail/?auth=DQAAAIcAAADbe-apndwa7Xx20a4Fl-4priInNZo_bihPly3fHKKz17I0Sgr02B1EXCHhvGL6Ifi3JHFN9Z08Jsq7_3ZNuqdau5F6rcqTxLAHgQuLykwgkwch36z_ge541j7ef1hfCkal7F4ThC42jFkBiXD5Ia9K09lkrSl7uQlDfcnH-Qzz18TuHBIlaTTo60x3wF13E4w&gausr=erikkay%40gmail.com', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 5186836},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x184aed0', 'extra':'http://mail.google.com/mail/?auth=DQAAAIcAAADbe-apndwa7Xx20a4Fl-4priInNZo_bihPly3fHKKz17I0Sgr02B1EXCHhvGL6Ifi3JHFN9Z08Jsq7_3ZNuqdau5F6rcqTxLAHgQuLykwgkwch36z_ge541j7ef1hfCkal7F4ThC42jFkBiXD5Ia9K09lkrSl7uQlDfcnH-Qzz18TuHBIlaTTo60x3wF13E4w&gausr=erikkay%40gmail.com', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 5186855},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x184aed0', 'extra':'http://mail.google.com/mail/?auth=DQAAAIcAAADbe-apndwa7Xx20a4Fl-4priInNZo_bihPly3fHKKz17I0Sgr02B1EXCHhvGL6Ifi3JHFN9Z08Jsq7_3ZNuqdau5F6rcqTxLAHgQuLykwgkwch36z_ge541j7ef1hfCkal7F4ThC42jFkBiXD5Ia9K09lkrSl7uQlDfcnH-Qzz18TuHBIlaTTo60x3wF13E4w&gausr=erikkay%40gmail.com', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 5186955},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_host', 'id':'0x184aed0', 'extra':'http://mail.google.com/mail/?auth=DQAAAIcAAADbe-apndwa7Xx20a4Fl-4priInNZo_bihPly3fHKKz17I0Sgr02B1EXCHhvGL6Ifi3JHFN9Z08Jsq7_3ZNuqdau5F6rcqTxLAHgQuLykwgkwch36z_ge541j7ef1hfCkal7F4ThC42jFkBiXD5Ia9K09lkrSl7uQlDfcnH-Qzz18TuHBIlaTTo60x3wF13E4w&gausr=erikkay%40gmail.com', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'270', 'usec_begin': 5186972},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_host', 'id':'0x184aed0', 'extra':'http://mail.google.com/mail/?auth=DQAAAIcAAADbe-apndwa7Xx20a4Fl-4priInNZo_bihPly3fHKKz17I0Sgr02B1EXCHhvGL6Ifi3JHFN9Z08Jsq7_3ZNuqdau5F6rcqTxLAHgQuLykwgkwch36z_ge541j7ef1hfCkal7F4ThC42jFkBiXD5Ia9K09lkrSl7uQlDfcnH-Qzz18TuHBIlaTTo60x3wF13E4w&gausr=erikkay%40gmail.com', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'275', 'usec_begin': 5187271},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.connect', 'id':'0x184aed0', 'extra':'http://mail.google.com/mail/?auth=DQAAAIcAAADbe-apndwa7Xx20a4Fl-4priInNZo_bihPly3fHKKz17I0Sgr02B1EXCHhvGL6Ifi3JHFN9Z08Jsq7_3ZNuqdau5F6rcqTxLAHgQuLykwgkwch36z_ge541j7ef1hfCkal7F4ThC42jFkBiXD5Ia9K09lkrSl7uQlDfcnH-Qzz18TuHBIlaTTo60x3wF13E4w&gausr=erikkay%40gmail.com', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'279', 'usec_begin': 5187289},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.connect', 'id':'0x26c1c50', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'73', 'usec_begin': 5187304},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.connect', 'id':'0x26c1c50', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'238', 'usec_begin': 5188602},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.connect', 'id':'0x184aed0', 'extra':'http://mail.google.com/mail/?auth=DQAAAIcAAADbe-apndwa7Xx20a4Fl-4priInNZo_bihPly3fHKKz17I0Sgr02B1EXCHhvGL6Ifi3JHFN9Z08Jsq7_3ZNuqdau5F6rcqTxLAHgQuLykwgkwch36z_ge541j7ef1hfCkal7F4ThC42jFkBiXD5Ia9K09lkrSl7uQlDfcnH-Qzz18TuHBIlaTTo60x3wF13E4w&gausr=erikkay%40gmail.com', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'284', 'usec_begin': 5188632},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x184aed0', 'extra':'http://mail.google.com/mail/?auth=DQAAAIcAAADbe-apndwa7Xx20a4Fl-4priInNZo_bihPly3fHKKz17I0Sgr02B1EXCHhvGL6Ifi3JHFN9Z08Jsq7_3ZNuqdau5F6rcqTxLAHgQuLykwgkwch36z_ge541j7ef1hfCkal7F4ThC42jFkBiXD5Ia9K09lkrSl7uQlDfcnH-Qzz18TuHBIlaTTo60x3wF13E4w&gausr=erikkay%40gmail.com', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 5188663},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x26c1c50', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 5188736},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x26c1c50', 'extra':'935 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 5188786},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x184aed0', 'extra':'http://mail.google.com/mail/?auth=DQAAAIcAAADbe-apndwa7Xx20a4Fl-4priInNZo_bihPly3fHKKz17I0Sgr02B1EXCHhvGL6Ifi3JHFN9Z08Jsq7_3ZNuqdau5F6rcqTxLAHgQuLykwgkwch36z_ge541j7ef1hfCkal7F4ThC42jFkBiXD5Ia9K09lkrSl7uQlDfcnH-Qzz18TuHBIlaTTo60x3wF13E4w&gausr=erikkay%40gmail.com', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 5188799},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x184aed0', 'extra':'http://mail.google.com/mail/?auth=DQAAAIcAAADbe-apndwa7Xx20a4Fl-4priInNZo_bihPly3fHKKz17I0Sgr02B1EXCHhvGL6Ifi3JHFN9Z08Jsq7_3ZNuqdau5F6rcqTxLAHgQuLykwgkwch36z_ge541j7ef1hfCkal7F4ThC42jFkBiXD5Ia9K09lkrSl7uQlDfcnH-Qzz18TuHBIlaTTo60x3wF13E4w&gausr=erikkay%40gmail.com', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 5188813},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x26c1c50', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 5188831},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 5426048},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x26c1c50', 'extra':'1403 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 5448207},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x184aed0', 'extra':'http://mail.google.com/mail/?auth=DQAAAIcAAADbe-apndwa7Xx20a4Fl-4priInNZo_bihPly3fHKKz17I0Sgr02B1EXCHhvGL6Ifi3JHFN9Z08Jsq7_3ZNuqdau5F6rcqTxLAHgQuLykwgkwch36z_ge541j7ef1hfCkal7F4ThC42jFkBiXD5Ia9K09lkrSl7uQlDfcnH-Qzz18TuHBIlaTTo60x3wF13E4w&gausr=erikkay%40gmail.com', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 5448754},
+{'pid':'0x854', 'tid':'0xa20', 'type':'INSTANT', 'name':'socket.disconnect', 'id':'0x26c1c50', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'113', 'usec_begin': 5449221},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x1e', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 5449247},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 5449521},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 5449543},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 5449565},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 5449588},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_host', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'270', 'usec_begin': 5449601},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_host', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'275', 'usec_begin': 5449875},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.connect', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'279', 'usec_begin': 5449891},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.connect', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'73', 'usec_begin': 5449919},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.connect', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'238', 'usec_begin': 5451177},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.connect', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'284', 'usec_begin': 5451207},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 5451226},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 5451267},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x183c9d0', 'extra':'1076 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 5451306},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 5451326},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 5451344},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 5451377},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'4096 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 5659772},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 5660050},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 5660264},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 5660286},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 5660572},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 5660590},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'194 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'169', 'usec_begin': 5660609},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 5660621},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5663002},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5663067},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5663082},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5663444},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x1f', 'extra':'http://mail.google.com/mail/?view=page&name=browser&ver=1k96igf4806cy', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 5663617},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 5664187},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 5664204},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'1243 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'169', 'usec_begin': 5664228},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 5664242},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 5664497},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 5664513},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5664666},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5664846},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5664861},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5666301},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5666531},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5666638},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5666653},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5669200},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5672512},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5673323},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5673340},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5673454},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x20', 'extra':'http://mail.google.com/mail/?ui=2&view=js&name=js&ver=vuagRBgWwto&am=R_E4pcQ3aAGDQefb', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 5674043},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x21', 'extra':'http://mail.google.com/mail/?ui=2&view=bsp&ver=1qygpcgurkovy', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 5674722},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x22', 'extra':'http://mail.google.com/mail/?ui=2&view=bsp&ver=1qygpcgurkovy', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 5675392},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x23', 'extra':'http://mail.google.com/mail/?ui=2&view=bsp&ver=1qygpcgurkovy', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 5676060},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x1f', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 5676230},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'3', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 5676274},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5678859},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5679537},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5679553},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5679962},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5681218},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5682436},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5682452},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5685626},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5686710},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5687420},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5687436},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5687815},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5690191},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5691313},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5691330},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5692120},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5693609},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5694734},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5694750},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5695974},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5697446},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5698579},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5698595},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5704025},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'frame.load', 'id':'0x16e8488', 'extra':'http://mail.google.com/mail/?ui=2&view=bsp&ver=1qygpcgurkovy', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'310', 'usec_begin': 5704226},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x21', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 5704446},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5707224},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5708662},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5708678},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5710175},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5711076},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5711922},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5711939},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5712496},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'frame.load', 'id':'0x16e8488', 'extra':'http://mail.google.com/mail/?ui=2&view=bsp&ver=1qygpcgurkovy', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'310', 'usec_begin': 5713037},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x22', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 5713115},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5714839},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5715817},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5715834},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5716464},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5717330},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5718018},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5718057},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5719545},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5720612},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5721545},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5721568},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x24', 'extra':'http://mail.google.com/mail/?ui=2&view=ss&ver=gh1z9bhrf433&am=R_E4pcQ3aAGDQefb', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 5725484},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5725885},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'frame.load', 'id':'0x16e8488', 'extra':'http://mail.google.com/mail/?ui=2&view=bsp&ver=1qygpcgurkovy', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'310', 'usec_begin': 5726725},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x23', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 5726810},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5728493},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5729806},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5729822},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5730693},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5731723},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5732590},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5732605},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5733440},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5734576},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5735554},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5735570},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5736975},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5739862},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5741391},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5741412},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5742147},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5744058},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5744928},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5744944},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5745594},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5746916},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5747953},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5747968},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5748623},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5749906},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5750861},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5750877},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5751605},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5753859},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5754954},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5754970},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5755731},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5760132},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5763002},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5763019},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5770324},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5771435},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5772394},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5772410},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5772744},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5774818},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5775747},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5775763},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5778463},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5780166},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5781529},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5781563},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5781924},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5782823},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5783630},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5783646},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5783866},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'12378 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 5784462},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x26b99b0', 'extra':'http://mail.google.com/mail/?shva=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 5784507},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5785901},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5786613},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5786637},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5786935},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5787995},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5788741},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5788757},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5788967},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5789815},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5790442},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5790458},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5790690},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5794789},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5796229},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5796246},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5797596},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x24', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 5804084},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5807913},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5809711},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5809728},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5810930},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5812241},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5813376},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5813399},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5813829},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5814801},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5815619},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5815635},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5815922},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5820052},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5821491},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5821507},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5823413},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5824726},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5825918},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5825934},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5827542},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5833240},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5838020},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5838036},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5838178},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5839050},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5839934},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5839951},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5840288},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5841576},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5842569},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5842585},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5842837},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5843917},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5844784},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5844800},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5845056},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5845915},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5846647},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5846663},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5847032},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x1e', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 5847198},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5848983},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5849929},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5849944},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5850203},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5851279},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5852131},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5852147},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5852588},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5854731},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5856045},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5856061},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5856517},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5857483},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5858279},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5858295},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5858530},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5860092},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5861295},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5861312},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5862046},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5863334},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5864083},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5864099},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5865299},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5866376},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5867211},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5867227},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5867534},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5868822},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5869580},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5869595},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5869966},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5870945},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5871949},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5871965},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5872419},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5874482},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5875635},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5875652},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5875959},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5876958},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5877753},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5877771},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5878074},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5879102},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5880039},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5880056},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5880498},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5883196},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5884790},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5884807},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5886935},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5887846},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5888900},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5888916},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5889562},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5891077},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5892318},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5892334},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5893038},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5894495},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5895265},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5895281},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5895541},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5896804},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5897890},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5897906},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5898425},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5899375},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5900216},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5900232},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5900602},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5902004},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5902825},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5902840},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5903198},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5904155},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5904843},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5904859},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5905208},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5906386},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5907352},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5907368},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5908413},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5909685},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5910618},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5910634},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5911162},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5912612},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5913905},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5913926},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5914786},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5916583},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5917588},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5917603},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5923655},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5925193},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5926428},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5926444},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5927406},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5929316},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5930373},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5930390},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5930859},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5932088},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5933072},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5933087},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5933405},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5934363},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5935105},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5935121},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5937089},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5939306},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5940982},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5940999},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5941663},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5942543},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5943167},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5943183},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5943375},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5944287},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5945048},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5945064},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5945312},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5946902},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5947689},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5947704},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5947982},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5949161},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5950128},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5950144},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5950491},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5951885},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5952879},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5952895},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5954060},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5956682},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5958366},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5958382},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5958904},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5959774},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5960572},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5960589},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5960975},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5962060},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5963319},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5963336},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5963754},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5965412},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5966375},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5966391},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5966925},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5969748},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5971857},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5971880},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5972629},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5974254},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5974979},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5974996},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5975798},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5976827},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5977665},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5977681},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5977957},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5989725},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5991102},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5991119},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5991574},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5992621},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5993420},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5993436},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5993890},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5994770},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5995430},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5995454},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5995644},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 5997509},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 5998368},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 5998384},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 5998878},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'50', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 6000447},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 6004922},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 6008518},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 6008536},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 6016226},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 6017496},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 6018313},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 6018329},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 6018656},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 6018958},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 6019128},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 6019143},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 6019395},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 6019530},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 6019634},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 6019657},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 6019730},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 6020006},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 6020069},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 6020084},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x25', 'extra':'about:blank', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 6154931},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'frame.load', 'id':'0x16e8488', 'extra':'about:blank', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'310', 'usec_begin': 6155294},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x25', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 6155359},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x26', 'extra':'http://mail.google.com/mail/images/cleardot.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 6190721},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x27', 'extra':'http://mail.google.com/mail/rc?a=af&c=fff1a8&w=4&h=4', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 6222057},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x28', 'extra':'http://mail.google.com/mail/rc?a=af&c=c3d9ff&w=4&h=4', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 6223307},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x29', 'extra':'http://mail.google.com/mail/images/2/5/c/icons1c.png', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 6227933},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x2a', 'extra':'http://mail.google.com/mail/images/2/icons_ns2a.png', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 6229889},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x2b', 'extra':'http://mail.google.com/mail/rc?a=af&c=ccc&w=4&h=4', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 6235619},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x2c', 'extra':'http://mail.google.com/mail/rc?a=af&c=c3d9ff&w=3&h=3', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 6240936},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x2d', 'extra':'http://mail.google.com/mail/rc?a=af&c=b5edbc&w=3&h=3', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 6241168},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x2e', 'extra':'http://mail.google.com/mail/rc?a=af&c=e0ecff&w=3&h=3', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 6256964},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x2f', 'extra':'http://mail.google.com/mail/images/2/5/logo.png', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 6362778},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 6474842},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'50', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 6475026},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 6640799},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 6640862},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 6640876},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 6640892},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x30', 'extra':'http://www.google.com/setgmail?zx=li65sohl5bca', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 6641172},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x3123180', 'extra':'http://www.google.com/setgmail?zx=li65sohl5bca', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 6641558},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x3123180', 'extra':'http://www.google.com/setgmail?zx=li65sohl5bca', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 6641587},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x3123180', 'extra':'http://www.google.com/setgmail?zx=li65sohl5bca', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 6641601},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x3123180', 'extra':'http://www.google.com/setgmail?zx=li65sohl5bca', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 6641640},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_host', 'id':'0x3123180', 'extra':'http://www.google.com/setgmail?zx=li65sohl5bca', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'270', 'usec_begin': 6641655},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_host', 'id':'0x3123180', 'extra':'http://www.google.com/setgmail?zx=li65sohl5bca', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'275', 'usec_begin': 6642083},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.connect', 'id':'0x3123180', 'extra':'http://www.google.com/setgmail?zx=li65sohl5bca', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'279', 'usec_begin': 6642103},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.connect', 'id':'0x318d410', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'73', 'usec_begin': 6642117},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.connect', 'id':'0x318d410', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'238', 'usec_begin': 6643324},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.connect', 'id':'0x3123180', 'extra':'http://www.google.com/setgmail?zx=li65sohl5bca', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'284', 'usec_begin': 6643353},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x3123180', 'extra':'http://www.google.com/setgmail?zx=li65sohl5bca', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 6643373},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x318d410', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 6643409},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x318d410', 'extra':'648 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 6643437},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x3123180', 'extra':'http://www.google.com/setgmail?zx=li65sohl5bca', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 6643451},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x3123180', 'extra':'http://www.google.com/setgmail?zx=li65sohl5bca', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 6643463},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x318d410', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 6643484},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x318d410', 'extra':'530 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 6665182},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x3123180', 'extra':'http://www.google.com/setgmail?zx=li65sohl5bca', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 6665381},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x3123180', 'extra':'http://www.google.com/setgmail?zx=li65sohl5bca', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 6665704},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x318d410', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 6665721},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x31', 'extra':'http://mail.google.com/mail/?ui=2&ik=2f47b34cd6&view=au&rt=j', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 6670763},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/?ui=2&ik=2f47b34cd6&view=au&rt=j', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 6671109},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/?ui=2&ik=2f47b34cd6&view=au&rt=j', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 6671134},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/?ui=2&ik=2f47b34cd6&view=au&rt=j', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 6671147},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/?ui=2&ik=2f47b34cd6&view=au&rt=j', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 6671181},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/?ui=2&ik=2f47b34cd6&view=au&rt=j', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 6671195},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 6671242},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x183c9d0', 'extra':'1237 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 6671295},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/?ui=2&ik=2f47b34cd6&view=au&rt=j', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 6671309},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/?ui=2&ik=2f47b34cd6&view=au&rt=j', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 6671322},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 6671346},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'4096 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 6705526},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/?ui=2&ik=2f47b34cd6&view=au&rt=j', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 6705775},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/?ui=2&ik=2f47b34cd6&view=au&rt=j', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 6705906},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/?ui=2&ik=2f47b34cd6&view=au&rt=j', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 6705926},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x20', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 6724041},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'52', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 6829332},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/?ui=2&ik=2f47b34cd6&view=au&rt=j', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 6829507},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 6829528},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'145 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'169', 'usec_begin': 6829554},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/?ui=2&ik=2f47b34cd6&view=au&rt=j', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 6829578},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x32', 'extra':'http://mail.google.com/mail/?ui=2&view=jsm&name=ld%2Cml&ver=vuagRBgWwto&am=R_E4pcQ3aAGDQefb', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 6831901},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x26', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 6836122},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x27', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 6836346},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x28', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 6862988},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x29', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 6864446},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x2a', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 6892630},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x2b', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 6909888},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x2c', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 6910021},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x2d', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 6913268},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x2e', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 6916383},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x2f', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 6916484},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x31', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 6926048},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x33', 'extra':'http://mail.google.com/mail/?ui=2&view=jsm&name=cv&ver=vuagRBgWwto&am=R_E4pcQ3aAGDQefb', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 7006135},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x32', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 7006601},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x34', 'extra':'http://mail.google.com/mail/?ui=2&view=jsm&name=mo%2Ccw%2Cch&ver=vuagRBgWwto&am=R_E4pcQ3aAGDQefb', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 7069712},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x33', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 7070044},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x35', 'extra':'http://mail.google.com/mail/?ui=2&view=jsm&name=cm&ver=vuagRBgWwto&am=R_E4pcQ3aAGDQefb', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 7124934},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x34', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 7125262},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'50', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 7125318},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x36', 'extra':'http://mail.google.com/mail/?ui=2&view=jsm&name=pc&ver=vuagRBgWwto&am=R_E4pcQ3aAGDQefb', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 7154226},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x35', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 7154552},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x37', 'extra':'http://mail.google.com/mail/?ui=2&view=jsm&name=ca&ver=vuagRBgWwto&am=R_E4pcQ3aAGDQefb', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 7161100},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x36', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 7161380},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x38', 'extra':'http://mail.google.com/mail/?ui=2&view=jsm&name=e&ver=vuagRBgWwto&am=R_E4pcQ3aAGDQefb', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 7167291},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x37', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 7167559},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x38', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 7217361},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x39', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1086&MODE=init&zx=gsd2cum2kqx0&t=1', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 7231075},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1086&MODE=init&zx=gsd2cum2kqx0&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 7232017},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1086&MODE=init&zx=gsd2cum2kqx0&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 7232053},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1086&MODE=init&zx=gsd2cum2kqx0&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 7232074},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1086&MODE=init&zx=gsd2cum2kqx0&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 7232126},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1086&MODE=init&zx=gsd2cum2kqx0&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 7232148},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 7232197},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x183c9d0', 'extra':'1217 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 7232247},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1086&MODE=init&zx=gsd2cum2kqx0&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 7232261},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1086&MODE=init&zx=gsd2cum2kqx0&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 7232274},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 7232301},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x3a', 'extra':'http://mail.google.com/mail/images/cleardot.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 7236419},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x3a', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 7236457},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x3b', 'extra':'http://mail.google.com/mail/images/cleardot.gif', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 7245353},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x3b', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 7245394},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x3c', 'extra':'http://mail.google.com/mail/images/2/icons_ns2a.png', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 7247637},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x3c', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 7247672},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'597 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 7268945},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1086&MODE=init&zx=gsd2cum2kqx0&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 7269174},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1086&MODE=init&zx=gsd2cum2kqx0&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 7269424},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x2f33910', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1086&MODE=init&zx=gsd2cum2kqx0&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 7269459},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'47', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 7375719},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x3d', 'extra':'http://chatenabled.mail.google.com/mail/images/cleardot.gif?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1234&zx=ycda9of1csnp', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 7377167},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x39', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 7377400},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x185c988', 'extra':'http://chatenabled.mail.google.com/mail/images/cleardot.gif?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1234&zx=ycda9of1csnp', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 7377570},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x185c988', 'extra':'http://chatenabled.mail.google.com/mail/images/cleardot.gif?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1234&zx=ycda9of1csnp', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 7377597},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x185c988', 'extra':'http://chatenabled.mail.google.com/mail/images/cleardot.gif?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1234&zx=ycda9of1csnp', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 7377618},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x185c988', 'extra':'http://chatenabled.mail.google.com/mail/images/cleardot.gif?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1234&zx=ycda9of1csnp', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 7377657},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_host', 'id':'0x185c988', 'extra':'http://chatenabled.mail.google.com/mail/images/cleardot.gif?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1234&zx=ycda9of1csnp', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'270', 'usec_begin': 7377672},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_host', 'id':'0x185c988', 'extra':'http://chatenabled.mail.google.com/mail/images/cleardot.gif?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1234&zx=ycda9of1csnp', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'275', 'usec_begin': 7378021},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.connect', 'id':'0x185c988', 'extra':'http://chatenabled.mail.google.com/mail/images/cleardot.gif?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1234&zx=ycda9of1csnp', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'279', 'usec_begin': 7378038},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.connect', 'id':'0x2e8fea0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'73', 'usec_begin': 7378053},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.connect', 'id':'0x2e8fea0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'238', 'usec_begin': 7401287},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.connect', 'id':'0x185c988', 'extra':'http://chatenabled.mail.google.com/mail/images/cleardot.gif?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1234&zx=ycda9of1csnp', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'284', 'usec_begin': 7401329},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x185c988', 'extra':'http://chatenabled.mail.google.com/mail/images/cleardot.gif?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1234&zx=ycda9of1csnp', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 7401345},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x2e8fea0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 7401386},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x2e8fea0', 'extra':'803 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 7401417},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x185c988', 'extra':'http://chatenabled.mail.google.com/mail/images/cleardot.gif?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1234&zx=ycda9of1csnp', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 7401430},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x185c988', 'extra':'http://chatenabled.mail.google.com/mail/images/cleardot.gif?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1234&zx=ycda9of1csnp', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 7401444},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x2e8fea0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 7401462},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x2e8fea0', 'extra':'544 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 7433482},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x185c988', 'extra':'http://chatenabled.mail.google.com/mail/images/cleardot.gif?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1234&zx=ycda9of1csnp', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 7433788},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x185c988', 'extra':'http://chatenabled.mail.google.com/mail/images/cleardot.gif?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1234&zx=ycda9of1csnp', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 7434023},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x185c988', 'extra':'http://chatenabled.mail.google.com/mail/images/cleardot.gif?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1234&zx=ycda9of1csnp', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 7434055},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'url.load', 'id':'0x3d', 'extra':'', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'196', 'usec_begin': 7434302},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'url.load', 'id':'0x3e', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1291&TYPE=xmlhttp&zx=ev050ueswmrg&t=1', 'file':'E:\src\cr\src\webkit\tools\test_shell\test_webview_delegate.cc', 'line_number':'189', 'usec_begin': 7435539},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.resolve_proxy', 'id':'0x185c988', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1291&TYPE=xmlhttp&zx=ev050ueswmrg&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'252', 'usec_begin': 7436159},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.resolve_proxy', 'id':'0x185c988', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1291&TYPE=xmlhttp&zx=ev050ueswmrg&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'257', 'usec_begin': 7436193},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.init_conn', 'id':'0x185c988', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1291&TYPE=xmlhttp&zx=ev050ueswmrg&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'261', 'usec_begin': 7436214},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.init_conn', 'id':'0x185c988', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1291&TYPE=xmlhttp&zx=ev050ueswmrg&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'266', 'usec_begin': 7436260},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.write_headers', 'id':'0x185c988', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1291&TYPE=xmlhttp&zx=ev050ueswmrg&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'297', 'usec_begin': 7436282},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.write', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'192', 'usec_begin': 7436335},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.write', 'id':'0x183c9d0', 'extra':'1220 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'201', 'usec_begin': 7436389},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.write_headers', 'id':'0x185c988', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1291&TYPE=xmlhttp&zx=ev050ueswmrg&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'302', 'usec_begin': 7436419},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_headers', 'id':'0x185c988', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1291&TYPE=xmlhttp&zx=ev050ueswmrg&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'315', 'usec_begin': 7436437},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 7436451},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'548 bytes', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'279', 'usec_begin': 7468253},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_headers', 'id':'0x185c988', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1291&TYPE=xmlhttp&zx=ev050ueswmrg&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'320', 'usec_begin': 7468524},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x185c988', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1291&TYPE=xmlhttp&zx=ev050ueswmrg&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 7468734},
+{'pid':'0x854', 'tid':'0xa20', 'type':'END', 'name':'http.read_body', 'id':'0x185c988', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1291&TYPE=xmlhttp&zx=ev050ueswmrg&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'329', 'usec_begin': 7468755},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'http.read_body', 'id':'0x185c988', 'extra':'http://mail.google.com/mail/channel/test?at=xn3j339j0a8vwew777x485j2ygbokt&VER=6&it=1291&TYPE=xmlhttp&zx=ev050ueswmrg&t=1', 'file':'E:\src\cr\src\net\http\http_network_transaction.cc', 'line_number':'324', 'usec_begin': 7468881},
+{'pid':'0x854', 'tid':'0xa20', 'type':'BEGIN', 'name':'socket.read', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'160', 'usec_begin': 7468898},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'6', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 7626418},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 7876733},
+{'pid':'0x854', 'tid':'0x208', 'type':'INSTANT', 'name':'heartbeat.cpu', 'id':'0x0', 'extra':'0', 'file':'E:\src\cr\src\base\trace_event.cc', 'line_number':'76', 'usec_begin': 8126490},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 8309792},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 8309858},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 8309873},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 8326339},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'965', 'usec_begin': 8326394},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.compile', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'967', 'usec_begin': 8326477},
+{'pid':'0x854', 'tid':'0x208', 'type':'BEGIN', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'972', 'usec_begin': 8326505},
+{'pid':'0x854', 'tid':'0x208', 'type':'END', 'name':'v8.run', 'id':'0x0', 'extra':'', 'file':'E:\src\cr\src\webkit\port\bindings\v8\v8_proxy.cpp', 'line_number':'974', 'usec_begin': 8341241},
+{'pid':'0x854', 'tid':'0xa20', 'type':'INSTANT', 'name':'socket.disconnect', 'id':'0x183c9d0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'113', 'usec_begin': 8341596},
+{'pid':'0x854', 'tid':'0xa20', 'type':'INSTANT', 'name':'socket.disconnect', 'id':'0x318d410', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'113', 'usec_begin': 8341826},
+{'pid':'0x854', 'tid':'0xa20', 'type':'INSTANT', 'name':'socket.disconnect', 'id':'0x2e8fea0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'113', 'usec_begin': 8375190},
+{'pid':'0x854', 'tid':'0xa20', 'type':'INSTANT', 'name':'socket.disconnect', 'id':'0x265a568', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'113', 'usec_begin': 8375275},
+{'pid':'0x854', 'tid':'0xa20', 'type':'INSTANT', 'name':'socket.disconnect', 'id':'0x265aeb0', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'113', 'usec_begin': 8375394},
+{'pid':'0x854', 'tid':'0xa20', 'type':'INSTANT', 'name':'socket.disconnect', 'id':'0x26c29a8', 'extra':'', 'file':'E:\src\cr\src\net\base\tcp_client_socket.cc', 'line_number':'113', 'usec_begin': 8375490},
+];
diff --git a/chromium/tools/traceline/svgui/README b/chromium/tools/traceline/svgui/README
new file mode 100644
index 00000000000..1a113e281bc
--- /dev/null
+++ b/chromium/tools/traceline/svgui/README
@@ -0,0 +1,12 @@
+This is a small "web application" to display traceline trace data. It uses
+xhtml, SVG, and JavaScript to interpret the JSON data in a timeline format.
+Events can be moused over to show details, stack traces, etc.
+
+The name of the JSON file should be supplied behind a # in the URL. Multiple
+JSON files can be listed (comma separated), allowing you to compare multiple
+sets of data. Examples:
+
+http://blah/traceline.xml#startup-release.json
+http://blah/traceline.xml#mydata1.json,mydata2.json
+
+Dean McNamee <deanm@chromium.org>
diff --git a/chromium/tools/traceline/svgui/startup-release.json b/chromium/tools/traceline/svgui/startup-release.json
new file mode 100644
index 00000000000..37a36201da2
--- /dev/null
+++ b/chromium/tools/traceline/svgui/startup-release.json
@@ -0,0 +1,178 @@
+parseEvents([
+{'stacktrace': [], 'thread': 3956, 'eventtype': 'EVENT_TYPE_APC', 'func_addr_name': 'ntdll.dll!LdrInitializeThunk+0x0', 'ret_addr': 2089872071, 'done': 23.106567999999999, 'func_addr': 2089816446, 'ms': 6.3611440000000004, 'cpu': 2147742720},
+{'stacktrace': [[2089816487, 'ntdll.dll!LdrpCallInitRoutine+0x14'], [2089929643, 'ntdll.dll!LdrpRunInitializeRoutines+0x1c7'], [2089917304, 'ntdll.dll!LdrpGetProcedureAddress+0x1c3'], [2089917344, 'ntdll.dll!LdrGetProcedureAddress+0x18'], [2089952052, 'ntdll.dll!LdrpInitializeProcess+0x878'], [2089948729, 'ntdll.dll!_LdrpInitialize+0x88d3'], [1717963930, 'failed']], 'thread': 3956, 'eventtype': 'EVENT_TYPE_THREADBEGIN', 'parenteventid': 2147323904, 'startaddr': 2088763392, 'ms': 7.0830229999999998, 'cpu': 2147742720},
+{'stacktrace': [[2118314946, 'USER32.dll!_UserClientDllInitialize+0x246'], [2089816487, 'ntdll.dll!LdrpCallInitRoutine+0x14'], [2089929643, 'ntdll.dll!LdrpRunInitializeRoutines+0x1c7'], [2089948990, 'ntdll.dll!LdrpInitializeProcess+0xfffffc82'], [2089948729, 'ntdll.dll!_LdrpInitialize+0x88d3'], [1717963930, 'failed']], 'thread': 3956, 'syscall': 4316, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'gdi32.dll!NtGdiInit', 'done': 12.667252, 'ms': 11.086605, 'syscallargs': [1242536, 1243632, 2118314946], 'cpu': 2147742720},
+{'stacktrace': [[2088853969, 'kernel32.dll!CsrBasepCreateActCtx+0xa2'], [2088853339, 'kernel32.dll!BasepCreateActCtx+0x63a'], [2088851440, 'kernel32.dll!CreateActCtxW+0x394'], [2091021301, 'SHELL32.dll!SHFusionInitializeIDCC+0x6d'], [2091021175, 'SHELL32.dll!SHFusionInitializeFromModuleID+0x3a'], [2091020981, 'SHELL32.dll!_ProcessAttach+0x2f'], [2091020914, 'SHELL32.dll!DllMain+0x3fe1a'], [2090759390, 'SHELL32.dll!_DllMainCRTStartup+0x48'], [2089816487, 'ntdll.dll!LdrpCallInitRoutine+0x14'], [2089929643, 'ntdll.dll!LdrpRunInitializeRoutines+0x1c7'], [2089948990, 'ntdll.dll!LdrpInitializeProcess+0xfffffc82'], [2089948729, 'ntdll.dll!_LdrpInitialize+0x88d3'], [1717963930, 'failed']], 'thread': 3956, 'syscall': 200, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtRequestWaitReplyPort', 'done': 16.840966999999999, 'ms': 13.546135, 'syscallargs': [32, 1240476, 1240476], 'cpu': 2147742720},
+{'stacktrace': [[2088853969, 'kernel32.dll!CsrBasepCreateActCtx+0xa2'], [2088853339, 'kernel32.dll!BasepCreateActCtx+0x63a'], [2088851440, 'kernel32.dll!CreateActCtxW+0x394'], [2001057805, 'comctl32.dll!SHFusionInitializeIDCC+0x83'], [2001057855, 'comctl32.dll!SHFusionInitializeID+0x12'], [2001057879, 'comctl32.dll!SHFusionInitialize+0xf'], [2000503209, 'comctl32.dll!_ProcessAttach+0x32'], [2000503399, 'comctl32.dll!LibMain+0x21'], [2089816487, 'ntdll.dll!LdrpCallInitRoutine+0x14'], [2089929643, 'ntdll.dll!LdrpRunInitializeRoutines+0x1c7'], [2089902456, 'ntdll.dll!LdrpLoadDll+0xfffffe4f'], [2089902810, 'ntdll.dll!LdrLoadDll+0x110'], [2088770489, 'kernel32.dll!LoadLibraryExW+0xc8'], [2088808028, 'kernel32.dll!LoadLibraryW+0x11'], [2091021388, 'SHELL32.dll!SHFusionLoadLibrary+0x2a'], [2090759619, 'SHELL32.dll!DelayLoadCC+0x15']], 'thread': 3956, 'syscall': 200, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtRequestWaitReplyPort', 'done': 21.051558, 'ms': 17.985804999999999, 'syscallargs': [32, 1237992, 1237992], 'cpu': 2147742720},
+{'stacktrace': [[2089888951, 'ntdll.dll!LdrAccessResource+0x15'], [2009076731, 'VERSION.dll!GetFileVersionInfoSizeW+0x6a'], [4315331, 'chrome.exe!FileVersionInfo::CreateFileVersionInfo+0x23 [ c:\\g\\trunk\\src\\base\\file_version_info.cc:46 ]'], [4202450, "chrome.exe!`anonymous namespace'::GetCustomInfo+0x22 [ c:\\g\\trunk\\src\\chrome\\app\\breakpad.cc:63 ]"], [4204639, 'chrome.exe!InitCrashReporter+0x13f [ c:\\g\\trunk\\src\\chrome\\app\\breakpad.cc:238 ]'], [4205528, 'chrome.exe!wWinMain+0x2b8 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:95 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 178, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtQueryVirtualMemory', 'done': 24.995356000000001, 'ms': 24.501438, 'syscallargs': [4294967295, 19333120, 0], 'cpu': 2147742720},
+{'stacktrace': [[2089888951, 'ntdll.dll!LdrAccessResource+0x15'], [2009076752, 'VERSION.dll!GetFileVersionInfoSizeW+0x7f'], [4315331, 'chrome.exe!FileVersionInfo::CreateFileVersionInfo+0x23 [ c:\\g\\trunk\\src\\base\\file_version_info.cc:46 ]'], [4202450, "chrome.exe!`anonymous namespace'::GetCustomInfo+0x22 [ c:\\g\\trunk\\src\\chrome\\app\\breakpad.cc:63 ]"], [4204639, 'chrome.exe!InitCrashReporter+0x13f [ c:\\g\\trunk\\src\\chrome\\app\\breakpad.cc:238 ]'], [4205528, 'chrome.exe!wWinMain+0x2b8 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:95 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 178, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtQueryVirtualMemory', 'done': 25.491229000000001, 'ms': 24.997869999999999, 'syscallargs': [4294967295, 19333120, 0], 'cpu': 2147742720},
+{'stacktrace': [[2089888951, 'ntdll.dll!LdrAccessResource+0x15'], [2009077479, 'VERSION.dll!GetFileVersionInfoW+0x68'], [4315373, 'chrome.exe!FileVersionInfo::CreateFileVersionInfo+0x4d [ c:\\g\\trunk\\src\\base\\file_version_info.cc:54 ]'], [4202450, "chrome.exe!`anonymous namespace'::GetCustomInfo+0x22 [ c:\\g\\trunk\\src\\chrome\\app\\breakpad.cc:63 ]"], [4204639, 'chrome.exe!InitCrashReporter+0x13f [ c:\\g\\trunk\\src\\chrome\\app\\breakpad.cc:238 ]'], [4205528, 'chrome.exe!wWinMain+0x2b8 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:95 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 178, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtQueryVirtualMemory', 'done': 26.258085999999999, 'ms': 25.763888999999999, 'syscallargs': [4294967295, 19333120, 0], 'cpu': 2147742720},
+{'eventid': 1718060360, 'stacktrace': [[2088839620, 'kernel32.dll!BaseCreateThreadPoolThread+0x44'], [2089982739, 'ntdll.dll!RtlpStartThreadpoolThread+0x4e'], [2089993072, 'ntdll.dll!RtlpInitializeTimerThreadPool+0xb7'], [2089992706, 'ntdll.dll!RtlpInitializeWorkerThreadPool+0x1c'], [2089993149, 'ntdll.dll!RtlQueueWorkItem+0x4ce5'], [2088962678, 'kernel32.dll!QueueUserWorkItem+0x14'], [4204696, 'chrome.exe!InitCrashReporter+0x178 [ c:\\g\\trunk\\src\\chrome\\app\\breakpad.cc:250 ]'], [4205528, 'chrome.exe!wWinMain+0x2b8 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:95 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'eventtype': 'EVENT_TYPE_CREATETHREAD', 'startaddr': 2088830553, 'ms': 26.361730000000001, 'cpu': 2147742720},
+{'stacktrace': [], 'thread': 3380, 'eventtype': 'EVENT_TYPE_THREADBEGIN', 'parenteventid': 1718060360, 'startaddr': 2089974157, 'ms': 26.482137000000002, 'cpu': 3124053392},
+{'eventid': 1718062684, 'stacktrace': [[2088839620, 'kernel32.dll!BaseCreateThreadPoolThread+0x44'], [2089982739, 'ntdll.dll!RtlpStartThreadpoolThread+0x4e'], [2089982839, 'ntdll.dll!RtlpStartWorkerThread+0x16'], [2089982906, 'ntdll.dll!RtlQueueWorkItem+0x24e2'], [2088962678, 'kernel32.dll!QueueUserWorkItem+0x14'], [4204696, 'chrome.exe!InitCrashReporter+0x178 [ c:\\g\\trunk\\src\\chrome\\app\\breakpad.cc:250 ]'], [4205528, 'chrome.exe!wWinMain+0x2b8 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:95 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'eventtype': 'EVENT_TYPE_CREATETHREAD', 'startaddr': 2088830553, 'ms': 26.58634, 'cpu': 2147742720},
+{'stacktrace': [], 'thread': 2440, 'eventtype': 'EVENT_TYPE_THREADBEGIN', 'parenteventid': 1718062684, 'startaddr': 2089879392, 'ms': 26.691939999999999, 'cpu': 3124053392},
+{'stacktrace': [[2089816139, 'ntdll.dll!RtlEnterCriticalSection+0x46'], [2088809487, 'kernel32.dll!GetModuleFileNameW+0x3a'], [2089167589, 'kernel32.dll!FillUEFInfo+0x51'], [2089043624, 'kernel32.dll!SetUnhandledExceptionFilter+0x2b'], [4203570, 'chrome.exe!InitCrashReporterThread+0x1b2 [ c:\\g\\trunk\\src\\chrome\\app\\breakpad.cc:181 ]'], [2089973061, 'ntdll.dll!RtlpWorkerCallout+0x65'], [2089973123, 'ntdll.dll!RtlpExecuteWorkerRequest+0x1a'], [2089973317, 'ntdll.dll!RtlpApcCallout+0x11'], [2089973276, 'ntdll.dll!RtlpWorkerThread+0x16ebc'], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2440, 'syscall': 271, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwWaitForSingleObject', 'waiting': 1, 'done': 51.803511, 'ms': 26.820167999999999, 'syscallargs': [120, 0, 0], 'cpu': 3124053392},
+{'stacktrace': [[2089934922, 'ntdll.dll!LdrpMapDll+0x1b0b'], [2089902193, 'ntdll.dll!LdrpLoadDll+0xfffffd48'], [2089902810, 'ntdll.dll!LdrLoadDll+0x110'], [2088770489, 'kernel32.dll!LoadLibraryExW+0xc8'], [4205549, 'chrome.exe!wWinMain+0x2cd [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:99 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 137, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwProtectVirtualMemory', 'done': 44.387484999999998, 'ms': 43.193758000000003, 'syscallargs': [4294967295, 1242540, 1242544], 'cpu': 2147742720},
+{'stacktrace': [[2088853969, 'kernel32.dll!CsrBasepCreateActCtx+0xa2'], [2088853339, 'kernel32.dll!BasepCreateActCtx+0x63a'], [2088851440, 'kernel32.dll!CreateActCtxW+0x394'], [2088959197, 'kernel32.dll!BasepProbeForDllManifest+0x1eacd'], [2089928717, 'ntdll.dll!LdrpWalkImportDescriptor+0x84'], [2089902276, 'ntdll.dll!LdrpLoadDll+0xfffffd9b'], [2089902810, 'ntdll.dll!LdrLoadDll+0x110'], [2088770489, 'kernel32.dll!LoadLibraryExW+0xc8'], [4205549, 'chrome.exe!wWinMain+0x2cd [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:99 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 200, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtRequestWaitReplyPort', 'done': 45.075840999999997, 'ms': 44.847599000000002, 'syscallargs': [32, 1240632, 1240632], 'cpu': 2147742720},
+{'stacktrace': [[2011010674, 'ADVAPI32.dll!GatherRandomKeyFastUserMode+0xba'], [2011006756, 'ADVAPI32.dll!RandomFillBuffer+0xa3'], [2011006564, 'ADVAPI32.dll!GenRandom+0x25'], [2011006503, 'ADVAPI32.dll!NewGenRandomEx+0x4d'], [2011006415, 'ADVAPI32.dll!NewGenRandom+0x34'], [2011006354, 'ADVAPI32.dll!SystemFunction036+0x14'], [2011652938, 'RPCRT4.dll!GenerateRandomNumber+0x2ff'], [2011652709, 'RPCRT4.dll!UuidCreate+0x11'], [2001932179, 'ole32.dll!DllMain+0x33e86'], [2001719529, 'ole32.dll!_DllMainCRTStartup+0x48'], [2089816487, 'ntdll.dll!LdrpCallInitRoutine+0x14'], [2089929643, 'ntdll.dll!LdrpRunInitializeRoutines+0x1c7'], [2089902456, 'ntdll.dll!LdrpLoadDll+0xfffffe4f'], [2089902810, 'ntdll.dll!LdrLoadDll+0x110'], [2088770489, 'kernel32.dll!LoadLibraryExW+0xc8'], [4205549, 'chrome.exe!wWinMain+0x2cd [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:99 ]']], 'thread': 3956, 'syscall': 66, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwDeviceIoControlFile', 'done': 47.469441000000003, 'ms': 47.221643999999998, 'syscallargs': [128, 0, 0], 'cpu': 2147742720},
+{'stacktrace': [[2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2440, 'syscall': 190, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwRemoveIoCompletion', 'waiting': 1, 'done': 146.87732700000001, 'ms': 51.840667000000003, 'syscallargs': [112, 13500332, 13500336], 'cpu': 3124053392},
+{'stacktrace': [[2118254284, 'USER32.dll!_CreateWindowEx+0xb1'], [2118253656, 'USER32.dll!CreateWindowExW+0x33'], [2001923724, 'ole32.dll!InitMainThreadWnd+0x3c'], [2001727537, 'ole32.dll!CoInitializeEx+0xc6'], [2001729284, 'ole32.dll!OleInitializeEx+0x12'], [2001729257, 'ole32.dll!OleInitialize+0xf'], [19354461, 'chrome.dll!ChromeMain+0x7dd [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:223 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 4439, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserCreateWindowEx', 'done': 53.857683000000002, 'ms': 53.325772000000001, 'syscallargs': [0, 49208, 49208], 'cpu': 2147742720},
+{'stacktrace': [[19417443, 'chrome.dll!PlatformThread::SetName+0x63 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:62 ]'], [20401225, 'chrome.dll!BrowserMain+0xc9 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:294 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'eventtype': 'EVENT_TYPE_THREADNAME', 'threadname': 'Chromium_BrowserMain', 'ms': 54.000996999999998, 'cpu': 2147742720},
+{'eventid': 1718170996, 'stacktrace': [[2088830549, 'kernel32.dll!CreateThread+0x1e'], [19417542, 'chrome.dll!PlatformThread::Create+0x36 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:83 ]'], [23799365, 'chrome.dll!base::Thread::StartWithOptions+0xb5 [ c:\\g\\trunk\\src\\base\\thread.cc:82 ]'], [20506130, 'chrome.dll!BrowserProcessImpl::CreateFileThread+0xa2 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.cc:300 ]'], [20398585, 'chrome.dll!BrowserProcessImpl::file_thread+0x19 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.h:61 ]'], [20852807, 'chrome.dll!ProfileImpl::GetPrefs+0xc7 [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:720 ]'], [20855066, 'chrome.dll!ProfileImpl::ProfileImpl+0x12a [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:565 ]'], [20856220, 'chrome.dll!Profile::CreateProfile+0x1c [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:69 ]'], [20602496, 'chrome.dll!ProfileManager::GetDefaultProfile+0x1c0 [ c:\\g\\trunk\\src\\chrome\\browser\\profile_manager.cc:96 ]'], [20402403, 'chrome.dll!BrowserMain+0x563 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:374 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'eventtype': 'EVENT_TYPE_CREATETHREAD', 'startaddr': 2088830553, 'ms': 59.869340999999999, 'cpu': 2147742720},
+{'stacktrace': [[2088772914, 'kernel32.dll!WaitForSingleObject+0x12'], [19495803, 'chrome.dll!base::WaitableEvent::Wait+0x1b [ c:\\g\\trunk\\src\\base\\waitable_event_win.cc:42 ]'], [23799422, 'chrome.dll!base::Thread::StartWithOptions+0xee [ c:\\g\\trunk\\src\\base\\thread.cc:91 ]'], [20506130, 'chrome.dll!BrowserProcessImpl::CreateFileThread+0xa2 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.cc:300 ]'], [20398585, 'chrome.dll!BrowserProcessImpl::file_thread+0x19 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.h:61 ]'], [20852807, 'chrome.dll!ProfileImpl::GetPrefs+0xc7 [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:720 ]'], [20855066, 'chrome.dll!ProfileImpl::ProfileImpl+0x12a [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:565 ]'], [20856220, 'chrome.dll!Profile::CreateProfile+0x1c [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:69 ]'], [20602496, 'chrome.dll!ProfileManager::GetDefaultProfile+0x1c0 [ c:\\g\\trunk\\src\\chrome\\browser\\profile_manager.cc:96 ]'], [20402403, 'chrome.dll!BrowserMain+0x563 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:374 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 271, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwWaitForSingleObject', 'waiting': 1, 'done': 60.171892999999997, 'ms': 59.965162999999997, 'syscallargs': [152, 0, 0], 'cpu': 2147742720},
+{'stacktrace': [], 'thread': 2736, 'eventtype': 'EVENT_TYPE_THREADBEGIN', 'parenteventid': 1718170996, 'startaddr': 19417280, 'ms': 60.031652000000001, 'cpu': 3124053392},
+{'stacktrace': [[19417443, 'chrome.dll!PlatformThread::SetName+0x63 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:62 ]'], [23799603, 'chrome.dll!base::Thread::ThreadMain+0x53 [ c:\\g\\trunk\\src\\base\\thread.cc:145 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2736, 'eventtype': 'EVENT_TYPE_THREADNAME', 'threadname': 'Chrome_FileThread', 'ms': 60.136972999999998, 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19467175, 'chrome.dll!base::MessagePumpForUI::WaitForWork+0x27 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:408 ]'], [19470141, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0xbd [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:393 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2736, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 64.683081000000001, 'ms': 60.200668, 'syscallargs': [1, 15858672, 1], 'cpu': 3124053392},
+{'stacktrace': [[2089175867, 'kernel32.dll!CreateToolhelp32Snapshot+0x2c'], [19416396, 'chrome.dll!process_util::KillProcesses+0x6c [ c:\\g\\trunk\\src\\base\\process_util_win.cc:296 ]'], [20567056, 'chrome.dll!BrowserInit::MessageWindow::HuntForZombieChromeProcesses+0x40 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:367 ]'], [20403945, 'chrome.dll!BrowserMain+0xb69 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:444 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 173, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtQuerySystemInformation', 'done': 61.949773, 'ms': 61.741366999999997, 'syscallargs': [5, 15859712, 65536], 'cpu': 2147742720},
+{'eventid': 1718192188, 'stacktrace': [[2088830549, 'kernel32.dll!CreateThread+0x1e'], [20819991, 'chrome.dll!chrome_browser_net::DnsMaster::PreLockedCreateNewSlaveIfNeeded+0x87 [ c:\\g\\trunk\\src\\chrome\\browser\\net\\dns_master.cc:303 ]'], [20824704, 'chrome.dll!chrome_browser_net::DnsMaster::Resolve+0x30 [ c:\\g\\trunk\\src\\chrome\\browser\\net\\dns_master.cc:70 ]'], [20522363, 'chrome.dll!chrome_browser_net::DnsPrefetchHostNamesAtStartup+0x2cb [ c:\\g\\trunk\\src\\chrome\\browser\\net\\dns_global.cc:430 ]'], [20404113, 'chrome.dll!BrowserMain+0xc11 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:462 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'eventtype': 'EVENT_TYPE_CREATETHREAD', 'startaddr': 2088830553, 'ms': 63.390737999999999, 'cpu': 2147742720},
+{'stacktrace': [], 'thread': 2812, 'eventtype': 'EVENT_TYPE_THREADBEGIN', 'parenteventid': 1718192188, 'startaddr': 21429184, 'ms': 63.532375999999999, 'cpu': 3124053392},
+{'stacktrace': [[19417443, 'chrome.dll!PlatformThread::SetName+0x63 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:62 ]'], [21429081, 'chrome.dll!chrome_browser_net::DnsSlave::Run+0xa9 [ c:\\g\\trunk\\src\\chrome\\browser\\net\\dns_slave.cc:57 ]'], [21429195, 'chrome.dll!chrome_browser_net::DnsSlave::ThreadStart+0xb [ c:\\g\\trunk\\src\\chrome\\browser\\net\\dns_slave.cc:45 ]'], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2812, 'eventtype': 'EVENT_TYPE_THREADNAME', 'threadname': 'dns_prefetcher_1_of_8', 'ms': 63.553328999999998, 'cpu': 3124053392},
+{'eventid': 1718197540, 'stacktrace': [[2088830549, 'kernel32.dll!CreateThread+0x1e'], [4216426, 'chrome.exe!sandbox::BrokerServicesBase::Init+0x5a [ c:\\g\\trunk\\src\\sandbox\\src\\broker_services.cc:70 ]'], [20404275, 'chrome.dll!BrowserMain+0xcb3 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:523 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'eventtype': 'EVENT_TYPE_CREATETHREAD', 'startaddr': 2088830553, 'ms': 63.831575999999998, 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19467175, 'chrome.dll!base::MessagePumpForUI::WaitForWork+0x27 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:408 ]'], [19470141, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0xbd [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:393 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2736, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 133.03087400000001, 'ms': 64.752364, 'syscallargs': [1, 15858672, 1], 'cpu': 3124053392},
+{'stacktrace': [], 'thread': 2428, 'eventtype': 'EVENT_TYPE_THREADBEGIN', 'parenteventid': 1718197540, 'startaddr': 4215808, 'ms': 64.816618000000005, 'cpu': 3124053392},
+{'eventid': 1718207424, 'stacktrace': [[2088830549, 'kernel32.dll!CreateThread+0x1e'], [19417542, 'chrome.dll!PlatformThread::Create+0x36 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:83 ]'], [23799365, 'chrome.dll!base::Thread::StartWithOptions+0xb5 [ c:\\g\\trunk\\src\\base\\thread.cc:82 ]'], [20505911, 'chrome.dll!BrowserProcessImpl::CreateIOThread+0xa7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.cc:287 ]'], [20398537, 'chrome.dll!BrowserProcessImpl::io_thread+0x19 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.h:54 ]'], [20576442, 'chrome.dll!InstallJankometer+0x19a [ c:\\g\\trunk\\src\\chrome\\browser\\jankometer.cc:210 ]'], [20404892, 'chrome.dll!BrowserMain+0xf1c [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:565 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'eventtype': 'EVENT_TYPE_CREATETHREAD', 'startaddr': 2088830553, 'ms': 64.829469000000003, 'cpu': 2147742720},
+{'stacktrace': [[4340339, 'chrome.exe!PlatformThread::SetName+0x63 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:62 ]'], [4215848, 'chrome.exe!sandbox::BrokerServicesBase::TargetEventsThread+0x28 [ c:\\g\\trunk\\src\\sandbox\\src\\broker_services.cc:147 ]'], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2428, 'eventtype': 'EVENT_TYPE_THREADNAME', 'threadname': 'BrokerEventThread', 'ms': 64.838966999999997, 'cpu': 3124053392},
+{'stacktrace': [[4215906, 'chrome.exe!sandbox::BrokerServicesBase::TargetEventsThread+0x62 [ c:\\g\\trunk\\src\\sandbox\\src\\broker_services.cc:151 ]'], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2428, 'syscall': 190, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwRemoveIoCompletion', 'waiting': 1, 'done': 115.701297, 'ms': 64.856288000000006, 'syscallargs': [168, 17039276, 17038976], 'cpu': 3124053392},
+{'stacktrace': [[2088772914, 'kernel32.dll!WaitForSingleObject+0x12'], [19495803, 'chrome.dll!base::WaitableEvent::Wait+0x1b [ c:\\g\\trunk\\src\\base\\waitable_event_win.cc:42 ]'], [23799422, 'chrome.dll!base::Thread::StartWithOptions+0xee [ c:\\g\\trunk\\src\\base\\thread.cc:91 ]'], [20505911, 'chrome.dll!BrowserProcessImpl::CreateIOThread+0xa7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.cc:287 ]'], [20398537, 'chrome.dll!BrowserProcessImpl::io_thread+0x19 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.h:54 ]'], [20576442, 'chrome.dll!InstallJankometer+0x19a [ c:\\g\\trunk\\src\\chrome\\browser\\jankometer.cc:210 ]'], [20404892, 'chrome.dll!BrowserMain+0xf1c [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:565 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 271, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwWaitForSingleObject', 'waiting': 1, 'done': 65.134815000000003, 'ms': 64.898751000000004, 'syscallargs': [192, 0, 0], 'cpu': 2147742720},
+{'stacktrace': [], 'thread': 708, 'eventtype': 'EVENT_TYPE_THREADBEGIN', 'parenteventid': 1718207424, 'startaddr': 19417280, 'ms': 64.96105, 'cpu': 3124053392},
+{'stacktrace': [[19417443, 'chrome.dll!PlatformThread::SetName+0x63 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:62 ]'], [23799603, 'chrome.dll!base::Thread::ThreadMain+0x53 [ c:\\g\\trunk\\src\\base\\thread.cc:145 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'eventtype': 'EVENT_TYPE_THREADNAME', 'threadname': 'Chrome_IOThread', 'ms': 65.062179999999998, 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 101.74421599999999, 'ms': 65.212478000000004, 'syscallargs': [1, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[2118254284, 'USER32.dll!_CreateWindowEx+0xb1'], [2118253656, 'USER32.dll!CreateWindowExW+0x33'], [27144714, 'chrome.dll!views::ContainerWin::Init+0x11a [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:161 ]'], [27116867, 'chrome.dll!views::Window::Init+0x93 [ c:\\g\\trunk\\src\\chrome\\views\\window.cc:305 ]'], [27291723, 'chrome.dll!views::CustomFrameWindow::Init+0x3b [ c:\\g\\trunk\\src\\chrome\\views\\custom_frame_window.cc:917 ]'], [23006205, 'chrome.dll!BrowserFrame::CreateForBrowserView+0x4d [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_window_factory.cc:58 ]'], [23006465, 'chrome.dll!BrowserWindow::CreateBrowserWindow+0x51 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_window_factory.cc:34 ]'], [20656275, 'chrome.dll!Browser::Browser+0x263 [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:229 ]'], [20564706, 'chrome.dll!BrowserInit::LaunchWithProfile::CreateTabbedBrowser+0x52 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:489 ]'], [20567260, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0x9c [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:536 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]'], [20571326, 'chrome.dll!BrowserInit::LaunchBrowserImpl+0x2de [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:720 ]'], [20572818, 'chrome.dll!BrowserInit::ProcessCommandLine+0x522 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:659 ]'], [20405083, 'chrome.dll!BrowserMain+0xfdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:579 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]']], 'thread': 3956, 'syscall': 4439, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserCreateWindowEx', 'done': 72.739132999999995, 'ms': 
70.077900999999997, 'syscallargs': [0, 1241012, 1239776], 'cpu': 2147742720},
+{'stacktrace': [[2088772914, 'kernel32.dll!WaitForSingleObject+0x12'], [23834872, 'chrome.dll!ConditionVariable::TimedWait+0xb8 [ c:\\g\\trunk\\src\\base\\condition_variable_win.cc:66 ]'], [23834961, 'chrome.dll!ConditionVariable::Wait+0x21 [ c:\\g\\trunk\\src\\base\\condition_variable_win.cc:48 ]'], [20821313, 'chrome.dll!chrome_browser_net::DnsMaster::GetNextAssignment+0x31 [ c:\\g\\trunk\\src\\chrome\\browser\\net\\dns_master.cc:245 ]'], [21429120, 'chrome.dll!chrome_browser_net::DnsSlave::Run+0xd0 [ c:\\g\\trunk\\src\\chrome\\browser\\net\\dns_slave.cc:63 ]'], [21429195, 'chrome.dll!chrome_browser_net::DnsSlave::ThreadStart+0xb [ c:\\g\\trunk\\src\\chrome\\browser\\net\\dns_slave.cc:45 ]'], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2812, 'syscall': 271, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwWaitForSingleObject', 'waiting': 1, 'done': 302.163543, 'ms': 70.703958, 'syscallargs': [232, 0, 0], 'cpu': 2147742720},
+{'stacktrace': [[2118254284, 'USER32.dll!_CreateWindowEx+0xb1'], [2118253656, 'USER32.dll!CreateWindowExW+0x33'], [27264799, 'chrome.dll!views::TooltipManager::Init+0x2f [ c:\\g\\trunk\\src\\chrome\\views\\tooltip_manager.cc:114 ]'], [27145071, 'chrome.dll!views::ContainerWin::Init+0x27f [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:187 ]'], [27116867, 'chrome.dll!views::Window::Init+0x93 [ c:\\g\\trunk\\src\\chrome\\views\\window.cc:305 ]'], [27291723, 'chrome.dll!views::CustomFrameWindow::Init+0x3b [ c:\\g\\trunk\\src\\chrome\\views\\custom_frame_window.cc:917 ]'], [23006205, 'chrome.dll!BrowserFrame::CreateForBrowserView+0x4d [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_window_factory.cc:58 ]'], [23006465, 'chrome.dll!BrowserWindow::CreateBrowserWindow+0x51 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_window_factory.cc:34 ]'], [20656275, 'chrome.dll!Browser::Browser+0x263 [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:229 ]'], [20564706, 'chrome.dll!BrowserInit::LaunchWithProfile::CreateTabbedBrowser+0x52 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:489 ]'], [20567260, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0x9c [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:536 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]'], [20571326, 'chrome.dll!BrowserInit::LaunchBrowserImpl+0x2de [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:720 ]'], [20572818, 'chrome.dll!BrowserInit::ProcessCommandLine+0x522 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:659 ]'], [20405083, 'chrome.dll!BrowserMain+0xfdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:579 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]']], 'thread': 3956, 'syscall': 4439, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserCreateWindowEx', 'done': 74.146016000000003, 
'ms': 72.944186999999999, 'syscallargs': [32, 1240832, 1239596], 'cpu': 3124053392},
+{'stacktrace': [[2118254284, 'USER32.dll!_CreateWindowEx+0xb1'], [2118253656, 'USER32.dll!CreateWindowExW+0x33'], [27969145, 'chrome.dll!ATL::CWindowImplBaseT<ATL::CWindow,ATL::CWinTraits<47120384,0> >::Create+0xb9 [ c:\\program files\\microsoft visual studio 8\\vc\\atlmfc\\include\\atlwin.h:3139 ]'], [21304605, 'chrome.dll!ATL::CWindowImpl<AutocompleteEditView,WTL::CRichEditCtrlT<ATL::CWindow>,ATL::CWinTraits<1342177664,0> >::Create+0x6d [ c:\\program files\\microsoft visual studio 8\\vc\\atlmfc\\include\\atlwin.h:3215 ]'], [21312331, 'chrome.dll!AutocompleteEditView::AutocompleteEditView+0x2cb [ c:\\g\\trunk\\src\\chrome\\browser\\autocomplete\\autocomplete_edit.cc:694 ]'], [23005496, 'chrome.dll!LocationBarView::Init+0x168 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\location_bar_view.cc:131 ]'], [23207043, 'chrome.dll!BrowserToolbarView::CreateCenterStack+0x253 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\toolbar_view.cc:186 ]'], [23210088, 'chrome.dll!BrowserToolbarView::Init+0x18 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\toolbar_view.cc:96 ]'], [23149860, 'chrome.dll!BrowserView2::Init+0x134 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:283 ]'], [23145656, 'chrome.dll!BrowserView2::ViewHierarchyChanged+0x38 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:788 ]'], [27129880, 'chrome.dll!views::View::PropagateAddNotifications+0x88 [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:674 ]'], [27131408, 'chrome.dll!views::View::AddChildView+0xb0 [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:585 ]'], [27131647, 'chrome.dll!views::View::AddChildView+0x2f [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:561 ]'], [23201897, 'chrome.dll!OpaqueNonClientView::ViewHierarchyChanged+0xd9 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\opaque_non_client_view.cc:670 ]'], [27129880, 'chrome.dll!views::View::PropagateAddNotifications+0x88 [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:674 ]'], [27131408, 
'chrome.dll!views::View::AddChildView+0xb0 [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:585 ]']], 'thread': 3956, 'syscall': 4439, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserCreateWindowEx', 'done': 88.230208000000005, 'ms': 86.457915999999997, 'syscallargs': [0, 49640, 49640], 'cpu': 2147742720},
+{'stacktrace': [[27278917, 'chrome.dll!views::HWNDView::DidChangeBounds+0x5 [ c:\\g\\trunk\\src\\chrome\\views\\hwnd_view.cc:137 ]'], [21099029, 'chrome.dll!views::View::SetBounds+0x45 [ c:\\g\\trunk\\src\\chrome\\views\\view.h:164 ]'], [23005111, 'chrome.dll!LocationBarView::DoLayout+0x227 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\location_bar_view.cc:431 ]'], [23006103, 'chrome.dll!LocationBarView::Layout+0x7 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\location_bar_view.cc:218 ]'], [21099029, 'chrome.dll!views::View::SetBounds+0x45 [ c:\\g\\trunk\\src\\chrome\\views\\view.h:164 ]'], [23203355, 'chrome.dll!BrowserToolbarView::Layout+0x20b [ c:\\g\\trunk\\src\\chrome\\browser\\views\\toolbar_view.cc:311 ]'], [27117623, 'chrome.dll!views::View::DidChangeBounds+0x7 [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:192 ]'], [23143722, 'chrome.dll!BrowserView2::LayoutToolbar+0xea [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:920 ]'], [23143963, 'chrome.dll!BrowserView2::Layout+0x1b [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:768 ]'], [27117623, 'chrome.dll!views::View::DidChangeBounds+0x7 [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:192 ]'], [23198989, 'chrome.dll!OpaqueNonClientView::Layout+0x4d [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\opaque_non_client_view.cc:633 ]'], [27117623, 'chrome.dll!views::View::DidChangeBounds+0x7 [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:192 ]'], [27138473, 'chrome.dll!views::FillLayout::Layout+0x69 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:57 ]'], [27122596, 'chrome.dll!views::View::Layout+0x14 [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:214 ]'], [27142349, 'chrome.dll!views::ContainerWin::ChangeSize+0x8d [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:815 ]'], [27141386, 'chrome.dll!views::ContainerWin::SetContentsView+0xea [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:214 ]']], 'thread': 3956, 'syscall': 4642, 'eventtype': 'EVENT_TYPE_SYSCALL', 
'syscallname': 'user32.dll!NtUserSetWindowPos', 'done': 97.329130000000006, 'ms': 96.881028000000001, 'syscallargs': [8979080, 0, 137], 'cpu': 3124053392},
+{'eventid': 1718457244, 'stacktrace': [[2088839620, 'kernel32.dll!BaseCreateThreadPoolThread+0x44'], [2089982739, 'ntdll.dll!RtlpStartThreadpoolThread+0x4e'], [2090037459, 'ntdll.dll!RtlpStartWaitThread+0x51'], [2090031864, 'ntdll.dll!RtlRegisterWait+0x105'], [2088899101, 'kernel32.dll!RegisterWaitForSingleObject+0x50'], [27049362, 'chrome.dll!WorkerThreadTicker::Start+0xc2 [ c:\\g\\trunk\\src\\chrome\\common\\worker_thread_ticker.cc:66 ]'], [20653873, 'chrome.dll!Browser::InitHangMonitor+0xd1 [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:1579 ]'], [20656498, 'chrome.dll!Browser::Browser+0x342 [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:247 ]'], [20564706, 'chrome.dll!BrowserInit::LaunchWithProfile::CreateTabbedBrowser+0x52 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:489 ]'], [20567260, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0x9c [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:536 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]'], [20571326, 'chrome.dll!BrowserInit::LaunchBrowserImpl+0x2de [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:720 ]'], [20572818, 'chrome.dll!BrowserInit::ProcessCommandLine+0x522 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:659 ]'], [20405083, 'chrome.dll!BrowserMain+0xfdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:579 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]']], 'thread': 3956, 'eventtype': 'EVENT_TYPE_CREATETHREAD', 'startaddr': 2088830553, 'ms': 98.261930000000007, 'cpu': 3124053392},
+{'stacktrace': [], 'thread': 3500, 'eventtype': 'EVENT_TYPE_THREADBEGIN', 'parenteventid': 1718457244, 'startaddr': 2089983918, 'ms': 98.425916999999998, 'cpu': 2147742720},
+{'stacktrace': [[2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 3500, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 103.631327, 'ms': 98.535987000000006, 'syscallargs': [2, 32505136, 1], 'cpu': 2147742720},
+{'stacktrace': [[2118254284, 'USER32.dll!_CreateWindowEx+0xb1'], [2118253656, 'USER32.dll!CreateWindowExW+0x33'], [27264799, 'chrome.dll!views::TooltipManager::Init+0x2f [ c:\\g\\trunk\\src\\chrome\\views\\tooltip_manager.cc:114 ]'], [27145071, 'chrome.dll!views::ContainerWin::Init+0x27f [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:187 ]'], [21603024, 'chrome.dll!WebContentsViewWin::CreateView+0x20 [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents_view_win.cc:61 ]'], [21357991, 'chrome.dll!TabContents::CreateWithType+0x67 [ c:\\g\\trunk\\src\\chrome\\browser\\tab_contents_factory.cc:92 ]'], [20652484, 'chrome.dll!Browser::CreateTabContentsForURL+0xb4 [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:1349 ]'], [20651898, 'chrome.dll!Browser::AddTabWithURL+0x12a [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:1228 ]'], [20567357, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0xfd [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:540 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]'], [20571326, 'chrome.dll!BrowserInit::LaunchBrowserImpl+0x2de [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:720 ]'], [20572818, 'chrome.dll!BrowserInit::ProcessCommandLine+0x522 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:659 ]'], [20405083, 'chrome.dll!BrowserMain+0xfdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:579 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]']], 'thread': 3956, 'syscall': 4439, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserCreateWindowEx', 'done': 99.715187, 'ms': 99.146679000000006, 'syscallargs': [32, 1240616, 1239380], 'cpu': 3124053392},
+{'eventid': 1718479108, 'stacktrace': [[2088830549, 'kernel32.dll!CreateThread+0x1e'], [19417542, 'chrome.dll!PlatformThread::Create+0x36 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:83 ]'], [23799365, 'chrome.dll!base::Thread::StartWithOptions+0xb5 [ c:\\g\\trunk\\src\\base\\thread.cc:82 ]'], [23799735, 'chrome.dll!base::Thread::Start+0x17 [ c:\\g\\trunk\\src\\base\\thread.cc:72 ]'], [20506336, 'chrome.dll!BrowserProcessImpl::CreateDBThread+0x90 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.cc:311 ]'], [20398633, 'chrome.dll!BrowserProcessImpl::db_thread+0x19 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.h:68 ]'], [20857407, 'chrome.dll!ProfileImpl::RequestContext::RequestContext+0x2ef [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:144 ]'], [20862571, 'chrome.dll!ProfileImpl::GetRequestContext+0x11b [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:739 ]'], [20852186, 'chrome.dll!ProfileImpl::InitializeSpellChecker+0xfa [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:929 ]'], [20852480, 'chrome.dll!ProfileImpl::GetSpellChecker+0x10 [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:960 ]'], [21339442, 'chrome.dll!RenderViewHost::CreateRenderView+0x82 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host.cc:123 ]'], [21021101, 'chrome.dll!WebContents::CreateRenderViewForRenderManager+0x2d [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:1367 ]'], [21556055, 'chrome.dll!RenderViewHostManager::Navigate+0x57 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host_manager.cc:103 ]'], [21021786, 'chrome.dll!WebContents::NavigateToPendingEntry+0x1a [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:354 ]'], [21011780, 'chrome.dll!NavigationController::NavigateToPendingEntry+0x144 [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:1040 ]'], [21012234, 'chrome.dll!NavigationController::LoadEntry+0x6a [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:276 ]']], 'thread': 3956, 'eventtype': 
'EVENT_TYPE_CREATETHREAD', 'startaddr': 2088830553, 'ms': 101.072622, 'cpu': 3124053392},
+{'stacktrace': [], 'thread': 1884, 'eventtype': 'EVENT_TYPE_THREADBEGIN', 'parenteventid': 1718479108, 'startaddr': 19417280, 'ms': 101.217613, 'cpu': 2147742720},
+{'stacktrace': [[19417443, 'chrome.dll!PlatformThread::SetName+0x63 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:62 ]'], [23799603, 'chrome.dll!base::Thread::ThreadMain+0x53 [ c:\\g\\trunk\\src\\base\\thread.cc:145 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 1884, 'eventtype': 'EVENT_TYPE_THREADNAME', 'threadname': 'Chrome_DBThread', 'ms': 101.24918099999999, 'cpu': 2147742720},
+{'stacktrace': [[2088772914, 'kernel32.dll!WaitForSingleObject+0x12'], [19495803, 'chrome.dll!base::WaitableEvent::Wait+0x1b [ c:\\g\\trunk\\src\\base\\waitable_event_win.cc:42 ]'], [19485939, 'chrome.dll!base::MessagePumpDefault::Run+0xc3 [ c:\\g\\trunk\\src\\base\\message_pump_default.cc:43 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 1884, 'syscall': 271, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwWaitForSingleObject', 'waiting': 1, 'done': 299.45342199999999, 'ms': 101.293041, 'syscallargs': [284, 0, 0], 'cpu': 2147742720},
+{'eventid': 1718482060, 'stacktrace': [[2088830549, 'kernel32.dll!CreateThread+0x1e'], [19417542, 'chrome.dll!PlatformThread::Create+0x36 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:83 ]'], [23799365, 'chrome.dll!base::Thread::StartWithOptions+0xb5 [ c:\\g\\trunk\\src\\base\\thread.cc:82 ]'], [23799735, 'chrome.dll!base::Thread::Start+0x17 [ c:\\g\\trunk\\src\\base\\thread.cc:72 ]'], [20811109, 'chrome.dll!SafeBrowsingService::Start+0xa5 [ c:\\g\\trunk\\src\\chrome\\browser\\safe_browsing\\safe_browsing_service.cc:61 ]'], [20812754, 'chrome.dll!SafeBrowsingService::Initialize+0x72 [ c:\\g\\trunk\\src\\chrome\\browser\\safe_browsing\\safe_browsing_service.cc:54 ]'], [20696885, 'chrome.dll!ResourceDispatcherHost::Initialize+0x75 [ c:\\g\\trunk\\src\\chrome\\browser\\resource_dispatcher_host.cc:1439 ]'], [20505550, 'chrome.dll!BrowserProcessImpl::CreateResourceDispatcherHost+0xae [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.cc:263 ]'], [20398441, 'chrome.dll!BrowserProcessImpl::resource_dispatcher_host+0x19 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.h:40 ]'], [20390068, 'chrome.dll!RenderProcessHost::Init+0xa4 [ c:\\g\\trunk\\src\\chrome\\browser\\render_process_host.cc:223 ]'], [21339442, 'chrome.dll!RenderViewHost::CreateRenderView+0x82 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host.cc:123 ]'], [21021101, 'chrome.dll!WebContents::CreateRenderViewForRenderManager+0x2d [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:1367 ]'], [21556055, 'chrome.dll!RenderViewHostManager::Navigate+0x57 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host_manager.cc:103 ]'], [21021786, 'chrome.dll!WebContents::NavigateToPendingEntry+0x1a [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:354 ]'], [21011780, 'chrome.dll!NavigationController::NavigateToPendingEntry+0x144 [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:1040 ]'], [21012234, 'chrome.dll!NavigationController::LoadEntry+0x6a [ 
c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:276 ]']], 'thread': 3956, 'eventtype': 'EVENT_TYPE_CREATETHREAD', 'startaddr': 2088830553, 'ms': 101.49222899999999, 'cpu': 3124053392},
+{'stacktrace': [], 'thread': 2136, 'eventtype': 'EVENT_TYPE_THREADBEGIN', 'parenteventid': 1718482060, 'startaddr': 19417280, 'ms': 101.625765, 'cpu': 2147742720},
+{'stacktrace': [[19417443, 'chrome.dll!PlatformThread::SetName+0x63 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:62 ]'], [23799603, 'chrome.dll!base::Thread::ThreadMain+0x53 [ c:\\g\\trunk\\src\\base\\thread.cc:145 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2136, 'eventtype': 'EVENT_TYPE_THREADNAME', 'threadname': 'Chrome_SafeBrowsingThread', 'ms': 101.65426100000001, 'cpu': 2147742720},
+{'stacktrace': [[2011711194, 'RPCRT4.dll!NMP_Open+0x122'], [2011709069, 'RPCRT4.dll!OSF_CCONNECTION::TransOpen+0x5e'], [2011710455, 'RPCRT4.dll!OSF_CCONNECTION::OpenConnectionAndBind+0x98'], [2011710849, 'RPCRT4.dll!OSF_CCALL::BindToServer+0x88'], [2011710672, 'RPCRT4.dll!OSF_BINDING_HANDLE::AllocateCCall+0x4b04'], [2011691420, 'RPCRT4.dll!OSF_BINDING_HANDLE::NegotiateTransferSyntax+0x28'], [2011664833, 'RPCRT4.dll!I_RpcGetBufferWithObject+0x48'], [2011664888, 'RPCRT4.dll!I_RpcGetBuffer+0xf'], [2011666469, 'RPCRT4.dll!NdrGetBuffer+0x28'], [2012169739, 'RPCRT4.dll!NdrClientCall2+0x13b'], [2011045072, 'ADVAPI32.dll!LsarOpenPolicy2+0x1b'], [2011044946, 'ADVAPI32.dll!LsaOpenPolicy+0x43'], [2011053296, 'ADVAPI32.dll!InitializeSidLookupTable+0xef'], [2011049382, 'ADVAPI32.dll!LocalConvertStringSDToSD_Rev1+0x7a'], [2011049228, 'ADVAPI32.dll!ConvertStringSecurityDescriptorToSecurityDescriptorW+0x2e'], [19488210, 'chrome.dll!win_util::GetLogonSessionOnlyDACL+0x1a2 [ c:\\g\\trunk\\src\\base\\win_util.cc:227 ]']], 'thread': 3956, 'syscall': 224, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwSetInformationFile', 'done': 102.443746, 'ms': 102.131416, 'syscallargs': [340, 1236548, 1236564], 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 103.014769, 'ms': 102.43620300000001, 'syscallargs': [1, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[19462999, 'chrome.dll!file_util::ReadFile+0x77 [ c:\\g\\trunk\\src\\base\\file_util_win.cc:508 ]'], [21425417, 'chrome.dll!SafeBrowsingDatabase::LoadBloomFilter+0xe9 [ c:\\g\\trunk\\src\\chrome\\browser\\safe_browsing\\safe_browsing_database.cc:81 ]'], [21806924, 'chrome.dll!SafeBrowsingDatabaseImpl::Init+0xfc [ c:\\g\\trunk\\src\\chrome\\browser\\safe_browsing\\safe_browsing_database_impl.cc:105 ]'], [20805565, 'chrome.dll!SafeBrowsingService::GetDatabase+0x16d [ c:\\g\\trunk\\src\\chrome\\browser\\safe_browsing\\safe_browsing_service.cc:381 ]'], [20807180, 'chrome.dll!SafeBrowsingService::OnDBInitialize+0x6c [ c:\\g\\trunk\\src\\chrome\\browser\\safe_browsing\\safe_browsing_service.cc:106 ]'], [19405904, 'chrome.dll!MessageLoop::RunTask+0x80 [ c:\\g\\trunk\\src\\base\\message_loop.cc:309 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19486017, 'chrome.dll!base::MessagePumpDefault::Run+0x111 [ c:\\g\\trunk\\src\\base\\message_pump_default.cc:50 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2136, 'syscall': 183, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtReadFile', 'done': 103.483823, 'ms': 102.872013, 'syscallargs': [340, 0, 0], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 103.86767, 'ms': 103.13768899999999, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[2118302493, 'USER32.dll!CreateDesktopW+0x42'], [20392388, 'chrome.dll!RenderProcessHost::Init+0x9b4 [ c:\\g\\trunk\\src\\chrome\\browser\\render_process_host.cc:374 ]'], [21339442, 'chrome.dll!RenderViewHost::CreateRenderView+0x82 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host.cc:123 ]'], [21021101, 'chrome.dll!WebContents::CreateRenderViewForRenderManager+0x2d [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:1367 ]'], [21556055, 'chrome.dll!RenderViewHostManager::Navigate+0x57 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host_manager.cc:103 ]'], [21021786, 'chrome.dll!WebContents::NavigateToPendingEntry+0x1a [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:354 ]'], [21011780, 'chrome.dll!NavigationController::NavigateToPendingEntry+0x144 [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:1040 ]'], [21012234, 'chrome.dll!NavigationController::LoadEntry+0x6a [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:276 ]'], [21013144, 'chrome.dll!NavigationController::LoadURL+0x28 [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:520 ]'], [20652523, 'chrome.dll!Browser::CreateTabContentsForURL+0xdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:1358 ]'], [20651898, 'chrome.dll!Browser::AddTabWithURL+0x12a [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:1228 ]'], [20567357, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0xfd [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:540 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]'], [20571326, 'chrome.dll!BrowserInit::LaunchBrowserImpl+0x2de [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:720 ]'], [20572818, 'chrome.dll!BrowserInit::ProcessCommandLine+0x522 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:659 ]'], [20405083, 'chrome.dll!BrowserMain+0xfdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:579 ]']], 'thread': 3956, 
'syscall': 4436, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserCreateDesktop', 'done': 103.842527, 'ms': 103.254464, 'syscallargs': [1239256, 0, 0], 'cpu': 2147742720},
+{'stacktrace': [[19407956, 'chrome.dll!MessageLoop::PostTask+0x14 [ c:\\g\\trunk\\src\\base\\message_loop.cc:231 ]'], [20805662, 'chrome.dll!SafeBrowsingService::GetDatabase+0x1ce [ c:\\g\\trunk\\src\\chrome\\browser\\safe_browsing\\safe_browsing_service.cc:383 ]'], [20807180, 'chrome.dll!SafeBrowsingService::OnDBInitialize+0x6c [ c:\\g\\trunk\\src\\chrome\\browser\\safe_browsing\\safe_browsing_service.cc:106 ]'], [19405904, 'chrome.dll!MessageLoop::RunTask+0x80 [ c:\\g\\trunk\\src\\base\\message_loop.cc:309 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19486017, 'chrome.dll!base::MessagePumpDefault::Run+0x111 [ c:\\g\\trunk\\src\\base\\message_pump_default.cc:50 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2136, 'syscall': 4571, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserPostMessage', 'done': 103.852026, 'ms': 103.507289, 'syscallargs': [6685238, 1025, 13825696], 'cpu': 3124053392},
+{'stacktrace': [[2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 3500, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 115.36522100000001, 'ms': 103.633562, 'syscallargs': [4, 32505136, 1], 'cpu': 3124053392},
+{'stacktrace': [[2088772914, 'kernel32.dll!WaitForSingleObject+0x12'], [19495803, 'chrome.dll!base::WaitableEvent::Wait+0x1b [ c:\\g\\trunk\\src\\base\\waitable_event_win.cc:42 ]'], [19485939, 'chrome.dll!base::MessagePumpDefault::Run+0xc3 [ c:\\g\\trunk\\src\\base\\message_pump_default.cc:43 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2136, 'syscall': 271, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwWaitForSingleObject', 'waiting': 1, 'done': 299.48722500000002, 'ms': 103.86012700000001, 'syscallargs': [292, 0, 0], 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 118.02812900000001, 'ms': 104.123289, 'syscallargs': [2, 18348996, 1], 'cpu': 3124053392},
+{'eventid': 1718553012, 'stacktrace': [[2011134008, 'ADVAPI32.dll!CreateProcessAsUserW+0xc3'], [4226929, 'chrome.exe!sandbox::TargetProcess::Create+0xe1 [ c:\\g\\trunk\\src\\sandbox\\src\\target_process.cc:158 ]'], [4215094, 'chrome.exe!sandbox::BrokerServicesBase::SpawnTarget+0x146 [ c:\\g\\trunk\\src\\sandbox\\src\\broker_services.cc:262 ]'], [20393087, 'chrome.dll!RenderProcessHost::Init+0xc6f [ c:\\g\\trunk\\src\\chrome\\browser\\render_process_host.cc:400 ]'], [21339442, 'chrome.dll!RenderViewHost::CreateRenderView+0x82 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host.cc:123 ]'], [21021101, 'chrome.dll!WebContents::CreateRenderViewForRenderManager+0x2d [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:1367 ]'], [21556055, 'chrome.dll!RenderViewHostManager::Navigate+0x57 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host_manager.cc:103 ]'], [21021786, 'chrome.dll!WebContents::NavigateToPendingEntry+0x1a [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:354 ]'], [21011780, 'chrome.dll!NavigationController::NavigateToPendingEntry+0x144 [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:1040 ]'], [21012234, 'chrome.dll!NavigationController::LoadEntry+0x6a [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:276 ]'], [21013144, 'chrome.dll!NavigationController::LoadURL+0x28 [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:520 ]'], [20652523, 'chrome.dll!Browser::CreateTabContentsForURL+0xdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:1358 ]'], [20651898, 'chrome.dll!Browser::AddTabWithURL+0x12a [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:1228 ]'], [20567357, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0xfd [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:540 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]'], [20571326, 'chrome.dll!BrowserInit::LaunchBrowserImpl+0x2de [ 
c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:720 ]']], 'thread': 3956, 'eventtype': 'EVENT_TYPE_CREATETHREAD', 'startaddr': 2088830565, 'ms': 109.568395, 'cpu': 3124053392},
+{'stacktrace': [[2088869567, 'kernel32.dll!CreateProcessInternalW+0x9ac'], [2011134008, 'ADVAPI32.dll!CreateProcessAsUserW+0xc3'], [4226929, 'chrome.exe!sandbox::TargetProcess::Create+0xe1 [ c:\\g\\trunk\\src\\sandbox\\src\\target_process.cc:158 ]'], [4215094, 'chrome.exe!sandbox::BrokerServicesBase::SpawnTarget+0x146 [ c:\\g\\trunk\\src\\sandbox\\src\\broker_services.cc:262 ]'], [20393087, 'chrome.dll!RenderProcessHost::Init+0xc6f [ c:\\g\\trunk\\src\\chrome\\browser\\render_process_host.cc:400 ]'], [21339442, 'chrome.dll!RenderViewHost::CreateRenderView+0x82 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host.cc:123 ]'], [21021101, 'chrome.dll!WebContents::CreateRenderViewForRenderManager+0x2d [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:1367 ]'], [21556055, 'chrome.dll!RenderViewHostManager::Navigate+0x57 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host_manager.cc:103 ]'], [21021786, 'chrome.dll!WebContents::NavigateToPendingEntry+0x1a [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:354 ]'], [21011780, 'chrome.dll!NavigationController::NavigateToPendingEntry+0x144 [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:1040 ]'], [21012234, 'chrome.dll!NavigationController::LoadEntry+0x6a [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:276 ]'], [21013144, 'chrome.dll!NavigationController::LoadURL+0x28 [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:520 ]'], [20652523, 'chrome.dll!Browser::CreateTabContentsForURL+0xdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:1358 ]'], [20651898, 'chrome.dll!Browser::AddTabWithURL+0x12a [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:1228 ]'], [20567357, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0xfd [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:540 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]']], 'thread': 3956, 'syscall': 200, 'eventtype': 
'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtRequestWaitReplyPort', 'done': 112.819925, 'ms': 109.602198, 'syscallargs': [32, 1237344, 1237344], 'cpu': 3124053392},
+{'stacktrace': [[2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 3500, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 132.609871, 'ms': 115.64011600000001, 'syscallargs': [11, 32505136, 1], 'cpu': 2147742720},
+{'stacktrace': [[4216265, 'chrome.exe!sandbox::BrokerServicesBase::TargetEventsThread+0x1c9 [ c:\\g\\trunk\\src\\sandbox\\src\\broker_services.cc:151 ]'], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2428, 'syscall': 190, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwRemoveIoCompletion', 'waiting': 1, 'done': 304.12608299999999, 'ms': 115.711634, 'syscallargs': [168, 17039276, 17038976], 'cpu': 2147742720},
+{'stacktrace': [[20454796, 'chrome.dll!VisitedLinkMaster::ReadFromFile+0x2c [ c:\\g\\trunk\\src\\chrome\\browser\\visitedlink_master.cc:966 ]'], [20463804, 'chrome.dll!VisitedLinkMaster::InitFromFile+0x18c [ c:\\g\\trunk\\src\\chrome\\browser\\visitedlink_master.cc:577 ]'], [20464344, 'chrome.dll!VisitedLinkMaster::Init+0x8 [ c:\\g\\trunk\\src\\chrome\\browser\\visitedlink_master.cc:256 ]'], [20393352, 'chrome.dll!RenderProcessHost::Init+0xd78 [ c:\\g\\trunk\\src\\chrome\\browser\\render_process_host.cc:438 ]'], [21339442, 'chrome.dll!RenderViewHost::CreateRenderView+0x82 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host.cc:123 ]'], [21021101, 'chrome.dll!WebContents::CreateRenderViewForRenderManager+0x2d [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:1367 ]'], [21556055, 'chrome.dll!RenderViewHostManager::Navigate+0x57 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host_manager.cc:103 ]'], [21021786, 'chrome.dll!WebContents::NavigateToPendingEntry+0x1a [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:354 ]'], [21011780, 'chrome.dll!NavigationController::NavigateToPendingEntry+0x144 [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:1040 ]'], [21012234, 'chrome.dll!NavigationController::LoadEntry+0x6a [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:276 ]'], [21013144, 'chrome.dll!NavigationController::LoadURL+0x28 [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:520 ]'], [20652523, 'chrome.dll!Browser::CreateTabContentsForURL+0xdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:1358 ]'], [20651898, 'chrome.dll!Browser::AddTabWithURL+0x12a [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:1228 ]'], [20567357, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0xfd [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:540 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]'], [20571326, 
'chrome.dll!BrowserInit::LaunchBrowserImpl+0x2de [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:720 ]']], 'thread': 3956, 'syscall': 183, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtReadFile', 'done': 116.641361, 'ms': 115.943507, 'syscallargs': [428, 0, 0], 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 126.694035, 'ms': 118.164739, 'syscallargs': [2, 18348996, 1], 'cpu': 3124053392},
+{'stacktrace': [[2118254284, 'USER32.dll!_CreateWindowEx+0xb1'], [2118253656, 'USER32.dll!CreateWindowExW+0x33'], [21895768, 'chrome.dll!RenderWidgetHostViewWin::EnsureTooltip+0x58 [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host_view_win.cc:897 ]'], [21896123, 'chrome.dll!RenderWidgetHostViewWin::SetSize+0x3b [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host_view_win.cc:140 ]'], [21021157, 'chrome.dll!WebContents::CreateRenderViewForRenderManager+0x65 [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:1374 ]'], [21556055, 'chrome.dll!RenderViewHostManager::Navigate+0x57 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host_manager.cc:103 ]'], [21021786, 'chrome.dll!WebContents::NavigateToPendingEntry+0x1a [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:354 ]'], [21011780, 'chrome.dll!NavigationController::NavigateToPendingEntry+0x144 [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:1040 ]'], [21012234, 'chrome.dll!NavigationController::LoadEntry+0x6a [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:276 ]'], [21013144, 'chrome.dll!NavigationController::LoadURL+0x28 [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:520 ]'], [20652523, 'chrome.dll!Browser::CreateTabContentsForURL+0xdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:1358 ]'], [20651898, 'chrome.dll!Browser::AddTabWithURL+0x12a [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:1228 ]'], [20567357, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0xfd [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:540 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]'], [20571326, 'chrome.dll!BrowserInit::LaunchBrowserImpl+0x2de [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:720 ]'], [20572818, 'chrome.dll!BrowserInit::ProcessCommandLine+0x522 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:659 ]']], 'thread': 3956, 'syscall': 4439, 
'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserCreateWindowEx', 'done': 125.14244100000001, 'ms': 124.58874, 'syscallargs': [32, 1240148, 1238912], 'cpu': 2147742720},
+{'stacktrace': [[19469642, 'chrome.dll!base::MessagePumpForIO::ProcessNextObject+0xba [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:668 ]'], [19470272, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0x70 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:537 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 127.571521, 'ms': 126.896575, 'syscallargs': [1, 18349136, 1], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 129.02449899999999, 'ms': 127.671254, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[27279557, 'chrome.dll!views::HWNDView::Attach+0x95 [ c:\\g\\trunk\\src\\chrome\\views\\hwnd_view.cc:46 ]'], [23096819, 'chrome.dll!TabContentsContainerView::SetTabContents+0xc3 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\tab_contents_container_view.cc:83 ]'], [23148817, 'chrome.dll!BrowserView2::TabSelectedAt+0x81 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:530 ]'], [21206163, 'chrome.dll!TabStripModel::ChangeSelectedContentsFrom+0xe3 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:585 ]'], [21208866, 'chrome.dll!TabStripModel::InsertTabContentsAt+0x142 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:103 ]'], [21209292, 'chrome.dll!TabStripModel::AddTabContents+0xac [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:378 ]'], [20651916, 'chrome.dll!Browser::AddTabWithURL+0x13c [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:1232 ]'], [20567357, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0xfd [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:540 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]'], [20571326, 'chrome.dll!BrowserInit::LaunchBrowserImpl+0x2de [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:720 ]'], [20572818, 'chrome.dll!BrowserInit::ProcessCommandLine+0x522 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:659 ]'], [20405083, 'chrome.dll!BrowserMain+0xfdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:579 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 4642, 'eventtype': 'EVENT_TYPE_SYSCALL', 
'syscallname': 'user32.dll!NtUserSetWindowPos', 'done': 129.14797799999999, 'ms': 128.71188900000001, 'syscallargs': [24969860, 0, 5], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 147.204184, 'ms': 129.097972, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[2088925701, 'kernel32.dll!BasepCopyFileExW+0x3f2'], [2088926059, 'kernel32.dll!CopyFileExW+0x39'], [2088958097, 'kernel32.dll!CopyFileW+0x1e'], [19457905, 'chrome.dll!base::CopyFileW+0x41 [ c:\\g\\trunk\\src\\base\\file_util_win.cc:115 ]'], [19447294, 'chrome.dll!base::CopyFileW+0x2e [ c:\\g\\trunk\\src\\base\\file_util.cc:324 ]'], [21848115, 'chrome.dll!BookmarkStorageBackend::BookmarkStorageBackend+0xc3 [ c:\\g\\trunk\\src\\chrome\\browser\\bookmarks\\bookmark_storage.cc:123 ]'], [21848622, 'chrome.dll!BookmarkStorage::BookmarkStorage+0x12e [ c:\\g\\trunk\\src\\chrome\\browser\\bookmarks\\bookmark_storage.cc:41 ]'], [21452155, 'chrome.dll!BookmarkModel::Load+0xcb [ c:\\g\\trunk\\src\\chrome\\browser\\bookmarks\\bookmark_model.cc:137 ]'], [20848903, 'chrome.dll!ProfileImpl::GetBookmarkModel+0x47 [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:853 ]'], [22957786, 'chrome.dll!BookmarkBarView::BookmarkBarView+0x13a [ c:\\g\\trunk\\src\\chrome\\browser\\views\\bookmark_bar_view.cc:692 ]'], [23141288, 'chrome.dll!BrowserView2::GetBookmarkBarView+0x48 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:444 ]'], [23147457, 'chrome.dll!BrowserView2::UpdateUIForContents+0x11 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:1020 ]'], [23148906, 'chrome.dll!BrowserView2::TabSelectedAt+0xda [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:538 ]'], [21206163, 'chrome.dll!TabStripModel::ChangeSelectedContentsFrom+0xe3 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:585 ]'], [21208866, 'chrome.dll!TabStripModel::InsertTabContentsAt+0x142 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:103 ]'], [21209292, 'chrome.dll!TabStripModel::AddTabContents+0xac [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:378 ]']], 'thread': 3956, 'syscall': 37, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtCreateFile', 'done': 132.690607, 'ms': 132.46627699999999, 
'syscallargs': [1238200, 1074856064, 1237976], 'cpu': 2147742720},
+{'stacktrace': [[2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 3500, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 146.83514199999999, 'ms': 132.61238499999999, 'syscallargs': [12, 32505136, 1], 'cpu': 3124053392},
+{'stacktrace': [[19407956, 'chrome.dll!MessageLoop::PostTask+0x14 [ c:\\g\\trunk\\src\\base\\message_loop.cc:231 ]'], [21849655, 'chrome.dll!BookmarkStorage::LoadBookmarks+0x87 [ c:\\g\\trunk\\src\\chrome\\browser\\bookmarks\\bookmark_storage.cc:53 ]'], [21452202, 'chrome.dll!BookmarkModel::Load+0xfa [ c:\\g\\trunk\\src\\chrome\\browser\\bookmarks\\bookmark_model.cc:138 ]'], [20848903, 'chrome.dll!ProfileImpl::GetBookmarkModel+0x47 [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:853 ]'], [22957786, 'chrome.dll!BookmarkBarView::BookmarkBarView+0x13a [ c:\\g\\trunk\\src\\chrome\\browser\\views\\bookmark_bar_view.cc:692 ]'], [23141288, 'chrome.dll!BrowserView2::GetBookmarkBarView+0x48 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:444 ]'], [23147457, 'chrome.dll!BrowserView2::UpdateUIForContents+0x11 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:1020 ]'], [23148906, 'chrome.dll!BrowserView2::TabSelectedAt+0xda [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:538 ]'], [21206163, 'chrome.dll!TabStripModel::ChangeSelectedContentsFrom+0xe3 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:585 ]'], [21208866, 'chrome.dll!TabStripModel::InsertTabContentsAt+0x142 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:103 ]'], [21209292, 'chrome.dll!TabStripModel::AddTabContents+0xac [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:378 ]'], [20651916, 'chrome.dll!Browser::AddTabWithURL+0x13c [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:1232 ]'], [20567357, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0xfd [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:540 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]'], [20571326, 'chrome.dll!BrowserInit::LaunchBrowserImpl+0x2de [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:720 ]'], [20572818, 
'chrome.dll!BrowserInit::ProcessCommandLine+0x522 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:659 ]']], 'thread': 3956, 'syscall': 4571, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserPostMessage', 'done': 135.53007400000001, 'ms': 133.02305200000001, 'syscallargs': [8585726, 1025, 13802976], 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19467175, 'chrome.dll!base::MessagePumpForUI::WaitForWork+0x27 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:408 ]'], [19470141, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0xbd [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:393 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2736, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 220.41153299999999, 'ms': 135.494316, 'syscallargs': [1, 15858672, 1], 'cpu': 3124053392},
+{'stacktrace': [[27112751, 'chrome.dll!views::Window::Show+0xf [ c:\\g\\trunk\\src\\chrome\\views\\window.cc:93 ]'], [23139435, 'chrome.dll!BrowserView2::Show+0x1b [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:304 ]'], [20641529, 'chrome.dll!Browser::ShowAndFit+0x29 [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:335 ]'], [20567444, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0x154 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:544 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]'], [20571326, 'chrome.dll!BrowserInit::LaunchBrowserImpl+0x2de [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:720 ]'], [20572818, 'chrome.dll!BrowserInit::ProcessCommandLine+0x522 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:659 ]'], [20405083, 'chrome.dll!BrowserMain+0xfdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:579 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 4651, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserShowWindow', 'done': 166.489037, 'ms': 135.654112, 'syscallargs': [9961988, 10, 13830668], 'cpu': 3124053392},
+{'stacktrace': [[27287730, 'chrome.dll!views::CustomFrameWindow::OnNCActivate+0x42 [ c:\\g\\trunk\\src\\chrome\\views\\custom_frame_window.cc:1068 ]'], [23138561, 'chrome.dll!OpaqueFrame::OnNCActivate+0x21 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\opaque_frame.cc:104 ]'], [27136100, 'chrome.dll!views::ContainerWin::_ProcessWindowMessage+0x6c4 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.h:214 ]'], [27142851, 'chrome.dll!views::ContainerWin::WndProc+0xc3 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:917 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [27112751, 'chrome.dll!views::Window::Show+0xf [ c:\\g\\trunk\\src\\chrome\\views\\window.cc:93 ]'], [23139435, 'chrome.dll!BrowserView2::Show+0x1b [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:304 ]']], 'thread': 3956, 'syscall': 4583, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserRedrawWindow', 'done': 164.93828099999999, 'ms': 137.03361699999999, 'syscallargs': [9961988, 1240560, 0], 'cpu': 2147742720},
+{'stacktrace': [[27139267, 'chrome.dll!views::ContainerWin::OnPaint+0x13 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:623 ]'], [27142851, 'chrome.dll!views::ContainerWin::WndProc+0xc3 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:917 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [27287730, 'chrome.dll!views::CustomFrameWindow::OnNCActivate+0x42 [ c:\\g\\trunk\\src\\chrome\\views\\custom_frame_window.cc:1068 ]'], [23138561, 'chrome.dll!OpaqueFrame::OnNCActivate+0x21 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\opaque_frame.cc:104 ]'], [27136100, 'chrome.dll!views::ContainerWin::_ProcessWindowMessage+0x6c4 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.h:214 ]'], [27142851, 'chrome.dll!views::ContainerWin::WndProc+0xc3 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:917 ]']], 'thread': 3956, 'syscall': 4404, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserBeginPaint', 'done': 147.41705999999999, 'ms': 137.187827, 'syscallargs': [9961988, 1239308, 0], 'cpu': 2147742720},
+{'stacktrace': [[2011005211, 'ADVAPI32.dll!RegEnumValueW+0x9a'], [1960473629, 'USP10.dll!CacheFontLinkingData+0x1e1'], [1960474061, 'USP10.dll!IsFontRegLinked+0x21'], [1960477397, 'USP10.dll!LoadFont+0x165'], [1960464506, 'USP10.dll!FindOrCreateFaceCache+0x9c'], [1960466474, 'USP10.dll!FindOrCreateSizeCacheWithoutRealizationID+0xc7'], [1960467102, 'USP10.dll!FindOrCreateSizeCacheUsingRealizationID+0x5d'], [1960468147, 'USP10.dll!UpdateCache+0x2c'], [1960468379, 'USP10.dll!ScriptCheckCache+0x67'], [1960460694, 'USP10.dll!ScriptStringAnalyse+0x143'], [1654409458, 'LPK.DLL!LpkStringAnalyse+0xfd'], [1654408779, 'LPK.DLL!LpkCharsetDraw+0x2f3'], [1654395497, 'LPK.DLL!LpkDrawTextEx+0x34'], [2118444036, 'USER32.dll!DT_DrawStr+0x30249'], [2118246821, 'USER32.dll!DT_DrawJustifiedLine+0x31'], [2118248132, 'USER32.dll!AddEllipsisAndDrawLine+0x5d']], 'thread': 3956, 'syscall': 73, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtEnumerateValueKey', 'done': 143.77721199999999, 'ms': 142.97459599999999, 'syscallargs': [508, 1, 1], 'cpu': 2147742720},
+{'stacktrace': [[2012320926, 'GDI32.dll!CreateFontIndirectW+0x46'], [1960473724, 'USP10.dll!CacheFontLinkingData+0x240'], [1960474061, 'USP10.dll!IsFontRegLinked+0x21'], [1960477397, 'USP10.dll!LoadFont+0x165'], [1960464506, 'USP10.dll!FindOrCreateFaceCache+0x9c'], [1960466474, 'USP10.dll!FindOrCreateSizeCacheWithoutRealizationID+0xc7'], [1960467102, 'USP10.dll!FindOrCreateSizeCacheUsingRealizationID+0x5d'], [1960468147, 'USP10.dll!UpdateCache+0x2c'], [1960468379, 'USP10.dll!ScriptCheckCache+0x67'], [1960460694, 'USP10.dll!ScriptStringAnalyse+0x143'], [1654409458, 'LPK.DLL!LpkStringAnalyse+0xfd'], [1654408779, 'LPK.DLL!LpkCharsetDraw+0x2f3'], [1654395497, 'LPK.DLL!LpkDrawTextEx+0x34'], [2118444036, 'USER32.dll!DT_DrawStr+0x30249'], [2118246821, 'USER32.dll!DT_DrawJustifiedLine+0x31'], [2118248132, 'USER32.dll!AddEllipsisAndDrawLine+0x5d']], 'thread': 3956, 'syscall': 4314, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'gdi32.dll!NtGdiHfontCreate', 'done': 145.325174, 'ms': 143.78643099999999, 'syscallargs': [1233740, 356, 0], 'cpu': 2147742720},
+{'stacktrace': [[26995006, 'chrome.dll!gfx::BitmapPlatformDeviceWin::drawToHDC+0x9e [ c:\\g\\trunk\\src\\base\\gfx\\bitmap_platform_device_win.cc:378 ]'], [21604437, 'chrome.dll!gfx::CanvasPaintT<ChromeCanvas>::~CanvasPaintT<ChromeCanvas>+0x55 [ c:\\g\\trunk\\src\\base\\gfx\\platform_canvas_win.h:127 ]'], [27136650, 'chrome.dll!views::ContainerWin::_ProcessWindowMessage+0x8ea [ c:\\g\\trunk\\src\\chrome\\views\\container_win.h:224 ]'], [27142851, 'chrome.dll!views::ContainerWin::WndProc+0xc3 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:917 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [27139267, 'chrome.dll!views::ContainerWin::OnPaint+0x13 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:623 ]'], [27142851, 'chrome.dll!views::ContainerWin::WndProc+0xc3 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:917 ]']], 'thread': 3956, 'syscall': 4109, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'gdi32.dll!NtGdiBitBlt', 'done': 146.987955, 'ms': 146.60997399999999, 'syscallargs': [771819156, 0, 0], 'cpu': 2147742720},
+{'stacktrace': [[2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 3500, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 214.033627, 'ms': 146.85358099999999, 'syscallargs': [12, 32505136, 1], 'cpu': 3124053392},
+{'stacktrace': [[26996791, 'chrome.dll!gfx::BitmapPlatformDeviceWin::~BitmapPlatformDeviceWin+0x37 [ c:\\g\\trunk\\src\\base\\gfx\\bitmap_platform_device_win.cc:328 ]'], [23980661, 'chrome.dll!DeviceCM::~DeviceCM+0x35 [ c:\\g\\trunk\\src\\skia\\sgl\\skcanvas.cpp:83 ]'], [23986073, 'chrome.dll!SkCanvas::~SkCanvas+0x29 [ c:\\g\\trunk\\src\\skia\\sgl\\skcanvas.cpp:410 ]'], [27136650, 'chrome.dll!views::ContainerWin::_ProcessWindowMessage+0x8ea [ c:\\g\\trunk\\src\\chrome\\views\\container_win.h:224 ]'], [27142851, 'chrome.dll!views::ContainerWin::WndProc+0xc3 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:917 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [27139267, 'chrome.dll!views::ContainerWin::OnPaint+0x13 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:623 ]']], 'thread': 3956, 'syscall': 4218, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'gdi32.dll!DeleteObject', 'done': 147.34358700000001, 'ms': 147.01589200000001, 'syscallargs': [2181367073, 14636032, 14636232], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 149.87212099999999, 'ms': 147.65116800000001, 'syscallargs': [2, 18348996, 1], 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 155.44461699999999, 'ms': 149.96458999999999, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[26995006, 'chrome.dll!gfx::BitmapPlatformDeviceWin::drawToHDC+0x9e [ c:\\g\\trunk\\src\\base\\gfx\\bitmap_platform_device_win.cc:378 ]'], [21604437, 'chrome.dll!gfx::CanvasPaintT<ChromeCanvas>::~CanvasPaintT<ChromeCanvas>+0x55 [ c:\\g\\trunk\\src\\base\\gfx\\platform_canvas_win.h:127 ]'], [27139267, 'chrome.dll!views::ContainerWin::OnPaint+0x13 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:623 ]'], [27142851, 'chrome.dll!views::ContainerWin::WndProc+0xc3 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:917 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [27287730, 'chrome.dll!views::CustomFrameWindow::OnNCActivate+0x42 [ c:\\g\\trunk\\src\\chrome\\views\\custom_frame_window.cc:1068 ]'], [23138561, 'chrome.dll!OpaqueFrame::OnNCActivate+0x21 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\opaque_frame.cc:104 ]']], 'thread': 3956, 'syscall': 4109, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'gdi32.dll!NtGdiBitBlt', 'done': 153.59857199999999, 'ms': 153.273112, 'syscallargs': [771819156, 0, 0], 'cpu': 3124053392},
+{'stacktrace': [[26996791, 'chrome.dll!gfx::BitmapPlatformDeviceWin::~BitmapPlatformDeviceWin+0x37 [ c:\\g\\trunk\\src\\base\\gfx\\bitmap_platform_device_win.cc:328 ]'], [23980661, 'chrome.dll!DeviceCM::~DeviceCM+0x35 [ c:\\g\\trunk\\src\\skia\\sgl\\skcanvas.cpp:83 ]'], [23986073, 'chrome.dll!SkCanvas::~SkCanvas+0x29 [ c:\\g\\trunk\\src\\skia\\sgl\\skcanvas.cpp:410 ]'], [27139267, 'chrome.dll!views::ContainerWin::OnPaint+0x13 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:623 ]'], [27142851, 'chrome.dll!views::ContainerWin::WndProc+0xc3 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:917 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [27287730, 'chrome.dll!views::CustomFrameWindow::OnNCActivate+0x42 [ c:\\g\\trunk\\src\\chrome\\views\\custom_frame_window.cc:1068 ]']], 'thread': 3956, 'syscall': 4218, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'gdi32.dll!DeleteObject', 'done': 153.84329600000001, 'ms': 153.641594, 'syscallargs': [2198144289, 14636032, 14636144], 'cpu': 3124053392},
+{'stacktrace': [[2088772914, 'kernel32.dll!WaitForSingleObject+0x12'], [20447362, 'chrome.dll!RenderWidgetHelper::WaitForPaintMsg+0x102 [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_helper.cc:121 ]'], [21729909, 'chrome.dll!RenderWidgetHost::GetBackingStore+0x125 [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host.cc:742 ]'], [21898819, 'chrome.dll!RenderWidgetHostViewWin::OnPaint+0xa3 [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host_view_win.cc:402 ]'], [21900924, 'chrome.dll!RenderWidgetHostViewWin::ProcessWindowMessage+0xbc [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host_view_win.h:76 ]'], [21752349, 'chrome.dll!ATL::CWindowImplBaseT<ATL::CWindow,ATL::CWinTraits<1174405120,0> >::WindowProc+0x5d [ c:\\program files\\microsoft visual studio 8\\vc\\atlmfc\\include\\atlwin.h:3078 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13']], 'thread': 3956, 'syscall': 271, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwWaitForSingleObject', 'waiting': 1, 'done': 158.746712, 'ms': 154.248096, 'syscallargs': [260, 0, 1238856], 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 158.69866099999999, 'ms': 155.524236, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 162.17675700000001, 'ms': 158.787779, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[21721107, 'chrome.dll!RenderWidgetHost::BackingStore::Refresh+0x133 [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host.cc:95 ]'], [21726483, 'chrome.dll!RenderWidgetHost::BackingStoreManager::PrepareBackingStore+0x103 [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host.cc:197 ]'], [21726586, 'chrome.dll!RenderWidgetHost::PaintRect+0x5a [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host.cc:770 ]'], [21727344, 'chrome.dll!RenderWidgetHost::OnMsgPaintRect+0x250 [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host.cc:379 ]'], [21728798, 'chrome.dll!IPC::MessageWithTuple<ViewHostMsg_PaintRect_Params>::Dispatch<RenderWidgetHost,void (__thiscall RenderWidgetHost::*)(ViewHostMs+0x3e [ c:\\g\\trunk\\src\\chrome\\common\\ipc_message_utils.h:990 ]'], [21729932, 'chrome.dll!RenderWidgetHost::GetBackingStore+0x13c [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host.cc:747 ]'], [21898819, 'chrome.dll!RenderWidgetHostViewWin::OnPaint+0xa3 [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host_view_win.cc:402 ]'], [21900924, 'chrome.dll!RenderWidgetHostViewWin::ProcessWindowMessage+0xbc [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host_view_win.h:76 ]'], [21752349, 'chrome.dll!ATL::CWindowImplBaseT<ATL::CWindow,ATL::CWinTraits<1174405120,0> >::WindowProc+0x5d [ c:\\program files\\microsoft visual studio 8\\vc\\atlmfc\\include\\atlwin.h:3078 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7']], 'thread': 3956, 'syscall': 4389, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'gdi32.dll!NtGdiStretchDIBitsInternal', 'done': 161.89711299999999, 
'ms': 158.891423, 'syscallargs': [3825272163, 0, 0], 'cpu': 3124053392},
+{'stacktrace': [[21721114, 'chrome.dll!RenderWidgetHost::BackingStore::Refresh+0x13a [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host.cc:96 ]'], [21726483, 'chrome.dll!RenderWidgetHost::BackingStoreManager::PrepareBackingStore+0x103 [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host.cc:197 ]'], [21726586, 'chrome.dll!RenderWidgetHost::PaintRect+0x5a [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host.cc:770 ]'], [21727344, 'chrome.dll!RenderWidgetHost::OnMsgPaintRect+0x250 [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host.cc:379 ]'], [21728798, 'chrome.dll!IPC::MessageWithTuple<ViewHostMsg_PaintRect_Params>::Dispatch<RenderWidgetHost,void (__thiscall RenderWidgetHost::*)(ViewHostMs+0x3e [ c:\\g\\trunk\\src\\chrome\\common\\ipc_message_utils.h:990 ]'], [21729932, 'chrome.dll!RenderWidgetHost::GetBackingStore+0x13c [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host.cc:747 ]'], [21898819, 'chrome.dll!RenderWidgetHostViewWin::OnPaint+0xa3 [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host_view_win.cc:402 ]'], [21900924, 'chrome.dll!RenderWidgetHostViewWin::ProcessWindowMessage+0xbc [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host_view_win.h:76 ]'], [21752349, 'chrome.dll!ATL::CWindowImplBaseT<ATL::CWindow,ATL::CWinTraits<1174405120,0> >::WindowProc+0x5d [ c:\\program files\\microsoft visual studio 8\\vc\\atlmfc\\include\\atlwin.h:3078 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7']], 'thread': 3956, 'syscall': 267, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtUnmapViewOfSection', 'done': 162.12954400000001, 'ms': 
161.902421, 'syscallargs': [4294967295, 43974656, 1239104], 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 170.10988800000001, 'ms': 162.27425600000001, 'syscallargs': [2, 18348996, 1], 'cpu': 3124053392},
+{'stacktrace': [[21898949, 'chrome.dll!RenderWidgetHostViewWin::OnPaint+0x125 [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host_view_win.cc:424 ]'], [21900924, 'chrome.dll!RenderWidgetHostViewWin::ProcessWindowMessage+0xbc [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host_view_win.h:76 ]'], [21752349, 'chrome.dll!ATL::CWindowImplBaseT<ATL::CWindow,ATL::CWinTraits<1174405120,0> >::WindowProc+0x5d [ c:\\program files\\microsoft visual studio 8\\vc\\atlmfc\\include\\atlwin.h:3078 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [27287730, 'chrome.dll!views::CustomFrameWindow::OnNCActivate+0x42 [ c:\\g\\trunk\\src\\chrome\\views\\custom_frame_window.cc:1068 ]'], [23138561, 'chrome.dll!OpaqueFrame::OnNCActivate+0x21 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\opaque_frame.cc:104 ]'], [27136100, 'chrome.dll!views::ContainerWin::_ProcessWindowMessage+0x6c4 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.h:214 ]']], 'thread': 3956, 'syscall': 4109, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'gdi32.dll!NtGdiBitBlt', 'done': 164.14181099999999, 'ms': 162.369798, 'syscallargs': [3389065955, 0, 0], 'cpu': 3124053392},
+{'stacktrace': [[27151840, 'chrome.dll!views::FocusWindowCallback+0x130 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:175 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [27112751, 'chrome.dll!views::Window::Show+0xf [ c:\\g\\trunk\\src\\chrome\\views\\window.cc:93 ]'], [23139435, 'chrome.dll!BrowserView2::Show+0x1b [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:304 ]'], [20641529, 'chrome.dll!Browser::ShowAndFit+0x29 [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:335 ]'], [20567444, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0x154 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:544 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]'], [20571326, 'chrome.dll!BrowserInit::LaunchBrowserImpl+0x2de [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:720 ]'], [20572818, 'chrome.dll!BrowserInit::ProcessCommandLine+0x522 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:659 ]'], [20405083, 'chrome.dll!BrowserMain+0xfdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:579 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]']], 'thread': 3956, 'syscall': 4611, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserSetFocus', 'done': 166.26107500000001, 'ms': 164.97012899999999, 'syscallargs': [9961988, 9961988, 13872320], 'cpu': 3124053392},
+{'stacktrace': [[2118254284, 'USER32.dll!_CreateWindowEx+0xb1'], [2118253656, 'USER32.dll!CreateWindowExW+0x33'], [2118563099, 'USER32.dll!CreateIMEUI+0xa4'], [2118566246, 'USER32.dll!ImeSetContextHandler+0x40'], [2118567660, 'USER32.dll!ImeWndProcWorker+0x2f6'], [2118304721, 'USER32.dll!SendMessageWorker+0x10bfe'], [2118431811, 'USER32.dll!RealDefWindowProcWorker+0x3022c'], [2118235129, 'USER32.dll!RealDefWindowProcW+0x27'], [2118235027, 'USER32.dll!DefWindowProcW+0x57'], [27142877, 'chrome.dll!views::ContainerWin::WndProc+0xdd [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:918 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28']], 'thread': 3956, 'syscall': 4439, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserCreateWindowEx', 'done': 165.93449699999999, 'ms': 165.35397699999999, 'syscallargs': [0, 1238728, 1237492], 'cpu': 3124053392},
+{'stacktrace': [[27208659, 'chrome.dll!views::RootView::FocusView+0xe3 [ c:\\g\\trunk\\src\\chrome\\views\\root_view.cc:499 ]'], [27118902, 'chrome.dll!views::View::RequestFocus+0x16 [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:1423 ]'], [23139435, 'chrome.dll!BrowserView2::Show+0x1b [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:304 ]'], [20641529, 'chrome.dll!Browser::ShowAndFit+0x29 [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:335 ]'], [20567444, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0x154 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:544 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]'], [20571326, 'chrome.dll!BrowserInit::LaunchBrowserImpl+0x2de [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:720 ]'], [20572818, 'chrome.dll!BrowserInit::ProcessCommandLine+0x522 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:659 ]'], [20405083, 'chrome.dll!BrowserMain+0xfdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:579 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 4611, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserSetFocus', 'done': 168.39933600000001, 'ms': 166.56362799999999, 'syscallargs': [8979080, 27145854, 13872320], 'cpu': 3124053392},
+{'stacktrace': [[20641581, 'chrome.dll!Browser::ShowAndFit+0x5d [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:351 ]'], [20567444, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0x154 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:544 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]'], [20571326, 'chrome.dll!BrowserInit::LaunchBrowserImpl+0x2de [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:720 ]'], [20572818, 'chrome.dll!BrowserInit::ProcessCommandLine+0x522 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:659 ]'], [20405083, 'chrome.dll!BrowserMain+0xfdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:579 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 4611, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserSetFocus', 'done': 170.63230100000001, 'ms': 168.406599, 'syscallargs': [24969860, 21048886, 13857560], 'cpu': 3124053392},
+{'stacktrace': [[27208659, 'chrome.dll!views::RootView::FocusView+0xe3 [ c:\\g\\trunk\\src\\chrome\\views\\root_view.cc:499 ]'], [27146912, 'chrome.dll!views::FocusManager::OnSetFocus+0x40 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:273 ]'], [27151783, 'chrome.dll!views::FocusWindowCallback+0xf7 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:158 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [20641581, 'chrome.dll!Browser::ShowAndFit+0x5d [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:351 ]'], [20567444, 'chrome.dll!BrowserInit::LaunchWithProfile::OpenURLsInBrowser+0x154 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:544 ]'], [20570295, 'chrome.dll!BrowserInit::LaunchWithProfile::Launch+0x5d7 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:472 ]'], [20571326, 'chrome.dll!BrowserInit::LaunchBrowserImpl+0x2de [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:720 ]'], [20572818, 'chrome.dll!BrowserInit::ProcessCommandLine+0x522 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_init.cc:659 ]'], [20405083, 'chrome.dll!BrowserMain+0xfdb [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:579 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]']], 'thread': 3956, 'syscall': 4611, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserSetFocus', 'done': 170.61889199999999, 'ms': 169.63636399999999, 'syscallargs': [9896426, 13833040, 27145854], 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 170.620847, 'ms': 170.203755, 'syscallargs': [2, 18348996, 1], 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 171.51900599999999, 'ms': 170.68761499999999, 'syscallargs': [2, 18348996, 1], 'cpu': 3124053392},
+{'stacktrace': [[2118226946, 'USER32.dll!PeekMessageW+0x167'], [19465953, 'chrome.dll!base::MessagePumpWin::ProcessPumpReplacementMessage+0x21 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:307 ]'], [19466558, 'chrome.dll!base::MessagePumpWin::ProcessMessageHelper+0x3e [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:280 ]'], [19470020, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0x44 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:364 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19407817, 'chrome.dll!MessageLoop::RunInternal+0xa9 [ c:\\g\\trunk\\src\\base\\message_loop.cc:192 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19409081, 'chrome.dll!MessageLoopForUI::Run+0x49 [ c:\\g\\trunk\\src\\base\\message_loop.cc:559 ]'], [20405134, 'chrome.dll!BrowserMain+0x100e [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:583 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 4570, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!PeekMessageW', 'done': 171.181253, 'ms': 170.81500600000001, 'syscallargs': [1242820, 0, 0], 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 181.74991499999999, 'ms': 171.734117, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[27201618, 'chrome.dll!views::RootView::PaintNow+0x42 [ c:\\g\\trunk\\src\\chrome\\views\\root_view.cc:183 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19470042, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0x5a [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:369 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19407817, 'chrome.dll!MessageLoop::RunInternal+0xa9 [ c:\\g\\trunk\\src\\base\\message_loop.cc:192 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19409081, 'chrome.dll!MessageLoopForUI::Run+0x49 [ c:\\g\\trunk\\src\\base\\message_loop.cc:559 ]'], [20405134, 'chrome.dll!BrowserMain+0x100e [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:583 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 4583, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserRedrawWindow', 'done': 176.42074600000001, 'ms': 171.76652300000001, 'syscallargs': [9961988, 1242600, 0], 'cpu': 3124053392},
+{'stacktrace': [[21898949, 'chrome.dll!RenderWidgetHostViewWin::OnPaint+0x125 [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host_view_win.cc:424 ]'], [21900924, 'chrome.dll!RenderWidgetHostViewWin::ProcessWindowMessage+0xbc [ c:\\g\\trunk\\src\\chrome\\browser\\render_widget_host_view_win.h:76 ]'], [21752349, 'chrome.dll!ATL::CWindowImplBaseT<ATL::CWindow,ATL::CWinTraits<1174405120,0> >::WindowProc+0x5d [ c:\\program files\\microsoft visual studio 8\\vc\\atlmfc\\include\\atlwin.h:3078 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [27201618, 'chrome.dll!views::RootView::PaintNow+0x42 [ c:\\g\\trunk\\src\\chrome\\views\\root_view.cc:183 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19470042, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0x5a [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:369 ]']], 'thread': 3956, 'syscall': 4109, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'gdi32.dll!NtGdiBitBlt', 'done': 175.758092, 'ms': 174.02686700000001, 'syscallargs': [2231437465, 0, 0], 'cpu': 3124053392},
+{'eventid': 1718825704, 'stacktrace': [[2088830549, 'kernel32.dll!CreateThread+0x1e'], [19417542, 'chrome.dll!PlatformThread::Create+0x36 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:83 ]'], [23799365, 'chrome.dll!base::Thread::StartWithOptions+0xb5 [ c:\\g\\trunk\\src\\base\\thread.cc:82 ]'], [23799735, 'chrome.dll!base::Thread::Start+0x17 [ c:\\g\\trunk\\src\\base\\thread.cc:72 ]'], [20426734, 'chrome.dll!HistoryService::Init+0xe [ c:\\g\\trunk\\src\\chrome\\browser\\history\\history.cc:128 ]'], [20850347, 'chrome.dll!ProfileImpl::GetHistoryService+0x6b [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:752 ]'], [21456458, 'chrome.dll!BookmarkModel::LoadFavIcon+0x7a [ c:\\g\\trunk\\src\\chrome\\browser\\bookmarks\\bookmark_model.cc:685 ]'], [21457734, 'chrome.dll!BookmarkNode::GetFavIcon+0x16 [ c:\\g\\trunk\\src\\chrome\\browser\\bookmarks\\bookmark_model.cc:46 ]'], [22945986, 'chrome.dll!BookmarkBarView::CreateBookmarkButton+0x152 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\bookmark_bar_view.cc:1486 ]'], [22951285, 'chrome.dll!BookmarkBarView::Loaded+0x95 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\bookmark_bar_view.cc:1210 ]'], [21458349, 'chrome.dll!BookmarkModel::DoneLoading+0x8d [ c:\\g\\trunk\\src\\chrome\\browser\\bookmarks\\bookmark_model.cc:524 ]'], [21461574, 'chrome.dll!BookmarkModel::OnBookmarkStorageLoadedBookmarks+0x166 [ c:\\g\\trunk\\src\\chrome\\browser\\bookmarks\\bookmark_model.cc:468 ]'], [21847663, 'chrome.dll!BookmarkStorage::LoadedBookmarks+0x2f [ c:\\g\\trunk\\src\\chrome\\browser\\bookmarks\\bookmark_storage.cc:90 ]'], [21847724, 'chrome.dll!RunnableMethod<BookmarkStorage,void (__thiscall BookmarkStorage::*)(Value *,bool,bool),Tuple3<Value *,bool,bool> >::Run+0x1c [ c:\\g\\trunk\\src\\base\\task.h:313 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19470042, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0x5a [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:369 ]']], 'thread': 
3956, 'eventtype': 'EVENT_TYPE_CREATETHREAD', 'startaddr': 2088830553, 'ms': 177.017191, 'cpu': 3124053392},
+{'stacktrace': [], 'thread': 2688, 'eventtype': 'EVENT_TYPE_THREADBEGIN', 'parenteventid': 1718825704, 'startaddr': 19417280, 'ms': 177.216937, 'cpu': 2147742720},
+{'stacktrace': [[19417443, 'chrome.dll!PlatformThread::SetName+0x63 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:62 ]'], [23799603, 'chrome.dll!base::Thread::ThreadMain+0x53 [ c:\\g\\trunk\\src\\base\\thread.cc:145 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2688, 'eventtype': 'EVENT_TYPE_THREADNAME', 'threadname': 'Chrome_HistoryThread', 'ms': 177.250461, 'cpu': 2147742720},
+{'stacktrace': [[27392208, 'chrome.dll!winRead+0x20 [ c:\\g\\trunk\\src\\third_party\\sqlite\\os_win.c:1003 ]'], [27524918, 'chrome.dll!sqlite3OsRead+0x16 [ c:\\g\\trunk\\src\\third_party\\sqlite\\os.c:39 ]'], [27491281, 'chrome.dll!sqlite3PagerLoadall+0x71 [ c:\\g\\trunk\\src\\third_party\\sqlite\\pager.c:4512 ]'], [27324147, 'chrome.dll!sqlite3Preload+0x33 [ c:\\g\\trunk\\src\\third_party\\sqlite\\build.c:3381 ]'], [21199165, 'chrome.dll!history::HistoryDatabase::PrimeCache+0x5d [ c:\\g\\trunk\\src\\chrome\\browser\\history\\history_database.cc:106 ]'], [21200727, 'chrome.dll!history::HistoryDatabase::Init+0x117 [ c:\\g\\trunk\\src\\chrome\\browser\\history\\history_database.cc:71 ]'], [20616681, 'chrome.dll!history::HistoryBackend::InitImpl+0x1d9 [ c:\\g\\trunk\\src\\chrome\\browser\\history\\history_backend.cc:508 ]'], [20622856, 'chrome.dll!history::HistoryBackend::Init+0x8 [ c:\\g\\trunk\\src\\chrome\\browser\\history\\history_backend.cc:238 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19486017, 'chrome.dll!base::MessagePumpDefault::Run+0x111 [ c:\\g\\trunk\\src\\base\\message_pump_default.cc:50 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2688, 'syscall': 183, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtReadFile', 'done': 192.485637, 'ms': 178.359261, 'syscallargs': [512, 0, 0], 'cpu': 3124053392},
+{'stacktrace': [[21555953, 'chrome.dll!RenderViewHostManager::Shutdown+0xb1 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host_manager.cc:84 ]'], [21036462, 'chrome.dll!WebContents::Destroy+0xee [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:312 ]'], [21008223, 'chrome.dll!NavigationController::Destroy+0x2df [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:458 ]'], [21208053, 'chrome.dll!TabStripModel::InternalCloseTabContentsAt+0x175 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:560 ]'], [20661873, 'chrome.dll!Browser::CloseContents+0xa1 [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:810 ]'], [21019423, 'chrome.dll!WebContents::Close+0x1f [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:836 ]'], [21356349, 'chrome.dll!RenderViewHost::OnMessageReceived+0x1fd [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host.cc:616 ]'], [20394864, 'chrome.dll!RenderProcessHost::OnMessageReceived+0x170 [ c:\\g\\trunk\\src\\chrome\\browser\\render_process_host.cc:624 ]'], [21472487, 'chrome.dll!RunnableMethod<ProfileWriter,void (__thiscall ProfileWriter::*)(std::vector<history::URLRow,std::allocator<history::URLRow> > c+0x17 [ c:\\g\\trunk\\src\\base\\task.h:312 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19470042, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0x5a [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:369 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19407817, 'chrome.dll!MessageLoop::RunInternal+0xa9 [ c:\\g\\trunk\\src\\base\\message_loop.cc:192 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19409081, 'chrome.dll!MessageLoopForUI::Run+0x49 [ c:\\g\\trunk\\src\\base\\message_loop.cc:559 ]'], [20405134, 'chrome.dll!BrowserMain+0x100e [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:583 ]']], 'thread': 3956, 
'syscall': 4451, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserDestroyWindow', 'done': 190.82481200000001, 'ms': 181.75131200000001, 'syscallargs': [9896426, 21722045, 14286272], 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 182.32261399999999, 'ms': 181.84853100000001, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 182.77937600000001, 'ms': 182.413128, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 184.039592, 'ms': 182.86486099999999, 'syscallargs': [2, 18348996, 1], 'cpu': 3124053392},
+{'stacktrace': [[27142851, 'chrome.dll!views::ContainerWin::WndProc+0xc3 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:917 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [21555953, 'chrome.dll!RenderViewHostManager::Shutdown+0xb1 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host_manager.cc:84 ]'], [21036462, 'chrome.dll!WebContents::Destroy+0xee [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:312 ]'], [21008223, 'chrome.dll!NavigationController::Destroy+0x2df [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:458 ]'], [21208053, 'chrome.dll!TabStripModel::InternalCloseTabContentsAt+0x175 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:560 ]'], [20661873, 'chrome.dll!Browser::CloseContents+0xa1 [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:810 ]']], 'thread': 3956, 'syscall': 4611, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserSetFocus', 'done': 184.892214, 'ms': 183.500696, 'syscallargs': [9896426, 14286908, 27137217], 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 184.75420800000001, 'ms': 184.13038499999999, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 185.30260100000001, 'ms': 185.07575700000001, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 186.651376, 'ms': 185.397865, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[27142851, 'chrome.dll!views::ContainerWin::WndProc+0xc3 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:917 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [21555953, 'chrome.dll!RenderViewHostManager::Shutdown+0xb1 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host_manager.cc:84 ]'], [21036462, 'chrome.dll!WebContents::Destroy+0xee [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:312 ]'], [21008223, 'chrome.dll!NavigationController::Destroy+0x2df [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:458 ]'], [21208053, 'chrome.dll!TabStripModel::InternalCloseTabContentsAt+0x175 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:560 ]'], [20661873, 'chrome.dll!Browser::CloseContents+0xa1 [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:810 ]']], 'thread': 3956, 'syscall': 4611, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserSetFocus', 'done': 187.67385200000001, 'ms': 186.08454399999999, 'syscallargs': [9896426, 14286908, 27137217], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 187.526906, 'ms': 186.76451900000001, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 187.99735699999999, 'ms': 187.62943300000001, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 188.40103999999999, 'ms': 188.08507800000001, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 189.66041799999999, 'ms': 188.47507200000001, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[27142851, 'chrome.dll!views::ContainerWin::WndProc+0xc3 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:917 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [21555953, 'chrome.dll!RenderViewHostManager::Shutdown+0xb1 [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host_manager.cc:84 ]'], [21036462, 'chrome.dll!WebContents::Destroy+0xee [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:312 ]'], [21008223, 'chrome.dll!NavigationController::Destroy+0x2df [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:458 ]'], [21208053, 'chrome.dll!TabStripModel::InternalCloseTabContentsAt+0x175 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:560 ]'], [20661873, 'chrome.dll!Browser::CloseContents+0xa1 [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:810 ]']], 'thread': 3956, 'syscall': 4611, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserSetFocus', 'done': 190.59852599999999, 'ms': 189.094424, 'syscallargs': [9896426, 14286908, 27137217], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 190.48985300000001, 'ms': 189.77244300000001, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 191.10529399999999, 'ms': 190.58735100000001, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 214.03893500000001, 'ms': 191.18016399999999, 'syscallargs': [2, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[23141617, 'chrome.dll!BrowserView2::TabDetachedAt+0x21 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:513 ]'], [21209929, 'chrome.dll!TabStripModel::DetachTabContentsAt+0x169 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:129 ]'], [21211607, 'chrome.dll!TabStripModel::Observe+0x77 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:522 ]'], [19680002, 'chrome.dll!NotificationService::Notify+0x2c2 [ c:\\g\\trunk\\src\\chrome\\common\\notification_service.cc:94 ]'], [21045731, 'chrome.dll!TabContents::Destroy+0x73 [ c:\\g\\trunk\\src\\chrome\\browser\\tab_contents.cc:93 ]'], [21036469, 'chrome.dll!WebContents::Destroy+0xf5 [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:312 ]'], [21008223, 'chrome.dll!NavigationController::Destroy+0x2df [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:458 ]'], [21208053, 'chrome.dll!TabStripModel::InternalCloseTabContentsAt+0x175 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:560 ]'], [20661873, 'chrome.dll!Browser::CloseContents+0xa1 [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:810 ]'], [21019423, 'chrome.dll!WebContents::Close+0x1f [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:836 ]'], [21356349, 'chrome.dll!RenderViewHost::OnMessageReceived+0x1fd [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host.cc:616 ]'], [20394864, 'chrome.dll!RenderProcessHost::OnMessageReceived+0x170 [ c:\\g\\trunk\\src\\chrome\\browser\\render_process_host.cc:624 ]'], [21472487, 'chrome.dll!RunnableMethod<ProfileWriter,void (__thiscall ProfileWriter::*)(std::vector<history::URLRow,std::allocator<history::URLRow> > c+0x17 [ c:\\g\\trunk\\src\\base\\task.h:312 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19470042, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0x5a [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:369 ]'], [19464514, 
'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]']], 'thread': 3956, 'syscall': 4651, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserShowWindow', 'done': 191.54417699999999, 'ms': 191.24218300000001, 'syscallargs': [24969860, 0, 13791504], 'cpu': 2147742720},
+{'stacktrace': [[23141617, 'chrome.dll!BrowserView2::TabDetachedAt+0x21 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:513 ]'], [21209929, 'chrome.dll!TabStripModel::DetachTabContentsAt+0x169 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:129 ]'], [21211607, 'chrome.dll!TabStripModel::Observe+0x77 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:522 ]'], [19680002, 'chrome.dll!NotificationService::Notify+0x2c2 [ c:\\g\\trunk\\src\\chrome\\common\\notification_service.cc:94 ]'], [21045731, 'chrome.dll!TabContents::Destroy+0x73 [ c:\\g\\trunk\\src\\chrome\\browser\\tab_contents.cc:93 ]'], [21036469, 'chrome.dll!WebContents::Destroy+0xf5 [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:312 ]'], [21008223, 'chrome.dll!NavigationController::Destroy+0x2df [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:458 ]'], [21208053, 'chrome.dll!TabStripModel::InternalCloseTabContentsAt+0x175 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:560 ]'], [20661873, 'chrome.dll!Browser::CloseContents+0xa1 [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:810 ]'], [21019423, 'chrome.dll!WebContents::Close+0x1f [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:836 ]'], [21356349, 'chrome.dll!RenderViewHost::OnMessageReceived+0x1fd [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host.cc:616 ]'], [20394864, 'chrome.dll!RenderProcessHost::OnMessageReceived+0x170 [ c:\\g\\trunk\\src\\chrome\\browser\\render_process_host.cc:624 ]'], [21472487, 'chrome.dll!RunnableMethod<ProfileWriter,void (__thiscall ProfileWriter::*)(std::vector<history::URLRow,std::allocator<history::URLRow> > c+0x17 [ c:\\g\\trunk\\src\\base\\task.h:312 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19470042, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0x5a [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:369 ]'], [19464514, 
'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]']], 'thread': 3956, 'syscall': 4625, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserSetParent', 'done': 191.86209400000001, 'ms': 191.546412, 'syscallargs': [24969860, 0, 13791504], 'cpu': 2147742720},
+{'stacktrace': [[2118236921, 'USER32.dll!GetPropW+0x1e'], [27148313, 'chrome.dll!views::FocusManager::GetFocusManager+0x69 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:228 ]'], [27117841, 'chrome.dll!views::View::GetFocusManager+0x21 [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:299 ]'], [27129730, 'chrome.dll!views::View::PropagateRemoveNotifications+0xb2 [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:665 ]'], [27129647, 'chrome.dll!views::View::PropagateRemoveNotifications+0x5f [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:660 ]'], [27130426, 'chrome.dll!views::View::DoRemoveChildView+0x10a [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:642 ]'], [27130610, 'chrome.dll!views::View::RemoveChildView+0x12 [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:604 ]'], [23146904, 'chrome.dll!BrowserView2::UpdateChildViewAndLayout+0xb8 [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:1049 ]'], [23148971, 'chrome.dll!BrowserView2::TabStripEmpty+0x2b [ c:\\g\\trunk\\src\\chrome\\browser\\views\\frame\\browser_view2.cc:545 ]'], [21209949, 'chrome.dll!TabStripModel::DetachTabContentsAt+0x17d [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:130 ]'], [21211607, 'chrome.dll!TabStripModel::Observe+0x77 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:522 ]'], [19680002, 'chrome.dll!NotificationService::Notify+0x2c2 [ c:\\g\\trunk\\src\\chrome\\common\\notification_service.cc:94 ]'], [21045731, 'chrome.dll!TabContents::Destroy+0x73 [ c:\\g\\trunk\\src\\chrome\\browser\\tab_contents.cc:93 ]'], [21036469, 'chrome.dll!WebContents::Destroy+0xf5 [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:312 ]'], [21008223, 'chrome.dll!NavigationController::Destroy+0x2df [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:458 ]'], [21208053, 'chrome.dll!TabStripModel::InternalCloseTabContentsAt+0x175 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:560 ]']], 'thread': 3956, 'syscall': 4680, 'eventtype': 'EVENT_TYPE_SYSCALL', 
'syscallname': 'user32.dll!NtUserValidateHandleSecure', 'done': 206.04797500000001, 'ms': 193.17455200000001, 'syscallargs': [9961988, 1, 0], 'cpu': 2147742720},
+{'stacktrace': [[21036469, 'chrome.dll!WebContents::Destroy+0xf5 [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:312 ]'], [21008223, 'chrome.dll!NavigationController::Destroy+0x2df [ c:\\g\\trunk\\src\\chrome\\browser\\navigation_controller.cc:458 ]'], [21208053, 'chrome.dll!TabStripModel::InternalCloseTabContentsAt+0x175 [ c:\\g\\trunk\\src\\chrome\\browser\\tabs\\tab_strip_model.cc:560 ]'], [20661873, 'chrome.dll!Browser::CloseContents+0xa1 [ c:\\g\\trunk\\src\\chrome\\browser\\browser.cc:810 ]'], [21019423, 'chrome.dll!WebContents::Close+0x1f [ c:\\g\\trunk\\src\\chrome\\browser\\web_contents.cc:836 ]'], [21356349, 'chrome.dll!RenderViewHost::OnMessageReceived+0x1fd [ c:\\g\\trunk\\src\\chrome\\browser\\render_view_host.cc:616 ]'], [20394864, 'chrome.dll!RenderProcessHost::OnMessageReceived+0x170 [ c:\\g\\trunk\\src\\chrome\\browser\\render_process_host.cc:624 ]'], [21472487, 'chrome.dll!RunnableMethod<ProfileWriter,void (__thiscall ProfileWriter::*)(std::vector<history::URLRow,std::allocator<history::URLRow> > c+0x17 [ c:\\g\\trunk\\src\\base\\task.h:312 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19470042, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0x5a [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:369 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19407817, 'chrome.dll!MessageLoop::RunInternal+0xa9 [ c:\\g\\trunk\\src\\base\\message_loop.cc:192 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19409081, 'chrome.dll!MessageLoopForUI::Run+0x49 [ c:\\g\\trunk\\src\\base\\message_loop.cc:559 ]'], [20405134, 'chrome.dll!BrowserMain+0x100e [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:583 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]']], 'thread': 3956, 'syscall': 4451, 'eventtype': 
'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserDestroyWindow', 'done': 206.599163, 'ms': 206.26448300000001, 'syscallargs': [24969860, 14286184, 0], 'cpu': 2147742720},
+{'stacktrace': [[2118224400, 'USER32.dll!DispatchMessageW+0xf'], [27107759, 'chrome.dll!views::AcceleratorHandler::Dispatch+0x4f [ c:\\g\\trunk\\src\\chrome\\views\\accelerator_handler.cc:32 ]'], [19466592, 'chrome.dll!base::MessagePumpWin::ProcessMessageHelper+0x60 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:285 ]'], [19470020, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0x44 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:364 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19407817, 'chrome.dll!MessageLoop::RunInternal+0xa9 [ c:\\g\\trunk\\src\\base\\message_loop.cc:192 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19409081, 'chrome.dll!MessageLoopForUI::Run+0x49 [ c:\\g\\trunk\\src\\base\\message_loop.cc:559 ]'], [20405134, 'chrome.dll!BrowserMain+0x100e [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:583 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 4453, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserDispatchMessage', 'done': 213.76627500000001, 'ms': 207.22298499999999, 'syscallargs': [1242896, 1242896, 1242896], 'cpu': 2147742720},
+{'stacktrace': [[26995006, 'chrome.dll!gfx::BitmapPlatformDeviceWin::drawToHDC+0x9e [ c:\\g\\trunk\\src\\base\\gfx\\bitmap_platform_device_win.cc:378 ]'], [21604437, 'chrome.dll!gfx::CanvasPaintT<ChromeCanvas>::~CanvasPaintT<ChromeCanvas>+0x55 [ c:\\g\\trunk\\src\\base\\gfx\\platform_canvas_win.h:127 ]'], [27139267, 'chrome.dll!views::ContainerWin::OnPaint+0x13 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:623 ]'], [27142851, 'chrome.dll!views::ContainerWin::WndProc+0xc3 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:917 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [2118224400, 'USER32.dll!DispatchMessageW+0xf'], [27107759, 'chrome.dll!views::AcceleratorHandler::Dispatch+0x4f [ c:\\g\\trunk\\src\\chrome\\views\\accelerator_handler.cc:32 ]']], 'thread': 3956, 'syscall': 4109, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'gdi32.dll!NtGdiBitBlt', 'done': 211.82440800000001, 'ms': 209.57859199999999, 'syscallargs': [2231437465, 5, 79], 'cpu': 2147742720},
+{'stacktrace': [[26996791, 'chrome.dll!gfx::BitmapPlatformDeviceWin::~BitmapPlatformDeviceWin+0x37 [ c:\\g\\trunk\\src\\base\\gfx\\bitmap_platform_device_win.cc:328 ]'], [23980661, 'chrome.dll!DeviceCM::~DeviceCM+0x35 [ c:\\g\\trunk\\src\\skia\\sgl\\skcanvas.cpp:83 ]'], [23986073, 'chrome.dll!SkCanvas::~SkCanvas+0x29 [ c:\\g\\trunk\\src\\skia\\sgl\\skcanvas.cpp:410 ]'], [27139267, 'chrome.dll!views::ContainerWin::OnPaint+0x13 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:623 ]'], [27142851, 'chrome.dll!views::ContainerWin::WndProc+0xc3 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:917 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151927, 'chrome.dll!views::FocusWindowCallback+0x187 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:187 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [2118224400, 'USER32.dll!DispatchMessageW+0xf']], 'thread': 3956, 'syscall': 4218, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'gdi32.dll!DeleteObject', 'done': 213.752027, 'ms': 211.89536699999999, 'syscallargs': [3875865955, 14288496, 14663680], 'cpu': 2147742720},
+{'stacktrace': [[2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 3500, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 214.63649699999999, 'ms': 214.08056099999999, 'syscallargs': [10, 32505136, 1], 'cpu': 3124053392},
+{'stacktrace': [[2089998945, 'ntdll.dll!RtlDeregisterWaitEx+0xe6'], [2088960121, 'kernel32.dll!UnregisterWaitEx+0x17'], [23815564, 'chrome.dll!base::ObjectWatcher::StopWatching+0x7c [ c:\\g\\trunk\\src\\base\\object_watcher.cc:84 ]'], [19696699, 'chrome.dll!IPC::SyncChannel::SyncContext::OnChannelClosed+0xb [ c:\\g\\trunk\\src\\chrome\\common\\ipc_sync_channel.cc:339 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19470254, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0x5e [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:533 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 214.64711299999999, 'ms': 214.09145599999999, 'syscallargs': [2, 18348976, 1], 'cpu': 2147742720},
+{'stacktrace': [[27141588, 'chrome.dll!views::ContainerWin::Hide+0x14 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:350 ]'], [27116658, 'chrome.dll!views::Window::Close+0x32 [ c:\\g\\trunk\\src\\chrome\\views\\window.cc:130 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19470042, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0x5a [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:369 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19407817, 'chrome.dll!MessageLoop::RunInternal+0xa9 [ c:\\g\\trunk\\src\\base\\message_loop.cc:192 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19409081, 'chrome.dll!MessageLoopForUI::Run+0x49 [ c:\\g\\trunk\\src\\base\\message_loop.cc:559 ]'], [20405134, 'chrome.dll!BrowserMain+0x100e [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:583 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 4642, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserSetWindowPos', 'done': 214.54654199999999, 'ms': 214.19063, 'syscallargs': [9961988, 0, 0], 'cpu': 3124053392},
+{'stacktrace': [[19405904, 'chrome.dll!MessageLoop::RunTask+0x80 [ c:\\g\\trunk\\src\\base\\message_loop.cc:309 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19470042, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0x5a [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:369 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19407817, 'chrome.dll!MessageLoop::RunInternal+0xa9 [ c:\\g\\trunk\\src\\base\\message_loop.cc:192 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19409081, 'chrome.dll!MessageLoopForUI::Run+0x49 [ c:\\g\\trunk\\src\\base\\message_loop.cc:559 ]'], [20405134, 'chrome.dll!BrowserMain+0x100e [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:583 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 4451, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserDestroyWindow', 'done': 218.16459900000001, 'ms': 214.58872600000001, 'syscallargs': [9961988, 13966744, 0], 'cpu': 3124053392},
+{'stacktrace': [[2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 3500, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 220.37158400000001, 'ms': 214.64068800000001, 'syscallargs': [10, 32505136, 1], 'cpu': 3124053392},
+{'stacktrace': [[27117134, 'chrome.dll!views::Window::OnActivate+0xe [ c:\\g\\trunk\\src\\chrome\\views\\window.cc:364 ]'], [27134780, 'chrome.dll!views::ContainerWin::_ProcessWindowMessage+0x19c [ c:\\g\\trunk\\src\\chrome\\views\\container_win.h:184 ]'], [27142851, 'chrome.dll!views::ContainerWin::WndProc+0xc3 [ c:\\g\\trunk\\src\\chrome\\views\\container_win.cc:917 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118239807, 'USER32.dll!CallWindowProcAorW+0x51'], [2118239845, 'USER32.dll!CallWindowProcW+0x1b'], [27151819, 'chrome.dll!views::FocusWindowCallback+0x11b [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:173 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [19405904, 'chrome.dll!MessageLoop::RunTask+0x80 [ c:\\g\\trunk\\src\\base\\message_loop.cc:309 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19470042, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0x5a [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:369 ]']], 'thread': 3956, 'syscall': 4536, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserGetWindowPlacement', 'done': 215.06532300000001, 'ms': 214.72868800000001, 'syscallargs': [9961988, 1241292, 13830668], 'cpu': 2147742720},
+{'stacktrace': [[2089816330, 'ntdll.dll!RtlLeaveCriticalSection+0x1d'], [22426797, 'chrome.dll!malloc+0x7a [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\malloc.c:163 ]']], 'thread': 2688, 'syscall': 220, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtSetEventBoostPriority', 'done': 215.099684, 'ms': 214.74600799999999, 'syscallargs': [264, 4248, 45021424], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 219.80195800000001, 'ms': 214.832053, 'syscallargs': [1, 18348996, 1], 'cpu': 2147742720},
+{'stacktrace': [[19407956, 'chrome.dll!MessageLoop::PostTask+0x14 [ c:\\g\\trunk\\src\\base\\message_loop.cc:231 ]'], [27201478, 'chrome.dll!views::RootView::SchedulePaint+0xb6 [ c:\\g\\trunk\\src\\chrome\\views\\root_view.cc:109 ]'], [27119984, 'chrome.dll!views::View::SchedulePaint+0x60 [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:328 ]'], [27119984, 'chrome.dll!views::View::SchedulePaint+0x60 [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:328 ]'], [27119984, 'chrome.dll!views::View::SchedulePaint+0x60 [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:328 ]'], [27117948, 'chrome.dll!views::View::SchedulePaint+0x2c [ c:\\g\\trunk\\src\\chrome\\views\\view.cc:333 ]'], [27151048, 'chrome.dll!views::FocusManager::OnPostActivate+0x48 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:405 ]'], [27151840, 'chrome.dll!views::FocusWindowCallback+0x130 [ c:\\g\\trunk\\src\\chrome\\views\\focus_manager.cc:175 ]'], [2118223668, 'USER32.dll!InternalCallWinProc+0x28'], [2118223894, 'USER32.dll!UserCallWinProcCheckWow+0xb7'], [2118235328, 'USER32.dll!DispatchClientMessage+0x4d'], [2118235404, 'USER32.dll!__fnDWORD+0x24'], [2089872099, 'ntdll.dll!KiUserCallbackDispatcher+0x13'], [19405904, 'chrome.dll!MessageLoop::RunTask+0x80 [ c:\\g\\trunk\\src\\base\\message_loop.cc:309 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19470042, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0x5a [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:369 ]']], 'thread': 3956, 'syscall': 4571, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserPostMessage', 'done': 215.729094, 'ms': 215.09437700000001, 'syscallargs': [9437720, 1025, 13662856], 'cpu': 2147742720},
+{'stacktrace': [[2089910840, 'ntdll.dll!RtlpExtendHeap+0x88'], [2089884790, 'ntdll.dll!RtlAllocateHeap+0x16a2'], [22426797, 'chrome.dll!malloc+0x7a [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\malloc.c:163 ]']], 'thread': 2688, 'syscall': 17, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtAllocateVirtualMemory', 'done': 216.601551, 'ms': 215.89811, 'syscallargs': [4294967295, 45020820, 0], 'cpu': 2147742720},
+{'stacktrace': [[2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 3500, 'eventtype': 'EVENT_TYPE_APC', 'func_addr_name': 'ntdll.dll!RtlpDeregisterWait+0x0', 'ret_addr': 2089872071, 'done': 220.35901200000001, 'func_addr': 2089998983, 'ms': 217.798351, 'cpu': 3124053392},
+{'stacktrace': [[2089999079, 'ntdll.dll!RtlpDeregisterWait+0x60'], [1717963930, 'failed'], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 3500, 'syscall': 219, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtSetEvent', 'done': 220.35677699999999, 'ms': 217.836904, 'syscallargs': [256, 0, 1465296], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19469071, 'chrome.dll!base::MessagePumpForIO::WaitForWork+0x18f [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:613 ]'], [19470371, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0xd3 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:562 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 285.42538200000001, 'ms': 219.85503700000001, 'syscallargs': [1, 18348996, 1], 'cpu': 3124053392},
+{'stacktrace': [[2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 3500, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 302.48732699999999, 'ms': 220.374098, 'syscallargs': [9, 32505136, 1], 'cpu': 3124053392},
+{'stacktrace': [[2089911260, 'ntdll.dll!RtlpDeCommitFreeBlock+0x672c'], [2089881290, 'ntdll.dll!RtlFreeHeap+0xa8d'], [22426564, 'chrome.dll!free+0x6e [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\free.c:110 ]'], [27028667, 'chrome.dll!PrefService::SavePersistentPrefs+0x15b [ c:\\g\\trunk\\src\\chrome\\common\\pref_service.cc:209 ]'], [20562380, 'chrome.dll!browser_shutdown::Shutdown+0x11c [ c:\\g\\trunk\\src\\chrome\\browser\\browser_shutdown.cc:118 ]'], [20405156, 'chrome.dll!BrowserMain+0x1024 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:604 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 83, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtFreeVirtualMemory', 'done': 221.13285300000001, 'ms': 220.45483400000001, 'syscallargs': [4294967295, 1242484, 1242488], 'cpu': 2147742720},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19467175, 'chrome.dll!base::MessagePumpForUI::WaitForWork+0x27 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:408 ]'], [19470141, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0xbd [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:393 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2736, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 287.21471600000001, 'ms': 221.18006600000001, 'syscallargs': [1, 15858672, 1], 'cpu': 3124053392},
+{'stacktrace': [[2088772914, 'kernel32.dll!WaitForSingleObject+0x12'], [19417840, 'chrome.dll!PlatformThread::Join+0x60 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:94 ]'], [23799942, 'chrome.dll!base::Thread::Stop+0xc6 [ c:\\g\\trunk\\src\\base\\thread.cc:112 ]'], [23800270, 'chrome.dll!base::Thread::~Thread+0xe [ c:\\g\\trunk\\src\\base\\thread.cc:45 ]'], [20409619, 'chrome.dll!HistoryService::Cleanup+0xc3 [ c:\\g\\trunk\\src\\chrome\\browser\\history\\history.cc:174 ]'], [20855337, 'chrome.dll!ProfileImpl::~ProfileImpl+0xc9 [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:605 ]'], [20856251, "chrome.dll!ProfileImpl::`scalar deleting destructor'+0xb"], [20599735, 'chrome.dll!ProfileManager::~ProfileManager+0x67 [ c:\\g\\trunk\\src\\chrome\\browser\\profile_manager.cc:47 ]'], [20600411, "chrome.dll!ProfileManager::`scalar deleting destructor'+0xb"], [20507748, 'chrome.dll!BrowserProcessImpl::~BrowserProcessImpl+0x64 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.cc:154 ]'], [20508459, "chrome.dll!BrowserProcessImpl::`scalar deleting destructor'+0xb"], [20562398, 'chrome.dll!browser_shutdown::Shutdown+0x12e [ c:\\g\\trunk\\src\\chrome\\browser\\browser_shutdown.cc:119 ]'], [20405156, 'chrome.dll!BrowserMain+0x1024 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:604 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]']], 'thread': 3956, 'syscall': 271, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwWaitForSingleObject', 'waiting': 1, 'done': 285.39185800000001, 'ms': 221.208282, 'syscallargs': [516, 0, 0], 'cpu': 3124053392},
+{'stacktrace': [[2088803065, 'kernel32.dll!VirtualFree+0x15'], [27491400, 'chrome.dll!sqlite3PagerLoadall+0xe8 [ c:\\g\\trunk\\src\\third_party\\sqlite\\pager.c:4532 ]'], [27324147, 'chrome.dll!sqlite3Preload+0x33 [ c:\\g\\trunk\\src\\third_party\\sqlite\\build.c:3381 ]'], [21199165, 'chrome.dll!history::HistoryDatabase::PrimeCache+0x5d [ c:\\g\\trunk\\src\\chrome\\browser\\history\\history_database.cc:106 ]'], [21200727, 'chrome.dll!history::HistoryDatabase::Init+0x117 [ c:\\g\\trunk\\src\\chrome\\browser\\history\\history_database.cc:71 ]'], [20616681, 'chrome.dll!history::HistoryBackend::InitImpl+0x1d9 [ c:\\g\\trunk\\src\\chrome\\browser\\history\\history_backend.cc:508 ]'], [20622856, 'chrome.dll!history::HistoryBackend::Init+0x8 [ c:\\g\\trunk\\src\\chrome\\browser\\history\\history_backend.cc:238 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19486017, 'chrome.dll!base::MessagePumpDefault::Run+0x111 [ c:\\g\\trunk\\src\\base\\message_pump_default.cc:50 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2688, 'syscall': 83, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtFreeVirtualMemory', 'done': 227.42667, 'ms': 226.69361599999999, 'syscallargs': [4294967295, 45021524, 45021528], 'cpu': 2147742720},
+{'stacktrace': [[27392208, 'chrome.dll!winRead+0x20 [ c:\\g\\trunk\\src\\third_party\\sqlite\\os_win.c:1003 ]'], [27524918, 'chrome.dll!sqlite3OsRead+0x16 [ c:\\g\\trunk\\src\\third_party\\sqlite\\os.c:39 ]'], [27484071, 'chrome.dll!readDbPage+0x37 [ c:\\g\\trunk\\src\\third_party\\sqlite\\pager.c:2811 ]'], [27491450, 'chrome.dll!sqlite3PagerAcquire+0x1a [ c:\\g\\trunk\\src\\third_party\\sqlite\\pager.c:3113 ]'], [27352637, 'chrome.dll!sqlite3BtreeGetPage+0x1d [ c:\\g\\trunk\\src\\third_party\\sqlite\\btree.c:938 ]'], [27358074, 'chrome.dll!moveToChild+0x2a [ c:\\g\\trunk\\src\\third_party\\sqlite\\btree.c:2971 ]'], [27358534, 'chrome.dll!moveToLeftmost+0x36 [ c:\\g\\trunk\\src\\third_party\\sqlite\\btree.c:3086 ]'], [27516795, 'chrome.dll!sqlite3VdbeExec+0x318b [ c:\\g\\trunk\\src\\third_party\\sqlite\\vdbe.c:3838 ]'], [27336190, 'chrome.dll!sqlite3Step+0x11e [ c:\\g\\trunk\\src\\third_party\\sqlite\\vdbeapi.c:247 ]'], [27336432, 'chrome.dll!sqlite3_step+0x10 [ c:\\g\\trunk\\src\\third_party\\sqlite\\vdbeapi.c:303 ]'], [27314864, 'chrome.dll!sqlite3_exec+0xb0 [ c:\\g\\trunk\\src\\third_party\\sqlite\\legacy.c:76 ]'], [21366866, 'chrome.dll!history::InMemoryDatabase::InitFromDisk+0x162 [ c:\\g\\trunk\\src\\chrome\\browser\\history\\in_memory_database.cc:82 ]'], [20617111, 'chrome.dll!history::HistoryBackend::InitImpl+0x387 [ c:\\g\\trunk\\src\\chrome\\browser\\history\\history_backend.cc:528 ]'], [20622856, 'chrome.dll!history::HistoryBackend::Init+0x8 [ c:\\g\\trunk\\src\\chrome\\browser\\history\\history_backend.cc:238 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19486017, 'chrome.dll!base::MessagePumpDefault::Run+0x111 [ c:\\g\\trunk\\src\\base\\message_pump_default.cc:50 ]']], 'thread': 2688, 'syscall': 183, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtReadFile', 'done': 257.96155700000003, 'ms': 257.69392499999998, 'syscallargs': [272, 0, 0], 'cpu': 2147742720},
+{'thread': 2688, 'eventtype': 'EVENT_TYPE_THREADEXIT', 'cpu': 2147742720, 'ms': 284.95968099999999, 'stacktrace': [[2088810121, 'kernel32.dll!BaseThreadStart+0x3d']]},
+{'stacktrace': [[19407956, 'chrome.dll!MessageLoop::PostTask+0x14 [ c:\\g\\trunk\\src\\base\\message_loop.cc:231 ]'], [20855572, 'chrome.dll!ProfileImpl::~ProfileImpl+0x1b4 [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:621 ]'], [20856251, "chrome.dll!ProfileImpl::`scalar deleting destructor'+0xb"], [20599735, 'chrome.dll!ProfileManager::~ProfileManager+0x67 [ c:\\g\\trunk\\src\\chrome\\browser\\profile_manager.cc:47 ]'], [20600411, "chrome.dll!ProfileManager::`scalar deleting destructor'+0xb"], [20507748, 'chrome.dll!BrowserProcessImpl::~BrowserProcessImpl+0x64 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.cc:154 ]'], [20508459, "chrome.dll!BrowserProcessImpl::`scalar deleting destructor'+0xb"], [20562398, 'chrome.dll!browser_shutdown::Shutdown+0x12e [ c:\\g\\trunk\\src\\chrome\\browser\\browser_shutdown.cc:119 ]'], [20405156, 'chrome.dll!BrowserMain+0x1024 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:604 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 4571, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserPostMessage', 'done': 286.96077300000002, 'ms': 285.515897, 'syscallargs': [6685238, 1025, 13825696], 'cpu': 2147742720},
+{'stacktrace': [[19407956, 'chrome.dll!MessageLoop::PostTask+0x14 [ c:\\g\\trunk\\src\\base\\message_loop.cc:231 ]'], [27028623, 'chrome.dll!PrefService::SavePersistentPrefs+0x12f [ c:\\g\\trunk\\src\\chrome\\common\\pref_service.cc:204 ]'], [20855685, 'chrome.dll!ProfileImpl::~ProfileImpl+0x225 [ c:\\g\\trunk\\src\\chrome\\browser\\profile.cc:633 ]'], [20856251, "chrome.dll!ProfileImpl::`scalar deleting destructor'+0xb"], [20599735, 'chrome.dll!ProfileManager::~ProfileManager+0x67 [ c:\\g\\trunk\\src\\chrome\\browser\\profile_manager.cc:47 ]'], [20600411, "chrome.dll!ProfileManager::`scalar deleting destructor'+0xb"], [20507748, 'chrome.dll!BrowserProcessImpl::~BrowserProcessImpl+0x64 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.cc:154 ]'], [20508459, "chrome.dll!BrowserProcessImpl::`scalar deleting destructor'+0xb"], [20562398, 'chrome.dll!browser_shutdown::Shutdown+0x12e [ c:\\g\\trunk\\src\\chrome\\browser\\browser_shutdown.cc:119 ]'], [20405156, 'chrome.dll!BrowserMain+0x1024 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:604 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 4571, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'user32.dll!NtUserPostMessage', 'done': 287.78713499999998, 'ms': 287.20828999999998, 'syscallargs': [8585726, 1025, 13802976], 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19467175, 'chrome.dll!base::MessagePumpForUI::WaitForWork+0x27 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:408 ]'], [19470141, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0xbd [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:393 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2736, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 287.99358599999999, 'ms': 287.77847500000001, 'syscallargs': [1, 15858672, 1], 'cpu': 3124053392},
+{'stacktrace': [[2088772914, 'kernel32.dll!WaitForSingleObject+0x12'], [19417840, 'chrome.dll!PlatformThread::Join+0x60 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:94 ]'], [23799942, 'chrome.dll!base::Thread::Stop+0xc6 [ c:\\g\\trunk\\src\\base\\thread.cc:112 ]'], [20503505, "chrome.dll!`anonymous namespace'::BrowserProcessSubThread::`scalar deleting destructor'+0x11"], [20507877, 'chrome.dll!BrowserProcessImpl::~BrowserProcessImpl+0xe5 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.cc:172 ]'], [20508459, "chrome.dll!BrowserProcessImpl::`scalar deleting destructor'+0xb"], [20562398, 'chrome.dll!browser_shutdown::Shutdown+0x12e [ c:\\g\\trunk\\src\\chrome\\browser\\browser_shutdown.cc:119 ]'], [20405156, 'chrome.dll!BrowserMain+0x1024 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:604 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 271, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwWaitForSingleObject', 'waiting': 1, 'done': 300.546019, 'ms': 287.99023299999999, 'syscallargs': [196, 0, 0], 'cpu': 3124053392},
+{'stacktrace': [[2118227449, 'USER32.dll!RealMsgWaitForMultipleObjectsEx+0xd9'], [19467175, 'chrome.dll!base::MessagePumpForUI::WaitForWork+0x27 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:408 ]'], [19470141, 'chrome.dll!base::MessagePumpForUI::DoRunLoop+0xbd [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:393 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 2736, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 300.57563199999998, 'ms': 288.06845600000003, 'syscallargs': [1, 15858672, 1], 'cpu': 3124053392},
+{'stacktrace': [[2088772914, 'kernel32.dll!WaitForSingleObject+0x12'], [19417840, 'chrome.dll!PlatformThread::Join+0x60 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:94 ]'], [23799942, 'chrome.dll!base::Thread::Stop+0xc6 [ c:\\g\\trunk\\src\\base\\thread.cc:112 ]'], [23800337, "chrome.dll!base::Thread::`scalar deleting destructor'+0x11"], [20807416, 'chrome.dll!SafeBrowsingService::OnIOShutdown+0xd8 [ c:\\g\\trunk\\src\\chrome\\browser\\safe_browsing\\safe_browsing_service.cc:124 ]'], [19405904, 'chrome.dll!MessageLoop::RunTask+0x80 [ c:\\g\\trunk\\src\\base\\message_loop.cc:309 ]'], [19408986, 'chrome.dll!MessageLoop::DoWork+0x1ea [ c:\\g\\trunk\\src\\base\\message_loop.cc:416 ]'], [19470254, 'chrome.dll!base::MessagePumpForIO::DoRunLoop+0x5e [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:533 ]'], [19464514, 'chrome.dll!base::MessagePumpWin::RunWithDispatcher+0x42 [ c:\\g\\trunk\\src\\base\\message_pump_win.cc:134 ]'], [19397518, 'chrome.dll!base::MessagePumpWin::Run+0xe [ c:\\g\\trunk\\src\\base\\message_pump_win.h:124 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 708, 'syscall': 271, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwWaitForSingleObject', 'waiting': 1, 'done': 300.18396200000001, 'ms': 299.48918099999997, 'syscallargs': [288, 0, 0], 'cpu': 2147742720},
+{'thread': 2136, 'eventtype': 'EVENT_TYPE_THREADEXIT', 'cpu': 3124053392, 'ms': 299.871352, 'stacktrace': [[2088810121, 'kernel32.dll!BaseThreadStart+0x3d']]},
+{'stacktrace': [[2088772914, 'kernel32.dll!WaitForSingleObject+0x12'], [19495803, 'chrome.dll!base::WaitableEvent::Wait+0x1b [ c:\\g\\trunk\\src\\base\\waitable_event_win.cc:42 ]'], [19485939, 'chrome.dll!base::MessagePumpDefault::Run+0xc3 [ c:\\g\\trunk\\src\\base\\message_pump_default.cc:43 ]'], [19407831, 'chrome.dll!MessageLoop::RunInternal+0xb7 [ c:\\g\\trunk\\src\\base\\message_loop.cc:197 ]'], [19408240, 'chrome.dll!MessageLoop::RunHandler+0xa0 [ c:\\g\\trunk\\src\\base\\message_loop.cc:181 ]'], [19410461, 'chrome.dll!MessageLoop::Run+0x3d [ c:\\g\\trunk\\src\\base\\message_loop.cc:155 ]'], [23799658, 'chrome.dll!base::Thread::ThreadMain+0x8a [ c:\\g\\trunk\\src\\base\\thread.cc:159 ]'], [19417293, "chrome.dll!`anonymous namespace'::ThreadFunc+0xd [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:27 ]"], [2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 1884, 'syscall': 271, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwWaitForSingleObject', 'waiting': 1, 'done': 301.27013299999999, 'ms': 299.88085100000001, 'syscallargs': [284, 0, 0], 'cpu': 2147742720},
+{'thread': 708, 'eventtype': 'EVENT_TYPE_THREADEXIT', 'cpu': 3124053392, 'ms': 300.33398099999999, 'stacktrace': [[2088810121, 'kernel32.dll!BaseThreadStart+0x3d']]},
+{'stacktrace': [[2088772914, 'kernel32.dll!WaitForSingleObject+0x12'], [19417840, 'chrome.dll!PlatformThread::Join+0x60 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:94 ]'], [23799942, 'chrome.dll!base::Thread::Stop+0xc6 [ c:\\g\\trunk\\src\\base\\thread.cc:112 ]'], [20503505, "chrome.dll!`anonymous namespace'::BrowserProcessSubThread::`scalar deleting destructor'+0x11"], [20507946, 'chrome.dll!BrowserProcessImpl::~BrowserProcessImpl+0x12a [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.cc:183 ]'], [20508459, "chrome.dll!BrowserProcessImpl::`scalar deleting destructor'+0xb"], [20562398, 'chrome.dll!browser_shutdown::Shutdown+0x12e [ c:\\g\\trunk\\src\\chrome\\browser\\browser_shutdown.cc:119 ]'], [20405156, 'chrome.dll!BrowserMain+0x1024 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:604 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 271, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwWaitForSingleObject', 'waiting': 1, 'done': 300.95026000000001, 'ms': 300.57758699999999, 'syscallargs': [156, 0, 0], 'cpu': 2147742720},
+{'thread': 2736, 'eventtype': 'EVENT_TYPE_THREADEXIT', 'cpu': 3124053392, 'ms': 300.73039999999997, 'stacktrace': [[2088810121, 'kernel32.dll!BaseThreadStart+0x3d']]},
+{'stacktrace': [[2088772914, 'kernel32.dll!WaitForSingleObject+0x12'], [19417840, 'chrome.dll!PlatformThread::Join+0x60 [ c:\\g\\trunk\\src\\base\\platform_thread_win.cc:94 ]'], [23799942, 'chrome.dll!base::Thread::Stop+0xc6 [ c:\\g\\trunk\\src\\base\\thread.cc:112 ]'], [20503505, "chrome.dll!`anonymous namespace'::BrowserProcessSubThread::`scalar deleting destructor'+0x11"], [20508363, 'chrome.dll!BrowserProcessImpl::~BrowserProcessImpl+0x2cb [ c:\\g\\trunk\\src\\chrome\\browser\\browser_process_impl.cc:203 ]'], [20508459, "chrome.dll!BrowserProcessImpl::`scalar deleting destructor'+0xb"], [20562398, 'chrome.dll!browser_shutdown::Shutdown+0x12e [ c:\\g\\trunk\\src\\chrome\\browser\\browser_shutdown.cc:119 ]'], [20405156, 'chrome.dll!BrowserMain+0x1024 [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:604 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 271, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwWaitForSingleObject', 'waiting': 1, 'done': 301.49865399999999, 'ms': 301.267899, 'syscallargs': [280, 0, 0], 'cpu': 2147742720},
+{'thread': 1884, 'eventtype': 'EVENT_TYPE_THREADEXIT', 'cpu': 3124053392, 'ms': 301.33801899999997, 'stacktrace': [[2088810121, 'kernel32.dll!BaseThreadStart+0x3d']]},
+{'stacktrace': [[2088804469, 'kernel32.dll!WaitForMultipleObjects+0x18'], [20822595, 'chrome.dll!chrome_browser_net::DnsMaster::ShutdownSlaves+0x243 [ c:\\g\\trunk\\src\\chrome\\browser\\net\\dns_master.cc:358 ]'], [20522695, 'chrome.dll!chrome_browser_net::ShutdownDnsPrefetch+0x77 [ c:\\g\\trunk\\src\\chrome\\browser\\net\\dns_global.cc:372 ]'], [20405195, 'chrome.dll!BrowserMain+0x104b [ c:\\g\\trunk\\src\\chrome\\browser\\browser_main.cc:604 ]'], [19354554, 'chrome.dll!ChromeMain+0x83a [ c:\\g\\trunk\\src\\chrome\\app\\chrome_dll_main.cc:224 ]'], [4205604, 'chrome.exe!wWinMain+0x304 [ c:\\g\\trunk\\src\\chrome\\app\\chrome_exe_main.cc:103 ]'], [4482934, 'chrome.exe!__tmainCRTStartup+0x176 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0.c:324 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 302.38675599999999, 'ms': 302.15851500000002, 'syscallargs': [1, 1242668, 0], 'cpu': 3124053392},
+{'thread': 2812, 'eventtype': 'EVENT_TYPE_THREADEXIT', 'cpu': 2147742720, 'ms': 302.17639400000002, 'stacktrace': [[2088810121, 'kernel32.dll!BaseThreadStart+0x3d']]},
+{'stacktrace': [[2088810115, 'kernel32.dll!BaseThreadStart+0x37']], 'thread': 3500, 'syscall': 270, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!NtWaitForMultipleObjects', 'waiting': 1, 'done': 324.76809200000002, 'ms': 302.48956199999998, 'syscallargs': [8, 32505136, 1], 'cpu': 3124053392},
+{'thread': 2428, 'eventtype': 'EVENT_TYPE_THREADEXIT', 'cpu': 3124053392, 'ms': 304.13334700000001, 'stacktrace': [[2088810121, 'kernel32.dll!BaseThreadStart+0x3d']]},
+{'stacktrace': [[2088772914, 'kernel32.dll!WaitForSingleObject+0x12'], [4226553, 'chrome.exe!sandbox::TargetProcess::~TargetProcess+0x19 [ c:\\g\\trunk\\src\\sandbox\\src\\target_process.cc:103 ]'], [4228125, 'chrome.exe!sandbox::PolicyBase::OnJobEmpty+0xdd [ c:\\g\\trunk\\src\\sandbox\\src\\sandbox_policy_base.cc:171 ]'], [4215782, 'chrome.exe!sandbox::BrokerServicesBase::FreeResources+0xc6 [ c:\\g\\trunk\\src\\sandbox\\src\\broker_services.cc:124 ]'], [4216698, 'chrome.exe!sandbox::BrokerServicesBase::~BrokerServicesBase+0xfa [ c:\\g\\trunk\\src\\sandbox\\src\\broker_services.cc:101 ]'], [4212403, 'chrome.exe!sandbox::SingletonBase<sandbox::BrokerServicesBase>::OnExit+0x13 [ c:\\g\\trunk\\src\\sandbox\\src\\win_utils.h:58 ]'], [4484605, 'chrome.exe!exit+0xd [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0dat.c:398 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 271, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwWaitForSingleObject', 'waiting': 1, 'done': 324.702721, 'ms': 304.33728300000001, 'syscallargs': [424, 0, 1244444], 'cpu': 2147742720},
+{'stacktrace': [[2088881646, 'kernel32.dll!ExitProcess+0x14'], [4484030, 'chrome.exe!__crtExitProcess+0x14 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0dat.c:683 ]'], [4484605, 'chrome.exe!exit+0xd [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0dat.c:398 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']], 'thread': 3956, 'syscall': 257, 'eventtype': 'EVENT_TYPE_SYSCALL', 'syscallname': 'ntdll.dll!ZwTerminateProcess', 'done': 325.70620000000002, 'ms': 325.44387599999999, 'syscallargs': [0, 0, 241550694], 'cpu': 2147742720},
+{'thread': 3956, 'eventtype': 'EVENT_TYPE_PROCESSEXIT', 'cpu': 3124053392, 'ms': 325.70871399999999, 'stacktrace': [[2088881646, 'kernel32.dll!ExitProcess+0x14'], [4484030, 'chrome.exe!__crtExitProcess+0x14 [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0dat.c:683 ]'], [4484605, 'chrome.exe!exit+0xd [ f:\\sp\\vctools\\crt_bld\\self_x86\\crt\\src\\crt0dat.c:398 ]'], [2088857559, 'kernel32.dll!BaseProcessStart+0x23']]},
+])
diff --git a/chromium/tools/traceline/svgui/traceline.css b/chromium/tools/traceline/svgui/traceline.css
new file mode 100644
index 00000000000..98bf4450a77
--- /dev/null
+++ b/chromium/tools/traceline/svgui/traceline.css
@@ -0,0 +1,73 @@
+@namespace url(http://www.w3.org/1999/xhtml);
+@namespace svg url(http://www.w3.org/2000/svg);
+
+body {
+ /* Trim down the default top margin of 8px, so we have a bit more space. */
+ margin-top: 4px;
+}
+
+div.threadnamediv {
+ height: 16px;
+}
+div.threadnamesdiv {
+ margin-right: 2px;
+ position: absolute;
+ font-family: monospace;
+ z-index: -1;
+}
+
+/* The fakescrolldiv will have the scroll bar, the amount it scrolls is
+ controlled by the size of fattydiv within it. */
+div.fakescrolldiv {
+ /* This needs to be at least 16px, the height of the scrollbar, to have the
+ scrollbar renderer in firefox */
+ /* Making the height 16px in webkit causes the computed width to be 16px
+ shorter than what we specify. 17px is enough to cause it to be correct */
+ height: 17px;
+ width: 1008px;
+ overflow: auto;
+}
+
+div.fattydiv {
+ /* the div needs to have a height for the scrollbar to render in firefox */
+ height: 1px;
+}
+
+div.infoareadiv {
+ margin-top: 4px;
+ padding: 4px;
+ border: 1px solid gray;
+ height: 350px;
+ overflow: auto;
+ font-family: monospace;
+ white-space: pre;
+}
+
+svg|rect.thread {
+ /* fill: #f4f4f6; */
+ fill: #efeff2;
+ opacity: 0.7;
+}
+
+svg|rect.event {
+ fill: #f6a120;
+}
+
+svg|rect.eventwaiting {
+ fill: #62ccf3;
+}
+
+svg|rect.event:hover, svg|rect.eventwaiting:hover {
+ fill: #d92129;
+}
+
+svg|line.eventline {
+ stroke: #bad432;
+ stroke-width: 2px;
+ stroke-opacity: 0.7;
+}
+
+svg|line.eventline:hover {
+ stroke: #d92129;
+ stroke-opacity: 1;
+}
diff --git a/chromium/tools/traceline/svgui/traceline.js b/chromium/tools/traceline/svgui/traceline.js
new file mode 100644
index 00000000000..33cc2dfa388
--- /dev/null
+++ b/chromium/tools/traceline/svgui/traceline.js
@@ -0,0 +1,693 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO
+// - spacial partitioning of the data so that we don't have to scan the
+// entire scene every time we render.
+// - properly clip the SVG elements when they render, right now we are just
+// letting them go negative or off the screen. This might give us a little
+// bit better performance?
+// - make the lines for thread creation work again. Figure out a better UI
+// than these lines, because they can be a bit distracting.
+// - Implement filters, so that you can filter on specific event types, etc.
+// - Make the callstack box collapsable or scrollable or something, it takes
+// up a lot of screen realestate now.
+// - Figure out better ways to preserve screen realestate.
+// - Make the thread bar heights configurable, figure out a better way to
+// handle overlapping events (the pushdown code).
+// - "Sticky" info, so you can click on something, and it will stay. Now
+// if you need to scroll the page you usually lose the info because you
+// will mouse over something else on your way to scrolling.
+// - Help / legend
+// - Loading indicator / debug console.
+// - OH MAN BETTER COLORS PLEASE
+//
+// Dean McNamee <deanm@chromium.org>
+
+// Man... namespaces are such a pain.
+var svgNS = 'http://www.w3.org/2000/svg';
+var xhtmlNS = 'http://www.w3.org/1999/xhtml';
+
+function toHex(num) {
+ var str = "";
+ var table = "0123456789abcdef";
+ for (var i = 0; i < 8; ++i) {
+ str = table.charAt(num & 0xf) + str;
+ num >>= 4;
+ }
+ return str;
+}
+
+// a TLThread represents information about a thread in the traceline data.
+// A thread has a list of all events that happened on that thread, the start
+// and end time of the thread, the thread id, and name, etc.
+function TLThread(id, startms, endms) {
+ this.id = id;
+ // Default the name to the thread id, but if the application uses
+ // thread naming, we might see a THREADNAME event later and update.
+ this.name = "thread_" + id;
+ this.startms = startms;
+ this.endms = endms;
+ this.events = [ ];
+};
+
+TLThread.prototype.duration_ms =
+function() {
+ return this.endms - this.startms;
+};
+
+TLThread.prototype.AddEvent =
+function(e) {
+ this.events.push(e);
+};
+
+TLThread.prototype.toString =
+function() {
+ var res = "TLThread -- id: " + this.id + " name: " + this.name +
+ " startms: " + this.startms + " endms: " + this.endms +
+ " parent: " + this.parent;
+ return res;
+};
+
+// A TLEvent represents a single logged event that happened on a thread.
+function TLEvent(e) {
+ this.eventtype = e['eventtype'];
+ this.thread = toHex(e['thread']);
+ this.cpu = toHex(e['cpu']);
+ this.ms = e['ms'];
+ this.done = e['done'];
+ this.e = e;
+}
+
+function HTMLEscape(str) {
+ return str.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
+}
+
+TLEvent.prototype.toString =
+function() {
+ var res = "<b>ms:</b> " + this.ms + " " +
+ "<b>event:</b> " + this.eventtype + " " +
+ "<b>thread:</b> " + this.thread + " " +
+ "<b>cpu:</b> " + this.cpu + "<br/>";
+ if ('ldrinfo' in this.e) {
+ res += "<b>ldrinfo:</b> " + this.e['ldrinfo'] + "<br/>";
+ }
+ if ('done' in this.e && this.e['done'] > 0) {
+ res += "<b>done:</b> " + this.e['done'] + " ";
+ res += "<b>duration:</b> " + (this.e['done'] - this.ms) + "<br/>";
+ }
+ if ('syscall' in this.e) {
+ res += "<b>syscall:</b> " + this.e['syscall'];
+ if ('syscallname' in this.e) {
+ res += " <b>syscallname:</b> " + this.e['syscallname'];
+ }
+ if ('retval' in this.e) {
+ res += " <b>retval:</b> " + this.e['retval'];
+ }
+ res += "<br/>"
+ }
+ if ('func_addr' in this.e) {
+ res += "<b>func_addr:</b> " + toHex(this.e['func_addr']);
+ if ('func_addr_name' in this.e) {
+ res += " <b>func_addr_name:</b> " + HTMLEscape(this.e['func_addr_name']);
+ }
+ res += "<br/>"
+ }
+ if ('stacktrace' in this.e) {
+ var stack = this.e['stacktrace'];
+ res += "<b>stacktrace:</b><br/>";
+ for (var i = 0; i < stack.length; ++i) {
+ res += "0x" + toHex(stack[i][0]) + " - " +
+ HTMLEscape(stack[i][1]) + "<br/>";
+ }
+ }
+
+ return res;
+}
+
+// The trace logger dumps all log events to a simple JSON array. We delay
+// and background load the JSON, since it can be large. When the JSON is
+// loaded, parseEvents(...) is called and passed the JSON data. To make
+// things easier, we do a few passes on the data to group them together by
+// thread, gather together some useful pieces of data in a single place,
+// and form more of a structure out of the data. We also build links
+// between related events, for example a thread creating a new thread, and
+// the new thread starting to run. This structure is fairly close to what
+// we want to represent in the interface.
+
+// Delay load the JSON data. We want to display the order in the order it was
+// passed to us. Since we have no way of correlating the json callback to
+// which script element it was called on, we load them one at a time.
+
+function JSONLoader(json_urls) {
+ this.urls_to_load = json_urls;
+ this.script_element = null;
+}
+
+JSONLoader.prototype.IsFinishedLoading =
+function() { return this.urls_to_load.length == 0; };
+
+// Start loading of the next JSON URL.
+JSONLoader.prototype.LoadNext =
+function() {
+ var sc = document.createElementNS(
+ 'http://www.w3.org/1999/xhtml', 'script');
+ this.script_element = sc;
+
+ sc.setAttribute("src", this.urls_to_load[0]);
+ document.getElementsByTagNameNS(xhtmlNS, 'body')[0].appendChild(sc);
+};
+
+// Callback counterpart to load_next, should be called when the script element
+// is finished loading. Returns the URL that was just loaded.
+JSONLoader.prototype.DoneLoading =
+function() {
+ // Remove the script element from the DOM.
+ this.script_element.parentNode.removeChild(this.script_element);
+ this.script_element = null;
+ // Return the URL that had just finished loading.
+ return this.urls_to_load.shift();
+};
+
+var loader = null;
+
+function loadJSON(json_urls) {
+ loader = new JSONLoader(json_urls);
+ if (!loader.IsFinishedLoading())
+ loader.LoadNext();
+}
+
+var traceline = new Traceline();
+
+// Called from the JSON with the log event array.
+function parseEvents(json) {
+ loader.DoneLoading();
+
+ var done = loader.IsFinishedLoading();
+ if (!done)
+ loader.LoadNext();
+
+ traceline.ProcessJSON(json);
+
+ if (done)
+ traceline.Render();
+}
+
+// The Traceline class represents our entire state, all of the threads from
+// all sets of data, all of the events, DOM elements, etc.
+function Traceline() {
+ // The array of threads that existed in the program. Hopefully in order
+ // they were created. This includes all threads from all sets of data.
+ this.threads = [ ];
+
+ // Keep a mapping of where in the list of threads a set starts...
+ this.thread_set_indexes = [ ];
+
+ // Map a thread id to the index in the threads array. A thread ID is the
+ // unique ID from the OS, along with our set id of which data file we were.
+ this.threads_by_id = { };
+
+ // The last event time of all of our events.
+ this.endms = 0;
+
+ // Constants for SVG rendering...
+ this.kThreadHeightPx = 16;
+ this.kTimelineWidthPx = 1008;
+}
+
+// Called to add another set of data into the traceline.
+Traceline.prototype.ProcessJSON =
+function(json_data) {
+ // Keep track of which threads belong to which sets of data...
+ var set_id = this.thread_set_indexes.length;
+ this.thread_set_indexes.push(this.threads.length);
+
+ // TODO make this less hacky. Used to connect related events, like creating
+ // a thread and then having that thread run (two separate events which are
+ // related but come in at different times, etc).
+ var tiez = { };
+
+ // Run over the data, building TLThread's and TLEvents, and doing some
+ // processing to put things in an easier to display form...
+ for (var i = 0, il = json_data.length; i < il; ++i) {
+ var e = new TLEvent(json_data[i]);
+
+ // Create a unique identifier for a thread by using the id of this data
+ // set, so that they are isolated from other sets of data with the same
+ // thread id, etc. TODO don't overwrite the original...
+ e.thread = set_id + '_' + e.thread;
+
+ // If this is the first event ever seen on this thread, create a new
+ // thread object and add it to our lists of threads.
+ if (!(e.thread in this.threads_by_id)) {
+ var end_ms = e.done ? e.done : e.ms;
+ var new_thread = new TLThread(e.thread, e.ms, end_ms);
+ this.threads_by_id[new_thread.id] = this.threads.length;
+ this.threads.push(new_thread);
+ }
+
+ var thread = this.threads[this.threads_by_id[e.thread]];
+ thread.AddEvent(e);
+
+ // Keep trace of the time of the last event seen.
+ var end_ms = e.done ? e.done : e.ms;
+ if (end_ms > this.endms) this.endms = end_ms;
+ if (end_ms > thread.endms) thread.endms = end_ms;
+
+ switch(e.eventtype) {
+ case 'EVENT_TYPE_THREADNAME':
+ thread.name = e.e['threadname'];
+ break;
+ case 'EVENT_TYPE_CREATETHREAD':
+ tiez[e.e['eventid']] = e;
+ break;
+ case 'EVENT_TYPE_THREADBEGIN':
+ var pei = e.e['parenteventid'];
+ if (pei in tiez) {
+ e.parentevent = tiez[pei];
+ tiez[pei].childevent = e;
+ }
+ break;
+ }
+ }
+};
+
+Traceline.prototype.Render =
+function() { this.RenderSVG(); };
+
+Traceline.prototype.RenderText =
+function() {
+ var z = document.getElementsByTagNameNS(xhtmlNS, 'body')[0];
+ for (var i = 0, il = this.threads.length; i < il; ++i) {
+ var p = document.createElementNS(
+ 'http://www.w3.org/1999/xhtml', 'p');
+ p.innerHTML = this.threads[i].toString();
+ z.appendChild(p);
+ }
+};
+
+// Oh man, so here we go. For two reasons, I implement my own scrolling
+// system. First off, is that in order to scale, we want to have as little
+// on the DOM as possible. This means not having off-screen elements in the
+// DOM, as this slows down everything. This comes at a cost of more expensive
+// scrolling performance since you have to re-render the scene. The second
+// reason is a bug I stumbled into:
+// https://bugs.webkit.org/show_bug.cgi?id=21968
+// This means that scrolling an SVG element doesn't really work properly
+// anyway. So what the code does is this. We have our layout that looks like:
+// [ thread names ] [ svg timeline ]
+// [ scroll bar ]
+// We make a fake scrollbar, which doesn't actually have the SVG inside of it,
+// we want for when this scrolls, with some debouncing, and then when it has
+// scrolled we rerender the scene. This means that the SVG element is never
+// scrolled, and coordinates are always at 0. We keep the scene in millisecond
+// units which also helps for zooming. We do our own hit testing and decide
+// what needs to be renderer, convert from milliseconds to SVG pixels, and then
+// draw the update into the static SVG element... Y coordinates are still
+// always in pixels (since we aren't paging along the Y axis), but this might
+// be something to fix up later.
+
+function SVGSceneLine(msg, klass, x1, y1, x2, y2) {
+ this.type = SVGSceneLine;
+ this.msg = msg;
+ this.klass = klass;
+
+ this.x1 = x1;
+ this.y1 = y1;
+ this.x2 = x2;
+ this.y2 = y2;
+
+ this.hittest = function(startms, dur) {
+ return true;
+ };
+}
+
+function SVGSceneRect(msg, klass, x, y, width, height) {
+ this.type = SVGSceneRect;
+ this.msg = msg;
+ this.klass = klass;
+
+ this.x = x;
+ this.y = y;
+ this.width = width;
+ this.height = height;
+
+ this.hittest = function(startms, dur) {
+ return this.x <= (startms + dur) &&
+ (this.x + this.width) >= startms;
+ };
+}
+
+Traceline.prototype.RenderSVG =
+function() {
+ var threadnames = this.RenderSVGCreateThreadNames();
+ var scene = this.RenderSVGCreateScene();
+
+ var curzoom = 8;
+
+ // The height is static after we've created the scene
+ var dom = this.RenderSVGCreateDOM(threadnames, scene.height);
+
+ dom.zoom(curzoom);
+
+ dom.attach();
+
+ var draw = (function(obj) {
+ return function(scroll, total) {
+ var startms = (scroll / total) * obj.endms;
+
+ var start = (new Date).getTime();
+ var count = obj.RenderSVGRenderScene(dom, scene, startms, curzoom);
+ var total = (new Date).getTime() - start;
+
+ dom.infoareadiv.innerHTML =
+ 'Scene render of ' + count + ' nodes took: ' + total + ' ms';
+ };
+ })(this, dom, scene);
+
+ // Paint the initial paint with no scroll
+ draw(0, 1);
+
+ // Hook us up to repaint on scrolls.
+ dom.redraw = draw;
+};
+
+
+// Create all of the DOM elements for the SVG scene.
+Traceline.prototype.RenderSVGCreateDOM =
+function(threadnames, svgheight) {
+
+ // Total div holds the container and the info area.
+ var totaldiv = document.createElementNS(xhtmlNS, 'div');
+
+ // Container holds the thread names, SVG element, and fake scroll bar.
+ var container = document.createElementNS(xhtmlNS, 'div');
+ container.className = 'container';
+
+ // This is the div that holds the thread names along the left side, this is
+ // done in HTML for easier/better text support than SVG.
+ var threadnamesdiv = document.createElementNS(xhtmlNS, 'div');
+ threadnamesdiv.className = 'threadnamesdiv';
+
+ // Add all of the names into the div, these are static and don't update.
+ for (var i = 0, il = threadnames.length; i < il; ++i) {
+ var div = document.createElementNS(xhtmlNS, 'div');
+ div.className = 'threadnamediv';
+ div.appendChild(document.createTextNode(threadnames[i]));
+ threadnamesdiv.appendChild(div);
+ }
+
+ // SVG div goes along the right side, it holds the SVG element and our fake
+ // scroll bar.
+ var svgdiv = document.createElementNS(xhtmlNS, 'div');
+ svgdiv.className = 'svgdiv';
+
+ // The SVG element, static width, and we will update the height after we've
+ // walked through how many threads we have and know the size.
+ var svg = document.createElementNS(svgNS, 'svg');
+ svg.setAttributeNS(null, 'height', svgheight);
+ svg.setAttributeNS(null, 'width', this.kTimelineWidthPx);
+
+ // The fake scroll div is an outer div with a fixed size with a scroll.
+ var fakescrolldiv = document.createElementNS(xhtmlNS, 'div');
+ fakescrolldiv.className = 'fakescrolldiv';
+
+ // Fatty is inside the fake scroll div to give us the size we want to scroll.
+ var fattydiv = document.createElementNS(xhtmlNS, 'div');
+ fattydiv.className = 'fattydiv';
+ fakescrolldiv.appendChild(fattydiv);
+
+ var infoareadiv = document.createElementNS(xhtmlNS, 'div');
+ infoareadiv.className = 'infoareadiv';
+ infoareadiv.innerHTML = 'Hover an event...';
+
+ // Set the SVG mouseover handler to write the data to the infoarea.
+ svg.addEventListener('mouseover', (function(infoarea) {
+ return function(e) {
+ if ('msg' in e.target && e.target.msg) {
+ infoarea.innerHTML = e.target.msg;
+ }
+ e.stopPropagation(); // not really needed, but might as well.
+ };
+ })(infoareadiv), true);
+
+
+ svgdiv.appendChild(svg);
+ svgdiv.appendChild(fakescrolldiv);
+
+ container.appendChild(threadnamesdiv);
+ container.appendChild(svgdiv);
+
+ totaldiv.appendChild(container);
+ totaldiv.appendChild(infoareadiv);
+
+ var widthms = Math.floor(this.endms + 2);
+ // Make member variables out of the things we want to 'export', things that
+ // will need to be updated each time we redraw the scene.
+ var obj = {
+ // The root of our piece of the DOM.
+ 'totaldiv': totaldiv,
+ // We will want to listen for scrolling on the fakescrolldiv
+ 'fakescrolldiv': fakescrolldiv,
+ // The SVG element will of course need updating.
+ 'svg': svg,
+ // The area we update with the info on mouseovers.
+ 'infoareadiv': infoareadiv,
+ // Called when we detected new scroll a should redraw
+ 'redraw': function() { },
+ 'attached': false,
+ 'attach': function() {
+ document.getElementsByTagNameNS(xhtmlNS, 'body')[0].appendChild(
+ this.totaldiv);
+ this.attached = true;
+ },
+ // The fatty div will have its width adjusted based on the zoom level and
+ // the duration of the graph, to get the scrolling correct for the size.
+ 'zoom': function(curzoom) {
+ var width = widthms * curzoom;
+ fattydiv.style.width = width + 'px';
+ },
+ 'detach': function() {
+ this.totaldiv.parentNode.removeChild(this.totaldiv);
+ this.attached = false;
+ },
+ };
+
+ // Watch when we get scroll events on the fake scrollbar and debounce. We
+ // need to give it a pointer to use in the closer to call this.redraw();
+ fakescrolldiv.addEventListener('scroll', (function(theobj) {
+ var seqnum = 0;
+ return function(e) {
+ seqnum = (seqnum + 1) & 0xffff;
+ window.setTimeout((function(myseqnum) {
+ return function() {
+ if (seqnum == myseqnum) {
+ theobj.redraw(e.target.scrollLeft, e.target.scrollWidth);
+ }
+ };
+ })(seqnum), 100);
+ };
+ })(obj), false);
+
+ return obj;
+};
+
+Traceline.prototype.RenderSVGCreateThreadNames =
+function() {
+ // This names is the list to show along the left hand size.
+ var threadnames = [ ];
+
+ for (var i = 0, il = this.threads.length; i < il; ++i) {
+ var thread = this.threads[i];
+
+ // TODO make this not so stupid...
+ if (i != 0) {
+ for (var j = 0; j < this.thread_set_indexes.length; j++) {
+ if (i == this.thread_set_indexes[j]) {
+ threadnames.push('------');
+ break;
+ }
+ }
+ }
+
+ threadnames.push(thread.name);
+ }
+
+ return threadnames;
+};
+
+Traceline.prototype.RenderSVGCreateScene =
+function() {
+ // This scene is just a list of SVGSceneRect and SVGSceneLine, in no great
+ // order. In the future they should be structured to make range checking
+ // faster.
+ var scene = [ ];
+
+ // Remember, for now, Y (height) coordinates are still in pixels, since we
+ // don't zoom or scroll in this direction. X coordinates are milliseconds.
+
+ var lasty = 0;
+ for (var i = 0, il = this.threads.length; i < il; ++i) {
+ var thread = this.threads[i];
+
+ // TODO make this not so stupid...
+ if (i != 0) {
+ for (var j = 0; j < this.thread_set_indexes.length; j++) {
+ if (i == this.thread_set_indexes[j]) {
+ lasty += this.kThreadHeightPx;
+ break;
+ }
+ }
+ }
+
+ // For this thread, create the background thread (blue band);
+ scene.push(new SVGSceneRect(null,
+ 'thread',
+ thread.startms,
+ 1 + lasty,
+ thread.duration_ms(),
+ this.kThreadHeightPx - 2));
+
+ // Now create all of the events...
+ var pushdown = [ 0, 0, 0, 0 ];
+ for (var j = 0, jl = thread.events.length; j < jl; ++j) {
+ var e = thread.events[j];
+
+ var y = 2 + lasty;
+
+ // TODO this is a hack just so that we know the correct why position
+ // so we can create the threadline...
+ if (e.childevent) {
+ e.marky = y;
+ }
+
+ // Handle events that we want to represent as lines and not event blocks,
+ // right now this is only thread creation. We map an event back to its
+ // "parent" event, and now lets add a line to represent that.
+ if (e.parentevent) {
+ var eparent = e.parentevent;
+ var msg = eparent.toString() + '<br/>' + e.toString();
+ scene.push(
+ new SVGSceneLine(msg, 'eventline',
+ eparent.ms, eparent.marky + 5, e.ms, lasty + 5));
+ }
+
+ // We get negative done values (well, really, it was 0 and then made
+ // relative to start time) when a syscall never returned...
+ var dur = 0;
+ if ('done' in e.e && e.e['done'] > 0) {
+ dur = e.e['done'] - e.ms;
+ }
+
+ // TODO skip short events for now, but eventually we should figure out
+ // a way to control this from the UI, etc.
+ if (dur < 0.2)
+ continue;
+
+ var width = dur;
+
+ // Try to find an available horizontal slot for our event.
+ for (var z = 0; z < pushdown.length; ++z) {
+ var found = false;
+ var slot = z;
+ if (pushdown[z] < e.ms) {
+ found = true;
+ }
+ if (!found) {
+ if (z != pushdown.length - 1)
+ continue;
+ slot = Math.floor(Math.random() * pushdown.length);
+ alert('blah');
+ }
+
+ pushdown[slot] = e.ms + dur;
+ y += slot * 4;
+ break;
+ }
+
+
+ // Create the event
+ klass = e.e.waiting ? 'eventwaiting' : 'event';
+ scene.push(
+ new SVGSceneRect(e.toString(), klass, e.ms, y, width, 3));
+
+ // If there is a "parentevent", we want to make a line there.
+ // TODO
+ }
+
+ lasty += this.kThreadHeightPx;
+ }
+
+ return {
+ 'scene': scene,
+ 'width': this.endms + 2,
+ 'height': lasty,
+ };
+};
+
+Traceline.prototype.RenderSVGRenderScene =
+function(dom, scene, startms, curzoom) {
+ var stuff = scene.scene;
+ var svg = dom.svg;
+
+ var count = 0;
+
+ // Remove everything from the DOM.
+ while (svg.firstChild)
+ svg.removeChild(svg.firstChild);
+
+ // Don't actually need this, but you can't transform on an svg element,
+ // so it's nice to have a <g> around for transforms...
+ var svgg = document.createElementNS(svgNS, 'g');
+
+ var dur = this.kTimelineWidthPx / curzoom;
+
+ function min(a, b) {
+ return a < b ? a : b;
+ }
+
+ function max(a, b) {
+ return a > b ? a : b;
+ }
+
+ function timeToPixel(x) {
+ // TODO(deanm): This clip is a bit shady.
+ var x = min(max(Math.floor(x*curzoom), -100), 2000);
+ return (x == 0 ? 1 : x);
+ }
+
+ for (var i = 0, il = stuff.length; i < il; ++i) {
+ var thing = stuff[i];
+ if (!thing.hittest(startms, startms+dur))
+ continue;
+
+
+ if (thing.type == SVGSceneRect) {
+ var rect = document.createElementNS('http://www.w3.org/2000/svg', 'rect');
+ rect.setAttributeNS(null, 'class', thing.klass)
+ rect.setAttributeNS(null, 'x', timeToPixel(thing.x - startms));
+ rect.setAttributeNS(null, 'y', thing.y);
+ rect.setAttributeNS(null, 'width', timeToPixel(thing.width));
+ rect.setAttributeNS(null, 'height', thing.height);
+ rect.msg = thing.msg;
+ svgg.appendChild(rect);
+ } else if (thing.type == SVGSceneLine) {
+ var line = document.createElementNS('http://www.w3.org/2000/svg', 'line');
+ line.setAttributeNS(null, 'class', thing.klass)
+ line.setAttributeNS(null, 'x1', timeToPixel(thing.x1 - startms));
+ line.setAttributeNS(null, 'y1', thing.y1);
+ line.setAttributeNS(null, 'x2', timeToPixel(thing.x2 - startms));
+ line.setAttributeNS(null, 'y2', thing.y2);
+ line.msg = thing.msg;
+ svgg.appendChild(line);
+ }
+
+ ++count;
+ }
+
+ // Append the 'g' element on after we've build it.
+ svg.appendChild(svgg);
+
+ return count;
+};
diff --git a/chromium/tools/traceline/svgui/traceline.xml b/chromium/tools/traceline/svgui/traceline.xml
new file mode 100644
index 00000000000..375744d069b
--- /dev/null
+++ b/chromium/tools/traceline/svgui/traceline.xml
@@ -0,0 +1,11 @@
+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
+<head>
+ <title>Traceline SVG</title>
+
+ <link href="traceline.css" rel="stylesheet" type="text/css" />
+ <script src="traceline.js"></script>
+</head>
+
+<body onload="loadJSON(document.location.hash.substr(1).split(','));">
+</body>
+</html>
diff --git a/chromium/tools/traceline/traceline/Makefile b/chromium/tools/traceline/traceline/Makefile
new file mode 100644
index 00000000000..d5ac5b626a2
--- /dev/null
+++ b/chromium/tools/traceline/traceline/Makefile
@@ -0,0 +1,30 @@
+all:
+ cl \
+ sidestep/ia32_modrm_map.cc \
+ sidestep/ia32_opcode_map.cc \
+ sidestep/mini_disassembler.cc \
+ main.cc \
+ /Fetraceline.exe \
+ /D_WIN32_WINNT=0x0500 \
+ /D_HAS_EXCEPTIONS=0 \
+ "/I." \
+ /Zi \
+ /link \
+ kernel32.lib user32.lib dbghelp.lib powrprof.lib
+
+tests:
+ cl \
+ assembler_unittest.cc \
+ /Fassembler_unittest.exe \
+ /D_WIN32_WINNT=0x0500 \
+ /D_HAS_EXCEPTIONS=0 \
+ /Zi \
+ /link \
+ kernel32.lib user32.lib dbghelp.lib powrprof.lib
+
+stubs : stubs.asm
+ nasm -o stubs stubs.asm
+ ndisasm -u stubs
+
+clean:
+ rm -f *.obj {cpuinfo,traceline,assembler_unittest,vc80}.{exe,ilk,pdb} stubs
diff --git a/chromium/tools/traceline/traceline/README b/chromium/tools/traceline/traceline/README
new file mode 100644
index 00000000000..97ceec8b664
--- /dev/null
+++ b/chromium/tools/traceline/traceline/README
@@ -0,0 +1,21 @@
+Traceline is a Windows utility to intercept, time, and log system calls. This
+is achieved by injecting code into a target process, along with dynamically
+generated assembly hook stubs. One of the major goals was to skew performance
+timings as little as possible. This led to a design in which the log buffer
+(which is called the playground) is kept within the process, and the logger
+routines use atomic instructions to log their events to this buffer. At the
+end of the process's lifetime, this buffer is pulled out of the process and
+used to generate JSON output. In addition to hooking system call activity,
+other hooks of interest have been written, including heap allocation functions.
+Symbols are supported with a command line flag. This works by capturing the
+process shutdown, and doing an intrusive symbol attach with dbghelp.dll.
+
+NOTES:
+ - You should copy dbghelp.dll from a windbg installation into this directory.
+ The version shipped with Windows is old, and symbol support won't work.
+ - You will need a bit of cygwin if you want to use the Makefile. Otherwise
+ it is pretty clear how to build the files manually.
+ - The output JSON data will be printed out stdout. It is likely that you
+ will want to pipe the output of this program into a file.
+
+Dean McNamee <deanm@chromium.org>
diff --git a/chromium/tools/traceline/traceline/assembler.h b/chromium/tools/traceline/traceline/assembler.h
new file mode 100644
index 00000000000..111fd6f01f3
--- /dev/null
+++ b/chromium/tools/traceline/traceline/assembler.h
@@ -0,0 +1,578 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Copyright (c) 1994-2006 Sun Microsystems Inc.
+// All Rights Reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// - Redistributions of source code must retain the above copyright notice,
+// this list of conditions and the following disclaimer.
+//
+// - Redistribution in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution.
+//
+// - Neither the name of Sun Microsystems or the names of contributors may
+// be used to endorse or promote products derived from this software without
+// specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+// IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// The original source code covered by the above license above has been
+// modified significantly by Google Inc.
+// Copyright 2006-2008 the V8 project authors. All rights reserved.
+
+// This implements a C++ assembler for dynamically generating machine code.
+// It is heavily based on the v8 assembler, which has a long history of its
+// own. Relocation information has been removed, and in general things were
+// made a bit simpler (and slower). Everything is implemented inline.
+
+#ifndef TRACELINE_ASSEMBLER_H_
+#define TRACELINE_ASSEMBLER_H_
+
+#include <windows.h>
+#include <stdio.h>
+#include <string>
+
+#include <stddef.h>
+
+#include "logging.h"
+
+#define ASSERT(x) CHECK(x)
+
+enum Register {
+ EAX = 0,
+ ECX = 1,
+ EDX = 2,
+ EBX = 3,
+ ESP = 4,
+ EBP = 5,
+ ESI = 6,
+ EDI = 7
+};
+
+enum Condition {
+ overflow = 0,
+ no_overflow = 1,
+ below = 2,
+ above_equal = 3,
+ equal = 4,
+ not_equal = 5,
+ below_equal = 6,
+ above = 7,
+ sign = 8,
+ not_sign = 9,
+ parity_even = 10,
+ parity_odd = 11,
+ less = 12,
+ greater_equal = 13,
+ less_equal = 14,
+ greater = 15,
+
+ // aliases
+ zero = equal,
+ not_zero = not_equal,
+ negative = sign,
+ positive = not_sign
+};
+
+// Labels are used for branching, and marks an offset in the CodeBuffer.
+// A label can be in 3 states:
+// - Unused, the label has never be used in an instruction.
+// - Linked, the label has been referenced (by a jump, for example), but the
+// target is not yet known, because the label is unbound.
+// - Bound, the label has been bound so the offset is known.
+class Label {
+ public:
+ Label() { Unuse(); }
+ ~Label() { ASSERT(!is_linked()); }
+
+ void Unuse() {
+ num_ = 0;
+ }
+
+ bool is_unused() const { return num_ == 0; }
+ bool is_bound() const { return num_ == -1; }
+ bool is_linked() const { return num_ > 0; }
+
+ int binding_pos() const {
+ ASSERT(is_bound());
+ return table_[0];
+ }
+
+ int num_links() const {
+ ASSERT(!is_bound());
+ return num_; // Will return 0 if unused.
+ }
+
+ int link_pos(int i) const {
+ ASSERT(is_linked());
+ ASSERT(i < num_);
+ return table_[i];
+ }
+
+ private:
+ void bind_to(int pos) {
+ ASSERT(!is_bound());
+ table_[0] = pos;
+ num_ = -1;
+ }
+ void link_to(int pos) {
+ ASSERT(!is_bound());
+ ASSERT(num_ < kTableSize);
+
+ table_[num_] = pos;
+ ++num_;
+ }
+
+ static const int kTableSize = 3;
+
+ // We store all links in a fixed size table. When we're bound, we store the
+ // binding position in the first entry of the table.
+ int table_[kTableSize];
+ // The number of entries in our table, if we're linked. If 0, then we're
+ // unusued. If -1, then we are bound (and the pos is at table_[0]).
+ int num_;
+
+ friend class CodeBuffer; // For binding, linking, etc
+};
+
+
+enum ScaleFactor {
+ SCALE_TIMES_1 = 0,
+ SCALE_TIMES_2 = 1,
+ SCALE_TIMES_4 = 2,
+ SCALE_TIMES_8 = 3
+};
+
+
+class Operand {
+ public:
+ explicit Operand(const Operand& x) : len_(x.len_) {
+ memcpy(buf_, x.buf_, sizeof(buf_));
+ }
+
+ // reg
+ explicit Operand(Register reg) {
+ Init(reg);
+ }
+
+ // [disp/r]
+ explicit Operand(int disp) {
+ Init(disp);
+ }
+
+ // [base + disp/r]
+ Operand(Register base, int disp) {
+ Init(base, disp);
+ }
+
+ // [base + index*scale + disp/r]
+ Operand(Register base, Register index, ScaleFactor scale, int disp) {
+ Init(base, index, scale, disp);
+ }
+
+ // [index*scale + disp/r]
+ Operand(Register index, ScaleFactor scale, int disp) {
+ Init(index, scale, disp);
+ }
+
+ void set_reg(Register reg) {
+ ASSERT(len_ > 0);
+ buf_[0] = (buf_[0] & ~0x38) | static_cast<char>(reg << 3);
+ }
+
+ char* data() { return buf_; }
+ int length() { return len_; }
+
+ private:
+ // reg
+ void Init(Register reg) {
+ set_modrm(3, reg);
+ }
+
+ // [disp/r]
+ void Init(int disp) {
+ set_modrm(0, EBP);
+ set_dispr(disp);
+ }
+
+ // [base + disp/r]
+ void Init(Register base, int disp) {
+ if (disp == 0) {
+ // [base]
+ set_modrm(0, base);
+ if (base == ESP) set_sib(SCALE_TIMES_1, ESP, base);
+ } else if (is_int8(disp)) {
+ // [base + disp8]
+ set_modrm(1, base);
+ if (base == ESP) set_sib(SCALE_TIMES_1, ESP, base);
+ set_disp8(disp);
+ } else {
+ // [base + disp/r]
+ set_modrm(2, base);
+ if (base == ESP) set_sib(SCALE_TIMES_1, ESP, base);
+ set_dispr(disp);
+ }
+ }
+
+ // [base + index*scale + disp/r]
+ void Init(Register base,
+ Register index,
+ ScaleFactor scale,
+ int disp) {
+ ASSERT(index != ESP); // illegal addressing mode
+ if (disp == 0 && base != EBP) {
+ // [base + index*scale]
+ set_modrm(0, ESP);
+ set_sib(scale, index, base);
+ } else if (is_int8(disp)) {
+ // [base + index*scale + disp8]
+ set_modrm(1, ESP);
+ set_sib(scale, index, base);
+ set_disp8(disp);
+ } else {
+ // [base + index*scale + disp/r]
+ set_modrm(2, ESP);
+ set_sib(scale, index, base);
+ set_dispr(disp);
+ }
+ }
+
+ // [index*scale + disp/r]
+ void Init(Register index,
+ ScaleFactor scale,
+ int disp) {
+ ASSERT(index != ESP); // illegal addressing mode
+ // We can reduce instruction size by translating instructions of the form:
+ // 8D044510000000 lea eax,[eax*2+0x10]
+ // To the more concise scale=1 version:
+ // 8D440010 lea eax,[eax+eax+0x10]
+ if (scale == SCALE_TIMES_2) {
+ Init(index, index, SCALE_TIMES_1, disp);
+ } else {
+ set_modrm(0, ESP);
+ set_sib(scale, index, EBP);
+ set_dispr(disp);
+ }
+ }
+
+ // Returns true if this Operand is a wrapper for the specified register.
+ bool is_reg(Register reg) const {
+ return ((buf_[0] & 0xF8) == 0xC0) // addressing mode is register only.
+ && ((buf_[0] & 0x07) == reg); // register codes match.
+ }
+
+ void set_modrm(int mod, Register rm) { // reg == 0
+ ASSERT((mod & -4) == 0);
+ buf_[0] = mod << 6 | rm;
+ len_ = 1;
+ }
+
+ void set_sib(ScaleFactor scale, Register index, Register base) {
+ ASSERT(len_ == 1);
+ ASSERT((scale & -4) == 0);
+ buf_[1] = scale << 6 | index << 3 | base;
+ len_ = 2;
+ }
+
+ void set_disp8(char disp) {
+ ASSERT(len_ == 1 || len_ == 2);
+ *reinterpret_cast<char*>(&buf_[len_++]) = disp;
+ }
+
+ void set_dispr(int disp) {
+ ASSERT(len_ == 1 || len_ == 2);
+ *reinterpret_cast<int*>(&buf_[len_]) = disp;
+ len_ += sizeof(int);
+ }
+
+ bool is_int8(int x) { return x >= -128 && x <= 127; }
+
+ // Mutable because reg in ModR/M byte is set by Assembler via set_reg().
+ char buf_[6];
+ // The number of bytes in buf_.
+ unsigned int len_;
+};
+
+// A convenient wrapper around a buffer for emitting code or data, etc.
+class CodeBuffer {
+ public:
+ // Use an externally managed buffer
+ explicit CodeBuffer(char* buf) : pos_(0), buf_(buf) { }
+
+ void* data() { return buf_; }
+ int size() { return pos_; }
+
+ void emit(unsigned char b) {
+ buf_[pos_++] = b;
+ }
+ void emit_word(unsigned short w) {
+ *reinterpret_cast<unsigned short*>(&buf_[pos_]) = w;
+ pos_ += 2;
+ }
+ void emit_dword(unsigned int d) {
+ *reinterpret_cast<unsigned int*>(&buf_[pos_]) = d;
+ pos_ += 4;
+ }
+
+ void emit_bytes(const char* bytes, size_t size) {
+ for (size_t i = 0; i < size; ++i)
+ emit(bytes[i]);
+ }
+
+ void emit_bytes(const std::string& bytes) {
+ emit_bytes(bytes.data(), bytes.size());
+ }
+
+ void put_dword_at(int pos, unsigned int d) {
+ *reinterpret_cast<unsigned int*>(&buf_[pos]) = d;
+ }
+
+ // We pass by value so that we get a copy that we can modify.
+ void emit_operand(Register reg, Operand operand) {
+ operand.set_reg(reg);
+ memcpy(&buf_[pos_], operand.data(), operand.length());
+ pos_ += operand.length();
+ }
+
+ void bind(Label* l) {
+ ASSERT(!l->is_bound());
+ for (int i = 0; i < l->num_links(); ++i) {
+ put_dword_at(l->link_pos(i), pos_ - (l->link_pos(i) + 4));
+ }
+ l->bind_to(pos_);
+ }
+
+ // TODO deprecate blah_imm and use blah(Immediate)
+
+ void add(Register dst, Register src) {
+ emit(0x01); emit(0xc0 | (src << 3) | dst);
+ }
+ void add_imm(Register dst, int d) {
+ if (d >= -128 && d <= 127) {
+ emit(0x83); emit(0xc0 | dst); emit(d & 0xff);
+ } else {
+ emit(0x81); emit(0xc0 | dst); emit_dword(d);
+ }
+ }
+
+ void and_(Register r, unsigned int mask) {
+ emit(0x81); emit(0xe0 | r); emit_dword(mask);
+ }
+
+ void call(Register r) {
+ call(Operand(r));
+ }
+ void call(const Operand& dst) {
+ emit(0xff); emit_operand(EDX, dst);
+ }
+
+ void cmp(Register r1, Register r2) {
+ emit(0x39); emit(0xc0 | (r2 << 3) | r1);
+ }
+
+ void cmp_imm(Register r, int d) {
+ if (d >= -128 && d <= 127) {
+ emit(0x83); emit(0xf8 | r); emit(d & 0xff);
+ } else {
+ emit(0x81); emit(0xf8 | r); emit_dword(d);
+ }
+ }
+
+ void fs() {
+ emit(0x64);
+ }
+
+ // Atomically increment the dword at |mem| with the increment amount in the
+ // register |inc|. Will replace |inc| with the old unincremented value.
+ void inc_atomic(Register mem, Register inc) {
+ // lock xadd [mem], inc
+ emit(0xF0); emit(0x0F); emit(0xC1); emit((inc << 3) | mem);
+ }
+
+ void int3() {
+ emit(0xcc);
+ }
+
+ void jcc(Condition cc, Label* l) {
+ emit(0x0f); emit(0x80 | cc);
+ if (l->is_bound()) {
+ emit_dword(l->binding_pos() - (pos_ + 4));
+ } else {
+ // Will fix up when the label is bound.
+ l->link_to(pos_);
+ emit_dword(0);
+ }
+ }
+
+ void jmp(Register r) {
+ emit(0xff); emit(0xe0 | r);
+ }
+
+ void jmp(Label* l) {
+ if (l->is_bound()) {
+ jmp_rel(l->binding_pos() - (pos_ + 5));
+ } else {
+ // Will fix up when the label is bound.
+ l->link_to(pos_ + 1);
+ jmp_rel(0);
+ }
+ }
+
+ void jmp_rel(int i) {
+ emit(0xe9); emit_dword(i);
+ }
+
+ void jmp_rel_short(char c) {
+ emit(0xeb); emit(c);
+ }
+
+ void lea(Register dst, const Operand& src) {
+ emit(0x8d); emit_operand(dst, src);
+ }
+
+ void lodsb() {
+ emit(0xac);
+ }
+ void lodsd() {
+ emit(0xad);
+ }
+
+ void loop(Label* l) {
+ ASSERT(l->is_bound());
+ int pos = l->binding_pos() - (pos_ + 2);
+ ASSERT(pos >= -128 && pos < 0);
+
+ emit(0xe2); emit(pos & 0xff);
+ }
+
+ void mov(Register dst, Register src) {
+ emit(0x89); emit(0xc0 | (src << 3) | dst);
+ }
+ void mov(Register dst, const Operand& src) {
+ emit(0x8b); emit_operand(dst, src);
+ }
+ void mov_imm(Register r, unsigned int d) {
+ emit(0xb8 | r); emit_dword(d);
+ }
+
+ void movsb() {
+ emit(0xa4);
+ }
+ void movsd() {
+ emit(0xa5);
+ }
+
+ void or_(Register r, unsigned int mask) {
+ emit(0x81); emit(0xc8 | r); emit_dword(mask);
+ }
+
+ void pop(Register r) {
+ emit(0x58 | r);
+ }
+ void pop(const Operand& dst) {
+ emit(0x8f); emit_operand(EAX, dst);
+ }
+
+ void push(Register r) {
+ emit(0x50 | r);
+ }
+ void push(const Operand& src) {
+ emit(0xff); emit_operand(ESI, src);
+ }
+ void push_imm(int i) {
+ if (i >= -128 && i <= 127) {
+ emit(0x6a); emit(i & 0xff);
+ } else {
+ emit(0x68); emit_dword(i);
+ }
+ }
+
+ // Puts the cycle counter into edx:eax.
+ void rdtsc() {
+ emit(0x0F); emit(0x31);
+ }
+
+ void rep() {
+ emit(0xf3);
+ }
+
+ void ret() {
+ ret(0);
+ }
+ void ret(short c) {
+ if (c == 0) {
+ emit(0xc3);
+ } else {
+ emit(0xc2); emit_word(c);
+ }
+ }
+
+ void spin() {
+ jmp_rel_short(-2);
+ }
+
+ void stosb() {
+ emit(0xaa);
+ }
+ void stosd() {
+ emit(0xab);
+ }
+
+ void sysenter() {
+ emit(0x0f); emit(0x34);
+ }
+
+ // Puts a unique cpu identifier into eax, using sidt to fingerprint cores.
+ void which_cpu() {
+ // Make space
+ push(EAX);
+ push(EAX);
+ // sidt [esp+2]
+ emit(0x0f); emit(0x01); emit_operand(ECX, Operand(ESP, 2));
+ pop(EAX);
+ pop(EAX); // sidt address
+ }
+
+ // Puts a unique identifier for the thread we're executing on into eax.
+ void which_thread() {
+ // mov eax, [fs:0x24]
+ emit(0x64); emit(0xa1); emit_dword(0x24);
+ // TODO: We could do this but it will use an encoding that is 1 byte bigger.
+ // fs(); mov(EAX, Operand(0x24));
+ }
+
+ void xchg(Register r1, Register r2) {
+ if (r1 == EAX) {
+ emit(0x90 | r2);
+ } else if (r2 == EAX) {
+ emit(0x90 | r1);
+ } else {
+ xchg(r1, Operand(r2));
+ }
+ }
+ void xchg(Register r1, const Operand& oper) {
+ emit(0x87); emit_operand(r1, oper);
+ }
+
+ private:
+ int pos_;
+ char* buf_;
+};
+
+#endif // TRACELINE_ASSEMBLER_H_
diff --git a/chromium/tools/traceline/traceline/assembler_unittest.cc b/chromium/tools/traceline/traceline/assembler_unittest.cc
new file mode 100644
index 00000000000..e2c17b66012
--- /dev/null
+++ b/chromium/tools/traceline/traceline/assembler_unittest.cc
@@ -0,0 +1,83 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+
+#include "assembler.h"
+
+int main(int argc, char** argv) {
+ char buf[1024];
+
+ CodeBuffer cb(buf);
+
+ // Branching tests first so the offsets are not always adjusting in the
+ // output diassembler when we add new tests.
+
+ cb.spin();
+
+ cb.call(EAX);
+ cb.call(Operand(EAX));
+ cb.call(Operand(EDX, 15));
+
+ cb.fs(); cb.mov(EAX, Operand(3));
+ cb.fs(); cb.mov(EDX, Operand(0x04));
+
+ cb.lea(EAX, Operand(EAX));
+ cb.lea(EAX, Operand(0x12345678));
+ cb.lea(EAX, Operand(EBX, 0x12345678));
+ cb.lea(EAX, Operand(EBX, ECX, SCALE_TIMES_2, 0x12345678));
+ cb.lea(EAX, Operand(ECX, SCALE_TIMES_2, 0x12345678));
+ cb.lea(EAX, Operand(EAX, SCALE_TIMES_2, 0));
+ cb.lea(EAX, Operand(EBX, SCALE_TIMES_2, 0));
+ cb.lea(EBP, Operand(EBP, SCALE_TIMES_2, 1));
+
+ cb.lodsb();
+ cb.lodsd();
+
+ cb.mov(EAX, ECX);
+ cb.mov(ESI, ESP);
+ cb.mov(EAX, Operand(ESP, 0x20));
+ cb.mov(EAX, Operand(EBP, 8));
+ cb.mov_imm(ESP, 1);
+ cb.mov_imm(EAX, 0x12345678);
+
+ cb.pop(EBX);
+ cb.pop(Operand(EBX));
+ cb.pop(Operand(EBX, 0));
+ cb.pop(Operand(EBX, 12));
+
+ cb.push(EBX);
+ cb.push(Operand(EBX));
+ cb.push(Operand(EBX, 0));
+ cb.push(Operand(EDI, -4));
+ cb.push(Operand(EDI, -8));
+ cb.push_imm(0x12);
+ cb.push_imm(0x1234);
+ cb.push(Operand(EBX, 12));
+ cb.push(Operand(ESP, 0x1234));
+
+ cb.ret();
+ cb.ret(0);
+ cb.ret(12);
+
+ cb.stosb();
+ cb.stosd();
+
+ cb.sysenter();
+
+ cb.which_cpu();
+ cb.which_thread();
+
+ cb.xchg(EAX, EAX);
+ cb.xchg(EBX, EAX);
+ cb.xchg(EAX, EBX);
+ cb.xchg(ECX, ESP);
+ cb.xchg(ECX, Operand(ESP));
+ cb.xchg(ECX, Operand(ESP, 5));
+ cb.xchg(ECX, Operand(EDX, 4));
+
+ fwrite(buf, 1, cb.size(), stdout);
+
+ return 0;
+}
diff --git a/chromium/tools/traceline/traceline/assembler_unittest.sh b/chromium/tools/traceline/traceline/assembler_unittest.sh
new file mode 100755
index 00000000000..6be6191bf12
--- /dev/null
+++ b/chromium/tools/traceline/traceline/assembler_unittest.sh
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+./assembler_unittest.exe | ndisasm -u - | cut -c 11-
diff --git a/chromium/tools/traceline/traceline/assembler_unittest.sh.expected b/chromium/tools/traceline/traceline/assembler_unittest.sh.expected
new file mode 100644
index 00000000000..a0d3af30a2d
--- /dev/null
+++ b/chromium/tools/traceline/traceline/assembler_unittest.sh.expected
@@ -0,0 +1,54 @@
+EBFE jmp short 0x0
+FFD0 call eax
+FFD0 call eax
+FF520F call dword near [edx+0xf]
+648B0503000000 mov eax,[dword fs:0x3]
+648B1504000000 mov edx,[dword fs:0x4]
+8D db 0x8D
+C08D0578563412 ror byte [ebp+0x34567805],0x12
+8D8378563412 lea eax,[ebx+0x12345678]
+8D844B78563412 lea eax,[ebx+ecx*2+0x12345678]
+8D840978563412 lea eax,[ecx+ecx+0x12345678]
+8D0400 lea eax,[eax+eax]
+8D041B lea eax,[ebx+ebx]
+8D6C2D01 lea ebp,[ebp+ebp+0x1]
+AC lodsb
+AD lodsd
+89C8 mov eax,ecx
+89E6 mov esi,esp
+8B442420 mov eax,[esp+0x20]
+8B4508 mov eax,[ebp+0x8]
+BC01000000 mov esp,0x1
+B878563412 mov eax,0x12345678
+5B pop ebx
+8FC3 pop ebx
+8F03 pop dword [ebx]
+8F430C pop dword [ebx+0xc]
+53 push ebx
+FFF3 push ebx
+FF33 push dword [ebx]
+FF77FC push dword [edi-0x4]
+FF77F8 push dword [edi-0x8]
+6A12 push byte +0x12
+6834120000 push 0x1234
+FF730C push dword [ebx+0xc]
+FFB42434120000 push dword [esp+0x1234]
+C3 ret
+C3 ret
+C20C00 ret 0xc
+AA stosb
+AB stosd
+0F34 sysenter
+50 push eax
+50 push eax
+0F014C2402 sidt [esp+0x2]
+58 pop eax
+58 pop eax
+64A124000000 mov eax,[fs:0x24]
+90 nop
+93 xchg eax,ebx
+93 xchg eax,ebx
+87CC xchg ecx,esp
+87CC xchg ecx,esp
+874C2405 xchg ecx,[esp+0x5]
+874A04 xchg ecx,[edx+0x4]
diff --git a/chromium/tools/traceline/traceline/dump_syscalls_idarub.rb b/chromium/tools/traceline/traceline/dump_syscalls_idarub.rb
new file mode 100755
index 00000000000..6e0fb8fe9d7
--- /dev/null
+++ b/chromium/tools/traceline/traceline/dump_syscalls_idarub.rb
@@ -0,0 +1,32 @@
+#!/usr/bin/env ruby
+
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is an idarub script for extracting system call numbers from a DLL that
+# has been loaded into the IDA disassembler. The interesting system call stubs
+# are contained in ntdll.dll, user32.dll, gdi32.dll, and imm32.dll.
+
+require 'idarub'
+
+ida, = IdaRub.auto_client
+
+curea = 0
+
+filename = ida.get_root_filename
+
+while true
+ curea = ida.find_binary(
+ curea, ida.BADADDR, 'ba 00 03 fe 7f', 16, ida.SEARCH_DOWN)
+ break if curea == ida.BADADDR
+
+ raise "z" if ida.get_byte(curea - 5) != 0xb8
+
+ syscall = ida.get_long(curea - 4)
+ # Remove the IDA _ prefix and the @argsize trailing decorator...
+ funcname = ida.get_func_name(curea).split('@', 2)[0].split('_', 2)[-1]
+ puts '%d: "%s!%s",' % [syscall, filename, funcname]
+
+ curea += 1
+end
diff --git a/chromium/tools/traceline/traceline/logging.h b/chromium/tools/traceline/traceline/logging.h
new file mode 100644
index 00000000000..bf2e4fa0684
--- /dev/null
+++ b/chromium/tools/traceline/traceline/logging.h
@@ -0,0 +1,28 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TRACELINE_LOGGING_H_
+#define TRACELINE_LOGGING_H_
+
+#include <windows.h>
+#include <stdio.h>
+
+#define CHECK(exp, ...) \
+ if (!(exp)) { \
+ printf("FAILED CHECK: %s\n %s:%d\n", #exp, __FILE__, __LINE__); \
+ printf("\naborted.\n"); \
+ if (::IsDebuggerPresent()) __debugbreak(); \
+ exit(1); \
+ }
+
+#define NOTREACHED(...) \
+ if (1) { \
+ printf("NOTREACHED:\n %s:%d\n", __FILE__, __LINE__); \
+ printf(__VA_ARGS__); \
+ printf("\naborted.\n"); \
+ if (::IsDebuggerPresent()) __debugbreak(); \
+ exit(1); \
+ }
+
+#endif // TRACELINE_LOGGING_H_
diff --git a/chromium/tools/traceline/traceline/main.cc b/chromium/tools/traceline/traceline/main.cc
new file mode 100644
index 00000000000..11d9e52437c
--- /dev/null
+++ b/chromium/tools/traceline/traceline/main.cc
@@ -0,0 +1,1339 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// TODO
+// - Make capturing system call arguments optional and the number configurable.
+// - Lots of places depend on the ABI so that we can modify EAX or EDX, this
+// is safe, but these could be moved to be saved and restored anyway.
+// - Understand the loader better, and make some more meaningful hooks with
+// proper data collection and durations. Right now it's just noise.
+// - Get the returned pointer from AllocateHeap.
+
+#include <windows.h>
+
+#include <stdio.h>
+
+#include <map>
+#include <string>
+
+#include "assembler.h"
+#include "logging.h"
+#include "rdtsc.h"
+#include "sym_resolver.h"
+#include "syscall_map.h"
+
+#include "sidestep/mini_disassembler.h"
+
+namespace {
+
+std::string JSONString(const std::string& str) {
+ static const char hextable[] = "0123456789abcdef";
+ std::string out;
+ out.push_back('"');
+ for (std::string::const_iterator it = str.begin(); it != str.end(); ++it) {
+ unsigned char c = static_cast<unsigned char>(*it);
+ switch (c) {
+ case '\\':
+ case '"':
+ case '\'':
+ out.push_back('\\'); out.push_back(c);
+ break;
+ default:
+ if (c < 20 || c >= 127) {
+ out.push_back('\\'); out.push_back('x');
+ out.push_back(hextable[c >> 4]); out.push_back(hextable[c & 0xf]);
+ } else {
+ // Unescaped.
+ out.push_back(c);
+ }
+ break;
+ }
+ }
+ out.push_back('"');
+ return out;
+}
+
+} // namespace
+
+class Playground {
+ public:
+ static const int kPlaygroundSize = 64 * 1024 * 1024;
+
+ // Encapsulate the configuration options to the playground.
+ class Options {
+ public:
+ Options()
+ : stack_unwind_depth_(0),
+ log_heap_(false),
+ log_lock_(false),
+ vista_(false) { }
+
+
+ // The maximum amount of frames we should unwind from the call stack.
+ int stack_unwind_depth() { return stack_unwind_depth_; }
+ void set_stack_unwind_depth(int depth) { stack_unwind_depth_ = depth; }
+
+ // Whether we should log heap operations (alloc / free).
+ bool log_heap() { return log_heap_; }
+ void set_log_heap(bool x) { log_heap_ = x; }
+
+ // Whether we should log lock (critical section) operations.
+ bool log_lock() { return log_lock_; }
+ void set_log_lock(bool x) { log_lock_ = x; }
+
+ // Whether we are running on Vista.
+ bool vista() { return vista_; }
+ void set_vista(bool x) { vista_ = x; }
+
+ private:
+ int stack_unwind_depth_;
+ bool log_heap_;
+ bool log_lock_;
+ bool vista_;
+ };
+
+ Playground(HANDLE proc, const Options& options)
+ : proc_(proc),
+ remote_addr_(NULL),
+ resolver_("ntdll.dll"),
+ options_(options) {
+ // We copy the entire playground into the remote process, and we have
+ // fields that we expect to be zero. TODO this could be a lot better.
+ memset(buf_, 0, sizeof(buf_));
+ }
+
+ void AllocateInRemote() {
+ // Try to get something out of the way and easy to debug.
+ static void* kPlaygroundAddr = reinterpret_cast<void*>(0x66660000);
+ // Allocate our playground memory in the target process. This is a big
+ // slab of read/write/execute memory that we use for our code
+ // instrumentation, and the memory for writing out our logging events.
+ remote_addr_ = reinterpret_cast<char*>(
+ VirtualAllocEx(proc_,
+ kPlaygroundAddr,
+ kPlaygroundSize,
+ MEM_COMMIT | MEM_RESERVE,
+ PAGE_EXECUTE_READWRITE));
+ if (remote_addr_ == NULL || remote_addr_ != kPlaygroundAddr) {
+ NOTREACHED("Falied to allocate playground: 0x%08x", remote_addr_);
+ }
+ }
+
+ void CopyToRemote() {
+ WriteProcessMemory(proc_,
+ remote_addr_,
+ buf_,
+ sizeof(buf_),
+ NULL);
+ }
+
+ void CopyFromRemote() {
+ SIZE_T size = 0;
+ ReadProcessMemory(proc_,
+ remote_addr_,
+ buf_,
+ sizeof(buf_),
+ &size);
+ }
+
+ enum EventRecordType {
+ EVENT_TYPE_LDR = 0,
+ EVENT_TYPE_THREADBEGIN = 1,
+ EVENT_TYPE_THREADNAME = 2,
+ EVENT_TYPE_EXCEPTION = 3,
+ EVENT_TYPE_PROCESSEXIT = 4,
+ EVENT_TYPE_CREATETHREAD = 5,
+ EVENT_TYPE_THREADEXIT = 6,
+ EVENT_TYPE_ALLOCHEAP = 7,
+ EVENT_TYPE_FREEHEAP = 8,
+ EVENT_TYPE_SYSCALL = 9,
+ EVENT_TYPE_ENTER_CS = 10,
+ EVENT_TYPE_TRYENTER_CS = 11,
+ EVENT_TYPE_LEAVE_CS = 12,
+ EVENT_TYPE_APC = 13
+ };
+
+ static const int kThreadNameBufSize = 64;
+ static const int kLdrBufSize = 512; // Looks like internal buffer is 512.
+
+ static const int kCodeBlockSize = 256;
+
+ static const int kOffLdrCode = 0 * kCodeBlockSize;
+ static const int kOffCreateThreadCode = 1 * kCodeBlockSize;
+ static const int kOffThreadCode = 2 * kCodeBlockSize;
+ static const int kOffExpCode = 3 * kCodeBlockSize;
+ static const int kOffExitCode = 4 * kCodeBlockSize;
+ static const int kOffThreadExitCode = 5 * kCodeBlockSize;
+ static const int kOffAllocHeapCode = 6 * kCodeBlockSize;
+ static const int kOffFreeHeapCode = 7 * kCodeBlockSize;
+ static const int kOffSyscallCode = 8 * kCodeBlockSize;
+ static const int kOffEnterCritSecCode = 9 * kCodeBlockSize;
+ static const int kOffTryEnterCritSecCode = 10 * kCodeBlockSize;
+ static const int kOffLeaveCritSecCode = 11 * kCodeBlockSize;
+ static const int kOffApcDispCode = 12 * kCodeBlockSize;
+
+ static const int kOffLogAreaPtr = 4096;
+ static const int kOffLogAreaData = 4096 + 4;
+
+ static const int kRecordHeaderSize = 8 + 4 + 4 + 4;
+
+ // Given the address to the start of a function, patch the function to jump
+ // to a given offset into the playground. This function will try to take
+ // advantage of hotpatch code, if the function is prefixed with 5 0x90 bytes.
+ // Returns a std::string of any assembly instructions that must be relocated,
+ // as they were overwritten during patching.
+ std::string PatchPreamble(int func_addr, int playground_off) {
+ sidestep::MiniDisassembler disas;
+ int stub_addr = reinterpret_cast<int>(remote_addr_ + playground_off);
+
+ std::string instrs;
+
+ char buf[15];
+ if (ReadProcessMemory(proc_,
+ reinterpret_cast<void*>(func_addr - 5),
+ buf,
+ sizeof(buf),
+ NULL) == 0) {
+ NOTREACHED("ReadProcessMemory(0x%08x) failed: %d",
+ func_addr - 5, GetLastError());
+ }
+
+ // TODO(deanm): It seems in more recent updates the compiler is generating
+ // complicated sequences for padding / alignment. For example:
+ // 00000000 8DA42400000000 lea esp,[esp+0x0]
+ // 00000007 8D4900 lea ecx,[ecx+0x0]
+ // is used for a 16 byte alignment. We need a better way of handling this.
+ if (memcmp(buf, "\x90\x90\x90\x90\x90", 5) == 0 ||
+ memcmp(buf, "\x00\x8D\x64\x24\x00", 5) == 0 ||
+ memcmp(buf, "\x00\x00\x8D\x49\x00", 5) == 0) {
+ unsigned int instr_bytes = 0;
+
+ // We might have a hotpatch no-op of mov edi, edi "\x8b\xff". It is a
+ // bit of a waste to relocate it, but it makes everything simpler.
+
+ while (instr_bytes < 2) {
+ if (disas.Disassemble(
+ reinterpret_cast<unsigned char*>(buf + 5 + instr_bytes),
+ &instr_bytes) != sidestep::IT_GENERIC) {
+ NOTREACHED("Could not disassemble or relocate instruction.");
+ }
+ // We only read 10 bytes worth of instructions.
+ CHECK(instr_bytes < 10);
+ }
+
+ instrs.assign(buf + 5, instr_bytes);
+
+ // We have a hotpatch prefix of 5 nop bytes. We can use this for our
+ // long jump, and then overwrite the first 2 bytes to jump back to there.
+ CodeBuffer patch(buf);
+ int off = stub_addr - func_addr;
+ patch.jmp_rel(off);
+ patch.jmp_rel_short(-2 - 5);
+ } else {
+ // We need a full 5 bytes for the jump.
+ unsigned int instr_bytes = 0;
+ while (instr_bytes < 5) {
+ if (disas.Disassemble(
+ reinterpret_cast<unsigned char*>(buf + 5 + instr_bytes),
+ &instr_bytes) != sidestep::IT_GENERIC) {
+ NOTREACHED("Could not disassemble or relocate instruction.");
+ }
+ // We only read 10 bytes worth of instructions.
+ CHECK(instr_bytes < 10);
+ }
+
+ instrs.assign(buf + 5, instr_bytes);
+
+ // Overwrite the first 5 bytes with a relative jump to our stub.
+ CodeBuffer patch(buf + 5);
+ int off = stub_addr - (func_addr + 5);
+ patch.jmp_rel(off);
+ }
+
+ // Write back the bytes, we are really probably writing more back than we
+ // need to, but it shouldn't really matter.
+ if (WriteProcessMemory(proc_,
+ reinterpret_cast<void*>(func_addr - 5),
+ buf,
+ sizeof(buf),
+ NULL) == 0) {
+ NOTREACHED("WriteProcessMemory(0x%08x) failed: %d",
+ func_addr - 5, GetLastError());
+ }
+
+ return instrs;
+ }
+
+ std::string PatchPreamble(const char* func_name, int playground_off) {
+ return PatchPreamble(
+ reinterpret_cast<int>(resolver_.Resolve(func_name)), playground_off);
+ }
+
+ // Restore any instructions that needed to be moved to make space for our
+ // patch and jump back to the original code.
+ void ResumeOriginalFunction(const char* func_name,
+ const std::string& moved_instructions,
+ int stub_offset,
+ CodeBuffer* cb) {
+ cb->emit_bytes(moved_instructions);
+ int off = resolver_.Resolve(func_name) +
+ moved_instructions.size() -
+ (remote_addr_ + stub_offset + cb->size() + 5);
+ cb->jmp_rel(off);
+ }
+
+ // Makes a call to NtQueryPerformanceCounter, writing the timestamp to the
+  // buffer pointed to by EDI. EDI is not incremented. EAX is not preserved.
+ void AssembleQueryPerformanceCounter(CodeBuffer* cb) {
+ // Make a call to NtQueryPerformanceCounter and write the result into
+ // the log area. The buffer we write to should be aligned, but we should
+    // guarantee that anyway for the logging area for performance.
+ cb->push_imm(0); // PerformanceFrequency
+ cb->push(EDI); // PerformanceCounter
+ cb->mov_imm(EAX, reinterpret_cast<int>(
+ resolver_.Resolve("ntdll!NtQueryPerformanceCounter")));
+ cb->call(EAX);
+ }
+
+ // This is the common log setup routine. It will allocate a new log entry,
+ // and write out the common log header to the event entry. The header is:
+  //  [ 64bit QPC ] [ 32bit cpu id ] [ 32bit thread id ] [ 32bit rec id ]
+ // EDI will be left pointing to the log entry, with |space| bytes left for
+ // type specific data. All other registers should not be clobbered.
+ void AssembleHeaderCode(CodeBuffer* cb, EventRecordType rt, int space) {
+ cb->push(EAX);
+ cb->push(EDX);
+ cb->push(ECX);
+ cb->push(ESI);
+
+ int unwind_depth = options_.stack_unwind_depth();
+
+ // Load EDI with the number of bytes we want for our log entry, this will
+ // be used in the atomic increment to allocate the log entry.
+ cb->mov_imm(EDI, kRecordHeaderSize + (unwind_depth * 4) + space);
+ // Do the increment and have EDI point to our log entry buffer space.
+ cb->mov_imm(EDX, reinterpret_cast<int>(remote_addr_ + kOffLogAreaPtr));
+ cb->inc_atomic(EDX, EDI);
+ // EDI is the buffer offset, make it a pointer to the record entry.
+ cb->add_imm(EDI, reinterpret_cast<int>(remote_addr_ + kOffLogAreaData));
+
+ AssembleQueryPerformanceCounter(cb);
+ cb->add_imm(EDI, 8);
+
+ cb->which_cpu();
+ cb->stosd();
+
+ cb->which_thread();
+ cb->stosd();
+
+ // Stack unwinding, follow EBP to the maximum number of frames, and make
+ // sure that it stays on the stack (between ESP and TEB.StackBase).
+ if (unwind_depth > 0) {
+ cb->mov_imm(ECX, unwind_depth);
+ cb->fs(); cb->mov(EDX, Operand(0x04)); // get TEB.StackBase
+
+ // Start at EBP.
+ cb->mov(EAX, EBP);
+
+ Label unwind_loop, bail;
+ cb->bind(&unwind_loop);
+
+ // Bail if (EAX < ESP) (below the stack)
+ cb->cmp(EAX, ESP);
+ cb->jcc(below, &bail);
+ // Bail if (EAX >= EDX) (above the stack)
+ cb->cmp(EAX, EDX);
+ cb->jcc(above_equal, &bail);
+
+ // We have a valid stack pointer, it should point to something like:
+ // [ saved frame pointer ] [ return address ] [ arguments ... ]
+ cb->mov(ESI, EAX);
+ cb->lodsd(); // Get the new stack pointer to follow in EAX
+ cb->movsd(); // Copy the return address to the log area.
+
+ cb->loop(&unwind_loop);
+
+ cb->bind(&bail);
+      // If we did manage to unwind to the max, fill the rest with 0 (really
+ // we just want to inc EDI to the end, and this is an easy way).
+ cb->mov_imm(EAX, 0); // TODO use an xor
+ cb->rep(); cb->stosd();
+ }
+
+ // Store the type for this record entry.
+ cb->mov_imm(EAX, rt);
+ cb->stosd();
+
+ cb->pop(ESI);
+ cb->pop(ECX);
+ cb->pop(EDX);
+ cb->pop(EAX);
+ }
+
+ void PatchLoader() {
+ static const EventRecordType kRecordType = EVENT_TYPE_LDR;
+ static const char* kFuncName = "ntdll!DebugPrint";
+ static const int kStubOffset = kOffLdrCode;
+
+ std::string moved_instructions = PatchPreamble(kFuncName, kStubOffset);
+
+ // Set ShowSnaps to one to get the print routines to be called.
+ char enabled = 1;
+ WriteProcessMemory(
+ proc_, resolver_.Resolve("ntdll!ShowSnaps"), &enabled, 1, NULL);
+
+ CodeBuffer cb(buf_ + kStubOffset);
+
+ cb.pop(EDX); // return address
+ cb.pop(EAX); // First param in eax
+ cb.push(ESI);
+ cb.push(EDI);
+ cb.push(EDX);
+
+ cb.mov(ESI, EAX); // ESI points at the string structure.
+
+ // We used to do variable length based on the length supplied in the str
+ // structure, but it's easier (and sloppier) to just copy a fixed amount.
+ AssembleHeaderCode(&cb, kRecordType, kLdrBufSize);
+
+ cb.lodsd(); // Load the character count
+ cb.lodsd(); // Load the char*
+ cb.mov(ESI, EAX);
+ cb.mov_imm(ECX, kLdrBufSize / 4); // load the char count as the rep count
+ cb.rep(); cb.movsb(); // Copy the string to the logging buffer
+
+ // Return
+ cb.pop(EDX);
+ cb.pop(EDI);
+ cb.pop(ESI);
+ cb.pop(ECX); // don't care
+ cb.pop(ECX); // don't care
+ cb.jmp(EDX);
+ }
+
+ void PatchCreateThread() {
+ static const EventRecordType kRecordType = EVENT_TYPE_CREATETHREAD;
+ static const char* kFuncName =
+ options_.vista() ? "ntdll!NtCreateThreadEx" : "ntdll!NtCreateThread";
+ static const int kStubOffset = kOffCreateThreadCode;
+
+ std::string moved_instructions = PatchPreamble(kFuncName, kStubOffset);
+
+ CodeBuffer cb(buf_ + kStubOffset);
+
+ cb.push(EDI);
+ cb.push(ESI);
+
+ AssembleHeaderCode(&cb, kRecordType, 8);
+
+ cb.mov(EAX, Operand(ESP, 0x18 + 8));
+
+    // Super ugly hack. To correlate between creating a thread and the new
+ // thread running, we stash something to identify the creating event when
+ // we log the created event. We just use a pointer to the event log data
+ // since this will be unique and can tie the two events together. We pass
+ // it by writing into the context structure, so it will be passed in ESI.
+ cb.add_imm(EAX, 0xa0);
+ cb.push(EDI);
+ cb.mov(EDI, EAX);
+ cb.pop(EAX);
+ cb.push(EAX);
+ cb.stosd();
+
+ // Get and save CONTEXT.Eip
+ cb.mov(ESI, EDI);
+ cb.add_imm(ESI, 20);
+ cb.pop(EDI);
+ cb.mov(EAX, EDI);
+ cb.stosd(); // Record the event identifier to tie together the events.
+ cb.movsd(); // write Eip to the log event
+
+ cb.pop(ESI);
+ cb.pop(EDI);
+
+ ResumeOriginalFunction(kFuncName, moved_instructions, kStubOffset, &cb);
+ }
+
+ void PatchThreadBegin() {
+ static const EventRecordType kRecordType = EVENT_TYPE_THREADBEGIN;
+ static const char* kFuncName = "ntdll!CsrNewThread";
+ static const int kStubOffset = kOffThreadCode;
+
+ std::string moved_instructions = PatchPreamble(kFuncName, kStubOffset);
+
+ CodeBuffer cb(buf_ + kStubOffset);
+
+ cb.push(EDI);
+
+ AssembleHeaderCode(&cb, kRecordType, 8);
+
+ cb.mov(EAX, ESI); // We stashed the creator's eventid in the context ESI.
+ cb.stosd();
+
+ // TODO(deanm): The pointer is going to point into the CRT or something,
+ // should we dig deeper to get more information about the real entry?
+ cb.mov(EAX, Operand(EBP, 0x8));
+ cb.stosd();
+ cb.pop(EDI);
+
+ ResumeOriginalFunction(kFuncName, moved_instructions, kStubOffset, &cb);
+ }
+
+ void PatchThreadBeginVista() {
+ static const EventRecordType kRecordType = EVENT_TYPE_THREADBEGIN;
+ static const char* kFuncName = "ntdll!_RtlUserThreadStart";
+ static const int kStubOffset = kOffThreadCode;
+
+ std::string moved_instructions = PatchPreamble(kFuncName, kStubOffset);
+
+ CodeBuffer cb(buf_ + kStubOffset);
+
+ cb.push(EDI);
+
+ AssembleHeaderCode(&cb, kRecordType, 8);
+
+ cb.mov(EAX, ESI); // We stashed the creator's eventid in the context ESI.
+ cb.stosd();
+
+ // TODO(deanm): The pointer is going to point into the CRT or something,
+ // should we dig deeper to get more information about the real entry?
+ //cb.mov(EAX, Operand(EBP, 0x8));
+ cb.mov_imm(EAX, 0);
+ cb.stosd();
+ cb.pop(EDI);
+
+ ResumeOriginalFunction(kFuncName, moved_instructions, kStubOffset, &cb);
+ }
+
+ // Intercept exception dispatching so we can catch when threads set a thread
+ // name (which is an exception with a special code). TODO it could be
+ // useful to log all exceptions.
+ void PatchSetThreadName() {
+ static const EventRecordType kRecordType = EVENT_TYPE_THREADNAME;
+ static const char* kFuncName = "ntdll!RtlDispatchException";
+ static const int kStubOffset = kOffExpCode;
+
+ std::string moved_instructions = PatchPreamble(kFuncName, kStubOffset);
+
+ CodeBuffer cb(buf_ + kStubOffset);
+
+ cb.pop(EDX); // return address
+ cb.pop(EAX); // ExceptionRecord
+ cb.push(EAX);
+ cb.push(EDX);
+
+ cb.push(ESI);
+
+ cb.mov(ESI, EAX);
+ cb.lodsd();
+
+ Label bail;
+ // exception code
+ cb.cmp_imm(EAX, 0x406D1388);
+ cb.jcc(not_equal, &bail);
+
+ cb.push(EDI);
+
+ AssembleHeaderCode(&cb, kRecordType, kThreadNameBufSize);
+
+ // Fetch the second parameter.
+ for (int i = 0; i < 6; ++i) {
+ cb.lodsd();
+ }
+
+ // TODO This is sloppy and we could run into unmapped memory...
+ cb.mov(ESI, EAX);
+ cb.mov_imm(ECX, kThreadNameBufSize / 4);
+ cb.rep(); cb.movsd();
+
+ cb.pop(EDI);
+
+ cb.bind(&bail);
+ cb.pop(ESI);
+
+ ResumeOriginalFunction(kFuncName, moved_instructions, kStubOffset, &cb);
+ }
+
+
+ void PatchThreadExit() {
+ static const EventRecordType kRecordType = EVENT_TYPE_THREADEXIT;
+ static const char* kFuncName = "ntdll!LdrShutdownThread";
+ static const int kStubOffset = kOffThreadExitCode;
+
+ std::string moved_instructions = PatchPreamble(kFuncName, kStubOffset);
+ CodeBuffer cb(buf_ + kStubOffset);
+
+ cb.push(EDI);
+ AssembleHeaderCode(&cb, kRecordType, 0);
+ cb.pop(EDI);
+
+ ResumeOriginalFunction(kFuncName, moved_instructions, kStubOffset, &cb);
+ }
+
+ void PatchAllocateHeap() {
+ static const EventRecordType kRecordType = EVENT_TYPE_ALLOCHEAP;
+ static const char* kFuncName = "ntdll!RtlAllocateHeap";
+ static const int kStubOffset = kOffAllocHeapCode;
+
+ std::string moved_instructions = PatchPreamble(kFuncName, kStubOffset);
+ CodeBuffer cb(buf_ + kStubOffset);
+
+ cb.push(EDI);
+ cb.push(ESI);
+
+ AssembleHeaderCode(&cb, kRecordType, 12);
+
+ cb.mov(ESI, ESP);
+ cb.add_imm(ESI, 12); // Skip over our saved and the return address
+ cb.movsd(); cb.movsd(); cb.movsd(); // Copy the 3 parameters
+
+ cb.pop(ESI);
+ cb.pop(EDI);
+
+ ResumeOriginalFunction(kFuncName, moved_instructions, kStubOffset, &cb);
+ }
+
+ void PatchFreeHeap() {
+ static const EventRecordType kRecordType = EVENT_TYPE_FREEHEAP;
+ static const char* kFuncName = "ntdll!RtlFreeHeap";
+ static const int kStubOffset = kOffFreeHeapCode;
+
+ std::string moved_instructions = PatchPreamble(kFuncName, kStubOffset);
+ CodeBuffer cb(buf_ + kStubOffset);
+
+ cb.push(EDI);
+ cb.push(ESI);
+
+ AssembleHeaderCode(&cb, kRecordType, 12);
+
+ cb.mov(ESI, ESP);
+ cb.add_imm(ESI, 12); // Skip over our saved and the return address
+ cb.movsd(); cb.movsd(); cb.movsd(); // Copy the 3 parameters
+
+ cb.pop(ESI);
+ cb.pop(EDI);
+
+ ResumeOriginalFunction(kFuncName, moved_instructions, kStubOffset, &cb);
+ }
+
+ // Don't even bother going back to the original code, just implement our
+ // own KiFastSystemCall. The original looks like:
+ // .text:7C90EB8B mov edx, esp
+ // .text:7C90EB8D sysenter
+ // .text:7C90EB8F nop
+ // .text:7C90EB90 nop
+ // .text:7C90EB91 nop
+ // .text:7C90EB92 nop
+ // .text:7C90EB93 nop
+ void PatchSyscall() {
+ static const EventRecordType kRecordType = EVENT_TYPE_SYSCALL;
+ static const char* kFuncName = "ntdll!KiFastSystemCall";
+ static const int kStubOffset = kOffSyscallCode;
+
+ std::string moved_instructions = PatchPreamble(kFuncName, kStubOffset);
+
+ {
+ CodeBuffer cb(buf_ + kStubOffset);
+
+ Label skip;
+
+ // Skip 0xa5 which is QueryPerformanceCounter, to make sure we don't log
+ // our own logging's QPC. Disabled for now, using ret addr check...
+ // cb.cmp_imm(EAX, 0xa5);
+ // cb.jcc(equal, &skip);
+
+ // Check if the return address is from 0x6666 (our code region).
+ // 66817C24066666 cmp word [esp+0x6],0x6666
+ cb.emit(0x66); cb.emit(0x81); cb.emit(0x7C);
+ cb.emit(0x24); cb.emit(0x06); cb.emit(0x66); cb.emit(0x66);
+ cb.jcc(equal, &skip);
+
+ // This is all a bit shit. Originally I thought I could store some state
+ // on the stack above ESP, however, it seems that when APCs, etc are
+ // queued, they will use the stack above ESP. Well, not above ESP, above
+      // what was passed in as EDX into the system call, no matter if ESP was
+ // different than this :(. So we need to store our state in the event
+ // log record, and then we stick a pointer to that over a ret addr...
+
+ // Our stack starts like:
+ // [ ret addr ] [ ret addr 2 ] [ arguments ]
+ // We will update it to look like
+ // [ ret stub addr ] [ event entry ptr ] [ arguments ]
+
+ cb.push(EDI); // save EDI since we're using it
+ AssembleHeaderCode(&cb, kRecordType, 16 + 16 + 8);
+ cb.mov(EDX, EAX); // Save EAX...
+ cb.stosd(); // eax is the syscall number
+ cb.pop(EAX);
+ cb.stosd(); // store the saved EDI
+ cb.pop(EAX);
+ cb.stosd(); // store the real return address
+ cb.pop(EAX);
+ cb.stosd(); // store the real (secondary) return address;
+
+ cb.push(ESI);
+ cb.mov(ESI, ESP);
+ cb.lodsd();
+ cb.movsd(); // argument 1
+ cb.movsd(); // argument 2
+ cb.movsd(); // argument 3
+ cb.pop(ESI);
+
+ cb.push(EDI); // store our event ptr over the secondary ret addr.
+ cb.push_imm(reinterpret_cast<int>(remote_addr_ + kOffSyscallCode + 200));
+ cb.mov(EAX, EDX); // restore EAX
+
+ cb.bind(&skip);
+ cb.mov(EDX, ESP);
+ cb.sysenter();
+
+ if (cb.size() > 200) {
+ NOTREACHED("code too big: %d", cb.size());
+ }
+ }
+
+ {
+ CodeBuffer cb(buf_ + kStubOffset + 200);
+
+ // TODO share the QPC code, this is a copy and paste...
+
+ cb.pop(EDI); // get the log area
+
+ cb.stosd(); // Log the system call return value.
+
+ // QPC will clobber EAX, and it's very important to save it since it
+ // is the return value from the system call. TODO validate if there is
+ // anything else we need to save...
+ cb.push(EAX);
+ AssembleQueryPerformanceCounter(&cb);
+ cb.pop(EAX);
+
+ // We need to:
+      // - Restore the original "secondary" return address
+ // - Restore the original value of the EDI register
+ // - Jump control flow to the original return address
+ // All 3 of these values are stored in the log record...
+ // [ syscall num ] [ saved edi ] [ real rets ] [ args ] [ retval ] [ ts ]
+ // currently edi points here ----^
+
+ cb.push(Operand(EDI, -4 - 16)); // push the real 2nd ret
+ cb.push(Operand(EDI, -8 - 16)); // push the real ret
+ cb.push(Operand(EDI, -12 - 16)); // push the saved EDI
+
+ cb.pop(EDI); // restore EDI that was saved in the record
+ cb.ret(); // jmp back to the real ret ...
+
+ if (cb.size() > 56) {
+ NOTREACHED("ug");
+ }
+ }
+ }
+
+  // Patch lock (critical section) holding.
+ void PatchEnterCriticalSection() {
+ static const EventRecordType kRecordType = EVENT_TYPE_ENTER_CS;
+ static const char* kFuncName = "ntdll!RtlEnterCriticalSection";
+ static const int kStubOffset = kOffEnterCritSecCode;
+
+ std::string moved_instructions = PatchPreamble(kFuncName, kStubOffset);
+
+ // We just want to capture the return address and original argument, so
+ // we know when EnterCriticalSection returned, we don't want to know when
+ // it entered because it could sit waiting. We want to know when the lock
+    // actually started being held. The compiler will sometimes generate code
+ // that overwrites arguments, so we'll keep a copy of the argument just in
+ // case code like this is ever generated in the future. TODO is it enough
+ // to just assume a LPCRITICAL_SECTION uniquely identifies the lock, or
+ // can the same lock have multiple different copies, I would assume not.
+ {
+ CodeBuffer cb(buf_ + kStubOffset);
+
+ // Set up an additional frame so that we capture the return.
+ // TODO use memory instructions instead of using registers.
+ cb.pop(EAX); // return address
+ cb.pop(EDX); // first argument (critical section pointer)
+
+ cb.push(EDX);
+ cb.push(EAX);
+ cb.push(EDX);
+ cb.push_imm(
+ reinterpret_cast<int>(remote_addr_ + kStubOffset + 40));
+
+ ResumeOriginalFunction(kFuncName, moved_instructions, kStubOffset, &cb);
+ CHECK(cb.size() < 40);
+ }
+
+ {
+ CodeBuffer cb(buf_ + kStubOffset + 40);
+
+ cb.push(ESI);
+ cb.mov(ESI, ESP);
+ cb.push(EAX);
+ cb.push(EDI);
+
+ AssembleHeaderCode(&cb, kRecordType, 4);
+
+ cb.lodsd(); // Skip over our saved ESI
+ cb.lodsd(); // Skip over the return address
+ cb.movsd(); // Write the CRITICAL_SECTION* to the event record.
+
+ cb.pop(EDI);
+ cb.pop(EAX);
+ cb.pop(ESI);
+
+ cb.ret(0x04);
+ }
+ }
+
+ void PatchTryEnterCriticalSection() {
+ static const EventRecordType kRecordType = EVENT_TYPE_TRYENTER_CS;
+ static const char* kFuncName = "ntdll!RtlTryEnterCriticalSection";
+ static const int kStubOffset = kOffTryEnterCritSecCode;
+
+ std::string moved_instructions = PatchPreamble(kFuncName, kStubOffset);
+
+ {
+ CodeBuffer cb(buf_ + kStubOffset);
+
+ // Set up an additional frame so that we capture the return.
+ // TODO use memory instructions instead of using registers.
+ cb.pop(EAX); // return address
+ cb.pop(EDX); // first argument (critical section pointer)
+
+ cb.push(EDX);
+ cb.push(EAX);
+ cb.push(EDX);
+ cb.push_imm(reinterpret_cast<int>(remote_addr_ + kStubOffset + 40));
+
+ ResumeOriginalFunction(kFuncName, moved_instructions, kStubOffset, &cb);
+ CHECK(cb.size() < 40);
+ }
+
+ {
+ CodeBuffer cb(buf_ + kStubOffset + 40);
+
+ cb.push(ESI);
+ cb.mov(ESI, ESP);
+ cb.push(EDI);
+
+ cb.push(EAX);
+
+ AssembleHeaderCode(&cb, kRecordType, 8);
+
+ cb.lodsd(); // Skip over our saved ESI
+ cb.lodsd(); // Skip over the return address
+ cb.movsd(); // Write the CRITICAL_SECTION* to the event record.
+
+ cb.pop(EAX);
+ cb.stosd(); // Write the return value to the event record.
+
+ cb.pop(EDI);
+ cb.pop(ESI);
+
+ cb.ret(0x04);
+ }
+ }
+
+ void PatchLeaveCriticalSection() {
+ static const EventRecordType kRecordType = EVENT_TYPE_LEAVE_CS;
+ static const char* kFuncName = "ntdll!RtlLeaveCriticalSection";
+ static const int kStubOffset = kOffLeaveCritSecCode;
+
+ std::string moved_instructions = PatchPreamble(kFuncName, kStubOffset);
+ CodeBuffer cb(buf_ + kStubOffset);
+
+ // TODO use memory instructions instead of using registers.
+ cb.pop(EDX); // return address
+ cb.pop(EAX); // first argument (critical section pointer)
+ cb.push(EAX);
+ cb.push(EDX);
+
+ cb.push(EDI);
+ AssembleHeaderCode(&cb, kRecordType, 4);
+ cb.stosd(); // Write the CRITICAL_SECTION* to the event record.
+ cb.pop(EDI);
+
+ ResumeOriginalFunction(kFuncName, moved_instructions, kStubOffset, &cb);
+ }
+
+ // Patch APC dispatching. This is a bit hacky, since the return to kernel
+ // mode is done with NtContinue, we have to shim in a stub return address to
+ // catch when the callback is finished. It is probably a bit fragile.
+ void PatchApcDispatcher() {
+ static const EventRecordType kRecordType = EVENT_TYPE_APC;
+ static const char* kFuncName = "ntdll!KiUserApcDispatcher";
+ static const int kStubOffset = kOffApcDispCode;
+
+ std::string moved_instructions = PatchPreamble(kFuncName, kStubOffset);
+
+ {
+ CodeBuffer cb(buf_ + kStubOffset);
+
+ // We don't really need to preserve these since we're the first thing
+ // executing from the kernel dispatch, but yeah, it is good practice.
+ cb.push(EDI);
+ cb.push(EAX);
+
+ AssembleHeaderCode(&cb, kRecordType, 4 + 4 + 8);
+
+ cb.mov_imm(EAX, reinterpret_cast<int>(remote_addr_ + kStubOffset + 140));
+ cb.xchg(EAX, Operand(ESP, 8)); // Swap the callback address with ours.
+ cb.stosd(); // Store the original callback function address.
+
+      // TODO for now we're lazy and depend on ESI being preserved, and we
+ // use it to store the pointer into our log record. EDI isn't preserved.
+ cb.mov(ESI, EDI);
+
+ cb.pop(EAX);
+ cb.pop(EDI);
+
+ ResumeOriginalFunction(kFuncName, moved_instructions, kStubOffset, &cb);
+
+ CHECK(cb.size() < 140);
+ }
+ {
+ CodeBuffer cb(buf_ + kStubOffset + 140);
+
+ // This is our shim, we need to call the original callback function, then
+ // we can catch the return and log when it was completed.
+ cb.pop(EAX); // The real return address, safe to use EAX w/ the ABI?
+ cb.push(EDI);
+
+ cb.mov(EDI, ESI);
+ cb.stosd(); // Store the real return address, we'll need it.
+
+ cb.add_imm(ESI, -4);
+ cb.lodsd(); // Load the real callback address.
+
+ cb.mov(ESI, EDI);
+ cb.pop(EDI);
+
+ cb.call(EAX); // Call the original callback address.
+
+ cb.push(EAX);
+ cb.push(EDI);
+
+ cb.mov(EDI, ESI);
+ AssembleQueryPerformanceCounter(&cb);
+
+ cb.pop(EDI);
+ cb.pop(EAX);
+
+ cb.push(Operand(ESI, -4)); // Push the real return address.
+ cb.ret(); // Return back to the APC Dispatcher.
+
+ CHECK(cb.size() < 50);
+ }
+ }
+
+ // We need to hook into process shutdown for two reasons. Most importantly,
+ // we need to copy the playground back from the process before the address
+ // space goes away. We could avoid this with shared memory, however, there
+  // is a second reason. In order to capture symbols for all of the libraries
+  // loaded into arbitrary applications, on shutdown we do an intrusive load
+ // of symbols into the traced process.
+ //
+ // ntdll!LdrShutdownProcess
+ // - NtSetEvent(event, 0);
+ // - NtWaitForSingleObject(event, FALSE, NULL);
+ // - jmp back
+ void PatchExit(HANDLE exiting, HANDLE exited) {
+ static const EventRecordType kRecordType = EVENT_TYPE_PROCESSEXIT;
+ static const char* kFuncName = "ntdll!LdrShutdownProcess";
+ static const int kStubOffset = kOffExitCode;
+
+ HANDLE rexiting, rexited;
+ if (!DuplicateHandle(::GetCurrentProcess(),
+ exiting,
+ proc_,
+ &rexiting,
+ 0,
+ FALSE,
+ DUPLICATE_SAME_ACCESS)) {
+ NOTREACHED("");
+ }
+ if (!DuplicateHandle(::GetCurrentProcess(),
+ exited,
+ proc_,
+ &rexited,
+ 0,
+ FALSE,
+ DUPLICATE_SAME_ACCESS)) {
+ NOTREACHED("");
+ }
+
+ std::string moved_instructions = PatchPreamble(kFuncName, kStubOffset);
+ CodeBuffer cb(buf_ + kStubOffset);
+
+ cb.push(EDI);
+ AssembleHeaderCode(&cb, kRecordType, 0);
+ cb.pop(EDI);
+
+ // NtSetEvent(exiting, 0);
+ cb.push_imm(0);
+ cb.push_imm(reinterpret_cast<int>(rexiting));
+ cb.mov_imm(EAX, reinterpret_cast<int>(
+ resolver_.Resolve("ntdll!NtSetEvent")));
+ cb.call(EAX);
+
+ // NtWaitForSingleObject(exited, FALSE, INFINITE);
+ cb.push_imm(0);
+ cb.push_imm(0);
+ cb.push_imm(reinterpret_cast<int>(rexited));
+ cb.mov_imm(EAX, reinterpret_cast<int>(
+ resolver_.Resolve("ntdll!NtWaitForSingleObject")));
+ cb.call(EAX);
+
+ ResumeOriginalFunction(kFuncName, moved_instructions, kStubOffset, &cb);
+ }
+
+
+ void Patch() {
+ if (options_.vista()) {
+ // TODO(deanm): Make PatchCreateThread work on Vista.
+ PatchThreadBeginVista();
+ } else {
+ PatchCreateThread();
+ PatchThreadBegin();
+ }
+
+ PatchThreadExit();
+ PatchSetThreadName();
+ PatchSyscall();
+
+ PatchApcDispatcher();
+
+ // The loader logging needs to be improved a bit to really be useful.
+ //PatchLoader();
+
+ // These are interesting, but will collect a ton of data:
+ if (options_.log_heap()) {
+ PatchAllocateHeap();
+ PatchFreeHeap();
+ }
+ if (options_.log_lock()) {
+ PatchEnterCriticalSection();
+ PatchTryEnterCriticalSection();
+ PatchLeaveCriticalSection();
+ }
+ }
+
+ // Dump the event records from the playground to stdout in a JSON format.
+ // TODO: Drop RDTSCNormalizer, it was from old code that tried to use the
+ // rdtsc counters from the CPU, and this required a bunch of normalization
+  // to account for non-synchronized timestamps across different cores, etc.
+ void DumpJSON(RDTSCNormalizer* rdn, SymResolver* res) {
+ int pos = kOffLogAreaPtr;
+ int i = IntAt(pos);
+ pos += 4;
+
+ std::map<int, const char*> syscalls = CreateSyscallMap();
+
+ printf("parseEvents([\n");
+ for (int end = pos + i; pos < end; ) {
+ printf("{\n");
+ __int64 ts = Int64At(pos);
+ pos += 8;
+ void* cpuid = reinterpret_cast<void*>(IntAt(pos));
+ pos += 4;
+ printf("'ms': %f,\n", rdn->MsFromStart(cpuid, ts));
+
+ printf("'cpu': 0x%x,\n'thread': 0x%x,\n", cpuid, IntAt(pos));
+ pos += 4;
+
+ if (options_.stack_unwind_depth() > 0) {
+ printf("'stacktrace': [\n");
+ for (int i = 0; i < options_.stack_unwind_depth(); ++i) {
+ int retaddr = IntAt(pos + (i * 4));
+ if (!retaddr)
+ break;
+ printf(" [ 0x%x, %s ],\n",
+ retaddr,
+ res ? JSONString(res->Unresolve(retaddr)).c_str() : "\"\"");
+ }
+ printf("],\n");
+ pos += (options_.stack_unwind_depth() * 4);
+ }
+
+
+ EventRecordType rt = static_cast<EventRecordType>(IntAt(pos));
+ pos += 4;
+
+ switch (rt) {
+ case EVENT_TYPE_LDR:
+ {
+ printf("'eventtype': 'EVENT_TYPE_LDR',\n");
+ std::string str(&buf_[pos], kLdrBufSize);
+ str = str.substr(0, str.find('\0'));
+ printf("'ldrinfo': %s,\n", JSONString(str).c_str());
+ pos += kLdrBufSize;
+ break;
+ }
+ case EVENT_TYPE_CREATETHREAD:
+ {
+ printf("'eventtype': 'EVENT_TYPE_CREATETHREAD',\n"
+ "'eventid': 0x%x,\n"
+ "'startaddr': 0x%x,\n",
+ IntAt(pos), IntAt(pos+4));
+ pos += 8;
+ break;
+ }
+ case EVENT_TYPE_THREADBEGIN:
+ {
+ printf("'eventtype': 'EVENT_TYPE_THREADBEGIN',\n"
+ "'parenteventid': 0x%x,\n"
+ "'startaddr': 0x%x,\n",
+ IntAt(pos), IntAt(pos+4));
+ pos += 8;
+ break;
+ }
+ case EVENT_TYPE_THREADNAME:
+ {
+ std::string str(&buf_[pos], kThreadNameBufSize);
+ str = str.substr(0, str.find('\0'));
+ printf("'eventtype': 'EVENT_TYPE_THREADNAME',\n"
+ "'threadname': %s,\n",
+ JSONString(str).c_str());
+ pos += kThreadNameBufSize;
+ break;
+ }
+ case EVENT_TYPE_PROCESSEXIT:
+ {
+ printf("'eventtype': 'EVENT_TYPE_PROCESSEXIT',\n");
+ break;
+ }
+ case EVENT_TYPE_THREADEXIT:
+ {
+ printf("'eventtype': 'EVENT_TYPE_THREADEXIT',\n");
+ break;
+ }
+ case EVENT_TYPE_ALLOCHEAP:
+ {
+ printf("'eventtype': 'EVENT_TYPE_ALLOCHEAP',\n"
+ "'heaphandle': 0x%x,\n"
+ "'heapflags': 0x%x,\n"
+ "'heapsize': %d,\n",
+ IntAt(pos), IntAt(pos+4), IntAt(pos+8));
+ pos += 12;
+ break;
+ }
+ case EVENT_TYPE_FREEHEAP:
+ {
+ printf("'eventtype': 'EVENT_TYPE_FREEHEAP',\n"
+ "'heaphandle': 0x%x,\n"
+ "'heapflags': 0x%x,\n"
+ "'heapptr': %d,\n",
+ IntAt(pos), IntAt(pos+4), IntAt(pos+8));
+ pos += 12;
+ break;
+ }
+ case EVENT_TYPE_SYSCALL:
+ {
+ int syscall = IntAt(pos);
+ printf("'eventtype': 'EVENT_TYPE_SYSCALL',\n"
+ "'syscall': 0x%x,\n", syscall);
+ pos += 16;
+
+ printf("'syscallargs': [\n");
+ for (int i = 0; i < 3; ++i) {
+ printf(" 0x%x,\n", IntAt(pos));
+ pos += 4;
+ }
+ printf("],\n");
+
+ printf("'retval': 0x%x,\n"
+ "'done': %f,\n",
+ IntAt(pos), rdn->MsFromStart(0, Int64At(pos+4)));
+ pos += 12;
+
+ if (syscalls.count(syscall) == 1) {
+ std::string sname = syscalls[syscall];
+ printf("'syscallname': %s,\n",
+ JSONString(sname).c_str());
+ // Mark system calls that we should consider "waiting" system
+ // calls, where we are not actually active.
+ if (sname.find("WaitFor") != std::string::npos ||
+ sname.find("RemoveIoCompletion") != std::string::npos) {
+ printf("'waiting': 1,\n");
+ }
+ }
+ break;
+ }
+ case EVENT_TYPE_ENTER_CS:
+ {
+ printf("'eventtype': 'EVENT_TYPE_ENTER_CS',\n"
+ "'critical_section': 0x%x,\n", IntAt(pos));
+ pos += 4;
+ break;
+ }
+ case EVENT_TYPE_TRYENTER_CS:
+ {
+ printf("'eventtype': 'EVENT_TYPE_TRYENTER_CS',\n"
+ "'critical_section': 0x%x,\n"
+ "'retval': 0x%x,\n",
+ IntAt(pos), IntAt(pos+4));
+ pos += 8;
+ break;
+ }
+ case EVENT_TYPE_LEAVE_CS:
+ {
+ printf("'eventtype': 'EVENT_TYPE_LEAVE_CS',\n"
+ "'critical_section': 0x%x,\n", IntAt(pos));
+ pos += 4;
+ break;
+ }
+ case EVENT_TYPE_APC:
+ {
+ int func_addr = IntAt(pos);
+ printf("'eventtype': 'EVENT_TYPE_APC',\n"
+ "'func_addr': 0x%x,\n"
+ "'func_addr_name': %s,\n"
+ "'ret_addr': 0x%x,\n"
+ "'done': %f,\n",
+ func_addr,
+ res ? JSONString(res->Unresolve(func_addr)).c_str() : "\"\"",
+ IntAt(pos+4), rdn->MsFromStart(0, Int64At(pos+8)));
+ pos += 16;
+ break;
+ }
+ default:
+ NOTREACHED("Unknown event type: %d", rt);
+ break;
+ }
+ printf("},\n");
+ }
+ printf("]);");
+ }
+
  // Read a 32-bit integer out of the local playground copy at byte offset pos.
  int IntAt(int pos) { return *reinterpret_cast<int*>(&buf_[pos]); }
  // Read a 64-bit integer out of the local playground copy at byte offset pos.
  __int64 Int64At(int pos) { return *reinterpret_cast<__int64*>(&buf_[pos]); }


 private:
  // Handle the process we install into or read back from.
  HANDLE proc_;
  // The address where we will keep our playground in the remote process.
  char* remote_addr_;
  // Lookup addresses from symbol names for ntdll.dll.
  SymResolver resolver_;
  // Command-line configuration (what to patch and log).
  Options options_;
  // A local copy of the playground data, we copy it into the remote process.
  char buf_[kPlaygroundSize];
};
+
+
// Entry point.  Parses the command line, launches the target application
// suspended (or attaches to a running pid), injects the tracing playground,
// resumes/waits for the target, then dumps the captured events as JSON on
// stdout.  Returns 1 on usage or launch/attach failure, 0 otherwise.
int main(int argc, char** argv) {
  bool use_symbols = false;
  bool attaching = false;
  bool launched = false;
  bool manual_quit = false;

  Playground::Options options;

  PROCESS_INFORMATION info = {0};

  argc--; argv++;

  while (argc > 0) {
    if (std::string("--symbols") == argv[0]) {
      use_symbols = true;
    } else if (std::string("--vista") == argv[0]) {
      options.set_vista(true);
    } else if (std::string("--log-heap") == argv[0]) {
      options.set_log_heap(true);
    } else if (std::string("--log-lock") == argv[0]) {
      options.set_log_lock(true);
    } else if (std::string("--manual-quit") == argv[0]) {
      manual_quit = true;
    } else if (argc >= 2 && std::string("--unwind") == argv[0]) {
      options.set_stack_unwind_depth(atoi(argv[1]));
      argc--; argv++;
    } else if (argc >= 2 && !launched && std::string("--attach") == argv[0]) {
      attaching = true;
      info.hProcess = OpenProcess(PROCESS_ALL_ACCESS, FALSE, atoi(argv[1]));
      // Fail fast on a bad pid / insufficient rights instead of crashing
      // later when we try to allocate in a NULL process handle.
      if (info.hProcess == NULL) {
        NOTREACHED("Failed to attach to pid %s: %d", argv[1], GetLastError());
        return 1;
      }
      launched = true;
      argc--; argv++;
    } else if (!launched) {
      STARTUPINFOA start_info = {0};
      start_info.cb = sizeof(start_info);

      // Launch suspended so we can patch before any target code runs.
      if (!CreateProcessA(NULL,
                          argv[0],
                          NULL,
                          NULL,
                          FALSE,
                          CREATE_SUSPENDED,
                          NULL,
                          NULL,
                          &start_info,
                          &info)) {
        NOTREACHED("Failed to launch \"%s\": %d\n", argv[0], GetLastError());
        return 1;
      }
      launched = true;
    } else {
      NOTREACHED("error parsing command line.");
    }
    argc--; argv++;
  }

  if (!launched) {
    printf("usage: traceline.exe \"app.exe my arguments\"\n"
           "  --attach 123: buggy support for attaching to a process\n"
           "  --unwind 16: unwind the stack to the specified max depth\n"
           "  --symbols: use symbols for stacktraces\n"
           "  --log-heap: log heap operations (alloc / free).\n"
           "  --log-lock: log lock (critical section) operations.\n");
    return 1;
  }


  // Handshake events: the target signals |exiting| on shutdown, and we
  // signal |exited| once we have copied the playground back out.
  HANDLE exiting = CreateEvent(NULL, FALSE, FALSE, NULL);
  HANDLE exited = CreateEvent(NULL, FALSE, FALSE, NULL);

  // The playground object is big (32MB), dynamically alloc.
  Playground* pg = new Playground(info.hProcess, options);

  pg->AllocateInRemote();
  pg->Patch();
  pg->PatchExit(exiting, exited);
  pg->CopyToRemote();

  RDTSCNormalizer rdn;
  rdn.Start();

  if (!attaching)
    ResumeThread(info.hThread);

  // Wait until we have been notified that it's exiting.
  if (manual_quit) {
    fprintf(stderr, "Press enter when you want stop tracing and collect.\n");
    fflush(stderr);
    getchar();
  } else {
    HANDLE whs[] = {exiting, info.hProcess};
    if (WaitForMultipleObjects(2, whs, FALSE, INFINITE) != WAIT_OBJECT_0) {
      NOTREACHED("Failed to correctly capture process shutdown.");
    }
  }

  pg->CopyFromRemote();

  if (use_symbols) {
    // Break in and get the symbols...
    SymResolver res(NULL, info.hProcess);
    pg->DumpJSON(&rdn, &res);
  } else {
    pg->DumpJSON(&rdn, NULL);
  }

  // Notify that it can exit now, we are done.
  SetEvent(exited);

  CloseHandle(exiting);
  CloseHandle(exited);
  CloseHandle(info.hProcess);
  if (info.hThread)  // NULL when we attached instead of launching.
    CloseHandle(info.hThread);

  delete pg;
  return 0;
}
diff --git a/chromium/tools/traceline/traceline/rdtsc.h b/chromium/tools/traceline/traceline/rdtsc.h
new file mode 100644
index 00000000000..7c3cb1a9676
--- /dev/null
+++ b/chromium/tools/traceline/traceline/rdtsc.h
@@ -0,0 +1,43 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef TRACELINE_RDTSC_H_
+#define TRACELINE_RDTSC_H_
+
+#include <windows.h>
+#include <powrprof.h>
+
+#include <map>
+
+#include "logging.h"
+
// Translates QueryPerformanceCounter stamps into milliseconds relative to a
// recorded session start.  (Despite the name, rdtsc itself is no longer used.)
class RDTSCNormalizer {
 public:
  RDTSCNormalizer() { }
  ~RDTSCNormalizer() { }

  // Capture the counter frequency and the session start stamp.
  void Start() {
    LARGE_INTEGER li;
    if (QueryPerformanceFrequency(&li) == 0) {
      NOTREACHED("");
    }
    freq_ = li.QuadPart;

    if (QueryPerformanceCounter(&li) == 0) {
      NOTREACHED("");
    }
    start_ = li.QuadPart;
  }

  // Milliseconds elapsed between Start() and |stamp|.  |procid| is accepted
  // for historical reasons and ignored.
  double MsFromStart(void* procid, __int64 stamp) {
    return (stamp - start_) / (freq_ / 1000.0);
  }

 private:
  __int64 freq_;   // Counter ticks per second.
  __int64 start_;  // Counter value at Start().
};
+
+#endif // TRACELINE_RDTSC_H_
diff --git a/chromium/tools/traceline/traceline/scripts/__init__.py b/chromium/tools/traceline/traceline/scripts/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/chromium/tools/traceline/traceline/scripts/__init__.py
diff --git a/chromium/tools/traceline/traceline/scripts/alloc.py b/chromium/tools/traceline/traceline/scripts/alloc.py
new file mode 100755
index 00000000000..ee4af220d59
--- /dev/null
+++ b/chromium/tools/traceline/traceline/scripts/alloc.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+from syscalls import syscalls
+
+
def parseEvents(z):
  """Print duration, start time and thread for every NtAllocateVirtualMemory.

  Syscall 17 is ntdll!NtAllocateVirtualMemory (see syscalls.py).  Each
  matching event prints one line: "<duration_ms> - <start_ms> - <thread_hex>".
  """
  for e in z:
    if e['eventtype'] == 'EVENT_TYPE_SYSCALL' and e['syscall'] == 17:
      delta = e['done'] - e['ms']
      # Parenthesized single-argument print works on Python 2 and 3 alike.
      print('%f - %f - %x' % (delta, e['ms'], e['thread']))
+
+
def main():
  """Execute the trace file named on the command line.

  The dump is a script that calls parseEvents([...]), so executing it feeds
  the event list to parseEvents() above.
  """
  # exec(compile(...)) is the Python 2/3-portable spelling of execfile(),
  # and the with-block closes the file deterministically.
  with open(sys.argv[1]) as f:
    exec(compile(f.read(), sys.argv[1], 'exec'))


if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/traceline/traceline/scripts/crit_sec.js b/chromium/tools/traceline/traceline/scripts/crit_sec.js
new file mode 100644
index 00000000000..906bba10f0a
--- /dev/null
+++ b/chromium/tools/traceline/traceline/scripts/crit_sec.js
@@ -0,0 +1,87 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// You should run this with v8, like v8_shell alloc.js datafile.json
+
// Format a number as lowercase hex (no "0x" prefix).
// Uses the unsigned right shift so 32-bit values with the high bit set
// (critical-section addresses >= 0x80000000) terminate; the signed shift
// used previously kept the sign bit and looped forever.  Also returns "0"
// for zero instead of the empty string.
function toHex(num) {
  if (num == 0)
    return "0";
  var str = "";
  var table = "0123456789abcdef";
  while (num != 0) {
    str = table.charAt(num & 0xf) + str;
    num >>>= 4;
  }
  return str;
}
+
// Check critical-section usage in a traceline event dump: buckets the lock
// events per critical section, verifies every leave matches an enter on the
// same thread, then reports locks held longer than 0.1ms.
function parseEvents(z) {
  var crits = { }
  var calls = { }

  // Bucket ENTER/TRYENTER/LEAVE events by critical-section address.
  for (var i = 0, il = z.length; i < il; ++i) {
    var e = z[i];

    if (e['eventtype'] == 'EVENT_TYPE_ENTER_CS' ||
        e['eventtype'] == 'EVENT_TYPE_TRYENTER_CS' ||
        e['eventtype'] == 'EVENT_TYPE_LEAVE_CS') {
      cs = e['critical_section'];
      if (!(cs in crits)) {
        crits[cs] = [ ];
      }
      crits[cs].push(e);
    }
  }

  // Verify that the locks get unlocked, and operations stay on the same thread.
  for (var key in crits) {
    var cs = key;
    var es = crits[key];

    var tid_stack = [ ];
    for (var j = 0, jl = es.length; j < jl; ++j) {
      var e = es[j];
      if (e['eventtype'] == 'EVENT_TYPE_ENTER_CS') {
        tid_stack.push(e);
      } else if (e['eventtype'] == 'EVENT_TYPE_TRYENTER_CS') {
        // Only a successful try-enter (non-zero retval) takes the lock.
        if (e['retval'] != 0)
          tid_stack.push(e);
      } else if (e['eventtype'] == 'EVENT_TYPE_LEAVE_CS') {
        if (tid_stack.length == 0) {
          print('fail ' + e);
        }
        var tid = tid_stack.pop()
        if (tid['thread'] != e['thread']) {
          print('fail ' + tid);
        }
      }
    }
  }

  // Look for long-held / contended locks. We can't really know it is
  // contended without looking if anyone is waiting on the embedded event...
  // Just look for locks are are held a long time? Not so good...
  for (var key in crits) {
    var cs = key;
    var es = crits[key];

    var tid_stack = [ ];
    for (var j = 0, jl = es.length; j < jl; ++j) {
      var e = es[j];
      if (e['eventtype'] == 'EVENT_TYPE_ENTER_CS') {
        tid_stack.push(e);
      } else if (e['eventtype'] == 'EVENT_TYPE_TRYENTER_CS') {
        if (e['retval'] != 0)
          tid_stack.push(e);
      } else if (e['eventtype'] == 'EVENT_TYPE_LEAVE_CS') {
        if (tid_stack.length == 0) {
          print('fail ' + e);
        }
        // Held-time is leave timestamp minus the matching enter's timestamp.
        var tid = tid_stack.pop();
        var dur = e['ms'] - tid['ms'];
        if (dur > 0.1) {
          print('Lock: 0x' + toHex(cs) + ' for ' + dur + ' at: ' + e['ms']);
        }
      }
    }
  }
}
diff --git a/chromium/tools/traceline/traceline/scripts/crit_sec.py b/chromium/tools/traceline/traceline/scripts/crit_sec.py
new file mode 100755
index 00000000000..ee710bd2032
--- /dev/null
+++ b/chromium/tools/traceline/traceline/scripts/crit_sec.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+import os
+
+from syscalls import syscalls
+
+
def parseEvents(z):
  """Group lock events by critical section and sanity-check their nesting.

  Prints each critical-section address seen, then verifies that every
  LEAVE matches a prior ENTER (or successful TRYENTER) on the same thread.

  Raises:
    ValueError: on an unbalanced leave or a lock released by a different
        thread than acquired it.  (The original raised a plain string,
        which has itself been a TypeError since Python 2.6.)
  """
  crits = { }
  for e in z:
    if (e['eventtype'] == 'EVENT_TYPE_ENTER_CS' or
        e['eventtype'] == 'EVENT_TYPE_TRYENTER_CS' or
        e['eventtype'] == 'EVENT_TYPE_LEAVE_CS'):
      cs = e['critical_section']
      if cs not in crits:
        crits[cs] = [ ]
      crits[cs].append(e)

  for cs, es in crits.items():
    print('cs: 0x%08x' % cs)

    tid_stack = [ ]
    for e in es:
      if e['eventtype'] == 'EVENT_TYPE_ENTER_CS':
        tid_stack.append(e)
      elif e['eventtype'] == 'EVENT_TYPE_TRYENTER_CS':
        # Only a successful try-enter (non-zero retval) takes the lock.
        if e['retval'] != 0:
          tid_stack.append(e)
      elif e['eventtype'] == 'EVENT_TYPE_LEAVE_CS':
        if not tid_stack:
          raise ValueError('unbalanced leave: %r' % (e,))
        tid = tid_stack.pop()
        if tid['thread'] != e['thread']:
          raise ValueError('lock changed threads: %r -- %r' % (tid, e))

    # Critical section left locked at the end of the trace; tolerated.
    if tid_stack:
      pass
+
+
def main():
  """Execute the trace file named on the command line.

  The dump is a script that calls parseEvents([...]), so executing it feeds
  the event list to parseEvents() above.
  """
  # exec(compile(...)) is the Python 2/3-portable spelling of execfile(),
  # and the with-block closes the file deterministically.
  with open(sys.argv[1]) as f:
    exec(compile(f.read(), sys.argv[1], 'exec'))


if __name__ == '__main__':
  main()
diff --git a/chromium/tools/traceline/traceline/scripts/filter_short.py b/chromium/tools/traceline/traceline/scripts/filter_short.py
new file mode 100755
index 00000000000..1b73bf96af0
--- /dev/null
+++ b/chromium/tools/traceline/traceline/scripts/filter_short.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Takes an input JSON, and filters out all system call events that
+took less than 0.2ms.
+
+This helps trim down the JSON data to only the most interesting / time critical
+events.
+"""
+
+import sys
+import re
+
+
def parseEvents(z):
  """Re-emit the event list, dropping events that finished in < 0.2ms.

  Only events carrying both an 'ms' (start) and 'done' (end) timestamp are
  eligible for filtering; everything else passes through unchanged.  Output
  is wrapped in parseEvents([...]) so it remains a loadable trace.
  """
  print('parseEvents([')
  for e in z:
    if 'ms' in e and 'done' in e:
      dur = e['done'] - e['ms']
      if dur < 0.2:
        continue
    # Ugly regex to remove the L suffix on large python numbers.
    print('%s,' % re.sub(r'([0-9])L\b', r'\1', str(e)))
  print('])')
+
+
def main():
  """Execute the trace file named on the command line.

  The dump is a script that calls parseEvents([...]), so executing it feeds
  the event list to the filter above.
  """
  # exec(compile(...)) is the Python 2/3-portable spelling of execfile(),
  # and the with-block closes the file deterministically.
  with open(sys.argv[1]) as f:
    exec(compile(f.read(), sys.argv[1], 'exec'))


if __name__ == '__main__':
  main()
diff --git a/chromium/tools/traceline/traceline/scripts/filter_split.sh b/chromium/tools/traceline/traceline/scripts/filter_split.sh
new file mode 100755
index 00000000000..876488e3111
--- /dev/null
+++ b/chromium/tools/traceline/traceline/scripts/filter_split.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Runs filter_short on the individual splits of a json file, and puts
+# everything back together into a single output json. This is useful when you
+# want to filter a large json file that would otherwise OOM Python.
+
# Open the combined file with a single header; each filtered split below is
# stripped of its own header/footer before being appended.
echo "parseEvents([" > totalsplit
for f in split.*; do
  # tail -n +2 drops filter_short's leading "parseEvents([" line and
  # head -n -1 drops its trailing "])" line.
  ./scripts/filter_short.py "$f" | tail -n +2 | head -n -1 >> totalsplit
done
echo "]);" >> totalsplit
diff --git a/chromium/tools/traceline/traceline/scripts/heap.js b/chromium/tools/traceline/traceline/scripts/heap.js
new file mode 100644
index 00000000000..9025ac3d95f
--- /dev/null
+++ b/chromium/tools/traceline/traceline/scripts/heap.js
@@ -0,0 +1,69 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// You should run this with v8, like v8_shell alloc.js datafile.json
+
// Format a number as lowercase hex (no "0x" prefix).
// Uses the unsigned right shift so 32-bit values with the high bit set
// (heap handles/pointers >= 0x80000000) terminate; the signed shift used
// previously kept the sign bit and looped forever.  Also returns "0" for
// zero instead of the empty string.
function toHex(num) {
  if (num == 0)
    return "0";
  var str = "";
  var table = "0123456789abcdef";
  while (num != 0) {
    str = table.charAt(num & 0xf) + str;
    num >>>= 4;
  }
  return str;
}
+
// Print every key of obj on one line and its value, indented, on the next.
function dump(obj) {
  var keys = Object.keys(obj);
  for (var i = 0; i < keys.length; ++i) {
    var k = keys[i];
    print('key: ' + k);
    print('  ' + obj[k]);
  }
}
+
// Accumulator that retains the n highest-scored (num, data) pairs seen.
function TopN(n) {
  this.n = n;           // Capacity.
  this.min = 0;         // Smallest score worth keeping once full.
  this.sorted = [ ];    // [num, data] pairs, kept sorted descending by num.
}
+
// Record (num, data), retaining only the top this.n entries by num.
TopN.prototype.add =
function(num, data) {
  if (num < this.min)
    return;

  this.sorted.push([num, data]);
  this.sorted.sort(function(a, b) { return b[0] - a[0] });
  if (this.sorted.length > this.n)
    this.sorted.pop();

  // Once at capacity, remember the smallest retained score so the early-out
  // above can reject hopeless entries cheaply.  (The original read
  // this.sorted.lenth — a typo yielding undefined — and also stored the
  // whole [num, data] pair rather than the score, so min never worked.)
  if (this.sorted.length >= this.n)
    this.min = this.sorted[this.sorted.length - 1][0];
};
+
// Return only the data payloads, in descending score order.
TopN.prototype.datas =
function() {
  return this.sorted.map(function(pair) { return pair[1]; });
};
+
// Find the 1000 largest heap allocations in the trace and dump each event.
function parseEvents(z) {
  var topper = new TopN(1000);

  // Rank every allocation event by its size.
  for (var i = 0, il = z.length; i < il; ++i) {
    var e = z[i];

    if (e['eventtype'] == 'EVENT_TYPE_ALLOCHEAP') {
      topper.add(e['heapsize'], e);
    }
  }

  var datas = topper.datas();
  for (var i = 0, il = datas.length; i < il; ++i) {
    dump(datas[i]);
  }
}
diff --git a/chromium/tools/traceline/traceline/scripts/scstats.py b/chromium/tools/traceline/traceline/scripts/scstats.py
new file mode 100755
index 00000000000..e2f28dc94a1
--- /dev/null
+++ b/chromium/tools/traceline/traceline/scripts/scstats.py
@@ -0,0 +1,32 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+from syscalls import syscalls
+
+
def parseEvents(z):
  """Print a timing line for every completed syscall event.

  Each event with 'done' > 0 prints
  "<duration_ms> - <start_ms> - <thread_hex> - <id> <name>", where the name
  comes from the syscalls table.  Per-syscall total time is accumulated in
  `calls` for the (currently disabled) summary below.
  """
  calls = { }
  for e in z:
    if e['eventtype'] == 'EVENT_TYPE_SYSCALL' and e['done'] > 0:
      delta = e['done'] - e['ms']
      syscall = e['syscall']
      calls[syscall] = calls.get(syscall, 0) + delta
      # Parenthesized single-argument print works on Python 2 and 3 alike.
      print('%f - %f - %x - %d %s' % (
          delta, e['ms'], e['thread'], syscall,
          syscalls.get(syscall, 'unknown')))

  #for syscall, delta in calls.items():
  #  print('%f - %d %s' % (delta, syscall, syscalls.get(syscall, 'unknown')))
+
+
def main():
  """Execute the trace file named on the command line.

  The dump is a script that calls parseEvents([...]), so executing it feeds
  the event list to parseEvents() above.
  """
  # exec(compile(...)) is the Python 2/3-portable spelling of execfile(),
  # and the with-block closes the file deterministically.
  with open(sys.argv[1]) as f:
    exec(compile(f.read(), sys.argv[1], 'exec'))


if __name__ == '__main__':
  main()
diff --git a/chromium/tools/traceline/traceline/scripts/split.py b/chromium/tools/traceline/traceline/scripts/split.py
new file mode 100755
index 00000000000..6f20e0457e4
--- /dev/null
+++ b/chromium/tools/traceline/traceline/scripts/split.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Splits a single json file (read from stdin) into separate files of 40k
+records, named split.X.
+"""
+
+import sys
+
+
def main():
  """Split a traceline JSON stream (stdin) into split.N chunk files.

  Counts "},\r\n" record terminators; after every 40000 records the current
  chunk is finished with a "}]);\r\n" footer (which also closes the boundary
  record, so the boundary line itself is skipped) and a new split.N file is
  started with a "parseEvents([\r\n" header.  Note split.0 intentionally has
  no header — filter_split.sh prepends one when reassembling.
  """
  filecount = 0
  count = 0
  f = open('split.0', 'wb')
  try:
    for l in sys.stdin:
      if l == "},\r\n":
        count += 1
        if count == 40000:
          f.write("}]);\r\n")
          # Close the finished chunk before opening the next one; the
          # original leaked every handle and relied on interpreter exit to
          # flush them.
          f.close()
          count = 0
          filecount += 1
          f = open('split.%d' % filecount, 'wb')
          f.write("parseEvents([\r\n")
          continue
      f.write(l)
  finally:
    f.close()


if __name__ == '__main__':
  main()
diff --git a/chromium/tools/traceline/traceline/scripts/syscalls.py b/chromium/tools/traceline/traceline/scripts/syscalls.py
new file mode 100644
index 00000000000..6800b45c128
--- /dev/null
+++ b/chromium/tools/traceline/traceline/scripts/syscalls.py
@@ -0,0 +1,942 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+syscalls = {
+0: "ntdll.dll!NtAcceptConnectPort",
+1: "ntdll.dll!NtAccessCheck",
+2: "ntdll.dll!ZwAccessCheckAndAuditAlarm",
+3: "ntdll.dll!NtAccessCheckByType",
+4: "ntdll.dll!NtAccessCheckByTypeAndAuditAlarm",
+5: "ntdll.dll!NtAccessCheckByTypeResultList",
+6: "ntdll.dll!NtAccessCheckByTypeResultListAndAuditAlarm",
+7: "ntdll.dll!ZwAccessCheckByTypeResultListAndAuditAlarmByHandle",
+8: "ntdll.dll!NtAddAtom",
+9: "ntdll.dll!ZwAddBootEntry",
+10: "ntdll.dll!ZwAdjustGroupsToken",
+11: "ntdll.dll!ZwAdjustPrivilegesToken",
+12: "ntdll.dll!NtAlertResumeThread",
+13: "ntdll.dll!NtAlertThread",
+14: "ntdll.dll!ZwAllocateLocallyUniqueId",
+15: "ntdll.dll!NtAllocateUserPhysicalPages",
+16: "ntdll.dll!NtAllocateUuids",
+17: "ntdll.dll!NtAllocateVirtualMemory",
+18: "ntdll.dll!ZwAreMappedFilesTheSame",
+19: "ntdll.dll!ZwAssignProcessToJobObject",
+20: "ntdll.dll!ZwCallbackReturn",
+21: "ntdll.dll!NtCancelDeviceWakeupRequest",
+22: "ntdll.dll!ZwCancelIoFile",
+23: "ntdll.dll!ZwCancelTimer",
+24: "ntdll.dll!NtClearEvent",
+25: "ntdll.dll!NtClose",
+26: "ntdll.dll!ZwCloseObjectAuditAlarm",
+27: "ntdll.dll!NtCompactKeys",
+28: "ntdll.dll!ZwCompareTokens",
+29: "ntdll.dll!NtCompleteConnectPort",
+30: "ntdll.dll!ZwCompressKey",
+31: "ntdll.dll!NtConnectPort",
+32: "ntdll.dll!ZwContinue",
+33: "ntdll.dll!ZwCreateDebugObject",
+34: "ntdll.dll!ZwCreateDirectoryObject",
+35: "ntdll.dll!NtCreateEvent",
+36: "ntdll.dll!NtCreateEventPair",
+37: "ntdll.dll!NtCreateFile",
+38: "ntdll.dll!NtCreateIoCompletion",
+39: "ntdll.dll!ZwCreateJobObject",
+40: "ntdll.dll!NtCreateJobSet",
+41: "ntdll.dll!ZwCreateKey",
+42: "ntdll.dll!ZwCreateMailslotFile",
+43: "ntdll.dll!ZwCreateMutant",
+44: "ntdll.dll!ZwCreateNamedPipeFile",
+45: "ntdll.dll!NtCreatePagingFile",
+46: "ntdll.dll!ZwCreatePort",
+47: "ntdll.dll!ZwCreateProcess",
+48: "ntdll.dll!ZwCreateProcessEx",
+49: "ntdll.dll!ZwCreateProfile",
+50: "ntdll.dll!NtCreateSection",
+51: "ntdll.dll!NtCreateSemaphore",
+52: "ntdll.dll!ZwCreateSymbolicLinkObject",
+53: "ntdll.dll!NtCreateThread",
+54: "ntdll.dll!ZwCreateTimer",
+55: "ntdll.dll!NtCreateToken",
+56: "ntdll.dll!ZwCreateWaitablePort",
+57: "ntdll.dll!NtDebugActiveProcess",
+58: "ntdll.dll!ZwDebugContinue",
+59: "ntdll.dll!ZwDelayExecution",
+60: "ntdll.dll!ZwDeleteAtom",
+61: "ntdll.dll!NtDeleteBootEntry",
+62: "ntdll.dll!NtDeleteFile",
+63: "ntdll.dll!ZwDeleteKey",
+64: "ntdll.dll!NtDeleteObjectAuditAlarm",
+65: "ntdll.dll!NtDeleteValueKey",
+66: "ntdll.dll!ZwDeviceIoControlFile",
+67: "ntdll.dll!NtDisplayString",
+68: "ntdll.dll!ZwDuplicateObject",
+69: "ntdll.dll!NtDuplicateToken",
+70: "ntdll.dll!ZwEnumerateBootEntries",
+71: "ntdll.dll!ZwEnumerateKey",
+72: "ntdll.dll!ZwEnumerateSystemEnvironmentValuesEx",
+73: "ntdll.dll!NtEnumerateValueKey",
+74: "ntdll.dll!ZwExtendSection",
+75: "ntdll.dll!NtFilterToken",
+76: "ntdll.dll!NtFindAtom",
+77: "ntdll.dll!ZwFlushBuffersFile",
+78: "ntdll.dll!ZwFlushInstructionCache",
+79: "ntdll.dll!NtFlushKey",
+80: "ntdll.dll!ZwFlushVirtualMemory",
+81: "ntdll.dll!NtFlushWriteBuffer",
+82: "ntdll.dll!NtFreeUserPhysicalPages",
+83: "ntdll.dll!NtFreeVirtualMemory",
+84: "ntdll.dll!NtFsControlFile",
+85: "ntdll.dll!NtGetContextThread",
+86: "ntdll.dll!NtGetDevicePowerState",
+87: "ntdll.dll!ZwGetPlugPlayEvent",
+88: "ntdll.dll!NtGetWriteWatch",
+89: "ntdll.dll!NtImpersonateAnonymousToken",
+90: "ntdll.dll!ZwImpersonateClientOfPort",
+91: "ntdll.dll!ZwImpersonateThread",
+92: "ntdll.dll!ZwInitializeRegistry",
+93: "ntdll.dll!NtInitiatePowerAction",
+94: "ntdll.dll!ZwIsProcessInJob",
+95: "ntdll.dll!NtIsSystemResumeAutomatic",
+96: "ntdll.dll!ZwListenPort",
+97: "ntdll.dll!NtLoadDriver",
+98: "ntdll.dll!NtLoadKey",
+99: "ntdll.dll!NtLoadKey2",
+100: "ntdll.dll!NtLockFile",
+101: "ntdll.dll!ZwLockProductActivationKeys",
+102: "ntdll.dll!NtLockRegistryKey",
+103: "ntdll.dll!ZwLockVirtualMemory",
+104: "ntdll.dll!ZwMakePermanentObject",
+105: "ntdll.dll!NtMakeTemporaryObject",
+106: "ntdll.dll!NtMapUserPhysicalPages",
+107: "ntdll.dll!ZwMapUserPhysicalPagesScatter",
+108: "ntdll.dll!ZwMapViewOfSection",
+109: "ntdll.dll!NtModifyBootEntry",
+110: "ntdll.dll!NtNotifyChangeDirectoryFile",
+111: "ntdll.dll!NtNotifyChangeKey",
+112: "ntdll.dll!NtNotifyChangeMultipleKeys",
+113: "ntdll.dll!ZwOpenDirectoryObject",
+114: "ntdll.dll!NtOpenEvent",
+115: "ntdll.dll!NtOpenEventPair",
+116: "ntdll.dll!NtOpenFile",
+117: "ntdll.dll!ZwOpenIoCompletion",
+118: "ntdll.dll!ZwOpenJobObject",
+119: "ntdll.dll!ZwOpenKey",
+120: "ntdll.dll!NtOpenMutant",
+121: "ntdll.dll!ZwOpenObjectAuditAlarm",
+122: "ntdll.dll!ZwOpenProcess",
+123: "ntdll.dll!ZwOpenProcessToken",
+124: "ntdll.dll!ZwOpenProcessTokenEx",
+125: "ntdll.dll!NtOpenSection",
+126: "ntdll.dll!NtOpenSemaphore",
+127: "ntdll.dll!NtOpenSymbolicLinkObject",
+128: "ntdll.dll!ZwOpenThread",
+129: "ntdll.dll!NtOpenThreadToken",
+130: "ntdll.dll!NtOpenThreadTokenEx",
+131: "ntdll.dll!ZwOpenTimer",
+132: "ntdll.dll!NtPlugPlayControl",
+133: "ntdll.dll!ZwPowerInformation",
+134: "ntdll.dll!ZwPrivilegeCheck",
+135: "ntdll.dll!ZwPrivilegeObjectAuditAlarm",
+136: "ntdll.dll!NtPrivilegedServiceAuditAlarm",
+137: "ntdll.dll!ZwProtectVirtualMemory",
+138: "ntdll.dll!ZwPulseEvent",
+139: "ntdll.dll!ZwQueryAttributesFile",
+140: "ntdll.dll!ZwQueryBootEntryOrder",
+141: "ntdll.dll!ZwQueryBootOptions",
+142: "ntdll.dll!NtQueryDebugFilterState",
+143: "ntdll.dll!NtQueryDefaultLocale",
+144: "ntdll.dll!ZwQueryDefaultUILanguage",
+145: "ntdll.dll!ZwQueryDirectoryFile",
+146: "ntdll.dll!ZwQueryDirectoryObject",
+147: "ntdll.dll!ZwQueryEaFile",
+148: "ntdll.dll!NtQueryEvent",
+149: "ntdll.dll!ZwQueryFullAttributesFile",
+150: "ntdll.dll!NtQueryInformationAtom",
+151: "ntdll.dll!ZwQueryInformationFile",
+152: "ntdll.dll!ZwQueryInformationJobObject",
+153: "ntdll.dll!ZwQueryInformationPort",
+154: "ntdll.dll!ZwQueryInformationProcess",
+155: "ntdll.dll!NtQueryInformationThread",
+156: "ntdll.dll!ZwQueryInformationToken",
+157: "ntdll.dll!NtQueryInstallUILanguage",
+158: "ntdll.dll!NtQueryIntervalProfile",
+159: "ntdll.dll!NtQueryIoCompletion",
+160: "ntdll.dll!ZwQueryKey",
+161: "ntdll.dll!NtQueryMultipleValueKey",
+162: "ntdll.dll!NtQueryMutant",
+163: "ntdll.dll!NtQueryObject",
+164: "ntdll.dll!NtQueryOpenSubKeys",
+165: "ntdll.dll!NtQueryPerformanceCounter",
+166: "ntdll.dll!ZwQueryQuotaInformationFile",
+167: "ntdll.dll!ZwQuerySection",
+168: "ntdll.dll!NtQuerySecurityObject",
+169: "ntdll.dll!ZwQuerySemaphore",
+170: "ntdll.dll!ZwQuerySymbolicLinkObject",
+171: "ntdll.dll!ZwQuerySystemEnvironmentValue",
+172: "ntdll.dll!ZwQuerySystemEnvironmentValueEx",
+173: "ntdll.dll!NtQuerySystemInformation",
+174: "ntdll.dll!NtQuerySystemTime",
+175: "ntdll.dll!ZwQueryTimer",
+176: "ntdll.dll!NtQueryTimerResolution",
+177: "ntdll.dll!ZwQueryValueKey",
+178: "ntdll.dll!NtQueryVirtualMemory",
+179: "ntdll.dll!NtQueryVolumeInformationFile",
+180: "ntdll.dll!NtQueueApcThread",
+181: "ntdll.dll!ZwRaiseException",
+182: "ntdll.dll!ZwRaiseHardError",
+183: "ntdll.dll!NtReadFile",
+184: "ntdll.dll!NtReadFileScatter",
+185: "ntdll.dll!ZwReadRequestData",
+186: "ntdll.dll!NtReadVirtualMemory",
+187: "ntdll.dll!ZwRegisterThreadTerminatePort",
+188: "ntdll.dll!ZwReleaseMutant",
+189: "ntdll.dll!NtReleaseSemaphore",
+190: "ntdll.dll!ZwRemoveIoCompletion",
+191: "ntdll.dll!ZwRemoveProcessDebug",
+192: "ntdll.dll!ZwRenameKey",
+193: "ntdll.dll!ZwReplaceKey",
+194: "ntdll.dll!ZwReplyPort",
+195: "ntdll.dll!NtReplyWaitReceivePort",
+196: "ntdll.dll!NtReplyWaitReceivePortEx",
+197: "ntdll.dll!NtReplyWaitReplyPort",
+198: "ntdll.dll!ZwRequestDeviceWakeup",
+199: "ntdll.dll!ZwRequestPort",
+200: "ntdll.dll!NtRequestWaitReplyPort",
+201: "ntdll.dll!ZwRequestWakeupLatency",
+202: "ntdll.dll!NtResetEvent",
+203: "ntdll.dll!ZwResetWriteWatch",
+204: "ntdll.dll!NtRestoreKey",
+205: "ntdll.dll!ZwResumeProcess",
+206: "ntdll.dll!ZwResumeThread",
+207: "ntdll.dll!NtSaveKey",
+208: "ntdll.dll!NtSaveKeyEx",
+209: "ntdll.dll!NtSaveMergedKeys",
+210: "ntdll.dll!NtSecureConnectPort",
+211: "ntdll.dll!ZwSetBootEntryOrder",
+212: "ntdll.dll!ZwSetBootOptions",
+213: "ntdll.dll!ZwSetContextThread",
+214: "ntdll.dll!NtSetDebugFilterState",
+215: "ntdll.dll!NtSetDefaultHardErrorPort",
+216: "ntdll.dll!NtSetDefaultLocale",
+217: "ntdll.dll!ZwSetDefaultUILanguage",
+218: "ntdll.dll!ZwSetEaFile",
+219: "ntdll.dll!NtSetEvent",
+220: "ntdll.dll!NtSetEventBoostPriority",
+221: "ntdll.dll!NtSetHighEventPair",
+222: "ntdll.dll!NtSetHighWaitLowEventPair",
+223: "ntdll.dll!ZwSetInformationDebugObject",
+224: "ntdll.dll!ZwSetInformationFile",
+225: "ntdll.dll!ZwSetInformationJobObject",
+226: "ntdll.dll!ZwSetInformationKey",
+227: "ntdll.dll!ZwSetInformationObject",
+228: "ntdll.dll!ZwSetInformationProcess",
+229: "ntdll.dll!ZwSetInformationThread",
+230: "ntdll.dll!ZwSetInformationToken",
+231: "ntdll.dll!NtSetIntervalProfile",
+232: "ntdll.dll!NtSetIoCompletion",
+233: "ntdll.dll!ZwSetLdtEntries",
+234: "ntdll.dll!ZwSetLowEventPair",
+235: "ntdll.dll!ZwSetLowWaitHighEventPair",
+236: "ntdll.dll!ZwSetQuotaInformationFile",
+237: "ntdll.dll!NtSetSecurityObject",
+238: "ntdll.dll!ZwSetSystemEnvironmentValue",
+239: "ntdll.dll!ZwSetSystemEnvironmentValueEx",
+240: "ntdll.dll!ZwSetSystemInformation",
+241: "ntdll.dll!ZwSetSystemPowerState",
+242: "ntdll.dll!ZwSetSystemTime",
+243: "ntdll.dll!ZwSetThreadExecutionState",
+244: "ntdll.dll!ZwSetTimer",
+245: "ntdll.dll!NtSetTimerResolution",
+246: "ntdll.dll!ZwSetUuidSeed",
+247: "ntdll.dll!ZwSetValueKey",
+248: "ntdll.dll!NtSetVolumeInformationFile",
+249: "ntdll.dll!ZwShutdownSystem",
+250: "ntdll.dll!ZwSignalAndWaitForSingleObject",
+251: "ntdll.dll!NtStartProfile",
+252: "ntdll.dll!ZwStopProfile",
+253: "ntdll.dll!ZwSuspendProcess",
+254: "ntdll.dll!ZwSuspendThread",
+255: "ntdll.dll!NtSystemDebugControl",
+256: "ntdll.dll!ZwTerminateJobObject",
+257: "ntdll.dll!ZwTerminateProcess",
+258: "ntdll.dll!ZwTerminateThread",
+259: "ntdll.dll!NtTestAlert",
+260: "ntdll.dll!NtTraceEvent",
+261: "ntdll.dll!NtTranslateFilePath",
+262: "ntdll.dll!ZwUnloadDriver",
+263: "ntdll.dll!NtUnloadKey",
+264: "ntdll.dll!ZwUnloadKeyEx",
+265: "ntdll.dll!ZwUnlockFile",
+266: "ntdll.dll!NtUnlockVirtualMemory",
+267: "ntdll.dll!NtUnmapViewOfSection",
+268: "ntdll.dll!NtVdmControl",
+269: "ntdll.dll!NtWaitForDebugEvent",
+270: "ntdll.dll!NtWaitForMultipleObjects",
+271: "ntdll.dll!ZwWaitForSingleObject",
+272: "ntdll.dll!ZwWaitHighEventPair",
+273: "ntdll.dll!NtWaitLowEventPair",
+274: "ntdll.dll!NtWriteFile",
+275: "ntdll.dll!NtWriteFileGather",
+276: "ntdll.dll!NtWriteRequestData",
+277: "ntdll.dll!NtWriteVirtualMemory",
+278: "ntdll.dll!ZwYieldExecution",
+279: "ntdll.dll!ZwCreateKeyedEvent",
+280: "ntdll.dll!NtOpenKeyedEvent",
+281: "ntdll.dll!NtReleaseKeyedEvent",
+282: "ntdll.dll!NtWaitForKeyedEvent",
+283: "ntdll.dll!ZwQueryPortInformationProcess",
+4096: "gdi32.dll!NtGdiAbortDoc",
+4097: "gdi32.dll!NtGdiAbortPath",
+4098: "gdi32.dll!NtGdiAddFontResourceW",
+4099: "gdi32.dll!NtGdiAddRemoteFontToDC",
+4100: "gdi32.dll!NtGdiAddFontMemResourceEx",
+4101: "gdi32.dll!NtGdiRemoveMergeFont",
+4102: "gdi32.dll!NtGdiAddRemoteMMInstanceToDC",
+4103: "gdi32.dll!NtGdiAlphaBlend",
+4104: "gdi32.dll!NtGdiAngleArc",
+4105: "gdi32.dll!NtGdiAnyLinkedFonts",
+4106: "gdi32.dll!NtGdiFontIsLinked",
+4107: "gdi32.dll!NtGdiArcInternal",
+4108: "gdi32.dll!NtGdiBeginPath",
+4109: "gdi32.dll!NtGdiBitBlt",
+4110: "gdi32.dll!NtGdiCancelDC",
+4111: "gdi32.dll!NtGdiCheckBitmapBits",
+4112: "gdi32.dll!NtGdiCloseFigure",
+4113: "gdi32.dll!NtGdiClearBitmapAttributes",
+4114: "gdi32.dll!NtGdiClearBrushAttributes",
+4115: "gdi32.dll!NtGdiColorCorrectPalette",
+4116: "gdi32.dll!NtGdiCombineRgn",
+4117: "gdi32.dll!CombineTransform",
+4118: "gdi32.dll!NtGdiComputeXformCoefficients",
+4119: "gdi32.dll!NtGdiConsoleTextOut",
+4120: "gdi32.dll!NtGdiConvertMetafileRect",
+4121: "gdi32.dll!NtGdiCreateBitmap",
+4122: "gdi32.dll!NtGdiCreateClientObj",
+4123: "gdi32.dll!NtGdiCreateColorSpace",
+4124: "gdi32.dll!NtGdiCreateColorTransform",
+4125: "gdi32.dll!NtGdiCreateCompatibleBitmap",
+4126: "gdi32.dll!NtGdiCreateCompatibleDC",
+4127: "gdi32.dll!NtGdiCreateDIBBrush",
+4128: "gdi32.dll!NtGdiCreateDIBitmapInternal",
+4129: "gdi32.dll!NtGdiCreateDIBSection",
+4130: "gdi32.dll!NtGdiCreateEllipticRgn",
+4131: "gdi32.dll!NtGdiCreateHalftonePalette",
+4132: "gdi32.dll!NtGdiCreateHatchBrushInternal",
+4133: "gdi32.dll!NtGdiCreateMetafileDC",
+4134: "gdi32.dll!NtGdiCreatePaletteInternal",
+4135: "gdi32.dll!NtGdiCreatePatternBrushInternal",
+4136: "gdi32.dll!CreatePen",
+4137: "gdi32.dll!NtGdiCreateRectRgn",
+4138: "gdi32.dll!CreateRoundRectRgn",
+4139: "gdi32.dll!NtGdiCreateServerMetaFile",
+4140: "gdi32.dll!NtGdiCreateSolidBrush",
+4141: "gdi32.dll!NtGdiD3dContextCreate",
+4142: "gdi32.dll!NtGdiD3dContextDestroy",
+4143: "gdi32.dll!NtGdiD3dContextDestroyAll",
+4144: "gdi32.dll!NtGdiD3dValidateTextureStageState",
+4145: "gdi32.dll!NtGdiD3dDrawPrimitives2",
+4146: "gdi32.dll!NtGdiDdGetDriverState",
+4147: "gdi32.dll!NtGdiDdAddAttachedSurface",
+4148: "gdi32.dll!NtGdiDdAlphaBlt",
+4149: "gdi32.dll!NtGdiDdAttachSurface",
+4150: "gdi32.dll!NtGdiDdBeginMoCompFrame",
+4151: "gdi32.dll!NtGdiDdBlt",
+4152: "gdi32.dll!NtGdiDdCanCreateSurface",
+4153: "gdi32.dll!NtGdiDdCanCreateD3DBuffer",
+4154: "gdi32.dll!NtGdiDdColorControl",
+4155: "gdi32.dll!NtGdiDdCreateDirectDrawObject",
+4156: "gdi32.dll!NtGdiDdCreateSurface",
+4157: "gdi32.dll!NtGdiDdCreateD3DBuffer",
+4158: "gdi32.dll!NtGdiDdCreateMoComp",
+4159: "gdi32.dll!NtGdiDdCreateSurfaceObject",
+4160: "gdi32.dll!NtGdiDdDeleteDirectDrawObject",
+4161: "gdi32.dll!NtGdiDdDeleteSurfaceObject",
+4162: "gdi32.dll!NtGdiDdDestroyMoComp",
+4163: "gdi32.dll!NtGdiDdDestroySurface",
+4164: "gdi32.dll!NtGdiDdDestroyD3DBuffer",
+4165: "gdi32.dll!NtGdiDdEndMoCompFrame",
+4166: "gdi32.dll!NtGdiDdFlip",
+4167: "gdi32.dll!NtGdiDdFlipToGDISurface",
+4168: "gdi32.dll!NtGdiDdGetAvailDriverMemory",
+4169: "gdi32.dll!NtGdiDdGetBltStatus",
+4170: "gdi32.dll!NtGdiDdGetDC",
+4171: "gdi32.dll!NtGdiDdGetDriverInfo",
+4172: "gdi32.dll!NtGdiDdGetDxHandle",
+4173: "gdi32.dll!NtGdiDdGetFlipStatus",
+4174: "gdi32.dll!NtGdiDdGetInternalMoCompInfo",
+4175: "gdi32.dll!NtGdiDdGetMoCompBuffInfo",
+4176: "gdi32.dll!NtGdiDdGetMoCompGuids",
+4177: "gdi32.dll!NtGdiDdGetMoCompFormats",
+4178: "gdi32.dll!NtGdiDdGetScanLine",
+4179: "gdi32.dll!NtGdiDdLock",
+4180: "gdi32.dll!NtGdiDdLockD3D",
+4181: "gdi32.dll!NtGdiDdQueryDirectDrawObject",
+4182: "gdi32.dll!NtGdiDdQueryMoCompStatus",
+4183: "gdi32.dll!NtGdiDdReenableDirectDrawObject",
+4184: "gdi32.dll!NtGdiDdReleaseDC",
+4185: "gdi32.dll!NtGdiDdRenderMoComp",
+4186: "gdi32.dll!NtGdiDdResetVisrgn",
+4187: "gdi32.dll!NtGdiDdSetColorKey",
+4188: "gdi32.dll!NtGdiDdSetExclusiveMode",
+4189: "gdi32.dll!NtGdiDdSetGammaRamp",
+4190: "gdi32.dll!NtGdiDdCreateSurfaceEx",
+4191: "gdi32.dll!NtGdiDdSetOverlayPosition",
+4192: "gdi32.dll!NtGdiDdUnattachSurface",
+4193: "gdi32.dll!NtGdiDdUnlock",
+4194: "gdi32.dll!NtGdiDdUnlockD3D",
+4195: "gdi32.dll!NtGdiDdUpdateOverlay",
+4196: "gdi32.dll!NtGdiDdWaitForVerticalBlank",
+4197: "gdi32.dll!NtGdiDvpCanCreateVideoPort",
+4198: "gdi32.dll!NtGdiDvpColorControl",
+4199: "gdi32.dll!NtGdiDvpCreateVideoPort",
+4200: "gdi32.dll!NtGdiDvpDestroyVideoPort",
+4201: "gdi32.dll!NtGdiDvpFlipVideoPort",
+4202: "gdi32.dll!NtGdiDvpGetVideoPortBandwidth",
+4203: "gdi32.dll!NtGdiDvpGetVideoPortField",
+4204: "gdi32.dll!NtGdiDvpGetVideoPortFlipStatus",
+4205: "gdi32.dll!NtGdiDvpGetVideoPortInputFormats",
+4206: "gdi32.dll!NtGdiDvpGetVideoPortLine",
+4207: "gdi32.dll!NtGdiDvpGetVideoPortOutputFormats",
+4208: "gdi32.dll!NtGdiDvpGetVideoPortConnectInfo",
+4209: "gdi32.dll!NtGdiDvpGetVideoSignalStatus",
+4210: "gdi32.dll!NtGdiDvpUpdateVideoPort",
+4211: "gdi32.dll!NtGdiDvpWaitForVideoPortSync",
+4212: "gdi32.dll!NtGdiDvpAcquireNotification",
+4213: "gdi32.dll!NtGdiDvpReleaseNotification",
+4214: "gdi32.dll!NtGdiDxgGenericThunk",
+4215: "gdi32.dll!NtGdiDeleteClientObj",
+4216: "gdi32.dll!NtGdiDeleteColorSpace",
+4217: "gdi32.dll!NtGdiDeleteColorTransform",
+4218: "gdi32.dll!DeleteObject",
+4219: "gdi32.dll!NtGdiDescribePixelFormat",
+4220: "gdi32.dll!NtGdiGetPerBandInfo",
+4221: "gdi32.dll!NtGdiDoBanding",
+4222: "gdi32.dll!NtGdiDoPalette",
+4223: "gdi32.dll!NtGdiDrawEscape",
+4224: "gdi32.dll!NtGdiEllipse",
+4225: "gdi32.dll!NtGdiEnableEudc",
+4226: "gdi32.dll!NtGdiEndDoc",
+4227: "gdi32.dll!NtGdiEndPage",
+4228: "gdi32.dll!NtGdiEndPath",
+4229: "gdi32.dll!NtGdiEnumFontChunk",
+4230: "gdi32.dll!NtGdiEnumFontClose",
+4231: "gdi32.dll!NtGdiEnumFontOpen",
+4232: "gdi32.dll!NtGdiEnumObjects",
+4233: "gdi32.dll!NtGdiEqualRgn",
+4234: "gdi32.dll!NtGdiEudcLoadUnloadLink",
+4235: "gdi32.dll!NtGdiExcludeClipRect",
+4236: "gdi32.dll!NtGdiExtCreatePen",
+4237: "gdi32.dll!NtGdiExtCreateRegion",
+4238: "gdi32.dll!NtGdiExtEscape",
+4239: "gdi32.dll!NtGdiExtFloodFill",
+4240: "gdi32.dll!NtGdiExtGetObjectW",
+4241: "gdi32.dll!ExtSelectClipRgn",
+4242: "gdi32.dll!NtGdiExtTextOutW",
+4243: "gdi32.dll!NtGdiFillPath",
+4244: "gdi32.dll!NtGdiFillRgn",
+4245: "gdi32.dll!NtGdiFlattenPath",
+4247: "gdi32.dll!NtGdiFlush",
+4248: "gdi32.dll!NtGdiForceUFIMapping",
+4249: "gdi32.dll!NtGdiFrameRgn",
+4250: "gdi32.dll!NtGdiFullscreenControl",
+4251: "gdi32.dll!NtGdiGetAndSetDCDword",
+4252: "gdi32.dll!GetClipBox",
+4253: "gdi32.dll!GetBitmapBits",
+4254: "gdi32.dll!NtGdiGetBitmapDimension",
+4255: "gdi32.dll!NtGdiGetBoundsRect",
+4256: "gdi32.dll!NtGdiGetCharABCWidthsW",
+4257: "gdi32.dll!NtGdiGetCharacterPlacementW",
+4258: "gdi32.dll!NtGdiGetCharSet",
+4259: "gdi32.dll!NtGdiGetCharWidthW",
+4260: "gdi32.dll!NtGdiGetCharWidthInfo",
+4261: "gdi32.dll!NtGdiGetColorAdjustment",
+4263: "gdi32.dll!NtGdiGetDCDword",
+4264: "gdi32.dll!NtGdiGetDCforBitmap",
+4265: "gdi32.dll!NtGdiGetDCObject",
+4266: "gdi32.dll!NtGdiGetDCPoint",
+4267: "gdi32.dll!NtGdiGetDeviceCaps",
+4268: "gdi32.dll!NtGdiGetDeviceGammaRamp",
+4269: "gdi32.dll!NtGdiGetDeviceCapsAll",
+4270: "gdi32.dll!NtGdiGetDIBitsInternal",
+4271: "gdi32.dll!NtGdiGetETM",
+4272: "gdi32.dll!NtGdiGetEudcTimeStampEx",
+4273: "gdi32.dll!GetFontData",
+4274: "gdi32.dll!NtGdiGetFontResourceInfoInternalW",
+4275: "gdi32.dll!NtGdiGetGlyphIndicesW",
+4276: "gdi32.dll!NtGdiGetGlyphIndicesWInternal",
+4277: "gdi32.dll!NtGdiGetGlyphOutline",
+4278: "gdi32.dll!NtGdiGetKerningPairs",
+4279: "gdi32.dll!NtGdiGetLinkedUFIs",
+4280: "gdi32.dll!GetMiterLimit",
+4281: "gdi32.dll!NtGdiGetMonitorID",
+4282: "gdi32.dll!GetNearestColor",
+4283: "gdi32.dll!NtGdiGetNearestPaletteIndex",
+4284: "gdi32.dll!NtGdiGetObjectBitmapHandle",
+4285: "gdi32.dll!NtGdiGetOutlineTextMetricsInternalW",
+4286: "gdi32.dll!NtGdiGetPath",
+4287: "gdi32.dll!NtGdiGetPixel",
+4288: "gdi32.dll!NtGdiGetRandomRgn",
+4289: "gdi32.dll!GetRasterizerCaps",
+4290: "gdi32.dll!NtGdiGetRealizationInfo",
+4291: "gdi32.dll!GetRegionData",
+4292: "gdi32.dll!NtGdiGetRgnBox",
+4293: "gdi32.dll!NtGdiGetServerMetaFileBits",
+4294: "gdi32.dll!NtGdiGetSpoolMessage",
+4296: "gdi32.dll!NtGdiGetStockObject",
+4297: "gdi32.dll!NtGdiGetStringBitmapW",
+4298: "gdi32.dll!GetSystemPaletteUse",
+4299: "gdi32.dll!NtGdiGetTextCharsetInfo",
+4300: "gdi32.dll!NtGdiGetTextExtent",
+4301: "gdi32.dll!NtGdiGetTextExtentExW",
+4302: "gdi32.dll!NtGdiGetTextFaceW",
+4303: "gdi32.dll!NtGdiGetTextMetricsW",
+4304: "gdi32.dll!NtGdiGetTransform",
+4305: "gdi32.dll!NtGdiGetUFI",
+4306: "gdi32.dll!NtGdiGetEmbUFI",
+4307: "gdi32.dll!NtGdiGetUFIPathname",
+4308: "gdi32.dll!NtGdiGetEmbedFonts",
+4309: "gdi32.dll!NtGdiChangeGhostFont",
+4310: "gdi32.dll!NtGdiAddEmbFontToDC",
+4311: "gdi32.dll!NtGdiGetFontUnicodeRanges",
+4312: "gdi32.dll!NtGdiGetWidthTable",
+4313: "gdi32.dll!NtGdiGradientFill",
+4314: "gdi32.dll!NtGdiHfontCreate",
+4315: "gdi32.dll!NtGdiIcmBrushInfo",
+4316: "gdi32.dll!NtGdiInit",
+4317: "gdi32.dll!NtGdiInitSpool",
+4318: "gdi32.dll!NtGdiIntersectClipRect",
+4319: "gdi32.dll!NtGdiInvertRgn",
+4320: "gdi32.dll!NtGdiLineTo",
+4321: "gdi32.dll!NtGdiMakeFontDir",
+4322: "gdi32.dll!NtGdiMakeInfoDC",
+4323: "gdi32.dll!NtGdiMaskBlt",
+4324: "gdi32.dll!NtGdiModifyWorldTransform",
+4325: "gdi32.dll!NtGdiMonoBitmap",
+4327: "gdi32.dll!NtGdiOffsetClipRgn",
+4328: "gdi32.dll!NtGdiOffsetRgn",
+4329: "gdi32.dll!NtGdiOpenDCW",
+4330: "gdi32.dll!NtGdiPatBlt",
+4331: "gdi32.dll!NtGdiPolyPatBlt",
+4332: "gdi32.dll!NtGdiPathToRegion",
+4333: "gdi32.dll!NtGdiPlgBlt",
+4334: "gdi32.dll!NtGdiPolyDraw",
+4335: "gdi32.dll!NtGdiPolyPolyDraw",
+4336: "gdi32.dll!NtGdiPolyTextOutW",
+4337: "gdi32.dll!NtGdiPtInRegion",
+4338: "gdi32.dll!NtGdiPtVisible",
+4339: "gdi32.dll!NtGdiQueryFonts",
+4340: "gdi32.dll!NtGdiQueryFontAssocInfo",
+4341: "gdi32.dll!NtGdiRectangle",
+4342: "gdi32.dll!NtGdiRectInRegion",
+4343: "gdi32.dll!RectVisible",
+4344: "gdi32.dll!NtGdiRemoveFontResourceW",
+4345: "gdi32.dll!NtGdiRemoveFontMemResourceEx",
+4346: "gdi32.dll!NtGdiResetDC",
+4347: "gdi32.dll!NtGdiResizePalette",
+4348: "gdi32.dll!NtGdiRestoreDC",
+4349: "gdi32.dll!NtGdiRoundRect",
+4350: "gdi32.dll!NtGdiSaveDC",
+4351: "gdi32.dll!NtGdiScaleViewportExtEx",
+4352: "gdi32.dll!NtGdiScaleWindowExtEx",
+4353: "gdi32.dll!NtGdiSelectBitmap",
+4355: "gdi32.dll!NtGdiSelectClipPath",
+4356: "gdi32.dll!NtGdiSelectFont",
+4357: "gdi32.dll!NtGdiSelectPen",
+4358: "gdi32.dll!NtGdiSetBitmapAttributes",
+4359: "gdi32.dll!SetBitmapBits",
+4360: "gdi32.dll!NtGdiSetBitmapDimension",
+4361: "gdi32.dll!NtGdiSetBoundsRect",
+4362: "gdi32.dll!NtGdiSetBrushAttributes",
+4363: "gdi32.dll!NtGdiSetBrushOrg",
+4364: "gdi32.dll!NtGdiSetColorAdjustment",
+4365: "gdi32.dll!NtGdiSetColorSpace",
+4366: "gdi32.dll!NtGdiSetDeviceGammaRamp",
+4367: "gdi32.dll!NtGdiSetDIBitsToDeviceInternal",
+4368: "gdi32.dll!NtGdiSetFontEnumeration",
+4369: "gdi32.dll!NtGdiSetFontXform",
+4370: "gdi32.dll!NtGdiSetIcmMode",
+4371: "gdi32.dll!NtGdiSetLinkedUFIs",
+4372: "gdi32.dll!NtGdiSetMagicColors",
+4373: "gdi32.dll!NtGdiSetMetaRgn",
+4374: "gdi32.dll!NtGdiSetMiterLimit",
+4375: "gdi32.dll!NtGdiGetDeviceWidth",
+4376: "gdi32.dll!NtGdiMirrorWindowOrg",
+4377: "gdi32.dll!NtGdiSetLayout",
+4378: "gdi32.dll!NtGdiSetPixel",
+4379: "gdi32.dll!NtGdiSetPixelFormat",
+4380: "gdi32.dll!NtGdiSetRectRgn",
+4381: "gdi32.dll!NtGdiSetSystemPaletteUse",
+4383: "gdi32.dll!NtGdiSetupPublicCFONT",
+4384: "gdi32.dll!NtGdiSetVirtualResolution",
+4385: "gdi32.dll!NtGdiSetSizeDevice",
+4386: "gdi32.dll!NtGdiStartDoc",
+4387: "gdi32.dll!NtGdiStartPage",
+4388: "gdi32.dll!NtGdiStretchBlt",
+4389: "gdi32.dll!NtGdiStretchDIBitsInternal",
+4390: "gdi32.dll!NtGdiStrokeAndFillPath",
+4391: "gdi32.dll!NtGdiStrokePath",
+4392: "gdi32.dll!NtGdiSwapBuffers",
+4393: "gdi32.dll!NtGdiTransformPoints",
+4394: "gdi32.dll!NtGdiTransparentBlt",
+4395: "gdi32.dll!NtGdiUnloadPrinterDriver",
+4397: "gdi32.dll!NtGdiUnrealizeObject",
+4398: "gdi32.dll!NtGdiUpdateColors",
+4399: "gdi32.dll!NtGdiWidenPath",
+4400: "user32.dll!NtUserActivateKeyboardLayout",
+4401: "user32.dll!NtUserAlterWindowStyle",
+4402: "imm32.dll!NtUserAssociateInputContext",
+4403: "user32.dll!NtUserAttachThreadInput",
+4404: "user32.dll!NtUserBeginPaint",
+4405: "user32.dll!NtUserBitBltSysBmp",
+4406: "user32.dll!NtUserBlockInput",
+4407: "imm32.dll!NtUserBuildHimcList",
+4408: "user32.dll!NtUserBuildHwndList",
+4409: "user32.dll!NtUserBuildNameList",
+4410: "user32.dll!NtUserBuildPropList",
+4411: "user32.dll!NtUserCallHwnd",
+4412: "user32.dll!NtUserCallHwndLock",
+4413: "user32.dll!NtUserCallHwndOpt",
+4414: "user32.dll!NtUserCallHwndParam",
+4415: "user32.dll!NtUserCallHwndParamLock",
+4416: "user32.dll!NtUserCallMsgFilter",
+4417: "user32.dll!NtUserCallNextHookEx",
+4418: "user32.dll!NtUserCallNoParam",
+4419: "imm32.dll!NtUserCallOneParam",
+4419: "user32.dll!NtUserCallOneParam",
+4420: "user32.dll!NtUserCallTwoParam",
+4421: "user32.dll!NtUserChangeClipboardChain",
+4422: "user32.dll!NtUserChangeDisplaySettings",
+4424: "user32.dll!NtUserCheckMenuItem",
+4425: "user32.dll!NtUserChildWindowFromPointEx",
+4426: "user32.dll!NtUserClipCursor",
+4427: "user32.dll!NtUserCloseClipboard",
+4428: "user32.dll!NtUserCloseDesktop",
+4429: "user32.dll!NtUserCloseWindowStation",
+4431: "user32.dll!NtUserConvertMemHandle",
+4432: "user32.dll!NtUserCopyAcceleratorTable",
+4433: "user32.dll!NtUserCountClipboardFormats",
+4434: "user32.dll!NtUserCreateAcceleratorTable",
+4435: "user32.dll!NtUserCreateCaret",
+4436: "user32.dll!NtUserCreateDesktop",
+4437: "imm32.dll!NtUserCreateInputContext",
+4438: "user32.dll!NtUserCreateLocalMemHandle",
+4439: "user32.dll!NtUserCreateWindowEx",
+4440: "user32.dll!NtUserCreateWindowStation",
+4441: "user32.dll!NtUserDdeGetQualityOfService",
+4442: "user32.dll!NtUserDdeInitialize",
+4443: "user32.dll!NtUserDdeSetQualityOfService",
+4444: "user32.dll!NtUserDeferWindowPos",
+4445: "user32.dll!NtUserDefSetText",
+4446: "user32.dll!NtUserDeleteMenu",
+4447: "user32.dll!DestroyAcceleratorTable",
+4448: "user32.dll!NtUserDestroyCursor",
+4449: "imm32.dll!NtUserDestroyInputContext",
+4450: "user32.dll!NtUserDestroyMenu",
+4451: "user32.dll!NtUserDestroyWindow",
+4452: "imm32.dll!NtUserDisableThreadIme",
+4453: "user32.dll!NtUserDispatchMessage",
+4454: "user32.dll!NtUserDragDetect",
+4455: "user32.dll!NtUserDragObject",
+4456: "user32.dll!NtUserDrawAnimatedRects",
+4457: "user32.dll!NtUserDrawCaption",
+4458: "user32.dll!NtUserDrawCaptionTemp",
+4459: "user32.dll!NtUserDrawIconEx",
+4460: "user32.dll!NtUserDrawMenuBarTemp",
+4461: "user32.dll!NtUserEmptyClipboard",
+4462: "user32.dll!NtUserEnableMenuItem",
+4463: "user32.dll!NtUserEnableScrollBar",
+4464: "user32.dll!NtUserEndDeferWindowPosEx",
+4465: "user32.dll!NtUserEndMenu",
+4466: "user32.dll!NtUserEndPaint",
+4467: "user32.dll!NtUserEnumDisplayDevices",
+4468: "user32.dll!NtUserEnumDisplayMonitors",
+4469: "user32.dll!NtUserEnumDisplaySettings",
+4470: "user32.dll!NtUserEvent",
+4471: "user32.dll!NtUserExcludeUpdateRgn",
+4472: "user32.dll!NtUserFillWindow",
+4473: "user32.dll!NtUserFindExistingCursorIcon",
+4474: "user32.dll!NtUserFindWindowEx",
+4475: "user32.dll!NtUserFlashWindowEx",
+4476: "user32.dll!NtUserGetAltTabInfo",
+4477: "user32.dll!NtUserGetAncestor",
+4478: "imm32.dll!NtUserGetAppImeLevel",
+4479: "user32.dll!GetAsyncKeyState",
+4480: "user32.dll!NtUserGetAtomName",
+4481: "user32.dll!NtUserGetCaretBlinkTime",
+4482: "user32.dll!NtUserGetCaretPos",
+4483: "user32.dll!NtUserGetClassInfo",
+4484: "user32.dll!NtUserGetClassName",
+4485: "user32.dll!NtUserGetClipboardData",
+4486: "user32.dll!NtUserGetClipboardFormatName",
+4487: "user32.dll!NtUserGetClipboardOwner",
+4488: "user32.dll!NtUserGetClipboardSequenceNumber",
+4489: "user32.dll!NtUserGetClipboardViewer",
+4490: "user32.dll!NtUserGetClipCursor",
+4491: "user32.dll!NtUserGetComboBoxInfo",
+4492: "user32.dll!NtUserGetControlBrush",
+4493: "user32.dll!NtUserGetControlColor",
+4494: "user32.dll!NtUserGetCPD",
+4495: "user32.dll!NtUserGetCursorFrameInfo",
+4496: "user32.dll!NtUserGetCursorInfo",
+4497: "user32.dll!NtUserGetDC",
+4498: "user32.dll!NtUserGetDCEx",
+4499: "user32.dll!NtUserGetDoubleClickTime",
+4500: "user32.dll!NtUserGetForegroundWindow",
+4501: "user32.dll!NtUserGetGuiResources",
+4502: "user32.dll!NtUserGetGUIThreadInfo",
+4503: "user32.dll!NtUserGetIconInfo",
+4504: "user32.dll!NtUserGetIconSize",
+4505: "imm32.dll!NtUserGetImeHotKey",
+4505: "user32.dll!NtUserGetImeHotKey",
+4506: "imm32.dll!NtUserGetImeInfoEx",
+4507: "user32.dll!NtUserGetInternalWindowPos",
+4508: "user32.dll!NtUserGetKeyboardLayoutList",
+4509: "user32.dll!NtUserGetKeyboardLayoutName",
+4510: "user32.dll!NtUserGetKeyboardState",
+4511: "user32.dll!NtUserGetKeyNameText",
+4512: "user32.dll!NtUserGetKeyState",
+4513: "user32.dll!NtUserGetListBoxInfo",
+4514: "user32.dll!NtUserGetMenuBarInfo",
+4515: "user32.dll!NtUserGetMenuIndex",
+4516: "user32.dll!NtUserGetMenuItemRect",
+4517: "user32.dll!NtUserGetMessage",
+4518: "user32.dll!NtUserGetMouseMovePointsEx",
+4519: "user32.dll!NtUserGetObjectInformation",
+4520: "user32.dll!NtUserGetOpenClipboardWindow",
+4521: "user32.dll!NtUserGetPriorityClipboardFormat",
+4522: "user32.dll!NtUserGetProcessWindowStation",
+4523: "user32.dll!NtUserGetRawInputBuffer",
+4524: "user32.dll!NtUserGetRawInputData",
+4525: "user32.dll!NtUserGetRawInputDeviceInfo",
+4526: "user32.dll!NtUserGetRawInputDeviceList",
+4527: "user32.dll!NtUserGetRegisteredRawInputDevices",
+4528: "user32.dll!NtUserGetScrollBarInfo",
+4529: "user32.dll!NtUserGetSystemMenu",
+4530: "user32.dll!NtUserGetThreadDesktop",
+4531: "imm32.dll!NtUserGetThreadState",
+4531: "user32.dll!NtUserGetThreadState",
+4532: "user32.dll!NtUserGetTitleBarInfo",
+4533: "user32.dll!GetUpdateRect",
+4534: "user32.dll!GetUpdateRgn",
+4535: "user32.dll!NtUserGetWindowDC",
+4536: "user32.dll!NtUserGetWindowPlacement",
+4537: "user32.dll!NtUserGetWOWClass",
+4539: "user32.dll!NtUserHideCaret",
+4540: "user32.dll!NtUserHiliteMenuItem",
+4541: "user32.dll!NtUserImpersonateDdeClientWindow",
+4543: "user32.dll!NtUserInitializeClientPfnArrays",
+4544: "user32.dll!NtUserInitTask",
+4545: "user32.dll!NtUserInternalGetWindowText",
+4546: "user32.dll!NtUserInvalidateRect",
+4547: "user32.dll!NtUserInvalidateRgn",
+4548: "user32.dll!NtUserIsClipboardFormatAvailable",
+4549: "user32.dll!NtUserKillTimer",
+4550: "user32.dll!NtUserLoadKeyboardLayoutEx",
+4551: "user32.dll!NtUserLockWindowStation",
+4552: "user32.dll!NtUserLockWindowUpdate",
+4553: "user32.dll!NtUserLockWorkStation",
+4554: "user32.dll!NtUserMapVirtualKeyEx",
+4555: "user32.dll!NtUserMenuItemFromPoint",
+4556: "user32.dll!NtUserMessageCall",
+4557: "user32.dll!NtUserMinMaximize",
+4558: "user32.dll!NtUserMNDragLeave",
+4559: "user32.dll!NtUserMNDragOver",
+4560: "user32.dll!NtUserModifyUserStartupInfoFlags",
+4561: "user32.dll!NtUserMoveWindow",
+4562: "imm32.dll!NtUserNotifyIMEStatus",
+4562: "user32.dll!NtUserNotifyIMEStatus",
+4564: "user32.dll!NtUserNotifyWinEvent",
+4565: "user32.dll!NtUserOpenClipboard",
+4566: "user32.dll!NtUserOpenDesktop",
+4567: "user32.dll!NtUserOpenInputDesktop",
+4568: "user32.dll!NtUserOpenWindowStation",
+4569: "user32.dll!NtUserPaintDesktop",
+4570: "user32.dll!PeekMessageW",
+4571: "user32.dll!NtUserPostMessage",
+4572: "user32.dll!NtUserPostThreadMessage",
+4573: "user32.dll!NtUserPrintWindow",
+4574: "user32.dll!NtUserProcessConnect",
+4576: "imm32.dll!NtUserQueryInputContext",
+4577: "user32.dll!NtUserQuerySendMessage",
+4578: "user32.dll!NtUserQueryUserCounters",
+4579: "imm32.dll!NtUserQueryWindow",
+4579: "user32.dll!NtUserQueryWindow",
+4580: "user32.dll!NtUserRealChildWindowFromPoint",
+4581: "user32.dll!NtUserRealInternalGetMessage",
+4582: "user32.dll!NtUserRealWaitMessageEx",
+4583: "user32.dll!NtUserRedrawWindow",
+4584: "user32.dll!NtUserRegisterClassExWOW",
+4585: "user32.dll!NtUserRegisterUserApiHook",
+4586: "user32.dll!NtUserRegisterHotKey",
+4587: "user32.dll!NtUserRegisterRawInputDevices",
+4588: "user32.dll!NtUserRegisterTasklist",
+4589: "user32.dll!NtUserRegisterWindowMessage",
+4590: "user32.dll!NtUserRemoveMenu",
+4591: "user32.dll!NtUserRemoveProp",
+4593: "user32.dll!NtUserResolveDesktopForWOW",
+4594: "user32.dll!NtUserSBGetParms",
+4595: "user32.dll!ScrollDC",
+4596: "user32.dll!NtUserScrollWindowEx",
+4597: "gdi32.dll!NtUserSelectPalette",
+4598: "user32.dll!NtUserSendInput",
+4599: "user32.dll!NtUserSetActiveWindow",
+4600: "imm32.dll!NtUserSetAppImeLevel",
+4601: "user32.dll!NtUserSetCapture",
+4602: "user32.dll!NtUserSetClassLong",
+4603: "user32.dll!NtUserSetClassWord",
+4604: "user32.dll!NtUserSetClipboardData",
+4605: "user32.dll!NtUserSetClipboardViewer",
+4606: "user32.dll!NtUserSetConsoleReserveKeys",
+4607: "user32.dll!NtUserSetCursor",
+4608: "user32.dll!NtUserSetCursorContents",
+4609: "user32.dll!NtUserSetCursorIconData",
+4610: "user32.dll!NtUserSetDbgTag",
+4611: "user32.dll!NtUserSetFocus",
+4612: "user32.dll!NtUserSetImeHotKey",
+4613: "imm32.dll!NtUserSetImeInfoEx",
+4614: "user32.dll!NtUserSetImeOwnerWindow",
+4616: "user32.dll!NtUserSetInformationThread",
+4617: "user32.dll!NtUserSetInternalWindowPos",
+4618: "user32.dll!NtUserSetKeyboardState",
+4619: "user32.dll!NtUserSetLogonNotifyWindow",
+4620: "user32.dll!NtUserSetMenu",
+4621: "user32.dll!NtUserSetMenuContextHelpId",
+4622: "user32.dll!NtUserSetMenuDefaultItem",
+4623: "user32.dll!NtUserSetMenuFlagRtoL",
+4624: "user32.dll!NtUserSetObjectInformation",
+4625: "user32.dll!NtUserSetParent",
+4626: "user32.dll!NtUserSetProcessWindowStation",
+4627: "user32.dll!NtUserSetProp",
+4628: "user32.dll!NtUserSetRipFlags",
+4629: "user32.dll!NtUserSetScrollInfo",
+4630: "user32.dll!NtUserSetShellWindowEx",
+4631: "user32.dll!NtUserSetSysColors",
+4632: "user32.dll!NtUserSetSystemCursor",
+4633: "user32.dll!NtUserSetSystemMenu",
+4634: "user32.dll!NtUserSetSystemTimer",
+4635: "user32.dll!NtUserSetThreadDesktop",
+4636: "imm32.dll!NtUserSetThreadLayoutHandles",
+4637: "user32.dll!NtUserSetThreadState",
+4638: "user32.dll!NtUserSetTimer",
+4639: "user32.dll!NtUserSetWindowFNID",
+4640: "user32.dll!NtUserSetWindowLong",
+4641: "user32.dll!NtUserSetWindowPlacement",
+4642: "user32.dll!NtUserSetWindowPos",
+4643: "user32.dll!NtUserSetWindowRgn",
+4644: "user32.dll!NtUserSetWindowsHookAW",
+4645: "user32.dll!NtUserSetWindowsHookEx",
+4646: "user32.dll!NtUserSetWindowStationUser",
+4647: "user32.dll!NtUserSetWindowWord",
+4648: "user32.dll!NtUserSetWinEventHook",
+4649: "user32.dll!NtUserShowCaret",
+4650: "user32.dll!NtUserShowScrollBar",
+4651: "user32.dll!NtUserShowWindow",
+4652: "user32.dll!NtUserShowWindowAsync",
+4654: "user32.dll!NtUserSwitchDesktop",
+4655: "user32.dll!NtUserSystemParametersInfo",
+4657: "user32.dll!NtUserThunkedMenuInfo",
+4658: "user32.dll!NtUserThunkedMenuItemInfo",
+4659: "user32.dll!NtUserToUnicodeEx",
+4660: "user32.dll!NtUserTrackMouseEvent",
+4661: "user32.dll!NtUserTrackPopupMenuEx",
+4662: "user32.dll!NtUserCalcMenuBar",
+4663: "user32.dll!NtUserPaintMenuBar",
+4664: "user32.dll!TranslateAcceleratorA",
+4665: "user32.dll!NtUserTranslateMessage",
+4666: "user32.dll!NtUserUnhookWindowsHookEx",
+4667: "user32.dll!NtUserUnhookWinEvent",
+4668: "user32.dll!NtUserUnloadKeyboardLayout",
+4669: "user32.dll!NtUserUnlockWindowStation",
+4670: "user32.dll!NtUserUnregisterClass",
+4671: "user32.dll!NtUserUnregisterUserApiHook",
+4672: "user32.dll!NtUserUnregisterHotKey",
+4673: "imm32.dll!NtUserUpdateInputContext",
+4673: "user32.dll!NtUserUpdateInputContext",
+4674: "user32.dll!NtUserUpdateInstance",
+4675: "user32.dll!NtUserUpdateLayeredWindow",
+4676: "user32.dll!NtUserGetLayeredWindowAttributes",
+4677: "user32.dll!NtUserSetLayeredWindowAttributes",
+4678: "user32.dll!NtUserUpdatePerUserSystemParameters",
+4679: "user32.dll!NtUserUserHandleGrantAccess",
+4680: "imm32.dll!NtUserValidateHandleSecure",
+4680: "user32.dll!NtUserValidateHandleSecure",
+4681: "user32.dll!NtUserValidateRect",
+4682: "user32.dll!NtUserValidateTimerCallback",
+4683: "user32.dll!NtUserVkKeyScanEx",
+4684: "user32.dll!NtUserWaitForInputIdle",
+4685: "user32.dll!NtUserWaitForMsgAndEvent",
+4686: "user32.dll!NtUserWaitMessage",
+4687: "user32.dll!NtUserWin32PoolAllocationStats",
+4688: "user32.dll!NtUserWindowFromPoint",
+4689: "user32.dll!NtUserYieldTask",
+4695: "gdi32.dll!NtGdiEngAssociateSurface",
+4696: "gdi32.dll!NtGdiEngCreateBitmap",
+4697: "gdi32.dll!NtGdiEngCreateDeviceSurface",
+4698: "gdi32.dll!NtGdiEngCreateDeviceBitmap",
+4699: "gdi32.dll!NtGdiEngCreatePalette",
+4700: "gdi32.dll!NtGdiEngComputeGlyphSet",
+4701: "gdi32.dll!NtGdiEngCopyBits",
+4702: "gdi32.dll!NtGdiEngDeletePalette",
+4703: "gdi32.dll!NtGdiEngDeleteSurface",
+4704: "gdi32.dll!NtGdiEngEraseSurface",
+4705: "gdi32.dll!NtGdiEngUnlockSurface",
+4706: "gdi32.dll!NtGdiEngLockSurface",
+4707: "gdi32.dll!NtGdiEngBitBlt",
+4708: "gdi32.dll!NtGdiEngStretchBlt",
+4709: "gdi32.dll!NtGdiEngPlgBlt",
+4710: "gdi32.dll!NtGdiEngMarkBandingSurface",
+4711: "gdi32.dll!NtGdiEngStrokePath",
+4712: "gdi32.dll!NtGdiEngFillPath",
+4713: "gdi32.dll!NtGdiEngStrokeAndFillPath",
+4714: "gdi32.dll!NtGdiEngPaint",
+4715: "gdi32.dll!NtGdiEngLineTo",
+4716: "gdi32.dll!NtGdiEngAlphaBlend",
+4717: "gdi32.dll!NtGdiEngGradientFill",
+4718: "gdi32.dll!NtGdiEngTransparentBlt",
+4719: "gdi32.dll!NtGdiEngTextOut",
+4720: "gdi32.dll!NtGdiEngStretchBltROP",
+4721: "gdi32.dll!NtGdiXLATEOBJ_cGetPalette",
+4722: "gdi32.dll!NtGdiXLATEOBJ_iXlate",
+4723: "gdi32.dll!NtGdiXLATEOBJ_hGetColorTransform",
+4724: "gdi32.dll!NtGdiCLIPOBJ_bEnum",
+4725: "gdi32.dll!NtGdiCLIPOBJ_cEnumStart",
+4726: "gdi32.dll!NtGdiCLIPOBJ_ppoGetPath",
+4727: "gdi32.dll!NtGdiEngDeletePath",
+4728: "gdi32.dll!NtGdiEngCreateClip",
+4729: "gdi32.dll!NtGdiEngDeleteClip",
+4730: "gdi32.dll!NtGdiBRUSHOBJ_ulGetBrushColor",
+4731: "gdi32.dll!NtGdiBRUSHOBJ_pvAllocRbrush",
+4732: "gdi32.dll!NtGdiBRUSHOBJ_pvGetRbrush",
+4733: "gdi32.dll!NtGdiBRUSHOBJ_hGetColorTransform",
+4734: "gdi32.dll!NtGdiXFORMOBJ_bApplyXform",
+4735: "gdi32.dll!NtGdiXFORMOBJ_iGetXform",
+4736: "gdi32.dll!NtGdiFONTOBJ_vGetInfo",
+4737: "gdi32.dll!NtGdiFONTOBJ_pxoGetXform",
+4738: "gdi32.dll!NtGdiFONTOBJ_cGetGlyphs",
+4739: "gdi32.dll!NtGdiFONTOBJ_pifi",
+4740: "gdi32.dll!NtGdiFONTOBJ_pfdg",
+4741: "gdi32.dll!NtGdiFONTOBJ_pQueryGlyphAttrs",
+4742: "gdi32.dll!NtGdiFONTOBJ_pvTrueTypeFontFile",
+4743: "gdi32.dll!NtGdiFONTOBJ_cGetAllGlyphHandles",
+4744: "gdi32.dll!NtGdiSTROBJ_bEnum",
+4745: "gdi32.dll!NtGdiSTROBJ_bEnumPositionsOnly",
+4746: "gdi32.dll!NtGdiSTROBJ_bGetAdvanceWidths",
+4747: "gdi32.dll!NtGdiSTROBJ_vEnumStart",
+4748: "gdi32.dll!NtGdiSTROBJ_dwGetCodePage",
+4749: "gdi32.dll!NtGdiPATHOBJ_vGetBounds",
+4750: "gdi32.dll!NtGdiPATHOBJ_bEnum",
+4751: "gdi32.dll!NtGdiPATHOBJ_vEnumStart",
+4752: "gdi32.dll!NtGdiPATHOBJ_vEnumStartClipLines",
+4753: "gdi32.dll!NtGdiPATHOBJ_bEnumClipLines",
+4754: "gdi32.dll!NtGdiGetDhpdev",
+4755: "gdi32.dll!NtGdiEngCheckAbort",
+4756: "gdi32.dll!NtGdiHT_Get8BPPFormatPalette",
+4757: "gdi32.dll!NtGdiHT_Get8BPPMaskPalette",
+4758: "gdi32.dll!NtGdiUpdateTransform",
+4759: "gdi32.dll!NtGdiSetPUMPDOBJ",
+4760: "gdi32.dll!NtGdiBRUSHOBJ_DeleteRbrush",
+4761: "gdi32.dll!NtGdiUMPDEngFreeUserMem",
+4762: "gdi32.dll!NtGdiDrawStream",
+}
diff --git a/chromium/tools/traceline/traceline/sidestep/ia32_modrm_map.cc b/chromium/tools/traceline/traceline/sidestep/ia32_modrm_map.cc
new file mode 100644
index 00000000000..e1aeec6765d
--- /dev/null
+++ b/chromium/tools/traceline/traceline/sidestep/ia32_modrm_map.cc
@@ -0,0 +1,92 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Table of relevant information about how to decode the ModR/M byte.
+// Based on information in the IA-32 Intel Architecture
+// Software Developer's Manual Volume 2: Instruction Set Reference.
+
+#include "sidestep/mini_disassembler.h"
+#include "sidestep/mini_disassembler_types.h"
+
+namespace sidestep {
+
+const ModrmEntry MiniDisassembler::s_ia16_modrm_map_[] = {
+// mod == 00
+ /* r/m == 000 */ { false, false, OS_ZERO },
+ /* r/m == 001 */ { false, false, OS_ZERO },
+ /* r/m == 010 */ { false, false, OS_ZERO },
+ /* r/m == 011 */ { false, false, OS_ZERO },
+ /* r/m == 100 */ { false, false, OS_ZERO },
+ /* r/m == 101 */ { false, false, OS_ZERO },
+ /* r/m == 110 */ { true, false, OS_WORD },
+ /* r/m == 111 */ { false, false, OS_ZERO },
+// mod == 01
+ /* r/m == 000 */ { true, false, OS_BYTE },
+ /* r/m == 001 */ { true, false, OS_BYTE },
+ /* r/m == 010 */ { true, false, OS_BYTE },
+ /* r/m == 011 */ { true, false, OS_BYTE },
+ /* r/m == 100 */ { true, false, OS_BYTE },
+ /* r/m == 101 */ { true, false, OS_BYTE },
+ /* r/m == 110 */ { true, false, OS_BYTE },
+ /* r/m == 111 */ { true, false, OS_BYTE },
+// mod == 10
+ /* r/m == 000 */ { true, false, OS_WORD },
+ /* r/m == 001 */ { true, false, OS_WORD },
+ /* r/m == 010 */ { true, false, OS_WORD },
+ /* r/m == 011 */ { true, false, OS_WORD },
+ /* r/m == 100 */ { true, false, OS_WORD },
+ /* r/m == 101 */ { true, false, OS_WORD },
+ /* r/m == 110 */ { true, false, OS_WORD },
+ /* r/m == 111 */ { true, false, OS_WORD },
+// mod == 11
+ /* r/m == 000 */ { false, false, OS_ZERO },
+ /* r/m == 001 */ { false, false, OS_ZERO },
+ /* r/m == 010 */ { false, false, OS_ZERO },
+ /* r/m == 011 */ { false, false, OS_ZERO },
+ /* r/m == 100 */ { false, false, OS_ZERO },
+ /* r/m == 101 */ { false, false, OS_ZERO },
+ /* r/m == 110 */ { false, false, OS_ZERO },
+ /* r/m == 111 */ { false, false, OS_ZERO }
+};
+
+const ModrmEntry MiniDisassembler::s_ia32_modrm_map_[] = {
+// mod == 00
+ /* r/m == 000 */ { false, false, OS_ZERO },
+ /* r/m == 001 */ { false, false, OS_ZERO },
+ /* r/m == 010 */ { false, false, OS_ZERO },
+ /* r/m == 011 */ { false, false, OS_ZERO },
+ /* r/m == 100 */ { false, true, OS_ZERO },
+ /* r/m == 101 */ { true, false, OS_DOUBLE_WORD },
+ /* r/m == 110 */ { false, false, OS_ZERO },
+ /* r/m == 111 */ { false, false, OS_ZERO },
+// mod == 01
+ /* r/m == 000 */ { true, false, OS_BYTE },
+ /* r/m == 001 */ { true, false, OS_BYTE },
+ /* r/m == 010 */ { true, false, OS_BYTE },
+ /* r/m == 011 */ { true, false, OS_BYTE },
+ /* r/m == 100 */ { true, true, OS_BYTE },
+ /* r/m == 101 */ { true, false, OS_BYTE },
+ /* r/m == 110 */ { true, false, OS_BYTE },
+ /* r/m == 111 */ { true, false, OS_BYTE },
+// mod == 10
+ /* r/m == 000 */ { true, false, OS_DOUBLE_WORD },
+ /* r/m == 001 */ { true, false, OS_DOUBLE_WORD },
+ /* r/m == 010 */ { true, false, OS_DOUBLE_WORD },
+ /* r/m == 011 */ { true, false, OS_DOUBLE_WORD },
+ /* r/m == 100 */ { true, true, OS_DOUBLE_WORD },
+ /* r/m == 101 */ { true, false, OS_DOUBLE_WORD },
+ /* r/m == 110 */ { true, false, OS_DOUBLE_WORD },
+ /* r/m == 111 */ { true, false, OS_DOUBLE_WORD },
+// mod == 11
+ /* r/m == 000 */ { false, false, OS_ZERO },
+ /* r/m == 001 */ { false, false, OS_ZERO },
+ /* r/m == 010 */ { false, false, OS_ZERO },
+ /* r/m == 011 */ { false, false, OS_ZERO },
+ /* r/m == 100 */ { false, false, OS_ZERO },
+ /* r/m == 101 */ { false, false, OS_ZERO },
+ /* r/m == 110 */ { false, false, OS_ZERO },
+ /* r/m == 111 */ { false, false, OS_ZERO },
+};
+
+}; // namespace sidestep
diff --git a/chromium/tools/traceline/traceline/sidestep/ia32_opcode_map.cc b/chromium/tools/traceline/traceline/sidestep/ia32_opcode_map.cc
new file mode 100644
index 00000000000..4600cfa25b9
--- /dev/null
+++ b/chromium/tools/traceline/traceline/sidestep/ia32_opcode_map.cc
@@ -0,0 +1,1159 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Opcode decoding maps. Based on the IA-32 Intel Architecture
+// Software Developer's Manual Volume 2: Instruction Set Reference. Idea
+// for how to lay out the tables in memory taken from the implementation
+// in the Bastard disassembly environment.
+
+#include "sidestep/mini_disassembler.h"
+
+namespace sidestep {
+
+/*
+* This is the first table to be searched; the first field of each
+* Opcode in the table is either 0 to indicate you're in the
+* right table, or an index to the correct table, in the global
+* map g_pentiumOpcodeMap
+*/
+const Opcode s_first_opcode_byte[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_B, AM_G | OT_B, AM_NOT_USED, "add", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "add", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_G | OT_B, AM_E | OT_B, AM_NOT_USED, "add", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "add", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "add", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "add", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_REGISTER | OT_W, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_REGISTER | OT_W, AM_NOT_USED, AM_NOT_USED, "pop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x8 */ { 0, IT_GENERIC, AM_E | OT_B, AM_G | OT_B, AM_NOT_USED, "or", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x9 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "or", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA */ { 0, IT_GENERIC, AM_G | OT_B, AM_E | OT_B, AM_NOT_USED, "or", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "or", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "or", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xD */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "or", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xE */ { 0, IT_GENERIC, AM_REGISTER | OT_W, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xF */ { 1, IT_REFERENCE, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x10 */ { 0, IT_GENERIC, AM_E | OT_B, AM_G | OT_B, AM_NOT_USED, "adc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x11 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "adc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x12 */ { 0, IT_GENERIC, AM_G | OT_B, AM_E | OT_B, AM_NOT_USED, "adc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x13 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "adc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x14 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "adc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x15 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "adc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x16 */ { 0, IT_GENERIC, AM_REGISTER | OT_W, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x17 */ { 0, IT_GENERIC, AM_REGISTER | OT_W, AM_NOT_USED, AM_NOT_USED, "pop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x18 */ { 0, IT_GENERIC, AM_E | OT_B, AM_G | OT_B, AM_NOT_USED, "sbb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x19 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "sbb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1A */ { 0, IT_GENERIC, AM_G | OT_B, AM_E | OT_B, AM_NOT_USED, "sbb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1B */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "sbb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1C */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "sbb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1D */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "sbb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1E */ { 0, IT_GENERIC, AM_REGISTER | OT_W, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1F */ { 0, IT_GENERIC, AM_REGISTER | OT_W, AM_NOT_USED, AM_NOT_USED, "pop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x20 */ { 0, IT_GENERIC, AM_E | OT_B, AM_G | OT_B, AM_NOT_USED, "and", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x21 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "and", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x22 */ { 0, IT_GENERIC, AM_G | OT_B, AM_E | OT_B, AM_NOT_USED, "and", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x23 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "and", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x24 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "and", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x25 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "and", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x26 */ { 0, IT_PREFIX, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x27 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "daa", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x28 */ { 0, IT_GENERIC, AM_E | OT_B, AM_G | OT_B, AM_NOT_USED, "sub", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x29 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "sub", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2A */ { 0, IT_GENERIC, AM_G | OT_B, AM_E | OT_B, AM_NOT_USED, "sub", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2B */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "sub", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2C */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "sub", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2D */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "sub", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2E */ { 0, IT_PREFIX, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2F */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "das", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x30 */ { 0, IT_GENERIC, AM_E | OT_B, AM_G | OT_B, AM_NOT_USED, "xor", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x31 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "xor", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x32 */ { 0, IT_GENERIC, AM_G | OT_B, AM_E | OT_B, AM_NOT_USED, "xor", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x33 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "xor", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x34 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "xor", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x35 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "xor", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x36 */ { 0, IT_PREFIX, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x37 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "aaa", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x38 */ { 0, IT_GENERIC, AM_E | OT_B, AM_G | OT_B, AM_NOT_USED, "cmp", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x39 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "cmp", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3A */ { 0, IT_GENERIC, AM_G | OT_B, AM_E | OT_B, AM_NOT_USED, "cmp", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3B */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmp", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3C */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "cmp", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3D */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "cmp", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3E */ { 0, IT_PREFIX, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3F */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "aas", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x40 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "inc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x41 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "inc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x42 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "inc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x43 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "inc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x44 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "inc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x45 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "inc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x46 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "inc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x47 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "inc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x48 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "dec", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x49 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "dec", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4A */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "dec", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4B */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "dec", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4C */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "dec", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4D */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "dec", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4E */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "dec", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4F */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "dec", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x50 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x51 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x52 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x53 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x54 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x55 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x56 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x57 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x58 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "pop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x59 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "pop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5A */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "pop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5B */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "pop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5C */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "pop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5D */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "pop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5E */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "pop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5F */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_NOT_USED, AM_NOT_USED, "pop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x60 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "pushad", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x61 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "popad", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x62 */ { 0, IT_GENERIC, AM_G | OT_V, AM_M | OT_A, AM_NOT_USED, "bound", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x63 */ { 0, IT_GENERIC, AM_E | OT_W, AM_G | OT_W, AM_NOT_USED, "arpl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x64 */ { 0, IT_PREFIX, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x65 */ { 0, IT_PREFIX, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x66 */ { 0, IT_PREFIX_OPERAND, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x67 */ { 0, IT_PREFIX_ADDRESS, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x68 */ { 0, IT_GENERIC, AM_I | OT_V, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x69 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_I | OT_V, "imul", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6A */ { 0, IT_GENERIC, AM_I | OT_B, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6B */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_I | OT_B, "imul", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6C */ { 0, IT_GENERIC, AM_Y | OT_B, AM_REGISTER | OT_B, AM_NOT_USED, "insb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6D */ { 0, IT_GENERIC, AM_Y | OT_V, AM_REGISTER | OT_V, AM_NOT_USED, "insd", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6E */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_X | OT_B, AM_NOT_USED, "outsb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6F */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_X | OT_V, AM_NOT_USED, "outsb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x70 */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jo", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x71 */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jno", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x72 */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x73 */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jnc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x74 */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jz", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x75 */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jnz", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x76 */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jbe", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x77 */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "ja", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x78 */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "js", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x79 */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jns", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7A */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jpe", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7B */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jpo", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7C */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7D */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jge", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7E */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jle", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7F */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x80 */ { 2, IT_REFERENCE, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x81 */ { 3, IT_REFERENCE, AM_E | OT_V, AM_I | OT_V, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x82 */ { 4, IT_REFERENCE, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x83 */ { 5, IT_REFERENCE, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x84 */ { 0, IT_GENERIC, AM_E | OT_B, AM_G | OT_B, AM_NOT_USED, "test", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x85 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "test", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x86 */ { 0, IT_GENERIC, AM_E | OT_B, AM_G | OT_B, AM_NOT_USED, "xchg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x87 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "xchg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x88 */ { 0, IT_GENERIC, AM_E | OT_B, AM_G | OT_B, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x89 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x8A */ { 0, IT_GENERIC, AM_G | OT_B, AM_E | OT_B, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x8B */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x8C */ { 0, IT_GENERIC, AM_E | OT_W, AM_S | OT_W, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x8D */ { 0, IT_GENERIC, AM_G | OT_V, AM_M | OT_ADDRESS_MODE_M, AM_NOT_USED, "lea", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x8E */ { 0, IT_GENERIC, AM_S | OT_W, AM_E | OT_W, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x8F */ { 0, IT_GENERIC, AM_E | OT_V, AM_NOT_USED, AM_NOT_USED, "pop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x90 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "nop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x91 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_REGISTER | OT_V, AM_NOT_USED, "xchg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x92 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_REGISTER | OT_V, AM_NOT_USED, "xchg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x93 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_REGISTER | OT_V, AM_NOT_USED, "xchg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x94 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_REGISTER | OT_V, AM_NOT_USED, "xchg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x95 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_REGISTER | OT_V, AM_NOT_USED, "xchg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x96 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_REGISTER | OT_V, AM_NOT_USED, "xchg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x97 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_REGISTER | OT_V, AM_NOT_USED, "xchg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x98 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "cwde", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x99 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "cdq", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x9A */ { 0, IT_JUMP, AM_A | OT_P, AM_NOT_USED, AM_NOT_USED, "callf", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x9B */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "wait", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x9C */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "pushfd", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x9D */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "popfd", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x9E */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "sahf", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x9F */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "lahf", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA0 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_O | OT_B, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA1 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_O | OT_V, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA2 */ { 0, IT_GENERIC, AM_O | OT_B, AM_REGISTER | OT_B, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA3 */ { 0, IT_GENERIC, AM_O | OT_V, AM_REGISTER | OT_V, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA4 */ { 0, IT_GENERIC, AM_X | OT_B, AM_Y | OT_B, AM_NOT_USED, "movsb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA5 */ { 0, IT_GENERIC, AM_X | OT_V, AM_Y | OT_V, AM_NOT_USED, "movsd", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA6 */ { 0, IT_GENERIC, AM_X | OT_B, AM_Y | OT_B, AM_NOT_USED, "cmpsb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA7 */ { 0, IT_GENERIC, AM_X | OT_V, AM_Y | OT_V, AM_NOT_USED, "cmpsd", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA8 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "test", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA9 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "test", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xAA */ { 0, IT_GENERIC, AM_Y | OT_B, AM_REGISTER | OT_B, AM_NOT_USED, "stosb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xAB */ { 0, IT_GENERIC, AM_Y | OT_V, AM_REGISTER | OT_V, AM_NOT_USED, "stosd", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xAC */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_X| OT_B, AM_NOT_USED, "lodsb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xAD */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_X| OT_V, AM_NOT_USED, "lodsd", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xAE */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_Y | OT_B, AM_NOT_USED, "scasb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xAF */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_Y | OT_V, AM_NOT_USED, "scasd", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB0 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB1 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB2 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB3 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB4 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB5 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB6 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB7 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB8 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB9 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xBA */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xBB */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xBC */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xBD */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xBE */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xBF */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_I | OT_V, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC0 */ { 6, IT_REFERENCE, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC1 */ { 7, IT_REFERENCE, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC2 */ { 0, IT_RETURN, AM_I | OT_W, AM_NOT_USED, AM_NOT_USED, "ret", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC3 */ { 0, IT_RETURN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "ret", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC4 */ { 0, IT_GENERIC, AM_G | OT_V, AM_M | OT_P, AM_NOT_USED, "les", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC5 */ { 0, IT_GENERIC, AM_G | OT_V, AM_M | OT_P, AM_NOT_USED, "lds", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC6 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC7 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_V, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC8 */ { 0, IT_GENERIC, AM_I | OT_W, AM_I | OT_B, AM_NOT_USED, "enter", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC9 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "leave", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xCA */ { 0, IT_RETURN, AM_I | OT_W, AM_NOT_USED, AM_NOT_USED, "retf", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xCB */ { 0, IT_RETURN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "retf", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xCC */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "int3", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xCD */ { 0, IT_GENERIC, AM_I | OT_B, AM_NOT_USED, AM_NOT_USED, "int", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xCE */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "into", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xCF */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "iret", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xD0 */ { 8, IT_REFERENCE, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xD1 */ { 9, IT_REFERENCE, AM_E | OT_V, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xD2 */ { 10, IT_REFERENCE, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xD3 */ { 11, IT_REFERENCE, AM_E | OT_V, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xD4 */ { 0, IT_GENERIC, AM_I | OT_B, AM_NOT_USED, AM_NOT_USED, "aam", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xD5 */ { 0, IT_GENERIC, AM_I | OT_B, AM_NOT_USED, AM_NOT_USED, "aad", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xD6 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xD7 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "xlat", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+
+ // The following 8 lines would be references to the FPU tables, but we currently
+ // do not support the FPU instructions in this disassembler.
+
+ /* 0xD8 */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xD9 */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xDA */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xDB */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xDC */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xDD */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xDE */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xDF */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+
+
+ /* 0xE0 */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "loopnz", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xE1 */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "loopz", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xE2 */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "loop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xE3 */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jcxz", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xE4 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "in", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xE5 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_I | OT_B, AM_NOT_USED, "in", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xE6 */ { 0, IT_GENERIC, AM_I | OT_B, AM_REGISTER | OT_B, AM_NOT_USED, "out", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xE7 */ { 0, IT_GENERIC, AM_I | OT_B, AM_REGISTER | OT_B, AM_NOT_USED, "out", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xE8 */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "call", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xE9 */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "jmp", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xEA */ { 0, IT_JUMP, AM_A | OT_P, AM_NOT_USED, AM_NOT_USED, "jmp", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xEB */ { 0, IT_JUMP, AM_J | OT_B, AM_NOT_USED, AM_NOT_USED, "jmp", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xEC */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_REGISTER | OT_W, AM_NOT_USED, "in", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xED */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_REGISTER | OT_W, AM_NOT_USED, "in", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xEE */ { 0, IT_GENERIC, AM_REGISTER | OT_W, AM_REGISTER | OT_B, AM_NOT_USED, "out", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xEF */ { 0, IT_GENERIC, AM_REGISTER | OT_W, AM_REGISTER | OT_V, AM_NOT_USED, "out", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xF0 */ { 0, IT_PREFIX, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "lock:", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xF1 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xF2 */ { 0, IT_PREFIX, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "repne:", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xF3 */ { 0, IT_PREFIX, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "rep:", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xF4 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "hlt", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xF5 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "cmc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xF6 */ { 12, IT_REFERENCE, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xF7 */ { 13, IT_REFERENCE, AM_E | OT_V, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xF8 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "clc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xF9 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "stc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xFA */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "cli", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xFB */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "sti", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xFC */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "cld", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xFD */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "std", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xFE */ { 14, IT_REFERENCE, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xFF */ { 15, IT_REFERENCE, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+const Opcode s_opcode_byte_after_0f[] = {
+ /* 0x0 */ { 16, IT_REFERENCE, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 17, IT_REFERENCE, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_W, AM_NOT_USED, "lar", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_W, AM_NOT_USED, "lsl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "clts", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x8 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "invd", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x9 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "wbinvd", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "ud2", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xD */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xE */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xF */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x10 */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_NOT_USED, "movups", true,
+ /* F2h */ { 0, IT_GENERIC, AM_V | OT_SD, AM_W | OT_SD, AM_NOT_USED, "movsd" },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_SS, AM_W | OT_SS, AM_NOT_USED, "movss" },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PD, AM_NOT_USED, "movupd" } },
+ /* 0x11 */ { 0, IT_GENERIC, AM_W | OT_PS, AM_V | OT_PS, AM_NOT_USED, "movups", true,
+ /* F2h */ { 0, IT_GENERIC, AM_W | OT_SD, AM_V | OT_SD, AM_NOT_USED, "movsd" },
+ /* F3h */ { 0, IT_GENERIC, AM_W | OT_SS, AM_V | OT_SS, AM_NOT_USED, "movss" },
+ /* 66h */ { 0, IT_GENERIC, AM_W | OT_PD, AM_V | OT_PD, AM_NOT_USED, "movupd" } },
+ /* 0x12 */ { 0, IT_GENERIC, AM_W | OT_Q, AM_V | OT_Q, AM_NOT_USED, "movlps", true,
+ /* F2h */ { 0, IT_GENERIC, AM_V | OT_Q, AM_V | OT_Q, AM_NOT_USED, "movhlps" }, // only one of ...
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_Q, AM_V | OT_Q, AM_NOT_USED, "movhlps" }, // ...these two is correct, Intel doesn't specify which
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_Q, AM_W | OT_S, AM_NOT_USED, "movlpd" } },
+ /* 0x13 */ { 0, IT_GENERIC, AM_V | OT_Q, AM_W | OT_Q, AM_NOT_USED, "movlps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_Q, AM_W | OT_Q, AM_NOT_USED, "movlpd" } },
+ /* 0x14 */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_Q, AM_NOT_USED, "unpcklps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_Q, AM_NOT_USED, "unpcklpd" } },
+ /* 0x15 */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_Q, AM_NOT_USED, "unpckhps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_Q, AM_NOT_USED, "unpckhpd" } },
+ /* 0x16 */ { 0, IT_GENERIC, AM_V | OT_Q, AM_W | OT_Q, AM_NOT_USED, "movhps", true,
+ /* F2h */ { 0, IT_GENERIC, AM_V | OT_Q, AM_V | OT_Q, AM_NOT_USED, "movlhps" }, // only one of...
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_Q, AM_V | OT_Q, AM_NOT_USED, "movlhps" }, // ...these two is correct, Intel doesn't specify which
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_Q, AM_W | OT_Q, AM_NOT_USED, "movhpd" } },
+ /* 0x17 */ { 0, IT_GENERIC, AM_W | OT_Q, AM_V | OT_Q, AM_NOT_USED, "movhps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_W | OT_Q, AM_V | OT_Q, AM_NOT_USED, "movhpd" } },
+ /* 0x18 */ { 18, IT_REFERENCE, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x19 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1A */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1B */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1C */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1D */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1E */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1F */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x20 */ { 0, IT_GENERIC, AM_R | OT_D, AM_C | OT_D, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x21 */ { 0, IT_GENERIC, AM_R | OT_D, AM_D | OT_D, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x22 */ { 0, IT_GENERIC, AM_C | OT_D, AM_R | OT_D, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x23 */ { 0, IT_GENERIC, AM_D | OT_D, AM_R | OT_D, AM_NOT_USED, "mov", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x24 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x25 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x26 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x27 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x28 */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_NOT_USED, "movaps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PD, AM_NOT_USED, "movapd" } },
+ /* 0x29 */ { 0, IT_GENERIC, AM_W | OT_PS, AM_V | OT_PS, AM_NOT_USED, "movaps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_W | OT_PD, AM_V | OT_PD, AM_NOT_USED, "movapd" } },
+ /* 0x2A */ { 0, IT_GENERIC, AM_V | OT_PS, AM_Q | OT_Q, AM_NOT_USED, "cvtpi2ps", true,
+ /* F2h */ { 0, IT_GENERIC, AM_V | OT_SD, AM_E | OT_D, AM_NOT_USED, "cvtsi2sd" },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_SS, AM_E | OT_D, AM_NOT_USED, "cvtsi2ss" },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_Q | OT_DQ, AM_NOT_USED, "cvtpi2pd" } },
+ /* 0x2B */ { 0, IT_GENERIC, AM_W | OT_PS, AM_V | OT_PS, AM_NOT_USED, "movntps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_W | OT_PD, AM_V | OT_PD, AM_NOT_USED, "movntpd" } },
+ /* 0x2C */ { 0, IT_GENERIC, AM_Q | OT_Q, AM_W | OT_PS, AM_NOT_USED, "cvttps2pi", true,
+ /* F2h */ { 0, IT_GENERIC, AM_G | OT_D, AM_W | OT_SD, AM_NOT_USED, "cvttsd2si" },
+ /* F3h */ { 0, IT_GENERIC, AM_G | OT_D, AM_W | OT_SS, AM_NOT_USED, "cvttss2si" },
+ /* 66h */ { 0, IT_GENERIC, AM_Q | OT_DQ, AM_W | OT_PD, AM_NOT_USED, "cvttpd2pi" } },
+ /* 0x2D */ { 0, IT_GENERIC, AM_Q | OT_Q, AM_W | OT_PS, AM_NOT_USED, "cvtps2pi", true,
+ /* F2h */ { 0, IT_GENERIC, AM_G | OT_D, AM_W | OT_SD, AM_NOT_USED, "cvtsd2si" },
+ /* F3h */ { 0, IT_GENERIC, AM_G | OT_D, AM_W | OT_SS, AM_NOT_USED, "cvtss2si" },
+ /* 66h */ { 0, IT_GENERIC, AM_Q | OT_DQ, AM_W | OT_PD, AM_NOT_USED, "cvtpd2pi" } },
+ /* 0x2E */ { 0, IT_GENERIC, AM_V | OT_SS, AM_W | OT_SS, AM_NOT_USED, "ucomiss", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_SD, AM_W | OT_SD, AM_NOT_USED, "ucomisd" } },
+ /* 0x2F */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_SS, AM_NOT_USED, "comiss", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_SD, AM_W | OT_SD, AM_NOT_USED, "comisd" } },
+ /* 0x30 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "wrmsr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x31 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "rdtsc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x32 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "rdmsr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x33 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "rdpmc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x34 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "sysenter", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x35 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "sysexit", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x36 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x37 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x38 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x39 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3A */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3B */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3C */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "movnti", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3D */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3E */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3F */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x40 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmovo", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x41 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmovno", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x42 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmovc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x43 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmovnc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x44 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmovz", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x45 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmovnz", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x46 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmovbe", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x47 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmova", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x48 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmovs", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x49 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmovns", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4A */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmovpe", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4B */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmovpo", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4C */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmovl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4D */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmovge", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4E */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmovle", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4F */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "cmovg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x50 */ { 0, IT_GENERIC, AM_E | OT_D, AM_V | OT_PS, AM_NOT_USED, "movmskps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_E | OT_D, AM_V | OT_PD, AM_NOT_USED, "movmskpd" } },
+ /* 0x51 */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_NOT_USED, "sqrtps", true,
+ /* F2h */ { 0, IT_GENERIC, AM_V | OT_SD, AM_W | OT_SD, AM_NOT_USED, "sqrtsd" },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_SS, AM_W | OT_SS, AM_NOT_USED, "sqrtss" },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PD, AM_NOT_USED, "sqrtpd" } },
+ /* 0x52 */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_NOT_USED, "rsqrtps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_SS, AM_W | OT_SS, AM_NOT_USED, "rsqrtss" },
+ /* 66h */ { 0 } },
+ /* 0x53 */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_NOT_USED, "rcpps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_SS, AM_W | OT_SS, AM_NOT_USED, "rcpss" },
+ /* 66h */ { 0 } },
+ /* 0x54 */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_NOT_USED, "andps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PD, AM_NOT_USED, "andpd" } },
+ /* 0x55 */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_NOT_USED, "andnps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PD, AM_NOT_USED, "andnpd" } },
+ /* 0x56 */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_NOT_USED, "orps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PD, AM_NOT_USED, "orpd" } },
+ /* 0x57 */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_NOT_USED, "xorps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PD, AM_NOT_USED, "xorpd" } },
+ /* 0x58 */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_NOT_USED, "addps", true,
+ /* F2h */ { 0, IT_GENERIC, AM_V | OT_SD, AM_W | OT_SD, AM_NOT_USED, "addsd" },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_SS, AM_W | OT_SS, AM_NOT_USED, "addss" },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PD, AM_NOT_USED, "addpd" } },
+ /* 0x59 */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_NOT_USED, "mulps", true,
+ /* F2h */ { 0, IT_GENERIC, AM_V | OT_SD, AM_W | OT_SD, AM_NOT_USED, "mulsd" },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_SS, AM_W | OT_SS, AM_NOT_USED, "mulss" },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PD, AM_NOT_USED, "mulpd" } },
+ /* 0x5A */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PS, AM_NOT_USED, "cvtps2pd", true,
+ /* F2h */ { 0, IT_GENERIC, AM_V | OT_SD, AM_W | OT_SD, AM_NOT_USED, "cvtsd2ss" },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_SS, AM_W | OT_SS, AM_NOT_USED, "cvtss2sd" },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PD, AM_NOT_USED, "cvtpd2ps" } },
+ /* 0x5B */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_DQ, AM_NOT_USED, "cvtdq2ps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_PS, AM_NOT_USED, "cvttps2dq" },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_PS, AM_NOT_USED, "cvtps2dq" } },
+ /* 0x5C */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_NOT_USED, "subps", true,
+ /* F2h */ { 0, IT_GENERIC, AM_V | OT_SD, AM_W | OT_SD, AM_NOT_USED, "subsd" },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_SS, AM_W | OT_SS, AM_NOT_USED, "subss" },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PD, AM_NOT_USED, "subpd" } },
+ /* 0x5D */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_NOT_USED, "minps", true,
+ /* F2h */ { 0, IT_GENERIC, AM_V | OT_SD, AM_W | OT_SD, AM_NOT_USED, "minsd" },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_SS, AM_W | OT_SS, AM_NOT_USED, "minss" },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PD, AM_NOT_USED, "minpd" } },
+ /* 0x5E */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_NOT_USED, "divps", true,
+ /* F2h */ { 0, IT_GENERIC, AM_V | OT_SD, AM_W | OT_SD, AM_NOT_USED, "divsd" },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_SS, AM_W | OT_SS, AM_NOT_USED, "divss" },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PD, AM_NOT_USED, "divpd" } },
+ /* 0x5F */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_NOT_USED, "maxps", true,
+ /* F2h */ { 0, IT_GENERIC, AM_V | OT_SD, AM_W | OT_SD, AM_NOT_USED, "maxsd" },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_SS, AM_W | OT_SS, AM_NOT_USED, "maxss" },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PD, AM_NOT_USED, "maxpd" } },
+ /* 0x60 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_D, AM_NOT_USED, "punpcklbw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "punpcklbw" } },
+ /* 0x61 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_D, AM_NOT_USED, "punpcklwd", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "punpcklwd" } },
+ /* 0x62 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_D, AM_NOT_USED, "punpckldq", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "punpckldq" } },
+ /* 0x63 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_D, AM_NOT_USED, "packsswb", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "packsswb" } },
+ /* 0x64 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_D, AM_NOT_USED, "pcmpgtb", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pcmpgtb" } },
+ /* 0x65 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_D, AM_NOT_USED, "pcmpgtw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pcmpgtw" } },
+ /* 0x66 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_D, AM_NOT_USED, "pcmpgtd", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pcmpgtd" } },
+ /* 0x67 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_D, AM_NOT_USED, "packuswb", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "packuswb" } },
+ /* 0x68 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_D, AM_NOT_USED, "punpckhbw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_P | OT_DQ, AM_Q | OT_DQ, AM_NOT_USED, "punpckhbw" } },
+ /* 0x69 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_D, AM_NOT_USED, "punpckhwd", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_P | OT_DQ, AM_Q | OT_DQ, AM_NOT_USED, "punpckhwd" } },
+ /* 0x6A */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_D, AM_NOT_USED, "punpckhdq", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_P | OT_DQ, AM_Q | OT_DQ, AM_NOT_USED, "punpckhdq" } },
+ /* 0x6B */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_D, AM_NOT_USED, "packssdw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_P | OT_DQ, AM_Q | OT_DQ, AM_NOT_USED, "packssdw" } },
+ /* 0x6C */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "not used without prefix", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "punpcklqdq" } },
+ /* 0x6D */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "not used without prefix", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "punpcklqdq" } },
+ /* 0x6E */ { 0, IT_GENERIC, AM_P | OT_D, AM_E | OT_D, AM_NOT_USED, "movd", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_E | OT_D, AM_NOT_USED, "movd" } },
+ /* 0x6F */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_D, AM_NOT_USED, "movq", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "movdqu" },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "movdqa" } },
+ /* 0x70 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_I | OT_B, "pshuf", true,
+ /* F2h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_I | OT_B, "pshuflw" },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_I | OT_B, "pshufhw" },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_I | OT_B, "pshufd" } },
+ /* 0x71 */ { 19, IT_REFERENCE, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x72 */ { 20, IT_REFERENCE, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x73 */ { 21, IT_REFERENCE, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x74 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pcmpeqb", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pcmpeqb" } },
+ /* 0x75 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pcmpeqw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pcmpeqw" } },
+ /* 0x76 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pcmpeqd", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pcmpeqd" } },
+ /* 0x77 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "emms", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+
+ // The following six opcodes are escapes into the MMX stuff, which this disassembler does not support.
+ /* 0x78 */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x79 */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7A */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7B */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7C */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7D */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+
+ /* 0x7E */ { 0, IT_GENERIC, AM_E | OT_D, AM_P | OT_D, AM_NOT_USED, "movd", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_Q, AM_W | OT_Q, AM_NOT_USED, "movq" },
+ /* 66h */ { 0, IT_GENERIC, AM_E | OT_D, AM_V | OT_DQ, AM_NOT_USED, "movd" } },
+ /* 0x7F */ { 0, IT_GENERIC, AM_Q | OT_Q, AM_P | OT_Q, AM_NOT_USED, "movq", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0, IT_GENERIC, AM_W | OT_DQ, AM_V | OT_DQ, AM_NOT_USED, "movdqu" },
+ /* 66h */ { 0, IT_GENERIC, AM_W | OT_DQ, AM_V | OT_DQ, AM_NOT_USED, "movdqa" } },
+ /* 0x80 */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "jo", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x81 */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "jno", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x82 */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "jc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x83 */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "jnc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x84 */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "jz", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x85 */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "jnz", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x86 */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "jbe", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x87 */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "ja", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x88 */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "js", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x89 */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "jns", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x8A */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "jpe", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x8B */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "jpo", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x8C */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "jl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x8D */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "jge", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x8E */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "jle", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x8F */ { 0, IT_JUMP, AM_J | OT_V, AM_NOT_USED, AM_NOT_USED, "jg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x90 */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "seto", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x91 */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "setno", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x92 */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "setc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x93 */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "setnc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x94 */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "setz", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x95 */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "setnz", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x96 */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "setbe", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x97 */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "seta", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x98 */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "sets", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x99 */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "setns", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x9A */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "setpe", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x9B */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "setpo", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x9C */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "setl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x9D */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "setge", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x9E */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "setle", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x9F */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "setg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA0 */ { 0, IT_GENERIC, AM_REGISTER | OT_W, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA1 */ { 0, IT_GENERIC, AM_REGISTER | OT_W, AM_NOT_USED, AM_NOT_USED, "pop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA2 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "cpuid", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA3 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "bt", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA4 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_I | OT_B, "shld", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA5 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_I | OT_B | AM_REGISTER, "shld", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA6 */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA7 */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA8 */ { 0, IT_GENERIC, AM_REGISTER | OT_W, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xA9 */ { 0, IT_GENERIC, AM_REGISTER | OT_W, AM_NOT_USED, AM_NOT_USED, "pop", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xAA */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "rsm", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xAB */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "bts", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xAC */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_I | OT_B, "shrd", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xAD */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_I | OT_B | AM_REGISTER, "shrd", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xAE */ { 22, IT_REFERENCE, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xAF */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "imul", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB0 */ { 0, IT_GENERIC, AM_E | OT_B, AM_G | OT_B, AM_NOT_USED, "cmpxchg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB1 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "cmpxchg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB2 */ { 0, IT_GENERIC, AM_M | OT_P, AM_NOT_USED, AM_NOT_USED, "lss", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB3 */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "btr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB4 */ { 0, IT_GENERIC, AM_M | OT_P, AM_NOT_USED, AM_NOT_USED, "lfs", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB5 */ { 0, IT_GENERIC, AM_M | OT_P, AM_NOT_USED, AM_NOT_USED, "lgs", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB6 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_B, AM_NOT_USED, "movzx", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB7 */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_W, AM_NOT_USED, "movzx", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB8 */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xB9 */ { 0, IT_UNKNOWN, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "ud1", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xBA */ { 23, IT_REFERENCE, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xBB */ { 0, IT_GENERIC, AM_E | OT_V, AM_G | OT_V, AM_NOT_USED, "btc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xBC */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "bsf", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xBD */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_V, AM_NOT_USED, "bsr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xBE */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_B, AM_NOT_USED, "movsx", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xBF */ { 0, IT_GENERIC, AM_G | OT_V, AM_E | OT_W, AM_NOT_USED, "movsx", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC0 */ { 0, IT_GENERIC, AM_E | OT_B, AM_G | OT_B, AM_NOT_USED, "xadd", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC1 */ { 0, IT_GENERIC, AM_E | OT_V, AM_NOT_USED, AM_NOT_USED, "xadd", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC2 */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_I | OT_B, "cmpps", true,
+ /* F2h */ { 0, IT_GENERIC, AM_V | OT_SD, AM_W | OT_SD, AM_I | OT_B, "cmpsd" },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_SS, AM_W | OT_SS, AM_I | OT_B, "cmpss" },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PD, AM_I | OT_B, "cmppd" } },
+ /* 0xC3 */ { 0, IT_GENERIC, AM_E | OT_D, AM_G | OT_D, AM_NOT_USED, "movnti", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC4 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_E | OT_D, AM_I | OT_B, "pinsrw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_E | OT_D, AM_I | OT_B, "pinsrw" } },
+ /* 0xC5 */ { 0, IT_GENERIC, AM_G | OT_D, AM_P | OT_Q, AM_I | OT_B, "pextrw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_G | OT_D, AM_V | OT_DQ, AM_I | OT_B, "pextrw" } },
+ /* 0xC6 */ { 0, IT_GENERIC, AM_V | OT_PS, AM_W | OT_PS, AM_I | OT_B, "shufps", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_PD, AM_I | OT_B, "shufpd" } },
+ /* 0xC7 */ { 24, IT_REFERENCE, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC8 */ { 0, IT_GENERIC, AM_REGISTER | OT_D, AM_NOT_USED, AM_NOT_USED, "bswap", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xC9 */ { 0, IT_GENERIC, AM_REGISTER | OT_D, AM_NOT_USED, AM_NOT_USED, "bswap", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xCA */ { 0, IT_GENERIC, AM_REGISTER | OT_D, AM_NOT_USED, AM_NOT_USED, "bswap", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xCB */ { 0, IT_GENERIC, AM_REGISTER | OT_D, AM_NOT_USED, AM_NOT_USED, "bswap", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xCC */ { 0, IT_GENERIC, AM_REGISTER | OT_D, AM_NOT_USED, AM_NOT_USED, "bswap", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xCD */ { 0, IT_GENERIC, AM_REGISTER | OT_D, AM_NOT_USED, AM_NOT_USED, "bswap", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xCE */ { 0, IT_GENERIC, AM_REGISTER | OT_D, AM_NOT_USED, AM_NOT_USED, "bswap", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xCF */ { 0, IT_GENERIC, AM_REGISTER | OT_D, AM_NOT_USED, AM_NOT_USED, "bswap", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xD0 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xD1 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psrlw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psrlw" } },
+ /* 0xD2 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psrld", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psrld" } },
+ /* 0xD3 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psrlq", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psrlq" } },
+ /* 0xD4 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "paddq", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "paddq" } },
+ /* 0xD5 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pmullw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pmullw" } },
+ /* 0xD6 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "unused without prefix", true,
+ /* F2h */ { 0, IT_GENERIC, AM_P | OT_Q, AM_W | OT_Q, AM_NOT_USED, "movdq2q" },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_Q | OT_Q, AM_NOT_USED, "movq2dq" },
+ /* 66h */ { 0, IT_GENERIC, AM_W | OT_Q, AM_V | OT_Q, AM_NOT_USED, "movq" } },
+ /* 0xD7 */ { 0, IT_GENERIC, AM_G | OT_D, AM_P | OT_Q, AM_NOT_USED, "pmovmskb", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_G | OT_D, AM_V | OT_DQ, AM_NOT_USED, "pmovmskb" } },
+ /* 0xD8 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psubusb", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psubusb" } },
+ /* 0xD9 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psubusw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psubusw" } },
+ /* 0xDA */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pminub", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pminub" } },
+ /* 0xDB */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pand", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pand" } },
+ /* 0xDC */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "paddusb", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "paddusb" } },
+ /* 0xDD */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "paddusw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "paddusw" } },
+ /* 0xDE */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pmaxub", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pmaxub" } },
+ /* 0xDF */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pandn", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pandn" } },
+ /* 0xE0 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pavgb", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pavgb" } },
+ /* 0xE1 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psraw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psrqw" } },
+ /* 0xE2 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psrad", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psrad" } },
+ /* 0xE3 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pavgw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pavgw" } },
+ /* 0xE4 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pmulhuw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pmulhuw" } },
+ /* 0xE5 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pmulhuw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pmulhw" } },
+ /* 0xE6 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "not used without prefix", true,
+ /* F2h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_PD, AM_NOT_USED, "cvtpd2dq" },
+ /* F3h */ { 0, IT_GENERIC, AM_V | OT_PD, AM_W | OT_DQ, AM_NOT_USED, "cvtdq2pd" },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_PD, AM_NOT_USED, "cvttpd2dq" } },
+ /* 0xE7 */ { 0, IT_GENERIC, AM_W | OT_Q, AM_V | OT_Q, AM_NOT_USED, "movntq", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_W | OT_DQ, AM_V | OT_DQ, AM_NOT_USED, "movntdq" } },
+ /* 0xE8 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psubsb", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psubsb" } },
+ /* 0xE9 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psubsw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psubsw" } },
+ /* 0xEA */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pminsw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pminsw" } },
+ /* 0xEB */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "por", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "por" } },
+ /* 0xEC */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "paddsb", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "paddsb" } },
+ /* 0xED */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "paddsw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "paddsw" } },
+ /* 0xEE */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pmaxsw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pmaxsw" } },
+ /* 0xEF */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pxor", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pxor" } },
+ /* 0xF0 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0xF1 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psllw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psllw" } },
+ /* 0xF2 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pslld", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pslld" } },
+ /* 0xF3 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psllq", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psllq" } },
+ /* 0xF4 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pmuludq", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pmuludq" } },
+ /* 0xF5 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "pmaddwd", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "pmaddwd" } },
+ /* 0xF6 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psadbw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psadbw" } },
+ /* 0xF7 */ { 0, IT_GENERIC, AM_P | OT_PI, AM_Q | OT_PI, AM_NOT_USED, "maskmovq", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "maskmovdqu" } },
+ /* 0xF8 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psubb", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psubb" } },
+ /* 0xF9 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psubw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psubw" } },
+ /* 0xFA */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psubd", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psubd" } },
+ /* 0xFB */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "psubq", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "psubq" } },
+ /* 0xFC */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "paddb", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "paddb" } },
+ /* 0xFD */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "paddw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "paddw" } },
+ /* 0xFE */ { 0, IT_GENERIC, AM_P | OT_Q, AM_Q | OT_Q, AM_NOT_USED, "paddd", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_V | OT_DQ, AM_W | OT_DQ, AM_NOT_USED, "paddd" } },
+ /* 0xFF */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+// Opcode-extension table (index 0x0-0x7) for instructions beginning 0F 00:
+// the segment/task-register group (sldt, str, lldt, ltr, verr, verw).
+// Entries /6 and /7 are unused. No F2h/F3h/66h prefix forms are defined.
+const Opcode s_opcode_byte_after_0f00[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_W, AM_NOT_USED, AM_NOT_USED, "sldt", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_W, AM_NOT_USED, AM_NOT_USED, "str", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_E | OT_W, AM_NOT_USED, AM_NOT_USED, "lldt", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_E | OT_W, AM_NOT_USED, AM_NOT_USED, "ltr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_E | OT_W, AM_NOT_USED, AM_NOT_USED, "verr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, AM_E | OT_W, AM_NOT_USED, AM_NOT_USED, "verw", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+// Opcode-extension table (index 0x0-0x7) for instructions beginning 0F 01:
+// descriptor-table / machine-status group (sgdt, sidt, lgdt, lidt, smsw,
+// lmsw, invlpg). Entry /5 is unused.
+const Opcode s_opcode_byte_after_0f01[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_M | OT_S, AM_NOT_USED, AM_NOT_USED, "sgdt", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_M | OT_S, AM_NOT_USED, AM_NOT_USED, "sidt", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_M | OT_S, AM_NOT_USED, AM_NOT_USED, "lgdt", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_M | OT_S, AM_NOT_USED, AM_NOT_USED, "lidt", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_E | OT_W, AM_NOT_USED, AM_NOT_USED, "smsw", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_E | OT_W, AM_NOT_USED, AM_NOT_USED, "lmsw", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_M | OT_B, AM_NOT_USED, AM_NOT_USED, "invlpg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+// Opcode-extension table (index 0x0-0x7) for instructions beginning 0F 18:
+// memory prefetch hints; entries /4-/7 unused.
+// NOTE(review): /1-/3 are encoded here with AM_REGISTER | OT_D while /0 uses
+// a memory operand; per the Intel SDM prefetcht0/t1/t2 take an m8 memory
+// operand, so the register encoding for /1-/3 looks suspect -- verify
+// against the operand-decoding code before relying on it.
+const Opcode s_opcode_byte_after_0f18[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_M | OT_ADDRESS_MODE_M, AM_NOT_USED, AM_NOT_USED, "prefetch", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_REGISTER | OT_D, AM_NOT_USED, AM_NOT_USED, "prefetch", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_REGISTER | OT_D, AM_NOT_USED, AM_NOT_USED, "prefetch", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_REGISTER | OT_D, AM_NOT_USED, AM_NOT_USED, "prefetch", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+// Opcode-extension table (index 0x0-0x7) for instructions beginning 0F 71:
+// MMX/SSE2 word shifts by immediate (psrlw /2, psraw /4, psllw /6).
+// Each valid entry's 66h slot is the SSE2 (xmm) form of the same mnemonic.
+// NOTE(review): the 66h entries here use AM_P | OT_DQ, whereas the analogous
+// tables for 0F 72 / 0F 73 below use AM_W | OT_DQ for the shifted operand --
+// possibly inconsistent; confirm which encoding the decoder expects.
+const Opcode s_opcode_byte_after_0f71[] = {
+ /* 0x0 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_I | OT_B, AM_NOT_USED, "psrlw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_P | OT_DQ, AM_I | OT_B, AM_NOT_USED, "psrlw" } },
+ /* 0x3 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_I | OT_B, AM_NOT_USED, "psraw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_P | OT_DQ, AM_I | OT_B, AM_NOT_USED, "psraw" } },
+ /* 0x5 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_I | OT_B, AM_NOT_USED, "psllw", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_P | OT_DQ, AM_I | OT_B, AM_NOT_USED, "psllw" } },
+ /* 0x7 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+// Opcode-extension table (index 0x0-0x7) for instructions beginning 0F 72:
+// MMX/SSE2 doubleword shifts by immediate (psrld /2, psrad /4, pslld /6).
+// 66h slots give the SSE2 (xmm) forms; remaining entries are unused.
+const Opcode s_opcode_byte_after_0f72[] = {
+ /* 0x0 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_I | OT_B, AM_NOT_USED, "psrld", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_W | OT_DQ, AM_I | OT_B, AM_NOT_USED, "psrld" } },
+ /* 0x3 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_I | OT_B, AM_NOT_USED, "psrad", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_W | OT_DQ, AM_I | OT_B, AM_NOT_USED, "psrad" } },
+ /* 0x5 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_I | OT_B, AM_NOT_USED, "pslld", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_W | OT_DQ, AM_I | OT_B, AM_NOT_USED, "pslld" } },
+ /* 0x7 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+// Opcode-extension table (index 0x0-0x7) for instructions beginning 0F 73:
+// MMX/SSE2 quadword shifts by immediate (psrlq /2, psllq /6, pslldq /7).
+// NOTE(review): per the Intel SDM, 66 0F 73 /3 is psrldq, but /3 is marked
+// unused here; and /7 (pslldq) exists only with the 66h prefix, yet this
+// table also defines a no-prefix /7 entry. Verify both against the SDM
+// opcode map before trusting decode results for these encodings.
+const Opcode s_opcode_byte_after_0f73[] = {
+ /* 0x0 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_I | OT_B, AM_NOT_USED, "psrlq", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_W | OT_DQ, AM_I | OT_B, AM_NOT_USED, "psrlq" } },
+ /* 0x3 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_P | OT_Q, AM_I | OT_B, AM_NOT_USED, "psllq", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_W | OT_DQ, AM_I | OT_B, AM_NOT_USED, "psllq" } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_W | OT_DQ, AM_I | OT_B, AM_NOT_USED, "pslldq", true,
+ /* F2h */ { 0 },
+ /* F3h */ { 0 },
+ /* 66h */ { 0, IT_GENERIC, AM_W | OT_DQ, AM_I | OT_B, AM_NOT_USED, "pslldq" } },
+};
+
+// Opcode-extension table (index 0x0-0x7) for instructions beginning 0F AE:
+// FX state / fence group (fxsave, fxrstor, ldmxcsr, stmxcsr, lfence,
+// mfence, clflush/sfence). Entry /4 is unused; /7 decodes to either
+// clflush or sfence, which this table does not distinguish.
+const Opcode s_opcode_byte_after_0fae[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "fxsave", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "fxrstor", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "ldmxcsr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "stmxcsr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "lfence", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "mfence", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, "clflush/sfence", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+};
+
+// Opcode-extension table (index 0x0-0x7) for instructions beginning 0F BA:
+// bit-test group with imm8 operand (bt /4, bts /5, btr /6, btc /7).
+// Entries /0-/3 are unused.
+const Opcode s_opcode_byte_after_0fba[] = {
+ /* 0x0 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "bt", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "bts", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "btr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "btc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+// Opcode-extension table for instructions beginning 0F C7 (cmpxch8b at /1).
+// NOTE(review): unlike the sibling tables, this one has only 2 entries
+// instead of 8. If the lookup code indexes these tables by the full 3-bit
+// extension, values /2-/7 would read out of bounds -- verify the indexing
+// site clamps or special-cases this table.
+const Opcode s_opcode_byte_after_0fc7[] = {
+ /* 0x0 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_M | OT_Q, AM_NOT_USED, AM_NOT_USED, "cmpxch8b", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+// Opcode-extension table (index 0x0-0x7) for opcode 80: the immediate-group
+// ALU operations on a byte operand (Eb, Ib) -- add, or, adc, sbb, and, sub,
+// xor, cmp, in extension order.
+const Opcode s_opcode_byte_after_80[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "add", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "or", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "adc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "sbb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "and", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "sub", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "xor", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "cmp", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+// Opcode-extension table (index 0x0-0x7) for opcode 81: the immediate-group
+// ALU operations on a word/dword operand with a full-size immediate
+// (Ev, Iv) -- add, or, adc, sbb, and, sub, xor, cmp, in extension order.
+const Opcode s_opcode_byte_after_81[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_V, AM_NOT_USED, "add", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_V, AM_NOT_USED, "or", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_V, AM_NOT_USED, "adc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_V, AM_NOT_USED, "sbb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_V, AM_NOT_USED, "and", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_V, AM_NOT_USED, "sub", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_V, AM_NOT_USED, "xor", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_V, AM_NOT_USED, "cmp", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+// Opcode-extension table (index 0x0-0x7) for opcode 82.
+// NOTE(review): the Intel SDM documents 82h as an (invalid-in-64-bit) alias
+// of 80h, i.e. byte operands (Eb, Ib); these entries instead use Ev, Ib,
+// mirroring the 83h table below. Verify the intended operand size before
+// relying on instruction-length computation for this opcode.
+const Opcode s_opcode_byte_after_82[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "add", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "or", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "adc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "sbb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "and", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "sub", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "xor", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "cmp", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+// Opcode-extension table (index 0x0-0x7) for opcode 83: the immediate-group
+// ALU operations on a word/dword operand with a sign-extended byte
+// immediate (Ev, Ib) -- add, or, adc, sbb, and, sub, xor, cmp.
+const Opcode s_opcode_byte_after_83[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "add", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "or", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "adc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "sbb", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "and", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "sub", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "xor", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "cmp", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+const Opcode s_opcode_byte_after_c0[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "rol", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "ror", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "rcl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "rcr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "shl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "shr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "sal", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "sar", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+const Opcode s_opcode_byte_after_c1[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "rol", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "ror", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "rcl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "rcr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "shl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "shr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "sal", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_B, AM_NOT_USED, "sar", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+const Opcode s_opcode_byte_after_d0[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_B, AM_IMPLICIT, AM_NOT_USED, "rol", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_B, AM_IMPLICIT, AM_NOT_USED, "ror", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_E | OT_B, AM_IMPLICIT, AM_NOT_USED, "rcl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_E | OT_B, AM_IMPLICIT, AM_NOT_USED, "rcr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_E | OT_B, AM_IMPLICIT, AM_NOT_USED, "shl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, AM_E | OT_B, AM_IMPLICIT, AM_NOT_USED, "shr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_E | OT_B, AM_IMPLICIT, AM_NOT_USED, "sal", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_E | OT_B, AM_IMPLICIT, AM_NOT_USED, "sar", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+const Opcode s_opcode_byte_after_d1[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_V, AM_IMPLICIT, AM_NOT_USED, "rol", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_V, AM_IMPLICIT, AM_NOT_USED, "ror", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_E | OT_V, AM_IMPLICIT, AM_NOT_USED, "rcl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_E | OT_V, AM_IMPLICIT, AM_NOT_USED, "rcr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_E | OT_V, AM_IMPLICIT, AM_NOT_USED, "shl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, AM_E | OT_V, AM_IMPLICIT, AM_NOT_USED, "shr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_E | OT_V, AM_IMPLICIT, AM_NOT_USED, "sal", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_E | OT_V, AM_IMPLICIT, AM_NOT_USED, "sar", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+const Opcode s_opcode_byte_after_d2[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_B, AM_REGISTER | OT_B, AM_NOT_USED, "rol", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_B, AM_REGISTER | OT_B, AM_NOT_USED, "ror", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_E | OT_B, AM_REGISTER | OT_B, AM_NOT_USED, "rcl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_E | OT_B, AM_REGISTER | OT_B, AM_NOT_USED, "rcr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_E | OT_B, AM_REGISTER | OT_B, AM_NOT_USED, "shl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, AM_E | OT_B, AM_REGISTER | OT_B, AM_NOT_USED, "shr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_E | OT_B, AM_REGISTER | OT_B, AM_NOT_USED, "sal", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_E | OT_B, AM_REGISTER | OT_B, AM_NOT_USED, "sar", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+const Opcode s_opcode_byte_after_d3[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_V, AM_REGISTER | OT_B, AM_NOT_USED, "rol", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_V, AM_REGISTER | OT_B, AM_NOT_USED, "ror", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_E | OT_V, AM_REGISTER | OT_B, AM_NOT_USED, "rcl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_E | OT_V, AM_REGISTER | OT_B, AM_NOT_USED, "rcr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_E | OT_V, AM_REGISTER | OT_B, AM_NOT_USED, "shl", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, AM_E | OT_V, AM_REGISTER | OT_B, AM_NOT_USED, "shr", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_E | OT_V, AM_REGISTER | OT_B, AM_NOT_USED, "sal", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_E | OT_V, AM_REGISTER | OT_B, AM_NOT_USED, "sar", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+const Opcode s_opcode_byte_after_f6[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "test", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_B, AM_I | OT_B, AM_NOT_USED, "test", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "not", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "neg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, OT_B | AM_REGISTER, AM_E | OT_B, AM_NOT_USED, "mul", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, OT_B | AM_REGISTER, AM_E | OT_B, AM_NOT_USED, "imul", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_E | OT_B, AM_NOT_USED, "div", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_REGISTER | OT_B, AM_E | OT_B, AM_NOT_USED, "idiv", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+const Opcode s_opcode_byte_after_f7[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_V, AM_NOT_USED, "test", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_V, AM_I | OT_V, AM_NOT_USED, "test", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_GENERIC, AM_E | OT_V, AM_NOT_USED, AM_NOT_USED, "not", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_GENERIC, AM_E | OT_V, AM_NOT_USED, AM_NOT_USED, "neg", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_E | OT_V, AM_NOT_USED, "mul", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_E | OT_V, AM_NOT_USED, "imul", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_E | OT_V, AM_NOT_USED, "div", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_GENERIC, AM_REGISTER | OT_V, AM_E | OT_V, AM_NOT_USED, "idiv", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+const Opcode s_opcode_byte_after_fe[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "inc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_B, AM_NOT_USED, AM_NOT_USED, "dec", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+const Opcode s_opcode_byte_after_ff[] = {
+ /* 0x0 */ { 0, IT_GENERIC, AM_E | OT_V, AM_NOT_USED, AM_NOT_USED, "inc", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x1 */ { 0, IT_GENERIC, AM_E | OT_V, AM_NOT_USED, AM_NOT_USED, "dec", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x2 */ { 0, IT_JUMP, AM_E | OT_V, AM_NOT_USED, AM_NOT_USED, "call", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x3 */ { 0, IT_JUMP, AM_E | OT_P, AM_NOT_USED, AM_NOT_USED, "call", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x4 */ { 0, IT_JUMP, AM_E | OT_V, AM_NOT_USED, AM_NOT_USED, "jmp", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x5 */ { 0, IT_JUMP, AM_E | OT_P, AM_NOT_USED, AM_NOT_USED, "jmp", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x6 */ { 0, IT_GENERIC, AM_E | OT_V, AM_NOT_USED, AM_NOT_USED, "push", false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } },
+ /* 0x7 */ { 0, IT_UNUSED, AM_NOT_USED, AM_NOT_USED, AM_NOT_USED, 0, false, /* F2h */ { 0 }, /* F3h */ { 0 }, /* 66h */ { 0 } }
+};
+
+/*
+* A table of all the other tables, containing some extra information, e.g.
+* how to mask out the byte we're looking at.
+*/
+const OpcodeTable MiniDisassembler::s_ia32_opcode_map_[]={
+ // One-byte opcodes and jumps to larger
+ /* 0 */ {s_first_opcode_byte, 0, 0xff, 0, 0xff},
+ // Two-byte opcodes (second byte)
+ /* 1 */ {s_opcode_byte_after_0f, 0, 0xff, 0, 0xff},
+ // Start of tables for opcodes using ModR/M bits as extension
+ /* 2 */ {s_opcode_byte_after_80, 3, 0x07, 0, 0x07},
+ /* 3 */ {s_opcode_byte_after_81, 3, 0x07, 0, 0x07},
+ /* 4 */ {s_opcode_byte_after_82, 3, 0x07, 0, 0x07},
+ /* 5 */ {s_opcode_byte_after_83, 3, 0x07, 0, 0x07},
+ /* 6 */ {s_opcode_byte_after_c0, 3, 0x07, 0, 0x07},
+ /* 7 */ {s_opcode_byte_after_c1, 3, 0x07, 0, 0x07},
+ /* 8 */ {s_opcode_byte_after_d0, 3, 0x07, 0, 0x07},
+ /* 9 */ {s_opcode_byte_after_d1, 3, 0x07, 0, 0x07},
+ /* 10 */ {s_opcode_byte_after_d2, 3, 0x07, 0, 0x07},
+ /* 11 */ {s_opcode_byte_after_d3, 3, 0x07, 0, 0x07},
+ /* 12 */ {s_opcode_byte_after_f6, 3, 0x07, 0, 0x07},
+ /* 13 */ {s_opcode_byte_after_f7, 3, 0x07, 0, 0x07},
+ /* 14 */ {s_opcode_byte_after_fe, 3, 0x07, 0, 0x01},
+ /* 15 */ {s_opcode_byte_after_ff, 3, 0x07, 0, 0x07},
+ /* 16 */ {s_opcode_byte_after_0f00, 3, 0x07, 0, 0x07},
+ /* 17 */ {s_opcode_byte_after_0f01, 3, 0x07, 0, 0x07},
+ /* 18 */ {s_opcode_byte_after_0f18, 3, 0x07, 0, 0x07},
+ /* 19 */ {s_opcode_byte_after_0f71, 3, 0x07, 0, 0x07},
+ /* 20 */ {s_opcode_byte_after_0f72, 3, 0x07, 0, 0x07},
+ /* 21 */ {s_opcode_byte_after_0f73, 3, 0x07, 0, 0x07},
+ /* 22 */ {s_opcode_byte_after_0fae, 3, 0x07, 0, 0x07},
+ /* 23 */ {s_opcode_byte_after_0fba, 3, 0x07, 0, 0x07},
+ /* 24 */ {s_opcode_byte_after_0fc7, 3, 0x07, 0, 0x01}
+};
+
+}; // namespace sidestep
diff --git a/chromium/tools/traceline/traceline/sidestep/mini_disassembler.cc b/chromium/tools/traceline/traceline/sidestep/mini_disassembler.cc
new file mode 100644
index 00000000000..a603ebe637a
--- /dev/null
+++ b/chromium/tools/traceline/traceline/sidestep/mini_disassembler.cc
@@ -0,0 +1,416 @@
+// Copyright 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Implementation of MiniDisassembler.
+
+#include "sidestep/mini_disassembler.h"
+
+namespace sidestep {
+
+MiniDisassembler::MiniDisassembler(bool operand_default_is_32_bits,
+ bool address_default_is_32_bits)
+ : operand_default_is_32_bits_(operand_default_is_32_bits),
+ address_default_is_32_bits_(address_default_is_32_bits) {
+ Initialize();
+}
+
+MiniDisassembler::MiniDisassembler()
+ : operand_default_is_32_bits_(true),
+ address_default_is_32_bits_(true) {
+ Initialize();
+}
+
+InstructionType MiniDisassembler::Disassemble(
+ unsigned char* start_byte,
+ unsigned int* instruction_bytes) {
+ // Clean up any state from previous invocations.
+ Initialize();
+
+ // Start by processing any prefixes.
+ unsigned char* current_byte = start_byte;
+ unsigned int size = 0;
+ InstructionType instruction_type = ProcessPrefixes(current_byte, &size);
+
+ if (IT_UNKNOWN == instruction_type)
+ return instruction_type;
+
+ current_byte += size;
+ size = 0;
+
+ // Invariant: We have stripped all prefixes, and the operand_is_32_bits_
+ // and address_is_32_bits_ flags are correctly set.
+
+ instruction_type = ProcessOpcode(current_byte, 0, &size);
+
+ // Check for error processing instruction
+ if ((IT_UNKNOWN == instruction_type_) || (IT_UNUSED == instruction_type_)) {
+ return IT_UNKNOWN;
+ }
+
+ current_byte += size;
+
+ // Invariant: operand_bytes_ indicates the total size of operands
+ // specified by the opcode and/or ModR/M byte and/or SIB byte.
+ // current_byte points to the first byte after the ModR/M byte, or after
+ // the SIB byte if it is present (i.e. the first byte of any operands
+ // encoded in the instruction).
+
+ // We get the total length of any prefixes, the opcode, and the ModR/M and
+ // SIB bytes if present, by taking the difference of the original starting
+ // address and the current byte (which points to the first byte of the
+ // operands if present, or to the first byte of the next instruction if
+ // they are not). Adding the count of bytes in the operands encoded in
+ // the instruction gives us the full length of the instruction in bytes.
+ *instruction_bytes += operand_bytes_ + (current_byte - start_byte);
+
+ // Return the instruction type, which was set by ProcessOpcode().
+ return instruction_type_;
+}
+
+void MiniDisassembler::Initialize() {
+ operand_is_32_bits_ = operand_default_is_32_bits_;
+ address_is_32_bits_ = address_default_is_32_bits_;
+ operand_bytes_ = 0;
+ have_modrm_ = false;
+ should_decode_modrm_ = false;
+ instruction_type_ = IT_UNKNOWN;
+ got_f2_prefix_ = false;
+ got_f3_prefix_ = false;
+ got_66_prefix_ = false;
+}
+
+InstructionType MiniDisassembler::ProcessPrefixes(unsigned char* start_byte,
+ unsigned int* size) {
+ InstructionType instruction_type = IT_GENERIC;
+ const Opcode& opcode = s_ia32_opcode_map_[0].table_[*start_byte];
+
+ switch (opcode.type_) {
+ case IT_PREFIX_ADDRESS:
+ address_is_32_bits_ = !address_default_is_32_bits_;
+ goto nochangeoperand;
+ case IT_PREFIX_OPERAND:
+ operand_is_32_bits_ = !operand_default_is_32_bits_;
+ nochangeoperand:
+ case IT_PREFIX:
+
+ if (0xF2 == (*start_byte))
+ got_f2_prefix_ = true;
+ else if (0xF3 == (*start_byte))
+ got_f3_prefix_ = true;
+ else if (0x66 == (*start_byte))
+ got_66_prefix_ = true;
+
+ instruction_type = opcode.type_;
+ (*size)++;
+ // we got a prefix, so add one and check next byte
+ ProcessPrefixes(start_byte + 1, size);
+ default:
+ break; // not a prefix byte
+ }
+
+ return instruction_type;
+}
+
+InstructionType MiniDisassembler::ProcessOpcode(unsigned char* start_byte,
+ unsigned int table_index,
+ unsigned int* size) {
+ const OpcodeTable& table = s_ia32_opcode_map_[table_index]; // Get our table
+ unsigned char current_byte = (*start_byte) >> table.shift_;
+ current_byte = current_byte & table.mask_; // Mask out the bits we will use
+
+ // Check whether the byte we have is inside the table we have.
+ if (current_byte < table.min_lim_ || current_byte > table.max_lim_) {
+ instruction_type_ = IT_UNKNOWN;
+ return instruction_type_;
+ }
+
+ const Opcode& opcode = table.table_[current_byte];
+ if (IT_UNUSED == opcode.type_) {
+ // This instruction is not used by the IA-32 ISA, so we indicate
+ // this to the user. Probably means that we were pointed to
+ // a byte in memory that was not the start of an instruction.
+ instruction_type_ = IT_UNUSED;
+ return instruction_type_;
+ } else if (IT_REFERENCE == opcode.type_) {
+ // We are looking at an opcode that has more bytes (or is continued
+ // in the ModR/M byte). Recursively find the opcode definition in
+ // the table for the opcode's next byte.
+ (*size)++;
+ ProcessOpcode(start_byte + 1, opcode.table_index_, size);
+ return instruction_type_;
+ }
+
+ const SpecificOpcode* specific_opcode = reinterpret_cast<
+ const SpecificOpcode*>(&opcode);
+ if (opcode.is_prefix_dependent_) {
+ if (got_f2_prefix_ && opcode.opcode_if_f2_prefix_.mnemonic_ != 0) {
+ specific_opcode = &opcode.opcode_if_f2_prefix_;
+ } else if (got_f3_prefix_ && opcode.opcode_if_f3_prefix_.mnemonic_ != 0) {
+ specific_opcode = &opcode.opcode_if_f3_prefix_;
+ } else if (got_66_prefix_ && opcode.opcode_if_66_prefix_.mnemonic_ != 0) {
+ specific_opcode = &opcode.opcode_if_66_prefix_;
+ }
+ }
+
+ // Inv: The opcode type is known.
+ instruction_type_ = specific_opcode->type_;
+
+ // Let's process the operand types to see if we have any immediate
+ // operands, and/or a ModR/M byte.
+
+ ProcessOperand(specific_opcode->flag_dest_);
+ ProcessOperand(specific_opcode->flag_source_);
+ ProcessOperand(specific_opcode->flag_aux_);
+
+ // Inv: We have processed the opcode and incremented operand_bytes_
+ // by the number of bytes of any operands specified by the opcode
+ // that are stored in the instruction (not registers etc.). Now
+ // we need to return the total number of bytes for the opcode and
+ // for the ModR/M or SIB bytes if they are present.
+
+ if (table.mask_ != 0xff) {
+ if (have_modrm_) {
+ // we're looking at a ModR/M byte so we're not going to
+ // count that into the opcode size
+ ProcessModrm(start_byte, size);
+ return IT_GENERIC;
+ } else {
+ // need to count the ModR/M byte even if it's just being
+ // used for opcode extension
+ (*size)++;
+ return IT_GENERIC;
+ }
+ } else {
+ if (have_modrm_) {
+ // The ModR/M byte is the next byte.
+ (*size)++;
+ ProcessModrm(start_byte + 1, size);
+ return IT_GENERIC;
+ } else {
+ (*size)++;
+ return IT_GENERIC;
+ }
+ }
+}
+
+bool MiniDisassembler::ProcessOperand(int flag_operand) {
+ bool succeeded = true;
+ if (AM_NOT_USED == flag_operand)
+ return succeeded;
+
+ // Decide what to do based on the addressing mode.
+ switch (flag_operand & AM_MASK) {
+ // No ModR/M byte indicated by these addressing modes, and no
+ // additional (e.g. immediate) parameters.
+ case AM_A: // Direct address
+ case AM_F: // EFLAGS register
+ case AM_X: // Memory addressed by the DS:SI register pair
+ case AM_Y: // Memory addressed by the ES:DI register pair
+ case AM_IMPLICIT: // Parameter is implicit, occupies no space in
+ // instruction
+ break;
+
+ // There is a ModR/M byte but it does not necessarily need
+ // to be decoded.
+ case AM_C: // reg field of ModR/M selects a control register
+ case AM_D: // reg field of ModR/M selects a debug register
+ case AM_G: // reg field of ModR/M selects a general register
+ case AM_P: // reg field of ModR/M selects an MMX register
+ case AM_R: // mod field of ModR/M may refer only to a general register
+ case AM_S: // reg field of ModR/M selects a segment register
+ case AM_T: // reg field of ModR/M selects a test register
+ case AM_V: // reg field of ModR/M selects a 128-bit XMM register
+ have_modrm_ = true;
+ break;
+
+ // In these addressing modes, there is a ModR/M byte and it needs to be
+ // decoded. No other (e.g. immediate) params than indicated in ModR/M.
+ case AM_E: // Operand is either a general-purpose register or memory,
+ // specified by ModR/M byte
+ case AM_M: // ModR/M byte will refer only to memory
+ case AM_Q: // Operand is either an MMX register or memory (complex
+ // evaluation), specified by ModR/M byte
+ case AM_W: // Operand is either a 128-bit XMM register or memory (complex
+ // eval), specified by ModR/M byte
+ have_modrm_ = true;
+ should_decode_modrm_ = true;
+ break;
+
+ // These addressing modes specify an immediate or an offset value
+ // directly, so we need to look at the operand type to see how many
+ // bytes.
+ case AM_I: // Immediate data.
+ case AM_J: // Jump to offset.
+ case AM_O: // Operand is at offset.
+ switch (flag_operand & OT_MASK) {
+ case OT_B: // Byte regardless of operand-size attribute.
+ operand_bytes_ += OS_BYTE;
+ break;
+ case OT_C: // Byte or word, depending on operand-size attribute.
+ if (operand_is_32_bits_)
+ operand_bytes_ += OS_WORD;
+ else
+ operand_bytes_ += OS_BYTE;
+ break;
+ case OT_D: // Doubleword, regardless of operand-size attribute.
+ operand_bytes_ += OS_DOUBLE_WORD;
+ break;
+ case OT_DQ: // Double-quadword, regardless of operand-size attribute.
+ operand_bytes_ += OS_DOUBLE_QUAD_WORD;
+ break;
+ case OT_P: // 32-bit or 48-bit pointer, depending on operand-size
+ // attribute.
+ if (operand_is_32_bits_)
+ operand_bytes_ += OS_48_BIT_POINTER;
+ else
+ operand_bytes_ += OS_32_BIT_POINTER;
+ break;
+ case OT_PS: // 128-bit packed single-precision floating-point data.
+ operand_bytes_ += OS_128_BIT_PACKED_SINGLE_PRECISION_FLOATING;
+ break;
+ case OT_Q: // Quadword, regardless of operand-size attribute.
+ operand_bytes_ += OS_QUAD_WORD;
+ break;
+ case OT_S: // 6-byte pseudo-descriptor.
+ operand_bytes_ += OS_PSEUDO_DESCRIPTOR;
+ break;
+ case OT_SD: // Scalar Double-Precision Floating-Point Value
+ case OT_PD: // Unaligned packed double-precision floating point value
+ operand_bytes_ += OS_DOUBLE_PRECISION_FLOATING;
+ break;
+ case OT_SS:
+ // Scalar element of a 128-bit packed single-precision
+ // floating data.
+ // We simply report failure (return false) since we don't have to
+ // support floating point.
+ succeeded = false;
+ break;
+ case OT_V: // Word or doubleword, depending on operand-size attribute.
+ if (operand_is_32_bits_)
+ operand_bytes_ += OS_DOUBLE_WORD;
+ else
+ operand_bytes_ += OS_WORD;
+ break;
+ case OT_W: // Word, regardless of operand-size attribute.
+ operand_bytes_ += OS_WORD;
+ break;
+
+ // Can safely ignore these.
+ case OT_A: // Two one-word operands in memory or two double-word
+ // operands in memory
+ case OT_PI: // Quadword MMX technology register (e.g. mm0)
+ case OT_SI: // Doubleword integer register (e.g., eax)
+ break;
+
+ default:
+ break;
+ }
+ break;
+
+ default:
+ break;
+ }
+
+ return succeeded;
+}
+
+bool MiniDisassembler::ProcessModrm(unsigned char* start_byte,
+ unsigned int* size) {
+ // If we don't need to decode, we just return the size of the ModR/M
+ // byte (there is never a SIB byte in this case).
+ if (!should_decode_modrm_) {
+ (*size)++;
+ return true;
+ }
+
+ // We never care about the reg field, only the combination of the mod
+ // and r/m fields, so let's start by packing those fields together into
+ // 5 bits.
+ unsigned char modrm = (*start_byte);
+ unsigned char mod = modrm & 0xC0; // keep only the top two bits (mod field)
+ modrm = modrm & 0x07; // keep only the bottom 3 bits (r/m field)
+ mod = mod >> 3; // shift the mod field to the right place
+ modrm = mod | modrm; // combine the r/m and mod fields as discussed
+ mod = mod >> 3; // shift the mod field down to bits 1..0
+
+ // Invariant: modrm contains the mod field in bits 4..3 and the r/m field
+ // in bits 2..0, and mod contains the mod field in bits 1..0
+
+ const ModrmEntry* modrm_entry = 0;
+ if (address_is_32_bits_)
+ modrm_entry = &s_ia32_modrm_map_[modrm];
+ else
+ modrm_entry = &s_ia16_modrm_map_[modrm];
+
+ // Invariant: modrm_entry points to information that we need to decode
+ // the ModR/M byte.
+
+ // Add to the count of operand bytes, if the ModR/M byte indicates
+ // that some operands are encoded in the instruction.
+ if (modrm_entry->is_encoded_in_instruction_)
+ operand_bytes_ += modrm_entry->operand_size_;
+
+ // Process the SIB byte if necessary, and return the count
+ // of ModR/M and SIB bytes.
+ if (modrm_entry->use_sib_byte_) {
+ (*size)++;
+ return ProcessSib(start_byte + 1, mod, size);
+ } else {
+ (*size)++;
+ return true;
+ }
+}
+
+bool MiniDisassembler::ProcessSib(unsigned char* start_byte,
+ unsigned char mod,
+ unsigned int* size) {
+ // get the base field from bits 2..0 of the SIB byte
+ unsigned char sib_base = (*start_byte) & 0x07;
+ if (0x05 == sib_base) {
+ switch (mod) {
+ case 0x00: // mod == 00
+ case 0x02: // mod == 10
+ operand_bytes_ += OS_DOUBLE_WORD;
+ break;
+ case 0x01: // mod == 01
+ operand_bytes_ += OS_BYTE;
+ break;
+ case 0x03: // mod == 11
+ // According to the IA-32 docs, there does not seem to be a disp
+ // value for this value of mod
+ default:
+ break;
+ }
+ }
+
+ (*size)++;
+ return true;
+}
+
+}; // namespace sidestep
diff --git a/chromium/tools/traceline/traceline/sidestep/mini_disassembler.h b/chromium/tools/traceline/traceline/sidestep/mini_disassembler.h
new file mode 100644
index 00000000000..2c3ea2f2d5e
--- /dev/null
+++ b/chromium/tools/traceline/traceline/sidestep/mini_disassembler.h
@@ -0,0 +1,156 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Definition of MiniDisassembler.
+
+#ifndef TRACELINE_SIDESTEP_MINI_DISASSEMBLER_H_
+#define TRACELINE_SIDESTEP_MINI_DISASSEMBLER_H_
+
+#include "sidestep/mini_disassembler_types.h"
+
+namespace sidestep {
+
+// This small disassembler is very limited
+// in its functionality, and in fact does only the bare minimum required by the
+// preamble patching utility. It may be useful for other purposes, however.
+//
+// The limitations include at least the following:
+// -# No support for coprocessor opcodes, MMX, etc.
+// -# No machine-readable identification of opcodes or decoding of
+// assembly parameters. The name of the opcode (as a string) is given,
+// however, to aid debugging.
+//
+// You may ask what this little disassembler actually does, then? The answer is
+// that it does the following, which is exactly what the patching utility needs:
+// -# Indicates if opcode is a jump (any kind) or a return (any kind)
+// because this is important for the patching utility to determine if
+// a function is too short or there are jumps too early in it for it
+// to be preamble patched.
+// -# The opcode length is always calculated, so that the patching utility
+// can figure out where the next instruction starts, and whether it
+// already has enough instructions to replace with the absolute jump
+// to the patching code.
+//
+// The usage is quite simple; just create a MiniDisassembler and use its
+// Disassemble() method.
+//
+// If you would like to extend this disassembler, please refer to the
+// IA-32 Intel Architecture Software Developer's Manual Volume 2:
+// Instruction Set Reference for information about operand decoding
+// etc.
+class MiniDisassembler {
+ public:
+
+ // Creates a new instance and sets defaults.
+ //
+ // operand_default_32_bits: If true, the default operand size is
+ // set to 32 bits, which is the default under Win32. Otherwise it is 16 bits.
+ // address_default_32_bits: If true, the default address size is
+ // set to 32 bits, which is the default under Win32. Otherwise it is 16 bits.
+ MiniDisassembler(bool operand_default_32_bits,
+ bool address_default_32_bits);
+
+ // Equivalent to MiniDisassembler(true, true);
+ MiniDisassembler();
+
+ // Attempts to disassemble a single instruction starting from the
+ // address in memory it is pointed to.
+ //
+ // start: Address where disassembly should start.
+ // instruction_bytes: Variable that will be incremented by
+ // the length in bytes of the instruction.
+ // Returns IT_JUMP, IT_RETURN or IT_GENERIC on success. IT_UNKNOWN
+ // if unable to disassemble, IT_UNUSED if this seems to be an unused
+ // opcode. In the last two (error) cases, *instruction_bytes is left
+ // unchanged.
+ //
+ // Postcondition: This instance of the disassembler is ready to be used again,
+ // with unchanged defaults from creation time.
+ InstructionType Disassemble(unsigned char* start,
+ unsigned int* instruction_bytes);
+
+ private:
+
+ // Makes the disassembler ready for reuse.
+ void Initialize();
+
+ // Sets the flags for address and operand sizes.
+ // Returns the type of the first prefix byte, or IT_GENERIC if it is not a prefix.
+ InstructionType ProcessPrefixes(unsigned char* start, unsigned int* size);
+
+ // Sets the flag for whether we have ModR/M, and increments
+ // operand_bytes_ if any are specified by the opcode directly.
+ // Returns the type of the instruction decoded.
+ InstructionType ProcessOpcode(unsigned char* start,
+ unsigned int table,
+ unsigned int* size);
+
+ // Checks the type of the supplied operand. Increments
+ // operand_bytes_ if it directly indicates an immediate etc.
+ // operand. Asserts have_modrm_ if the operand specifies
+ // a ModR/M byte.
+ bool ProcessOperand(int flag_operand);
+
+ // Increments operand_bytes_ by size specified by ModR/M and
+ // by SIB if present.
+ // Returns true on success; *size is incremented by one for the
+ // ModR/M byte and by one more if a SIB byte follows.
+ bool ProcessModrm(unsigned char* start, unsigned int* size);
+
+ // Processes the SIB byte that it is pointed to.
+ // start: Pointer to the SIB byte.
+ // mod: The mod field from the ModR/M byte.
+ // Returns true to indicate success (one SIB byte consumed)
+ bool ProcessSib(unsigned char* start, unsigned char mod, unsigned int* size);
+
+ // The instruction type we have decoded from the opcode.
+ InstructionType instruction_type_;
+
+ // Counts the number of bytes that is occupied by operands in
+ // the current instruction (note: we don't care about how large
+ // operands stored in registers etc. are).
+ unsigned int operand_bytes_;
+
+ // True iff there is a ModR/M byte in this instruction.
+ bool have_modrm_;
+
+ // True iff we need to decode the ModR/M byte (sometimes it just
+ // points to a register, we can tell by the addressing mode).
+ bool should_decode_modrm_;
+
+ // Current operand size is 32 bits if true, 16 bits if false.
+ bool operand_is_32_bits_;
+
+ // Default operand size is 32 bits if true, 16 bits if false.
+ bool operand_default_is_32_bits_;
+
+ // Current address size is 32 bits if true, 16 bits if false.
+ bool address_is_32_bits_;
+
+ // Default address size is 32 bits if true, 16 bits if false.
+ bool address_default_is_32_bits_;
+
+ // Huge big opcode table based on the IA-32 manual, defined
+ // in Ia32OpcodeMap.cpp
+ static const OpcodeTable s_ia32_opcode_map_[];
+
+ // Somewhat smaller table to help with decoding ModR/M bytes
+ // when 16-bit addressing mode is being used. Defined in
+ // Ia32ModrmMap.cpp
+ static const ModrmEntry s_ia16_modrm_map_[];
+
+ // Somewhat smaller table to help with decoding ModR/M bytes
+ // when 32-bit addressing mode is being used. Defined in
+ // Ia32ModrmMap.cpp
+ static const ModrmEntry s_ia32_modrm_map_[];
+
+ // Indicators of whether we got certain prefixes that certain
+ // silly Intel instructions depend on in nonstandard ways for
+ // their behaviors.
+ bool got_f2_prefix_, got_f3_prefix_, got_66_prefix_;
+};
+
+}; // namespace sidestep
+
+#endif // TRACELINE_SIDESTEP_MINI_DISASSEMBLER_H_
diff --git a/chromium/tools/traceline/traceline/sidestep/mini_disassembler_types.h b/chromium/tools/traceline/traceline/sidestep/mini_disassembler_types.h
new file mode 100644
index 00000000000..cb9e0062456
--- /dev/null
+++ b/chromium/tools/traceline/traceline/sidestep/mini_disassembler_types.h
@@ -0,0 +1,197 @@
+// Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// Several simple types used by the disassembler and some of the patching
+// mechanisms.
+
+#ifndef TRACELINE_SIDESTEP_MINI_DISASSEMBLER_TYPES_H_
+#define TRACELINE_SIDESTEP_MINI_DISASSEMBLER_TYPES_H_
+
+namespace sidestep {
+
+// Categories of instructions that we care about
+enum InstructionType {
+ // This opcode is not used
+ IT_UNUSED,
+ // This disassembler does not recognize this opcode (error)
+ IT_UNKNOWN,
+ // This is not an instruction but a reference to another table
+ IT_REFERENCE,
+ // This byte is a prefix byte that we can ignore
+ IT_PREFIX,
+ // This is a prefix byte that switches to the nondefault address size
+ IT_PREFIX_ADDRESS,
+ // This is a prefix byte that switches to the nondefault operand size
+ IT_PREFIX_OPERAND,
+ // A jump or call instruction
+ IT_JUMP,
+ // A return instruction
+ IT_RETURN,
+ // Any other type of instruction (in this case we don't care what it is)
+ IT_GENERIC,
+};
+
+// Lists IA-32 operand sizes in multiples of 8 bits
+enum OperandSize {
+ OS_ZERO = 0,
+ OS_BYTE = 1,
+ OS_WORD = 2,
+ OS_DOUBLE_WORD = 4,
+ OS_QUAD_WORD = 8,
+ OS_DOUBLE_QUAD_WORD = 16,
+ OS_32_BIT_POINTER = 32/8,
+ OS_48_BIT_POINTER = 48/8,
+ OS_SINGLE_PRECISION_FLOATING = 32/8,
+ OS_DOUBLE_PRECISION_FLOATING = 64/8,
+ OS_DOUBLE_EXTENDED_PRECISION_FLOATING = 80/8,
+ OS_128_BIT_PACKED_SINGLE_PRECISION_FLOATING = 128/8,
+ OS_PSEUDO_DESCRIPTOR = 6
+};
+
+// Operand addressing methods from the IA-32 manual. The enAmMask value
+// is a mask for the rest. The other enumeration values are named for the
+// names given to the addressing methods in the manual, e.g. enAm_D is for
+// the D addressing method.
+//
+// The reason we use a full 4 bytes and a mask, is that we need to combine
+// these flags with the enOperandType to store the details
+// on the operand in a single integer.
+enum AddressingMethod {
+ AM_NOT_USED = 0, // This operand is not used for this instruction
+ AM_MASK = 0x00FF0000, // Mask for the rest of the values in this enumeration
+ AM_A = 0x00010000, // A addressing type
+ AM_C = 0x00020000, // C addressing type
+ AM_D = 0x00030000, // D addressing type
+ AM_E = 0x00040000, // E addressing type
+ AM_F = 0x00050000, // F addressing type
+ AM_G = 0x00060000, // G addressing type
+ AM_I = 0x00070000, // I addressing type
+ AM_J = 0x00080000, // J addressing type
+ AM_M = 0x00090000, // M addressing type
+ AM_O = 0x000A0000, // O addressing type
+ AM_P = 0x000B0000, // P addressing type
+ AM_Q = 0x000C0000, // Q addressing type
+ AM_R = 0x000D0000, // R addressing type
+ AM_S = 0x000E0000, // S addressing type
+ AM_T = 0x000F0000, // T addressing type
+ AM_V = 0x00100000, // V addressing type
+ AM_W = 0x00110000, // W addressing type
+ AM_X = 0x00120000, // X addressing type
+ AM_Y = 0x00130000, // Y addressing type
+ AM_REGISTER = 0x00140000, // Specific register is always used as this op
+ AM_IMPLICIT = 0x00150000, // An implicit, fixed value is used
+};
+
+// Operand types from the IA-32 manual. The enOtMask value is
+// a mask for the rest. The rest of the values are named for the
+// names given to these operand types in the manual, e.g. enOt_ps
+// is for the ps operand type in the manual.
+//
+// The reason we use a full 4 bytes and a mask, is that we need
+// to combine these flags with the enAddressingMethod to store the details
+// on the operand in a single integer.
+enum OperandType {
+ OT_MASK = 0xFF000000,
+ OT_A = 0x01000000,
+ OT_B = 0x02000000,
+ OT_C = 0x03000000,
+ OT_D = 0x04000000,
+ OT_DQ = 0x05000000,
+ OT_P = 0x06000000,
+ OT_PI = 0x07000000,
+ OT_PS = 0x08000000, // actually unsupported for (we don't know its size)
+ OT_Q = 0x09000000,
+ OT_S = 0x0A000000,
+ OT_SS = 0x0B000000,
+ OT_SI = 0x0C000000,
+ OT_V = 0x0D000000,
+ OT_W = 0x0E000000,
+ OT_SD = 0x0F000000, // scalar double-precision floating-point value
+ OT_PD = 0x10000000, // double-precision floating point
+ // dummy "operand type" for address mode M - which doesn't specify
+ // operand type
+ OT_ADDRESS_MODE_M = 0x80000000
+};
+
+// Everything that's in an Opcode (see below) except the three
+// alternative opcode structs for different prefixes.
+struct SpecificOpcode {
+ // Index to continuation table, or 0 if this is the last
+ // byte in the opcode.
+ int table_index_;
+
+ // The opcode type
+ InstructionType type_;
+
+ // Description of the type of the dest, src and aux operands,
+ // put together from an enOperandType flag and an enAddressingMethod
+ // flag.
+ int flag_dest_;
+ int flag_source_;
+ int flag_aux_;
+
+ // We indicate the mnemonic for debugging purposes
+ const char* mnemonic_;
+};
+
+// The information we keep in our tables about each of the different
+// valid instructions recognized by the IA-32 architecture.
+struct Opcode {
+ // Index to continuation table, or 0 if this is the last
+ // byte in the opcode.
+ int table_index_;
+
+ // The opcode type
+ InstructionType type_;
+
+ // Description of the type of the dest, src and aux operands,
+ // put together from an enOperandType flag and an enAddressingMethod
+ // flag.
+ int flag_dest_;
+ int flag_source_;
+ int flag_aux_;
+
+ // We indicate the mnemonic for debugging purposes
+ const char* mnemonic_;
+
+ // Alternative opcode info if certain prefixes are specified.
+ // In most cases, all of these are zeroed-out. Only used if
+ // bPrefixDependent is true.
+ bool is_prefix_dependent_;
+ SpecificOpcode opcode_if_f2_prefix_;
+ SpecificOpcode opcode_if_f3_prefix_;
+ SpecificOpcode opcode_if_66_prefix_;
+};
+
+// Information about each table entry.
+struct OpcodeTable {
+ // Table of instruction entries
+ const Opcode* table_;
+ // How many bytes left to shift ModR/M byte <b>before</b> applying mask
+ unsigned char shift_;
+ // Mask to apply to byte being looked at before comparing to table
+ unsigned char mask_;
+ // Minimum/maximum indexes in table.
+ unsigned char min_lim_;
+ unsigned char max_lim_;
+};
+
+// Information about each entry in table used to decode ModR/M byte.
+struct ModrmEntry {
+ // Is the operand encoded as bytes in the instruction (rather than
+ // if it's e.g. a register in which case it's just encoded in the
+ // ModR/M byte)
+ bool is_encoded_in_instruction_;
+
+ // Is there a SIB byte? In this case we always need to decode it.
+ bool use_sib_byte_;
+
+ // What is the size of the operand (only important if it's encoded
+ // in the instruction)?
+ OperandSize operand_size_;
+};
+
+}; // namespace sidestep
+
+#endif // TRACELINE_SIDESTEP_MINI_DISASSEMBLER_TYPES_H_
diff --git a/chromium/tools/traceline/traceline/stubs.asm b/chromium/tools/traceline/traceline/stubs.asm
new file mode 100644
index 00000000000..d556b6bba59
--- /dev/null
+++ b/chromium/tools/traceline/traceline/stubs.asm
@@ -0,0 +1,132 @@
+; Copyright (c) 2009 The Chromium Authors. All rights reserved.
+; Use of this source code is governed by a BSD-style license that can be
+; found in the LICENSE file.
+
+; This file is just a convenient place for experimenting with x86 encodings.
+
+BITS 32
+
+; sldt to detect which processor we are running on.
+sldt eax
+sidt [esp]
+sidt [esp+2]
+
+lea eax, [fs:0]
+
+mov eax, [fs:0x18]
+
+mov ebx, 0x1234567
+mov eax, 0x1234567
+
+rdtsc
+
+push eax
+pop eax
+
+mov eax, [ecx]
+mov eax, [esp+4]
+mov ebx, [esp+4]
+
+lock xadd [eax], eax
+lock xadd [ecx], ecx
+lock xadd [ecx], eax
+
+jmp eax
+jmp edx
+
+lodsd
+
+rep stosb
+
+rep movsb
+
+mov eax, ebx
+mov edx, edx
+
+mov eax, eax
+
+stosd
+
+add eax, eax
+add edi, ecx
+
+and eax, 0x0000ffff
+and ecx, 0x0000ffff
+and edx, 0x0000ffff
+
+add edi, 0x12345
+add eax, 0x12345
+add ecx, 0x12345
+
+push 0x12
+push BYTE 0x12
+
+mov eax, [ebp+8]
+
+mov eax, 0x1234
+mov [fs:0], eax
+
+call 0x1234
+
+call eax
+call ecx
+
+add ebx, BYTE 3
+or ecx, 0xffff
+or eax, 0xffff
+
+mov eax, [esp+24]
+
+movsd
+movsb
+
+jmp blah
+blah:
+jmp blah
+
+cmp eax, 0x1234567
+cmp ecx, 0x1234567
+je NEAR blah2
+jo NEAR blah2
+blah2:
+
+add esp, 12
+add esp, BYTE 12
+sub esp, BYTE 12
+
+cmp eax, 12
+cmp ecx, BYTE 12
+
+cmp WORD [esp+6], 0x6666
+
+push DWORD [edi-4]
+push DWORD [edi-8]
+push DWORD [edi-12]
+push DWORD [edi-16]
+push DWORD [edi-20]
+
+x:
+loop x
+
+mov edx, [fs:0x4]
+
+cmp ecx, ecx
+cmp ecx, ebx
+cmp ebx, ebx
+
+mov eax,[dword fs:0x24]
+mov eax,[fs:0x24]
+
+mov ecx,[dword fs:0x24]
+mov ecx,[fs:0x24]
+
+mov eax, [ebx+12]
+mov ebx, [ebx+12]
+
+cmovo eax, eax
+
+mov eax, eax
+
+xchg eax, ebx
+xchg ebx, ecx
+xchg ebx, [esp+4]
diff --git a/chromium/tools/traceline/traceline/sym_resolver.h b/chromium/tools/traceline/traceline/sym_resolver.h
new file mode 100644
index 00000000000..eb7e06c08af
--- /dev/null
+++ b/chromium/tools/traceline/traceline/sym_resolver.h
@@ -0,0 +1,167 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// A smaller wrapper around the dbghelp symbol resolution routines.
+// For example:
+// SymResolver resolver("ntdll.dll");
+// resolver.Resolve("ntdll!NtBlahBlah");
+
+#ifndef TRACELINE_SYM_RESOLVER_H_
+#define TRACELINE_SYM_RESOLVER_H_
+
+#include <windows.h>
+#include <dbghelp.h>
+
+#include <vector>
+#include <string>
+#include <map>
+
+static BOOL CALLBACK SymEnumer(PCSTR name, DWORD64 base, PVOID context) {
+ reinterpret_cast<std::vector<DWORD64>*>(context)->push_back(base);
+ return TRUE;
+}
+
+class SymResolver {
+ public:
+
+ // Constructor to load a single DLL.
+ SymResolver(const char* dllname, HANDLE proc = ::GetCurrentProcess())
+ : proc_(proc) {
+
+ // TODO(deanm): Would be nice to get this from WinDBG, but it's buried
+ // in the workspace data blob... _NT_SYMBOL_PATH is not usually set...
+ static char* kSymbolPath =
+ "C:\\Program Files\\Debugging Tools for Windows (x86)\\sym;"
+ "C:\\Program Files\\Debugging Tools for Windows\\sym";
+
+ // If we want to load a specific DLL, or we want to load all.
+ if (::SymInitialize(proc_, kSymbolPath, dllname ? FALSE : TRUE) != TRUE) {
+ NOTREACHED("SymInitialize failed: %d", GetLastError());
+ }
+
+ base_ = 0;
+
+ if (dllname) {
+ base_ = ::SymLoadModuleEx(proc_,
+ NULL,
+ const_cast<char*>(dllname),
+ NULL,
+ reinterpret_cast<DWORD64>(
+ GetModuleHandleA(dllname)),
+ 0,
+ NULL,
+ 0);
+ if (base_ == 0) {
+ NOTREACHED("SymLoadModuleEx(%s) failed: %d", dllname, GetLastError());
+ }
+ }
+
+ std::vector<DWORD64> bases;
+ // The name returned from SymEnumerateModules64 doesn't include the ext,
+ // so we can't differentiate between a dll and exe of the same name. So
+ // collect all of the base addresses and query for more info.
+ // The prototype changed from PSTR to PCSTR, so in order to support older
+ // SDKs we have to cast SymEnumer.
+ PSYM_ENUMMODULES_CALLBACK64 enumer =
+ reinterpret_cast<PSYM_ENUMMODULES_CALLBACK64>(&SymEnumer);
+ if (SymEnumerateModules64(proc_, enumer, &bases) != TRUE) {
+ NOTREACHED("SymEnumerateModules64 failed: %d\n", GetLastError());
+ }
+ for (size_t i = 0; i < bases.size(); ++i) {
+ // This was failing, turns out I was just using the system32
+ // dbghelp.dll which is old, use the one from windbg :(
+ IMAGEHLP_MODULE64 info;
+ info.SizeOfStruct = sizeof(info);
+ if (SymGetModuleInfo64(proc_, bases[i], &info) != TRUE) {
+ NOTREACHED("SymGetModuleInfo64 failed: %d\n", GetLastError());
+ }
+ std::string filename(info.ImageName);
+ size_t last_slash = filename.find_last_of('\\');
+ if (last_slash != std::string::npos)
+ filename = filename.substr(filename.find_last_of('\\') + 1);
+
+ // Map the base address to the image name...
+ dlls_[static_cast<int>(bases[i])] = filename;
+ }
+
+ // TODO(deanm): check the symbols are rad and stuff...
+ }
+
+ char* Resolve(const char* name) {
+ // The API writes to the space after SYMBOL_INFO...
+ struct {
+ SYMBOL_INFO info;
+ char buf[128];
+ } info = {0};
+
+ info.info.SizeOfStruct = sizeof(info.info);
+ info.info.ModBase = base_;
+ info.info.MaxNameLen = 127;
+
+ if (SymFromName(proc_, const_cast<char*>(name), &info.info) != TRUE) {
+ NOTREACHED("SymFromName(%s) failed: %d", name, GetLastError());
+ }
+
+ return reinterpret_cast<char*>(info.info.Address);
+ }
+
+ std::string Unresolve(int ptr) {
+ // The API writes to the space after SYMBOL_INFO...
+ struct {
+ SYMBOL_INFO info;
+ char buf[128];
+ } info = {0};
+
+ info.info.SizeOfStruct = sizeof(info.info);
+ info.info.ModBase = base_;
+ info.info.MaxNameLen = 127;
+ if (!::SymFromAddr(proc_, static_cast<DWORD64>(ptr), NULL, &info.info)) {
+ return std::string("failed");
+ }
+
+ std::string name;
+ int addr = static_cast<int>(info.info.Address);
+ int base = static_cast<int>(info.info.ModBase);
+
+ if (dlls_.count(base) == 1) {
+ name.append(dlls_[base]);
+ } else {
+ name.append("unknown_mod");
+ }
+ name.push_back('!');
+ name.append(info.info.Name);
+
+ char buf[32];
+ _itoa_s(ptr - addr, buf, sizeof(buf), 16);
+ name.append("+0x");
+ name.append(buf);
+
+ DWORD disp;
+ IMAGEHLP_LINE64 line;
+ if (::SymGetLineFromAddr64(
+ proc_, static_cast<DWORD64>(ptr), &disp, &line)) {
+ name.append(" [ ");
+ name.append(line.FileName);
+ name.append(":");
+ _itoa_s(line.LineNumber, buf, sizeof(buf), 10);
+ name.append(buf);
+ name.append(" ]");
+ }
+
+ return name;
+ }
+
+ ~SymResolver() {
+ if (::SymCleanup(proc_) != TRUE) {
+ NOTREACHED("SymCleanup failed: %d", GetLastError());
+ }
+ }
+
+ private:
+ HANDLE proc_;
+ ULONG64 base_;
+ std::map<int, std::string> dlls_;
+};
+
+#endif // TRACELINE_SYM_RESOLVER_H_
diff --git a/chromium/tools/traceline/traceline/syscall_map.h b/chromium/tools/traceline/traceline/syscall_map.h
new file mode 100644
index 00000000000..515e9c7863b
--- /dev/null
+++ b/chromium/tools/traceline/traceline/syscall_map.h
@@ -0,0 +1,2116 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This table is a dump of system call numbers -> function names on an XP
+// system. These numbers change between kernel versions, so it's likely they
+// will have to be regenerated for your system. See the idarub script.
+
+// Define a global map used for resolving a syscall number to name.
+// Call CreateSyscallMap() to create a return a std::map<int, const char*>
+// which maps from a system call number to its function name.
+
+#ifndef TRACELINE_SYSCALL_MAP_H_
+#define TRACELINE_SYSCALL_MAP_H_
+
+#include <map>
+
+// TODO(deanm): Right now these tables are manually extracted and hardcoded
+// here. It would be great (but possibly difficult) to do it on startup. We
+// should at least checksum the DLLs to make sure they match.
+
+std::map<int, const char*> CreateSyscallMap() {
+ std::map<int, const char*> table;
+if (1) { // XP table.
+ table[0] = "ntdll.dll!NtAcceptConnectPort";
+ table[1] = "ntdll.dll!NtAccessCheck";
+ table[2] = "ntdll.dll!ZwAccessCheckAndAuditAlarm";
+ table[3] = "ntdll.dll!NtAccessCheckByType";
+ table[4] = "ntdll.dll!NtAccessCheckByTypeAndAuditAlarm";
+ table[5] = "ntdll.dll!NtAccessCheckByTypeResultList";
+ table[6] = "ntdll.dll!NtAccessCheckByTypeResultListAndAuditAlarm";
+ table[7] = "ntdll.dll!ZwAccessCheckByTypeResultListAndAuditAlarmByHandle";
+ table[8] = "ntdll.dll!NtAddAtom";
+ table[9] = "ntdll.dll!ZwAddBootEntry";
+ table[10] = "ntdll.dll!ZwAdjustGroupsToken";
+ table[11] = "ntdll.dll!ZwAdjustPrivilegesToken";
+ table[12] = "ntdll.dll!NtAlertResumeThread";
+ table[13] = "ntdll.dll!NtAlertThread";
+ table[14] = "ntdll.dll!ZwAllocateLocallyUniqueId";
+ table[15] = "ntdll.dll!NtAllocateUserPhysicalPages";
+ table[16] = "ntdll.dll!NtAllocateUuids";
+ table[17] = "ntdll.dll!NtAllocateVirtualMemory";
+ table[18] = "ntdll.dll!ZwAreMappedFilesTheSame";
+ table[19] = "ntdll.dll!ZwAssignProcessToJobObject";
+ table[20] = "ntdll.dll!ZwCallbackReturn";
+ table[21] = "ntdll.dll!NtCancelDeviceWakeupRequest";
+ table[22] = "ntdll.dll!ZwCancelIoFile";
+ table[23] = "ntdll.dll!ZwCancelTimer";
+ table[24] = "ntdll.dll!NtClearEvent";
+ table[25] = "ntdll.dll!NtClose";
+ table[26] = "ntdll.dll!ZwCloseObjectAuditAlarm";
+ table[27] = "ntdll.dll!NtCompactKeys";
+ table[28] = "ntdll.dll!ZwCompareTokens";
+ table[29] = "ntdll.dll!NtCompleteConnectPort";
+ table[30] = "ntdll.dll!ZwCompressKey";
+ table[31] = "ntdll.dll!NtConnectPort";
+ table[32] = "ntdll.dll!ZwContinue";
+ table[33] = "ntdll.dll!ZwCreateDebugObject";
+ table[34] = "ntdll.dll!ZwCreateDirectoryObject";
+ table[35] = "ntdll.dll!NtCreateEvent";
+ table[36] = "ntdll.dll!NtCreateEventPair";
+ table[37] = "ntdll.dll!NtCreateFile";
+ table[38] = "ntdll.dll!NtCreateIoCompletion";
+ table[39] = "ntdll.dll!ZwCreateJobObject";
+ table[40] = "ntdll.dll!NtCreateJobSet";
+ table[41] = "ntdll.dll!ZwCreateKey";
+ table[42] = "ntdll.dll!ZwCreateMailslotFile";
+ table[43] = "ntdll.dll!ZwCreateMutant";
+ table[44] = "ntdll.dll!ZwCreateNamedPipeFile";
+ table[45] = "ntdll.dll!NtCreatePagingFile";
+ table[46] = "ntdll.dll!ZwCreatePort";
+ table[47] = "ntdll.dll!ZwCreateProcess";
+ table[48] = "ntdll.dll!ZwCreateProcessEx";
+ table[49] = "ntdll.dll!ZwCreateProfile";
+ table[50] = "ntdll.dll!NtCreateSection";
+ table[51] = "ntdll.dll!NtCreateSemaphore";
+ table[52] = "ntdll.dll!ZwCreateSymbolicLinkObject";
+ table[53] = "ntdll.dll!NtCreateThread";
+ table[54] = "ntdll.dll!ZwCreateTimer";
+ table[55] = "ntdll.dll!NtCreateToken";
+ table[56] = "ntdll.dll!ZwCreateWaitablePort";
+ table[57] = "ntdll.dll!NtDebugActiveProcess";
+ table[58] = "ntdll.dll!ZwDebugContinue";
+ table[59] = "ntdll.dll!ZwDelayExecution";
+ table[60] = "ntdll.dll!ZwDeleteAtom";
+ table[61] = "ntdll.dll!NtDeleteBootEntry";
+ table[62] = "ntdll.dll!NtDeleteFile";
+ table[63] = "ntdll.dll!ZwDeleteKey";
+ table[64] = "ntdll.dll!NtDeleteObjectAuditAlarm";
+ table[65] = "ntdll.dll!NtDeleteValueKey";
+ table[66] = "ntdll.dll!ZwDeviceIoControlFile";
+ table[67] = "ntdll.dll!NtDisplayString";
+ table[68] = "ntdll.dll!ZwDuplicateObject";
+ table[69] = "ntdll.dll!NtDuplicateToken";
+ table[70] = "ntdll.dll!ZwEnumerateBootEntries";
+ table[71] = "ntdll.dll!ZwEnumerateKey";
+ table[72] = "ntdll.dll!ZwEnumerateSystemEnvironmentValuesEx";
+ table[73] = "ntdll.dll!NtEnumerateValueKey";
+ table[74] = "ntdll.dll!ZwExtendSection";
+ table[75] = "ntdll.dll!NtFilterToken";
+ table[76] = "ntdll.dll!NtFindAtom";
+ table[77] = "ntdll.dll!ZwFlushBuffersFile";
+ table[78] = "ntdll.dll!ZwFlushInstructionCache";
+ table[79] = "ntdll.dll!NtFlushKey";
+ table[80] = "ntdll.dll!ZwFlushVirtualMemory";
+ table[81] = "ntdll.dll!NtFlushWriteBuffer";
+ table[82] = "ntdll.dll!NtFreeUserPhysicalPages";
+ table[83] = "ntdll.dll!NtFreeVirtualMemory";
+ table[84] = "ntdll.dll!NtFsControlFile";
+ table[85] = "ntdll.dll!NtGetContextThread";
+ table[86] = "ntdll.dll!NtGetDevicePowerState";
+ table[87] = "ntdll.dll!ZwGetPlugPlayEvent";
+ table[88] = "ntdll.dll!NtGetWriteWatch";
+ table[89] = "ntdll.dll!NtImpersonateAnonymousToken";
+ table[90] = "ntdll.dll!ZwImpersonateClientOfPort";
+ table[91] = "ntdll.dll!ZwImpersonateThread";
+ table[92] = "ntdll.dll!ZwInitializeRegistry";
+ table[93] = "ntdll.dll!NtInitiatePowerAction";
+ table[94] = "ntdll.dll!ZwIsProcessInJob";
+ table[95] = "ntdll.dll!NtIsSystemResumeAutomatic";
+ table[96] = "ntdll.dll!ZwListenPort";
+ table[97] = "ntdll.dll!NtLoadDriver";
+ table[98] = "ntdll.dll!NtLoadKey";
+ table[99] = "ntdll.dll!NtLoadKey2";
+ table[100] = "ntdll.dll!NtLockFile";
+ table[101] = "ntdll.dll!ZwLockProductActivationKeys";
+ table[102] = "ntdll.dll!NtLockRegistryKey";
+ table[103] = "ntdll.dll!ZwLockVirtualMemory";
+ table[104] = "ntdll.dll!ZwMakePermanentObject";
+ table[105] = "ntdll.dll!NtMakeTemporaryObject";
+ table[106] = "ntdll.dll!NtMapUserPhysicalPages";
+ table[107] = "ntdll.dll!ZwMapUserPhysicalPagesScatter";
+ table[108] = "ntdll.dll!ZwMapViewOfSection";
+ table[109] = "ntdll.dll!NtModifyBootEntry";
+ table[110] = "ntdll.dll!NtNotifyChangeDirectoryFile";
+ table[111] = "ntdll.dll!NtNotifyChangeKey";
+ table[112] = "ntdll.dll!NtNotifyChangeMultipleKeys";
+ table[113] = "ntdll.dll!ZwOpenDirectoryObject";
+ table[114] = "ntdll.dll!NtOpenEvent";
+ table[115] = "ntdll.dll!NtOpenEventPair";
+ table[116] = "ntdll.dll!NtOpenFile";
+ table[117] = "ntdll.dll!ZwOpenIoCompletion";
+ table[118] = "ntdll.dll!ZwOpenJobObject";
+ table[119] = "ntdll.dll!ZwOpenKey";
+ table[120] = "ntdll.dll!NtOpenMutant";
+ table[121] = "ntdll.dll!ZwOpenObjectAuditAlarm";
+ table[122] = "ntdll.dll!ZwOpenProcess";
+ table[123] = "ntdll.dll!ZwOpenProcessToken";
+ table[124] = "ntdll.dll!ZwOpenProcessTokenEx";
+ table[125] = "ntdll.dll!NtOpenSection";
+ table[126] = "ntdll.dll!NtOpenSemaphore";
+ table[127] = "ntdll.dll!NtOpenSymbolicLinkObject";
+ table[128] = "ntdll.dll!ZwOpenThread";
+ table[129] = "ntdll.dll!NtOpenThreadToken";
+ table[130] = "ntdll.dll!NtOpenThreadTokenEx";
+ table[131] = "ntdll.dll!ZwOpenTimer";
+ table[132] = "ntdll.dll!NtPlugPlayControl";
+ table[133] = "ntdll.dll!ZwPowerInformation";
+ table[134] = "ntdll.dll!ZwPrivilegeCheck";
+ table[135] = "ntdll.dll!ZwPrivilegeObjectAuditAlarm";
+ table[136] = "ntdll.dll!NtPrivilegedServiceAuditAlarm";
+ table[137] = "ntdll.dll!ZwProtectVirtualMemory";
+ table[138] = "ntdll.dll!ZwPulseEvent";
+ table[139] = "ntdll.dll!ZwQueryAttributesFile";
+ table[140] = "ntdll.dll!ZwQueryBootEntryOrder";
+ table[141] = "ntdll.dll!ZwQueryBootOptions";
+ table[142] = "ntdll.dll!NtQueryDebugFilterState";
+ table[143] = "ntdll.dll!NtQueryDefaultLocale";
+ table[144] = "ntdll.dll!ZwQueryDefaultUILanguage";
+ table[145] = "ntdll.dll!ZwQueryDirectoryFile";
+ table[146] = "ntdll.dll!ZwQueryDirectoryObject";
+ table[147] = "ntdll.dll!ZwQueryEaFile";
+ table[148] = "ntdll.dll!NtQueryEvent";
+ table[149] = "ntdll.dll!ZwQueryFullAttributesFile";
+ table[150] = "ntdll.dll!NtQueryInformationAtom";
+ table[151] = "ntdll.dll!ZwQueryInformationFile";
+ table[152] = "ntdll.dll!ZwQueryInformationJobObject";
+ table[153] = "ntdll.dll!ZwQueryInformationPort";
+ table[154] = "ntdll.dll!ZwQueryInformationProcess";
+ table[155] = "ntdll.dll!NtQueryInformationThread";
+ table[156] = "ntdll.dll!ZwQueryInformationToken";
+ table[157] = "ntdll.dll!NtQueryInstallUILanguage";
+ table[158] = "ntdll.dll!NtQueryIntervalProfile";
+ table[159] = "ntdll.dll!NtQueryIoCompletion";
+ table[160] = "ntdll.dll!ZwQueryKey";
+ table[161] = "ntdll.dll!NtQueryMultipleValueKey";
+ table[162] = "ntdll.dll!NtQueryMutant";
+ table[163] = "ntdll.dll!NtQueryObject";
+ table[164] = "ntdll.dll!NtQueryOpenSubKeys";
+ table[165] = "ntdll.dll!NtQueryPerformanceCounter";
+ table[166] = "ntdll.dll!ZwQueryQuotaInformationFile";
+ table[167] = "ntdll.dll!ZwQuerySection";
+ table[168] = "ntdll.dll!NtQuerySecurityObject";
+ table[169] = "ntdll.dll!ZwQuerySemaphore";
+ table[170] = "ntdll.dll!ZwQuerySymbolicLinkObject";
+ table[171] = "ntdll.dll!ZwQuerySystemEnvironmentValue";
+ table[172] = "ntdll.dll!ZwQuerySystemEnvironmentValueEx";
+ table[173] = "ntdll.dll!NtQuerySystemInformation";
+ table[174] = "ntdll.dll!NtQuerySystemTime";
+ table[175] = "ntdll.dll!ZwQueryTimer";
+ table[176] = "ntdll.dll!NtQueryTimerResolution";
+ table[177] = "ntdll.dll!ZwQueryValueKey";
+ table[178] = "ntdll.dll!NtQueryVirtualMemory";
+ table[179] = "ntdll.dll!NtQueryVolumeInformationFile";
+ table[180] = "ntdll.dll!NtQueueApcThread";
+ table[181] = "ntdll.dll!ZwRaiseException";
+ table[182] = "ntdll.dll!ZwRaiseHardError";
+ table[183] = "ntdll.dll!NtReadFile";
+ table[184] = "ntdll.dll!NtReadFileScatter";
+ table[185] = "ntdll.dll!ZwReadRequestData";
+ table[186] = "ntdll.dll!NtReadVirtualMemory";
+ table[187] = "ntdll.dll!ZwRegisterThreadTerminatePort";
+ table[188] = "ntdll.dll!ZwReleaseMutant";
+ table[189] = "ntdll.dll!NtReleaseSemaphore";
+ table[190] = "ntdll.dll!ZwRemoveIoCompletion";
+ table[191] = "ntdll.dll!ZwRemoveProcessDebug";
+ table[192] = "ntdll.dll!ZwRenameKey";
+ table[193] = "ntdll.dll!ZwReplaceKey";
+ table[194] = "ntdll.dll!ZwReplyPort";
+ table[195] = "ntdll.dll!NtReplyWaitReceivePort";
+ table[196] = "ntdll.dll!NtReplyWaitReceivePortEx";
+ table[197] = "ntdll.dll!NtReplyWaitReplyPort";
+ table[198] = "ntdll.dll!ZwRequestDeviceWakeup";
+ table[199] = "ntdll.dll!ZwRequestPort";
+ table[200] = "ntdll.dll!NtRequestWaitReplyPort";
+ table[201] = "ntdll.dll!ZwRequestWakeupLatency";
+ table[202] = "ntdll.dll!NtResetEvent";
+ table[203] = "ntdll.dll!ZwResetWriteWatch";
+ table[204] = "ntdll.dll!NtRestoreKey";
+ table[205] = "ntdll.dll!ZwResumeProcess";
+ table[206] = "ntdll.dll!ZwResumeThread";
+ table[207] = "ntdll.dll!NtSaveKey";
+ table[208] = "ntdll.dll!NtSaveKeyEx";
+ table[209] = "ntdll.dll!NtSaveMergedKeys";
+ table[210] = "ntdll.dll!NtSecureConnectPort";
+ table[211] = "ntdll.dll!ZwSetBootEntryOrder";
+ table[212] = "ntdll.dll!ZwSetBootOptions";
+ table[213] = "ntdll.dll!ZwSetContextThread";
+ table[214] = "ntdll.dll!NtSetDebugFilterState";
+ table[215] = "ntdll.dll!NtSetDefaultHardErrorPort";
+ table[216] = "ntdll.dll!NtSetDefaultLocale";
+ table[217] = "ntdll.dll!ZwSetDefaultUILanguage";
+ table[218] = "ntdll.dll!ZwSetEaFile";
+ table[219] = "ntdll.dll!NtSetEvent";
+ table[220] = "ntdll.dll!NtSetEventBoostPriority";
+ table[221] = "ntdll.dll!NtSetHighEventPair";
+ table[222] = "ntdll.dll!NtSetHighWaitLowEventPair";
+ table[223] = "ntdll.dll!ZwSetInformationDebugObject";
+ table[224] = "ntdll.dll!ZwSetInformationFile";
+ table[225] = "ntdll.dll!ZwSetInformationJobObject";
+ table[226] = "ntdll.dll!ZwSetInformationKey";
+ table[227] = "ntdll.dll!ZwSetInformationObject";
+ table[228] = "ntdll.dll!ZwSetInformationProcess";
+ table[229] = "ntdll.dll!ZwSetInformationThread";
+ table[230] = "ntdll.dll!ZwSetInformationToken";
+ table[231] = "ntdll.dll!NtSetIntervalProfile";
+ table[232] = "ntdll.dll!NtSetIoCompletion";
+ table[233] = "ntdll.dll!ZwSetLdtEntries";
+ table[234] = "ntdll.dll!ZwSetLowEventPair";
+ table[235] = "ntdll.dll!ZwSetLowWaitHighEventPair";
+ table[236] = "ntdll.dll!ZwSetQuotaInformationFile";
+ table[237] = "ntdll.dll!NtSetSecurityObject";
+ table[238] = "ntdll.dll!ZwSetSystemEnvironmentValue";
+ table[239] = "ntdll.dll!ZwSetSystemEnvironmentValueEx";
+ table[240] = "ntdll.dll!ZwSetSystemInformation";
+ table[241] = "ntdll.dll!ZwSetSystemPowerState";
+ table[242] = "ntdll.dll!ZwSetSystemTime";
+ table[243] = "ntdll.dll!ZwSetThreadExecutionState";
+ table[244] = "ntdll.dll!ZwSetTimer";
+ table[245] = "ntdll.dll!NtSetTimerResolution";
+ table[246] = "ntdll.dll!ZwSetUuidSeed";
+ table[247] = "ntdll.dll!ZwSetValueKey";
+ table[248] = "ntdll.dll!NtSetVolumeInformationFile";
+ table[249] = "ntdll.dll!ZwShutdownSystem";
+ table[250] = "ntdll.dll!ZwSignalAndWaitForSingleObject";
+ table[251] = "ntdll.dll!NtStartProfile";
+ table[252] = "ntdll.dll!ZwStopProfile";
+ table[253] = "ntdll.dll!ZwSuspendProcess";
+ table[254] = "ntdll.dll!ZwSuspendThread";
+ table[255] = "ntdll.dll!NtSystemDebugControl";
+ table[256] = "ntdll.dll!ZwTerminateJobObject";
+ table[257] = "ntdll.dll!ZwTerminateProcess";
+ table[258] = "ntdll.dll!ZwTerminateThread";
+ table[259] = "ntdll.dll!NtTestAlert";
+ table[260] = "ntdll.dll!NtTraceEvent";
+ table[261] = "ntdll.dll!NtTranslateFilePath";
+ table[262] = "ntdll.dll!ZwUnloadDriver";
+ table[263] = "ntdll.dll!NtUnloadKey";
+ table[264] = "ntdll.dll!ZwUnloadKeyEx";
+ table[265] = "ntdll.dll!ZwUnlockFile";
+ table[266] = "ntdll.dll!NtUnlockVirtualMemory";
+ table[267] = "ntdll.dll!NtUnmapViewOfSection";
+ table[268] = "ntdll.dll!NtVdmControl";
+ table[269] = "ntdll.dll!NtWaitForDebugEvent";
+ table[270] = "ntdll.dll!NtWaitForMultipleObjects";
+ table[271] = "ntdll.dll!ZwWaitForSingleObject";
+ table[272] = "ntdll.dll!ZwWaitHighEventPair";
+ table[273] = "ntdll.dll!NtWaitLowEventPair";
+ table[274] = "ntdll.dll!NtWriteFile";
+ table[275] = "ntdll.dll!NtWriteFileGather";
+ table[276] = "ntdll.dll!NtWriteRequestData";
+ table[277] = "ntdll.dll!NtWriteVirtualMemory";
+ table[278] = "ntdll.dll!ZwYieldExecution";
+ table[279] = "ntdll.dll!ZwCreateKeyedEvent";
+ table[280] = "ntdll.dll!NtOpenKeyedEvent";
+ table[281] = "ntdll.dll!NtReleaseKeyedEvent";
+ table[282] = "ntdll.dll!NtWaitForKeyedEvent";
+ table[283] = "ntdll.dll!ZwQueryPortInformationProcess";
+ table[4096] = "gdi32.dll!NtGdiAbortDoc";
+ table[4097] = "gdi32.dll!NtGdiAbortPath";
+ table[4098] = "gdi32.dll!NtGdiAddFontResourceW";
+ table[4099] = "gdi32.dll!NtGdiAddRemoteFontToDC";
+ table[4100] = "gdi32.dll!NtGdiAddFontMemResourceEx";
+ table[4101] = "gdi32.dll!NtGdiRemoveMergeFont";
+ table[4102] = "gdi32.dll!NtGdiAddRemoteMMInstanceToDC";
+ table[4103] = "gdi32.dll!NtGdiAlphaBlend";
+ table[4104] = "gdi32.dll!NtGdiAngleArc";
+ table[4105] = "gdi32.dll!NtGdiAnyLinkedFonts";
+ table[4106] = "gdi32.dll!NtGdiFontIsLinked";
+ table[4107] = "gdi32.dll!NtGdiArcInternal";
+ table[4108] = "gdi32.dll!NtGdiBeginPath";
+ table[4109] = "gdi32.dll!NtGdiBitBlt";
+ table[4110] = "gdi32.dll!NtGdiCancelDC";
+ table[4111] = "gdi32.dll!NtGdiCheckBitmapBits";
+ table[4112] = "gdi32.dll!NtGdiCloseFigure";
+ table[4113] = "gdi32.dll!NtGdiClearBitmapAttributes";
+ table[4114] = "gdi32.dll!NtGdiClearBrushAttributes";
+ table[4115] = "gdi32.dll!NtGdiColorCorrectPalette";
+ table[4116] = "gdi32.dll!NtGdiCombineRgn";
+ table[4117] = "gdi32.dll!CombineTransform";
+ table[4118] = "gdi32.dll!NtGdiComputeXformCoefficients";
+ table[4119] = "gdi32.dll!NtGdiConsoleTextOut";
+ table[4120] = "gdi32.dll!NtGdiConvertMetafileRect";
+ table[4121] = "gdi32.dll!NtGdiCreateBitmap";
+ table[4122] = "gdi32.dll!NtGdiCreateClientObj";
+ table[4123] = "gdi32.dll!NtGdiCreateColorSpace";
+ table[4124] = "gdi32.dll!NtGdiCreateColorTransform";
+ table[4125] = "gdi32.dll!NtGdiCreateCompatibleBitmap";
+ table[4126] = "gdi32.dll!NtGdiCreateCompatibleDC";
+ table[4127] = "gdi32.dll!NtGdiCreateDIBBrush";
+ table[4128] = "gdi32.dll!NtGdiCreateDIBitmapInternal";
+ table[4129] = "gdi32.dll!NtGdiCreateDIBSection";
+ table[4130] = "gdi32.dll!NtGdiCreateEllipticRgn";
+ table[4131] = "gdi32.dll!NtGdiCreateHalftonePalette";
+ table[4132] = "gdi32.dll!NtGdiCreateHatchBrushInternal";
+ table[4133] = "gdi32.dll!NtGdiCreateMetafileDC";
+ table[4134] = "gdi32.dll!NtGdiCreatePaletteInternal";
+ table[4135] = "gdi32.dll!NtGdiCreatePatternBrushInternal";
+ table[4136] = "gdi32.dll!CreatePen";
+ table[4137] = "gdi32.dll!NtGdiCreateRectRgn";
+ table[4138] = "gdi32.dll!CreateRoundRectRgn";
+ table[4139] = "gdi32.dll!NtGdiCreateServerMetaFile";
+ table[4140] = "gdi32.dll!NtGdiCreateSolidBrush";
+ table[4141] = "gdi32.dll!NtGdiD3dContextCreate";
+ table[4142] = "gdi32.dll!NtGdiD3dContextDestroy";
+ table[4143] = "gdi32.dll!NtGdiD3dContextDestroyAll";
+ table[4144] = "gdi32.dll!NtGdiD3dValidateTextureStageState";
+ table[4145] = "gdi32.dll!NtGdiD3dDrawPrimitives2";
+ table[4146] = "gdi32.dll!NtGdiDdGetDriverState";
+ table[4147] = "gdi32.dll!NtGdiDdAddAttachedSurface";
+ table[4148] = "gdi32.dll!NtGdiDdAlphaBlt";
+ table[4149] = "gdi32.dll!NtGdiDdAttachSurface";
+ table[4150] = "gdi32.dll!NtGdiDdBeginMoCompFrame";
+ table[4151] = "gdi32.dll!NtGdiDdBlt";
+ table[4152] = "gdi32.dll!NtGdiDdCanCreateSurface";
+ table[4153] = "gdi32.dll!NtGdiDdCanCreateD3DBuffer";
+ table[4154] = "gdi32.dll!NtGdiDdColorControl";
+ table[4155] = "gdi32.dll!NtGdiDdCreateDirectDrawObject";
+ table[4156] = "gdi32.dll!NtGdiDdCreateSurface";
+ table[4157] = "gdi32.dll!NtGdiDdCreateD3DBuffer";
+ table[4158] = "gdi32.dll!NtGdiDdCreateMoComp";
+ table[4159] = "gdi32.dll!NtGdiDdCreateSurfaceObject";
+ table[4160] = "gdi32.dll!NtGdiDdDeleteDirectDrawObject";
+ table[4161] = "gdi32.dll!NtGdiDdDeleteSurfaceObject";
+ table[4162] = "gdi32.dll!NtGdiDdDestroyMoComp";
+ table[4163] = "gdi32.dll!NtGdiDdDestroySurface";
+ table[4164] = "gdi32.dll!NtGdiDdDestroyD3DBuffer";
+ table[4165] = "gdi32.dll!NtGdiDdEndMoCompFrame";
+ table[4166] = "gdi32.dll!NtGdiDdFlip";
+ table[4167] = "gdi32.dll!NtGdiDdFlipToGDISurface";
+ table[4168] = "gdi32.dll!NtGdiDdGetAvailDriverMemory";
+ table[4169] = "gdi32.dll!NtGdiDdGetBltStatus";
+ table[4170] = "gdi32.dll!NtGdiDdGetDC";
+ table[4171] = "gdi32.dll!NtGdiDdGetDriverInfo";
+ table[4172] = "gdi32.dll!NtGdiDdGetDxHandle";
+ table[4173] = "gdi32.dll!NtGdiDdGetFlipStatus";
+ table[4174] = "gdi32.dll!NtGdiDdGetInternalMoCompInfo";
+ table[4175] = "gdi32.dll!NtGdiDdGetMoCompBuffInfo";
+ table[4176] = "gdi32.dll!NtGdiDdGetMoCompGuids";
+ table[4177] = "gdi32.dll!NtGdiDdGetMoCompFormats";
+ table[4178] = "gdi32.dll!NtGdiDdGetScanLine";
+ table[4179] = "gdi32.dll!NtGdiDdLock";
+ table[4180] = "gdi32.dll!NtGdiDdLockD3D";
+ table[4181] = "gdi32.dll!NtGdiDdQueryDirectDrawObject";
+ table[4182] = "gdi32.dll!NtGdiDdQueryMoCompStatus";
+ table[4183] = "gdi32.dll!NtGdiDdReenableDirectDrawObject";
+ table[4184] = "gdi32.dll!NtGdiDdReleaseDC";
+ table[4185] = "gdi32.dll!NtGdiDdRenderMoComp";
+ table[4186] = "gdi32.dll!NtGdiDdResetVisrgn";
+ table[4187] = "gdi32.dll!NtGdiDdSetColorKey";
+ table[4188] = "gdi32.dll!NtGdiDdSetExclusiveMode";
+ table[4189] = "gdi32.dll!NtGdiDdSetGammaRamp";
+ table[4190] = "gdi32.dll!NtGdiDdCreateSurfaceEx";
+ table[4191] = "gdi32.dll!NtGdiDdSetOverlayPosition";
+ table[4192] = "gdi32.dll!NtGdiDdUnattachSurface";
+ table[4193] = "gdi32.dll!NtGdiDdUnlock";
+ table[4194] = "gdi32.dll!NtGdiDdUnlockD3D";
+ table[4195] = "gdi32.dll!NtGdiDdUpdateOverlay";
+ table[4196] = "gdi32.dll!NtGdiDdWaitForVerticalBlank";
+ table[4197] = "gdi32.dll!NtGdiDvpCanCreateVideoPort";
+ table[4198] = "gdi32.dll!NtGdiDvpColorControl";
+ table[4199] = "gdi32.dll!NtGdiDvpCreateVideoPort";
+ table[4200] = "gdi32.dll!NtGdiDvpDestroyVideoPort";
+ table[4201] = "gdi32.dll!NtGdiDvpFlipVideoPort";
+ table[4202] = "gdi32.dll!NtGdiDvpGetVideoPortBandwidth";
+ table[4203] = "gdi32.dll!NtGdiDvpGetVideoPortField";
+ table[4204] = "gdi32.dll!NtGdiDvpGetVideoPortFlipStatus";
+ table[4205] = "gdi32.dll!NtGdiDvpGetVideoPortInputFormats";
+ table[4206] = "gdi32.dll!NtGdiDvpGetVideoPortLine";
+ table[4207] = "gdi32.dll!NtGdiDvpGetVideoPortOutputFormats";
+ table[4208] = "gdi32.dll!NtGdiDvpGetVideoPortConnectInfo";
+ table[4209] = "gdi32.dll!NtGdiDvpGetVideoSignalStatus";
+ table[4210] = "gdi32.dll!NtGdiDvpUpdateVideoPort";
+ table[4211] = "gdi32.dll!NtGdiDvpWaitForVideoPortSync";
+ table[4212] = "gdi32.dll!NtGdiDvpAcquireNotification";
+ table[4213] = "gdi32.dll!NtGdiDvpReleaseNotification";
+ table[4214] = "gdi32.dll!NtGdiDxgGenericThunk";
+ table[4215] = "gdi32.dll!NtGdiDeleteClientObj";
+ table[4216] = "gdi32.dll!NtGdiDeleteColorSpace";
+ table[4217] = "gdi32.dll!NtGdiDeleteColorTransform";
+ table[4218] = "gdi32.dll!DeleteObject";
+ table[4219] = "gdi32.dll!NtGdiDescribePixelFormat";
+ table[4220] = "gdi32.dll!NtGdiGetPerBandInfo";
+ table[4221] = "gdi32.dll!NtGdiDoBanding";
+ table[4222] = "gdi32.dll!NtGdiDoPalette";
+ table[4223] = "gdi32.dll!NtGdiDrawEscape";
+ table[4224] = "gdi32.dll!NtGdiEllipse";
+ table[4225] = "gdi32.dll!NtGdiEnableEudc";
+ table[4226] = "gdi32.dll!NtGdiEndDoc";
+ table[4227] = "gdi32.dll!NtGdiEndPage";
+ table[4228] = "gdi32.dll!NtGdiEndPath";
+ table[4229] = "gdi32.dll!NtGdiEnumFontChunk";
+ table[4230] = "gdi32.dll!NtGdiEnumFontClose";
+ table[4231] = "gdi32.dll!NtGdiEnumFontOpen";
+ table[4232] = "gdi32.dll!NtGdiEnumObjects";
+ table[4233] = "gdi32.dll!NtGdiEqualRgn";
+ table[4234] = "gdi32.dll!NtGdiEudcLoadUnloadLink";
+ table[4235] = "gdi32.dll!NtGdiExcludeClipRect";
+ table[4236] = "gdi32.dll!NtGdiExtCreatePen";
+ table[4237] = "gdi32.dll!NtGdiExtCreateRegion";
+ table[4238] = "gdi32.dll!NtGdiExtEscape";
+ table[4239] = "gdi32.dll!NtGdiExtFloodFill";
+ table[4240] = "gdi32.dll!NtGdiExtGetObjectW";
+ table[4241] = "gdi32.dll!ExtSelectClipRgn";
+ table[4242] = "gdi32.dll!NtGdiExtTextOutW";
+ table[4243] = "gdi32.dll!NtGdiFillPath";
+ table[4244] = "gdi32.dll!NtGdiFillRgn";
+ table[4245] = "gdi32.dll!NtGdiFlattenPath";
+ table[4247] = "gdi32.dll!NtGdiFlush";
+ table[4248] = "gdi32.dll!NtGdiForceUFIMapping";
+ table[4249] = "gdi32.dll!NtGdiFrameRgn";
+ table[4250] = "gdi32.dll!NtGdiFullscreenControl";
+ table[4251] = "gdi32.dll!NtGdiGetAndSetDCDword";
+ table[4252] = "gdi32.dll!GetClipBox";
+ table[4253] = "gdi32.dll!GetBitmapBits";
+ table[4254] = "gdi32.dll!NtGdiGetBitmapDimension";
+ table[4255] = "gdi32.dll!NtGdiGetBoundsRect";
+ table[4256] = "gdi32.dll!NtGdiGetCharABCWidthsW";
+ table[4257] = "gdi32.dll!NtGdiGetCharacterPlacementW";
+ table[4258] = "gdi32.dll!NtGdiGetCharSet";
+ table[4259] = "gdi32.dll!NtGdiGetCharWidthW";
+ table[4260] = "gdi32.dll!NtGdiGetCharWidthInfo";
+ table[4261] = "gdi32.dll!NtGdiGetColorAdjustment";
+ table[4263] = "gdi32.dll!NtGdiGetDCDword";
+ table[4264] = "gdi32.dll!NtGdiGetDCforBitmap";
+ table[4265] = "gdi32.dll!NtGdiGetDCObject";
+ table[4266] = "gdi32.dll!NtGdiGetDCPoint";
+ table[4267] = "gdi32.dll!NtGdiGetDeviceCaps";
+ table[4268] = "gdi32.dll!NtGdiGetDeviceGammaRamp";
+ table[4269] = "gdi32.dll!NtGdiGetDeviceCapsAll";
+ table[4270] = "gdi32.dll!NtGdiGetDIBitsInternal";
+ table[4271] = "gdi32.dll!NtGdiGetETM";
+ table[4272] = "gdi32.dll!NtGdiGetEudcTimeStampEx";
+ table[4273] = "gdi32.dll!GetFontData";
+ table[4274] = "gdi32.dll!NtGdiGetFontResourceInfoInternalW";
+ table[4275] = "gdi32.dll!NtGdiGetGlyphIndicesW";
+ table[4276] = "gdi32.dll!NtGdiGetGlyphIndicesWInternal";
+ table[4277] = "gdi32.dll!NtGdiGetGlyphOutline";
+ table[4278] = "gdi32.dll!NtGdiGetKerningPairs";
+ table[4279] = "gdi32.dll!NtGdiGetLinkedUFIs";
+ table[4280] = "gdi32.dll!GetMiterLimit";
+ table[4281] = "gdi32.dll!NtGdiGetMonitorID";
+ table[4282] = "gdi32.dll!GetNearestColor";
+ table[4283] = "gdi32.dll!NtGdiGetNearestPaletteIndex";
+ table[4284] = "gdi32.dll!NtGdiGetObjectBitmapHandle";
+ table[4285] = "gdi32.dll!NtGdiGetOutlineTextMetricsInternalW";
+ table[4286] = "gdi32.dll!NtGdiGetPath";
+ table[4287] = "gdi32.dll!NtGdiGetPixel";
+ table[4288] = "gdi32.dll!NtGdiGetRandomRgn";
+ table[4289] = "gdi32.dll!GetRasterizerCaps";
+ table[4290] = "gdi32.dll!NtGdiGetRealizationInfo";
+ table[4291] = "gdi32.dll!GetRegionData";
+ table[4292] = "gdi32.dll!NtGdiGetRgnBox";
+ table[4293] = "gdi32.dll!NtGdiGetServerMetaFileBits";
+ table[4294] = "gdi32.dll!NtGdiGetSpoolMessage";
+ table[4296] = "gdi32.dll!NtGdiGetStockObject";
+ table[4297] = "gdi32.dll!NtGdiGetStringBitmapW";
+ table[4298] = "gdi32.dll!GetSystemPaletteUse";
+ table[4299] = "gdi32.dll!NtGdiGetTextCharsetInfo";
+ table[4300] = "gdi32.dll!NtGdiGetTextExtent";
+ table[4301] = "gdi32.dll!NtGdiGetTextExtentExW";
+ table[4302] = "gdi32.dll!NtGdiGetTextFaceW";
+ table[4303] = "gdi32.dll!NtGdiGetTextMetricsW";
+ table[4304] = "gdi32.dll!NtGdiGetTransform";
+ table[4305] = "gdi32.dll!NtGdiGetUFI";
+ table[4306] = "gdi32.dll!NtGdiGetEmbUFI";
+ table[4307] = "gdi32.dll!NtGdiGetUFIPathname";
+ table[4308] = "gdi32.dll!NtGdiGetEmbedFonts";
+ table[4309] = "gdi32.dll!NtGdiChangeGhostFont";
+ table[4310] = "gdi32.dll!NtGdiAddEmbFontToDC";
+ table[4311] = "gdi32.dll!NtGdiGetFontUnicodeRanges";
+ table[4312] = "gdi32.dll!NtGdiGetWidthTable";
+ table[4313] = "gdi32.dll!NtGdiGradientFill";
+ table[4314] = "gdi32.dll!NtGdiHfontCreate";
+ table[4315] = "gdi32.dll!NtGdiIcmBrushInfo";
+ table[4316] = "gdi32.dll!NtGdiInit";
+ table[4317] = "gdi32.dll!NtGdiInitSpool";
+ table[4318] = "gdi32.dll!NtGdiIntersectClipRect";
+ table[4319] = "gdi32.dll!NtGdiInvertRgn";
+ table[4320] = "gdi32.dll!NtGdiLineTo";
+ table[4321] = "gdi32.dll!NtGdiMakeFontDir";
+ table[4322] = "gdi32.dll!NtGdiMakeInfoDC";
+ table[4323] = "gdi32.dll!NtGdiMaskBlt";
+ table[4324] = "gdi32.dll!NtGdiModifyWorldTransform";
+ table[4325] = "gdi32.dll!NtGdiMonoBitmap";
+ table[4327] = "gdi32.dll!NtGdiOffsetClipRgn";
+ table[4328] = "gdi32.dll!NtGdiOffsetRgn";
+ table[4329] = "gdi32.dll!NtGdiOpenDCW";
+ table[4330] = "gdi32.dll!NtGdiPatBlt";
+ table[4331] = "gdi32.dll!NtGdiPolyPatBlt";
+ table[4332] = "gdi32.dll!NtGdiPathToRegion";
+ table[4333] = "gdi32.dll!NtGdiPlgBlt";
+ table[4334] = "gdi32.dll!NtGdiPolyDraw";
+ table[4335] = "gdi32.dll!NtGdiPolyPolyDraw";
+ table[4336] = "gdi32.dll!NtGdiPolyTextOutW";
+ table[4337] = "gdi32.dll!NtGdiPtInRegion";
+ table[4338] = "gdi32.dll!NtGdiPtVisible";
+ table[4339] = "gdi32.dll!NtGdiQueryFonts";
+ table[4340] = "gdi32.dll!NtGdiQueryFontAssocInfo";
+ table[4341] = "gdi32.dll!NtGdiRectangle";
+ table[4342] = "gdi32.dll!NtGdiRectInRegion";
+ table[4343] = "gdi32.dll!RectVisible";
+ table[4344] = "gdi32.dll!NtGdiRemoveFontResourceW";
+ table[4345] = "gdi32.dll!NtGdiRemoveFontMemResourceEx";
+ table[4346] = "gdi32.dll!NtGdiResetDC";
+ table[4347] = "gdi32.dll!NtGdiResizePalette";
+ table[4348] = "gdi32.dll!NtGdiRestoreDC";
+ table[4349] = "gdi32.dll!NtGdiRoundRect";
+ table[4350] = "gdi32.dll!NtGdiSaveDC";
+ table[4351] = "gdi32.dll!NtGdiScaleViewportExtEx";
+ table[4352] = "gdi32.dll!NtGdiScaleWindowExtEx";
+ table[4353] = "gdi32.dll!NtGdiSelectBitmap";
+ table[4355] = "gdi32.dll!NtGdiSelectClipPath";
+ table[4356] = "gdi32.dll!NtGdiSelectFont";
+ table[4357] = "gdi32.dll!NtGdiSelectPen";
+ table[4358] = "gdi32.dll!NtGdiSetBitmapAttributes";
+ table[4359] = "gdi32.dll!SetBitmapBits";
+ table[4360] = "gdi32.dll!NtGdiSetBitmapDimension";
+ table[4361] = "gdi32.dll!NtGdiSetBoundsRect";
+ table[4362] = "gdi32.dll!NtGdiSetBrushAttributes";
+ table[4363] = "gdi32.dll!NtGdiSetBrushOrg";
+ table[4364] = "gdi32.dll!NtGdiSetColorAdjustment";
+ table[4365] = "gdi32.dll!NtGdiSetColorSpace";
+ table[4366] = "gdi32.dll!NtGdiSetDeviceGammaRamp";
+ table[4367] = "gdi32.dll!NtGdiSetDIBitsToDeviceInternal";
+ table[4368] = "gdi32.dll!NtGdiSetFontEnumeration";
+ table[4369] = "gdi32.dll!NtGdiSetFontXform";
+ table[4370] = "gdi32.dll!NtGdiSetIcmMode";
+ table[4371] = "gdi32.dll!NtGdiSetLinkedUFIs";
+ table[4372] = "gdi32.dll!NtGdiSetMagicColors";
+ table[4373] = "gdi32.dll!NtGdiSetMetaRgn";
+ table[4374] = "gdi32.dll!NtGdiSetMiterLimit";
+ table[4375] = "gdi32.dll!NtGdiGetDeviceWidth";
+ table[4376] = "gdi32.dll!NtGdiMirrorWindowOrg";
+ table[4377] = "gdi32.dll!NtGdiSetLayout";
+ table[4378] = "gdi32.dll!NtGdiSetPixel";
+ table[4379] = "gdi32.dll!NtGdiSetPixelFormat";
+ table[4380] = "gdi32.dll!NtGdiSetRectRgn";
+ table[4381] = "gdi32.dll!NtGdiSetSystemPaletteUse";
+ table[4383] = "gdi32.dll!NtGdiSetupPublicCFONT";
+ table[4384] = "gdi32.dll!NtGdiSetVirtualResolution";
+ table[4385] = "gdi32.dll!NtGdiSetSizeDevice";
+ table[4386] = "gdi32.dll!NtGdiStartDoc";
+ table[4387] = "gdi32.dll!NtGdiStartPage";
+ table[4388] = "gdi32.dll!NtGdiStretchBlt";
+ table[4389] = "gdi32.dll!NtGdiStretchDIBitsInternal";
+ table[4390] = "gdi32.dll!NtGdiStrokeAndFillPath";
+ table[4391] = "gdi32.dll!NtGdiStrokePath";
+ table[4392] = "gdi32.dll!NtGdiSwapBuffers";
+ table[4393] = "gdi32.dll!NtGdiTransformPoints";
+ table[4394] = "gdi32.dll!NtGdiTransparentBlt";
+ table[4395] = "gdi32.dll!NtGdiUnloadPrinterDriver";
+ table[4397] = "gdi32.dll!NtGdiUnrealizeObject";
+ table[4398] = "gdi32.dll!NtGdiUpdateColors";
+ table[4399] = "gdi32.dll!NtGdiWidenPath";
+ table[4400] = "user32.dll!NtUserActivateKeyboardLayout";
+ table[4401] = "user32.dll!NtUserAlterWindowStyle";
+ table[4402] = "imm32.dll!NtUserAssociateInputContext";
+ table[4403] = "user32.dll!NtUserAttachThreadInput";
+ table[4404] = "user32.dll!NtUserBeginPaint";
+ table[4405] = "user32.dll!NtUserBitBltSysBmp";
+ table[4406] = "user32.dll!NtUserBlockInput";
+ table[4407] = "imm32.dll!NtUserBuildHimcList";
+ table[4408] = "user32.dll!NtUserBuildHwndList";
+ table[4409] = "user32.dll!NtUserBuildNameList";
+ table[4410] = "user32.dll!NtUserBuildPropList";
+ table[4411] = "user32.dll!NtUserCallHwnd";
+ table[4412] = "user32.dll!NtUserCallHwndLock";
+ table[4413] = "user32.dll!NtUserCallHwndOpt";
+ table[4414] = "user32.dll!NtUserCallHwndParam";
+ table[4415] = "user32.dll!NtUserCallHwndParamLock";
+ table[4416] = "user32.dll!NtUserCallMsgFilter";
+ table[4417] = "user32.dll!NtUserCallNextHookEx";
+ table[4418] = "user32.dll!NtUserCallNoParam";
+ table[4419] = "imm32.dll!NtUserCallOneParam";
+ table[4419] = "user32.dll!NtUserCallOneParam";
+ table[4420] = "user32.dll!NtUserCallTwoParam";
+ table[4421] = "user32.dll!NtUserChangeClipboardChain";
+ table[4422] = "user32.dll!NtUserChangeDisplaySettings";
+ table[4424] = "user32.dll!NtUserCheckMenuItem";
+ table[4425] = "user32.dll!NtUserChildWindowFromPointEx";
+ table[4426] = "user32.dll!NtUserClipCursor";
+ table[4427] = "user32.dll!NtUserCloseClipboard";
+ table[4428] = "user32.dll!NtUserCloseDesktop";
+ table[4429] = "user32.dll!NtUserCloseWindowStation";
+ table[4431] = "user32.dll!NtUserConvertMemHandle";
+ table[4432] = "user32.dll!NtUserCopyAcceleratorTable";
+ table[4433] = "user32.dll!NtUserCountClipboardFormats";
+ table[4434] = "user32.dll!NtUserCreateAcceleratorTable";
+ table[4435] = "user32.dll!NtUserCreateCaret";
+ table[4436] = "user32.dll!NtUserCreateDesktop";
+ table[4437] = "imm32.dll!NtUserCreateInputContext";
+ table[4438] = "user32.dll!NtUserCreateLocalMemHandle";
+ table[4439] = "user32.dll!NtUserCreateWindowEx";
+ table[4440] = "user32.dll!NtUserCreateWindowStation";
+ table[4441] = "user32.dll!NtUserDdeGetQualityOfService";
+ table[4442] = "user32.dll!NtUserDdeInitialize";
+ table[4443] = "user32.dll!NtUserDdeSetQualityOfService";
+ table[4444] = "user32.dll!NtUserDeferWindowPos";
+ table[4445] = "user32.dll!NtUserDefSetText";
+ table[4446] = "user32.dll!NtUserDeleteMenu";
+ table[4447] = "user32.dll!DestroyAcceleratorTable";
+ table[4448] = "user32.dll!NtUserDestroyCursor";
+ table[4449] = "imm32.dll!NtUserDestroyInputContext";
+ table[4450] = "user32.dll!NtUserDestroyMenu";
+ table[4451] = "user32.dll!NtUserDestroyWindow";
+ table[4452] = "imm32.dll!NtUserDisableThreadIme";
+ table[4453] = "user32.dll!NtUserDispatchMessage";
+ table[4454] = "user32.dll!NtUserDragDetect";
+ table[4455] = "user32.dll!NtUserDragObject";
+ table[4456] = "user32.dll!NtUserDrawAnimatedRects";
+ table[4457] = "user32.dll!NtUserDrawCaption";
+ table[4458] = "user32.dll!NtUserDrawCaptionTemp";
+ table[4459] = "user32.dll!NtUserDrawIconEx";
+ table[4460] = "user32.dll!NtUserDrawMenuBarTemp";
+ table[4461] = "user32.dll!NtUserEmptyClipboard";
+ table[4462] = "user32.dll!NtUserEnableMenuItem";
+ table[4463] = "user32.dll!NtUserEnableScrollBar";
+ table[4464] = "user32.dll!NtUserEndDeferWindowPosEx";
+ table[4465] = "user32.dll!NtUserEndMenu";
+ table[4466] = "user32.dll!NtUserEndPaint";
+ table[4467] = "user32.dll!NtUserEnumDisplayDevices";
+ table[4468] = "user32.dll!NtUserEnumDisplayMonitors";
+ table[4469] = "user32.dll!NtUserEnumDisplaySettings";
+ table[4470] = "user32.dll!NtUserEvent";
+ table[4471] = "user32.dll!NtUserExcludeUpdateRgn";
+ table[4472] = "user32.dll!NtUserFillWindow";
+ table[4473] = "user32.dll!NtUserFindExistingCursorIcon";
+ table[4474] = "user32.dll!NtUserFindWindowEx";
+ table[4475] = "user32.dll!NtUserFlashWindowEx";
+ table[4476] = "user32.dll!NtUserGetAltTabInfo";
+ table[4477] = "user32.dll!NtUserGetAncestor";
+ table[4478] = "imm32.dll!NtUserGetAppImeLevel";
+ table[4479] = "user32.dll!GetAsyncKeyState";
+ table[4480] = "user32.dll!NtUserGetAtomName";
+ table[4481] = "user32.dll!NtUserGetCaretBlinkTime";
+ table[4482] = "user32.dll!NtUserGetCaretPos";
+ table[4483] = "user32.dll!NtUserGetClassInfo";
+ table[4484] = "user32.dll!NtUserGetClassName";
+ table[4485] = "user32.dll!NtUserGetClipboardData";
+ table[4486] = "user32.dll!NtUserGetClipboardFormatName";
+ table[4487] = "user32.dll!NtUserGetClipboardOwner";
+ table[4488] = "user32.dll!NtUserGetClipboardSequenceNumber";
+ table[4489] = "user32.dll!NtUserGetClipboardViewer";
+ table[4490] = "user32.dll!NtUserGetClipCursor";
+ table[4491] = "user32.dll!NtUserGetComboBoxInfo";
+ table[4492] = "user32.dll!NtUserGetControlBrush";
+ table[4493] = "user32.dll!NtUserGetControlColor";
+ table[4494] = "user32.dll!NtUserGetCPD";
+ table[4495] = "user32.dll!NtUserGetCursorFrameInfo";
+ table[4496] = "user32.dll!NtUserGetCursorInfo";
+ table[4497] = "user32.dll!NtUserGetDC";
+ table[4498] = "user32.dll!NtUserGetDCEx";
+ table[4499] = "user32.dll!NtUserGetDoubleClickTime";
+ table[4500] = "user32.dll!NtUserGetForegroundWindow";
+ table[4501] = "user32.dll!NtUserGetGuiResources";
+ table[4502] = "user32.dll!NtUserGetGUIThreadInfo";
+ table[4503] = "user32.dll!NtUserGetIconInfo";
+ table[4504] = "user32.dll!NtUserGetIconSize";
+ table[4505] = "imm32.dll!NtUserGetImeHotKey";
+ table[4505] = "user32.dll!NtUserGetImeHotKey";
+ table[4506] = "imm32.dll!NtUserGetImeInfoEx";
+ table[4507] = "user32.dll!NtUserGetInternalWindowPos";
+ table[4508] = "user32.dll!NtUserGetKeyboardLayoutList";
+ table[4509] = "user32.dll!NtUserGetKeyboardLayoutName";
+ table[4510] = "user32.dll!NtUserGetKeyboardState";
+ table[4511] = "user32.dll!NtUserGetKeyNameText";
+ table[4512] = "user32.dll!NtUserGetKeyState";
+ table[4513] = "user32.dll!NtUserGetListBoxInfo";
+ table[4514] = "user32.dll!NtUserGetMenuBarInfo";
+ table[4515] = "user32.dll!NtUserGetMenuIndex";
+ table[4516] = "user32.dll!NtUserGetMenuItemRect";
+ table[4517] = "user32.dll!NtUserGetMessage";
+ table[4518] = "user32.dll!NtUserGetMouseMovePointsEx";
+ table[4519] = "user32.dll!NtUserGetObjectInformation";
+ table[4520] = "user32.dll!NtUserGetOpenClipboardWindow";
+ table[4521] = "user32.dll!NtUserGetPriorityClipboardFormat";
+ table[4522] = "user32.dll!NtUserGetProcessWindowStation";
+ table[4523] = "user32.dll!NtUserGetRawInputBuffer";
+ table[4524] = "user32.dll!NtUserGetRawInputData";
+ table[4525] = "user32.dll!NtUserGetRawInputDeviceInfo";
+ table[4526] = "user32.dll!NtUserGetRawInputDeviceList";
+ table[4527] = "user32.dll!NtUserGetRegisteredRawInputDevices";
+ table[4528] = "user32.dll!NtUserGetScrollBarInfo";
+ table[4529] = "user32.dll!NtUserGetSystemMenu";
+ table[4530] = "user32.dll!NtUserGetThreadDesktop";
+ table[4531] = "imm32.dll!NtUserGetThreadState";
+ table[4531] = "user32.dll!NtUserGetThreadState";
+ table[4532] = "user32.dll!NtUserGetTitleBarInfo";
+ table[4533] = "user32.dll!GetUpdateRect";
+ table[4534] = "user32.dll!GetUpdateRgn";
+ table[4535] = "user32.dll!NtUserGetWindowDC";
+ table[4536] = "user32.dll!NtUserGetWindowPlacement";
+ table[4537] = "user32.dll!NtUserGetWOWClass";
+ table[4539] = "user32.dll!NtUserHideCaret";
+ table[4540] = "user32.dll!NtUserHiliteMenuItem";
+ table[4541] = "user32.dll!NtUserImpersonateDdeClientWindow";
+ table[4543] = "user32.dll!NtUserInitializeClientPfnArrays";
+ table[4544] = "user32.dll!NtUserInitTask";
+ table[4545] = "user32.dll!NtUserInternalGetWindowText";
+ table[4546] = "user32.dll!NtUserInvalidateRect";
+ table[4547] = "user32.dll!NtUserInvalidateRgn";
+ table[4548] = "user32.dll!NtUserIsClipboardFormatAvailable";
+ table[4549] = "user32.dll!NtUserKillTimer";
+ table[4550] = "user32.dll!NtUserLoadKeyboardLayoutEx";
+ table[4551] = "user32.dll!NtUserLockWindowStation";
+ table[4552] = "user32.dll!NtUserLockWindowUpdate";
+ table[4553] = "user32.dll!NtUserLockWorkStation";
+ table[4554] = "user32.dll!NtUserMapVirtualKeyEx";
+ table[4555] = "user32.dll!NtUserMenuItemFromPoint";
+ table[4556] = "user32.dll!NtUserMessageCall";
+ table[4557] = "user32.dll!NtUserMinMaximize";
+ table[4558] = "user32.dll!NtUserMNDragLeave";
+ table[4559] = "user32.dll!NtUserMNDragOver";
+ table[4560] = "user32.dll!NtUserModifyUserStartupInfoFlags";
+ table[4561] = "user32.dll!NtUserMoveWindow";
+ table[4562] = "imm32.dll!NtUserNotifyIMEStatus";
+ table[4562] = "user32.dll!NtUserNotifyIMEStatus";
+ table[4564] = "user32.dll!NtUserNotifyWinEvent";
+ table[4565] = "user32.dll!NtUserOpenClipboard";
+ table[4566] = "user32.dll!NtUserOpenDesktop";
+ table[4567] = "user32.dll!NtUserOpenInputDesktop";
+ table[4568] = "user32.dll!NtUserOpenWindowStation";
+ table[4569] = "user32.dll!NtUserPaintDesktop";
+ table[4570] = "user32.dll!PeekMessageW";
+ table[4571] = "user32.dll!NtUserPostMessage";
+ table[4572] = "user32.dll!NtUserPostThreadMessage";
+ table[4573] = "user32.dll!NtUserPrintWindow";
+ table[4574] = "user32.dll!NtUserProcessConnect";
+ table[4576] = "imm32.dll!NtUserQueryInputContext";
+ table[4577] = "user32.dll!NtUserQuerySendMessage";
+ table[4578] = "user32.dll!NtUserQueryUserCounters";
+ table[4579] = "imm32.dll!NtUserQueryWindow";
+ table[4579] = "user32.dll!NtUserQueryWindow";
+ table[4580] = "user32.dll!NtUserRealChildWindowFromPoint";
+ table[4581] = "user32.dll!NtUserRealInternalGetMessage";
+ table[4582] = "user32.dll!NtUserRealWaitMessageEx";
+ table[4583] = "user32.dll!NtUserRedrawWindow";
+ table[4584] = "user32.dll!NtUserRegisterClassExWOW";
+ table[4585] = "user32.dll!NtUserRegisterUserApiHook";
+ table[4586] = "user32.dll!NtUserRegisterHotKey";
+ table[4587] = "user32.dll!NtUserRegisterRawInputDevices";
+ table[4588] = "user32.dll!NtUserRegisterTasklist";
+ table[4589] = "user32.dll!NtUserRegisterWindowMessage";
+ table[4590] = "user32.dll!NtUserRemoveMenu";
+ table[4591] = "user32.dll!NtUserRemoveProp";
+ table[4593] = "user32.dll!NtUserResolveDesktopForWOW";
+ table[4594] = "user32.dll!NtUserSBGetParms";
+ table[4595] = "user32.dll!ScrollDC";
+ table[4596] = "user32.dll!NtUserScrollWindowEx";
+ table[4597] = "gdi32.dll!NtUserSelectPalette";
+ table[4598] = "user32.dll!NtUserSendInput";
+ table[4599] = "user32.dll!NtUserSetActiveWindow";
+ table[4600] = "imm32.dll!NtUserSetAppImeLevel";
+ table[4601] = "user32.dll!NtUserSetCapture";
+ table[4602] = "user32.dll!NtUserSetClassLong";
+ table[4603] = "user32.dll!NtUserSetClassWord";
+ table[4604] = "user32.dll!NtUserSetClipboardData";
+ table[4605] = "user32.dll!NtUserSetClipboardViewer";
+ table[4606] = "user32.dll!NtUserSetConsoleReserveKeys";
+ table[4607] = "user32.dll!NtUserSetCursor";
+ table[4608] = "user32.dll!NtUserSetCursorContents";
+ table[4609] = "user32.dll!NtUserSetCursorIconData";
+ table[4610] = "user32.dll!NtUserSetDbgTag";
+ table[4611] = "user32.dll!NtUserSetFocus";
+ table[4612] = "user32.dll!NtUserSetImeHotKey";
+ table[4613] = "imm32.dll!NtUserSetImeInfoEx";
+ table[4614] = "user32.dll!NtUserSetImeOwnerWindow";
+ table[4616] = "user32.dll!NtUserSetInformationThread";
+ table[4617] = "user32.dll!NtUserSetInternalWindowPos";
+ table[4618] = "user32.dll!NtUserSetKeyboardState";
+ table[4619] = "user32.dll!NtUserSetLogonNotifyWindow";
+ table[4620] = "user32.dll!NtUserSetMenu";
+ table[4621] = "user32.dll!NtUserSetMenuContextHelpId";
+ table[4622] = "user32.dll!NtUserSetMenuDefaultItem";
+ table[4623] = "user32.dll!NtUserSetMenuFlagRtoL";
+ table[4624] = "user32.dll!NtUserSetObjectInformation";
+ table[4625] = "user32.dll!NtUserSetParent";
+ table[4626] = "user32.dll!NtUserSetProcessWindowStation";
+ table[4627] = "user32.dll!NtUserSetProp";
+ table[4628] = "user32.dll!NtUserSetRipFlags";
+ table[4629] = "user32.dll!NtUserSetScrollInfo";
+ table[4630] = "user32.dll!NtUserSetShellWindowEx";
+ table[4631] = "user32.dll!NtUserSetSysColors";
+ table[4632] = "user32.dll!NtUserSetSystemCursor";
+ table[4633] = "user32.dll!NtUserSetSystemMenu";
+ table[4634] = "user32.dll!NtUserSetSystemTimer";
+ table[4635] = "user32.dll!NtUserSetThreadDesktop";
+ table[4636] = "imm32.dll!NtUserSetThreadLayoutHandles";
+ table[4637] = "user32.dll!NtUserSetThreadState";
+ table[4638] = "user32.dll!NtUserSetTimer";
+ table[4639] = "user32.dll!NtUserSetWindowFNID";
+ table[4640] = "user32.dll!NtUserSetWindowLong";
+ table[4641] = "user32.dll!NtUserSetWindowPlacement";
+ table[4642] = "user32.dll!NtUserSetWindowPos";
+ table[4643] = "user32.dll!NtUserSetWindowRgn";
+ table[4644] = "user32.dll!NtUserSetWindowsHookAW";
+ table[4645] = "user32.dll!NtUserSetWindowsHookEx";
+ table[4646] = "user32.dll!NtUserSetWindowStationUser";
+ table[4647] = "user32.dll!NtUserSetWindowWord";
+ table[4648] = "user32.dll!NtUserSetWinEventHook";
+ table[4649] = "user32.dll!NtUserShowCaret";
+ table[4650] = "user32.dll!NtUserShowScrollBar";
+ table[4651] = "user32.dll!NtUserShowWindow";
+ table[4652] = "user32.dll!NtUserShowWindowAsync";
+ table[4654] = "user32.dll!NtUserSwitchDesktop";
+ table[4655] = "user32.dll!NtUserSystemParametersInfo";
+ table[4657] = "user32.dll!NtUserThunkedMenuInfo";
+ table[4658] = "user32.dll!NtUserThunkedMenuItemInfo";
+ table[4659] = "user32.dll!NtUserToUnicodeEx";
+ table[4660] = "user32.dll!NtUserTrackMouseEvent";
+ table[4661] = "user32.dll!NtUserTrackPopupMenuEx";
+ table[4662] = "user32.dll!NtUserCalcMenuBar";
+ table[4663] = "user32.dll!NtUserPaintMenuBar";
+ table[4664] = "user32.dll!TranslateAcceleratorA";
+ table[4665] = "user32.dll!NtUserTranslateMessage";
+ table[4666] = "user32.dll!NtUserUnhookWindowsHookEx";
+ table[4667] = "user32.dll!NtUserUnhookWinEvent";
+ table[4668] = "user32.dll!NtUserUnloadKeyboardLayout";
+ table[4669] = "user32.dll!NtUserUnlockWindowStation";
+ table[4670] = "user32.dll!NtUserUnregisterClass";
+ table[4671] = "user32.dll!NtUserUnregisterUserApiHook";
+ table[4672] = "user32.dll!NtUserUnregisterHotKey";
+ table[4673] = "imm32.dll!NtUserUpdateInputContext";
+ table[4673] = "user32.dll!NtUserUpdateInputContext";
+ table[4674] = "user32.dll!NtUserUpdateInstance";
+ table[4675] = "user32.dll!NtUserUpdateLayeredWindow";
+ table[4676] = "user32.dll!NtUserGetLayeredWindowAttributes";
+ table[4677] = "user32.dll!NtUserSetLayeredWindowAttributes";
+ table[4678] = "user32.dll!NtUserUpdatePerUserSystemParameters";
+ table[4679] = "user32.dll!NtUserUserHandleGrantAccess";
+ table[4680] = "imm32.dll!NtUserValidateHandleSecure";
+ table[4680] = "user32.dll!NtUserValidateHandleSecure";
+ table[4681] = "user32.dll!NtUserValidateRect";
+ table[4682] = "user32.dll!NtUserValidateTimerCallback";
+ table[4683] = "user32.dll!NtUserVkKeyScanEx";
+ table[4684] = "user32.dll!NtUserWaitForInputIdle";
+ table[4685] = "user32.dll!NtUserWaitForMsgAndEvent";
+ table[4686] = "user32.dll!NtUserWaitMessage";
+ table[4687] = "user32.dll!NtUserWin32PoolAllocationStats";
+ table[4688] = "user32.dll!NtUserWindowFromPoint";
+ table[4689] = "user32.dll!NtUserYieldTask";
+ table[4695] = "gdi32.dll!NtGdiEngAssociateSurface";
+ table[4696] = "gdi32.dll!NtGdiEngCreateBitmap";
+ table[4697] = "gdi32.dll!NtGdiEngCreateDeviceSurface";
+ table[4698] = "gdi32.dll!NtGdiEngCreateDeviceBitmap";
+ table[4699] = "gdi32.dll!NtGdiEngCreatePalette";
+ table[4700] = "gdi32.dll!NtGdiEngComputeGlyphSet";
+ table[4701] = "gdi32.dll!NtGdiEngCopyBits";
+ table[4702] = "gdi32.dll!NtGdiEngDeletePalette";
+ table[4703] = "gdi32.dll!NtGdiEngDeleteSurface";
+ table[4704] = "gdi32.dll!NtGdiEngEraseSurface";
+ table[4705] = "gdi32.dll!NtGdiEngUnlockSurface";
+ table[4706] = "gdi32.dll!NtGdiEngLockSurface";
+ table[4707] = "gdi32.dll!NtGdiEngBitBlt";
+ table[4708] = "gdi32.dll!NtGdiEngStretchBlt";
+ table[4709] = "gdi32.dll!NtGdiEngPlgBlt";
+ table[4710] = "gdi32.dll!NtGdiEngMarkBandingSurface";
+ table[4711] = "gdi32.dll!NtGdiEngStrokePath";
+ table[4712] = "gdi32.dll!NtGdiEngFillPath";
+ table[4713] = "gdi32.dll!NtGdiEngStrokeAndFillPath";
+ table[4714] = "gdi32.dll!NtGdiEngPaint";
+ table[4715] = "gdi32.dll!NtGdiEngLineTo";
+ table[4716] = "gdi32.dll!NtGdiEngAlphaBlend";
+ table[4717] = "gdi32.dll!NtGdiEngGradientFill";
+ table[4718] = "gdi32.dll!NtGdiEngTransparentBlt";
+ table[4719] = "gdi32.dll!NtGdiEngTextOut";
+ table[4720] = "gdi32.dll!NtGdiEngStretchBltROP";
+ table[4721] = "gdi32.dll!NtGdiXLATEOBJ_cGetPalette";
+ table[4722] = "gdi32.dll!NtGdiXLATEOBJ_iXlate";
+ table[4723] = "gdi32.dll!NtGdiXLATEOBJ_hGetColorTransform";
+ table[4724] = "gdi32.dll!NtGdiCLIPOBJ_bEnum";
+ table[4725] = "gdi32.dll!NtGdiCLIPOBJ_cEnumStart";
+ table[4726] = "gdi32.dll!NtGdiCLIPOBJ_ppoGetPath";
+ table[4727] = "gdi32.dll!NtGdiEngDeletePath";
+ table[4728] = "gdi32.dll!NtGdiEngCreateClip";
+ table[4729] = "gdi32.dll!NtGdiEngDeleteClip";
+ table[4730] = "gdi32.dll!NtGdiBRUSHOBJ_ulGetBrushColor";
+ table[4731] = "gdi32.dll!NtGdiBRUSHOBJ_pvAllocRbrush";
+ table[4732] = "gdi32.dll!NtGdiBRUSHOBJ_pvGetRbrush";
+ table[4733] = "gdi32.dll!NtGdiBRUSHOBJ_hGetColorTransform";
+ table[4734] = "gdi32.dll!NtGdiXFORMOBJ_bApplyXform";
+ table[4735] = "gdi32.dll!NtGdiXFORMOBJ_iGetXform";
+ table[4736] = "gdi32.dll!NtGdiFONTOBJ_vGetInfo";
+ table[4737] = "gdi32.dll!NtGdiFONTOBJ_pxoGetXform";
+ table[4738] = "gdi32.dll!NtGdiFONTOBJ_cGetGlyphs";
+ table[4739] = "gdi32.dll!NtGdiFONTOBJ_pifi";
+ table[4740] = "gdi32.dll!NtGdiFONTOBJ_pfdg";
+ table[4741] = "gdi32.dll!NtGdiFONTOBJ_pQueryGlyphAttrs";
+ table[4742] = "gdi32.dll!NtGdiFONTOBJ_pvTrueTypeFontFile";
+ table[4743] = "gdi32.dll!NtGdiFONTOBJ_cGetAllGlyphHandles";
+ table[4744] = "gdi32.dll!NtGdiSTROBJ_bEnum";
+ table[4745] = "gdi32.dll!NtGdiSTROBJ_bEnumPositionsOnly";
+ table[4746] = "gdi32.dll!NtGdiSTROBJ_bGetAdvanceWidths";
+ table[4747] = "gdi32.dll!NtGdiSTROBJ_vEnumStart";
+ table[4748] = "gdi32.dll!NtGdiSTROBJ_dwGetCodePage";
+ table[4749] = "gdi32.dll!NtGdiPATHOBJ_vGetBounds";
+ table[4750] = "gdi32.dll!NtGdiPATHOBJ_bEnum";
+ table[4751] = "gdi32.dll!NtGdiPATHOBJ_vEnumStart";
+ table[4752] = "gdi32.dll!NtGdiPATHOBJ_vEnumStartClipLines";
+ table[4753] = "gdi32.dll!NtGdiPATHOBJ_bEnumClipLines";
+ table[4754] = "gdi32.dll!NtGdiGetDhpdev";
+ table[4755] = "gdi32.dll!NtGdiEngCheckAbort";
+ table[4756] = "gdi32.dll!NtGdiHT_Get8BPPFormatPalette";
+ table[4757] = "gdi32.dll!NtGdiHT_Get8BPPMaskPalette";
+ table[4758] = "gdi32.dll!NtGdiUpdateTransform";
+ table[4759] = "gdi32.dll!NtGdiSetPUMPDOBJ";
+ table[4760] = "gdi32.dll!NtGdiBRUSHOBJ_DeleteRbrush";
+ table[4761] = "gdi32.dll!NtGdiUMPDEngFreeUserMem";
+ table[4762] = "gdi32.dll!NtGdiDrawStream";
+} else { // Vista table.
+ table[4272] = "gdi32.dll!NtGdiGetDeviceCaps";
+ table[4220] = "gdi32.dll!NtGdiDeleteObjectApp";
+ table[4249] = "gdi32.dll!NtGdiFlush";
+ table[4120] = "gdi32.dll!NtGdiConsoleTextOut";
+ table[4825] = "gdi32.dll!NtGdiDdDDIEscape";
+ table[4299] = "gdi32.dll!GetRgnBox";
+ table[4828] = "gdi32.dll!NtGdiDdDDIGetPresentHistory";
+ table[4835] = "gdi32.dll!NtGdiDdDDIGetDeviceState";
+ table[4850] = "gdi32.dll!NtGdiDdDDICheckExclusiveOwnership";
+ table[4849] = "gdi32.dll!NtGdiDdDDICheckMonitorPowerState";
+ table[4833] = "gdi32.dll!NtGdiDdDDIWaitForVerticalBlankEvent";
+ table[4298] = "gdi32.dll!GetRegionData";
+ table[4796] = "gdi32.dll!NtGdiDwmGetDirtyRgn";
+ table[4820] = "gdi32.dll!NtGdiDdDDIRender";
+ table[4819] = "gdi32.dll!NtGdiDdDDIPresent";
+ table[4815] = "gdi32.dll!NtGdiDdDDIUnlock";
+ table[4814] = "gdi32.dll!NtGdiDdDDILock";
+ table[4240] = "gdi32.dll!NtGdiExtCreateRegion";
+ table[4339] = "gdi32.dll!NtGdiPolyPatBlt";
+ table[4361] = "gdi32.dll!NtGdiSelectBitmap";
+ table[4127] = "gdi32.dll!NtGdiCreateCompatibleDC";
+ table[4142] = "gdi32.dll!NtGdiCreateSolidBrush";
+ table[4122] = "gdi32.dll!CreateBitmap";
+ table[4303] = "gdi32.dll!CreateBitmap";
+ table[4348] = "gdi32.dll!NtGdiQueryFontAssocInfo";
+ table[4126] = "gdi32.dll!NtGdiCreateCompatibleBitmap";
+ table[4109] = "gdi32.dll!NtGdiBitBlt";
+ table[4243] = "gdi32.dll!NtGdiExtGetObjectW";
+ table[4270] = "gdi32.dll!NtGdiGetDCObject";
+ table[4620] = "gdi32.dll!NtUserSelectPalette";
+ table[4338] = "gdi32.dll!NtGdiPatBlt";
+ table[4275] = "gdi32.dll!NtGdiGetDIBitsInternal";
+ table[4398] = "gdi32.dll!NtGdiStretchDIBitsInternal";
+ table[4130] = "gdi32.dll!NtGdiCreateDIBSection";
+ table[4356] = "gdi32.dll!NtGdiRestoreDC";
+ table[4358] = "gdi32.dll!NtGdiSaveDC";
+ table[4103] = "gdi32.dll!NtGdiAlphaBlend";
+ table[4349] = "gdi32.dll!NtGdiRectangle";
+ table[4295] = "gdi32.dll!NtGdiGetRandomRgn";
+ table[4244] = "gdi32.dll!NtGdiExtSelectClipRgn";
+ table[4268] = "gdi32.dll!NtGdiGetDCDword";
+ table[4245] = "gdi32.dll!NtGdiExtTextOutW";
+ table[4254] = "gdi32.dll!GetClipBox";
+ table[4397] = "gdi32.dll!NtGdiStretchBlt";
+ table[4331] = "gdi32.dll!NtGdiMaskBlt";
+ table[4324] = "gdi32.dll!NtGdiInit";
+ table[4262] = "gdi32.dll!NtGdiGetCharSet";
+ table[4336] = "gdi32.dll!NtGdiOffsetRgn";
+ table[4326] = "gdi32.dll!NtGdiIntersectClipRect";
+ table[4795] = "gdi32.dll!NtGdiDrawStream";
+ table[4364] = "gdi32.dll!NtGdiSelectFont";
+ table[4116] = "gdi32.dll!NtGdiCombineRgn";
+ table[4238] = "gdi32.dll!NtGdiExcludeClipRect";
+ table[4802] = "gdi32.dll!NtGdiDdDDISetAllocationPriority";
+ table[4798] = "gdi32.dll!NtGdiDdDDICreateAllocation";
+ table[4801] = "gdi32.dll!NtGdiDdDDIDestroyAllocation";
+ table[4797] = "gdi32.dll!NtGdiDwmGetSurfaceData";
+ table[4140] = "gdi32.dll!CreateRoundRectRgn";
+ table[4840] = "gdi32.dll!NtGdiDdDDISetProcessSchedulingPriorityClass";
+ table[4307] = "gdi32.dll!NtGdiGetTextCharsetInfo";
+ table[4351] = "gdi32.dll!RectVisible";
+ table[4139] = "gdi32.dll!NtGdiCreateRectRgn";
+ table[4369] = "gdi32.dll!NtGdiSetBoundsRect";
+ table[4257] = "gdi32.dll!NtGdiGetBoundsRect";
+ table[4309] = "gdi32.dll!NtGdiGetTextExtentExW";
+ table[4402] = "gdi32.dll!NtGdiTransformPoints";
+ table[4118] = "gdi32.dll!NtGdiComputeXformCoefficients";
+ table[4337] = "gdi32.dll!NtGdiOpenDCW";
+ table[4813] = "gdi32.dll!NtGdiDdDDIQueryAdapterInfo";
+ table[4822] = "gdi32.dll!NtGdiDdDDIOpenAdapterFromHdc";
+ table[4816] = "gdi32.dll!NtGdiDdDDIGetDisplayModeList";
+ table[4823] = "gdi32.dll!NtGdiDdDDICloseAdapter";
+ table[4804] = "gdi32.dll!NtGdiDdDDICreateDevice";
+ table[4806] = "gdi32.dll!NtGdiDdDDICreateContext";
+ table[4271] = "gdi32.dll!NtGdiGetDCPoint";
+ table[4225] = "gdi32.dll!NtGdiDoPalette";
+ table[4288] = "gdi32.dll!GetNearestColor";
+ table[4129] = "gdi32.dll!NtGdiCreateDIBitmapInternal";
+ table[4322] = "gdi32.dll!NtGdiHfontCreate";
+ table[4137] = "gdi32.dll!NtGdiCreatePatternBrushInternal";
+ table[4136] = "gdi32.dll!NtGdiCreatePaletteInternal";
+ table[4132] = "gdi32.dll!NtGdiCreateHalftonePalette";
+ table[4375] = "gdi32.dll!NtGdiSetDIBitsToDeviceInternal";
+ table[4269] = "gdi32.dll!NtGdiGetDCforBitmap";
+ table[4392] = "gdi32.dll!NtGdiSetupPublicCFONT";
+ table[4836] = "gdi32.dll!NtGdiDdDDICreateDCFromMemory";
+ table[4264] = "gdi32.dll!NtGdiGetCharWidthInfo";
+ table[4367] = "gdi32.dll!SetBitmapBits";
+ table[4255] = "gdi32.dll!GetBitmapBits";
+ table[4311] = "gdi32.dll!NtGdiGetTextMetricsW";
+ table[4297] = "gdi32.dll!NtGdiGetRealizationInfo";
+ table[4310] = "gdi32.dll!NtGdiGetTextFaceW";
+ table[4292] = "gdi32.dll!NtGdiGetOutlineTextMetricsInternalW";
+ table[4280] = "gdi32.dll!NtGdiGetGlyphIndicesW";
+ table[4320] = "gdi32.dll!NtGdiGetWidthTable";
+ table[4234] = "gdi32.dll!NtGdiEnumFontOpen";
+ table[4233] = "gdi32.dll!NtGdiEnumFontClose";
+ table[4232] = "gdi32.dll!NtGdiEnumFontChunk";
+ table[4260] = "gdi32.dll!NtGdiGetCharABCWidthsW";
+ table[4105] = "gdi32.dll!NtGdiAnyLinkedFonts";
+ table[4278] = "gdi32.dll!NtGdiGetFontData";
+ table[4106] = "gdi32.dll!NtGdiFontIsLinked";
+ table[4332] = "gdi32.dll!NtGdiModifyWorldTransform";
+ table[4312] = "gdi32.dll!NtGdiGetTransform";
+ table[4321] = "gdi32.dll!NtGdiGradientFill";
+ table[4294] = "gdi32.dll!NtGdiGetPixel";
+ table[4403] = "gdi32.dll!NtGdiTransparentBlt";
+ table[4319] = "gdi32.dll!NtGdiGetFontUnicodeRanges";
+ table[4817] = "gdi32.dll!NtGdiDdDDISetDisplayMode";
+ table[4827] = "gdi32.dll!NtGdiDdDDISetVidPnSourceOwner";
+ table[4273] = "gdi32.dll!NtGdiGetDeviceGammaRamp";
+ table[4374] = "gdi32.dll!NtGdiSetDeviceGammaRamp";
+ table[4366] = "gdi32.dll!NtGdiSetBitmapAttributes";
+ table[4098] = "gdi32.dll!NtGdiAddFontResourceW";
+ table[4376] = "gdi32.dll!NtGdiSetFontEnumeration";
+ table[4228] = "gdi32.dll!NtGdiEnableEudc";
+ table[4343] = "gdi32.dll!NtGdiPolyPolyDraw";
+ table[4328] = "gdi32.dll!NtGdiLineTo";
+ table[4138] = "gdi32.dll!NtGdiCreatePen";
+ table[4236] = "gdi32.dll!EqualRgn";
+ table[4805] = "gdi32.dll!NtGdiDdDDIDestroyDevice";
+ table[4807] = "gdi32.dll!NtGdiDdDDIDestroyContext";
+ table[4799] = "gdi32.dll!NtGdiDdDDIQueryResourceInfo";
+ table[4800] = "gdi32.dll!NtGdiDdDDIOpenResource";
+ table[4838] = "gdi32.dll!NtGdiDdDDISetContextSchedulingPriority";
+ table[4263] = "gdi32.dll!NtGdiGetCharWidthW";
+ table[4837] = "gdi32.dll!NtGdiDdDDIDestroyDCFromMemory";
+ table[4803] = "gdi32.dll!NtGdiDdDDIQueryAllocationResidency";
+ table[4824] = "gdi32.dll!NtGdiDdDDIGetSharedPrimaryHandle";
+ table[4385] = "gdi32.dll!NtGdiSetLayout";
+ table[4371] = "gdi32.dll!NtGdiSetBrushOrg";
+ table[4157] = "gdi32.dll!NtGdiDdCreateDirectDrawObject";
+ table[4128] = "gdi32.dll!NtGdiCreateDIBBrush";
+ table[4387] = "gdi32.dll!NtGdiSetPixel";
+ table[4345] = "gdi32.dll!PtInRegion";
+ table[4821] = "gdi32.dll!NtGdiDdDDIOpenAdapterFromDeviceName";
+ table[4253] = "gdi32.dll!NtGdiGetAndSetDCDword";
+ table[4274] = "gdi32.dll!NtGdiGetDeviceCapsAll";
+ table[4247] = "gdi32.dll!NtGdiFillRgn";
+ table[4289] = "gdi32.dll!GetNearestPaletteIndex";
+ table[4378] = "gdi32.dll!NtGdiSetIcmMode";
+ table[4327] = "gdi32.dll!NtGdiInvertRgn";
+ table[4792] = "gdi32.dll!NtGdiSetPUMPDOBJ";
+ table[4735] = "gdi32.dll!NtGdiEngDeletePalette";
+ table[4732] = "gdi32.dll!NtGdiEngCreatePalette";
+ table[4241] = "gdi32.dll!NtGdiExtEscape";
+ table[4217] = "gdi32.dll!NtGdiDeleteClientObj";
+ table[4123] = "gdi32.dll!NtGdiCreateClientObj";
+ table[4354] = "gdi32.dll!NtGdiResetDC";
+ table[4235] = "gdi32.dll!NtGdiEnumObjects";
+ table[4135] = "gdi32.dll!NtGdiCreateOPMProtectedOutputs";
+ table[4222] = "gdi32.dll!NtGdiDestroyOPMProtectedOutput";
+ table[4258] = "gdi32.dll!NtGdiGetCertificate";
+ table[4259] = "gdi32.dll!NtGdiGetCertificateSize";
+ table[4291] = "gdi32.dll!NtGdiGetOPMRandomNumber";
+ table[4305] = "gdi32.dll!NtGdiGetSuggestedOPMProtectedOutputArraySize";
+ table[4386] = "gdi32.dll!NtGdiSetOPMSigningKeyAndSequenceNumbers";
+ table[4844] = "gdi32.dll!NtGdiDdDDISetQueuedLimit";
+ table[4341] = "gdi32.dll!NtGdiPlgBlt";
+ table[4117] = "gdi32.dll!CombineTransform";
+ table[4134] = "gdi32.dll!NtGdiCreateMetafileDC";
+ table[4393] = "gdi32.dll!NtGdiSetVirtualResolution";
+ table[4394] = "gdi32.dll!NtGdiSetSizeDevice";
+ table[4381] = "gdi32.dll!NtGdiSetMetaRgn";
+ table[4382] = "gdi32.dll!NtGdiSetMiterLimit";
+ table[4377] = "gdi32.dll!NtGdiSetFontXform";
+ table[4333] = "gdi32.dll!NtGdiMonoBitmap";
+ table[4768] = "gdi32.dll!NtGdiXFORMOBJ_iGetXform";
+ table[4770] = "gdi32.dll!NtGdiFONTOBJ_pxoGetXform";
+ table[4239] = "gdi32.dll!NtGdiExtCreatePen";
+ table[4286] = "gdi32.dll!GetMiterLimit";
+ table[4290] = "gdi32.dll!NtGdiGetObjectBitmapHandle";
+ table[4287] = "gdi32.dll!NtGdiGetMonitorID";
+ table[4284] = "gdi32.dll!NtGdiGetKerningPairs";
+ table[4357] = "gdi32.dll!NtGdiRoundRect";
+ table[4242] = "gdi32.dll!NtGdiExtFloodFill";
+ table[4306] = "gdi32.dll!GetSystemPaletteUse";
+ table[4096] = "gdi32.dll!NtGdiAbortDoc";
+ table[4097] = "gdi32.dll!NtGdiAbortPath";
+ table[4099] = "gdi32.dll!NtGdiAddRemoteFontToDC";
+ table[4100] = "gdi32.dll!NtGdiAddFontMemResourceEx";
+ table[4101] = "gdi32.dll!NtGdiRemoveMergeFont";
+ table[4102] = "gdi32.dll!NtGdiAddRemoteMMInstanceToDC";
+ table[4104] = "gdi32.dll!NtGdiAngleArc";
+ table[4107] = "gdi32.dll!NtGdiArcInternal";
+ table[4108] = "gdi32.dll!NtGdiBeginPath";
+ table[4110] = "gdi32.dll!NtGdiCancelDC";
+ table[4111] = "gdi32.dll!NtGdiCheckBitmapBits";
+ table[4112] = "gdi32.dll!NtGdiCloseFigure";
+ table[4113] = "gdi32.dll!NtGdiClearBitmapAttributes";
+ table[4114] = "gdi32.dll!NtGdiClearBrushAttributes";
+ table[4115] = "gdi32.dll!NtGdiColorCorrectPalette";
+ table[4119] = "gdi32.dll!NtGdiConfigureOPMProtectedOutput";
+ table[4121] = "gdi32.dll!NtGdiConvertMetafileRect";
+ table[4124] = "gdi32.dll!NtGdiCreateColorSpace";
+ table[4125] = "gdi32.dll!NtGdiCreateColorTransform";
+ table[4131] = "gdi32.dll!NtGdiCreateEllipticRgn";
+ table[4133] = "gdi32.dll!NtGdiCreateHatchBrushInternal";
+ table[4141] = "gdi32.dll!NtGdiCreateServerMetaFile";
+ table[4143] = "gdi32.dll!NtGdiD3dContextCreate";
+ table[4144] = "gdi32.dll!NtGdiD3dContextDestroy";
+ table[4145] = "gdi32.dll!NtGdiD3dContextDestroyAll";
+ table[4146] = "gdi32.dll!NtGdiD3dValidateTextureStageState";
+ table[4147] = "gdi32.dll!NtGdiD3dDrawPrimitives2";
+ table[4148] = "gdi32.dll!NtGdiDdGetDriverState";
+ table[4149] = "gdi32.dll!NtGdiDdAddAttachedSurface";
+ table[4150] = "gdi32.dll!NtGdiDdAlphaBlt";
+ table[4151] = "gdi32.dll!NtGdiDdAttachSurface";
+ table[4152] = "gdi32.dll!NtGdiDdBeginMoCompFrame";
+ table[4153] = "gdi32.dll!NtGdiDdBlt";
+ table[4154] = "gdi32.dll!NtGdiDdCanCreateSurface";
+ table[4155] = "gdi32.dll!NtGdiDdCanCreateD3DBuffer";
+ table[4156] = "gdi32.dll!NtGdiDdColorControl";
+ table[4158] = "gdi32.dll!NtGdiDdCreateSurface";
+ table[4159] = "gdi32.dll!NtGdiDdCreateD3DBuffer";
+ table[4160] = "gdi32.dll!NtGdiDdCreateMoComp";
+ table[4161] = "gdi32.dll!NtGdiDdCreateSurfaceObject";
+ table[4162] = "gdi32.dll!NtGdiDdDeleteDirectDrawObject";
+ table[4163] = "gdi32.dll!NtGdiDdDeleteSurfaceObject";
+ table[4164] = "gdi32.dll!NtGdiDdDestroyMoComp";
+ table[4165] = "gdi32.dll!NtGdiDdDestroySurface";
+ table[4166] = "gdi32.dll!NtGdiDdDestroyD3DBuffer";
+ table[4167] = "gdi32.dll!NtGdiDdEndMoCompFrame";
+ table[4168] = "gdi32.dll!NtGdiDdFlip";
+ table[4169] = "gdi32.dll!NtGdiDdFlipToGDISurface";
+ table[4170] = "gdi32.dll!NtGdiDdGetAvailDriverMemory";
+ table[4171] = "gdi32.dll!NtGdiDdGetBltStatus";
+ table[4172] = "gdi32.dll!NtGdiDdGetDC";
+ table[4173] = "gdi32.dll!NtGdiDdGetDriverInfo";
+ table[4174] = "gdi32.dll!NtGdiDdGetDxHandle";
+ table[4175] = "gdi32.dll!NtGdiDdGetFlipStatus";
+ table[4176] = "gdi32.dll!NtGdiDdGetInternalMoCompInfo";
+ table[4177] = "gdi32.dll!NtGdiDdGetMoCompBuffInfo";
+ table[4178] = "gdi32.dll!NtGdiDdGetMoCompGuids";
+ table[4179] = "gdi32.dll!NtGdiDdGetMoCompFormats";
+ table[4180] = "gdi32.dll!NtGdiDdGetScanLine";
+ table[4181] = "gdi32.dll!NtGdiDdLock";
+ table[4182] = "gdi32.dll!NtGdiDdLockD3D";
+ table[4183] = "gdi32.dll!NtGdiDdQueryDirectDrawObject";
+ table[4184] = "gdi32.dll!NtGdiDdQueryMoCompStatus";
+ table[4185] = "gdi32.dll!NtGdiDdReenableDirectDrawObject";
+ table[4186] = "gdi32.dll!NtGdiDdReleaseDC";
+ table[4187] = "gdi32.dll!NtGdiDdRenderMoComp";
+ table[4188] = "gdi32.dll!NtGdiDdResetVisrgn";
+ table[4189] = "gdi32.dll!NtGdiDdSetColorKey";
+ table[4190] = "gdi32.dll!NtGdiDdSetExclusiveMode";
+ table[4191] = "gdi32.dll!NtGdiDdSetGammaRamp";
+ table[4192] = "gdi32.dll!NtGdiDdCreateSurfaceEx";
+ table[4193] = "gdi32.dll!NtGdiDdSetOverlayPosition";
+ table[4194] = "gdi32.dll!NtGdiDdUnattachSurface";
+ table[4195] = "gdi32.dll!NtGdiDdUnlock";
+ table[4196] = "gdi32.dll!NtGdiDdUnlockD3D";
+ table[4197] = "gdi32.dll!NtGdiDdUpdateOverlay";
+ table[4198] = "gdi32.dll!NtGdiDdWaitForVerticalBlank";
+ table[4199] = "gdi32.dll!NtGdiDvpCanCreateVideoPort";
+ table[4200] = "gdi32.dll!NtGdiDvpColorControl";
+ table[4201] = "gdi32.dll!NtGdiDvpCreateVideoPort";
+ table[4202] = "gdi32.dll!NtGdiDvpDestroyVideoPort";
+ table[4203] = "gdi32.dll!NtGdiDvpFlipVideoPort";
+ table[4204] = "gdi32.dll!NtGdiDvpGetVideoPortBandwidth";
+ table[4205] = "gdi32.dll!NtGdiDvpGetVideoPortField";
+ table[4206] = "gdi32.dll!NtGdiDvpGetVideoPortFlipStatus";
+ table[4207] = "gdi32.dll!NtGdiDvpGetVideoPortInputFormats";
+ table[4208] = "gdi32.dll!NtGdiDvpGetVideoPortLine";
+ table[4209] = "gdi32.dll!NtGdiDvpGetVideoPortOutputFormats";
+ table[4210] = "gdi32.dll!NtGdiDvpGetVideoPortConnectInfo";
+ table[4211] = "gdi32.dll!NtGdiDvpGetVideoSignalStatus";
+ table[4212] = "gdi32.dll!NtGdiDvpUpdateVideoPort";
+ table[4213] = "gdi32.dll!NtGdiDvpWaitForVideoPortSync";
+ table[4214] = "gdi32.dll!NtGdiDvpAcquireNotification";
+ table[4215] = "gdi32.dll!NtGdiDvpReleaseNotification";
+ table[4216] = "gdi32.dll!NtGdiDxgGenericThunk";
+ table[4218] = "gdi32.dll!NtGdiDeleteColorSpace";
+ table[4219] = "gdi32.dll!NtGdiDeleteColorTransform";
+ table[4221] = "gdi32.dll!NtGdiDescribePixelFormat";
+ table[4223] = "gdi32.dll!NtGdiGetPerBandInfo";
+ table[4224] = "gdi32.dll!NtGdiDoBanding";
+ table[4226] = "gdi32.dll!NtGdiDrawEscape";
+ table[4227] = "gdi32.dll!NtGdiEllipse";
+ table[4229] = "gdi32.dll!NtGdiEndDoc";
+ table[4230] = "gdi32.dll!NtGdiEndPage";
+ table[4231] = "gdi32.dll!NtGdiEndPath";
+ table[4237] = "gdi32.dll!NtGdiEudcLoadUnloadLink";
+ table[4246] = "gdi32.dll!NtGdiFillPath";
+ table[4248] = "gdi32.dll!NtGdiFlattenPath";
+ table[4250] = "gdi32.dll!NtGdiForceUFIMapping";
+ table[4251] = "gdi32.dll!NtGdiFrameRgn";
+ table[4252] = "gdi32.dll!NtGdiFullscreenControl";
+ table[4256] = "gdi32.dll!NtGdiGetBitmapDimension";
+ table[4261] = "gdi32.dll!NtGdiGetCharacterPlacementW";
+ table[4265] = "gdi32.dll!NtGdiGetColorAdjustment";
+ table[4267] = "gdi32.dll!NtGdiGetCOPPCompatibleOPMInformation";
+ table[4276] = "gdi32.dll!NtGdiGetETM";
+ table[4277] = "gdi32.dll!NtGdiGetEudcTimeStampEx";
+ table[4279] = "gdi32.dll!NtGdiGetFontResourceInfoInternalW";
+ table[4281] = "gdi32.dll!NtGdiGetGlyphIndicesWInternal";
+ table[4282] = "gdi32.dll!NtGdiGetGlyphOutline";
+ table[4283] = "gdi32.dll!NtGdiGetOPMInformation";
+ table[4285] = "gdi32.dll!NtGdiGetLinkedUFIs";
+ table[4293] = "gdi32.dll!NtGdiGetPath";
+ table[4296] = "gdi32.dll!NtGdiGetRasterizerCaps";
+ table[4300] = "gdi32.dll!NtGdiGetServerMetaFileBits";
+ table[4301] = "gdi32.dll!NtGdiGetSpoolMessage";
+ table[4304] = "gdi32.dll!NtGdiGetStringBitmapW";
+ table[4308] = "gdi32.dll!NtGdiGetTextExtent";
+ table[4313] = "gdi32.dll!NtGdiGetUFI";
+ table[4314] = "gdi32.dll!NtGdiGetEmbUFI";
+ table[4315] = "gdi32.dll!NtGdiGetUFIPathname";
+ table[4316] = "gdi32.dll!NtGdiGetEmbedFonts";
+ table[4317] = "gdi32.dll!NtGdiChangeGhostFont";
+ table[4318] = "gdi32.dll!NtGdiAddEmbFontToDC";
+ table[4323] = "gdi32.dll!NtGdiIcmBrushInfo";
+ table[4325] = "gdi32.dll!NtGdiInitSpool";
+ table[4329] = "gdi32.dll!NtGdiMakeFontDir";
+ table[4330] = "gdi32.dll!NtGdiMakeInfoDC";
+ table[4335] = "gdi32.dll!NtGdiOffsetClipRgn";
+ table[4340] = "gdi32.dll!NtGdiPathToRegion";
+ table[4342] = "gdi32.dll!NtGdiPolyDraw";
+ table[4344] = "gdi32.dll!NtGdiPolyTextOutW";
+ table[4346] = "gdi32.dll!NtGdiPtVisible";
+ table[4347] = "gdi32.dll!NtGdiQueryFonts";
+ table[4350] = "gdi32.dll!NtGdiRectInRegion";
+ table[4352] = "gdi32.dll!NtGdiRemoveFontResourceW";
+ table[4353] = "gdi32.dll!NtGdiRemoveFontMemResourceEx";
+ table[4355] = "gdi32.dll!NtGdiResizePalette";
+ table[4359] = "gdi32.dll!NtGdiScaleViewportExtEx";
+ table[4360] = "gdi32.dll!NtGdiScaleWindowExtEx";
+ table[4363] = "gdi32.dll!NtGdiSelectClipPath";
+ table[4365] = "gdi32.dll!NtGdiSelectPen";
+ table[4368] = "gdi32.dll!NtGdiSetBitmapDimension";
+ table[4370] = "gdi32.dll!NtGdiSetBrushAttributes";
+ table[4372] = "gdi32.dll!NtGdiSetColorAdjustment";
+ table[4373] = "gdi32.dll!NtGdiSetColorSpace";
+ table[4379] = "gdi32.dll!NtGdiSetLinkedUFIs";
+ table[4380] = "gdi32.dll!NtGdiSetMagicColors";
+ table[4383] = "gdi32.dll!NtGdiGetDeviceWidth";
+ table[4384] = "gdi32.dll!NtGdiMirrorWindowOrg";
+ table[4388] = "gdi32.dll!NtGdiSetPixelFormat";
+ table[4389] = "gdi32.dll!NtGdiSetRectRgn";
+ table[4390] = "gdi32.dll!NtGdiSetSystemPaletteUse";
+ table[4395] = "gdi32.dll!NtGdiStartDoc";
+ table[4396] = "gdi32.dll!NtGdiStartPage";
+ table[4399] = "gdi32.dll!NtGdiStrokeAndFillPath";
+ table[4400] = "gdi32.dll!NtGdiStrokePath";
+ table[4401] = "gdi32.dll!NtGdiSwapBuffers";
+ table[4404] = "gdi32.dll!NtGdiUnloadPrinterDriver";
+ table[4406] = "gdi32.dll!NtGdiUnrealizeObject";
+ table[4407] = "gdi32.dll!NtGdiUpdateColors";
+ table[4408] = "gdi32.dll!NtGdiWidenPath";
+ table[4728] = "gdi32.dll!NtGdiEngAssociateSurface";
+ table[4729] = "gdi32.dll!NtGdiEngCreateBitmap";
+ table[4730] = "gdi32.dll!NtGdiEngCreateDeviceSurface";
+ table[4731] = "gdi32.dll!NtGdiEngCreateDeviceBitmap";
+ table[4733] = "gdi32.dll!NtGdiEngComputeGlyphSet";
+ table[4734] = "gdi32.dll!NtGdiEngCopyBits";
+ table[4736] = "gdi32.dll!NtGdiEngDeleteSurface";
+ table[4737] = "gdi32.dll!NtGdiEngEraseSurface";
+ table[4738] = "gdi32.dll!NtGdiEngUnlockSurface";
+ table[4739] = "gdi32.dll!NtGdiEngLockSurface";
+ table[4740] = "gdi32.dll!NtGdiEngBitBlt";
+ table[4741] = "gdi32.dll!NtGdiEngStretchBlt";
+ table[4742] = "gdi32.dll!NtGdiEngPlgBlt";
+ table[4743] = "gdi32.dll!NtGdiEngMarkBandingSurface";
+ table[4744] = "gdi32.dll!NtGdiEngStrokePath";
+ table[4745] = "gdi32.dll!NtGdiEngFillPath";
+ table[4746] = "gdi32.dll!NtGdiEngStrokeAndFillPath";
+ table[4747] = "gdi32.dll!NtGdiEngPaint";
+ table[4748] = "gdi32.dll!NtGdiEngLineTo";
+ table[4749] = "gdi32.dll!NtGdiEngAlphaBlend";
+ table[4750] = "gdi32.dll!NtGdiEngGradientFill";
+ table[4751] = "gdi32.dll!NtGdiEngTransparentBlt";
+ table[4752] = "gdi32.dll!NtGdiEngTextOut";
+ table[4753] = "gdi32.dll!NtGdiEngStretchBltROP";
+ table[4754] = "gdi32.dll!NtGdiXLATEOBJ_cGetPalette";
+ table[4755] = "gdi32.dll!NtGdiXLATEOBJ_iXlate";
+ table[4756] = "gdi32.dll!NtGdiXLATEOBJ_hGetColorTransform";
+ table[4757] = "gdi32.dll!NtGdiCLIPOBJ_bEnum";
+ table[4758] = "gdi32.dll!NtGdiCLIPOBJ_cEnumStart";
+ table[4759] = "gdi32.dll!NtGdiCLIPOBJ_ppoGetPath";
+ table[4760] = "gdi32.dll!NtGdiEngDeletePath";
+ table[4761] = "gdi32.dll!NtGdiEngCreateClip";
+ table[4762] = "gdi32.dll!NtGdiEngDeleteClip";
+ table[4763] = "gdi32.dll!NtGdiBRUSHOBJ_ulGetBrushColor";
+ table[4764] = "gdi32.dll!NtGdiBRUSHOBJ_pvAllocRbrush";
+ table[4765] = "gdi32.dll!NtGdiBRUSHOBJ_pvGetRbrush";
+ table[4766] = "gdi32.dll!NtGdiBRUSHOBJ_hGetColorTransform";
+ table[4767] = "gdi32.dll!NtGdiXFORMOBJ_bApplyXform";
+ table[4769] = "gdi32.dll!NtGdiFONTOBJ_vGetInfo";
+ table[4771] = "gdi32.dll!NtGdiFONTOBJ_cGetGlyphs";
+ table[4772] = "gdi32.dll!NtGdiFONTOBJ_pifi";
+ table[4773] = "gdi32.dll!NtGdiFONTOBJ_pfdg";
+ table[4774] = "gdi32.dll!NtGdiFONTOBJ_pQueryGlyphAttrs";
+ table[4775] = "gdi32.dll!NtGdiFONTOBJ_pvTrueTypeFontFile";
+ table[4776] = "gdi32.dll!NtGdiFONTOBJ_cGetAllGlyphHandles";
+ table[4777] = "gdi32.dll!NtGdiSTROBJ_bEnum";
+ table[4778] = "gdi32.dll!NtGdiSTROBJ_bEnumPositionsOnly";
+ table[4779] = "gdi32.dll!NtGdiSTROBJ_bGetAdvanceWidths";
+ table[4780] = "gdi32.dll!NtGdiSTROBJ_vEnumStart";
+ table[4781] = "gdi32.dll!NtGdiSTROBJ_dwGetCodePage";
+ table[4782] = "gdi32.dll!NtGdiPATHOBJ_vGetBounds";
+ table[4783] = "gdi32.dll!NtGdiPATHOBJ_bEnum";
+ table[4784] = "gdi32.dll!NtGdiPATHOBJ_vEnumStart";
+ table[4785] = "gdi32.dll!NtGdiPATHOBJ_vEnumStartClipLines";
+ table[4786] = "gdi32.dll!NtGdiPATHOBJ_bEnumClipLines";
+ table[4787] = "gdi32.dll!NtGdiGetDhpdev";
+ table[4788] = "gdi32.dll!NtGdiEngCheckAbort";
+ table[4789] = "gdi32.dll!NtGdiHT_Get8BPPFormatPalette";
+ table[4790] = "gdi32.dll!NtGdiHT_Get8BPPMaskPalette";
+ table[4791] = "gdi32.dll!NtGdiUpdateTransform";
+ table[4793] = "gdi32.dll!NtGdiBRUSHOBJ_DeleteRbrush";
+ table[4794] = "gdi32.dll!NtGdiUMPDEngFreeUserMem";
+ table[4808] = "gdi32.dll!NtGdiDdDDICreateSynchronizationObject";
+ table[4809] = "gdi32.dll!NtGdiDdDDIDestroySynchronizationObject";
+ table[4810] = "gdi32.dll!NtGdiDdDDIWaitForSynchronizationObject";
+ table[4811] = "gdi32.dll!NtGdiDdDDISignalSynchronizationObject";
+ table[4812] = "gdi32.dll!NtGdiDdDDIGetRuntimeData";
+ table[4818] = "gdi32.dll!NtGdiDdDDIGetMultisampleMethodList";
+ table[4826] = "gdi32.dll!NtGdiDdDDIQueryStatistics";
+ table[4829] = "gdi32.dll!NtGdiDdDDICreateOverlay";
+ table[4830] = "gdi32.dll!NtGdiDdDDIUpdateOverlay";
+ table[4831] = "gdi32.dll!NtGdiDdDDIFlipOverlay";
+ table[4832] = "gdi32.dll!NtGdiDdDDIDestroyOverlay";
+ table[4834] = "gdi32.dll!NtGdiDdDDISetGammaRamp";
+ table[4839] = "gdi32.dll!NtGdiDdDDIGetContextSchedulingPriority";
+ table[4841] = "gdi32.dll!NtGdiDdDDIGetProcessSchedulingPriorityClass";
+ table[4842] = "gdi32.dll!NtGdiDdDDIReleaseProcessVidPnSourceOwners";
+ table[4843] = "gdi32.dll!NtGdiDdDDIGetScanLine";
+ table[4845] = "gdi32.dll!NtGdiDdDDIPollDisplayChildren";
+ table[4846] = "gdi32.dll!NtGdiDdDDIInvalidateActiveVidPn";
+ table[4847] = "gdi32.dll!NtGdiDdDDICheckOcclusion";
+ table[4848] = "gdi32.dll!NtGdiDdDDIWaitForIdle";
+ table[4851] = "gdi32.dll!NtGdiDdDDISetDisplayPrivateDriverFormat";
+ table[4852] = "gdi32.dll!NtGdiDdDDISharedPrimaryLockNotification";
+ table[4853] = "gdi32.dll!NtGdiDdDDISharedPrimaryUnLockNotification";
+ table[4856] = "gdi32.dll!NtGdiGetNumberOfPhysicalMonitors";
+ table[4857] = "gdi32.dll!NtGdiGetPhysicalMonitors";
+ table[4858] = "gdi32.dll!NtGdiGetPhysicalMonitorDescription";
+ table[4859] = "gdi32.dll!NtGdiDestroyPhysicalMonitor";
+ table[4860] = "gdi32.dll!NtGdiDDCCIGetVCPFeature";
+ table[4861] = "gdi32.dll!NtGdiDDCCISetVCPFeature";
+ table[4862] = "gdi32.dll!NtGdiDDCCISaveCurrentSettings";
+ table[4863] = "gdi32.dll!NtGdiDDCCIGetCapabilitiesStringLength";
+ table[4864] = "gdi32.dll!NtGdiDDCCIGetCapabilitiesString";
+ table[4865] = "gdi32.dll!NtGdiDDCCIGetTimingReport";
+ table[0] = "ntdll.dll!NtAcceptConnectPort";
+ table[1] = "ntdll.dll!NtAccessCheck";
+ table[2] = "ntdll.dll!ZwAccessCheckAndAuditAlarm";
+ table[3] = "ntdll.dll!NtAccessCheckByType";
+ table[4] = "ntdll.dll!NtAccessCheckByTypeAndAuditAlarm";
+ table[5] = "ntdll.dll!NtAccessCheckByTypeResultList";
+ table[6] = "ntdll.dll!NtAccessCheckByTypeResultListAndAuditAlarm";
+ table[7] = "ntdll.dll!ZwAccessCheckByTypeResultListAndAuditAlarmByHandle";
+ table[8] = "ntdll.dll!NtAddAtom";
+ table[9] = "ntdll.dll!ZwAddBootEntry";
+ table[10] = "ntdll.dll!NtAddDriverEntry";
+ table[11] = "ntdll.dll!ZwAdjustGroupsToken";
+ table[12] = "ntdll.dll!ZwAdjustPrivilegesToken";
+ table[13] = "ntdll.dll!NtAlertResumeThread";
+ table[14] = "ntdll.dll!NtAlertThread";
+ table[15] = "ntdll.dll!ZwAllocateLocallyUniqueId";
+ table[16] = "ntdll.dll!NtAllocateUserPhysicalPages";
+ table[17] = "ntdll.dll!NtAllocateUuids";
+ table[18] = "ntdll.dll!NtAllocateVirtualMemory";
+ table[19] = "ntdll.dll!NtAlpcAcceptConnectPort";
+ table[20] = "ntdll.dll!ZwAlpcCancelMessage";
+ table[21] = "ntdll.dll!ZwAlpcConnectPort";
+ table[22] = "ntdll.dll!ZwAlpcCreatePort";
+ table[23] = "ntdll.dll!NtAlpcCreatePortSection";
+ table[24] = "ntdll.dll!ZwAlpcCreateResourceReserve";
+ table[25] = "ntdll.dll!ZwAlpcCreateSectionView";
+ table[26] = "ntdll.dll!ZwAlpcCreateSecurityContext";
+ table[27] = "ntdll.dll!ZwAlpcDeletePortSection";
+ table[28] = "ntdll.dll!NtAlpcDeleteResourceReserve";
+ table[29] = "ntdll.dll!NtAlpcDeleteSectionView";
+ table[30] = "ntdll.dll!NtAlpcDeleteSecurityContext";
+ table[31] = "ntdll.dll!NtAlpcDisconnectPort";
+ table[32] = "ntdll.dll!ZwAlpcImpersonateClientOfPort";
+ table[33] = "ntdll.dll!ZwAlpcOpenSenderProcess";
+ table[34] = "ntdll.dll!ZwAlpcOpenSenderThread";
+ table[35] = "ntdll.dll!ZwAlpcQueryInformation";
+ table[36] = "ntdll.dll!ZwAlpcQueryInformationMessage";
+ table[37] = "ntdll.dll!NtAlpcRevokeSecurityContext";
+ table[38] = "ntdll.dll!NtAlpcSendWaitReceivePort";
+ table[39] = "ntdll.dll!NtAlpcSetInformation";
+ table[40] = "ntdll.dll!NtApphelpCacheControl";
+ table[41] = "ntdll.dll!ZwAreMappedFilesTheSame";
+ table[42] = "ntdll.dll!ZwAssignProcessToJobObject";
+ table[43] = "ntdll.dll!ZwCallbackReturn";
+ table[44] = "ntdll.dll!NtCancelDeviceWakeupRequest";
+ table[45] = "ntdll.dll!ZwCancelIoFile";
+ table[46] = "ntdll.dll!ZwCancelTimer";
+ table[47] = "ntdll.dll!NtClearEvent";
+ table[48] = "ntdll.dll!NtClose";
+ table[49] = "ntdll.dll!ZwCloseObjectAuditAlarm";
+ table[50] = "ntdll.dll!NtCompactKeys";
+ table[51] = "ntdll.dll!ZwCompareTokens";
+ table[52] = "ntdll.dll!NtCompleteConnectPort";
+ table[53] = "ntdll.dll!ZwCompressKey";
+ table[54] = "ntdll.dll!NtConnectPort";
+ table[55] = "ntdll.dll!ZwContinue";
+ table[56] = "ntdll.dll!ZwCreateDebugObject";
+ table[57] = "ntdll.dll!ZwCreateDirectoryObject";
+ table[58] = "ntdll.dll!NtCreateEvent";
+ table[59] = "ntdll.dll!NtCreateEventPair";
+ table[60] = "ntdll.dll!NtCreateFile";
+ table[61] = "ntdll.dll!NtCreateIoCompletion";
+ table[62] = "ntdll.dll!ZwCreateJobObject";
+ table[63] = "ntdll.dll!NtCreateJobSet";
+ table[64] = "ntdll.dll!ZwCreateKey";
+ table[65] = "ntdll.dll!NtCreateKeyTransacted";
+ table[66] = "ntdll.dll!ZwCreateMailslotFile";
+ table[67] = "ntdll.dll!ZwCreateMutant";
+ table[68] = "ntdll.dll!ZwCreateNamedPipeFile";
+ table[69] = "ntdll.dll!NtCreatePrivateNamespace";
+ table[70] = "ntdll.dll!NtCreatePagingFile";
+ table[71] = "ntdll.dll!ZwCreatePort";
+ table[72] = "ntdll.dll!ZwCreateProcess";
+ table[73] = "ntdll.dll!ZwCreateProcessEx";
+ table[74] = "ntdll.dll!ZwCreateProfile";
+ table[75] = "ntdll.dll!NtCreateSection";
+ table[76] = "ntdll.dll!NtCreateSemaphore";
+ table[77] = "ntdll.dll!ZwCreateSymbolicLinkObject";
+ table[78] = "ntdll.dll!NtCreateThread";
+ table[79] = "ntdll.dll!ZwCreateTimer";
+ table[80] = "ntdll.dll!NtCreateToken";
+ table[81] = "ntdll.dll!NtCreateTransaction";
+ table[82] = "ntdll.dll!ZwOpenTransaction";
+ table[83] = "ntdll.dll!ZwQueryInformationTransaction";
+ table[84] = "ntdll.dll!NtQueryInformationTransactionManager";
+ table[85] = "ntdll.dll!NtPrePrepareEnlistment";
+ table[86] = "ntdll.dll!ZwPrepareEnlistment";
+ table[87] = "ntdll.dll!NtCommitEnlistment";
+ table[88] = "ntdll.dll!ZwReadOnlyEnlistment";
+ table[89] = "ntdll.dll!ZwRollbackComplete";
+ table[90] = "ntdll.dll!NtRollbackEnlistment";
+ table[91] = "ntdll.dll!NtCommitTransaction";
+ table[92] = "ntdll.dll!NtRollbackTransaction";
+ table[93] = "ntdll.dll!ZwPrePrepareComplete";
+ table[94] = "ntdll.dll!NtPrepareComplete";
+ table[95] = "ntdll.dll!ZwCommitComplete";
+ table[96] = "ntdll.dll!ZwSinglePhaseReject";
+ table[97] = "ntdll.dll!ZwSetInformationTransaction";
+ table[98] = "ntdll.dll!ZwSetInformationTransactionManager";
+ table[99] = "ntdll.dll!ZwSetInformationResourceManager";
+ table[100] = "ntdll.dll!ZwCreateTransactionManager";
+ table[101] = "ntdll.dll!ZwOpenTransactionManager";
+ table[102] = "ntdll.dll!NtRenameTransactionManager";
+ table[103] = "ntdll.dll!NtRollforwardTransactionManager";
+ table[104] = "ntdll.dll!NtRecoverEnlistment";
+ table[105] = "ntdll.dll!NtRecoverResourceManager";
+ table[106] = "ntdll.dll!ZwRecoverTransactionManager";
+ table[107] = "ntdll.dll!ZwCreateResourceManager";
+ table[108] = "ntdll.dll!ZwOpenResourceManager";
+ table[109] = "ntdll.dll!ZwGetNotificationResourceManager";
+ table[110] = "ntdll.dll!ZwQueryInformationResourceManager";
+ table[111] = "ntdll.dll!ZwCreateEnlistment";
+ table[112] = "ntdll.dll!ZwOpenEnlistment";
+ table[113] = "ntdll.dll!NtSetInformationEnlistment";
+ table[114] = "ntdll.dll!ZwQueryInformationEnlistment";
+ table[115] = "ntdll.dll!ZwCreateWaitablePort";
+ table[116] = "ntdll.dll!NtDebugActiveProcess";
+ table[117] = "ntdll.dll!ZwDebugContinue";
+ table[118] = "ntdll.dll!ZwDelayExecution";
+ table[119] = "ntdll.dll!ZwDeleteAtom";
+ table[120] = "ntdll.dll!NtDeleteBootEntry";
+ table[121] = "ntdll.dll!ZwDeleteDriverEntry";
+ table[122] = "ntdll.dll!NtDeleteFile";
+ table[123] = "ntdll.dll!ZwDeleteKey";
+ table[124] = "ntdll.dll!NtDeletePrivateNamespace";
+ table[125] = "ntdll.dll!NtDeleteObjectAuditAlarm";
+ table[126] = "ntdll.dll!NtDeleteValueKey";
+ table[127] = "ntdll.dll!ZwDeviceIoControlFile";
+ table[128] = "ntdll.dll!NtDisplayString";
+ table[129] = "ntdll.dll!ZwDuplicateObject";
+ table[130] = "ntdll.dll!NtDuplicateToken";
+ table[131] = "ntdll.dll!ZwEnumerateBootEntries";
+ table[132] = "ntdll.dll!NtEnumerateDriverEntries";
+ table[133] = "ntdll.dll!ZwEnumerateKey";
+ table[134] = "ntdll.dll!ZwEnumerateSystemEnvironmentValuesEx";
+ table[135] = "ntdll.dll!ZwEnumerateTransactionObject";
+ table[136] = "ntdll.dll!NtEnumerateValueKey";
+ table[137] = "ntdll.dll!ZwExtendSection";
+ table[138] = "ntdll.dll!NtFilterToken";
+ table[139] = "ntdll.dll!NtFindAtom";
+ table[140] = "ntdll.dll!ZwFlushBuffersFile";
+ table[141] = "ntdll.dll!ZwFlushInstructionCache";
+ table[142] = "ntdll.dll!NtFlushKey";
+ table[143] = "ntdll.dll!ZwFlushProcessWriteBuffers";
+ table[144] = "ntdll.dll!ZwFlushVirtualMemory";
+ table[145] = "ntdll.dll!NtFlushWriteBuffer";
+ table[146] = "ntdll.dll!NtFreeUserPhysicalPages";
+ table[147] = "ntdll.dll!NtFreeVirtualMemory";
+ table[148] = "ntdll.dll!NtFreezeRegistry";
+ table[149] = "ntdll.dll!ZwFreezeTransactions";
+ table[150] = "ntdll.dll!NtFsControlFile";
+ table[151] = "ntdll.dll!NtGetContextThread";
+ table[152] = "ntdll.dll!NtGetDevicePowerState";
+ table[153] = "ntdll.dll!NtGetNlsSectionPtr";
+ table[154] = "ntdll.dll!ZwGetPlugPlayEvent";
+ table[155] = "ntdll.dll!NtGetWriteWatch";
+ table[156] = "ntdll.dll!NtImpersonateAnonymousToken";
+ table[157] = "ntdll.dll!ZwImpersonateClientOfPort";
+ table[158] = "ntdll.dll!ZwImpersonateThread";
+ table[159] = "ntdll.dll!ZwInitializeNlsFiles";
+ table[160] = "ntdll.dll!ZwInitializeRegistry";
+ table[161] = "ntdll.dll!NtInitiatePowerAction";
+ table[162] = "ntdll.dll!ZwIsProcessInJob";
+ table[163] = "ntdll.dll!NtIsSystemResumeAutomatic";
+ table[164] = "ntdll.dll!ZwListenPort";
+ table[165] = "ntdll.dll!NtLoadDriver";
+ table[166] = "ntdll.dll!NtLoadKey";
+ table[167] = "ntdll.dll!NtLoadKey2";
+ table[168] = "ntdll.dll!NtLoadKeyEx";
+ table[169] = "ntdll.dll!NtLockFile";
+ table[170] = "ntdll.dll!ZwLockProductActivationKeys";
+ table[171] = "ntdll.dll!NtLockRegistryKey";
+ table[172] = "ntdll.dll!ZwLockVirtualMemory";
+ table[173] = "ntdll.dll!ZwMakePermanentObject";
+ table[174] = "ntdll.dll!NtMakeTemporaryObject";
+ table[175] = "ntdll.dll!NtMapUserPhysicalPages";
+ table[176] = "ntdll.dll!ZwMapUserPhysicalPagesScatter";
+ table[177] = "ntdll.dll!ZwMapViewOfSection";
+ table[178] = "ntdll.dll!NtModifyBootEntry";
+ table[179] = "ntdll.dll!ZwModifyDriverEntry";
+ table[180] = "ntdll.dll!NtNotifyChangeDirectoryFile";
+ table[181] = "ntdll.dll!NtNotifyChangeKey";
+ table[182] = "ntdll.dll!NtNotifyChangeMultipleKeys";
+ table[183] = "ntdll.dll!ZwOpenDirectoryObject";
+ table[184] = "ntdll.dll!NtOpenEvent";
+ table[185] = "ntdll.dll!NtOpenEventPair";
+ table[186] = "ntdll.dll!NtOpenFile";
+ table[187] = "ntdll.dll!ZwOpenIoCompletion";
+ table[188] = "ntdll.dll!ZwOpenJobObject";
+ table[189] = "ntdll.dll!ZwOpenKey";
+ table[190] = "ntdll.dll!NtOpenKeyTransacted";
+ table[191] = "ntdll.dll!NtOpenMutant";
+ table[192] = "ntdll.dll!NtOpenPrivateNamespace";
+ table[193] = "ntdll.dll!ZwOpenObjectAuditAlarm";
+ table[194] = "ntdll.dll!ZwOpenProcess";
+ table[195] = "ntdll.dll!ZwOpenProcessToken";
+ table[196] = "ntdll.dll!ZwOpenProcessTokenEx";
+ table[197] = "ntdll.dll!NtOpenSection";
+ table[198] = "ntdll.dll!NtOpenSemaphore";
+ table[199] = "ntdll.dll!NtOpenSession";
+ table[200] = "ntdll.dll!NtOpenSymbolicLinkObject";
+ table[201] = "ntdll.dll!ZwOpenThread";
+ table[202] = "ntdll.dll!NtOpenThreadToken";
+ table[203] = "ntdll.dll!NtOpenThreadTokenEx";
+ table[204] = "ntdll.dll!ZwOpenTimer";
+ table[205] = "ntdll.dll!NtPlugPlayControl";
+ table[206] = "ntdll.dll!ZwPowerInformation";
+ table[207] = "ntdll.dll!ZwPrivilegeCheck";
+ table[208] = "ntdll.dll!ZwPrivilegeObjectAuditAlarm";
+ table[209] = "ntdll.dll!NtPrivilegedServiceAuditAlarm";
+ table[210] = "ntdll.dll!ZwProtectVirtualMemory";
+ table[211] = "ntdll.dll!ZwPulseEvent";
+ table[212] = "ntdll.dll!ZwQueryAttributesFile";
+ table[213] = "ntdll.dll!ZwQueryBootEntryOrder";
+ table[214] = "ntdll.dll!ZwQueryBootOptions";
+ table[215] = "ntdll.dll!NtQueryDebugFilterState";
+ table[216] = "ntdll.dll!NtQueryDefaultLocale";
+ table[217] = "ntdll.dll!ZwQueryDefaultUILanguage";
+ table[218] = "ntdll.dll!ZwQueryDirectoryFile";
+ table[219] = "ntdll.dll!ZwQueryDirectoryObject";
+ table[220] = "ntdll.dll!NtQueryDriverEntryOrder";
+ table[221] = "ntdll.dll!ZwQueryEaFile";
+ table[222] = "ntdll.dll!NtQueryEvent";
+ table[223] = "ntdll.dll!ZwQueryFullAttributesFile";
+ table[224] = "ntdll.dll!NtQueryInformationAtom";
+ table[225] = "ntdll.dll!ZwQueryInformationFile";
+ table[226] = "ntdll.dll!ZwQueryInformationJobObject";
+ table[227] = "ntdll.dll!ZwQueryInformationPort";
+ table[228] = "ntdll.dll!ZwQueryInformationProcess";
+ table[229] = "ntdll.dll!NtQueryInformationThread";
+ table[230] = "ntdll.dll!ZwQueryInformationToken";
+ table[231] = "ntdll.dll!NtQueryInstallUILanguage";
+ table[232] = "ntdll.dll!NtQueryIntervalProfile";
+ table[233] = "ntdll.dll!NtQueryIoCompletion";
+ table[234] = "ntdll.dll!ZwQueryKey";
+ table[235] = "ntdll.dll!NtQueryMultipleValueKey";
+ table[236] = "ntdll.dll!NtQueryMutant";
+ table[237] = "ntdll.dll!NtQueryObject";
+ table[238] = "ntdll.dll!NtQueryOpenSubKeys";
+ table[239] = "ntdll.dll!NtQueryOpenSubKeysEx";
+ table[240] = "ntdll.dll!NtQueryPerformanceCounter";
+ table[241] = "ntdll.dll!ZwQueryQuotaInformationFile";
+ table[242] = "ntdll.dll!ZwQuerySection";
+ table[243] = "ntdll.dll!NtQuerySecurityObject";
+ table[244] = "ntdll.dll!ZwQuerySemaphore";
+ table[245] = "ntdll.dll!ZwQuerySymbolicLinkObject";
+ table[246] = "ntdll.dll!ZwQuerySystemEnvironmentValue";
+ table[247] = "ntdll.dll!ZwQuerySystemEnvironmentValueEx";
+ table[248] = "ntdll.dll!NtQuerySystemInformation";
+ table[249] = "ntdll.dll!NtQuerySystemTime";
+ table[250] = "ntdll.dll!ZwQueryTimer";
+ table[251] = "ntdll.dll!NtQueryTimerResolution";
+ table[252] = "ntdll.dll!ZwQueryValueKey";
+ table[253] = "ntdll.dll!NtQueryVirtualMemory";
+ table[254] = "ntdll.dll!NtQueryVolumeInformationFile";
+ table[255] = "ntdll.dll!NtQueueApcThread";
+ table[256] = "ntdll.dll!ZwRaiseException";
+ table[257] = "ntdll.dll!ZwRaiseHardError";
+ table[258] = "ntdll.dll!NtReadFile";
+ table[259] = "ntdll.dll!NtReadFileScatter";
+ table[260] = "ntdll.dll!ZwReadRequestData";
+ table[261] = "ntdll.dll!NtReadVirtualMemory";
+ table[262] = "ntdll.dll!ZwRegisterThreadTerminatePort";
+ table[263] = "ntdll.dll!ZwReleaseMutant";
+ table[264] = "ntdll.dll!NtReleaseSemaphore";
+ table[265] = "ntdll.dll!ZwRemoveIoCompletion";
+ table[266] = "ntdll.dll!ZwRemoveProcessDebug";
+ table[267] = "ntdll.dll!ZwRenameKey";
+ table[268] = "ntdll.dll!ZwReplaceKey";
+ table[269] = "ntdll.dll!NtReplacePartitionUnit";
+ table[270] = "ntdll.dll!ZwReplyPort";
+ table[271] = "ntdll.dll!NtReplyWaitReceivePort";
+ table[272] = "ntdll.dll!NtReplyWaitReceivePortEx";
+ table[273] = "ntdll.dll!NtReplyWaitReplyPort";
+ table[274] = "ntdll.dll!ZwRequestDeviceWakeup";
+ table[275] = "ntdll.dll!ZwRequestPort";
+ table[276] = "ntdll.dll!NtRequestWaitReplyPort";
+ table[277] = "ntdll.dll!ZwRequestWakeupLatency";
+ table[278] = "ntdll.dll!NtResetEvent";
+ table[279] = "ntdll.dll!ZwResetWriteWatch";
+ table[280] = "ntdll.dll!NtRestoreKey";
+ table[281] = "ntdll.dll!ZwResumeProcess";
+ table[282] = "ntdll.dll!ZwResumeThread";
+ table[283] = "ntdll.dll!NtSaveKey";
+ table[284] = "ntdll.dll!NtSaveKeyEx";
+ table[285] = "ntdll.dll!NtSaveMergedKeys";
+ table[286] = "ntdll.dll!NtSecureConnectPort";
+ table[287] = "ntdll.dll!ZwSetBootEntryOrder";
+ table[288] = "ntdll.dll!ZwSetBootOptions";
+ table[289] = "ntdll.dll!ZwSetContextThread";
+ table[290] = "ntdll.dll!NtSetDebugFilterState";
+ table[291] = "ntdll.dll!NtSetDefaultHardErrorPort";
+ table[292] = "ntdll.dll!NtSetDefaultLocale";
+ table[293] = "ntdll.dll!ZwSetDefaultUILanguage";
+ table[294] = "ntdll.dll!NtSetDriverEntryOrder";
+ table[295] = "ntdll.dll!ZwSetEaFile";
+ table[296] = "ntdll.dll!NtSetEvent";
+ table[297] = "ntdll.dll!NtSetEventBoostPriority";
+ table[298] = "ntdll.dll!NtSetHighEventPair";
+ table[299] = "ntdll.dll!NtSetHighWaitLowEventPair";
+ table[300] = "ntdll.dll!ZwSetInformationDebugObject";
+ table[301] = "ntdll.dll!ZwSetInformationFile";
+ table[302] = "ntdll.dll!ZwSetInformationJobObject";
+ table[303] = "ntdll.dll!ZwSetInformationKey";
+ table[304] = "ntdll.dll!ZwSetInformationObject";
+ table[305] = "ntdll.dll!ZwSetInformationProcess";
+ table[306] = "ntdll.dll!ZwSetInformationThread";
+ table[307] = "ntdll.dll!ZwSetInformationToken";
+ table[308] = "ntdll.dll!NtSetIntervalProfile";
+ table[309] = "ntdll.dll!NtSetIoCompletion";
+ table[310] = "ntdll.dll!ZwSetLdtEntries";
+ table[311] = "ntdll.dll!ZwSetLowEventPair";
+ table[312] = "ntdll.dll!ZwSetLowWaitHighEventPair";
+ table[313] = "ntdll.dll!ZwSetQuotaInformationFile";
+ table[314] = "ntdll.dll!NtSetSecurityObject";
+ table[315] = "ntdll.dll!ZwSetSystemEnvironmentValue";
+ table[316] = "ntdll.dll!ZwSetSystemEnvironmentValueEx";
+ table[317] = "ntdll.dll!ZwSetSystemInformation";
+ table[318] = "ntdll.dll!ZwSetSystemPowerState";
+ table[319] = "ntdll.dll!ZwSetSystemTime";
+ table[320] = "ntdll.dll!ZwSetThreadExecutionState";
+ table[321] = "ntdll.dll!ZwSetTimer";
+ table[322] = "ntdll.dll!NtSetTimerResolution";
+ table[323] = "ntdll.dll!ZwSetUuidSeed";
+ table[324] = "ntdll.dll!ZwSetValueKey";
+ table[325] = "ntdll.dll!NtSetVolumeInformationFile";
+ table[326] = "ntdll.dll!ZwShutdownSystem";
+ table[327] = "ntdll.dll!ZwSignalAndWaitForSingleObject";
+ table[328] = "ntdll.dll!NtStartProfile";
+ table[329] = "ntdll.dll!ZwStopProfile";
+ table[330] = "ntdll.dll!ZwSuspendProcess";
+ table[331] = "ntdll.dll!ZwSuspendThread";
+ table[332] = "ntdll.dll!NtSystemDebugControl";
+ table[333] = "ntdll.dll!ZwTerminateJobObject";
+ table[334] = "ntdll.dll!ZwTerminateProcess";
+ table[335] = "ntdll.dll!ZwTerminateThread";
+ table[336] = "ntdll.dll!NtTestAlert";
+ table[337] = "ntdll.dll!ZwThawRegistry";
+ table[338] = "ntdll.dll!NtThawTransactions";
+ table[339] = "ntdll.dll!NtTraceEvent";
+ table[340] = "ntdll.dll!ZwTraceControl";
+ table[341] = "ntdll.dll!NtTranslateFilePath";
+ table[342] = "ntdll.dll!ZwUnloadDriver";
+ table[343] = "ntdll.dll!NtUnloadKey";
+ table[344] = "ntdll.dll!ZwUnloadKey2";
+ table[345] = "ntdll.dll!ZwUnloadKeyEx";
+ table[346] = "ntdll.dll!ZwUnlockFile";
+ table[347] = "ntdll.dll!NtUnlockVirtualMemory";
+ table[348] = "ntdll.dll!NtUnmapViewOfSection";
+ table[349] = "ntdll.dll!NtVdmControl";
+ table[350] = "ntdll.dll!NtWaitForDebugEvent";
+ table[351] = "ntdll.dll!NtWaitForMultipleObjects";
+ table[352] = "ntdll.dll!ZwWaitForSingleObject";
+ table[353] = "ntdll.dll!ZwWaitHighEventPair";
+ table[354] = "ntdll.dll!NtWaitLowEventPair";
+ table[355] = "ntdll.dll!NtWriteFile";
+ table[356] = "ntdll.dll!NtWriteFileGather";
+ table[357] = "ntdll.dll!NtWriteRequestData";
+ table[358] = "ntdll.dll!NtWriteVirtualMemory";
+ table[359] = "ntdll.dll!ZwYieldExecution";
+ table[360] = "ntdll.dll!ZwCreateKeyedEvent";
+ table[361] = "ntdll.dll!NtOpenKeyedEvent";
+ table[362] = "ntdll.dll!NtReleaseKeyedEvent";
+ table[363] = "ntdll.dll!NtWaitForKeyedEvent";
+ table[364] = "ntdll.dll!ZwQueryPortInformationProcess";
+ table[365] = "ntdll.dll!NtGetCurrentProcessorNumber";
+ table[366] = "ntdll.dll!NtWaitForMultipleObjects32";
+ table[367] = "ntdll.dll!ZwGetNextProcess";
+ table[368] = "ntdll.dll!ZwGetNextThread";
+ table[369] = "ntdll.dll!NtCancelIoFileEx";
+ table[370] = "ntdll.dll!NtCancelSynchronousIoFile";
+ table[371] = "ntdll.dll!ZwRemoveIoCompletionEx";
+ table[372] = "ntdll.dll!ZwRegisterProtocolAddressInformation";
+ table[373] = "ntdll.dll!NtPropagationComplete";
+ table[374] = "ntdll.dll!ZwPropagationFailed";
+ table[375] = "ntdll.dll!NtCreateWorkerFactory";
+ table[376] = "ntdll.dll!ZwReleaseWorkerFactoryWorker";
+ table[377] = "ntdll.dll!NtWaitForWorkViaWorkerFactory";
+ table[378] = "ntdll.dll!ZwSetInformationWorkerFactory";
+ table[379] = "ntdll.dll!ZwQueryInformationWorkerFactory";
+ table[380] = "ntdll.dll!NtWorkerFactoryWorkerReady";
+ table[381] = "ntdll.dll!NtShutdownWorkerFactory";
+ table[382] = "ntdll.dll!NtCreateThreadEx";
+ table[383] = "ntdll.dll!NtCreateUserProcess";
+ table[384] = "ntdll.dll!NtQueryLicenseValue";
+ table[385] = "ntdll.dll!ZwMapCMFModule";
+ table[386] = "ntdll.dll!ZwIsUILanguageComitted";
+ table[387] = "ntdll.dll!ZwFlushInstallUILanguage";
+ table[388] = "ntdll.dll!NtGetMUIRegistryInfo";
+ table[389] = "ntdll.dll!ZwAcquireCMFViewOwnership";
+ table[390] = "ntdll.dll!NtReleaseCMFViewOwnership";
+ table[4545] = "imm32.dll!NtUserGetThreadState";
+ table[4520] = "imm32.dll!NtUserGetImeInfoEx";
+ table[4600] = "imm32.dll!NtUserQueryWindow";
+ table[4598] = "imm32.dll!NtUserQueryInputContext";
+ table[4581] = "imm32.dll!NtUserNotifyIMEStatus";
+ table[4412] = "imm32.dll!NtUserAssociateInputContext";
+ table[4429] = "imm32.dll!NtUserCallOneParam";
+ table[4463] = "imm32.dll!NtUserDisableThreadIme";
+ table[4635] = "imm32.dll!NtUserSetImeInfoEx";
+ table[4697] = "imm32.dll!NtUserUpdateInputContext";
+ table[4417] = "imm32.dll!NtUserBuildHimcList";
+ table[4704] = "imm32.dll!NtUserValidateHandleSecure";
+ table[4450] = "imm32.dll!NtUserCreateInputContext";
+ table[4460] = "imm32.dll!NtUserDestroyInputContext";
+ table[4492] = "imm32.dll!NtUserGetAppImeLevel";
+ table[4519] = "imm32.dll!NtUserGetImeHotKey";
+ table[4623] = "imm32.dll!NtUserSetAppImeLevel";
+ table[4657] = "imm32.dll!NtUserSetThreadLayoutHandles";
+ table[4560] = "user32.dll!NtUserInitializeClientPfnArrays";
+ table[4596] = "user32.dll!NtUserProcessConnect";
+ table[4453] = "user32.dll!NtUserCreateWindowStation";
+ table[4449] = "user32.dll!NtUserCreateDesktopEx";
+ table[4695] = "user32.dll!NtUserUnregisterUserApiHook";
+ table[4607] = "user32.dll!NtUserRegisterUserApiHook";
+ table[4634] = "user32.dll!NtUserSetImeHotKey";
+ table[4670] = "user32.dll!NtUserSetWindowStationUser";
+ table[4568] = "user32.dll!NtUserLoadKeyboardLayoutEx";
+ table[4702] = "user32.dll!NtUserUpdatePerUserSystemParameters";
+ table[4465] = "user32.dll!NtUserDoSoundConnect";
+ table[4720] = "user32.dll!NtUserRegisterSessionPort";
+ table[4423] = "user32.dll!NtUserCallHwndOpt";
+ table[4651] = "user32.dll!NtUserSetShellWindowEx";
+ table[4421] = "user32.dll!NtUserCallHwnd";
+ table[4723] = "user32.dll!NtUserDwmStartRedirection";
+ table[4609] = "user32.dll!NtUserRegisterRawInputDevices";
+ table[4495] = "user32.dll!NtUserGetCaretBlinkTime";
+ table[4681] = "user32.dll!NtUserThunkedMenuInfo";
+ table[4570] = "user32.dll!NtUserLockWindowUpdate";
+ table[4445] = "user32.dll!NtUserCopyAcceleratorTable";
+ table[4478] = "user32.dll!NtUserEndMenu";
+ table[4458] = "user32.dll!DestroyAcceleratorTable";
+ table[4625] = "user32.dll!NtUserSetClassLong";
+ table[4613] = "user32.dll!NtUserRemoveMenu";
+ table[4643] = "user32.dll!NtUserSetMenuDefaultItem";
+ table[4461] = "user32.dll!NtUserDestroyMenu";
+ table[4553] = "user32.dll!NtUserGhostWindowFromHungWindow";
+ table[4590] = "user32.dll!NtUserPaintMonitor";
+ table[4589] = "user32.dll!NtUserPaintDesktop";
+ table[4454] = "user32.dll!NtUserDdeInitialize";
+ table[4663] = "user32.dll!NtUserSetWindowPlacement";
+ table[4669] = "user32.dll!NtUserSetWindowsHookEx";
+ table[4457] = "user32.dll!NtUserDeleteMenu";
+ table[4565] = "user32.dll!NtUserInvalidateRgn";
+ table[4638] = "user32.dll!NtUserSetInformationThread";
+ table[4493] = "user32.dll!NtUserGetAsyncKeyState";
+ table[4434] = "user32.dll!NtUserCheckDesktopByThreadId";
+ table[4727] = "user32.dll!NtUserGetWindowMinimizeRect";
+ table[4672] = "user32.dll!NtUserSetWinEventHook";
+ table[4508] = "user32.dll!NtUserGetCPD";
+ table[4665] = "user32.dll!NtUserSetWindowRgn";
+ table[4569] = "user32.dll!NtUserLockWindowStation";
+ table[4485] = "user32.dll!NtUserFillWindow";
+ table[4677] = "user32.dll!NtUserSoundSentry";
+ table[4696] = "user32.dll!NtUserUnregisterHotKey";
+ table[4524] = "user32.dll!NtUserGetKeyboardState";
+ table[4691] = "user32.dll!NtUserUnhookWinEvent";
+ table[4621] = "user32.dll!NtUserSendInput";
+ table[4658] = "user32.dll!NtUserSetThreadState";
+ table[4624] = "user32.dll!NtUserSetCapture";
+ table[4633] = "user32.dll!NtUserSetFocus";
+ table[4636] = "user32.dll!NtUserSetImeOwnerWindow";
+ table[4425] = "user32.dll!NtUserCallHwndParamLock";
+ table[4686] = "user32.dll!NtUserCalcMenuBar";
+ table[4546] = "user32.dll!NtUserGetTitleBarInfo";
+ table[4512] = "user32.dll!NtUserGetDCEx";
+ table[4528] = "user32.dll!NtUserGetMenuBarInfo";
+ table[4678] = "user32.dll!NtUserSwitchDesktop";
+ table[4522] = "user32.dll!NtUserGetKeyboardLayoutList";
+ table[4675] = "user32.dll!NtUserShowWindow";
+ table[4660] = "user32.dll!NtUserSetProcessDPIAware";
+ table[4646] = "user32.dll!NtUserSetParent";
+ table[4693] = "user32.dll!NtUserUnlockWindowStation";
+ table[4684] = "user32.dll!NtUserTrackMouseEvent";
+ table[4608] = "user32.dll!NtUserRegisterHotKey";
+ table[4701] = "user32.dll!NtUserSetLayeredWindowAttributes";
+ table[4580] = "user32.dll!NtUserMoveWindow";
+ table[4699] = "user32.dll!NtUserUpdateLayeredWindow";
+ table[4542] = "user32.dll!NtUserGetScrollBarInfo";
+ table[4447] = "user32.dll!NtUserCreateAcceleratorTable";
+ table[4630] = "user32.dll!NtUserSetCursor";
+ table[4586] = "user32.dll!NtUserOpenInputDesktop";
+ table[4514] = "user32.dll!NtUserGetForegroundWindow";
+ table[4548] = "user32.dll!GetUpdateRect";
+ table[4656] = "user32.dll!NtUserSetThreadDesktop";
+ table[4536] = "user32.dll!NtUserGetProcessWindowStation";
+ table[4647] = "user32.dll!NtUserSetProcessWindowStation";
+ table[4605] = "user32.dll!NtUserRegisterClassExWOW";
+ table[4494] = "user32.dll!NtUserGetAtomName";
+ table[4697] = "user32.dll!NtUserUpdateInputContext";
+ table[4661] = "user32.dll!NtUserSetWindowFNID";
+ table[4498] = "user32.dll!NtUserGetClassName";
+ table[4516] = "user32.dll!NtUserGetGUIThreadInfo";
+ table[4585] = "user32.dll!NtUserOpenDesktop";
+ table[4419] = "user32.dll!NtUserBuildNameList";
+ table[4617] = "user32.dll!NtUserSBGetParms";
+ table[4674] = "user32.dll!NtUserShowScrollBar";
+ table[4588] = "user32.dll!NtUserOpenWindowStation";
+ table[4694] = "user32.dll!NtUserUnregisterClass";
+ table[4481] = "user32.dll!NtUserEnumDisplayMonitors";
+ table[4462] = "user32.dll!NtUserDestroyWindow";
+ table[4441] = "user32.dll!NtUserCloseDesktop";
+ table[4459] = "user32.dll!NtUserDestroyCursor";
+ table[4442] = "user32.dll!NtUserCloseWindowStation";
+ table[4418] = "user32.dll!NtUserBuildHwndList";
+ table[4594] = "user32.dll!NtUserPostThreadMessage";
+ table[4614] = "user32.dll!NtUserRemoveProp";
+ table[4662] = "user32.dll!NtUserSetWindowLong";
+ table[4664] = "user32.dll!NtUserSetWindowPos";
+ table[4422] = "user32.dll!NtUserCallHwndLock";
+ table[4649] = "user32.dll!NtUserSetProp";
+ table[4477] = "user32.dll!NtUserEndDeferWindowPosEx";
+ table[4455] = "user32.dll!NtUserDeferWindowPos";
+ table[4632] = "user32.dll!NtUserSetCursorIconData";
+ table[4486] = "user32.dll!NtUserFindExistingCursorIcon";
+ table[4533] = "user32.dll!NtUserGetObjectInformation";
+ table[4452] = "user32.dll!NtUserCreateWindowEx";
+ table[4682] = "user32.dll!NtUserThunkedMenuItemInfo";
+ table[4543] = "user32.dll!NtUserGetSystemMenu";
+ table[4676] = "user32.dll!NtUserShowWindowAsync";
+ table[4600] = "user32.dll!NtUserQueryWindow";
+ table[4517] = "user32.dll!NtUserGetIconInfo";
+ table[4518] = "user32.dll!NtUserGetIconSize";
+ table[4583] = "user32.dll!NtUserNotifyWinEvent";
+ table[4562] = "user32.dll!NtUserInternalGetWindowText";
+ table[4666] = "user32.dll!NtUserGetWindowRgnEx";
+ table[4472] = "user32.dll!NtUserDrawIconEx";
+ table[4424] = "user32.dll!NtUserCallHwndParam";
+ table[4409] = "user32.dll!NtUserActivateKeyboardLayout";
+ table[4629] = "user32.dll!NtUserSetConsoleReserveKeys";
+ table[4557] = "user32.dll!NtUserHungWindowFromGhostWindow";
+ table[4622] = "user32.dll!NtUserSetActiveWindow";
+ table[4544] = "user32.dll!NtUserGetThreadDesktop";
+ table[4497] = "user32.dll!NtUserGetClassInfoEx";
+ table[4480] = "user32.dll!NtUserEnumDisplayDevices";
+ table[4482] = "user32.dll!NtUserEnumDisplaySettings";
+ table[4650] = "user32.dll!NtUserSetScrollInfo";
+ table[4526] = "user32.dll!NtUserGetKeyState";
+ table[4575] = "user32.dll!NtUserMessageCall";
+ table[4706] = "user32.dll!NtUserValidateTimerCallback";
+ table[4611] = "user32.dll!NtUserRegisterWindowMessage";
+ table[4511] = "user32.dll!NtUserGetDC";
+ table[4602] = "user32.dll!NtUserRealInternalGetMessage";
+ table[4487] = "user32.dll!NtUserFindWindowEx";
+ table[4500] = "user32.dll!NtUserGetClipboardFormatName";
+ table[4545] = "user32.dll!NtUserGetThreadState";
+ table[4679] = "user32.dll!NtUserSystemParametersInfo";
+ table[4593] = "user32.dll!NtUserPostMessage";
+ table[4659] = "user32.dll!NtUserSetTimer";
+ table[4567] = "user32.dll!NtUserKillTimer";
+ table[4414] = "user32.dll!NtUserBeginPaint";
+ table[4479] = "user32.dll!NtUserEndPaint";
+ table[4604] = "user32.dll!NtUserRedrawWindow";
+ table[4491] = "user32.dll!NtUserGetAncestor";
+ table[4555] = "user32.dll!NtUserHideCaret";
+ table[4673] = "user32.dll!NtUserShowCaret";
+ table[4448] = "user32.dll!NtUserCreateCaret";
+ table[4531] = "user32.dll!NtUserGetMessage";
+ table[4591] = "user32.dll!PeekMessageW";
+ table[4429] = "user32.dll!NtUserCallOneParam";
+ table[4428] = "user32.dll!NtUserCallNoParam";
+ table[4464] = "user32.dll!NtUserDispatchMessage";
+ table[4648] = "user32.dll!NtUserGetProp";
+ table[4550] = "user32.dll!NtUserGetWindowDC";
+ table[4513] = "user32.dll!NtUserGetDoubleClickTime";
+ table[4710] = "user32.dll!NtUserWaitMessage";
+ table[4564] = "user32.dll!NtUserInvalidateRect";
+ table[4430] = "user32.dll!NtUserCallTwoParam";
+ table[4618] = "user32.dll!NtUserScrollDC";
+ table[4704] = "user32.dll!NtUserValidateHandleSecure";
+ table[4439] = "user32.dll!NtUserClipCursor";
+ table[4571] = "user32.dll!NtUserLockWorkStation";
+ table[4724] = "user32.dll!NtUserDwmStopRedirection";
+ table[4534] = "user32.dll!NtUserGetOpenClipboardWindow";
+ table[4484] = "user32.dll!NtUserExcludeUpdateRgn";
+ table[4707] = "user32.dll!NtUserVkKeyScanEx";
+ table[4654] = "user32.dll!NtUserSetSystemMenu";
+ table[4438] = "user32.dll!NtUserChildWindowFromPointEx";
+ table[4671] = "user32.dll!NtUserSetWindowWord";
+ table[4476] = "user32.dll!NtUserEnableScrollBar";
+ table[4415] = "user32.dll!NtUserBitBltSysBmp";
+ table[4509] = "user32.dll!NtUserGetCursorFrameInfo";
+ table[4427] = "user32.dll!NtUserCallNextHookEx";
+ table[4628] = "user32.dll!NtUserSetClipboardViewer";
+ table[4437] = "user32.dll!CheckMenuItem";
+ table[4566] = "user32.dll!NtUserIsClipboardFormatAvailable";
+ table[4440] = "user32.dll!NtUserCloseClipboard";
+ table[4584] = "user32.dll!NtUserOpenClipboard";
+ table[4502] = "user32.dll!NtUserGetClipboardSequenceNumber";
+ table[4431] = "user32.dll!NtUserChangeClipboardChain";
+ table[4688] = "user32.dll!NtUserTranslateAccelerator";
+ table[4705] = "user32.dll!NtUserValidateRect";
+ table[4667] = "user32.dll!NtUserSetWindowRgnEx";
+ table[4475] = "user32.dll!NtUserEnableMenuItem";
+ table[4496] = "user32.dll!NtUserGetCaretPos";
+ table[4505] = "user32.dll!NtUserGetComboBoxInfo";
+ table[4426] = "user32.dll!NtUserCallMsgFilter";
+ table[4690] = "user32.dll!NtUserUnhookWindowsHookEx";
+ table[4416] = "user32.dll!NtUserBlockInput";
+ table[4595] = "user32.dll!NtUserPrintWindow";
+ table[4501] = "user32.dll!NtUserGetClipboardOwner";
+ table[4413] = "user32.dll!NtUserAttachThreadInput";
+ table[4446] = "user32.dll!NtUserCountClipboardFormats";
+ table[4572] = "user32.dll!NtUserLogicalToPhysicalPoint";
+ table[4592] = "user32.dll!NtUserPhysicalToLogicalPoint";
+ table[4712] = "user32.dll!NtUserWindowFromPhysicalPoint";
+ table[4685] = "user32.dll!NtUserTrackPopupMenuEx";
+ table[4641] = "user32.dll!NtUserSetMenu";
+ table[4432] = "user32.dll!NtUserChangeDisplaySettings";
+ table[4726] = "user32.dll!NtUserDwmGetDxRgn";
+ table[4725] = "user32.dll!NtUserDwmHintDxUpdate";
+ table[4573] = "user32.dll!NtUserMapVirtualKeyEx";
+ table[4640] = "user32.dll!NtUserSetKeyboardState";
+ table[4708] = "user32.dll!NtUserWaitForInputIdle";
+ table[4549] = "user32.dll!NtUserGetUpdateRgn";
+ table[4456] = "user32.dll!NtUserDefSetText";
+ table[4506] = "user32.dll!NtUserGetControlBrush";
+ table[4488] = "user32.dll!NtUserFlashWindowEx";
+ table[4689] = "user32.dll!NtUserTranslateMessage";
+ table[4619] = "user32.dll!NtUserScrollWindowEx";
+ table[4687] = "user32.dll!NtUserPaintMenuBar";
+ table[4551] = "user32.dll!NtUserGetWindowPlacement";
+ table[4713] = "user32.dll!NtUserWindowFromPoint";
+ table[4410] = "user32.dll!NtUserAddClipboardFormatListener";
+ table[4411] = "user32.dll!NtUserAlterWindowStyle";
+ table[4420] = "user32.dll!NtUserBuildPropList";
+ table[4433] = "user32.dll!NtUserCheckAccessForIntegrityLevel";
+ table[4435] = "user32.dll!NtUserCheckWindowThreadDesktop";
+ table[4444] = "user32.dll!NtUserConvertMemHandle";
+ table[4451] = "user32.dll!NtUserCreateLocalMemHandle";
+ table[4466] = "user32.dll!NtUserDoSoundDisconnect";
+ table[4467] = "user32.dll!NtUserDragDetect";
+ table[4468] = "user32.dll!NtUserDragObject";
+ table[4469] = "user32.dll!NtUserDrawAnimatedRects";
+ table[4470] = "user32.dll!NtUserDrawCaption";
+ table[4471] = "user32.dll!NtUserDrawCaptionTemp";
+ table[4473] = "user32.dll!NtUserDrawMenuBarTemp";
+ table[4474] = "user32.dll!NtUserEmptyClipboard";
+ table[4483] = "user32.dll!NtUserEvent";
+ table[4489] = "user32.dll!NtUserFrostCrashedWindow";
+ table[4490] = "user32.dll!NtUserGetAltTabInfo";
+ table[4499] = "user32.dll!NtUserGetClipboardData";
+ table[4503] = "user32.dll!NtUserGetClipboardViewer";
+ table[4504] = "user32.dll!NtUserGetClipCursor";
+ table[4507] = "user32.dll!NtUserGetControlColor";
+ table[4510] = "user32.dll!NtUserGetCursorInfo";
+ table[4515] = "user32.dll!NtUserGetGuiResources";
+ table[4519] = "user32.dll!NtUserGetImeHotKey";
+ table[4521] = "user32.dll!NtUserGetInternalWindowPos";
+ table[4523] = "user32.dll!NtUserGetKeyboardLayoutName";
+ table[4525] = "user32.dll!NtUserGetKeyNameText";
+ table[4527] = "user32.dll!NtUserGetListBoxInfo";
+ table[4529] = "user32.dll!NtUserGetMenuIndex";
+ table[4530] = "user32.dll!NtUserGetMenuItemRect";
+ table[4532] = "user32.dll!NtUserGetMouseMovePointsEx";
+ table[4535] = "user32.dll!NtUserGetPriorityClipboardFormat";
+ table[4537] = "user32.dll!NtUserGetRawInputBuffer";
+ table[4538] = "user32.dll!NtUserGetRawInputData";
+ table[4539] = "user32.dll!NtUserGetRawInputDeviceInfo";
+ table[4540] = "user32.dll!NtUserGetRawInputDeviceList";
+ table[4541] = "user32.dll!NtUserGetRegisteredRawInputDevices";
+ table[4547] = "user32.dll!NtUserGetUpdatedClipboardFormats";
+ table[4552] = "user32.dll!NtUserGetWOWClass";
+ table[4556] = "user32.dll!NtUserHiliteMenuItem";
+ table[4558] = "user32.dll!NtUserImpersonateDdeClientWindow";
+ table[4561] = "user32.dll!NtUserInitTask";
+ table[4563] = "user32.dll!NtUserInternalGetWindowIcon";
+ table[4574] = "user32.dll!NtUserMenuItemFromPoint";
+ table[4576] = "user32.dll!NtUserMinMaximize";
+ table[4577] = "user32.dll!NtUserMNDragLeave";
+ table[4578] = "user32.dll!NtUserMNDragOver";
+ table[4579] = "user32.dll!NtUserModifyUserStartupInfoFlags";
+ table[4581] = "user32.dll!NtUserNotifyIMEStatus";
+ table[4587] = "user32.dll!NtUserOpenThreadDesktop";
+ table[4599] = "user32.dll!NtUserQuerySendMessage";
+ table[4601] = "user32.dll!NtUserRealChildWindowFromPoint";
+ table[4603] = "user32.dll!NtUserRealWaitMessageEx";
+ table[4606] = "user32.dll!NtUserRegisterErrorReportingDialog";
+ table[4610] = "user32.dll!NtUserRegisterTasklist";
+ table[4612] = "user32.dll!NtUserRemoveClipboardFormatListener";
+ table[4616] = "user32.dll!NtUserResolveDesktopForWOW";
+ table[4626] = "user32.dll!NtUserSetClassWord";
+ table[4627] = "user32.dll!NtUserSetClipboardData";
+ table[4631] = "user32.dll!NtUserSetCursorContents";
+ table[4639] = "user32.dll!NtUserSetInternalWindowPos";
+ table[4642] = "user32.dll!NtUserSetMenuContextHelpId";
+ table[4644] = "user32.dll!NtUserSetMenuFlagRtoL";
+ table[4645] = "user32.dll!NtUserSetObjectInformation";
+ table[4652] = "user32.dll!NtUserSetSysColors";
+ table[4653] = "user32.dll!NtUserSetSystemCursor";
+ table[4655] = "user32.dll!NtUserSetSystemTimer";
+ table[4668] = "user32.dll!NtUserSetWindowsHookAW";
+ table[4680] = "user32.dll!NtUserTestForInteractiveUser";
+ table[4683] = "user32.dll!NtUserToUnicodeEx";
+ table[4692] = "user32.dll!NtUserUnloadKeyboardLayout";
+ table[4698] = "user32.dll!NtUserUpdateInstance";
+ table[4700] = "user32.dll!NtUserGetLayeredWindowAttributes";
+ table[4703] = "user32.dll!NtUserUserHandleGrantAccess";
+ table[4709] = "user32.dll!NtUserWaitForMsgAndEvent";
+ table[4711] = "user32.dll!NtUserWin32PoolAllocationStats";
+ table[4714] = "user32.dll!NtUserYieldTask";
+ table[4721] = "user32.dll!NtUserUnregisterSessionPort";
+ table[4722] = "user32.dll!NtUserUpdateWindowTransform";
+ table[4866] = "user32.dll!NtUserSetMirrorRendering";
+ table[4867] = "user32.dll!NtUserShowSystemCursor";
+}
+
+ return table;
+}
+
+#endif // TRACELINE_SYSCALL_MAP_H_
diff --git a/chromium/tools/unused-symbols-report.py b/chromium/tools/unused-symbols-report.py
new file mode 100755
index 00000000000..900bf16e120
--- /dev/null
+++ b/chromium/tools/unused-symbols-report.py
@@ -0,0 +1,171 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints a report of symbols stripped by the linker due to being unused.
+
+To use, build with these linker flags:
+ -Wl,--gc-sections
+ -Wl,--print-gc-sections
+the first one is the default in Release; search build/common.gypi for it
+and to see where to add the other.
+
+Then build, saving the output into a file:
+ make chrome 2>&1 | tee buildlog
+and run this script on it:
+ ./tools/unused-symbols-report.py buildlog > report.html
+"""
+
+import cgi
+import optparse
+import os
+import re
+import subprocess
+import sys
+
+# Lazily-started c++filt subprocess, shared by every Demangle() call so we
+# spawn one pipe for the whole build log instead of one process per symbol.
+cppfilt_proc = None
+def Demangle(sym):
+  """Demangle a C++ symbol by passing it through c++filt.
+
+  Args:
+    sym: mangled symbol name (one token, no whitespace).
+
+  Returns:
+    The demangled name, or the input unchanged if c++filt can't demangle it.
+  """
+  global cppfilt_proc
+  if cppfilt_proc is None:
+    cppfilt_proc = subprocess.Popen(['c++filt'], stdin=subprocess.PIPE,
+                                    stdout=subprocess.PIPE)
+  # Python 2 chevron print: write one line to c++filt, read one line back.
+  # NOTE(review): assumes c++filt emits exactly one output line per input
+  # line and never blocks on pipe buffering -- holds for line-sized symbols.
+  print >>cppfilt_proc.stdin, sym
+  return cppfilt_proc.stdout.readline().strip()
+
+
+def Unyuck(sym):
+  """Attempt to prettify a C++ symbol by some basic heuristics.
+
+  Collapses the fully-expanded std::basic_string template spellings back to
+  their familiar typedefs (std::string, std::wstring, string16) and strips
+  redundant default-allocator arguments, purely for readability of the
+  report.  Text replacement only; the result is no longer a valid mangled
+  or demangled identifier.
+  """
+  sym = sym.replace('std::basic_string<char, std::char_traits<char>, '
+                    'std::allocator<char> >', 'std::string')
+  sym = sym.replace('std::basic_string<wchar_t, std::char_traits<wchar_t>, '
+                    'std::allocator<wchar_t> >', 'std::wstring')
+  sym = sym.replace('std::basic_string<unsigned short, '
+                    'base::string16_char_traits, '
+                    'std::allocator<unsigned short> >', 'string16')
+  # Drop trailing ", std::allocator<T >" template arguments left over in
+  # other container types.
+  sym = re.sub(r', std::allocator<\S+\s+>', '', sym)
+  return sym
+
+
+def Parse(input, skip_paths=None, only_paths=None):
+  """Parse the --print-gc-sections build output.
+
+  Args:
+    input: iterable over the lines of the build output
+      (NOTE(review): parameter name shadows the `input` builtin).
+    skip_paths: optional substring; object paths containing it are skipped.
+    only_paths: optional substring; if set, only object paths containing it
+      are reported.  Both filters are plain substring tests, not globs.
+
+  Yields:
+    (target name, path to .o file, demangled symbol)
+  """
+  # Matches the linker's gc-sections lines: the removed ".text.<symbol>"
+  # section name and the object file it came from.
+  symbol_re = re.compile(r"'\.text\.(\S+)' in file '(\S+)'$")
+  # Expects out/<config>/<subdir>/<target>/<rest>; group 1 becomes the
+  # target name.  NOTE(review): layout-specific -- verify against the
+  # current build output directory structure.
+  path_re = re.compile(r"^out/[^/]+/[^/]+/([^/]+)/(.*)$")
+  for line in input:
+    match = symbol_re.search(line)
+    if not match:
+      continue
+    symbol, path = match.groups()
+    symbol = Unyuck(Demangle(symbol))
+    path = os.path.normpath(path)
+    if skip_paths and skip_paths in path:
+      continue
+    if only_paths and only_paths not in path:
+      continue
+    match = path_re.match(path)
+    if not match:
+      # Non-fatal: report and keep scanning the rest of the log.
+      print >>sys.stderr, "Skipping weird path", path
+      continue
+    target, path = match.groups()
+    yield target, path, symbol
+
+
+# HTML header for our output page; printed once by Output() before the
+# per-target symbol tables.
+TEMPLATE_HEADER = """<!DOCTYPE html>
+<head>
+<style>
+body {
+  font-family: sans-serif;
+  font-size: 0.8em;
+}
+h1, h2 {
+  font-weight: normal;
+  margin: 0.5em 0;
+}
+h2 {
+  margin-top: 1em;
+}
+tr:hover {
+  background: #eee;
+}
+.permalink {
+  padding-left: 1ex;
+  font-size: 80%;
+  text-decoration: none;
+  color: #ccc;
+}
+.symbol {
+  font-family: WebKitWorkAround, monospace;
+  margin-left: 4ex;
+  text-indent: -4ex;
+  padding: 0.5ex 1ex;
+}
+.file {
+  padding: 0.5ex 1ex;
+  padding-left: 2ex;
+  font-family: WebKitWorkAround, monospace;
+  font-size: 90%;
+  color: #777;
+}
+</style>
+</head>
+<body>
+<h1>chrome symbols deleted at link time</h1>
+"""
+
+
+def Output(iter):
+  """Print HTML given an iterable of (target, path, symbol) tuples.
+
+  Groups the stripped symbols by target, then writes a jump-to-target
+  <select> followed by one table per target to stdout.
+  NOTE(review): parameter name shadows the `iter` builtin.
+  """
+  # target name -> list of (symbol, path) pairs.
+  targets = {}
+  for target, path, symbol in iter:
+    entries = targets.setdefault(target, [])
+    entries.append((symbol, path))
+
+  print TEMPLATE_HEADER
+  print "<p>jump to target:"
+  print "<select onchange='document.location.hash = this.value'>"
+  for target in sorted(targets.keys()):
+    print "<option>%s</option>" % target
+  print "</select></p>"
+
+  for target in sorted(targets.keys()):
+    print "<h2>%s" % target
+    # Anchor so the <select> above (and direct links) can jump here.
+    print "<a class=permalink href='#%s' name='%s'>#</a>" % (target, target)
+    print "</h2>"
+    print "<table width=100% cellspacing=0>"
+    for symbol, path in sorted(targets[target]):
+      # Escape for HTML, then add <wbr> hints so long C++ names can wrap
+      # at namespace separators.
+      htmlsymbol = cgi.escape(symbol).replace('::', '::<wbr>')
+      print "<tr><td><div class=symbol>%s</div></td>" % htmlsymbol
+      print "<td valign=top><div class=file>%s</div></td></tr>" % path
+    print "</table>"
+
+
+def main():
+  """Parse command-line flags, then stream the build log into the report.
+
+  Expects one positional argument: the saved build output file.  Writes the
+  HTML report to stdout; exits with status 1 if no input file is given.
+  """
+  parser = optparse.OptionParser(usage='%prog [options] buildoutput\n\n' +
+                                 __doc__)
+  # Filters are substring matches against the normalized object path; by
+  # default third_party code is excluded from the report.
+  parser.add_option("--skip-paths", metavar="STR", default="third_party",
+                    help="skip paths matching STR [default=%default]")
+  parser.add_option("--only-paths", metavar="STR",
+                    help="only include paths matching STR [default=%default]")
+  opts, args = parser.parse_args()
+
+  if len(args) < 1:
+    parser.print_help()
+    sys.exit(1)
+
+  # Parse() is a generator, so the log is processed as a stream.
+  iter = Parse(open(args[0]),
+               skip_paths=opts.skip_paths,
+               only_paths=opts.only_paths)
+  Output(iter)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/chromium/tools/usb_gadget/BUILD.gn b/chromium/tools/usb_gadget/BUILD.gn
new file mode 100644
index 00000000000..3427d68f328
--- /dev/null
+++ b/chromium/tools/usb_gadget/BUILD.gn
@@ -0,0 +1,39 @@
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Packages the USB gadget framework's Python sources into a single zip in
+# the build output directory, alongside an .md5 hash of that zip (both
+# produced by package.py).
+action("usb_gadget") {
+  script = "//tools/usb_gadget/package.py"
+  inputs = [
+    "__init__.py",
+    "__main__.py",
+    "composite_echo_gadget.py",
+    "composite_gadget.py",
+    "default_gadget.py",
+    "echo_gadget.py",
+    "gadget.py",
+    "hid_constants.py",
+    "hid_descriptors.py",
+    "hid_echo_gadget.py",
+    "hid_gadget.py",
+    "keyboard_gadget.py",
+    "linux_gadgetfs.py",
+    "mouse_gadget.py",
+    "msos20_descriptors.py",
+    "server.py",
+    "usb_constants.py",
+    "usb_descriptors.py",
+  ]
+  package_path = "$root_build_dir/usb_gadget.zip"
+  hash_path = "$root_build_dir/usb_gadget.zip.md5"
+  outputs = [
+    package_path,
+    hash_path,
+  ]
+  # Actions run with cwd == root_build_dir, so all paths are rebased
+  # relative to it before being handed to the script.
+  args = [
+    "--zip-file",
+    rebase_path(package_path, root_build_dir),
+    "--hash-file",
+    rebase_path(hash_path, root_build_dir),
+  ] + rebase_path(inputs, root_build_dir)
+}
diff --git a/chromium/tools/usb_gadget/OWNERS b/chromium/tools/usb_gadget/OWNERS
new file mode 100644
index 00000000000..ff41043f529
--- /dev/null
+++ b/chromium/tools/usb_gadget/OWNERS
@@ -0,0 +1 @@
+reillyg@chromium.org
diff --git a/chromium/tools/usb_gadget/__init__.py b/chromium/tools/usb_gadget/__init__.py
new file mode 100644
index 00000000000..4d6aabb953d
--- /dev/null
+++ b/chromium/tools/usb_gadget/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/chromium/tools/usb_gadget/__main__.py b/chromium/tools/usb_gadget/__main__.py
new file mode 100644
index 00000000000..03341ea85cd
--- /dev/null
+++ b/chromium/tools/usb_gadget/__main__.py
@@ -0,0 +1,67 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Package entry-point."""
+
+import argparse
+
+import netifaces
+from tornado import ioloop
+
+import composite_echo_gadget
+import echo_gadget
+import hid_echo_gadget
+import keyboard_gadget
+import linux_gadgetfs
+import mouse_gadget
+import server
+
+
+def ParseArgs():
+  """Parse application arguments.
+
+  Returns:
+    argparse.Namespace with interface, port, hardware and start_claimed.
+  """
+  parser = argparse.ArgumentParser(description='USB gadget server.')
+  parser.add_argument(
+      '-i', '--interface', default='lo',
+      help='Listen for HTTP connections on this interface.')
+  # NOTE(review): no type=int here, so a port given on the command line
+  # arrives as a string while the default stays an int -- confirm the
+  # downstream listen() call accepts both.
+  parser.add_argument(
+      '-p', '--port', default=8080,
+      help='Listen for HTTP connections on this port.')
+  parser.add_argument(
+      '--hardware', default='beaglebone-black',
+      help='Hardware configuration.')
+  parser.add_argument(
+      '--start-claimed',
+      help='Start with the device claimed by this client.')
+  return parser.parse_args()
+
+
+def main():
+  """Configure the shared server module and run the HTTP server forever.
+
+  Copies parsed flags into module-level globals on `server`, resolves the
+  listen interface's IPv4 address via netifaces, attaches the gadgetfs
+  hardware backend, registers all gadget request handlers, and then blocks
+  in the tornado IO loop until interrupted.
+  """
+  args = ParseArgs()
+
+  # The server module holds shared state as module globals; populate them
+  # before anything registers handlers.
+  server.interface = args.interface
+  server.port = args.port
+  server.hardware = args.hardware
+  server.claimed_by = args.start_claimed
+
+  # Advertise the first IPv4 address of the chosen interface.
+  addrs = netifaces.ifaddresses(server.interface)
+  ip_address = addrs[netifaces.AF_INET][0]['addr']
+  server.address = '{}:{}'.format(ip_address, server.port)
+
+  server.chip = linux_gadgetfs.LinuxGadgetfs(server.hardware)
+  # NOTE(review): server.default is defined in the server module (not
+  # visible here); presumably the default gadget configuration -- confirm.
+  server.SwitchGadget(server.default)
+
+  composite_echo_gadget.RegisterHandlers()
+  echo_gadget.RegisterHandlers()
+  hid_echo_gadget.RegisterHandlers()
+  keyboard_gadget.RegisterHandlers()
+  mouse_gadget.RegisterHandlers()
+
+  server.http_server.listen(server.port)
+
+  # Blocks until the IO loop is stopped (e.g. KeyboardInterrupt).
+  ioloop.IOLoop.instance().start()
+  print 'Exiting...'
+
+
+if __name__ == '__main__':
+  main()
diff --git a/chromium/tools/usb_gadget/composite_echo_gadget.py b/chromium/tools/usb_gadget/composite_echo_gadget.py
new file mode 100644
index 00000000000..4f3cd165453
--- /dev/null
+++ b/chromium/tools/usb_gadget/composite_echo_gadget.py
@@ -0,0 +1,68 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import uuid
+
+import composite_gadget
+import echo_gadget
+import hid_echo_gadget
+import hid_gadget
+import usb_constants
+import usb_descriptors
+
+
class CompositeEchoGadget(composite_gadget.CompositeGadget):
  """Composite echo gadget: three vendor echo interfaces plus a HID echo."""

  def __init__(self):
    """Assemble the device descriptor, echo features and string table."""
    dev_desc = usb_descriptors.DeviceDescriptor(
        idVendor=usb_constants.VendorID.GOOGLE,
        idProduct=usb_constants.ProductID.GOOGLE_COMPOSITE_ECHO_GADGET,
        bcdUSB=0x0210,  # USB 2.1 to indicate support for BOS descriptors.
        iManufacturer=1,
        iProduct=2,
        iSerialNumber=3,
        bcdDevice=0x0100)

    # Interrupt, bulk and isochronous echo pairs on interfaces 0-2.
    echo = echo_gadget.EchoCompositeFeature(
        endpoints=[(0, 5, 0x81, 0x01), (1, 6, 0x82, 0x02), (2, 7, 0x83, 0x03)])

    # HID echo function on interface 3.
    hid = hid_gadget.HidCompositeFeature(
        report_desc=hid_echo_gadget.EchoFeature.REPORT_DESC,
        features={0: hid_echo_gadget.EchoFeature()},
        interface_number=3,
        interface_string=4,
        in_endpoint=0x84, out_endpoint=0x04)

    super(CompositeEchoGadget, self).__init__(dev_desc, [echo, hid])

    for string_index, text in (
        (1, 'Google Inc.'),
        (2, 'Echo Gadget'),
        (3, '{:06X}'.format(uuid.getnode())),
        (4, 'HID Echo'),
        (5, 'Interrupt Echo'),
        (6, 'Bulk Echo'),
        (7, 'Isochronous Echo')):
      self.AddStringDescriptor(string_index, text)

    # Enable Microsoft OS 2.0 Descriptors for Windows 8.1 and above; the
    # compat IDs force Windows to load WINUSB.SYS for the echo functions.
    self.EnableMicrosoftOSDescriptorsV2(vendor_code=0x02)
    for interface_number in (0, 1, 2):
      self.SetMicrosoftCompatId(interface_number, 'WINUSB')
+
def RegisterHandlers():
  """Registers web request handlers with the application server."""

  import server
  from tornado import web

  class WebConfigureHandler(web.RequestHandler):
    """POST switches the active gadget to a fresh CompositeEchoGadget."""

    def post(self):
      server.SwitchGadget(CompositeEchoGadget())

  routes = [(r'/composite_echo/configure', WebConfigureHandler)]
  server.app.add_handlers('.*$', routes)
diff --git a/chromium/tools/usb_gadget/composite_gadget.py b/chromium/tools/usb_gadget/composite_gadget.py
new file mode 100644
index 00000000000..ebf2ea317cc
--- /dev/null
+++ b/chromium/tools/usb_gadget/composite_gadget.py
@@ -0,0 +1,277 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A composite USB gadget is built from multiple USB features.
+"""
+
+import gadget
+import usb_constants
+import usb_descriptors
+
+
class CompositeGadget(gadget.Gadget):
  """Basic functionality for a composite USB device.

  Composes multiple USB features into a single device. Control transfers
  are first offered to the base class and then routed to the feature that
  owns the addressed interface or endpoint.
  """

  def __init__(self, device_desc, features):
    """Create a USB gadget device.

    Args:
      device_desc: USB device descriptor.
      features: USB device features.
    """
    # dicts mapping interface numbers to features for FS and HS configurations
    self._fs_interface_feature_map = {}
    self._hs_interface_feature_map = {}

    # Both configurations are bus-powered (bmAttributes 0x80) with
    # MaxPower=50 (MaxPower is expressed in 2 mA units).
    fs_config_desc = usb_descriptors.ConfigurationDescriptor(
        bmAttributes=0x80,
        MaxPower=50)
    hs_config_desc = usb_descriptors.ConfigurationDescriptor(
        bmAttributes=0x80,
        MaxPower=50)
    # Collect every feature's interfaces into the two configurations and
    # remember which feature owns each interface number.
    for feature in features:
      for fs_interface in feature.GetFullSpeedInterfaces():
        fs_config_desc.AddInterface(fs_interface)
        self._fs_interface_feature_map[fs_interface.bInterfaceNumber] = feature
      for hs_interface in feature.GetHighSpeedInterfaces():
        hs_config_desc.AddInterface(hs_interface)
        self._hs_interface_feature_map[hs_interface.bInterfaceNumber] = feature

    super(CompositeGadget, self).__init__(
        device_desc, fs_config_desc, hs_config_desc)
    self._features = features

  def Connected(self, chip, speed):
    """Propagates the connection event to every feature."""
    super(CompositeGadget, self).Connected(chip, speed)
    for feature in self._features:
      feature.Connected(self)

  def Disconnected(self):
    """Propagates the disconnection event to every feature."""
    super(CompositeGadget, self).Disconnected()
    for feature in self._features:
      feature.Disconnected()

  def _GetInterfaceFeatureMap(self):
    """Returns the interface-to-feature map for the current speed.

    Raises:
      RuntimeError: If the device is not connected at a known speed.
    """
    if self.GetSpeed() == usb_constants.Speed.FULL:
      return self._fs_interface_feature_map
    elif self.GetSpeed() == usb_constants.Speed.HIGH:
      return self._hs_interface_feature_map
    else:
      raise RuntimeError('Device is not connected.')

  def ReceivePacket(self, endpoint, data):
    """Routes an incoming packet to the feature that owns the endpoint."""
    interface = self.GetInterfaceForEndpoint(endpoint)
    feature = self._GetInterfaceFeatureMap()[interface]
    feature.ReceivePacket(endpoint, data)

  def _GetFeatureForIndex(self, recipient, index):
    """Looks up the feature addressed by a control transfer.

    Args:
      recipient: Request recipient (interface or endpoint).
      index: wIndex field of the setup packet; an interface number or an
          endpoint address depending on the recipient.

    Returns:
      The owning feature, or None if the request addresses neither a known
      interface nor a known endpoint.
    """
    interface = None
    if recipient == usb_constants.Recipient.INTERFACE:
      interface = index
    elif recipient == usb_constants.Recipient.ENDPOINT:
      interface = self.GetInterfaceForEndpoint(index)

    if interface is not None:
      return self._GetInterfaceFeatureMap().get(interface)
    return None

  def StandardControlRead(self, recipient, request, value, index, length):
    """Delegates to the base class, then to the feature addressed by wIndex."""
    response = super(CompositeGadget, self).StandardControlRead(
        recipient, request, value, index, length)
    if response is not None:
      return response

    feature = self._GetFeatureForIndex(recipient, index)
    if feature:
      return feature.StandardControlRead(
          recipient, request, value, index, length)

  def StandardControlWrite(self, recipient, request, value, index, data):
    """Delegates to the base class, then to the feature addressed by wIndex."""
    response = super(CompositeGadget, self).StandardControlWrite(
        recipient, request, value, index, data)
    if response is not None:
      return response

    feature = self._GetFeatureForIndex(recipient, index)
    if feature:
      return feature.StandardControlWrite(
          recipient, request, value, index, data)

  def ClassControlRead(self, recipient, request, value, index, length):
    """Delegates to the base class, then to the feature addressed by wIndex."""
    response = super(CompositeGadget, self).ClassControlRead(
        recipient, request, value, index, length)
    if response is not None:
      return response

    feature = self._GetFeatureForIndex(recipient, index)
    if feature:
      return feature.ClassControlRead(recipient, request, value, index, length)

  def ClassControlWrite(self, recipient, request, value, index, data):
    """Delegates to the base class, then to the feature addressed by wIndex."""
    response = super(CompositeGadget, self).ClassControlWrite(
        recipient, request, value, index, data)
    if response is not None:
      return response

    feature = self._GetFeatureForIndex(recipient, index)
    if feature:
      return feature.ClassControlWrite(recipient, request, value, index, data)

  def VendorControlRead(self, recipient, request, value, index, length):
    """Delegates to the base class, then to the feature addressed by wIndex."""
    response = super(CompositeGadget, self).VendorControlRead(
        recipient, request, value, index, length)
    if response is not None:
      return response

    feature = self._GetFeatureForIndex(recipient, index)
    if feature:
      return feature.VendorControlRead(recipient, request, value, index, length)

  def VendorControlWrite(self, recipient, request, value, index, data):
    """Delegates to the base class, then to the feature addressed by wIndex."""
    response = super(CompositeGadget, self).VendorControlWrite(
        recipient, request, value, index, data)
    if response is not None:
      return response

    feature = self._GetFeatureForIndex(recipient, index)
    if feature:
      return feature.VendorControlWrite(recipient, request, value, index, data)
+
+
class CompositeFeature(object):
  """One function of a composite USB device.

  Holds the feature's interface descriptors and forwards packet and
  endpoint operations to the owning gadget while connected. Subclasses
  override the control-transfer hooks below as needed.
  """

  def __init__(self, fs_interface_descs, hs_interface_descs):
    # _gadget is set while this feature's device is connected to a host.
    self._gadget = None
    self._fs_interface_descs = fs_interface_descs
    self._hs_interface_descs = hs_interface_descs

  def GetFullSpeedInterfaces(self):
    """Returns this feature's full-speed interface descriptors."""
    return self._fs_interface_descs

  def GetHighSpeedInterfaces(self):
    """Returns this feature's high-speed interface descriptors."""
    return self._hs_interface_descs

  def Connected(self, my_gadget):
    """Called when the owning gadget is connected to a host."""
    self._gadget = my_gadget

  def Disconnected(self):
    """Called when the owning gadget is disconnected from the host."""
    self._gadget = None

  def IsConnected(self):
    """Returns True while the owning gadget is connected."""
    return self._gadget is not None

  def SendPacket(self, endpoint, data):
    """Sends a packet on one of this feature's endpoints.

    Raises:
      RuntimeError: If the device is not connected.
    """
    if self._gadget is None:
      raise RuntimeError('Device is not connected.')
    self._gadget.SendPacket(endpoint, data)

  def HaltEndpoint(self, endpoint):
    """Stalls one of this feature's endpoints.

    Raises:
      RuntimeError: If the device is not connected.
    """
    if self._gadget is None:
      raise RuntimeError('Device is not connected.')
    self._gadget.HaltEndpoint(endpoint)

  def GetDescriptor(self, recipient, typ, index, lang, length):
    """Returns a descriptor for this feature, or None to stall.

    Default implementation returns None; subclasses may override.
    """
    _ = recipient, typ, index, lang, length
    return None

  def StandardControlRead(self, recipient, request, value, index, length):
    """Handle standard USB control transfers.

    Args:
      recipient: Request recipient (interface or endpoint)
      request: bRequest field of the setup packet.
      value: wValue field of the setup packet.
      index: wIndex field of the setup packet.
      length: Maximum amount of data the host expects the device to return.

    Returns:
      A buffer to return to the USB host with len <= length on success or
      None to stall the pipe.
    """
    _ = recipient, request, value, index, length
    return None

  def ClassControlRead(self, recipient, request, value, index, length):
    """Handle class-specific control transfers.

    Args:
      recipient: Request recipient (interface or endpoint)
      request: bRequest field of the setup packet.
      value: wValue field of the setup packet.
      index: wIndex field of the setup packet.
      length: Maximum amount of data the host expects the device to return.

    Returns:
      A buffer to return to the USB host with len <= length on success or
      None to stall the pipe.
    """
    _ = recipient, request, value, index, length
    return None

  def VendorControlRead(self, recipient, request, value, index, length):
    """Handle vendor-specific control transfers.

    Args:
      recipient: Request recipient (interface or endpoint)
      request: bRequest field of the setup packet.
      value: wValue field of the setup packet.
      index: wIndex field of the setup packet.
      length: Maximum amount of data the host expects the device to return.

    Returns:
      A buffer to return to the USB host with len <= length on success or
      None to stall the pipe.
    """
    _ = recipient, request, value, index, length
    return None

  def StandardControlWrite(self, recipient, request, value, index, data):
    """Handle standard USB control transfers.

    Args:
      recipient: Request recipient (interface or endpoint)
      request: bRequest field of the setup packet.
      value: wValue field of the setup packet.
      index: wIndex field of the setup packet.
      data: Data stage of the request.

    Returns:
      True on success, None to stall the pipe.
    """
    _ = recipient, request, value, index, data
    return None

  def ClassControlWrite(self, recipient, request, value, index, data):
    """Handle class-specific control transfers.

    Args:
      recipient: Request recipient (interface or endpoint)
      request: bRequest field of the setup packet.
      value: wValue field of the setup packet.
      index: wIndex field of the setup packet.
      data: Data stage of the request.

    Returns:
      True on success, None to stall the pipe.
    """
    _ = recipient, request, value, index, data
    return None

  def VendorControlWrite(self, recipient, request, value, index, data):
    """Handle vendor-specific control transfers.

    Args:
      recipient: Request recipient (interface or endpoint)
      request: bRequest field of the setup packet.
      value: wValue field of the setup packet.
      index: wIndex field of the setup packet.
      data: Data stage of the request.

    Returns:
      True on success, None to stall the pipe.
    """
    _ = recipient, request, value, index, data
    return None
diff --git a/chromium/tools/usb_gadget/default_gadget.py b/chromium/tools/usb_gadget/default_gadget.py
new file mode 100644
index 00000000000..707c51be559
--- /dev/null
+++ b/chromium/tools/usb_gadget/default_gadget.py
@@ -0,0 +1,41 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Default gadget configuration."""
+
+import gadget
+import usb_constants
+import usb_descriptors
+
+
class DefaultGadget(gadget.Gadget):
  """Minimal gadget presented before a specific configuration is chosen."""

  def __init__(self):
    """Create a device exposing one bare interface at both speeds."""
    device_desc = usb_descriptors.DeviceDescriptor(
        idVendor=usb_constants.VendorID.GOOGLE,
        idProduct=usb_constants.ProductID.GOOGLE_TEST_GADGET,
        bcdUSB=0x0200,
        iManufacturer=1,
        iProduct=2,
        iSerialNumber=3,
        bcdDevice=0x0100)

    # The same endpoint-less interface descriptor is shared by the
    # full-speed and high-speed configurations.
    interface_desc = usb_descriptors.InterfaceDescriptor(
        bInterfaceNumber=0)

    configs = []
    for _ in range(2):
      config_desc = usb_descriptors.ConfigurationDescriptor(
          bmAttributes=0x80,
          MaxPower=50)
      config_desc.AddInterface(interface_desc)
      configs.append(config_desc)
    fs_config_desc, hs_config_desc = configs

    super(DefaultGadget, self).__init__(
        device_desc, fs_config_desc, hs_config_desc)

    # NOTE(review): iSerialNumber=3 is declared above but no string
    # descriptor with index 3 is added here — presumably supplied elsewhere;
    # confirm against the server code.
    self.AddStringDescriptor(1, "Google Inc.")
    self.AddStringDescriptor(2, "Test Gadget (default state)")
diff --git a/chromium/tools/usb_gadget/echo_gadget.py b/chromium/tools/usb_gadget/echo_gadget.py
new file mode 100644
index 00000000000..98d625a204e
--- /dev/null
+++ b/chromium/tools/usb_gadget/echo_gadget.py
@@ -0,0 +1,239 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""USB echo gadget module.
+
+This gadget has pairs of IN/OUT endpoints that echo packets back to the host.
+"""
+
+import uuid
+
+import composite_gadget
+import usb_constants
+import usb_descriptors
+
+
class EchoCompositeFeature(composite_gadget.CompositeFeature):
  """Composite device feature that echos data back to the host.
  """

  def __init__(self, endpoints):
    """Create an echo gadget.

    Args:
      endpoints: Up to three (interface_number, interface_string_index,
          in_endpoint, out_endpoint) tuples. The first tuple (if present)
          configures an interrupt echo interface, the second a bulk echo
          interface and the third an isochronous echo interface.
    """
    fs_interfaces = []
    hs_interfaces = []

    # Interface from endpoints[0]: interrupt IN/OUT echo pair.
    if len(endpoints) >= 1:
      iface_num, iface_string, in_endpoint, out_endpoint = endpoints[0]
      fs_intr_interface_desc = usb_descriptors.InterfaceDescriptor(
          bInterfaceNumber=iface_num,
          bInterfaceClass=usb_constants.DeviceClass.VENDOR,
          bInterfaceSubClass=0,
          bInterfaceProtocol=0,
          iInterface=iface_string,
      )
      fs_intr_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
          bEndpointAddress=out_endpoint,
          bmAttributes=usb_constants.TransferType.INTERRUPT,
          wMaxPacketSize=64,
          bInterval=1  # 1ms
      ))
      fs_intr_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
          bEndpointAddress=in_endpoint,
          bmAttributes=usb_constants.TransferType.INTERRUPT,
          wMaxPacketSize=64,
          bInterval=1  # 1ms
      ))
      fs_interfaces.append(fs_intr_interface_desc)

      hs_intr_interface_desc = usb_descriptors.InterfaceDescriptor(
          bInterfaceNumber=iface_num,
          bInterfaceClass=usb_constants.DeviceClass.VENDOR,
          bInterfaceSubClass=0,
          bInterfaceProtocol=0,
          iInterface=iface_string
      )
      hs_intr_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
          bEndpointAddress=out_endpoint,
          bmAttributes=usb_constants.TransferType.INTERRUPT,
          wMaxPacketSize=64,
          bInterval=4  # 1ms (at high speed: 2^(4-1) microframes of 125us)
      ))
      hs_intr_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
          bEndpointAddress=in_endpoint,
          bmAttributes=usb_constants.TransferType.INTERRUPT,
          wMaxPacketSize=64,
          bInterval=4  # 1ms (at high speed: 2^(4-1) microframes of 125us)
      ))
      hs_interfaces.append(hs_intr_interface_desc)

    # Interface from endpoints[1]: bulk IN/OUT echo pair.
    if len(endpoints) >= 2:
      iface_num, iface_string, in_endpoint, out_endpoint = endpoints[1]
      fs_bulk_interface_desc = usb_descriptors.InterfaceDescriptor(
          bInterfaceNumber=iface_num,
          bInterfaceClass=usb_constants.DeviceClass.VENDOR,
          bInterfaceSubClass=0,
          bInterfaceProtocol=0,
          iInterface=iface_string
      )
      fs_bulk_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
          bEndpointAddress=out_endpoint,
          bmAttributes=usb_constants.TransferType.BULK,
          wMaxPacketSize=64,
          bInterval=0
      ))
      fs_bulk_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
          bEndpointAddress=in_endpoint,
          bmAttributes=usb_constants.TransferType.BULK,
          wMaxPacketSize=64,
          bInterval=0
      ))
      fs_interfaces.append(fs_bulk_interface_desc)

      hs_bulk_interface_desc = usb_descriptors.InterfaceDescriptor(
          bInterfaceNumber=iface_num,
          bInterfaceClass=usb_constants.DeviceClass.VENDOR,
          bInterfaceSubClass=0,
          bInterfaceProtocol=0,
          iInterface=iface_string
      )
      hs_bulk_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
          bEndpointAddress=out_endpoint,
          bmAttributes=usb_constants.TransferType.BULK,
          wMaxPacketSize=512,  # 512-byte max packet is required at high speed.
          bInterval=0
      ))
      hs_bulk_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
          bEndpointAddress=in_endpoint,
          bmAttributes=usb_constants.TransferType.BULK,
          wMaxPacketSize=512,  # 512-byte max packet is required at high speed.
          bInterval=0
      ))
      hs_interfaces.append(hs_bulk_interface_desc)

    # Interface from endpoints[2]: isochronous IN/OUT echo pair. Alternate
    # setting 0 carries no endpoints so the host can select the interface
    # without reserving isochronous bandwidth; alternate setting 1 carries
    # the actual endpoints.
    if len(endpoints) >= 3:
      iface_num, iface_string, in_endpoint, out_endpoint = endpoints[2]
      fs_interfaces.append(usb_descriptors.InterfaceDescriptor(
          bInterfaceNumber=iface_num,
          bInterfaceClass=usb_constants.DeviceClass.VENDOR,
          bInterfaceSubClass=0,
          bInterfaceProtocol=0,
          iInterface=iface_string
      ))
      fs_isoc_interface_desc = usb_descriptors.InterfaceDescriptor(
          bInterfaceNumber=iface_num,
          bAlternateSetting=1,
          bInterfaceClass=usb_constants.DeviceClass.VENDOR,
          bInterfaceSubClass=0,
          bInterfaceProtocol=0,
          iInterface=iface_string
      )
      fs_isoc_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
          bEndpointAddress=out_endpoint,
          bmAttributes=usb_constants.TransferType.ISOCHRONOUS,
          wMaxPacketSize=1023,
          bInterval=1  # 1ms
      ))
      fs_isoc_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
          bEndpointAddress=in_endpoint,
          bmAttributes=usb_constants.TransferType.ISOCHRONOUS,
          wMaxPacketSize=1023,
          bInterval=1  # 1ms
      ))
      fs_interfaces.append(fs_isoc_interface_desc)

      hs_interfaces.append(usb_descriptors.InterfaceDescriptor(
          bInterfaceNumber=iface_num,
          bInterfaceClass=usb_constants.DeviceClass.VENDOR,
          bInterfaceSubClass=0,
          bInterfaceProtocol=0,
          iInterface=iface_string
      ))
      hs_isoc_interface_desc = usb_descriptors.InterfaceDescriptor(
          bInterfaceNumber=iface_num,
          bAlternateSetting=1,
          bInterfaceClass=usb_constants.DeviceClass.VENDOR,
          bInterfaceSubClass=0,
          bInterfaceProtocol=0,
          iInterface=iface_string
      )
      hs_isoc_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
          bEndpointAddress=out_endpoint,
          bmAttributes=usb_constants.TransferType.ISOCHRONOUS,
          wMaxPacketSize=512,
          bInterval=4  # 1ms (at high speed: 2^(4-1) microframes of 125us)
      ))
      hs_isoc_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
          bEndpointAddress=in_endpoint,
          bmAttributes=usb_constants.TransferType.ISOCHRONOUS,
          wMaxPacketSize=512,
          bInterval=4  # 1ms (at high speed: 2^(4-1) microframes of 125us)
      ))
      hs_interfaces.append(hs_isoc_interface_desc)

    super(EchoCompositeFeature, self).__init__(fs_interfaces, hs_interfaces)

  def ReceivePacket(self, endpoint, data):
    """Echo a packet back to the host.

    Args:
      endpoint: Incoming endpoint (must be an OUT pipe).
      data: Packet data.
    """
    assert endpoint & usb_constants.Dir.IN == 0

    # Send the same payload back on the matching IN endpoint.
    self.SendPacket(endpoint | usb_constants.Dir.IN, data)
+
+
class EchoGadget(composite_gadget.CompositeGadget):
  """Stand-alone echo gadget wrapping a single EchoCompositeFeature."""

  def __init__(self):
    """Create an echo gadget with interrupt, bulk and isochronous pairs."""
    device_desc = usb_descriptors.DeviceDescriptor(
        idVendor=usb_constants.VendorID.GOOGLE,
        idProduct=usb_constants.ProductID.GOOGLE_ECHO_GADGET,
        bcdUSB=0x0200,
        iManufacturer=1,
        iProduct=2,
        iSerialNumber=3,
        bcdDevice=0x0100)

    feature = EchoCompositeFeature(
        endpoints=[(0, 4, 0x81, 0x01), (1, 5, 0x82, 0x02), (2, 6, 0x83, 0x03)])
    super(EchoGadget, self).__init__(device_desc, [feature])

    for string_index, text in (
        (1, 'Google Inc.'),
        (2, 'Echo Gadget'),
        (3, '{:06X}'.format(uuid.getnode())),
        (4, 'Interrupt Echo'),
        (5, 'Bulk Echo'),
        (6, 'Isochronous Echo')):
      self.AddStringDescriptor(string_index, text)

    # Enable Microsoft OS Descriptors for Windows 8 and above; the compat
    # IDs force Windows to load WINUSB.SYS for the echo functions.
    self.EnableMicrosoftOSDescriptorsV1(vendor_code=0x01)
    for interface_number in (0, 1, 2):
      self.SetMicrosoftCompatId(interface_number, 'WINUSB')

    self.AddDeviceCapabilityDescriptor(usb_descriptors.ContainerIdDescriptor(
        ContainerID=uuid.uuid4().bytes_le))
+
def RegisterHandlers():
  """Registers web request handlers with the application server."""

  import server
  from tornado import web

  class WebConfigureHandler(web.RequestHandler):
    """POST switches the active gadget to a fresh EchoGadget."""

    def post(self):
      server.SwitchGadget(EchoGadget())

  routes = [(r'/echo/configure', WebConfigureHandler)]
  server.app.add_handlers('.*$', routes)
diff --git a/chromium/tools/usb_gadget/echo_gadget_test.py b/chromium/tools/usb_gadget/echo_gadget_test.py
new file mode 100755
index 00000000000..a25d68cb233
--- /dev/null
+++ b/chromium/tools/usb_gadget/echo_gadget_test.py
@@ -0,0 +1,22 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import mock
+
+import echo_gadget
+import usb_constants
+
+
class EchoGadgetTest(unittest.TestCase):
  """Unit test for EchoGadget using a mocked USB controller chip."""

  def test_bulk_echo(self):
    # A packet received on the bulk OUT endpoint (0x02) must be echoed with
    # the same payload on the corresponding IN endpoint (0x82).
    g = echo_gadget.EchoGadget()
    chip = mock.Mock()
    g.Connected(chip, usb_constants.Speed.HIGH)
    g.SetConfiguration(1)
    g.ReceivePacket(0x02, 'Hello world!')
    chip.SendPacket.assert_called_once_with(0x82, 'Hello world!')
diff --git a/chromium/tools/usb_gadget/gadget.py b/chromium/tools/usb_gadget/gadget.py
new file mode 100644
index 00000000000..c29075003f6
--- /dev/null
+++ b/chromium/tools/usb_gadget/gadget.py
@@ -0,0 +1,585 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generic USB gadget functionality.
+"""
+
+import struct
+
+import msos20_descriptors
+import usb_constants
+import usb_descriptors
+
+
+class Gadget(object):
+ """Basic functionality for a USB device.
+
+ Implements standard control requests assuming that a subclass will handle
+ class- or vendor-specific requests.
+ """
+
  def __init__(self, device_desc, fs_config_desc, hs_config_desc):
    """Create a USB gadget device.

    Args:
      device_desc: USB device descriptor.
      fs_config_desc: Low/full-speed device descriptor.
      hs_config_desc: High-speed device descriptor.
    """
    self._speed = usb_constants.Speed.UNKNOWN
    self._chip = None  # USB controller; set while connected to a host.
    self._device_desc = device_desc
    self._fs_config_desc = fs_config_desc
    self._hs_config_desc = hs_config_desc
    # dict mapping language codes to a dict mapping indexes to strings
    self._strings = {}
    # BOS container; created lazily by AddDeviceCapabilityDescriptor.
    self._bos_descriptor = None
    # dict mapping interface numbers to a set of endpoint addresses
    self._active_endpoints = {}
    # dict mapping endpoint addresses to interfaces
    self._endpoint_interface_map = {}
    # Microsoft OS descriptor state; populated by the Enable* methods.
    self._ms_vendor_code_v1 = None
    self._ms_vendor_code_v2 = None
    self._ms_compat_ids = {}
    self._ms_os20_config_subset = None
+
  def GetDeviceDescriptor(self):
    """Returns the USB device descriptor."""
    return self._device_desc
+
  def GetFullSpeedConfigurationDescriptor(self):
    """Returns the full-speed configuration descriptor."""
    return self._fs_config_desc
+
  def GetHighSpeedConfigurationDescriptor(self):
    """Returns the high-speed configuration descriptor."""
    return self._hs_config_desc
+
+ def GetConfigurationDescriptor(self):
+ if self._speed == usb_constants.Speed.FULL:
+ return self._fs_config_desc
+ elif self._speed == usb_constants.Speed.HIGH:
+ return self._hs_config_desc
+ else:
+ raise RuntimeError('Device is not connected.')
+
  def GetSpeed(self):
    """Returns the current connection speed (Speed.UNKNOWN if disconnected)."""
    return self._speed
+
+ def AddStringDescriptor(self, index, value, lang=0x0409):
+ """Add a string descriptor to this device.
+
+ Args:
+ index: String descriptor index (matches 'i' fields in descriptors).
+ value: The string.
+ lang: Language code (default: English).
+
+ Raises:
+ ValueError: The index or language code is invalid.
+ """
+ if index < 1 or index > 255:
+ raise ValueError('String descriptor index out of range.')
+ if lang < 0 or lang > 0xffff:
+ raise ValueError('String descriptor language code out of range.')
+
+ lang_strings = self._strings.setdefault(lang, {})
+ lang_strings[index] = value
+
+ def EnableMicrosoftOSDescriptorsV1(self, vendor_code=0x01):
+ if vendor_code < 0 or vendor_code > 255:
+ raise ValueError('Vendor code out of range.')
+ if vendor_code == self._ms_vendor_code_v1:
+ raise ValueError('OS Descriptor v1 vendor code conflicts with v2.')
+
+ self._ms_vendor_code_v1 = vendor_code
+
  def EnableMicrosoftOSDescriptorsV2(self, vendor_code=0x02):
    """Enable Microsoft OS 2.0 descriptors for this device.

    Builds the MS OS 2.0 descriptor set (dwWindowsVersion 0x06030000, i.e.
    Windows 8.1) and registers its platform capability descriptor in the
    device's BOS descriptor.

    Args:
      vendor_code: bRequest value the host will use to request OS
          descriptors; must differ from the OS 1.0 vendor code.

    Raises:
      ValueError: If the vendor code is out of range or conflicts with v1.
    """
    if vendor_code < 0 or vendor_code > 255:
      raise ValueError('Vendor code out of range.')
    if vendor_code == self._ms_vendor_code_v1:
      raise ValueError('OS Descriptor v2 vendor code conflicts with v1.')

    self._ms_vendor_code_v2 = vendor_code
    self._ms_os20_descriptor_set = \
        msos20_descriptors.DescriptorSetHeader(dwWindowsVersion=0x06030000)
    # Gadget devices currently only support one configuration. Contrary to
    # Microsoft's documentation the bConfigurationValue field should be set to
    # the index passed to GET_DESCRIPTOR that returned the configuration instead
    # of the configuration's bConfigurationValue field. (i.e. 0 instead of 1).
    #
    # https://social.msdn.microsoft.com/Forums/windowsdesktop/en-US/ae64282c-3bc3-49af-8391-4d174479d9e7/microsoft-os-20-descriptors-not-working-on-an-interface-of-a-composite-usb-device
    self._ms_os20_config_subset = msos20_descriptors.ConfigurationSubsetHeader(
        bConfigurationValue=0)
    self._ms_os20_descriptor_set.Add(self._ms_os20_config_subset)
    self._ms_os20_platform_descriptor = \
        msos20_descriptors.PlatformCapabilityDescriptor(
            dwWindowsVersion=0x06030000,
            bMS_VendorCode=self._ms_vendor_code_v2)
    self._ms_os20_platform_descriptor.SetDescriptorSet(
        self._ms_os20_descriptor_set)
    self.AddDeviceCapabilityDescriptor(self._ms_os20_platform_descriptor)
+
  def SetMicrosoftCompatId(self, interface_number, compat_id, sub_compat_id=''):
    """Set the Microsoft compatible ID for an interface.

    Records the IDs (presumably consumed by the OS 1.0 descriptor response
    path — confirm against GetMicrosoftOSDescriptorV1) and, when OS 2.0
    descriptors are enabled, also adds a matching function subset to the
    MS OS 2.0 descriptor set.

    Args:
      interface_number: Interface the ID applies to (used as bFirstInterface
          for the OS 2.0 function subset).
      compat_id: Compatible ID string (e.g. 'WINUSB').
      sub_compat_id: Optional sub-compatible ID string.
    """
    self._ms_compat_ids[interface_number] = (compat_id, sub_compat_id)
    if self._ms_os20_config_subset is not None:
      function_header = msos20_descriptors.FunctionSubsetHeader(
          bFirstInterface=interface_number)
      function_header.Add(msos20_descriptors.CompatibleId(
          CompatibleID=compat_id, SubCompatibleID=sub_compat_id))
      self._ms_os20_config_subset.Add(function_header)
+
  def AddDeviceCapabilityDescriptor(self, device_capability):
    """Add a device capability descriptor to this device.

    The containing BOS descriptor is created lazily on first use.

    Args:
      device_capability: The Descriptor object.
    """
    if self._bos_descriptor is None:
      self._bos_descriptor = usb_descriptors.BosDescriptor()
    self._bos_descriptor.AddDeviceCapability(device_capability)
+
  def Connected(self, chip, speed):
    """The device has been connected to a USB host.

    Args:
      chip: USB controller.
      speed: Connection speed.
    """
    # The recorded speed selects which configuration descriptor is served.
    self._speed = speed
    self._chip = chip
+
  def Disconnected(self):
    """The device has been disconnected from the USB host."""
    self._speed = usb_constants.Speed.UNKNOWN
    self._chip = None
    # Drop per-connection endpoint bookkeeping.
    self._active_endpoints.clear()
    self._endpoint_interface_map.clear()
+
  def IsConnected(self):
    """Returns True while the device is connected to a host."""
    return self._chip is not None
+
+ def ControlRead(self, request_type, request, value, index, length):
+ """Handle a read on the control pipe (endpoint zero).
+
+ Args:
+ request_type: bmRequestType field of the setup packet.
+ request: bRequest field of the setup packet.
+ value: wValue field of the setup packet.
+ index: wIndex field of the setup packet.
+ length: Maximum amount of data the host expects the device to return.
+
+ Returns:
+ A buffer to return to the USB host with len <= length on success or
+ None to stall the pipe.
+ """
+ assert request_type & usb_constants.Dir.IN
+ typ = request_type & usb_constants.Type.MASK
+ recipient = request_type & usb_constants.Recipient.MASK
+ if typ == usb_constants.Type.STANDARD:
+ return self.StandardControlRead(
+ recipient, request, value, index, length)
+ elif typ == usb_constants.Type.CLASS:
+ return self.ClassControlRead(
+ recipient, request, value, index, length)
+ elif typ == usb_constants.Type.VENDOR:
+ return self.VendorControlRead(
+ recipient, request, value, index, length)
+
+ def ControlWrite(self, request_type, request, value, index, data):
+ """Handle a write to the control pipe (endpoint zero).
+
+ Args:
+ request_type: bmRequestType field of the setup packet.
+ request: bRequest field of the setup packet.
+ value: wValue field of the setup packet.
+ index: wIndex field of the setup packet.
+ data: Data stage of the request.
+
+ Returns:
+ True on success, None to stall the pipe.
+ """
+ assert not request_type & usb_constants.Dir.IN
+ typ = request_type & usb_constants.Type.MASK
+ recipient = request_type & usb_constants.Recipient.MASK
+ if typ == usb_constants.Type.STANDARD:
+ return self.StandardControlWrite(
+ recipient, request, value, index, data)
+ elif typ == usb_constants.Type.CLASS:
+ return self.ClassControlWrite(
+ recipient, request, value, index, data)
+ elif typ == usb_constants.Type.VENDOR:
+ return self.VendorControlWrite(
+ recipient, request, value, index, data)
+
  def SendPacket(self, endpoint, data):
    """Send a data packet on the given endpoint.

    Args:
      endpoint: Endpoint address (must have the IN direction bit set).
      data: Data buffer.

    Raises:
      ValueError: If the endpoint address is not valid.
      RuntimeError: If the device is not connected.
    """
    if self._chip is None:
      raise RuntimeError('Device is not connected.')
    if not endpoint & usb_constants.Dir.IN:
      raise ValueError('Cannot write to non-input endpoint.')
    self._chip.SendPacket(endpoint, data)
+
  def ReceivePacket(self, endpoint, data):
    """Handle an incoming data packet on one of the device's active endpoints.

    This method should be overridden by a subclass implementing endpoint-based
    data transfers. The default implementation discards the packet.

    Args:
      endpoint: Endpoint address.
      data: Data buffer.
    """
    pass
+
+ def HaltEndpoint(self, endpoint):
+ """Signals a STALL condition to the host on the given endpoint.
+
+ Args:
+ endpoint: Endpoint address.
+ """
+ self._chip.HaltEndpoint(endpoint)
+
+ def StandardControlRead(self, recipient, request, value, index, length):
+ """Handle standard control transfers.
+
+ Args:
+ recipient: Request recipient (device, interface, endpoint, etc.)
+ request: bRequest field of the setup packet.
+ value: wValue field of the setup packet.
+ index: wIndex field of the setup packet.
+ length: Maximum amount of data the host expects the device to return.
+
+ Returns:
+ A buffer to return to the USB host with len <= length on success or
+ None to stall the pipe.
+ """
+ if recipient == usb_constants.Recipient.DEVICE:
+ if request == usb_constants.Request.GET_DESCRIPTOR:
+ desc_type = value >> 8
+ desc_index = value & 0xff
+ desc_lang = index
+
+ print 'GetDescriptor(recipient={}, type={}, index={}, lang={})'.format(
+ recipient, desc_type, desc_index, desc_lang)
+
+ return self.GetDescriptor(recipient, desc_type, desc_index, desc_lang,
+ length)
+
  def GetDescriptor(self, recipient, typ, index, lang, length):
    """Handle a standard GET_DESCRIPTOR request.

    See Universal Serial Bus Specification Revision 2.0 section 9.4.3.

    Args:
      recipient: Request recipient (device, interface, endpoint, etc.)
      typ: Descriptor type.
      index: Descriptor index.
      lang: Descriptor language code.
      length: Maximum amount of data the host expects the device to return.

    Returns:
      The value of the descriptor or None to stall the pipe.
    """
    if typ == usb_constants.DescriptorType.STRING:
      return self.GetStringDescriptor(index, lang, length)
    elif typ == usb_constants.DescriptorType.BOS:
      return self.GetBosDescriptor(length)
    # Other descriptor types fall through to None (stall).
+
  def ClassControlRead(self, recipient, request, value, index, length):
    """Handle class-specific control transfers.

    This function should be overridden by a subclass implementing a particular
    device class. The default implementation stalls the pipe.

    Args:
      recipient: Request recipient (device, interface, endpoint, etc.)
      request: bRequest field of the setup packet.
      value: wValue field of the setup packet.
      index: wIndex field of the setup packet.
      length: Maximum amount of data the host expects the device to return.

    Returns:
      A buffer to return to the USB host with len <= length on success or
      None to stall the pipe.
    """
    _ = recipient, request, value, index, length
    return None
+
+ def VendorControlRead(self, recipient, request, value, index, length):
+ """Handle vendor-specific control transfers.
+
+ This function should be overridden by a subclass if implementing a device
+ that responds to vendor-specific requests.
+
+ Args:
+ recipient: Request recipient (device, interface, endpoint, etc.)
+ request: bRequest field of the setup packet.
+ value: wValue field of the setup packet.
+ index: wIndex field of the setup packet.
+ length: Maximum amount of data the host expects the device to return.
+
+ Returns:
+ A buffer to return to the USB host with len <= length on success or
+ None to stall the pipe.
+ """
+ if (self._ms_vendor_code_v1 is not None and
+ request == self._ms_vendor_code_v1 and
+ (recipient == usb_constants.Recipient.DEVICE or
+ recipient == usb_constants.Recipient.INTERFACE)):
+ return self.GetMicrosoftOSDescriptorV1(recipient, value, index, length)
+ if (self._ms_vendor_code_v2 is not None and
+ request == self._ms_vendor_code_v2 and
+ recipient == usb_constants.Recipient.DEVICE and
+ value == 0x0000 and
+ index == 0x0007):
+ return self.GetMicrosoftOSDescriptorV2(length)
+
+ return None
+
+ def StandardControlWrite(self, recipient, request, value, index, data):
+ """Handle standard control transfers.
+
+ Args:
+ recipient: Request recipient (device, interface, endpoint, etc.)
+ request: bRequest field of the setup packet.
+ value: wValue field of the setup packet.
+ index: wIndex field of the setup packet.
+ data: Data stage of the request.
+
+ Returns:
+ True on success, None to stall the pipe.
+ """
+ _ = data
+
+ if request == usb_constants.Request.SET_CONFIGURATION:
+ if recipient == usb_constants.Recipient.DEVICE:
+ return self.SetConfiguration(value)
+ elif request == usb_constants.Request.SET_INTERFACE:
+ if recipient == usb_constants.Recipient.INTERFACE:
+ return self.SetInterface(index, value)
+
+ def ClassControlWrite(self, recipient, request, value, index, data):
+ """Handle class-specific control transfers.
+
+ This function should be overridden by a subclass implementing a particular
+ device class.
+
+ Args:
+ recipient: Request recipient (device, interface, endpoint, etc.)
+ request: bRequest field of the setup packet.
+ value: wValue field of the setup packet.
+ index: wIndex field of the setup packet.
+ data: Data stage of the request.
+
+ Returns:
+ True on success, None to stall the pipe.
+ """
+ _ = recipient, request, value, index, data
+ return None
+
+ def VendorControlWrite(self, recipient, request, value, index, data):
+ """Handle vendor-specific control transfers.
+
+ This function should be overridden by a subclass if implementing a device
+ that responds to vendor-specific requests.
+
+ Args:
+ recipient: Request recipient (device, interface, endpoint, etc.)
+ request: bRequest field of the setup packet.
+ value: wValue field of the setup packet.
+ index: wIndex field of the setup packet.
+ data: Data stage of the request.
+
+ Returns:
+ True on success, None to stall the pipe.
+ """
+ _ = recipient, request, value, index, data
+ return None
+
+ def GetStringDescriptor(self, index, lang, length):
+ """Handle a GET_DESCRIPTOR(String) request from the host.
+
+ Descriptor index 0 returns the set of languages supported by the device.
+ All other indices return the string descriptors registered with those
+ indices.
+
+ See Universal Serial Bus Specification Revision 2.0 section 9.6.7.
+
+ Args:
+ index: Descriptor index.
+ lang: Descriptor language code.
+ length: Maximum amount of data the host expects the device to return.
+
+ Returns:
+ The string descriptor or None to stall the pipe if the descriptor is not
+ found.
+ """
+ if index == 0:
+ length = 2 + len(self._strings) * 2
+ header = struct.pack('<BB', length, usb_constants.DescriptorType.STRING)
+ lang_codes = [struct.pack('<H', lang)
+ for lang in self._strings.iterkeys()]
+ buf = header + ''.join(lang_codes)
+ assert len(buf) == length
+ return buf[:length]
+ if index == 0xEE and lang == 0 and self._ms_vendor_code_v1 is not None:
+ # See https://msdn.microsoft.com/en-us/windows/hardware/gg463179 for the
+ # definition of this special string descriptor.
+ buf = (struct.pack('<BB', 18, usb_constants.DescriptorType.STRING) +
+ 'MSFT100'.encode('UTF-16LE') +
+ struct.pack('<BB', self._ms_vendor_code_v1, 0))
+ assert len(buf) == 18
+ return buf[:length]
+ elif lang not in self._strings:
+ return None
+ elif index not in self._strings[lang]:
+ return None
+ else:
+ descriptor = usb_descriptors.StringDescriptor(
+ bString=self._strings[lang][index])
+ return descriptor.Encode()[:length]
+
  def GetMicrosoftOSDescriptorV1(self, recipient, value, index, length):
    """Handle a Microsoft OS 1.0 Descriptor request from the host.

    See https://msdn.microsoft.com/en-us/windows/hardware/gg463179 for the
    format of these descriptors.

    Args:
      recipient: Request recipient (device or interface)
      value: wValue field of the setup packet.
      index: wIndex field of the setup packet.
      length: Maximum amount of data the host expects the device to return.

    Returns:
      The descriptor or None to stall the pipe if the descriptor is not
      supported.
    """
    _ = recipient, value
    # wIndex 0x0004 selects the Extended Compat ID descriptor; other feature
    # indices are unsupported and fall through to an implicit None (stall).
    if index == 0x0004:
      return self.GetMicrosoftCompatIds(length)
+
  def GetMicrosoftCompatIds(self, length):
    """Build the Microsoft OS 1.0 Extended Compat ID descriptor.

    One 24-byte function section is emitted for every interface number up to
    the highest one in the active configuration; interfaces without a
    registered compat ID get blank ('') IDs.

    Args:
      length: Maximum amount of data the host expects the device to return.

    Returns:
      The Extended Compat ID descriptor, truncated to |length| bytes.
    """
    interfaces = self.GetConfigurationDescriptor().GetInterfaces()
    max_interface = max([iface.bInterfaceNumber for iface in interfaces])

    # Header: dwLength, bcdVersion 1.00, wIndex 0x0004 (Compat ID), section
    # count, then seven reserved (zero-padded) bytes.
    header = struct.pack('<IHHBxxxxxxx',
                         16 + 24 * (max_interface + 1),
                         0x0100,
                         0x0004,
                         max_interface + 1)
    if length <= len(header):
      return header[:length]

    buf = header
    for interface in xrange(max_interface + 1):
      compat_id, sub_compat_id = self._ms_compat_ids.get(interface, ('', ''))
      # bFirstInterfaceNumber, reserved 0x01, two 8-byte IDs, six reserved.
      buf += struct.pack('<BB8s8sxxxxxx',
                         interface, 0x01, compat_id, sub_compat_id)
    return buf[:length]
+
  def GetMicrosoftOSDescriptorV2(self, length):
    """Return the Microsoft OS 2.0 descriptor set, truncated to |length|."""
    return self._ms_os20_descriptor_set.Encode()[:length]
+
+ def GetBosDescriptor(self, length):
+ """Handle a GET_DESCRIPTOR(BOS) request from the host.
+
+ Device capability descriptors can be added to the Binary Device Object Store
+ returned by this method by calling AddDeviceCapabilityDescriptor.
+
+ See Universal Serial Bus 3.1 Specification, Revision 1.0 section 9.6.2.
+
+ Args:
+ length: Maximum amount of data the host expects the device to return.
+
+ Returns:
+ The device's binary object store descriptor or None to stall the pipe if
+ no device capability descriptors have been configured.
+ """
+ if self._bos_descriptor is None:
+ return None
+
+ return self._bos_descriptor.Encode()[:length]
+
+ def SetConfiguration(self, index):
+ """Handle a SET_CONFIGURATION request from the host.
+
+ See Universal Serial Bus Specification Revision 2.0 section 9.4.7.
+
+ Args:
+ index: Configuration index selected.
+
+ Returns:
+ True on success, None on error to stall the pipe.
+ """
+ print 'SetConfiguration({})'.format(index)
+
+ for endpoint_addrs in self._active_endpoints.values():
+ for endpoint_addr in endpoint_addrs:
+ self._chip.StopEndpoint(endpoint_addr)
+ endpoint_addrs.clear()
+ self._endpoint_interface_map.clear();
+
+ if index == 0:
+ # SET_CONFIGRATION(0) puts the device into the Address state which
+ # Windows does before suspending the port.
+ return True
+ elif index != 1:
+ return None
+
+ config_desc = self.GetConfigurationDescriptor()
+ for interface_desc in config_desc.GetInterfaces():
+ if interface_desc.bAlternateSetting != 0:
+ continue
+ endpoint_addrs = self._active_endpoints.setdefault(
+ interface_desc.bInterfaceNumber, set())
+ for endpoint_desc in interface_desc.GetEndpoints():
+ self._chip.StartEndpoint(endpoint_desc)
+ endpoint_addrs.add(endpoint_desc.bEndpointAddress)
+ self._endpoint_interface_map[endpoint_desc.bEndpointAddress] = \
+ interface_desc.bInterfaceNumber
+ return True
+
+ def SetInterface(self, interface, alt_setting):
+ """Handle a SET_INTERFACE request from the host.
+
+ See Universal Serial Bus Specification Revision 2.0 section 9.4.10.
+
+ Args:
+ interface: Interface number to configure.
+ alt_setting: Alternate setting to select.
+
+ Returns:
+ True on success, None on error to stall the pipe.
+ """
+ print 'SetInterface({}, {})'.format(interface, alt_setting)
+
+ config_desc = self.GetConfigurationDescriptor()
+ interface_desc = None
+ for interface_option in config_desc.GetInterfaces():
+ if (interface_option.bInterfaceNumber == interface and
+ interface_option.bAlternateSetting == alt_setting):
+ interface_desc = interface_option
+ if interface_desc is None:
+ return None
+
+ endpoint_addrs = self._active_endpoints.setdefault(interface, set())
+ for endpoint_addr in endpoint_addrs:
+ self._chip.StopEndpoint(endpoint_addr)
+ del self._endpoint_interface_map[endpoint_addr]
+ for endpoint_desc in interface_desc.GetEndpoints():
+ self._chip.StartEndpoint(endpoint_desc)
+ endpoint_addrs.add(endpoint_desc.bEndpointAddress)
+ self._endpoint_interface_map[endpoint_desc.bEndpointAddress] = \
+ interface_desc.bInterfaceNumber
+ return True
+
  def GetInterfaceForEndpoint(self, endpoint_addr):
    """Return the interface number owning |endpoint_addr|, or None if none."""
    return self._endpoint_interface_map.get(endpoint_addr)
diff --git a/chromium/tools/usb_gadget/gadget_test.py b/chromium/tools/usb_gadget/gadget_test.py
new file mode 100755
index 00000000000..2440983baef
--- /dev/null
+++ b/chromium/tools/usb_gadget/gadget_test.py
@@ -0,0 +1,352 @@
+#!/usr/bin/python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+import uuid
+
+import mock
+
+import gadget
+import usb_constants
+import usb_descriptors
+
+
# Test fixture: a vendor-specific device with one interface that exposes a
# bulk endpoint pair in its default setting and an interrupt endpoint pair in
# alternate setting 1, defined at both full and high speed.
device_desc = usb_descriptors.DeviceDescriptor(
    idVendor=0x18D1,  # Google Inc.
    idProduct=0xFF00,
    bcdUSB=0x0200,
    iManufacturer=1,
    iProduct=2,
    iSerialNumber=3,
    bNumConfigurations=1,
    bcdDevice=0x0100)

# NOTE(review): address 0x01 has the direction bit (0x80) clear (an OUT
# address) and 0x81 has it set (IN), so the *_in_*/*_out_* names below look
# swapped relative to USB direction conventions.  The tests are
# self-consistent either way -- confirm against usb_descriptors before
# renaming.
fs_config_desc = usb_descriptors.ConfigurationDescriptor(
    bmAttributes=0xC0,
    MaxPower=50)

fs_interface_desc = usb_descriptors.InterfaceDescriptor(
    bInterfaceNumber=0
)
fs_config_desc.AddInterface(fs_interface_desc)

fs_bulk_in_endpoint_desc = usb_descriptors.EndpointDescriptor(
    bEndpointAddress=0x01,
    bmAttributes=usb_constants.TransferType.BULK,
    wMaxPacketSize=64,
    bInterval=0
)
fs_interface_desc.AddEndpoint(fs_bulk_in_endpoint_desc)

fs_bulk_out_endpoint_desc = usb_descriptors.EndpointDescriptor(
    bEndpointAddress=0x81,
    bmAttributes=usb_constants.TransferType.BULK,
    wMaxPacketSize=64,
    bInterval=0
)
fs_interface_desc.AddEndpoint(fs_bulk_out_endpoint_desc)

fs_alt_interface_desc = usb_descriptors.InterfaceDescriptor(
    bInterfaceNumber=0,
    bAlternateSetting=1
)
fs_config_desc.AddInterface(fs_alt_interface_desc)

fs_interrupt_in_endpoint_desc = usb_descriptors.EndpointDescriptor(
    bEndpointAddress=0x01,
    bmAttributes=usb_constants.TransferType.INTERRUPT,
    wMaxPacketSize=64,
    bInterval=1
)
fs_alt_interface_desc.AddEndpoint(fs_interrupt_in_endpoint_desc)

fs_interrupt_out_endpoint_desc = usb_descriptors.EndpointDescriptor(
    bEndpointAddress=0x81,
    bmAttributes=usb_constants.TransferType.INTERRUPT,
    wMaxPacketSize=64,
    bInterval=1
)
fs_alt_interface_desc.AddEndpoint(fs_interrupt_out_endpoint_desc)

# High-speed variant of the same configuration; only the packet sizes differ.
hs_config_desc = usb_descriptors.ConfigurationDescriptor(
    bmAttributes=0xC0,
    MaxPower=50)

hs_interface_desc = usb_descriptors.InterfaceDescriptor(
    bInterfaceNumber=0
)
hs_config_desc.AddInterface(hs_interface_desc)

hs_bulk_in_endpoint_desc = usb_descriptors.EndpointDescriptor(
    bEndpointAddress=0x01,
    bmAttributes=usb_constants.TransferType.BULK,
    wMaxPacketSize=512,
    bInterval=0
)
hs_interface_desc.AddEndpoint(hs_bulk_in_endpoint_desc)

hs_bulk_out_endpoint_desc = usb_descriptors.EndpointDescriptor(
    bEndpointAddress=0x81,
    bmAttributes=usb_constants.TransferType.BULK,
    wMaxPacketSize=512,
    bInterval=0
)
hs_interface_desc.AddEndpoint(hs_bulk_out_endpoint_desc)

hs_alt_interface_desc = usb_descriptors.InterfaceDescriptor(
    bInterfaceNumber=0,
    bAlternateSetting=1
)
hs_config_desc.AddInterface(hs_alt_interface_desc)

hs_interrupt_in_endpoint_desc = usb_descriptors.EndpointDescriptor(
    bEndpointAddress=0x01,
    bmAttributes=usb_constants.TransferType.INTERRUPT,
    wMaxPacketSize=256,
    bInterval=1
)
hs_alt_interface_desc.AddEndpoint(hs_interrupt_in_endpoint_desc)

hs_interrupt_out_endpoint_desc = usb_descriptors.EndpointDescriptor(
    bEndpointAddress=0x81,
    bmAttributes=usb_constants.TransferType.INTERRUPT,
    wMaxPacketSize=256,
    bInterval=1
)
hs_alt_interface_desc.AddEndpoint(hs_interrupt_out_endpoint_desc)
+
+
class GadgetTest(unittest.TestCase):
  """Unit tests for gadget.Gadget using a mocked gadget chip.

  Control transfers are exercised through ControlRead/ControlWrite with raw
  setup-packet fields (bmRequestType, bRequest, wValue, wIndex, length/data).
  """

  def test_get_descriptors(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    self.assertEquals(g.GetDeviceDescriptor(), device_desc)
    self.assertEquals(g.GetFullSpeedConfigurationDescriptor(), fs_config_desc)
    self.assertEquals(g.GetHighSpeedConfigurationDescriptor(), hs_config_desc)
    with self.assertRaisesRegexp(RuntimeError, 'not connected'):
      g.GetConfigurationDescriptor()

  def test_connect_full_speed(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    g.Connected(mock.Mock(), usb_constants.Speed.FULL)
    self.assertTrue(g.IsConnected())
    self.assertEquals(g.GetSpeed(), usb_constants.Speed.FULL)
    self.assertEquals(g.GetConfigurationDescriptor(), fs_config_desc)
    g.Disconnected()
    self.assertFalse(g.IsConnected())

  def test_connect_high_speed(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    g.Connected(mock.Mock(), usb_constants.Speed.HIGH)
    self.assertTrue(g.IsConnected())
    self.assertEquals(g.GetSpeed(), usb_constants.Speed.HIGH)
    self.assertEquals(g.GetConfigurationDescriptor(), hs_config_desc)
    g.Disconnected()
    self.assertFalse(g.IsConnected())

  def test_string_index_out_of_range(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    with self.assertRaisesRegexp(ValueError, 'index out of range'):
      g.AddStringDescriptor(0, 'Hello world!')

  def test_language_id_out_of_range(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    with self.assertRaisesRegexp(ValueError, 'language code out of range'):
      g.AddStringDescriptor(1, 'Hello world!', lang=-1)

  def test_get_languages(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    g.AddStringDescriptor(1, 'Hello world!')
    # GET_DESCRIPTOR(String, index 0) returns the language ID list.
    desc = g.ControlRead(0x80, 6, 0x0300, 0, 255)
    self.assertEquals(desc, '\x04\x03\x09\x04')

  def test_get_string_descriptor(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    g.AddStringDescriptor(1, 'Hello world!')
    desc = g.ControlRead(0x80, 6, 0x0301, 0x0409, 255)
    self.assertEquals(desc, '\x1A\x03H\0e\0l\0l\0o\0 \0w\0o\0r\0l\0d\0!\0')

  def test_get_missing_string_descriptor(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    g.AddStringDescriptor(1, 'Hello world!')
    desc = g.ControlRead(0x80, 6, 0x0302, 0x0409, 255)
    self.assertEquals(desc, None)

  def test_get_missing_string_language(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    g.AddStringDescriptor(1, 'Hello world!')
    desc = g.ControlRead(0x80, 6, 0x0301, 0x040C, 255)
    self.assertEquals(desc, None)

  def test_class_and_vendor_transfers(self):
    # The base Gadget stalls all class- and vendor-specific transfers.
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    self.assertIsNone(g.ControlRead(0xA0, 0, 0, 0, 0))
    self.assertIsNone(g.ControlRead(0xC0, 0, 0, 0, 0))
    self.assertIsNone(g.ControlWrite(0x20, 0, 0, 0, ''))
    self.assertIsNone(g.ControlWrite(0x40, 0, 0, 0, ''))

  def test_set_configuration(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    chip = mock.Mock()
    g.Connected(chip, usb_constants.Speed.HIGH)
    g.ControlWrite(0, 9, 1, 0, 0)
    chip.StartEndpoint.assert_has_calls([
        mock.call(hs_bulk_in_endpoint_desc),
        mock.call(hs_bulk_out_endpoint_desc)
    ])

  def test_set_configuration_zero(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    chip = mock.Mock()
    g.Connected(chip, usb_constants.Speed.HIGH)
    g.ControlWrite(0, 9, 1, 0, 0)
    chip.StartEndpoint.reset_mock()
    g.ControlWrite(0, 9, 0, 0, 0)
    chip.StopEndpoint.assert_has_calls([
        mock.call(0x01),
        mock.call(0x81)
    ])

  def test_set_bad_configuration(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    g.Connected(mock.Mock(), usb_constants.Speed.HIGH)
    self.assertIsNone(g.ControlWrite(0, 9, 2, 0, 0))

  def test_set_interface(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    chip = mock.Mock()
    g.Connected(chip, usb_constants.Speed.HIGH)
    self.assertTrue(g.ControlWrite(0, 9, 1, 0, 0))
    chip.reset_mock()
    # Switching to alternate setting 1 swaps bulk for interrupt endpoints.
    self.assertTrue(g.ControlWrite(1, 11, 1, 0, 0))
    chip.StopEndpoint.assert_has_calls([
        mock.call(0x01),
        mock.call(0x81)
    ])
    chip.StartEndpoint.assert_has_calls([
        mock.call(hs_interrupt_in_endpoint_desc),
        mock.call(hs_interrupt_out_endpoint_desc)
    ])
    chip.reset_mock()
    self.assertTrue(g.ControlWrite(1, 11, 0, 0, 0))
    chip.StopEndpoint.assert_has_calls([
        mock.call(0x01),
        mock.call(0x81)
    ])
    chip.StartEndpoint.assert_has_calls([
        mock.call(hs_bulk_in_endpoint_desc),
        mock.call(hs_bulk_out_endpoint_desc)
    ])

  def test_set_bad_interface(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    g.Connected(mock.Mock(), usb_constants.Speed.HIGH)
    self.assertTrue(g.ControlWrite(0, 9, 1, 0, 0))
    self.assertIsNone(g.ControlWrite(1, 11, 0, 1, 0))

  def test_send_packet(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    chip = mock.Mock()
    g.Connected(chip, usb_constants.Speed.HIGH)
    g.SendPacket(0x81, 'Hello world!')
    chip.SendPacket.assert_called_once_with(0x81, 'Hello world!')

  def test_send_packet_disconnected(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    with self.assertRaisesRegexp(RuntimeError, 'not connected'):
      g.SendPacket(0x81, 'Hello world!')
    g.Connected(mock.Mock(), usb_constants.Speed.HIGH)
    g.SendPacket(0x81, 'Hello world!')
    g.Disconnected()
    with self.assertRaisesRegexp(RuntimeError, 'not connected'):
      g.SendPacket(0x81, 'Hello world!')

  def test_send_invalid_endpoint(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    chip = mock.Mock()
    g.Connected(chip, usb_constants.Speed.HIGH)
    with self.assertRaisesRegexp(ValueError, 'non-input endpoint'):
      g.SendPacket(0x01, 'Hello world!')

  def test_receive_packet(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    self.assertIsNone(g.ReceivePacket(0x01, 'Hello world!'))

  def test_halt_endpoint(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    chip = mock.Mock()
    g.Connected(chip, usb_constants.Speed.HIGH)
    g.HaltEndpoint(0x01)
    chip.HaltEndpoint.assert_called_once_with(0x01)

  def test_get_microsoft_os_string_descriptor(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    g.EnableMicrosoftOSDescriptorsV1(vendor_code=0x42)
    os_string_descriptor = g.ControlRead(0x80,
                                         usb_constants.Request.GET_DESCRIPTOR,
                                         0x03EE,
                                         0x0000,
                                         0x12)
    self.assertEqual(os_string_descriptor,
                     "\x12\x03M\x00S\x00F\x00T\x001\x000\x000\x00\x42\x00")

  def test_get_microsoft_os_compat_id_descriptor(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    g.EnableMicrosoftOSDescriptorsV1(vendor_code=0x42)
    g.SetMicrosoftCompatId(0, 'WINUSB')
    chip = mock.Mock()
    g.Connected(chip, usb_constants.Speed.HIGH)

    expected_compatid_header = \
        "\x28\x00\x00\x00\x00\x01\x04\x00\x01\0\0\0\0\0\0\0"
    compatid_header = g.ControlRead(0xC0, 0x42, 0x0000, 0x0004, 0x0010)
    self.assertEqual(compatid_header, expected_compatid_header)

    compatid_descriptor = g.ControlRead(0xC0, 0x42, 0x0000, 0x0004, 0x0028)
    self.assertEqual(compatid_descriptor,
                     expected_compatid_header +
                     "\x00\x01WINUSB\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0\0")

  def test_get_bos_descriptor(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    self.assertIsNone(g.ControlRead(0x80, 0x06, 0x0F00, 0x0000, 5))

    container_id = uuid.uuid4()
    g.AddDeviceCapabilityDescriptor(usb_descriptors.ContainerIdDescriptor(
        ContainerID=container_id.bytes_le))
    bos_descriptor_header = g.ControlRead(0x80, 0x06, 0x0F00, 0x0000, 5)
    self.assertEquals('\x05\x0F\x19\x00\x01', bos_descriptor_header)

    bos_descriptor = g.ControlRead(0x80, 0x06, 0x0F00, 0x0000, 25)
    self.assertEquals(
        '\x05\x0F\x19\x00\x01\x14\x10\x04\x00' + container_id.bytes_le,
        bos_descriptor)

  def test_get_microsoft_os_20_descriptor_set(self):
    g = gadget.Gadget(device_desc, fs_config_desc, hs_config_desc)
    g.EnableMicrosoftOSDescriptorsV2(vendor_code=0x42)
    g.SetMicrosoftCompatId(0, 'WINUSB')
    chip = mock.Mock()
    g.Connected(chip, usb_constants.Speed.HIGH)

    bos_descriptor = g.ControlRead(0x80, 0x06, 0x0F00, 0x0000, 33)
    self.assertEquals(
        '\x05\x0F\x21\x00\x01' +
        '\x1C\x10\x05\x00' +
        uuid.UUID('{D8DD60DF-4589-4CC7-9CD2-659D9E648A9F}').bytes_le +
        '\x00\x00\x03\x06\x2E\x00\x42\x00',
        bos_descriptor)

    descriptor_set = g.ControlRead(0xC0, 0x42, 0x0000, 0x0007, 48)
    self.assertEquals(
        '\x0A\x00\x00\x00\x00\x00\x03\x06\x2E\x00' +
        '\x08\x00\x01\x00\x00\x00\x24\x00' +
        '\x08\x00\x02\x00\x00\x00\x1C\x00' +
        '\x14\x00\x03\x00WINUSB\0\0\0\0\0\0\0\0\0\0',
        descriptor_set)
+
+
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
diff --git a/chromium/tools/usb_gadget/hid_constants.py b/chromium/tools/usb_gadget/hid_constants.py
new file mode 100644
index 00000000000..ce8249c8d04
--- /dev/null
+++ b/chromium/tools/usb_gadget/hid_constants.py
@@ -0,0 +1,140 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""HID constant definitions.
+"""
+
+import usb_constants
+
+
class DescriptorType(object):
  """Class descriptors.

  Each type is the USB class-type code combined with an HID-specific index.

  See Device Class Definition for Human Interface Devices (HID) Version 1.11
  section 7.1.
  """
  HID = usb_constants.Type.CLASS | 0x01
  REPORT = usb_constants.Type.CLASS | 0x02
  PHYSICAL = usb_constants.Type.CLASS | 0x03
+
+
class Scope(object):
  """Item scope.

  See Device Class Definition for Human Interface Devices (HID) Version 1.11
  section 6.2.2.2.
  """
  MAIN, GLOBAL, LOCAL = range(3)
+
+
class CollectionType(object):
  """Collection types.

  See Device Class Definition for Human Interface Devices (HID) Version 1.11
  section 6.2.2.4.
  """
  (PHYSICAL, APPLICATION, LOGICAL, REPORT, NAMED_ARRAY, USAGE_SWITCH,
   USAGE_MODIFIER) = range(7)
+
+
class Request(object):
  """Class specific requests.

  See Device Class Definition for Human Interface Devices (HID) Version 1.11
  section 7.2.
  """
  # GET requests (device to host).
  GET_REPORT = 0x01
  GET_IDLE = 0x02
  GET_PROTOCOL = 0x03
  # SET requests (host to device).
  SET_REPORT = 0x09
  SET_IDLE = 0x0A
  SET_PROTOCOL = 0x0B
+
+
class ReportType(object):
  """Report types.

  See Device Class Definition for Human Interface Devices (HID) Version 1.11
  section 7.2.1.
  """
  INPUT, OUTPUT, FEATURE = range(1, 4)
+
+
class ModifierKey(object):
  """Keyboard modifier key report values.

  Each modifier occupies one bit of the report's modifier byte.

  See Device Class Definition for Human Interface Devices (HID) Version 1.11
  section 8.3 and HID Usage Tables Version 1.1 Table 12.
  """
  L_CTRL = 1 << 0
  L_SHIFT = 1 << 1
  L_ALT = 1 << 2
  L_GUI = 1 << 3
  R_CTRL = 1 << 4
  R_SHIFT = 1 << 5
  R_ALT = 1 << 6
  R_GUI = 1 << 7
+
+
class LED(object):
  """Keyboard LED report values.

  Each LED occupies one bit of the output report.

  See Device Class Definition for Human Interface Devices (HID) Version 1.11
  section B.1 and HID Usage Tables Version 1.1 Table 13.
  """
  NUM_LOCK = 1 << 0
  CAPS_LOCK = 1 << 1
  SCROLL_LOCK = 1 << 2
  COMPOSE = 1 << 3
  KANA = 1 << 4
+
+
class Mouse(object):
  """Mouse button report values.

  Each button occupies one bit of the report's button byte.

  See Device Class Definition for Human Interface Devices (HID) Version 1.11
  section B.2.
  """
  BUTTON_1 = 1 << 0
  BUTTON_2 = 1 << 1
  BUTTON_3 = 1 << 2
+
+
# Map of key labels to HID keyboard usage IDs (HID Usage Tables 1.1,
# Table 12).
KEY_CODES = {}
# Letters a-z map to usages 4-29.
for key, code in zip(xrange(ord('a'), ord('z') + 1), xrange(4, 30)):
  KEY_CODES[chr(key)] = code
# Digits 1-9 map to usages 30-38 ('0', usage 39, is not mapped here).
for key, code in zip(xrange(ord('1'), ord('9') + 1), xrange(30, 39)):
  KEY_CODES[chr(key)] = code
for key, code in zip(['Enter', 'Esc', 'Backspace', 'Tab', ' '], xrange(40, 45)):
  KEY_CODES[key] = code
for key, code in zip('-=[]\\', xrange(45, 50)):
  KEY_CODES[key] = code
for key, code in zip(';\'`,./', xrange(51, 57)):
  KEY_CODES[key] = code
# Named keys map to usages 57-83.  The list must contain exactly 27 names so
# that every code through NumLock (83) is assigned; a stray duplicate
# 'PageDown' previously shifted Delete/End/PageDown and the arrow keys by one
# and dropped NumLock entirely (zip truncates to the shorter sequence).
for key, code in zip(
    ['CapsLock', 'F1', 'F2', 'F3', 'F4', 'F5', 'F6', 'F7', 'F8', 'F9', 'F10',
     'F11', 'F12', 'PrintScreen', 'ScrollLock', 'Pause', 'Insert', 'Home',
     'PageUp', 'Delete', 'End', 'PageDown', 'RightArrow',
     'LeftArrow', 'DownArrow', 'UpArrow', 'NumLock'],
    xrange(57, 84)):
  KEY_CODES[key] = code
+
# Map of shifted (upper-case / symbol) key labels to HID keyboard usage IDs.
# A shifted character shares the usage code of the key that produces it.
SHIFT_KEY_CODES = {}
# Letters A-Z map to usages 4-29, like their lower-case counterparts.
for key, code in zip(xrange(ord('A'), ord('Z') + 1), xrange(4, 30)):
  SHIFT_KEY_CODES[chr(key)] = code
# Shifted digit-row symbols map to usages 30-39.
for key, code in zip('!@#$%^&*()', xrange(30, 40)):
  SHIFT_KEY_CODES[key] = code
for key, code in zip('_+{}|', xrange(45, 50)):
  SHIFT_KEY_CODES[key] = code
for key, code in zip(':"~<>?', xrange(51, 57)):
  SHIFT_KEY_CODES[key] = code
diff --git a/chromium/tools/usb_gadget/hid_descriptors.py b/chromium/tools/usb_gadget/hid_descriptors.py
new file mode 100644
index 00000000000..ad0d5807117
--- /dev/null
+++ b/chromium/tools/usb_gadget/hid_descriptors.py
@@ -0,0 +1,159 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Utility functions for constructing HID report descriptors.
+"""
+
+import struct
+
+import hid_constants
+
+
def ReportDescriptor(*items):
  """Concatenate encoded HID items into a single report descriptor string."""
  return ''.join(items)
+
+
+def _PackItem(tag, typ, value=0, force_length=0):
+ """Pack a multibyte value.
+
+ See Device Class Definition for Human Interface Devices (HID) Version 1.11
+ section 5.8.
+
+ Args:
+ tag: Item tag.
+ typ: Item type.
+ value: Item value.
+ force_length: Force packing to a specific width.
+
+ Returns:
+ Packed string.
+ """
+ if value == 0 and force_length <= 0:
+ return struct.pack('<B', tag << 4 | typ << 2 | 0)
+ elif value <= 0xff and force_length <= 1:
+ return struct.pack('<BB', tag << 4 | typ << 2 | 1, value)
+ elif value <= 0xffff and force_length <= 2:
+ return struct.pack('<BH', tag << 4 | typ << 2 | 2, value)
+ elif value <= 0xffffffff and force_length <= 4:
+ return struct.pack('<BI', tag << 4 | typ << 2 | 3, value)
+ else:
+ raise NotImplementedError('Long items are not implemented.')
+
+
def _DefineItem(name, tag, typ):
  """Create a function which encodes a HID item.

  Args:
    name: Function name.
    tag: Item tag.
    typ: Item type.

  Returns:
    A function which encodes a HID item of the given type.
  """
  assert 0 <= tag <= 0xF
  assert 0 <= typ <= 3

  def _Encode(value=0, force_length=0):
    return _PackItem(tag, typ, value, force_length)

  # Expose the item's conventional name on the generated encoder.
  _Encode.__name__ = name
  return _Encode
+
+
def _DefineMainItem(name, tag):
  """Create a function which encodes a HID Main item.

  See Device Class Definition for Human Interface Devices (HID) Version 1.11
  section 6.2.2.4.

  Args:
    name: Function name.
    tag: Item tag.

  Returns:
    A function which encodes a HID item of the given type.

  Raises:
    ValueError: If the tag value is out of range.
  """
  assert 0 <= tag <= 0xF

  def _EncodeMain(*properties):
    # Each property is a (bit, is_set) pair; OR the set bits together.
    bits = 0
    for bit, is_set in properties:
      bits |= (1 << bit) if is_set else 0
    return _PackItem(tag, hid_constants.Scope.MAIN, bits, force_length=1)

  _EncodeMain.__name__ = name
  return _EncodeMain
+
# Main items (HID 1.11 section 6.2.2.4).
Input = _DefineMainItem('Input', 8)
Output = _DefineMainItem('Output', 9)
Feature = _DefineMainItem('Feature', 11)

# Input, Output and Feature Item Properties
#
# Each property is a (bit, is_set) pair consumed by the Main item encoders
# above; complementary pairs share a bit with opposite is_set values.
#
# See Device Class Definition for Human Interface Devices (HID) Version 1.11
# section 6.2.2.5.
Data = (0, False)
Constant = (0, True)
Array = (1, False)
Variable = (1, True)
Absolute = (2, False)
Relative = (2, True)
NoWrap = (3, False)
Wrap = (3, True)
Linear = (4, False)
NonLinear = (4, True)
PreferredState = (5, False)
NoPreferred = (5, True)
NoNullPosition = (6, False)
NullState = (6, True)
NonVolatile = (7, False)
Volatile = (7, True)
BitField = (8, False)
BufferedBytes = (8, True)
+
+
def Collection(typ, *items):
  """Wrap encoded *items* in a Collection / End Collection item pair."""
  # 0xA1: Collection item with a one-byte type operand; 0xC0: End Collection.
  start = struct.pack('<BB', 0xA1, typ)
  end = struct.pack('<B', 0xC0)
  return start + ''.join(items) + end
+
# Global Items
#
# Global item state applies to every following Main item until changed.
#
# See Device Class Definition for Human Interface Devices (HID) Version 1.11
# section 6.2.2.7.
UsagePage = _DefineItem('UsagePage', 0, hid_constants.Scope.GLOBAL)
LogicalMinimum = _DefineItem('LogicalMinimum', 1, hid_constants.Scope.GLOBAL)
LogicalMaximum = _DefineItem('LogicalMaximum', 2, hid_constants.Scope.GLOBAL)
PhysicalMinimum = _DefineItem('PhysicalMinimum', 3, hid_constants.Scope.GLOBAL)
PhysicalMaximum = _DefineItem('PhysicalMaximum', 4, hid_constants.Scope.GLOBAL)
UnitExponent = _DefineItem('UnitExponent', 5, hid_constants.Scope.GLOBAL)
Unit = _DefineItem('Unit', 6, hid_constants.Scope.GLOBAL)
ReportSize = _DefineItem('ReportSize', 7, hid_constants.Scope.GLOBAL)
ReportID = _DefineItem('ReportID', 8, hid_constants.Scope.GLOBAL)
ReportCount = _DefineItem('ReportCount', 9, hid_constants.Scope.GLOBAL)
Push = _DefineItem('Push', 10, hid_constants.Scope.GLOBAL)
Pop = _DefineItem('Pop', 11, hid_constants.Scope.GLOBAL)

# Local Items
#
# Local item state applies only to the next Main item.  (Tag 6 is reserved,
# hence the gap between DesignatorMaximum and StringIndex.)
#
# See Device Class Definition for Human Interface Devices (HID) Version 1.11
# section 6.2.2.8.
Usage = _DefineItem('Usage', 0, hid_constants.Scope.LOCAL)
UsageMinimum = _DefineItem('UsageMinimum', 1, hid_constants.Scope.LOCAL)
UsageMaximum = _DefineItem('UsageMaximum', 2, hid_constants.Scope.LOCAL)
DesignatorIndex = _DefineItem('DesignatorIndex', 3, hid_constants.Scope.LOCAL)
DesignatorMinimum = _DefineItem('DesignatorMinimum', 4,
                                hid_constants.Scope.LOCAL)
DesignatorMaximum = _DefineItem('DesignatorMaximum', 5,
                                hid_constants.Scope.LOCAL)
StringIndex = _DefineItem('StringIndex', 7, hid_constants.Scope.LOCAL)
StringMinimum = _DefineItem('StringMinimum', 8, hid_constants.Scope.LOCAL)
StringMaximum = _DefineItem('StringMaximum', 9, hid_constants.Scope.LOCAL)
Delimiter = _DefineItem('Delimiter', 10, hid_constants.Scope.LOCAL)
diff --git a/chromium/tools/usb_gadget/hid_descriptors_test.py b/chromium/tools/usb_gadget/hid_descriptors_test.py
new file mode 100755
index 00000000000..62d51d401b6
--- /dev/null
+++ b/chromium/tools/usb_gadget/hid_descriptors_test.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import hid_descriptors
+import keyboard_gadget
+import mouse_gadget
+
+
class HidTest(unittest.TestCase):
  """Unit tests for the hid_descriptors item encoders.

  The keyboard and mouse cases check the generated report descriptors
  byte-for-byte against the examples from the HID specification appendices.
  """

  def test_keyboard_example(self):
    expected = ''.join(chr(x) for x in [
        0x05, 0x01, 0x09, 0x06, 0xA1, 0x01, 0x05, 0x07, 0x19, 0xE0, 0x29,
        0xE7, 0x15, 0x00, 0x25, 0x01, 0x75, 0x01, 0x95, 0x08, 0x81, 0x02,
        0x95, 0x01, 0x75, 0x08, 0x81, 0x01, 0x95, 0x05, 0x75, 0x01, 0x05,
        0x08, 0x19, 0x01, 0x29, 0x05, 0x91, 0x02, 0x95, 0x01, 0x75, 0x03,
        0x91, 0x01, 0x95, 0x06, 0x75, 0x08, 0x15, 0x00, 0x25, 0x65, 0x05,
        0x07, 0x19, 0x00, 0x29, 0x65, 0x81, 0x00, 0xC0
    ])
    self.assertEquals(keyboard_gadget.KeyboardFeature.REPORT_DESC, expected)

  def test_mouse_example(self):
    expected = ''.join(chr(x) for x in [
        0x05, 0x01, 0x09, 0x02, 0xA1, 0x01, 0x09, 0x01, 0xA1, 0x00, 0x05, 0x09,
        0x19, 0x01, 0x29, 0x03, 0x15, 0x00, 0x25, 0x01, 0x95, 0x03, 0x75, 0x01,
        0x81, 0x02, 0x95, 0x01, 0x75, 0x05, 0x81, 0x01, 0x05, 0x01, 0x09, 0x30,
        0x09, 0x31, 0x15, 0x81, 0x25, 0x7F, 0x75, 0x08, 0x95, 0x02, 0x81, 0x06,
        0xC0, 0xC0
    ])
    self.assertEquals(mouse_gadget.MouseFeature.REPORT_DESC, expected)

  def test_tag(self):
    # Zero-value items encode with no data bytes.
    self.assertEquals(hid_descriptors.Push(), '\xa4')

  def test_2byte_tag(self):
    self.assertEquals(hid_descriptors.LogicalMaximum(0xFF00), '\x26\x00\xFF')

  def test_4byte_tag(self):
    self.assertEquals(hid_descriptors.LogicalMaximum(0xFF884400),
                      '\x27\x00\x44\x88\xFF')

  def test_long_tag(self):
    # Values wider than 4 bytes would need a Long item, which is unsupported.
    with self.assertRaises(NotImplementedError):
      hid_descriptors.LogicalMaximum(0xFFFFFFFFFFFFFFFF)
+
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
diff --git a/chromium/tools/usb_gadget/hid_echo_gadget.py b/chromium/tools/usb_gadget/hid_echo_gadget.py
new file mode 100644
index 00000000000..ac677bb9108
--- /dev/null
+++ b/chromium/tools/usb_gadget/hid_echo_gadget.py
@@ -0,0 +1,105 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A HID-class echo device.
+
+This module provides a HID feature and HID device that can be used as an
+echo test for HID drivers. The device exposes vendor-specific input, output
+and feature usages that transmit 8 bytes of data. Data written sent as an
+output report is echoed as an input report. The value of the feature report
+can be written and read with control transfers.
+"""
+
+import struct
+
+import hid_constants
+import hid_descriptors
+import hid_gadget
+import usb_constants
+
+
+class EchoFeature(hid_gadget.HidFeature):
+
+ REPORT_DESC = hid_descriptors.ReportDescriptor(
+ hid_descriptors.UsagePage(0xFF00), # Vendor Defined
+ hid_descriptors.Usage(0),
+ hid_descriptors.Collection(
+ hid_constants.CollectionType.APPLICATION,
+ hid_descriptors.LogicalMinimum(0, force_length=1),
+ hid_descriptors.LogicalMaximum(255, force_length=2),
+ hid_descriptors.ReportSize(8),
+ hid_descriptors.ReportCount(8),
+ hid_descriptors.Usage(0),
+ hid_descriptors.Input(hid_descriptors.Data,
+ hid_descriptors.Variable,
+ hid_descriptors.Absolute),
+ hid_descriptors.Usage(0),
+ hid_descriptors.Output(hid_descriptors.Data,
+ hid_descriptors.Variable,
+ hid_descriptors.Absolute),
+ hid_descriptors.Usage(0),
+ hid_descriptors.Feature(hid_descriptors.Data,
+ hid_descriptors.Variable,
+ hid_descriptors.Absolute)
+ )
+ )
+
+ def __init__(self):
+ super(EchoFeature, self).__init__()
+ self._input_output_report = 0
+ self._feature_report = 0
+
+ def SetInputReport(self, data):
+ self._input_output_report, = struct.unpack('<Q', data)
+ self.SendReport(struct.pack('<Q', self._input_output_report))
+ return True
+
+ def SetOutputReport(self, data):
+ self._input_output_report, = struct.unpack('<Q', data)
+ self.SendReport(struct.pack('<Q', self._input_output_report))
+ return True
+
+ def SetFeatureReport(self, data):
+ self._feature_report, = struct.unpack('<Q', data)
+ return True
+
+ def GetInputReport(self):
+ return struct.pack('<Q', self._input_output_report)
+
+ def GetOutputReport(self):
+ return struct.pack('<Q', self._input_output_report)
+
+ def GetFeatureReport(self):
+ return struct.pack('<Q', self._feature_report)
+
+
+class EchoGadget(hid_gadget.HidGadget):
+
+ def __init__(self):
+ self._feature = EchoFeature()
+ super(EchoGadget, self).__init__(
+ report_desc=EchoFeature.REPORT_DESC,
+ features={0: self._feature},
+ packet_size=8,
+ interval_ms=1,
+ out_endpoint=True,
+ vendor_id=usb_constants.VendorID.GOOGLE,
+ product_id=usb_constants.ProductID.GOOGLE_HID_ECHO_GADGET,
+ device_version=0x0100)
+ self.AddStringDescriptor(1, 'Google Inc.')
+ self.AddStringDescriptor(2, 'HID Echo Gadget')
+
+
+def RegisterHandlers():
+ from tornado import web
+
+ class WebConfigureHandler(web.RequestHandler):
+
+ def post(self):
+ server.SwitchGadget(EchoGadget())
+
+ import server
+ server.app.add_handlers('.*$', [
+ (r'/hid_echo/configure', WebConfigureHandler),
+ ])
diff --git a/chromium/tools/usb_gadget/hid_gadget.py b/chromium/tools/usb_gadget/hid_gadget.py
new file mode 100644
index 00000000000..0b632a1ba94
--- /dev/null
+++ b/chromium/tools/usb_gadget/hid_gadget.py
@@ -0,0 +1,432 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Human Interface Device gadget module.
+
+This gadget emulates a USB Human Interface Device. Multiple logical components
+of a device can be composed together as separate "features" where each has its
+own Report ID and will be called upon to answer get/set input/output/feature
+report requests as necessary.
+"""
+
+import math
+import struct
+import uuid
+
+import composite_gadget
+import hid_constants
+import usb_constants
+import usb_descriptors
+
+
+class HidCompositeFeature(composite_gadget.CompositeFeature):
+ """Generic HID feature for a composite device.
+ """
+
+ def __init__(self, report_desc, features,
+ packet_size=64, interval_ms=10, interface_number=0,
+ interface_string=0,
+ in_endpoint=0x81, out_endpoint=0x01):
+ """Create a composite device feature implementing the HID protocol.
+
+ Args:
+ report_desc: HID report descriptor.
+ features: Map between Report IDs and HidFeature objects to handle them.
+ packet_size: Maximum interrupt packet size.
+ interval_ms: Interrupt transfer interval in milliseconds.
+ interface_number: Interface number for this feature (default 0).
+ in_endpoint: Endpoint number for the IN endpoint (default 0x81).
+ out_endpoint: Endpoint number for the OUT endpoint or None to disable
+ the endpoint (default 0x01).
+
+ Raises:
+ ValueError: If any of the parameters are out of range.
+ """
+ fs_interface_desc = usb_descriptors.InterfaceDescriptor(
+ bInterfaceNumber=interface_number,
+ bInterfaceClass=usb_constants.DeviceClass.HID,
+ bInterfaceSubClass=0, # Non-bootable.
+ bInterfaceProtocol=0, # None.
+ iInterface=interface_string,
+ )
+
+ hs_interface_desc = usb_descriptors.InterfaceDescriptor(
+ bInterfaceNumber=interface_number,
+ bInterfaceClass=usb_constants.DeviceClass.HID,
+ bInterfaceSubClass=0, # Non-bootable.
+ bInterfaceProtocol=0, # None.
+ iInterface=interface_string,
+ )
+
+ hid_desc = usb_descriptors.HidDescriptor()
+ hid_desc.AddDescriptor(hid_constants.DescriptorType.REPORT,
+ len(report_desc))
+ fs_interface_desc.Add(hid_desc)
+ hs_interface_desc.Add(hid_desc)
+
+ fs_interval = math.ceil(math.log(interval_ms, 2)) + 1
+ if fs_interval < 1 or fs_interval > 16:
+ raise ValueError('Full speed interval out of range: {} ({} ms)'
+ .format(fs_interval, interval_ms))
+
+ fs_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
+ bEndpointAddress=in_endpoint,
+ bmAttributes=usb_constants.TransferType.INTERRUPT,
+ wMaxPacketSize=packet_size,
+ bInterval=fs_interval
+ ))
+
+ hs_interval = math.ceil(math.log(interval_ms, 2)) + 4
+ if hs_interval < 1 or hs_interval > 16:
+ raise ValueError('High speed interval out of range: {} ({} ms)'
+ .format(hs_interval, interval_ms))
+
+ hs_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
+ bEndpointAddress=in_endpoint,
+ bmAttributes=usb_constants.TransferType.INTERRUPT,
+ wMaxPacketSize=packet_size,
+ bInterval=hs_interval
+ ))
+
+ if out_endpoint is not None:
+ fs_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
+ bEndpointAddress=out_endpoint,
+ bmAttributes=usb_constants.TransferType.INTERRUPT,
+ wMaxPacketSize=packet_size,
+ bInterval=fs_interval
+ ))
+ hs_interface_desc.AddEndpoint(usb_descriptors.EndpointDescriptor(
+ bEndpointAddress=out_endpoint,
+ bmAttributes=usb_constants.TransferType.INTERRUPT,
+ wMaxPacketSize=packet_size,
+ bInterval=hs_interval
+ ))
+
+ super(HidCompositeFeature, self).__init__(
+ [fs_interface_desc], [hs_interface_desc])
+ self._report_desc = report_desc
+ self._features = features
+ self._interface_number = interface_number
+ self._in_endpoint = in_endpoint
+ self._out_endpoint = out_endpoint
+
+ def Connected(self, gadget):
+ super(HidCompositeFeature, self).Connected(gadget)
+ for report_id, feature in self._features.iteritems():
+ feature.Connected(self, report_id)
+
+ def Disconnected(self):
+ super(HidCompositeFeature, self).Disconnected()
+ for feature in self._features.itervalues():
+ feature.Disconnected()
+
+ def StandardControlRead(self, recipient, request, value, index, length):
+ if recipient == usb_constants.Recipient.INTERFACE:
+ if index == self._interface_number:
+ desc_type = value >> 8
+ desc_index = value & 0xff
+ if desc_type == hid_constants.DescriptorType.REPORT:
+ if desc_index == 0:
+ return self._report_desc[:length]
+
+ return super(HidCompositeFeature, self).StandardControlRead(
+ recipient, request, value, index, length)
+
+ def ClassControlRead(self, recipient, request, value, index, length):
+ """Handle class-specific control requests.
+
+ See Device Class Definition for Human Interface Devices (HID) Version 1.11
+ section 7.2.
+
+ Args:
+ recipient: Request recipient (device, interface, endpoint, etc.)
+ request: bRequest field of the setup packet.
+ value: wValue field of the setup packet.
+ index: wIndex field of the setup packet.
+ length: Maximum amount of data the host expects the device to return.
+
+ Returns:
+ A buffer to return to the USB host with len <= length on success or
+ None to stall the pipe.
+ """
+ if recipient != usb_constants.Recipient.INTERFACE:
+ return None
+ if index != self._interface_number:
+ return None
+
+ if request == hid_constants.Request.GET_REPORT:
+ report_type, report_id = value >> 8, value & 0xFF
+ print ('GetReport(type={}, id={}, length={})'
+ .format(report_type, report_id, length))
+ return self.GetReport(report_type, report_id, length)
+
+ def ClassControlWrite(self, recipient, request, value, index, data):
+ """Handle class-specific control requests.
+
+ See Device Class Definition for Human Interface Devices (HID) Version 1.11
+ section 7.2.
+
+ Args:
+ recipient: Request recipient (device, interface, endpoint, etc.)
+ request: bRequest field of the setup packet.
+ value: wValue field of the setup packet.
+ index: wIndex field of the setup packet.
+ data: Data stage of the request.
+
+ Returns:
+ True on success, None to stall the pipe.
+ """
+ if recipient != usb_constants.Recipient.INTERFACE:
+ return None
+ if index != self._interface_number:
+ return None
+
+ if request == hid_constants.Request.SET_REPORT:
+ report_type, report_id = value >> 8, value & 0xFF
+ print('SetReport(type={}, id={}, length={})'
+ .format(report_type, report_id, len(data)))
+ return self.SetReport(report_type, report_id, data)
+ elif request == hid_constants.Request.SET_IDLE:
+ duration, report_id = value >> 8, value & 0xFF
+ print('SetIdle(duration={}, report={})'
+ .format(duration, report_id))
+ return True
+
+ def GetReport(self, report_type, report_id, length):
+ """Handle GET_REPORT requests.
+
+ See Device Class Definition for Human Interface Devices (HID) Version 1.11
+ section 7.2.1.
+
+ Args:
+ report_type: Requested report type.
+ report_id: Requested report ID.
+ length: Maximum amount of data the host expects the device to return.
+
+ Returns:
+ A buffer to return to the USB host with len <= length on success or
+ None to stall the pipe.
+ """
+ feature = self._features.get(report_id, None)
+ if feature is None:
+ return None
+
+ if report_type == hid_constants.ReportType.INPUT:
+ return feature.GetInputReport()[:length]
+ elif report_type == hid_constants.ReportType.OUTPUT:
+ return feature.GetOutputReport()[:length]
+ elif report_type == hid_constants.ReportType.FEATURE:
+ return feature.GetFeatureReport()[:length]
+
+ def SetReport(self, report_type, report_id, data):
+ """Handle SET_REPORT requests.
+
+ See Device Class Definition for Human Interface Devices (HID) Version 1.11
+ section 7.2.2.
+
+ Args:
+ report_type: Report type.
+ report_id: Report ID.
+ data: Report data.
+
+ Returns:
+ True on success, None to stall the pipe.
+ """
+ feature = self._features.get(report_id, None)
+ if feature is None:
+ return None
+
+ if report_type == hid_constants.ReportType.INPUT:
+ return feature.SetInputReport(data)
+ elif report_type == hid_constants.ReportType.OUTPUT:
+ return feature.SetOutputReport(data)
+ elif report_type == hid_constants.ReportType.FEATURE:
+ return feature.SetFeatureReport(data)
+
+ def SendReport(self, report_id, data):
+ """Send a HID report.
+
+ See Device Class Definition for Human Interface Devices (HID) Version 1.11
+ section 8.
+
+ Args:
+ report_id: Report ID associated with the data.
+ data: Contents of the report.
+ """
+ if report_id == 0:
+ self.SendPacket(self._in_endpoint, data)
+ else:
+ self.SendPacket(self._in_endpoint, struct.pack('B', report_id) + data)
+
+ def ReceivePacket(self, endpoint, data):
+ """Dispatch a report to the appropriate feature.
+
+ See Device Class Definition for Human Interface Devices (HID) Version 1.11
+ section 8.
+
+ Args:
+ endpoint: Incoming endpoint (must be the Interrupt OUT pipe).
+ data: Interrupt packet data.
+ """
+ assert endpoint == self._out_endpoint
+
+ if 0 in self._features:
+ self._features[0].SetOutputReport(data)
+ elif len(data) >= 1:
+ report_id, = struct.unpack('B', data[0])
+ feature = self._features.get(report_id, None)
+ if feature is None or feature.SetOutputReport(data[1:]) is None:
+ self.HaltEndpoint(endpoint)
+
+
+class HidFeature(object):
+ """Represents a component of a HID gadget.
+
+ A "feature" produces and consumes reports with a particular Report ID. For
+ example a keyboard, mouse or vendor specific functionality.
+ """
+
+ def __init__(self):
+ self._gadget = None
+ self._report_id = None
+
+ def Connected(self, my_gadget, report_id):
+ self._gadget = my_gadget
+ self._report_id = report_id
+
+ def Disconnected(self):
+ self._gadget = None
+ self._report_id = None
+
+ def IsConnected(self):
+ return self._gadget is not None
+
+ def SendReport(self, data):
+ """Send a report with this feature's Report ID.
+
+ Args:
+ data: Report to send. If necessary the Report ID will be added.
+
+ Raises:
+ RuntimeError: If a report cannot be sent at this time.
+ """
+ if not self.IsConnected():
+ raise RuntimeError('Device is not connected.')
+ self._gadget.SendReport(self._report_id, data)
+
+ def SetInputReport(self, data):
+ """Handle an input report sent from the host.
+
+ This function is called when a SET_REPORT(input) command for this class's
+ Report ID is received. It should be overridden by a subclass.
+
+ Args:
+ data: Contents of the input report.
+ """
+ pass # pragma: no cover
+
+ def SetOutputReport(self, data):
+ """Handle an output report sent from the host.
+
+ This function is called when a SET_REPORT(output) command or interrupt OUT
+ transfer is received with this class's Report ID. It should be overridden
+ by a subclass.
+
+ Args:
+ data: Contents of the output report.
+ """
+ pass # pragma: no cover
+
+ def SetFeatureReport(self, data):
+ """Handle a feature report sent from the host.
+
+ This function is called when a SET_REPORT(feature) command for this class's
+ Report ID is received. It should be overridden by a subclass.
+
+ Args:
+ data: Contents of the feature report.
+ """
+ pass # pragma: no cover
+
+ def GetInputReport(self):
+ """Handle an input report request from the host.
+
+ This function is called when a GET_REPORT(input) command for this class's
+ Report ID is received. It should be overridden by a subclass.
+
+ Returns:
+ The input report or None to stall the pipe.
+ """
+ pass # pragma: no cover
+
+ def GetOutputReport(self):
+ """Handle an output report request from the host.
+
+ This function is called when a GET_REPORT(output) command for this class's
+ Report ID is received. It should be overridden by a subclass.
+
+ Returns:
+ The output report or None to stall the pipe.
+ """
+ pass # pragma: no cover
+
+ def GetFeatureReport(self):
+ """Handle a feature report request from the host.
+
+ This function is called when a GET_REPORT(feature) command for this class's
+ Report ID is received. It should be overridden by a subclass.
+
+ Returns:
+ The feature report or None to stall the pipe.
+ """
+ pass # pragma: no cover
+
+class HidGadget(composite_gadget.CompositeGadget):
+ """Generic HID gadget.
+ """
+
+ def __init__(self, report_desc, features, vendor_id, product_id,
+ packet_size=64, interval_ms=10, out_endpoint=True,
+ device_version=0x0100):
+ """Create a HID gadget.
+
+ Args:
+ report_desc: HID report descriptor.
+ features: Map between Report IDs and HidFeature objects to handle them.
+ vendor_id: Device Vendor ID.
+ product_id: Device Product ID.
+ packet_size: Maximum interrupt packet size.
+ interval_ms: Interrupt transfer interval in milliseconds.
+ out_endpoint: Should this device have an interrupt OUT endpoint?
+ device_version: Device version number.
+
+ Raises:
+ ValueError: If any of the parameters are out of range.
+ """
+ device_desc = usb_descriptors.DeviceDescriptor(
+ idVendor=vendor_id,
+ idProduct=product_id,
+ bcdUSB=0x0200,
+ iManufacturer=1,
+ iProduct=2,
+ iSerialNumber=3,
+ bcdDevice=device_version)
+
+ if out_endpoint:
+ out_endpoint = 0x01
+ else:
+ out_endpoint = None
+
+ self._hid_feature = HidCompositeFeature(
+ report_desc=report_desc,
+ features=features,
+ packet_size=packet_size,
+ interval_ms=interval_ms,
+ out_endpoint=out_endpoint)
+
+ super(HidGadget, self).__init__(device_desc, [self._hid_feature])
+ self.AddStringDescriptor(3, '{:06X}'.format(uuid.getnode()))
+
+ def SendReport(self, report_id, data):
+ self._hid_feature.SendReport(report_id, data)
diff --git a/chromium/tools/usb_gadget/hid_gadget_test.py b/chromium/tools/usb_gadget/hid_gadget_test.py
new file mode 100755
index 00000000000..f0a6a29185b
--- /dev/null
+++ b/chromium/tools/usb_gadget/hid_gadget_test.py
@@ -0,0 +1,258 @@
+#!/usr/bin/python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import mock
+
+import hid_constants
+import hid_descriptors
+import hid_gadget
+import usb_constants
+
+
+report_desc = hid_descriptors.ReportDescriptor(
+ hid_descriptors.UsagePage(0xFF00), # Vendor Defined
+ hid_descriptors.Usage(0x00),
+ hid_descriptors.Collection(
+ hid_constants.CollectionType.APPLICATION,
+ hid_descriptors.LogicalMinimum(0, force_length=1),
+ hid_descriptors.LogicalMaximum(255, force_length=2),
+ hid_descriptors.ReportSize(8),
+ hid_descriptors.ReportCount(8),
+ hid_descriptors.Input(hid_descriptors.Data,
+ hid_descriptors.Variable,
+ hid_descriptors.Absolute,
+ hid_descriptors.BufferedBytes),
+ hid_descriptors.Output(hid_descriptors.Data,
+ hid_descriptors.Variable,
+ hid_descriptors.Absolute,
+ hid_descriptors.BufferedBytes),
+ hid_descriptors.Feature(hid_descriptors.Data,
+ hid_descriptors.Variable,
+ hid_descriptors.Absolute,
+ hid_descriptors.BufferedBytes)
+ )
+)
+
+combo_report_desc = hid_descriptors.ReportDescriptor(
+ hid_descriptors.ReportID(1),
+ report_desc,
+ hid_descriptors.ReportID(2),
+ report_desc
+)
+
+
+class HidGadgetTest(unittest.TestCase):
+
+ def test_bad_intervals(self):
+ with self.assertRaisesRegexp(ValueError, 'Full speed'):
+ hid_gadget.HidGadget(report_desc, features={}, interval_ms=50000,
+ vendor_id=0, product_id=0)
+ with self.assertRaisesRegexp(ValueError, 'High speed'):
+ hid_gadget.HidGadget(report_desc, features={}, interval_ms=5000,
+ vendor_id=0, product_id=0)
+
+ def test_get_string_descriptor(self):
+ g = hid_gadget.HidGadget(report_desc=report_desc, features={},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ g.AddStringDescriptor(2, 'HID Gadget')
+ desc = g.ControlRead(0x80, 6, 0x0302, 0x0409, 255)
+ self.assertEquals(desc, '\x16\x03H\0I\0D\0 \0G\0a\0d\0g\0e\0t\0')
+
+ def test_get_report_descriptor(self):
+ g = hid_gadget.HidGadget(report_desc=report_desc, features={},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ desc = g.ControlRead(0x81, 6, 0x2200, 0, 63)
+ self.assertEquals(desc, report_desc)
+
+ def test_set_idle(self):
+ g = hid_gadget.HidGadget(report_desc=report_desc, features={},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ self.assertTrue(g.ControlWrite(0x21, 0x0A, 0, 0, ''))
+
+ def test_class_wrong_target(self):
+ g = hid_gadget.HidGadget(report_desc=report_desc, features={},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ self.assertIsNone(g.ControlRead(0xA0, 0, 0, 0, 0)) # Device
+ self.assertIsNone(g.ControlRead(0xA1, 0, 0, 1, 0)) # Interface 1
+ self.assertIsNone(g.ControlWrite(0x20, 0, 0, 0, '')) # Device
+ self.assertIsNone(g.ControlWrite(0x21, 0, 0, 1, '')) # Interface 1
+
+ def test_send_report_zero(self):
+ g = hid_gadget.HidGadget(report_desc=report_desc, features={},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ g.SendReport(0, 'Hello world!')
+ chip.SendPacket.assert_called_once_with(0x81, 'Hello world!')
+
+ def test_send_multiple_reports(self):
+ g = hid_gadget.HidGadget(report_desc=report_desc, features={},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ g.SendReport(1, 'Hello!')
+ g.SendReport(2, 'World!')
+ chip.SendPacket.assert_has_calls([
+ mock.call(0x81, '\x01Hello!'),
+ mock.call(0x81, '\x02World!'),
+ ])
+
+
+class TestFeature(hid_gadget.HidFeature):
+
+ def SetInputReport(self, data):
+ self.input_report = data
+ return True
+
+ def SetOutputReport(self, data):
+ self.output_report = data
+ return True
+
+ def SetFeatureReport(self, data):
+ self.feature_report = data
+ return True
+
+ def GetInputReport(self):
+ return 'Input report.'
+
+ def GetOutputReport(self):
+ return 'Output report.'
+
+ def GetFeatureReport(self):
+ return 'Feature report.'
+
+
+class HidFeatureTest(unittest.TestCase):
+
+ def test_disconnected(self):
+ feature = TestFeature()
+ with self.assertRaisesRegexp(RuntimeError, 'not connected'):
+ feature.SendReport('Hello world!')
+
+ def test_send_report(self):
+ feature = TestFeature()
+ g = hid_gadget.HidGadget(report_desc, features={1: feature},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ feature.SendReport('Hello world!')
+ chip.SendPacket.assert_called_once_with(0x81, '\x01Hello world!')
+ g.Disconnected()
+
+ def test_get_bad_report(self):
+ feature = TestFeature()
+ g = hid_gadget.HidGadget(report_desc, features={1: feature},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ self.assertIsNone(g.ControlRead(0xA1, 1, 0x0102, 0, 8))
+
+ def test_set_bad_report(self):
+ feature = TestFeature()
+ g = hid_gadget.HidGadget(report_desc, features={1: feature},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ self.assertIsNone(g.ControlWrite(0x21, 0x09, 0x0102, 0, 'Hello!'))
+
+ def test_get_input_report(self):
+ feature = TestFeature()
+ g = hid_gadget.HidGadget(report_desc, features={1: feature},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ report = g.ControlRead(0xA1, 1, 0x0101, 0, 8)
+ self.assertEquals(report, 'Input re')
+
+ def test_set_input_report(self):
+ feature = TestFeature()
+ g = hid_gadget.HidGadget(report_desc, features={1: feature},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ self.assertTrue(g.ControlWrite(0x21, 0x09, 0x0101, 0, 'Hello!'))
+ self.assertEquals(feature.input_report, 'Hello!')
+
+ def test_get_output_report(self):
+ feature = TestFeature()
+ g = hid_gadget.HidGadget(report_desc, features={1: feature},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ report = g.ControlRead(0xA1, 1, 0x0201, 0, 8)
+ self.assertEquals(report, 'Output r')
+
+ def test_set_output_report(self):
+ feature = TestFeature()
+ g = hid_gadget.HidGadget(report_desc, features={1: feature},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ self.assertTrue(g.ControlWrite(0x21, 0x09, 0x0201, 0, 'Hello!'))
+ self.assertEquals(feature.output_report, 'Hello!')
+
+ def test_receive_interrupt(self):
+ feature = TestFeature()
+ g = hid_gadget.HidGadget(report_desc, features={1: feature},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ g.SetConfiguration(1)
+ g.ReceivePacket(0x01, '\x01Hello!')
+ self.assertFalse(chip.HaltEndpoint.called)
+ self.assertEquals(feature.output_report, 'Hello!')
+
+ def test_receive_interrupt_report_zero(self):
+ feature = TestFeature()
+ g = hid_gadget.HidGadget(report_desc, features={0: feature},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ g.SetConfiguration(1)
+ g.ReceivePacket(0x01, 'Hello!')
+ self.assertFalse(chip.HaltEndpoint.called)
+ self.assertEquals(feature.output_report, 'Hello!')
+
+ def test_receive_bad_interrupt(self):
+ feature = TestFeature()
+ g = hid_gadget.HidGadget(report_desc, features={1: feature},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ g.SetConfiguration(1)
+ g.ReceivePacket(0x01, '\x00Hello!')
+ chip.HaltEndpoint.assert_called_once_with(0x01)
+
+ def test_get_feature_report(self):
+ feature = TestFeature()
+ g = hid_gadget.HidGadget(report_desc, features={1: feature},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ report = g.ControlRead(0xA1, 1, 0x0301, 0, 8)
+ self.assertEquals(report, 'Feature ')
+
+ def test_set_feature_report(self):
+ feature = TestFeature()
+ g = hid_gadget.HidGadget(report_desc, features={1: feature},
+ vendor_id=0, product_id=0)
+ chip = mock.Mock()
+ g.Connected(chip, usb_constants.Speed.HIGH)
+ self.assertTrue(g.ControlWrite(0x21, 0x09, 0x0301, 0, 'Hello!'))
+ self.assertEquals(feature.feature_report, 'Hello!')
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/usb_gadget/keyboard_gadget.py b/chromium/tools/usb_gadget/keyboard_gadget.py
new file mode 100644
index 00000000000..8f1e3b023f6
--- /dev/null
+++ b/chromium/tools/usb_gadget/keyboard_gadget.py
@@ -0,0 +1,202 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Implementation of a USB HID keyboard.
+
+Two classes are provided by this module. The KeyboardFeature class implements
+the core functionality of a HID keyboard and can be included in any HID gadget.
+The KeyboardGadget class implements an example keyboard gadget.
+"""
+
+import struct
+
+import hid_constants
+import hid_descriptors
+import hid_gadget
+import usb_constants
+
+
+class KeyboardFeature(hid_gadget.HidFeature):
+ """HID feature implementation for a keyboard.
+
+ REPORT_DESC provides an example HID report descriptor for a device including
+ this functionality.
+ """
+
+ REPORT_DESC = hid_descriptors.ReportDescriptor(
+ hid_descriptors.UsagePage(0x01), # Generic Desktop
+ hid_descriptors.Usage(0x06), # Keyboard
+ hid_descriptors.Collection(
+ hid_constants.CollectionType.APPLICATION,
+ hid_descriptors.UsagePage(0x07), # Key Codes
+ hid_descriptors.UsageMinimum(224),
+ hid_descriptors.UsageMaximum(231),
+ hid_descriptors.LogicalMinimum(0, force_length=1),
+ hid_descriptors.LogicalMaximum(1),
+ hid_descriptors.ReportSize(1),
+ hid_descriptors.ReportCount(8),
+ hid_descriptors.Input(hid_descriptors.Data,
+ hid_descriptors.Variable,
+ hid_descriptors.Absolute),
+ hid_descriptors.ReportCount(1),
+ hid_descriptors.ReportSize(8),
+ hid_descriptors.Input(hid_descriptors.Constant),
+ hid_descriptors.ReportCount(5),
+ hid_descriptors.ReportSize(1),
+ hid_descriptors.UsagePage(0x08), # LEDs
+ hid_descriptors.UsageMinimum(1),
+ hid_descriptors.UsageMaximum(5),
+ hid_descriptors.Output(hid_descriptors.Data,
+ hid_descriptors.Variable,
+ hid_descriptors.Absolute),
+ hid_descriptors.ReportCount(1),
+ hid_descriptors.ReportSize(3),
+ hid_descriptors.Output(hid_descriptors.Constant),
+ hid_descriptors.ReportCount(6),
+ hid_descriptors.ReportSize(8),
+ hid_descriptors.LogicalMinimum(0, force_length=1),
+ hid_descriptors.LogicalMaximum(101),
+ hid_descriptors.UsagePage(0x07), # Key Codes
+ hid_descriptors.UsageMinimum(0, force_length=1),
+ hid_descriptors.UsageMaximum(101),
+ hid_descriptors.Input(hid_descriptors.Data, hid_descriptors.Array)
+ )
+ )
+
+ def __init__(self):
+ super(KeyboardFeature, self).__init__()
+ self._modifiers = 0
+ self._keys = [0, 0, 0, 0, 0, 0]
+ self._leds = 0
+
+ def ModifierDown(self, modifier):
+ self._modifiers |= modifier
+ if self.IsConnected():
+ self.SendReport(self.GetInputReport())
+
+ def ModifierUp(self, modifier):
+ self._modifiers &= ~modifier
+ if self.IsConnected():
+ self.SendReport(self.GetInputReport())
+
+ def KeyDown(self, keycode):
+ free = self._keys.index(0)
+ self._keys[free] = keycode
+ if self.IsConnected():
+ self.SendReport(self.GetInputReport())
+
+ def KeyUp(self, keycode):
+ free = self._keys.index(keycode)
+ self._keys[free] = 0
+ if self.IsConnected():
+ self.SendReport(self.GetInputReport())
+
+ def GetInputReport(self):
+ """Construct an input report.
+
+ See Device Class Definition for Human Interface Devices (HID) Version 1.11
+ Appendix B.1.
+
+ Returns:
+ A packed input report.
+ """
+ return struct.pack('BBBBBBBB', self._modifiers, 0, *self._keys)
+
+ def GetOutputReport(self):
+ """Construct an output report.
+
+ See Device Class Definition for Human Interface Devices (HID) Version 1.11
+ Appendix B.1.
+
+ Returns:
+ A packed output report.
+ """
+ return struct.pack('B', self._leds)
+
+ def SetOutputReport(self, data):
+ """Handle an output report.
+
+ See Device Class Definition for Human Interface Devices (HID) Version 1.11
+ Appendix B.1.
+
+ Args:
+ data: Report data.
+
+ Returns:
+ True on success, None to stall the pipe.
+ """
+ if len(data) >= 1:
+ self._leds, = struct.unpack('B', data)
+ return True
+
+
+class KeyboardGadget(hid_gadget.HidGadget):
+ """USB gadget implementation of a HID keyboard."""
+
+ def __init__(self, vendor_id=0x18D1, product_id=0xFF02):
+ self._feature = KeyboardFeature()
+ super(KeyboardGadget, self).__init__(
+ report_desc=KeyboardFeature.REPORT_DESC,
+ features={0: self._feature},
+ packet_size=8,
+ interval_ms=1,
+ out_endpoint=True,
+ vendor_id=usb_constants.VendorID.GOOGLE,
+ product_id=usb_constants.ProductID.GOOGLE_KEYBOARD_GADGET,
+ device_version=0x0100)
+ self.AddStringDescriptor(1, 'Google Inc.')
+ self.AddStringDescriptor(2, 'Keyboard Gadget')
+
+ def ModifierDown(self, modifier):
+ self._feature.ModifierDown(modifier)
+
+ def ModifierUp(self, modifier):
+ self._feature.ModifierUp(modifier)
+
+ def KeyDown(self, keycode):
+ self._feature.KeyDown(keycode)
+
+ def KeyUp(self, keycode):
+ self._feature.KeyUp(keycode)
+
+
+def RegisterHandlers():
+ """Registers web request handlers with the application server."""
+
+ from tornado import web
+
+ class WebConfigureHandler(web.RequestHandler):
+
+ def post(self):
+ server.SwitchGadget(KeyboardGadget())
+
+ class WebTypeHandler(web.RequestHandler):
+
+ def post(self):
+ string = self.get_argument('string')
+ for char in string:
+ if char in hid_constants.KEY_CODES:
+ code = hid_constants.KEY_CODES[char]
+ server.gadget.KeyDown(code)
+ server.gadget.KeyUp(code)
+ elif char in hid_constants.SHIFT_KEY_CODES:
+ code = hid_constants.SHIFT_KEY_CODES[char]
+ server.gadget.ModifierDown(hid_constants.ModifierKey.L_SHIFT)
+ server.gadget.KeyDown(code)
+ server.gadget.KeyUp(code)
+ server.gadget.ModifierUp(hid_constants.ModifierKey.L_SHIFT)
+
+ class WebPressHandler(web.RequestHandler):
+
+ def post(self):
+ code = hid_constants.KEY_CODES[self.get_argument('key')]
+ server.gadget.KeyDown(code)
+ server.gadget.KeyUp(code)
+
+ import server
+ server.app.add_handlers('.*$', [
+ (r'/keyboard/configure', WebConfigureHandler),
+ (r'/keyboard/type', WebTypeHandler),
+ (r'/keyboard/press', WebPressHandler),
+ ])
diff --git a/chromium/tools/usb_gadget/keyboard_gadget_test.py b/chromium/tools/usb_gadget/keyboard_gadget_test.py
new file mode 100755
index 00000000000..cac3e313a41
--- /dev/null
+++ b/chromium/tools/usb_gadget/keyboard_gadget_test.py
@@ -0,0 +1,64 @@
+#!/usr/bin/python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import mock
+
+import hid_constants
+import keyboard_gadget
+import usb_constants
+
+
class KeyboardGadgetTest(unittest.TestCase):
  """Unit tests driving KeyboardGadget through a mocked chip driver."""

  def test_key_press(self):
    """A key press/release emits interrupt-IN packets and matching reports."""
    g = keyboard_gadget.KeyboardGadget()
    chip = mock.Mock()
    g.Connected(chip, usb_constants.Speed.FULL)
    g.KeyDown(0x04)
    # Report layout (see the modifier test below): modifier bitmask, then a
    # reserved byte, then up to six key codes.
    self.assertEqual(g.ControlRead(0xA1, 1, 0x0100, 0, 8),
                     '\x00\x00\x04\x00\x00\x00\x00\x00')
    g.KeyUp(0x04)
    self.assertEqual(g.ControlRead(0xA1, 1, 0x0100, 0, 8),
                     '\x00\x00\x00\x00\x00\x00\x00\x00')
    chip.SendPacket.assert_has_calls([
        mock.call(0x81, '\x00\x00\x04\x00\x00\x00\x00\x00'),
        mock.call(0x81, '\x00\x00\x00\x00\x00\x00\x00\x00'),
    ])

  def test_key_press_with_modifier(self):
    """Modifier state (first report byte) combines with held key codes."""
    g = keyboard_gadget.KeyboardGadget()
    chip = mock.Mock()
    g.Connected(chip, usb_constants.Speed.FULL)
    g.ModifierDown(hid_constants.ModifierKey.L_SHIFT)
    g.KeyDown(0x04)
    g.KeyDown(0x05)
    g.KeyUp(0x04)
    g.KeyUp(0x05)
    g.ModifierUp(hid_constants.ModifierKey.L_SHIFT)
    # One packet per state change, in order.
    chip.SendPacket.assert_has_calls([
        mock.call(0x81, '\x02\x00\x00\x00\x00\x00\x00\x00'),
        mock.call(0x81, '\x02\x00\x04\x00\x00\x00\x00\x00'),
        mock.call(0x81, '\x02\x00\x04\x05\x00\x00\x00\x00'),
        mock.call(0x81, '\x02\x00\x00\x05\x00\x00\x00\x00'),
        mock.call(0x81, '\x02\x00\x00\x00\x00\x00\x00\x00'),
        mock.call(0x81, '\x00\x00\x00\x00\x00\x00\x00\x00'),
    ])

  def test_set_leds(self):
    """The LED output report is writable via control and OUT transfers."""
    g = keyboard_gadget.KeyboardGadget()
    chip = mock.Mock()
    g.Connected(chip, usb_constants.Speed.FULL)
    g.SetConfiguration(1)
    self.assertEqual(g.ControlRead(0xA1, 1, 0x0200, 0, 8), '\x00')
    self.assertTrue(g.ControlWrite(0x21, 9, 0x0200, 0, '\x01'))
    self.assertEqual(g.ControlRead(0xA1, 1, 0x0200, 0, 8), '\x01')
    # Writing through the interrupt-OUT endpoint must not stall.
    g.ReceivePacket(0x01, '\x03')
    self.assertFalse(chip.HaltEndpoint.called)
    self.assertEqual(g.ControlRead(0xA1, 1, 0x0200, 0, 8), '\x03')

if __name__ == '__main__':
  unittest.main()
diff --git a/chromium/tools/usb_gadget/linux_gadgetfs.py b/chromium/tools/usb_gadget/linux_gadgetfs.py
new file mode 100644
index 00000000000..b67bba80f74
--- /dev/null
+++ b/chromium/tools/usb_gadget/linux_gadgetfs.py
@@ -0,0 +1,302 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Linux gadgetfs glue.
+
+Exposes a USB gadget using a USB peripheral controller on Linux. The userspace
+ABI is documented here:
+
+https://github.com/torvalds/linux/blob/master/drivers/usb/gadget/inode.c
+"""
+
+import errno
+import multiprocessing
+import os
+import struct
+
+from tornado import ioloop
+
+import usb_constants
+import usb_descriptors
+
# Event codes read from the ep0 file; values come from the gadgetfs
# userspace ABI referenced in the module docstring.
GADGETFS_NOP = 0
GADGETFS_CONNECT = 1
GADGETFS_DISCONNECT = 2
GADGETFS_SETUP = 3
GADGETFS_SUSPEND = 4

# Bitmask of transfer types a hardware endpoint supports (see HARDWARE).
BULK = 0x01
INTERRUPT = 0x02
ISOCHRONOUS = 0x04

# Translates a USB descriptor transfer type into the capability mask above.
USB_TRANSFER_TYPE_TO_MASK = {
    usb_constants.TransferType.BULK: BULK,
    usb_constants.TransferType.INTERRUPT: INTERRUPT,
    usb_constants.TransferType.ISOCHRONOUS: ISOCHRONOUS
}

# Direction flags. NOTE(review): nothing in this file references IN/OUT
# (direction tests below use usb_constants.Dir.IN) — confirm before removing.
IN = 0x01
OUT = 0x02

# Board name -> (gadget controller name, endpoint capability table).
# Each endpoint entry maps bEndpointAddress to (gadgetfs endpoint file name,
# supported transfer-type mask, maximum packet size).
HARDWARE = {
    'beaglebone-black': (
        'musb-hdrc',  # Gadget controller name,
        {
            0x01: ('ep1out', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x81: ('ep1in', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x02: ('ep2out', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x82: ('ep2in', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x03: ('ep3out', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x83: ('ep3in', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x04: ('ep4out', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x84: ('ep4in', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x05: ('ep5out', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x85: ('ep5in', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x06: ('ep6out', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x86: ('ep6in', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x07: ('ep7out', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x87: ('ep7in', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x08: ('ep8out', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x88: ('ep8in', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x09: ('ep9out', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x89: ('ep9in', BULK | INTERRUPT | ISOCHRONOUS, 512),
            0x0A: ('ep10out', BULK | INTERRUPT | ISOCHRONOUS, 64),
            0x8A: ('ep10in', BULK | INTERRUPT | ISOCHRONOUS, 256),
            0x0B: ('ep11out', BULK | INTERRUPT | ISOCHRONOUS, 64),
            0x8B: ('ep11in', BULK | INTERRUPT | ISOCHRONOUS, 256),
            0x0C: ('ep12out', BULK | INTERRUPT | ISOCHRONOUS, 64),
            0x8C: ('ep12in', BULK | INTERRUPT | ISOCHRONOUS, 256),
            # Endpoints 13-15 share one bidirectional file per endpoint.
            0x0D: ('ep13', BULK | INTERRUPT | ISOCHRONOUS, 4096),
            0x8D: ('ep13', BULK | INTERRUPT | ISOCHRONOUS, 4096),
            0x0E: ('ep14', BULK | INTERRUPT | ISOCHRONOUS, 1024),
            0x8E: ('ep14', BULK | INTERRUPT | ISOCHRONOUS, 1024),
            0x0F: ('ep15', BULK | INTERRUPT | ISOCHRONOUS, 1024),
            0x8F: ('ep15', BULK | INTERRUPT | ISOCHRONOUS, 1024),
        }
    )
}
+
+
+class LinuxGadgetfs(object):
+ """Linux gadgetfs-based gadget driver.
+ """
+
+ def __init__(self, hardware, mountpoint='/dev/gadget'):
+ """Initialize bindings to the Linux gadgetfs interface.
+
+ Args:
+ hardware: Hardware type.
+ mountpoint: Gadget filesystem mount point.
+ """
+ self._chip, self._hw_eps = HARDWARE[hardware]
+ self._ep_dir = mountpoint
+ self._gadget = None
+ self._fd = None
+ # map from bEndpointAddress to hardware ep name and open file descriptor
+ self._ep_fds = {}
+ self._io_loop = ioloop.IOLoop.current()
+
+ def Create(self, gadget):
+ """Bind a gadget to the USB peripheral controller."""
+ self._gadget = gadget
+ self._fd = os.open(os.path.join(self._ep_dir, self._chip), os.O_RDWR)
+ buf = ''.join([struct.pack('=I', 0),
+ gadget.GetFullSpeedConfigurationDescriptor().Encode(),
+ gadget.GetHighSpeedConfigurationDescriptor().Encode(),
+ gadget.GetDeviceDescriptor().Encode()])
+ os.write(self._fd, buf)
+ self._io_loop.add_handler(self._fd, self.HandleEvent, self._io_loop.READ)
+
+ def Destroy(self):
+ """Unbind the gadget from the USB peripheral controller."""
+ self.Disconnected()
+ self._io_loop.remove_handler(self._fd)
+ os.close(self._fd)
+ self._gadget = None
+ self._fd = None
+
+ def IsConfigured(self):
+ return self._gadget is not None
+
+ def HandleEvent(self, unused_fd, unused_events):
+ buf = os.read(self._fd, 12)
+ event_type, = struct.unpack_from('=I', buf, 8)
+
+ if event_type == GADGETFS_NOP:
+ print 'NOP'
+ elif event_type == GADGETFS_CONNECT:
+ speed, = struct.unpack('=Ixxxxxxxx', buf)
+ self.Connected(speed)
+ elif event_type == GADGETFS_DISCONNECT:
+ self.Disconnected()
+ elif event_type == GADGETFS_SETUP:
+ request_type, request, value, index, length = struct.unpack(
+ '<BBHHHxxxx', buf)
+ self.HandleSetup(request_type, request, value, index, length)
+ elif event_type == GADGETFS_SUSPEND:
+ print 'SUSPEND'
+ else:
+ print 'Unknown gadgetfs event type:', event_type
+
+ def Connected(self, speed):
+ print 'CONNECT speed={}'.format(speed)
+ self._gadget.Connected(self, speed)
+
+ def Disconnected(self):
+ print 'DISCONNECT'
+ for endpoint_addr in self._ep_fds.keys():
+ self.StopEndpoint(endpoint_addr)
+ self._ep_fds.clear()
+ self._gadget.Disconnected()
+
+ def HandleSetup(self, request_type, request, value, index, length):
+ print ('SETUP bmRequestType=0x{:02X} bRequest=0x{:02X} wValue=0x{:04X} '
+ 'wIndex=0x{:04X} wLength={}'
+ .format(request_type, request, value, index, length))
+
+ if request_type & usb_constants.Dir.IN:
+ data = self._gadget.ControlRead(
+ request_type, request, value, index, length)
+ if data is None:
+ print 'SETUP STALL'
+ try:
+ os.read(self._fd, 0) # Backwards I/O stalls the pipe.
+ except OSError, e:
+ # gadgetfs always returns EL2HLT which we should ignore.
+ if e.errno != errno.EL2HLT:
+ raise
+ else:
+ os.write(self._fd, data)
+ else:
+ data = ''
+ if length:
+ data = os.read(self._fd, length)
+ result = self._gadget.ControlWrite(
+ request_type, request, value, index, data)
+ if result is None:
+ print 'SETUP STALL'
+ try:
+ os.write(self._fd, '') # Backwards I/O stalls the pipe.
+ except OSError, e:
+ # gadgetfs always returns EL2HLT which we should ignore.
+ if e.errno != errno.EL2HLT:
+ raise
+ elif not length:
+ # Only empty OUT transfers can be ACKed.
+ os.read(self._fd, 0)
+
+ def StartEndpoint(self, endpoint_desc):
+ """Activate an endpoint.
+
+ To enable a hardware endpoint the appropriate endpoint file must be opened
+ and the endpoint descriptors written to it. Linux requires both full- and
+ high-speed descriptors to be written for a high-speed device but since the
+ endpoint is always reinitialized after disconnect only the high-speed
+ endpoint will be valid in this case.
+
+ Args:
+ endpoint_desc: Endpoint descriptor.
+
+ Raises:
+ RuntimeError: If the hardware endpoint is in use or the configuration
+ is not supported by the hardware.
+ """
+ endpoint_addr = endpoint_desc.bEndpointAddress
+ name, hw_ep_type, hw_ep_size = self._hw_eps[endpoint_addr]
+
+ if name in self._ep_fds:
+ raise RuntimeError('Hardware endpoint {} already in use.'.format(name))
+
+ ep_type = USB_TRANSFER_TYPE_TO_MASK[
+ endpoint_desc.bmAttributes & usb_constants.TransferType.MASK]
+ ep_size = endpoint_desc.wMaxPacketSize
+
+ if not hw_ep_type & ep_type:
+ raise RuntimeError('Hardware endpoint {} does not support this transfer '
+ 'type.'.format(name))
+ elif hw_ep_size < ep_size:
+ raise RuntimeError('Hardware endpoint {} only supports a maximum packet '
+ 'size of {}, {} requested.'
+ .format(name, hw_ep_size, ep_size))
+
+ fd = os.open(os.path.join(self._ep_dir, name), os.O_RDWR)
+
+ buf = struct.pack('=I', 1)
+ if self._gadget.GetSpeed() == usb_constants.Speed.HIGH:
+ # The full speed endpoint descriptor will not be used but Linux requires
+ # one to be provided.
+ full_speed_endpoint = usb_descriptors.EndpointDescriptor(
+ bEndpointAddress=endpoint_desc.bEndpointAddress,
+ bmAttributes=0,
+ wMaxPacketSize=0,
+ bInterval=0)
+ buf = ''.join([buf, full_speed_endpoint.Encode(), endpoint_desc.Encode()])
+ else:
+ buf = ''.join([buf, endpoint_desc.Encode()])
+ os.write(fd, buf)
+
+ pipe_r, pipe_w = multiprocessing.Pipe(False)
+ child = None
+
+ # gadgetfs doesn't support polling on the endpoint file descriptors (why?)
+ # so we have to start background threads for each.
+ if endpoint_addr & usb_constants.Dir.IN:
+ def WriterProcess():
+ while True:
+ data = pipe_r.recv()
+ written = os.write(fd, data)
+ print('IN bEndpointAddress=0x{:02X} length={}'
+ .format(endpoint_addr, written))
+
+ child = multiprocessing.Process(target=WriterProcess)
+ self._ep_fds[endpoint_addr] = fd, child, pipe_w
+ else:
+ def ReceivePacket(unused_fd, unused_events):
+ data = pipe_r.recv()
+ print('OUT bEndpointAddress=0x{:02X} length={}'
+ .format(endpoint_addr, len(data)))
+ self._gadget.ReceivePacket(endpoint_addr, data)
+
+ def ReaderProcess():
+ while True:
+ data = os.read(fd, ep_size)
+ pipe_w.send(data)
+
+ child = multiprocessing.Process(target=ReaderProcess)
+ pipe_fd = pipe_r.fileno()
+ self._io_loop.add_handler(pipe_fd, ReceivePacket, self._io_loop.READ)
+ self._ep_fds[endpoint_addr] = fd, child, pipe_r
+
+ child.start()
+ print 'Started endpoint 0x{:02X}.'.format(endpoint_addr)
+
+ def StopEndpoint(self, endpoint_addr):
+ """Deactivate the given endpoint."""
+ fd, child, pipe = self._ep_fds.pop(endpoint_addr)
+ pipe_fd = pipe.fileno()
+ child.terminate()
+ child.join()
+ if not endpoint_addr & usb_constants.Dir.IN:
+ self._io_loop.remove_handler(pipe_fd)
+ os.close(fd)
+ print 'Stopped endpoint 0x{:02X}.'.format(endpoint_addr)
+
+ def SendPacket(self, endpoint_addr, data):
+ """Send a packet on the given endpoint."""
+ _, _, pipe = self._ep_fds[endpoint_addr]
+ pipe.send(data)
+
+ def HaltEndpoint(self, endpoint_addr):
+ """Signal a stall condition on the given endpoint."""
+ fd, _ = self._ep_fds[endpoint_addr]
+ # Reverse I/O direction sets the halt condition on the pipe.
+ try:
+ if endpoint_addr & usb_constants.Dir.IN:
+ os.read(fd, 0)
+ else:
+ os.write(fd, '')
+ except OSError, e:
+ # gadgetfs always returns EBADMSG which we should ignore.
+ if e.errno != errno.EBADMSG:
+ raise
diff --git a/chromium/tools/usb_gadget/mouse_gadget.py b/chromium/tools/usb_gadget/mouse_gadget.py
new file mode 100644
index 00000000000..cb4f7e8df67
--- /dev/null
+++ b/chromium/tools/usb_gadget/mouse_gadget.py
@@ -0,0 +1,158 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Implementation of a USB HID mouse.
+
+Two classes are provided by this module. The MouseFeature class implements
+the core functionality of a HID mouse and can be included in any HID gadget.
+The MouseGadget class implements an example mouse gadget.
+"""
+
+import struct
+
+import hid_constants
+import hid_descriptors
+import hid_gadget
+import usb_constants
+
+
class MouseFeature(hid_gadget.HidFeature):
  """HID feature implementation for a mouse.

  REPORT_DESC provides an example HID report descriptor for a device including
  this functionality.
  """

  REPORT_DESC = hid_descriptors.ReportDescriptor(
      hid_descriptors.UsagePage(0x01),  # Generic Desktop
      hid_descriptors.Usage(0x02),  # Mouse
      hid_descriptors.Collection(
          hid_constants.CollectionType.APPLICATION,
          hid_descriptors.Usage(0x01),  # Pointer
          hid_descriptors.Collection(
              hid_constants.CollectionType.PHYSICAL,
              hid_descriptors.UsagePage(0x09),  # Buttons
              hid_descriptors.UsageMinimum(1),
              hid_descriptors.UsageMaximum(3),
              hid_descriptors.LogicalMinimum(0, force_length=1),
              hid_descriptors.LogicalMaximum(1),
              # Three button bits...
              hid_descriptors.ReportCount(3),
              hid_descriptors.ReportSize(1),
              hid_descriptors.Input(hid_descriptors.Data,
                                    hid_descriptors.Variable,
                                    hid_descriptors.Absolute),
              # ...then five constant padding bits to fill the byte.
              hid_descriptors.ReportCount(1),
              hid_descriptors.ReportSize(5),
              hid_descriptors.Input(hid_descriptors.Constant),
              hid_descriptors.UsagePage(0x01),  # Generic Desktop
              hid_descriptors.Usage(0x30),  # X
              hid_descriptors.Usage(0x31),  # Y
              hid_descriptors.LogicalMinimum(0x81),  # -127
              hid_descriptors.LogicalMaximum(127),
              # Two signed bytes of relative X/Y displacement.
              hid_descriptors.ReportSize(8),
              hid_descriptors.ReportCount(2),
              hid_descriptors.Input(hid_descriptors.Data,
                                    hid_descriptors.Variable,
                                    hid_descriptors.Relative)
          )
      )
  )

  def __init__(self):
    super(MouseFeature, self).__init__()
    self._buttons = 0  # bitmask of currently pressed buttons

  def ButtonDown(self, button):
    """Sets a button bit and sends an updated report when connected."""
    self._buttons |= button
    if self.IsConnected():
      self.SendReport(self.EncodeInputReport())

  def ButtonUp(self, button):
    """Clears a button bit and sends an updated report when connected."""
    self._buttons &= ~button
    if self.IsConnected():
      self.SendReport(self.EncodeInputReport())

  def Move(self, x_displacement, y_displacement):
    """Sends a report with a relative displacement, when connected."""
    if self.IsConnected():
      self.SendReport(self.EncodeInputReport(x_displacement, y_displacement))

  def EncodeInputReport(self, x_displacement=0, y_displacement=0):
    """Packs the button bitmask plus signed X/Y displacement bytes."""
    return struct.pack('Bbb', self._buttons, x_displacement, y_displacement)

  def GetInputReport(self):
    """Construct an input report.

    See Device Class Definition for Human Interface Devices (HID) Version 1.11
    Appendix B.2.

    Returns:
      A packed input report.
    """
    return self.EncodeInputReport()
+
+
class MouseGadget(hid_gadget.HidGadget):
  """USB gadget implementation of a HID mouse."""

  def __init__(self):
    self._feature = MouseFeature()
    super(MouseGadget, self).__init__(
        report_desc=MouseFeature.REPORT_DESC,
        features={0: self._feature},
        packet_size=8,
        interval_ms=1,
        out_endpoint=False,  # unlike the keyboard, no host-to-device endpoint
        vendor_id=usb_constants.VendorID.GOOGLE,
        product_id=usb_constants.ProductID.GOOGLE_MOUSE_GADGET,
        device_version=0x0100)
    self.AddStringDescriptor(1, 'Google Inc.')
    self.AddStringDescriptor(2, 'Mouse Gadget')

  def ButtonDown(self, button):
    """Delegates a button press to the mouse HID feature."""
    self._feature.ButtonDown(button)

  def ButtonUp(self, button):
    """Delegates a button release to the mouse HID feature."""
    self._feature.ButtonUp(button)

  def Move(self, x_displacement, y_displacement):
    """Delegates a relative pointer move to the mouse HID feature."""
    self._feature.Move(x_displacement, y_displacement)
+
+
def RegisterHandlers():
  """Registers web request handlers with the application server."""

  from tornado import web

  class WebConfigureHandler(web.RequestHandler):
    """Makes a fresh MouseGadget the active gadget."""

    def post(self):
      gadget = MouseGadget()
      server.SwitchGadget(gadget)

  class WebClickHandler(web.RequestHandler):
    """Presses and releases the mouse button named by 'button' ('1'-'3')."""

    def post(self):
      BUTTONS = {
          '1': hid_constants.Mouse.BUTTON_1,
          '2': hid_constants.Mouse.BUTTON_2,
          '3': hid_constants.Mouse.BUTTON_3,
      }

      button = BUTTONS[self.get_argument('button')]
      server.gadget.ButtonDown(button)
      server.gadget.ButtonUp(button)

  class WebMoveHandler(web.RequestHandler):
    """Moves the pointer by the relative 'x'/'y' arguments."""

    def post(self):
      x = int(self.get_argument('x'))
      y = int(self.get_argument('y'))
      server.gadget.Move(x, y)

  # Imported at call time rather than module scope — presumably to avoid an
  # import cycle with the server module; TODO confirm.
  import server
  server.app.add_handlers('.*$', [
      (r'/mouse/configure', WebConfigureHandler),
      (r'/mouse/move', WebMoveHandler),
      (r'/mouse/click', WebClickHandler),
  ])
diff --git a/chromium/tools/usb_gadget/mouse_gadget_test.py b/chromium/tools/usb_gadget/mouse_gadget_test.py
new file mode 100755
index 00000000000..5216798e977
--- /dev/null
+++ b/chromium/tools/usb_gadget/mouse_gadget_test.py
@@ -0,0 +1,50 @@
+#!/usr/bin/python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import mock
+
+import hid_constants
+import mouse_gadget
+import usb_constants
+
+
class MouseGadgetTest(unittest.TestCase):
  """Unit tests driving MouseGadget through a mocked chip driver."""

  def test_click(self):
    """A button press/release produces one report per state change."""
    g = mouse_gadget.MouseGadget()
    chip = mock.Mock()
    g.Connected(chip, usb_constants.Speed.FULL)
    g.ButtonDown(hid_constants.Mouse.BUTTON_1)
    # Report layout: button bitmask, then signed X and Y displacement bytes.
    self.assertEqual(g.ControlRead(0xA1, 1, 0x0100, 0, 8), '\x01\x00\x00')
    g.ButtonUp(hid_constants.Mouse.BUTTON_1)
    chip.SendPacket.assert_has_calls([
        mock.call(0x81, '\x01\x00\x00'),
        mock.call(0x81, '\x00\x00\x00'),
    ])

  def test_move(self):
    """A relative move is reported as two's-complement displacement bytes."""
    g = mouse_gadget.MouseGadget()
    chip = mock.Mock()
    g.Connected(chip, usb_constants.Speed.FULL)
    g.Move(-1, 1)
    # BUG FIX: this used chip.SendPacket.assert_called(0x81, ...), which mock
    # treats as an ordinary auto-created child attribute, so the assertion
    # could never fail. assert_called_with actually verifies the call.
    chip.SendPacket.assert_called_with(0x81, '\x00\xFF\x01')

  def test_drag(self):
    """Button state persists across intervening move reports."""
    g = mouse_gadget.MouseGadget()
    chip = mock.Mock()
    g.Connected(chip, usb_constants.Speed.FULL)
    g.ButtonDown(hid_constants.Mouse.BUTTON_1)
    g.Move(5, 5)
    g.ButtonUp(hid_constants.Mouse.BUTTON_1)
    chip.SendPacket.assert_has_calls([
        mock.call(0x81, '\x01\x00\x00'),
        mock.call(0x81, '\x01\x05\x05'),
        mock.call(0x81, '\x00\x00\x00'),
    ])

if __name__ == '__main__':
  unittest.main()
diff --git a/chromium/tools/usb_gadget/msos20_descriptors.py b/chromium/tools/usb_gadget/msos20_descriptors.py
new file mode 100644
index 00000000000..b1e0baab062
--- /dev/null
+++ b/chromium/tools/usb_gadget/msos20_descriptors.py
@@ -0,0 +1,95 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Microsoft OS 2.0 Descriptor generation utilities.
+
+Classes to help generate Microsoft OS 2.0 descriptors.
+
+Based on documentation here:
+https://msdn.microsoft.com/en-us/library/windows/hardware/dn385747.aspx
+"""
+
+import uuid
+
+import usb_constants
+import usb_descriptors
+
+
class PlatformCapabilityDescriptor(usb_descriptors.Descriptor):
  """Microsoft OS 2.0 platform capability descriptor.

  A device capability descriptor (bDevCapabilityType = PLATFORM) that
  identifies MS OS 2.0 support via a Microsoft-defined UUID and reports the
  total length of the descriptor set attached with SetDescriptorSet.
  """

  def __init__(self, **kwargs):
    super(PlatformCapabilityDescriptor, self).__init__(**kwargs)
    self._descriptor_set = None

  @property
  def descriptor_set_size(self):
    """Encoded byte length of the attached descriptor set (0 if none)."""
    if self._descriptor_set is None:
      return 0
    return len(self._descriptor_set.Encode())

  def SetDescriptorSet(self, descriptor_set):
    """Attaches the MS OS 2.0 descriptor set advertised by this capability."""
    self._descriptor_set = descriptor_set

PlatformCapabilityDescriptor.AddComputedField('bLength', 'B', 'struct_size')
PlatformCapabilityDescriptor.AddFixedField(
    'bDescriptorType', 'B', usb_constants.DescriptorType.DEVICE_CAPABILITY)
PlatformCapabilityDescriptor.AddFixedField(
    'bDevCapabilityType', 'B', usb_constants.CapabilityType.PLATFORM)
PlatformCapabilityDescriptor.AddFixedField('bReserved', 'B', 0)
# Microsoft-defined UUID identifying the MS OS 2.0 platform capability,
# stored little-endian as required for the descriptor payload.
PlatformCapabilityDescriptor.AddFixedField(
    'MS_OS_20_Platform_Capability_ID', '16s',
    uuid.UUID('{D8DD60DF-4589-4CC7-9CD2-659D9E648A9F}').bytes_le)
PlatformCapabilityDescriptor.AddField('dwWindowsVersion', 'I')
PlatformCapabilityDescriptor.AddComputedField(
    'wMSOSDescriptorSetTotalLength', 'H', 'descriptor_set_size')
PlatformCapabilityDescriptor.AddField('bMS_VendorCode', 'B')
PlatformCapabilityDescriptor.AddField('bAltEnumCode', 'B', default=0)
+
+
class DescriptorSetHeader(usb_descriptors.DescriptorContainer):
  """Microsoft OS 2.0 descriptor set header.

  Container at the root of an MS OS 2.0 descriptor set; its fields are
  registered below (wDescriptorType 0x00).
  """
  pass

DescriptorSetHeader.AddComputedField('wLength', 'H', 'struct_size')
DescriptorSetHeader.AddFixedField('wDescriptorType', 'H', 0x00)
DescriptorSetHeader.AddField('dwWindowsVersion', 'I')
DescriptorSetHeader.AddComputedField('wTotalLength', 'H', 'total_size')
+
+
class ConfigurationSubsetHeader(usb_descriptors.DescriptorContainer):
  """Microsoft OS 2.0 configuration subset header.

  Scopes the contained descriptors to one configuration
  (wDescriptorType 0x01).
  """
  pass

ConfigurationSubsetHeader.AddComputedField('wLength', 'H', 'struct_size')
ConfigurationSubsetHeader.AddFixedField('wDescriptorType', 'H', 0x01)
ConfigurationSubsetHeader.AddField('bConfigurationValue', 'B')
ConfigurationSubsetHeader.AddFixedField('bReserved', 'B', 0)
ConfigurationSubsetHeader.AddComputedField('wTotalLength', 'H', 'total_size')
+
+
class FunctionSubsetHeader(usb_descriptors.DescriptorContainer):
  """Microsoft OS 2.0 function subset header.

  Scopes the contained descriptors to the function starting at
  bFirstInterface (wDescriptorType 0x02).
  """
  pass

FunctionSubsetHeader.AddComputedField('wLength', 'H', 'struct_size')
FunctionSubsetHeader.AddFixedField('wDescriptorType', 'H', 0x02)
FunctionSubsetHeader.AddField('bFirstInterface', 'B')
FunctionSubsetHeader.AddFixedField('bReserved', 'B', 0)
FunctionSubsetHeader.AddComputedField('wSubsetLength', 'H', 'total_size')
+
+
class CompatibleId(usb_descriptors.Descriptor):
  """Microsoft OS 2.0 compatible ID descriptor.

  Names a Windows-compatible driver ID for the enclosing scope
  (wDescriptorType 0x03). Both ID fields are fixed 8-byte strings.
  """
  pass

CompatibleId.AddComputedField('wLength', 'H', 'struct_size')
CompatibleId.AddFixedField('wDescriptorType', 'H', 0x03)
CompatibleId.AddField('CompatibleID', '8s')
CompatibleId.AddField('SubCompatibleID', '8s', default='')
diff --git a/chromium/tools/usb_gadget/package.py b/chromium/tools/usb_gadget/package.py
new file mode 100755
index 00000000000..1c500332566
--- /dev/null
+++ b/chromium/tools/usb_gadget/package.py
@@ -0,0 +1,95 @@
+#!/usr/bin/python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility to package and upload the USB gadget framework.
+"""
+
+import argparse
+import hashlib
+import os
+import StringIO
+import urllib2
+import zipfile
+
+
def MakeZip(directory=None, files=None):
  """Construct a zip file.

  Args:
    directory: Include Python source files from this directory
    files: Include these files

  Returns:
    A tuple of the buffer containing the zip file and its MD5 hash.
  """
  buf = StringIO.StringIO()
  archive = zipfile.PyZipFile(buf, 'w')
  if directory is not None:
    # writepy packages the directory's Python modules (as compiled files).
    archive.writepy(directory)
  if files is not None:
    for f in files:
      # Extra files are stored flat at the archive root.
      archive.write(f, os.path.basename(f))
  archive.close()
  content = buf.getvalue()
  buf.close()
  # The hash doubles as the package version (embedded in the upload
  # filename and verified by the server; see UploadZip).
  md5 = hashlib.md5(content).hexdigest()
  return content, md5
+
+
def EncodeBody(filename, buf):
  """Encodes a file as a multipart/form-data request body.

  Builds a single-part body using the fixed boundary "foo", matching the
  Content-Type header set by UploadZip.

  Args:
    filename: File name to report for the uploaded content.
    buf: Raw file content.

  Returns:
    The CRLF-delimited request body as a string.
  """
  disposition = ('Content-Disposition: form-data; name="file"; '
                 'filename="{}"'.format(filename))
  parts = [
      '--foo',
      disposition,
      'Content-Type: application/octet-stream',
      '',
      buf,
      '--foo--',
      '',
  ]
  return '\r\n'.join(parts)
+
+
def UploadZip(content, md5, host):
  """POSTs the packaged framework to a running gadget server.

  Args:
    content: Zip file content.
    md5: Hex MD5 digest of content; embedded in the upload filename, which
        the server re-verifies (see server.UpdateHandler).
    host: Target as "HOST[:PORT]".
  """
  filename = 'usb_gadget-{}.zip'.format(md5)
  req = urllib2.Request(url='http://{}/update'.format(host),
                        data=EncodeBody(filename, content))
  # Boundary "foo" must match the one hard-coded in EncodeBody.
  req.add_header('Content-Type', 'multipart/form-data; boundary=foo')
  urllib2.urlopen(req)
+
+
def main():
  """Parses arguments, builds the package and runs the requested outputs."""
  parser = argparse.ArgumentParser(
      description='Package (and upload) the USB gadget framework.')
  parser.add_argument(
      '--dir', type=str, metavar='DIR',
      help='package all Python files from DIR')
  parser.add_argument(
      '--zip-file', type=str, metavar='FILE',
      help='save package as FILE')
  parser.add_argument(
      '--hash-file', type=str, metavar='FILE',
      help='save package hash as FILE')
  parser.add_argument(
      '--upload', type=str, metavar='HOST[:PORT]',
      help='upload package to target system')
  parser.add_argument(
      'files', metavar='FILE', type=str, nargs='*',
      help='source files')

  args = parser.parse_args()

  content, md5 = MakeZip(directory=args.dir, files=args.files)
  # Each output is independent; any combination of the three may be given.
  if args.zip_file:
    with open(args.zip_file, 'wb') as zip_file:
      zip_file.write(content)
  if args.hash_file:
    with open(args.hash_file, 'wb') as hash_file:
      hash_file.write(md5)
  if args.upload:
    UploadZip(content, md5, args.upload)


if __name__ == '__main__':
  main()
diff --git a/chromium/tools/usb_gadget/server.py b/chromium/tools/usb_gadget/server.py
new file mode 100644
index 00000000000..e5aa8b8e5d4
--- /dev/null
+++ b/chromium/tools/usb_gadget/server.py
@@ -0,0 +1,170 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""WSGI application to manage a USB gadget.
+"""
+
+import datetime
+import hashlib
+import re
+import subprocess
+import sys
+import time
+import urllib2
+
+from tornado import httpserver
+from tornado import ioloop
+from tornado import web
+
+import default_gadget
+
# Extracts the 32-hex-digit MD5 from a package filename or sys.path entry.
VERSION_PATTERN = re.compile(r'.*usb_gadget-([a-z0-9]{32})\.zip')

# Module-level server state, read and mutated by the handlers below.
# NOTE(review): several of these are None here and appear to be populated by
# startup code outside this module — confirm against the entry point.
address = None       # externally reachable address of this server
chip = None          # chip driver binding (e.g. gadgetfs)
claimed_by = None    # session id currently holding the device claim, if any
default = default_gadget.DefaultGadget()
gadget = None        # currently active gadget
hardware = None      # hardware type name (passed through on self-update)
interface = None     # network interface (passed through on self-update)
port = None          # HTTP listen port
+
+
def SwitchGadget(new_gadget):
  """Makes new_gadget the active gadget, rebinding the chip driver.

  Destroys the current binding if one exists, installs new_gadget as the
  module-level `gadget`, stamps it with the server address as string
  descriptor 3, and binds it to the chip.
  """
  if chip.IsConfigured():
    chip.Destroy()

  global gadget
  gadget = new_gadget
  gadget.AddStringDescriptor(3, address)
  chip.Create(gadget)
+
+
class VersionHandler(web.RequestHandler):
  """Reports the package version (MD5) parsed from sys.path."""

  def get(self):
    # When running from an uploaded zip, the archive is on sys.path and its
    # name embeds the package MD5; otherwise report 'unpackaged'.
    matches = (VERSION_PATTERN.match(entry) for entry in sys.path)
    versions = [m.group(1) for m in matches if m]
    self.write(versions[0] if versions else 'unpackaged')
+
+
class UpdateHandler(web.RequestHandler):
  """Accepts an uploaded usb_gadget package and hands control over to it.

  The uploaded zip's filename must embed its MD5 hash, which is verified
  against the content. After saving the file this process stops its own
  HTTP server and chip binding, launches the new package as a child process
  and polls the child's /version endpoint; on success it schedules its own
  shutdown, and if the child exits instead it resumes serving.
  """

  def post(self):
    fileinfo = self.request.files['file'][0]

    match = VERSION_PATTERN.match(fileinfo['filename'])
    if match is None:
      self.write('Filename must contain MD5 hash.')
      self.set_status(400)
      return

    content = fileinfo['body']
    md5sum = hashlib.md5(content).hexdigest()
    if md5sum != match.group(1):
      self.write('File hash does not match.')
      self.set_status(400)
      return

    filename = 'usb_gadget-{}.zip'.format(md5sum)
    with open(filename, 'wb') as f:
      f.write(content)

    # Relaunch with this server's configuration; pass any active claim
    # through so the new instance starts claimed by the same session.
    args = ['/usr/bin/python', filename,
            '--interface', interface,
            '--port', str(port),
            '--hardware', hardware]
    if claimed_by is not None:
      args.extend(['--start-claimed', claimed_by])

    print 'Reloading with version {}...'.format(md5sum)

    global http_server
    # Release the listen port and the USB controller for the child.
    if chip.IsConfigured():
      chip.Destroy()
    http_server.stop()

    child = subprocess.Popen(args, close_fds=True)

    while True:
      child.poll()
      if child.returncode is not None:
        # Child died during startup: report failure and resume serving with
        # the previous gadget.
        self.write('New package exited with error {}.'
                   .format(child.returncode))
        self.set_status(500)

        http_server = httpserver.HTTPServer(app)
        http_server.listen(port)
        SwitchGadget(gadget)
        return

      try:
        f = urllib2.urlopen('http://{}/version'.format(address))
        if f.getcode() == 200:
          # Update complete, wait 1 second to make sure buffers are flushed.
          io_loop = ioloop.IOLoop.instance()
          io_loop.add_timeout(datetime.timedelta(seconds=1), io_loop.stop)
          return
      except urllib2.URLError:
        pass
      time.sleep(0.1)
+
+
class ClaimHandler(web.RequestHandler):
  """Grants exclusive use of the device to the requesting session."""

  def post(self):
    global claimed_by

    if claimed_by is None:
      claimed_by = self.get_argument('session_id')
    else:
      # Already claimed; refuse rather than silently reassigning.
      self.write('Device is already claimed by "{}".'.format(claimed_by))
      self.set_status(403)
+
+
class UnclaimHandler(web.RequestHandler):
  """Releases any claim and restores the default gadget."""

  def post(self):
    global claimed_by
    claimed_by = None
    # Revert to the default gadget so the next claimant starts clean.
    if gadget != default:
      SwitchGadget(default)
+
+
class UnconfigureHandler(web.RequestHandler):
  """Switches back to the default gadget (leaves any claim in place)."""

  def post(self):
    SwitchGadget(default)
+
+
class DisconnectHandler(web.RequestHandler):
  """Destroys the chip binding, taking the gadget off the bus."""

  def post(self):
    if chip.IsConfigured():
      chip.Destroy()
+
+
class ReconnectHandler(web.RequestHandler):
  """Re-binds the current gadget if it is not already configured."""

  def post(self):
    if not chip.IsConfigured():
      chip.Create(gadget)
+
+
# Core management routes. Gadget-specific routes (e.g. /keyboard/*,
# /mouse/*) are added at runtime via server.app.add_handlers — see
# RegisterHandlers in the gadget modules.
app = web.Application([
    (r'/version', VersionHandler),
    (r'/update', UpdateHandler),
    (r'/claim', ClaimHandler),
    (r'/unclaim', UnclaimHandler),
    (r'/unconfigure', UnconfigureHandler),
    (r'/disconnect', DisconnectHandler),
    (r'/reconnect', ReconnectHandler),
])

# Rebuilt by UpdateHandler when a self-update fails and serving resumes.
http_server = httpserver.HTTPServer(app)
diff --git a/chromium/tools/usb_gadget/usb_constants.py b/chromium/tools/usb_gadget/usb_constants.py
new file mode 100644
index 00000000000..39710c26952
--- /dev/null
+++ b/chromium/tools/usb_gadget/usb_constants.py
@@ -0,0 +1,191 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""USB constant definitions.
+"""
+
+
class DescriptorType(object):
  """Descriptor Types.

  See Universal Serial Bus Specification Revision 2.0 Table 9-5.
  """
  DEVICE = 1
  CONFIGURATION = 2
  STRING = 3
  INTERFACE = 4
  ENDPOINT = 5
  QUALIFIER = 6
  OTHER_SPEED_CONFIGURATION = 7
  # BOS and DEVICE_CAPABILITY are defined by the USB 3.x specifications.
  BOS = 15
  DEVICE_CAPABILITY = 16
+
+
class CapabilityType(object):
  """Device capability types.

  See Universal Serial Bus 3.1 Specification, Revision 1.0 Table 9-14.
  """
  WIRELESS_USB = 0x01
  USB_20_EXTENSION = 0x02
  SUPERSPEED_USB = 0x03
  CONTAINER_ID = 0x04
  # Platform capabilities carry a 128-bit UUID payload; used by the MS OS
  # 2.0 support in msos20_descriptors.
  PLATFORM = 0x05
  POWER_DELIVERY_CAPABILITY = 0x06
  BATTERY_INFO_CAPABILITY = 0x07
  PD_CONSUMER_PORT_CAPABILITY = 0x08
  PD_PROVIDER_PORT_CAPABILITY = 0x09
  SUPERSPEED_PLUS = 0x0A
  PRECISION_TIME_MEASUREMENT = 0x0B
  WIRELESS_USB_EXT = 0x0C
+
+
class DeviceClass(object):
  """Class code.

  See http://www.usb.org/developers/defined_class.
  """
  # 0 means the class is declared per interface rather than per device.
  PER_INTERFACE = 0
  AUDIO = 1
  COMM = 2
  HID = 3
  PHYSICAL = 5
  STILL_IMAGE = 6
  PRINTER = 7
  MASS_STORAGE = 8
  HUB = 9
  CDC_DATA = 10
  CSCID = 11
  CONTENT_SEC = 13
  VIDEO = 14
  VENDOR = 0xFF
+
+
class DeviceSubClass(object):
  """Subclass code.

  See http://www.usb.org/developers/defined_class.
  """
  PER_INTERFACE = 0
  VENDOR = 0xFF  # vendor-specific subclass
+
+
class DeviceProtocol(object):
  """Protocol code.

  See http://www.usb.org/developers/defined_class.
  """
  PER_INTERFACE = 0
  VENDOR = 0xFF  # vendor-specific protocol
+
+
class InterfaceClass(object):
  """Class code.

  See http://www.usb.org/developers/defined_class.
  """
  VENDOR = 0xFF  # vendor-specific interface class
+
+
class InterfaceSubClass(object):
  """Subclass code.

  See http://www.usb.org/developers/defined_class.
  """
  VENDOR = 0xFF  # vendor-specific interface subclass
+
+
class InterfaceProtocol(object):
  """Protocol code.

  See http://www.usb.org/developers/defined_class.
  """
  VENDOR = 0xFF  # vendor-specific interface protocol
+
+
class TransferType(object):
  """Endpoint transfer types.

  Encoded in the low two bits of an endpoint descriptor's bmAttributes
  (masked out with MASK; see linux_gadgetfs.StartEndpoint). See Universal
  Serial Bus Specification Revision 2.0, Section 9.6.6.
  """
  MASK = 3
  CONTROL = 0
  ISOCHRONOUS = 1
  BULK = 2
  INTERRUPT = 3
+
+
class Dir(object):
  """Data transfer direction.

  See Universal Serial Bus Specification Revision 2.0 Table 9-2.
  """
  # Direction is bit 7 of bmRequestType / bEndpointAddress; IN is
  # device-to-host (tested with `addr & Dir.IN` throughout this package).
  OUT = 0
  IN = 0x80
+
+
class Type(object):
  """Request Type.

  See Universal Serial Bus Specification Revision 2.0 Table 9-2.
  """
  MASK = 0x60  # bits 6..5 of bmRequestType
  STANDARD = 0x00
  CLASS = 0x20
  VENDOR = 0x40
  RESERVED = 0x60
+
+
class Recipient(object):
  """Request Recipient.

  See Universal Serial Bus Specification Revision 2.0 Table 9-2.
  """
  MASK = 0x1f  # bits 4..0 of bmRequestType
  DEVICE = 0
  INTERFACE = 1
  ENDPOINT = 2
  OTHER = 3
+
+
class Request(object):
  """Standard Request Codes.

  See Universal Serial Bus Specification Revision 2.0 Table 9-4.
  """
  GET_STATUS = 0x00
  CLEAR_FEATURE = 0x01
  SET_FEATURE = 0x03
  SET_ADDRESS = 0x05
  GET_DESCRIPTOR = 0x06
  SET_DESCRIPTOR = 0x07
  GET_CONFIGURATION = 0x08
  SET_CONFIGURATION = 0x09
  GET_INTERFACE = 0x0A
  SET_INTERFACE = 0x0B
  SYNCH_FRAME = 0x0C
  # SET_SEL and SET_ISOCH_DELAY are defined by the USB 3.x specifications.
  SET_SEL = 0x30
  SET_ISOCH_DELAY = 0x31
+
+
class Speed(object):
  """USB connection speeds.

  Passed to Gadget.Connected; linux_gadgetfs forwards the raw speed value
  from gadgetfs connect events, so these values match that ABI's
  enumeration.
  """
  UNKNOWN = 0
  LOW = 1
  FULL = 2
  HIGH = 3
  WIRELESS = 4
  SUPER = 5
+
+
class VendorID(object):
  """USB vendor IDs used by the gadget framework."""
  GOOGLE = 0x18D1
+
+
class ProductID(object):
  """USB product IDs assigned to the example gadgets in this package."""
  GOOGLE_TEST_GADGET = 0x58F0
  GOOGLE_KEYBOARD_GADGET = 0x58F1
  GOOGLE_MOUSE_GADGET = 0x58F2
  GOOGLE_HID_ECHO_GADGET = 0x58F3
  GOOGLE_ECHO_GADGET = 0x58F4
  GOOGLE_COMPOSITE_ECHO_GADGET = 0x58F5
diff --git a/chromium/tools/usb_gadget/usb_descriptors.py b/chromium/tools/usb_gadget/usb_descriptors.py
new file mode 100644
index 00000000000..1ec5dbd41e4
--- /dev/null
+++ b/chromium/tools/usb_gadget/usb_descriptors.py
@@ -0,0 +1,454 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""USB descriptor generation utilities.
+
+Classes to represent and generate USB descriptors.
+"""
+
+import struct
+
+import hid_constants
+import usb_constants
+
+
class Field(object):
  """USB descriptor field information."""

  def __init__(self, name, str_fmt, struct_fmt, required):
    """Define a new USB descriptor field.

    Args:
      name: Name of the field.
      str_fmt: Python 'string' module format string for this field.
      struct_fmt: Python 'struct' module format string for this field.
      required: Is this a required field?
    """
    self.name = name
    self.str_fmt = str_fmt
    self.struct_fmt = struct_fmt
    self.required = required

  def Format(self, value):
    """Returns |value| rendered with this field's display format."""
    return self.str_fmt.format(value)


class Descriptor(object):
  """Base class for USB descriptor types.

  This class provides general functionality for creating object types that
  represent USB descriptors. The AddField and related methods are used to
  define the fields of each structure. Fields can then be set using keyword
  arguments to the object constructor or by accessing properties on the object.
  """

  _fields = None

  @classmethod
  def _OwnFields(cls):
    """Returns this class's own field list, creating it on first use.

    Each class gets its own list (copying any fields inherited from a base
    class) so that registering a field on a subclass never appends to -- and
    thereby corrupts -- the field list of its base class.
    """
    if cls._fields is None:
      cls._fields = []
    elif '_fields' not in vars(cls):
      cls._fields = list(cls._fields)
    return cls._fields

  @classmethod
  def AddField(cls, name, struct_fmt, str_fmt='{}', default=None):
    """Adds a user-specified field to this descriptor.

    Adds a field to the binary structure representing this descriptor. The
    field can be set by passing a keyword argument name=... to the object
    constructor and will be accessible as foo.name on any instance.

    If no default value is provided then the constructor will raise an
    exception if this field is not one of the provided keyword arguments.

    Args:
      name: String name of the field.
      struct_fmt: Python 'struct' module format string for this field.
      str_fmt: Python 'string' module format string for this field.
      default: Default value.
    """
    cls._OwnFields().append(Field(name, str_fmt, struct_fmt, default is None))

    member_name = '_{}'.format(name)

    def Setter(self, value):
      setattr(self, member_name, value)

    def Getter(self):
      try:
        return getattr(self, member_name)
      except AttributeError:
        # Only optional fields may be unset; required fields are validated
        # in __init__.
        assert default is not None
        return default

    setattr(cls, name, property(Getter, Setter))

  @classmethod
  def AddFixedField(cls, name, struct_fmt, value, str_fmt='{}'):
    """Adds a constant field to this descriptor.

    Adds a constant field to the binary structure representing this descriptor.
    The field will be accessible as foo.name on any instance.

    The value of this field may not be given as a constructor parameter or
    set on an existing instance.

    Args:
      name: String name of the field.
      struct_fmt: Python 'struct' module format string for this field.
      value: Field value.
      str_fmt: Python 'string' module format string for this field.
    """
    cls._OwnFields().append(Field(name, str_fmt, struct_fmt, False))

    def Setter(unused_self, unused_value):
      raise RuntimeError('{} is a fixed field.'.format(name))

    def Getter(unused_self):
      return value

    setattr(cls, name, property(Getter, Setter))

  @classmethod
  def AddComputedField(cls, name, struct_fmt, property_name, str_fmt='{}'):
    """Adds a computed field to this descriptor.

    Adds a field to the binary structure representing this descriptor whose
    value is equal to an object property. The field will be accessible as
    foo.name on any instance.

    The value of this field may not be given as a constructor parameter or
    set on an existing instance.

    Args:
      name: String name of the field.
      struct_fmt: Python 'struct' module format string for this field.
      property_name: Property to read.
      str_fmt: Python 'string' module format string for this field.
    """
    cls._OwnFields().append(Field(name, str_fmt, struct_fmt, False))

    def Setter(unused_self, unused_value):
      raise RuntimeError('{} is a computed field.'.format(name))

    def Getter(self):
      return getattr(self, property_name)

    setattr(cls, name, property(Getter, Setter))

  def __init__(self, **kwargs):
    """Constructs a new instance of this descriptor.

    All fields which do not have a default value and are not fixed or computed
    from a property must be specified as keyword arguments.

    Args:
      **kwargs: Field values.

    Raises:
      TypeError: A required field was missing or an unexpected field was given.
    """
    fields = {field.name for field in self._fields}
    required_fields = {field.name for field in self._fields if field.required}

    # dict.items() (rather than the Python 2-only iteritems()) keeps this
    # working on both Python 2 and Python 3.
    for arg, value in kwargs.items():
      if arg not in fields:
        raise TypeError('Unexpected field: {}'.format(arg))

      setattr(self, arg, value)
      required_fields.discard(arg)

    if required_fields:
      raise TypeError('Missing fields: {}'.format(', '.join(required_fields)))

  @property
  def fmt(self):
    """Returns the Python 'struct' module format string for this descriptor."""
    return '<{}'.format(''.join([field.struct_fmt for field in self._fields]))

  @property
  def struct_size(self):
    """Returns the size of the struct defined by fmt."""
    return struct.calcsize(self.fmt)

  @property
  def total_size(self):
    """Returns the total size of this descriptor."""
    return self.struct_size

  def Encode(self):
    """Returns the binary representation of this descriptor."""
    values = [getattr(self, field.name) for field in self._fields]
    return struct.pack(self.fmt, *values)

  def __str__(self):
    """Pretty-prints the descriptor, one 'name: value' line per field."""
    max_length = max(len(field.name) for field in self._fields)

    return '{}:\n  {}'.format(
        self.__class__.__name__,
        '\n  '.join('{} {}'.format(
            '{}:'.format(field.name).ljust(max_length+1),
            field.Format(getattr(self, field.name))
        ) for field in self._fields)
    )
+
+
class DeviceDescriptor(Descriptor):
  """Standard Device Descriptor.

  See Universal Serial Bus Specification Revision 2.0 Table 9-8.
  """
  pass

# Field registrations. bLength is computed from the packed struct size;
# bDescriptorType is fixed to the DEVICE descriptor code.
DeviceDescriptor.AddComputedField('bLength', 'B', 'struct_size')
DeviceDescriptor.AddFixedField('bDescriptorType', 'B',
                               usb_constants.DescriptorType.DEVICE)
# bcdUSB defaults to 0x0200 (USB 2.0).
DeviceDescriptor.AddField('bcdUSB', 'H', default=0x0200, str_fmt='0x{:04X}')
DeviceDescriptor.AddField('bDeviceClass', 'B',
                          default=usb_constants.DeviceClass.PER_INTERFACE)
DeviceDescriptor.AddField('bDeviceSubClass', 'B',
                          default=usb_constants.DeviceSubClass.PER_INTERFACE)
DeviceDescriptor.AddField('bDeviceProtocol', 'B',
                          default=usb_constants.DeviceProtocol.PER_INTERFACE)
DeviceDescriptor.AddField('bMaxPacketSize0', 'B', default=64)
# idVendor, idProduct and bcdDevice have no defaults and must be supplied by
# the caller.
DeviceDescriptor.AddField('idVendor', 'H', str_fmt='0x{:04X}')
DeviceDescriptor.AddField('idProduct', 'H', str_fmt='0x{:04X}')
DeviceDescriptor.AddField('bcdDevice', 'H', str_fmt='0x{:04X}')
DeviceDescriptor.AddField('iManufacturer', 'B', default=0)
DeviceDescriptor.AddField('iProduct', 'B', default=0)
DeviceDescriptor.AddField('iSerialNumber', 'B', default=0)
DeviceDescriptor.AddField('bNumConfigurations', 'B', default=1)
+
+
class DescriptorContainer(Descriptor):
  """Super-class for descriptors which contain more descriptors.

  This class adds the ability for a descriptor to have an array of additional
  descriptors which follow it.
  """

  def __init__(self, **kwargs):
    super(DescriptorContainer, self).__init__(**kwargs)
    self._descriptors = []

  @property
  def total_size(self):
    """Size of this descriptor plus all descriptors that follow it."""
    return self.struct_size + sum(descriptor.total_size
                                  for descriptor in self._descriptors)

  def Add(self, descriptor):
    """Appends |descriptor| to the list of trailing descriptors."""
    self._descriptors.append(descriptor)

  def Encode(self):
    """Returns this descriptor followed by its children as a byte string."""
    bufs = [super(DescriptorContainer, self).Encode()]
    bufs.extend(descriptor.Encode() for descriptor in self._descriptors)
    # b''.join is correct on both Python 2 (where b'' == '') and Python 3
    # (where struct.pack returns bytes); ''.join breaks on Python 3.
    return b''.join(bufs)

  def __str__(self):
    return '{}\n{}'.format(super(DescriptorContainer, self).__str__(),
                           '\n'.join(str(descriptor)
                                     for descriptor in self._descriptors))
+
+
class StringDescriptor(Descriptor):
  """Standard String Descriptor.

  See Universal Serial Bus Specification Revision 2.0 Table 9-16.
  """

  def __init__(self, **kwargs):
    # bString is not a struct-packed field; it is appended as a UTF-16LE
    # payload, so pop it before the base class validates the kwargs.
    self.bString = kwargs.pop('bString', '')
    super(StringDescriptor, self).__init__(**kwargs)

  @property
  def total_size(self):
    # The encoded string payload follows the fixed-size header.
    return self.struct_size + len(self.bString.encode('UTF-16LE'))

  def Encode(self):
    return (
        super(StringDescriptor, self).Encode() +
        self.bString.encode('UTF-16LE'))

  def __str__(self):
    return '{}\n  bString: "{}"'.format(
        super(StringDescriptor, self).__str__(), self.bString)

# bLength includes the UTF-16 payload, so it is total_size, not struct_size.
StringDescriptor.AddComputedField('bLength', 'B', 'total_size')
StringDescriptor.AddFixedField(
    'bDescriptorType', 'B', usb_constants.DescriptorType.STRING)
+
+
class ConfigurationDescriptor(DescriptorContainer):
  """Standard Configuration Descriptor.

  See Universal Serial Bus Specification Revision 2.0 Table 9-10.
  """

  def __init__(self, **kwargs):
    super(ConfigurationDescriptor, self).__init__(**kwargs)
    # Maps (bInterfaceNumber, bAlternateSetting) to InterfaceDescriptor.
    self._interfaces = {}

  @property
  def num_interfaces(self):
    """Number of distinct interface numbers (alternate settings collapse)."""
    # Iterating the dict directly yields its keys and works on both Python 2
    # and Python 3; dict.iterkeys() does not exist on Python 3.
    interface_numbers = {key[0] for key in self._interfaces}
    return len(interface_numbers)

  def AddInterface(self, interface):
    """Adds an interface descriptor to this configuration.

    Raises:
      RuntimeError: If the (number, alternate) pair is already defined.
    """
    key = (interface.bInterfaceNumber, interface.bAlternateSetting)
    if key in self._interfaces:
      raise RuntimeError('Interface {} (alternate {}) already defined.'
                         .format(key[0], key[1]))
    self._interfaces[key] = interface
    self.Add(interface)

  def GetInterfaces(self):
    return self._interfaces.values()

ConfigurationDescriptor.AddComputedField('bLength', 'B', 'struct_size')
ConfigurationDescriptor.AddFixedField(
    'bDescriptorType', 'B', usb_constants.DescriptorType.CONFIGURATION)
ConfigurationDescriptor.AddComputedField('wTotalLength', 'H', 'total_size')
ConfigurationDescriptor.AddComputedField('bNumInterfaces', 'B',
                                         'num_interfaces')
ConfigurationDescriptor.AddField('bConfigurationValue', 'B', default=1)
ConfigurationDescriptor.AddField('iConfiguration', 'B', default=0)
ConfigurationDescriptor.AddField('bmAttributes', 'B', str_fmt='0x{:02X}')
ConfigurationDescriptor.AddField('MaxPower', 'B')
+
+
class InterfaceDescriptor(DescriptorContainer):
  """Standard Interface Descriptor.

  See Universal Serial Bus Specification Revision 2.0 Table 9-12.
  """

  def __init__(self, **kwargs):
    super(InterfaceDescriptor, self).__init__(**kwargs)
    # Maps bEndpointAddress to EndpointDescriptor.
    self._endpoints = {}

  @property
  def num_endpoints(self):
    return len(self._endpoints)

  def AddEndpoint(self, endpoint):
    """Adds an endpoint descriptor to this interface.

    Raises:
      RuntimeError: If an endpoint with this address is already present.
    """
    if endpoint.bEndpointAddress in self._endpoints:
      raise RuntimeError('Endpoint 0x{:02X} already defined on this interface.'
                         .format(endpoint.bEndpointAddress))
    self._endpoints[endpoint.bEndpointAddress] = endpoint
    self.Add(endpoint)

  def GetEndpoints(self):
    return self._endpoints.values()

InterfaceDescriptor.AddComputedField('bLength', 'B', 'struct_size')
InterfaceDescriptor.AddFixedField('bDescriptorType', 'B',
                                  usb_constants.DescriptorType.INTERFACE)
InterfaceDescriptor.AddField('bInterfaceNumber', 'B')
InterfaceDescriptor.AddField('bAlternateSetting', 'B', default=0)
InterfaceDescriptor.AddComputedField('bNumEndpoints', 'B', 'num_endpoints')
# Class/subclass/protocol default to vendor-specific codes.
InterfaceDescriptor.AddField('bInterfaceClass', 'B',
                             default=usb_constants.InterfaceClass.VENDOR)
InterfaceDescriptor.AddField('bInterfaceSubClass', 'B',
                             default=usb_constants.InterfaceSubClass.VENDOR)
InterfaceDescriptor.AddField('bInterfaceProtocol', 'B',
                             default=usb_constants.InterfaceProtocol.VENDOR)
InterfaceDescriptor.AddField('iInterface', 'B', default=0)
+
+
class EndpointDescriptor(Descriptor):
  """Standard Endpoint Descriptor.

  See Universal Serial Bus Specification Revision 2.0 Table 9-13.
  """
  pass

EndpointDescriptor.AddComputedField('bLength', 'B', 'struct_size')
EndpointDescriptor.AddFixedField('bDescriptorType', 'B',
                                 usb_constants.DescriptorType.ENDPOINT)
# Address, attributes, packet size and interval must be given by the caller.
EndpointDescriptor.AddField('bEndpointAddress', 'B', str_fmt='0x{:02X}')
EndpointDescriptor.AddField('bmAttributes', 'B', str_fmt='0x{:02X}')
EndpointDescriptor.AddField('wMaxPacketSize', 'H')
EndpointDescriptor.AddField('bInterval', 'B')
+
+
class HidDescriptor(Descriptor):
  """HID Descriptor.

  See Device Class Definition for Human Interface Devices (HID) Version 1.11
  section 6.2.1.
  """

  def __init__(self, **kwargs):
    super(HidDescriptor, self).__init__(**kwargs)
    # List of (bDescriptorType, wDescriptorLength) pairs appended to the
    # fixed-size header.
    self._descriptors = []

  def AddDescriptor(self, typ, length):
    """Registers a class descriptor of the given type and length."""
    self._descriptors.append((typ, length))

  @property
  def struct_size(self):
    # Each registered descriptor contributes 3 bytes: type (B) + length (H).
    return super(HidDescriptor, self).struct_size + self.num_descriptors * 3

  @property
  def num_descriptors(self):
    return len(self._descriptors)

  def Encode(self):
    bufs = [super(HidDescriptor, self).Encode()]
    bufs.extend(struct.pack('<BH', typ, length)
                for typ, length in self._descriptors)
    # b''.join is correct on both Python 2 (where b'' == '') and Python 3
    # (where struct.pack returns bytes); ''.join breaks on Python 3.
    return b''.join(bufs)

  def __str__(self):
    return '{}\n{}'.format(
        super(HidDescriptor, self).__str__(),
        '\n'.join('  bDescriptorType: 0x{:02X}\n  wDescriptorLength: {}'
                  .format(typ, length) for typ, length in self._descriptors))

HidDescriptor.AddComputedField('bLength', 'B', 'struct_size')
HidDescriptor.AddFixedField('bDescriptorType', 'B',
                            hid_constants.DescriptorType.HID)
HidDescriptor.AddField('bcdHID', 'H', default=0x0111, str_fmt='0x{:04X}')
HidDescriptor.AddField('bCountryCode', 'B', default=0)
HidDescriptor.AddComputedField('bNumDescriptors', 'B', 'num_descriptors')
+
+
class BosDescriptor(DescriptorContainer):
  """Binary Device Object Store descriptor.

  See Universal Serial Bus 3.1 Specification, Revision 1.0 Table 9-12.
  """

  def __init__(self, **kwargs):
    super(BosDescriptor, self).__init__(**kwargs)
    # Device capability descriptors appended after the BOS header.
    self._device_caps = []

  @property
  def num_device_caps(self):
    return len(self._device_caps)

  def AddDeviceCapability(self, device_capability):
    """Appends |device_capability| to this BOS descriptor."""
    self._device_caps.append(device_capability)
    self.Add(device_capability)

  def GetDeviceCapabilities(self):
    return self._device_caps

BosDescriptor.AddComputedField('bLength', 'B', 'struct_size')
BosDescriptor.AddFixedField('bDescriptorType', 'B',
                            usb_constants.DescriptorType.BOS)
BosDescriptor.AddComputedField('wTotalLength', 'H', 'total_size')
BosDescriptor.AddComputedField('bNumDeviceCaps', 'B', 'num_device_caps')
+
+
class ContainerIdDescriptor(Descriptor):
  """Container ID descriptor.

  See Universal Serial Bus 3.1 Specification, Revision 1.0 Table 9-17.
  """
  pass

ContainerIdDescriptor.AddComputedField('bLength', 'B', 'struct_size')
ContainerIdDescriptor.AddFixedField(
    'bDescriptorType', 'B', usb_constants.DescriptorType.DEVICE_CAPABILITY)
ContainerIdDescriptor.AddFixedField(
    'bDevCapabilityType', 'B', usb_constants.CapabilityType.CONTAINER_ID)
ContainerIdDescriptor.AddFixedField('bReserved', 'B', 0)
# '16s' packs the ContainerID as 16 raw bytes (a 128-bit UUID).
ContainerIdDescriptor.AddField('ContainerID', '16s')
diff --git a/chromium/tools/usb_gadget/usb_descriptors_test.py b/chromium/tools/usb_gadget/usb_descriptors_test.py
new file mode 100755
index 00000000000..79f7d798530
--- /dev/null
+++ b/chromium/tools/usb_gadget/usb_descriptors_test.py
@@ -0,0 +1,214 @@
+#!/usr/bin/python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import hid_constants
+import usb_descriptors
+
+
# Minimal Descriptor subclasses, one per field-registration mechanism, used
# as fixtures by the tests below.


class DescriptorWithField(usb_descriptors.Descriptor):
  pass

DescriptorWithField.AddField('bField', 'B')


class DescriptorWithDefault(usb_descriptors.Descriptor):
  pass

DescriptorWithDefault.AddField('bDefault', 'B', default=42)


class DescriptorWithFixed(usb_descriptors.Descriptor):
  pass

DescriptorWithFixed.AddFixedField('bFixed', 'B', 42)


class DescriptorWithComputed(usb_descriptors.Descriptor):

  # Backing property read by the computed field registered below.
  @property
  def foo(self):
    return 42

DescriptorWithComputed.AddComputedField('bComputed', 'B', 'foo')


class DescriptorWithDescriptors(usb_descriptors.DescriptorContainer):
  pass

DescriptorWithDescriptors.AddField('bType', 'B')
+
+
class DescriptorTest(unittest.TestCase):
  """Tests for the generic Descriptor field machinery.

  Uses assertEqual rather than the deprecated assertEquals alias.
  """

  def test_default(self):
    obj = DescriptorWithDefault()
    self.assertEqual(obj.bDefault, 42)

  def test_change_default(self):
    obj = DescriptorWithDefault()
    obj.bDefault = 1
    self.assertEqual(obj.bDefault, 1)

  def test_override_default(self):
    obj = DescriptorWithDefault(bDefault=56)
    self.assertEqual(obj.bDefault, 56)

  def test_fixed(self):
    obj = DescriptorWithFixed()
    self.assertEqual(obj.bFixed, 42)

  def test_set_fixed(self):
    with self.assertRaises(RuntimeError):
      DescriptorWithFixed(bFixed=1)

  def test_modify_fixed(self):
    obj = DescriptorWithFixed()
    with self.assertRaises(RuntimeError):
      obj.bFixed = 1

  def test_computed(self):
    obj = DescriptorWithComputed()
    self.assertEqual(obj.bComputed, 42)

  def test_set_computed(self):
    with self.assertRaises(RuntimeError):
      DescriptorWithComputed(bComputed=1)

  def test_modify_computed(self):
    obj = DescriptorWithComputed()
    with self.assertRaises(RuntimeError):
      obj.bComputed = 1

  def test_unexpected(self):
    with self.assertRaisesRegexp(TypeError, 'Unexpected'):
      DescriptorWithField(bUnexpected=1)

  def test_missing(self):
    with self.assertRaisesRegexp(TypeError, 'Missing'):
      DescriptorWithField()

  def test_size(self):
    obj = DescriptorWithField(bField=42)
    self.assertEqual(obj.struct_size, 1)
    self.assertEqual(obj.total_size, 1)

  def test_encode(self):
    obj = DescriptorWithField(bField=0xff)
    self.assertEqual(obj.Encode(), '\xff')

  def test_string(self):
    obj = DescriptorWithField(bField=42)
    string = str(obj)
    self.assertIn('bField', string)
    self.assertIn('42', string)

  def test_container(self):
    parent = DescriptorWithDescriptors(bType=0)
    child1 = DescriptorWithField(bField=1)
    parent.Add(child1)
    child2 = DescriptorWithField(bField=2)
    parent.Add(child2)
    self.assertEqual(parent.total_size, 3)
    self.assertEqual(parent.Encode(), '\x00\x01\x02')
    string = str(parent)
    self.assertIn('bType', string)
    self.assertIn('bField', string)
+
+
class TestUsbDescriptors(unittest.TestCase):
  """End-to-end encoding tests for the concrete USB descriptor classes.

  Uses assertEqual rather than the deprecated assertEquals alias.
  """

  def test_device_descriptor(self):
    device_desc = usb_descriptors.DeviceDescriptor(
        idVendor=0xDEAD,
        idProduct=0xBEEF,
        bcdDevice=0x0100,
        bNumConfigurations=1)
    self.assertEqual(
        device_desc.Encode(),
        '\x12\x01\x00\x02\x00\x00\x00\x40\xAD\xDE\xEF\xBE\x00\x01\x00\x00\x00'
        '\x01')

  def test_unique_interfaces(self):
    interface_desc1 = usb_descriptors.InterfaceDescriptor(bInterfaceNumber=1)
    interface_desc2 = usb_descriptors.InterfaceDescriptor(bInterfaceNumber=1,
                                                          bAlternateSetting=1)
    interface_desc3 = usb_descriptors.InterfaceDescriptor(bInterfaceNumber=1)

    configuration_desc = usb_descriptors.ConfigurationDescriptor(
        bmAttributes=0xC0,
        MaxPower=100)
    configuration_desc.AddInterface(interface_desc1)
    configuration_desc.AddInterface(interface_desc2)
    with self.assertRaisesRegexp(RuntimeError, r'Interface 1 \(alternate 0\)'):
      configuration_desc.AddInterface(interface_desc3)

  def test_unique_endpoints(self):
    endpoint_desc1 = usb_descriptors.EndpointDescriptor(
        bEndpointAddress=0x01,
        bmAttributes=0x02,
        wMaxPacketSize=64,
        bInterval=1)
    endpoint_desc2 = usb_descriptors.EndpointDescriptor(
        bEndpointAddress=0x81,
        bmAttributes=0x02,
        wMaxPacketSize=64,
        bInterval=1)
    endpoint_desc3 = usb_descriptors.EndpointDescriptor(
        bEndpointAddress=0x01,
        bmAttributes=0x01,
        wMaxPacketSize=32,
        bInterval=10)

    interface_desc = usb_descriptors.InterfaceDescriptor(bInterfaceNumber=1)
    interface_desc.AddEndpoint(endpoint_desc1)
    interface_desc.AddEndpoint(endpoint_desc2)
    with self.assertRaisesRegexp(RuntimeError, 'Endpoint 0x01 already defined'):
      interface_desc.AddEndpoint(endpoint_desc3)

  def test_configuration_descriptor(self):
    endpoint_desc = usb_descriptors.EndpointDescriptor(
        bEndpointAddress=0x01,
        bmAttributes=0x02,
        wMaxPacketSize=64,
        bInterval=1)
    encoded_endpoint = '\x07\x05\x01\x02\x40\x00\x01'
    self.assertEqual(endpoint_desc.Encode(), encoded_endpoint)

    interface_desc = usb_descriptors.InterfaceDescriptor(bInterfaceNumber=1)
    interface_desc.AddEndpoint(endpoint_desc)
    self.assertEqual([endpoint_desc], interface_desc.GetEndpoints())
    encoded_interface = ('\x09\x04\x01\x00\x01\xFF\xFF\xFF\x00' +
                         encoded_endpoint)
    self.assertEqual(interface_desc.Encode(), encoded_interface)

    configuration_desc = usb_descriptors.ConfigurationDescriptor(
        bmAttributes=0xC0,
        MaxPower=100)
    configuration_desc.AddInterface(interface_desc)
    self.assertEqual([interface_desc], configuration_desc.GetInterfaces())
    encoded_configuration = ('\x09\x02\x19\x00\x01\x01\x00\xC0\x64' +
                             encoded_interface)
    self.assertEqual(configuration_desc.Encode(), encoded_configuration)

  def test_encode_hid_descriptor(self):
    hid_desc = usb_descriptors.HidDescriptor()
    hid_desc.AddDescriptor(hid_constants.DescriptorType.REPORT, 0x80)
    hid_desc.AddDescriptor(hid_constants.DescriptorType.PHYSICAL, 0x60)
    encoded_desc = '\x0C\x21\x11\x01\x00\x02\x22\x80\x00\x23\x60\x00'
    self.assertEqual(hid_desc.Encode(), encoded_desc)

  def test_print_hid_descriptor(self):
    hid_desc = usb_descriptors.HidDescriptor()
    hid_desc.AddDescriptor(hid_constants.DescriptorType.REPORT, 0x80)
    hid_desc.AddDescriptor(hid_constants.DescriptorType.PHYSICAL, 0x60)
    string = str(hid_desc)
    self.assertIn('0x22', string)
    self.assertIn('0x23', string)
+
+
+if __name__ == '__main__':
+ unittest.main()
diff --git a/chromium/tools/usb_gadget/usb_gadget.inf b/chromium/tools/usb_gadget/usb_gadget.inf
new file mode 100644
index 00000000000..06922073466
--- /dev/null
+++ b/chromium/tools/usb_gadget/usb_gadget.inf
@@ -0,0 +1,64 @@
+;
+; This INF file instructs Windows to load winusb.sys against the USB devices
+; implemented by the test gadget that don't implement a standard USB class.
+;
+
+[Version]
+Signature = "$Windows NT$"
+Class = USBDevice
+ClassGUID = {88BAE032-5A81-49f0-BC3D-A4FF138216D6}
+Provider = %ManufacturerName%
+CatalogFile = WinUSBInstallation.cat
+DriverVer = 09/04/2012,13.54.20.543
+
+; ========== Manufacturer/Models sections ===========
+
+[Manufacturer]
+%ManufacturerName% = Standard,NTx86,NTia64,NTamd64
+
+[Standard.NTx86]
+%USB\DefaultDevice.DeviceDesc% = USB_Install,USB\VID_18D1&PID_58F0
+%USB\EchoDevice.DeviceDesc% = USB_Install,USB\VID_18D1&PID_58F4
+
+[Standard.NTia64]
+%USB\DefaultDevice.DeviceDesc% = USB_Install,USB\VID_18D1&PID_58F0
+%USB\EchoDevice.DeviceDesc% = USB_Install,USB\VID_18D1&PID_58F4
+
+[Standard.NTamd64]
+%USB\DefaultDevice.DeviceDesc% = USB_Install,USB\VID_18D1&PID_58F0
+%USB\EchoDevice.DeviceDesc% = USB_Install,USB\VID_18D1&PID_58F4
+
+; ========== Class definition ===========
+
+[ClassInstall32]
+AddReg = ClassInstall_AddReg
+
+[ClassInstall_AddReg]
+HKR,,,,%ClassName%
+HKR,,NoInstallClass,,1
+HKR,,IconPath,%REG_MULTI_SZ%,"%systemroot%\system32\setupapi.dll,-20"
+HKR,,LowerLogoVersion,,5.2
+
+; =================== Installation ===================
+
+[USB_Install]
+Include = winusb.inf
+Needs = WINUSB.NT
+
+[USB_Install.Services]
+Include = winusb.inf
+Needs = WINUSB.NT.Services
+
+[USB_Install.HW]
+AddReg = Dev_AddReg
+
+[Dev_AddReg]
+HKR,,DeviceInterfaceGUIDs,0x10000,"{9f543223-cede-4fa3-b376-a25ce9a30e74}"
+
+; =================== Strings ===================
+
+[Strings]
+ManufacturerName = "Google, Inc."
+ClassName = "USB Test Devices"
+USB\DefaultDevice.DeviceDesc = "Test Gadget"
+USB\EchoDevice.DeviceDesc = "Echo Gadget"
diff --git a/chromium/tools/valgrind/OWNERS b/chromium/tools/valgrind/OWNERS
new file mode 100644
index 00000000000..29891316d31
--- /dev/null
+++ b/chromium/tools/valgrind/OWNERS
@@ -0,0 +1,3 @@
+bruening@chromium.org
+glider@chromium.org
+thestig@chromium.org
diff --git a/chromium/tools/valgrind/README b/chromium/tools/valgrind/README
new file mode 100644
index 00000000000..68f793f024a
--- /dev/null
+++ b/chromium/tools/valgrind/README
@@ -0,0 +1,10 @@
+Historically this directory has been a home for Valgrind and ThreadSanitizer.
+Since then other memory tools used in Chromium started squatting here and the
+name became confusing.
+We're replacing tools/valgrind with tools/memory/; new tools should go there.
+
+Attention: ThreadSanitizer v1 has been retired and files in this dir
+should not be used anymore. Please refer to
+http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+for the instructions on using ThreadSanitizer v2.
+Namely, the suppressions now reside in base/debug/tsan_suppressions.cc
diff --git a/chromium/tools/valgrind/asan/asan_symbolize.py b/chromium/tools/valgrind/asan/asan_symbolize.py
new file mode 100755
index 00000000000..2cdae08b18a
--- /dev/null
+++ b/chromium/tools/valgrind/asan/asan_symbolize.py
@@ -0,0 +1,271 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from third_party import asan_symbolize
+
+import argparse
+import base64
+import json
+import os
+import platform
+import re
+import subprocess
+import sys
+
class LineBuffered(object):
  """Wraps a file object so that it is flushed whenever a newline is written."""

  def __init__(self, stream):
    self.stream = stream

  def write(self, data):
    """Writes |data| and flushes the stream if |data| contains a newline."""
    stream = self.stream
    stream.write(data)
    if data.find('\n') != -1:
      stream.flush()

  def __getattr__(self, attr):
    # Everything except write() is delegated to the wrapped stream.
    return getattr(self.stream, attr)
+
+
def disable_buffering():
  """Makes this process and child processes stdout unbuffered."""
  if not os.environ.get('PYTHONUNBUFFERED'):
    # Since sys.stdout is a C++ object, it's impossible to do
    # sys.stdout.write = lambda...
    sys.stdout = LineBuffered(sys.stdout)
    # Child processes inherit the environment, so they become unbuffered too.
    os.environ['PYTHONUNBUFFERED'] = 'x'
+
+
def set_symbolizer_path():
  """Points LLVM_SYMBOLIZER_PATH at the llvm-symbolizer binary in the tree.

  A pre-existing LLVM_SYMBOLIZER_PATH environment variable is respected.
  """
  if os.environ.get('LLVM_SYMBOLIZER_PATH'):
    return
  here = os.path.dirname(os.path.abspath(__file__))
  # This script resides three levels below src/ (src/tools/valgrind/asan/).
  src_root = os.path.join(here, '..', '..', '..')
  symbolizer = os.path.join(src_root, 'third_party', 'llvm-build',
                            'Release+Asserts', 'bin', 'llvm-symbolizer')
  assert os.path.isfile(symbolizer)
  os.environ['LLVM_SYMBOLIZER_PATH'] = os.path.abspath(symbolizer)
+
+
def is_hash_name(name):
  """Returns True if |name| consists solely of lowercase hex digits."""
  return re.match(r'[0-9a-f]+$', name) is not None
+
+
def split_path(path):
  """Splits |path| into a list of all of its components."""
  components = []
  while True:
    parent, leaf = os.path.split(path)
    if parent == path:
      # os.path.split() is a fixed point: nothing left to peel off.
      return [parent] + components
    components.insert(0, leaf)
    path = parent
+
+
def chrome_product_dir_path(exe_path):
  """Returns the product directory containing |exe_path|, or None."""
  if exe_path is None:
    return None
  parts = split_path(exe_path)
  if len(parts) == 1:
    # Keep the product dir non-empty when |exe_path| is a bare filename.
    parts = ['.'] + parts
  for pos, component in enumerate(parts):
    if component.endswith('.app'):
      return os.path.join(*parts[:pos])
  # Not an .app bundle: a command-line binary sitting directly in the
  # product dir.
  return os.path.join(*parts[:-1])
+
+
# Memoizes find_inode_at_path() results so that repeated lookups of the same
# inode only shell out to `find` once.
inode_path_cache = {}


def find_inode_at_path(inode, path):
  """Returns a path under |path| whose inode number is |inode|.

  Shells out to `find`; the result (possibly ambiguous) is cached in
  |inode_path_cache|.
  NOTE(review): subprocess.check_output() returns bytes on Python 3; the
  str-based split below assumes Python 2 -- confirm before porting.
  """
  if inode in inode_path_cache:
    return inode_path_cache[inode]
  cmd = ['find', path, '-inum', str(inode)]
  find_line = subprocess.check_output(cmd).rstrip()
  lines = find_line.split('\n')
  ret = None
  if lines:
    # `find` may give us several paths (e.g. 'Chromium Framework' in the
    # product dir and 'Chromium Framework' inside 'Chromium.app',
    # chrome_dsym_hints() will produce correct .dSYM path for any of them.
    ret = lines[0]
  inode_path_cache[inode] = ret
  return ret
+
+
+# Create a binary name filter that works around https://crbug.com/444835.
+# When running tests on OSX swarming servers, ASan sometimes prints paths to
+# files in cache (ending with SHA1 filenames) instead of paths to hardlinks to
+# those files in the product dir.
+# For a given |binary_path| chrome_osx_binary_name_filter() returns one of the
+# hardlinks to the same inode in |product_dir_path|.
def make_chrome_osx_binary_name_filter(product_dir_path=''):
  """Returns a binary-name filter for the workaround described above.

  The filter maps a cache-file path (SHA1 basename) back to a hardlink with
  the same inode under |product_dir_path|; other paths pass through unchanged.
  """
  def chrome_osx_binary_name_filter(binary_path):
    leaf = os.path.basename(binary_path)
    if product_dir_path and is_hash_name(leaf):
      resolved = find_inode_at_path(os.stat(binary_path).st_ino,
                                    product_dir_path)
      if resolved:
        return resolved
    return binary_path
  return chrome_osx_binary_name_filter
+
+
+# Construct a path to the .dSYM bundle for the given binary.
+# There are three possible cases for binary location in Chromium:
+# 1. The binary is a standalone executable or dynamic library in the product
+# dir, the debug info is in "binary.dSYM" in the product dir.
+# 2. The binary is a standalone framework or .app bundle, the debug info is in
+# "Framework.framework.dSYM" or "App.app.dSYM" in the product dir.
+# 3. The binary is a framework or an .app bundle within another .app bundle
+# (e.g. Outer.app/Contents/Versions/1.2.3.4/Inner.app), and the debug info
+# is in Inner.app.dSYM in the product dir.
+# The first case is handled by llvm-symbolizer, so we only need to construct
+# .dSYM paths for .app bundles and frameworks.
+# We're assuming that there're no more than two nested bundles in the binary
+# path. Only one of these bundles may be a framework and frameworks cannot
+# contain other bundles.
def chrome_dsym_hints(binary):
  """Returns a list of candidate .dSYM bundle paths for |binary|.

  See the comment above for the three binary-layout cases; only bundled
  binaries (cases 2 and 3) produce a hint.
  """
  path_parts = split_path(binary)
  app_positions = []
  framework_positions = []
  for index, part in enumerate(path_parts):
    if part.endswith('.app'):
      app_positions.append(index)
    elif part.endswith('.framework'):
      framework_positions.append(index)
  bundle_positions = app_positions + framework_positions
  bundle_positions.sort()
  assert len(bundle_positions) <= 2, \
  "The path contains more than two nested bundles: %s" % binary
  if len(bundle_positions) == 0:
    # Case 1: this is a standalone executable or dylib.
    return []
  assert (not (len(app_positions) == 1 and
               len(framework_positions) == 1 and
               app_positions[0] > framework_positions[0])), \
  "The path contains an app bundle inside a framework: %s" % binary
  # Cases 2 and 3. The outermost bundle (which is the only bundle in the case 2)
  # is located in the product dir.
  outermost_bundle = bundle_positions[0]
  product_dir = path_parts[:outermost_bundle]
  # In case 2 this is the same as |outermost_bundle|.
  innermost_bundle = bundle_positions[-1]
  dsym_path = product_dir + [path_parts[innermost_bundle]]
  result = '%s.dSYM' % os.path.join(*dsym_path)
  return [result]
+
+
+# We want our output to match base::EscapeJSONString(), which produces
+# doubly-escaped strings. The first escaping pass is handled by this class. The
+# second pass happens when JSON data is dumped to file.
class StringEncoder(json.JSONEncoder):
  """JSON string encoder that performs the first of two escaping passes.

  The final output must match base::EscapeJSONString(), which produces
  doubly-escaped strings; the second pass happens when the JSON data is
  dumped to file.
  """

  def __init__(self):
    json.JSONEncoder.__init__(self)

  def encode(self, s):
    assert(isinstance(s, basestring))
    # Don't die on invalid utf-8 sequences.
    s = s.decode('utf-8', 'replace')
    encoded = json.JSONEncoder.encode(self, s)
    assert(len(encoded) >= 2)
    assert(encoded[0] == '"')
    assert(encoded[-1] == '"')
    # Strip the surrounding quotes; only the escaped payload is wanted.
    encoded = encoded[1:-1]
    # Special case from base::EscapeJSONString(). The backslash must be
    # escaped explicitly: the original '\u003C' only works on Python 2,
    # where \u is not an escape in byte-string literals; Python 3 would
    # interpret it as '<', turning this into a no-op.
    encoded = encoded.replace('<', '\\u003C')
    return encoded
+
+
class JSONTestRunSymbolizer(object):
  """Symbolizes the output snippets of test-launcher test runs in place."""

  def __init__(self, symbolization_loop):
    # An asan_symbolize.SymbolizationLoop-like object exposing process_line().
    self.symbolization_loop = symbolization_loop

  def symbolize_snippet(self, snippet):
    """Runs every line of |snippet| through the symbolization loop."""
    symbolized_lines = []
    for line in snippet.split('\n'):
      symbolized_lines += self.symbolization_loop.process_line(line)
    return '\n'.join(symbolized_lines)

  def symbolize(self, test_run):
    """Symbolizes |test_run|'s snippet, preserving the original fields."""
    original_snippet = base64.b64decode(test_run['output_snippet_base64'])
    symbolized_snippet = self.symbolize_snippet(original_snippet)
    if symbolized_snippet == original_snippet:
      # No sanitizer reports in snippet.
      return

    test_run['original_output_snippet'] = test_run['output_snippet']
    test_run['original_output_snippet_base64'] = \
        test_run['output_snippet_base64']

    escaped_snippet = StringEncoder().encode(symbolized_snippet)
    test_run['output_snippet'] = escaped_snippet
    test_run['output_snippet_base64'] = \
        base64.b64encode(symbolized_snippet)
    test_run['snippet_processed_by'] = 'asan_symbolize.py'
    # Originally, "lossless" refers to "no Unicode data lost while encoding the
    # string". However, since we're applying another kind of transformation
    # (symbolization), it doesn't seem right to consider the snippet lossless.
    # NOTE(review): the 'losless' spelling appears deliberate -- presumably it
    # matches the key emitted by the test launcher; confirm before fixing.
    test_run['losless_snippet'] = False
+
+
def symbolize_snippets_in_json(filename, symbolization_loop):
  """Symbolizes output snippets inside a test-launcher summary JSON file.

  Reads |filename|, symbolizes the output snippet of every test run and
  writes the result back to the same file.

  Args:
    filename: Path to the JSON file produced by the test launcher.
    symbolization_loop: Object passed through to JSONTestRunSymbolizer.
  """
  with open(filename, 'r') as f:
    json_data = json.load(f)

  test_run_symbolizer = JSONTestRunSymbolizer(symbolization_loop)
  for iteration_data in json_data['per_iteration_data']:
    # The test name (dict key) is unused, so iterate over the values
    # directly; .values() also works on both Python 2 and Python 3, unlike
    # the original Python 2-only .iteritems().
    for test_runs in iteration_data.values():
      for test_run in test_runs:
        test_run_symbolizer.symbolize(test_run)

  with open(filename, 'w') as f:
    json.dump(json_data, f, indent=3, sort_keys=True)
+
+
def main():
  """Command-line entry point: symbolizes reports from stdin or a JSON file."""
  parser = argparse.ArgumentParser(description='Symbolize sanitizer reports.')
  parser.add_argument('--test-summary-json-file',
      help='Path to a JSON file produced by the test launcher. The script will '
      'ignore standard input and instead symbolize the output snippets '
      'inside the JSON file. The result will be written back to the JSON '
      'file.')
  parser.add_argument('strip_path_prefix', nargs='*',
      help='When printing source file names, the longest prefix ending in one '
      'of these substrings will be stripped. E.g.: "Release/../../".')
  parser.add_argument('--executable-path',
      help='Path to program executable. Used on OSX swarming bots to locate '
      'dSYM bundles for associated frameworks and bundles.')
  args = parser.parse_args()

  disable_buffering()
  set_symbolizer_path()
  asan_symbolize.demangle = True
  asan_symbolize.fix_filename_patterns = args.strip_path_prefix
  # Most source paths for Chromium binaries start with
  # /path/to/src/out/Release/../../
  asan_symbolize.fix_filename_patterns.append('Release/../../')
  binary_name_filter = None
  if platform.uname()[0] == 'Darwin':
    binary_name_filter = make_chrome_osx_binary_name_filter(
        chrome_product_dir_path(args.executable_path))
  loop = asan_symbolize.SymbolizationLoop(
      binary_name_filter=binary_name_filter,
      dsym_hint_producer=chrome_dsym_hints)

  if args.test_summary_json_file:
    symbolize_snippets_in_json(args.test_summary_json_file, loop)
  else:
    # Process stdin.
    asan_symbolize.logfile = sys.stdin
    loop.process_logfile()
+if __name__ == '__main__':
+ main()
diff --git a/chromium/tools/valgrind/asan/third_party/README.chromium b/chromium/tools/valgrind/asan/third_party/README.chromium
new file mode 100644
index 00000000000..5c363ead601
--- /dev/null
+++ b/chromium/tools/valgrind/asan/third_party/README.chromium
@@ -0,0 +1,7 @@
+Name: asan_symbolize.py
+License: University of Illinois Open Source License.
+Version: r227327
+URL: http://llvm.org/viewvc/llvm-project/compiler-rt/trunk/lib/asan/scripts/asan_symbolize.py?view=co&content-type=text%2Fplain
+Security Critical: no
+
+asan_symbolize.py is a verbatim copy of asan_symbolize.py in the LLVM trunk.
diff --git a/chromium/tools/valgrind/asan/third_party/__init__.py b/chromium/tools/valgrind/asan/third_party/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/chromium/tools/valgrind/asan/third_party/__init__.py
diff --git a/chromium/tools/valgrind/asan/third_party/asan_symbolize.py b/chromium/tools/valgrind/asan/third_party/asan_symbolize.py
new file mode 100755
index 00000000000..59fceaaed81
--- /dev/null
+++ b/chromium/tools/valgrind/asan/third_party/asan_symbolize.py
@@ -0,0 +1,479 @@
+#!/usr/bin/env python
+#===- lib/asan/scripts/asan_symbolize.py -----------------------------------===#
+#
+# The LLVM Compiler Infrastructure
+#
+# This file is distributed under the University of Illinois Open Source
+# License. See LICENSE.TXT for details.
+#
+#===------------------------------------------------------------------------===#
+import argparse
+import bisect
+import getopt
+import os
+import re
+import subprocess
+import sys
+
+# Module-level configuration. Overridden by the command-line handling at the
+# bottom of this file and by importers (e.g. Chromium's symbolization wrapper).
+symbolizers = {}              # binary path -> ChainSymbolizer cache
+DEBUG = False                 # when True, echo symbolizer commands and I/O
+demangle = False              # demangle C++ symbol names
+binutils_prefix = None        # cross-compile prefix for addr2line
+sysroot_path = None           # sysroot prepended to binary names (see -s)
+binary_name_filter = None     # optional hook rewriting binary paths
+fix_filename_patterns = None  # substrings used by fix_filename() to trim paths
+logfile = sys.stdin           # input stream containing sanitizer reports
+
+# FIXME: merge the code that calls fix_filename().
+def fix_filename(file_name):
+ if fix_filename_patterns:
+ for path_to_cut in fix_filename_patterns:
+ file_name = re.sub('.*' + path_to_cut, '', file_name)
+ file_name = re.sub('.*asan_[a-z_]*.cc:[0-9]*', '_asan_rtl_', file_name)
+ file_name = re.sub('.*crtstuff.c:0', '???:0', file_name)
+ return file_name
+
+def sysroot_path_filter(binary_name):
+  """binary_name_filter installed by the -s flag: prefix with the sysroot."""
+  return sysroot_path + binary_name
+
+def guess_arch(addr):
+ # Guess which arch we're running. 10 = len('0x') + 8 hex digits.
+ if len(addr) > 10:
+ return 'x86_64'
+ else:
+ return 'i386'
+
+class Symbolizer(object):
+ def __init__(self):
+ pass
+
+ def symbolize(self, addr, binary, offset):
+ """Symbolize the given address (pair of binary and offset).
+
+ Overriden in subclasses.
+ Args:
+ addr: virtual address of an instruction.
+ binary: path to executable/shared object containing this instruction.
+ offset: instruction offset in the @binary.
+ Returns:
+ list of strings (one string for each inlined frame) describing
+ the code locations for this instruction (that is, function name, file
+ name, line and column numbers).
+ """
+ return None
+
+
+class LLVMSymbolizer(Symbolizer):
+  """Symbolizer backed by a long-running llvm-symbolizer subprocess."""
+
+  def __init__(self, symbolizer_path, default_arch, system, dsym_hints=[]):
+    # NOTE(review): mutable default argument; harmless here because the list
+    # is only iterated, never mutated.
+    super(LLVMSymbolizer, self).__init__()
+    self.symbolizer_path = symbolizer_path
+    self.default_arch = default_arch
+    self.system = system
+    self.dsym_hints = dsym_hints
+    self.pipe = self.open_llvm_symbolizer()
+
+  def open_llvm_symbolizer(self):
+    """Start llvm-symbolizer; returns its Popen object, or None on failure."""
+    cmd = [self.symbolizer_path,
+           '--use-symbol-table=true',
+           '--demangle=%s' % demangle,
+           '--functions=short',
+           '--inlining=true',
+           '--default-arch=%s' % self.default_arch]
+    if self.system == 'Darwin':
+      # .dSYM hints tell llvm-symbolizer where to find OSX debug info.
+      for hint in self.dsym_hints:
+        cmd.append('--dsym-hint=%s' % hint)
+    if DEBUG:
+      print ' '.join(cmd)
+    try:
+      result = subprocess.Popen(cmd, stdin=subprocess.PIPE,
+                                stdout=subprocess.PIPE)
+    except OSError:
+      # llvm-symbolizer not found; symbolize() will return None and callers
+      # fall back to the next symbolizer in the chain.
+      result = None
+    return result
+
+  def symbolize(self, addr, binary, offset):
+    """Overrides Symbolizer.symbolize."""
+    if not self.pipe:
+      return None
+    result = []
+    try:
+      symbolizer_input = '"%s" %s' % (binary, offset)
+      if DEBUG:
+        print symbolizer_input
+      print >> self.pipe.stdin, symbolizer_input
+      # llvm-symbolizer answers with (function, file:line) line pairs per
+      # inlined frame, terminated by an empty line.
+      while True:
+        function_name = self.pipe.stdout.readline().rstrip()
+        if not function_name:
+          break
+        file_name = self.pipe.stdout.readline().rstrip()
+        file_name = fix_filename(file_name)
+        if (not function_name.startswith('??') or
+            not file_name.startswith('??')):
+          # Append only non-trivial frames.
+          result.append('%s in %s %s' % (addr, function_name,
+                                         file_name))
+    except Exception:
+      result = []
+    if not result:
+      result = None
+    return result
+
+
+def LLVMSymbolizerFactory(system, default_arch, dsym_hints=[]):
+ symbolizer_path = os.getenv('LLVM_SYMBOLIZER_PATH')
+ if not symbolizer_path:
+ symbolizer_path = os.getenv('ASAN_SYMBOLIZER_PATH')
+ if not symbolizer_path:
+ # Assume llvm-symbolizer is in PATH.
+ symbolizer_path = 'llvm-symbolizer'
+ return LLVMSymbolizer(symbolizer_path, default_arch, system, dsym_hints)
+
+
+class Addr2LineSymbolizer(Symbolizer):
+  """Symbolizer backed by a long-running (binutils) addr2line process.
+
+  An instance is bound to one binary; symbolize() for any other binary
+  returns None so the chain can try something else.
+  """
+  def __init__(self, binary):
+    super(Addr2LineSymbolizer, self).__init__()
+    self.binary = binary
+    self.pipe = self.open_addr2line()
+
+  def open_addr2line(self):
+    """Spawn addr2line (honoring the cross-compile prefix) for self.binary."""
+    addr2line_tool = 'addr2line'
+    if binutils_prefix:
+      addr2line_tool = binutils_prefix + addr2line_tool
+    cmd = [addr2line_tool, '-f']
+    if demangle:
+      cmd += ['--demangle']
+    cmd += ['-e', self.binary]
+    if DEBUG:
+      print ' '.join(cmd)
+    return subprocess.Popen(cmd,
+                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+
+  def symbolize(self, addr, binary, offset):
+    """Overrides Symbolizer.symbolize."""
+    if self.binary != binary:
+      return None
+    try:
+      # addr2line -f answers with two lines: function name, then file:line.
+      print >> self.pipe.stdin, offset
+      function_name = self.pipe.stdout.readline().rstrip()
+      file_name = self.pipe.stdout.readline().rstrip()
+    except Exception:
+      # Broken pipe etc.: fall back to an entry with empty name/location.
+      function_name = ''
+      file_name = ''
+    file_name = fix_filename(file_name)
+    return ['%s in %s %s' % (addr, function_name, file_name)]
+
+
+class UnbufferedLineConverter(object):
+  """
+  Wrap a child process that responds to each line of input with one line of
+  output. Uses pty to trick the child into providing unbuffered output.
+  """
+  def __init__(self, args, close_stderr=False):
+    # Local imports so that the script can start on Windows.
+    import pty
+    import termios
+    pid, fd = pty.fork()
+    if pid == 0:
+      # We're the child. Transfer control to command.
+      if close_stderr:
+        dev_null = os.open('/dev/null', 0)
+        os.dup2(dev_null, 2)
+      os.execvp(args[0], args)
+    else:
+      # Disable echoing.
+      attr = termios.tcgetattr(fd)
+      attr[3] = attr[3] & ~termios.ECHO
+      termios.tcsetattr(fd, termios.TCSANOW, attr)
+      # Set up a file()-like interface to the child process
+      self.r = os.fdopen(fd, "r", 1)
+      self.w = os.fdopen(os.dup(fd), "w", 1)
+
+  def convert(self, line):
+    """Send one line to the child and return its one-line response."""
+    self.w.write(line + "\n")
+    return self.readline()
+
+  def readline(self):
+    return self.r.readline().rstrip()
+
+
+class DarwinSymbolizer(Symbolizer):
+  """Symbolizer driving OSX's atos through an UnbufferedLineConverter."""
+
+  def __init__(self, addr, binary):
+    super(DarwinSymbolizer, self).__init__()
+    self.binary = binary
+    # atos needs the arch; guess it from the width of the address string.
+    self.arch = guess_arch(addr)
+    self.open_atos()
+
+  def open_atos(self):
+    if DEBUG:
+      print 'atos -o %s -arch %s' % (self.binary, self.arch)
+    cmdline = ['atos', '-o', self.binary, '-arch', self.arch]
+    self.atos = UnbufferedLineConverter(cmdline, close_stderr=True)
+
+  def symbolize(self, addr, binary, offset):
+    """Overrides Symbolizer.symbolize."""
+    if self.binary != binary:
+      return None
+    atos_line = self.atos.convert('0x%x' % int(offset, 16))
+    # Skip atos's informational chatter until the real answer arrives.
+    while "got symbolicator for" in atos_line:
+      atos_line = self.atos.readline()
+    # A well-formed atos response looks like this:
+    #   foo(type1, type2) (in object.name) (filename.cc:80)
+    match = re.match('^(.*) \(in (.*)\) \((.*:\d*)\)$', atos_line)
+    if DEBUG:
+      print 'atos_line: ', atos_line
+    if match:
+      function_name = match.group(1)
+      # Strip the argument list from the demangled function name.
+      function_name = re.sub('\(.*?\)', '', function_name)
+      file_name = fix_filename(match.group(3))
+      return ['%s in %s %s' % (addr, function_name, file_name)]
+    else:
+      # Unparseable response: emit it verbatim after the address.
+      return ['%s in %s' % (addr, atos_line)]
+
+
+# Chain several symbolizers so that if one symbolizer fails, we fall back
+# to the next symbolizer in chain.
+class ChainSymbolizer(Symbolizer):
+ def __init__(self, symbolizer_list):
+ super(ChainSymbolizer, self).__init__()
+ self.symbolizer_list = symbolizer_list
+
+ def symbolize(self, addr, binary, offset):
+ """Overrides Symbolizer.symbolize."""
+ for symbolizer in self.symbolizer_list:
+ if symbolizer:
+ result = symbolizer.symbolize(addr, binary, offset)
+ if result:
+ return result
+ return None
+
+ def append_symbolizer(self, symbolizer):
+ self.symbolizer_list.append(symbolizer)
+
+
+def BreakpadSymbolizerFactory(binary):
+ suffix = os.getenv('BREAKPAD_SUFFIX')
+ if suffix:
+ filename = binary + suffix
+ if os.access(filename, os.F_OK):
+ return BreakpadSymbolizer(filename)
+ return None
+
+
+def SystemSymbolizerFactory(system, addr, binary):
+ if system == 'Darwin':
+ return DarwinSymbolizer(addr, binary)
+ elif system == 'Linux':
+ return Addr2LineSymbolizer(binary)
+
+
+class BreakpadSymbolizer(Symbolizer):
+  """Symbolizer that reads a Breakpad .sym text file into lookup tables."""
+
+  def __init__(self, filename):
+    super(BreakpadSymbolizer, self).__init__()
+    self.filename = filename
+    # NOTE: file() is Python 2 only; the handle is never explicitly closed
+    # (relies on refcounting).
+    lines = file(filename).readlines()
+    self.files = []         # FILE records, indexed by file number
+    self.symbols = {}       # symbol address -> name (FUNC/PUBLIC records)
+    self.address_list = []  # sorted start addresses of line records
+    self.addresses = {}     # addr -> (function addr, size, line, file no)
+    # MODULE mac x86_64 A7001116478B33F18FF9BEDE9F615F190 t
+    fragments = lines[0].rstrip().split()
+    self.arch = fragments[2]
+    self.debug_id = fragments[3]
+    self.binary = ' '.join(fragments[4:])
+    self.parse_lines(lines[1:])
+
+  def parse_lines(self, lines):
+    """Populate the lookup tables from the .sym records after MODULE."""
+    cur_function_addr = ''
+    for line in lines:
+      fragments = line.split()
+      if fragments[0] == 'FILE':
+        # FILE records appear in order, so the new index equals the count.
+        assert int(fragments[1]) == len(self.files)
+        self.files.append(' '.join(fragments[2:]))
+      elif fragments[0] == 'PUBLIC':
+        self.symbols[int(fragments[1], 16)] = ' '.join(fragments[3:])
+      elif fragments[0] in ['CFI', 'STACK']:
+        pass
+      elif fragments[0] == 'FUNC':
+        cur_function_addr = int(fragments[1], 16)
+        if not cur_function_addr in self.symbols.keys():
+          self.symbols[cur_function_addr] = ' '.join(fragments[4:])
+      else:
+        # Line starting with an address.
+        addr = int(fragments[0], 16)
+        self.address_list.append(addr)
+        # Tuple of symbol address, size, line, file number.
+        self.addresses[addr] = (cur_function_addr,
+                                int(fragments[1], 16),
+                                int(fragments[2]),
+                                int(fragments[3]))
+    self.address_list.sort()
+
+  def get_sym_file_line(self, addr):
+    """Return (symbol, filename, line) for addr, or None if out of range."""
+    key = None
+    if addr in self.addresses.keys():
+      key = addr
+    else:
+      # Fall back to the closest preceding line record.
+      index = bisect.bisect_left(self.address_list, addr)
+      if index == 0:
+        return None
+      else:
+        key = self.address_list[index - 1]
+    sym_id, size, line_no, file_no = self.addresses[key]
+    symbol = self.symbols[sym_id]
+    filename = self.files[file_no]
+    if addr < key + size:
+      return symbol, filename, line_no
+    else:
+      return None
+
+  def symbolize(self, addr, binary, offset):
+    """Overrides Symbolizer.symbolize; only handles its own binary."""
+    if self.binary != binary:
+      return None
+    res = self.get_sym_file_line(int(offset, 16))
+    if res:
+      function_name, file_name, line_no = res
+      result = ['%s in %s %s:%d' % (
+          addr, function_name, file_name, line_no)]
+      # NOTE(review): looks like a leftover debug print (dumps the raw list
+      # to stdout in addition to returning it) — confirm against upstream.
+      print result
+      return result
+    else:
+      return None
+
+
+class SymbolizationLoop(object):
+  """Reads sanitizer output line by line and symbolizes stack-trace frames."""
+
+  def __init__(self, binary_name_filter=None, dsym_hint_producer=None):
+    if sys.platform == 'win32':
+      # ASan on Windows uses dbghelp.dll to symbolize in-process, which works
+      # even in sandboxed processes. Nothing needs to be done here.
+      self.process_line = self.process_line_echo
+    else:
+      # Used by clients who may want to supply a different binary name.
+      # E.g. in Chrome several binaries may share a single .dSYM.
+      self.binary_name_filter = binary_name_filter
+      self.dsym_hint_producer = dsym_hint_producer
+      self.system = os.uname()[0]
+      if self.system not in ['Linux', 'Darwin', 'FreeBSD']:
+        raise Exception('Unknown system')
+      self.llvm_symbolizers = {}    # binary path -> LLVMSymbolizer
+      self.last_llvm_symbolizer = None
+      self.dsym_hints = set([])     # all .dSYM hints seen so far
+      self.frame_no = 0             # running frame counter for output
+      self.process_line = self.process_line_posix
+
+  def symbolize_address(self, addr, binary, offset):
+    # On non-Darwin (i.e. on platforms without .dSYM debug info) always use
+    # a single symbolizer binary.
+    # On Darwin, if the dsym hint producer is present:
+    #  1. check whether we've seen this binary already; if so,
+    #     use |llvm_symbolizers[binary]|, which has already loaded the debug
+    #     info for this binary (might not be the case for
+    #     |last_llvm_symbolizer|);
+    #  2. otherwise check if we've seen all the hints for this binary already;
+    #     if so, reuse |last_llvm_symbolizer| which has the full set of hints;
+    #  3. otherwise create a new symbolizer and pass all currently known
+    #     .dSYM hints to it.
+    if not binary in self.llvm_symbolizers:
+      use_new_symbolizer = True
+      if self.system == 'Darwin' and self.dsym_hint_producer:
+        dsym_hints_for_binary = set(self.dsym_hint_producer(binary))
+        use_new_symbolizer = bool(dsym_hints_for_binary - self.dsym_hints)
+        self.dsym_hints |= dsym_hints_for_binary
+      if self.last_llvm_symbolizer and not use_new_symbolizer:
+        self.llvm_symbolizers[binary] = self.last_llvm_symbolizer
+      else:
+        self.last_llvm_symbolizer = LLVMSymbolizerFactory(
+            self.system, guess_arch(addr), self.dsym_hints)
+        self.llvm_symbolizers[binary] = self.last_llvm_symbolizer
+    # Use the chain of symbolizers:
+    # Breakpad symbolizer -> LLVM symbolizer -> addr2line/atos
+    # (fall back to next symbolizer if the previous one fails).
+    if not binary in symbolizers:
+      symbolizers[binary] = ChainSymbolizer(
+          [BreakpadSymbolizerFactory(binary), self.llvm_symbolizers[binary]])
+    result = symbolizers[binary].symbolize(addr, binary, offset)
+    if result is None:
+      # Initialize system symbolizer only if other symbolizers failed.
+      symbolizers[binary].append_symbolizer(
+          SystemSymbolizerFactory(self.system, addr, binary))
+      result = symbolizers[binary].symbolize(addr, binary, offset)
+    # The system symbolizer must produce some result.
+    assert result
+    return result
+
+  def get_symbolized_lines(self, symbolized_lines):
+    """Format symbolized frames, numbering them with self.frame_no."""
+    if not symbolized_lines:
+      # Symbolization failed: keep the original line untouched.
+      return [self.current_line]
+    else:
+      result = []
+      for symbolized_frame in symbolized_lines:
+        result.append(' #%s %s' % (str(self.frame_no), symbolized_frame.rstrip()))
+        self.frame_no += 1
+      return result
+
+  def process_logfile(self):
+    self.frame_no = 0
+    for line in logfile:
+      processed = self.process_line(line)
+      print '\n'.join(processed)
+
+  def process_line_echo(self, line):
+    # Windows: output is already symbolized in-process; pass it through.
+    return [line.rstrip()]
+
+  def process_line_posix(self, line):
+    self.current_line = line.rstrip()
+    #0 0x7f6e35cf2e45 (/blah/foo.so+0x11fe45)
+    stack_trace_line_format = (
+        '^( *#([0-9]+) *)(0x[0-9a-f]+) *\((.*)\+(0x[0-9a-f]+)\)')
+    match = re.match(stack_trace_line_format, line)
+    if not match:
+      return [self.current_line]
+    if DEBUG:
+      print line
+    _, frameno_str, addr, binary, offset = match.groups()
+    if frameno_str == '0':
+      # Assume that frame #0 is the first frame of new stack trace.
+      self.frame_no = 0
+    original_binary = binary
+    if self.binary_name_filter:
+      binary = self.binary_name_filter(binary)
+    symbolized_line = self.symbolize_address(addr, binary, offset)
+    if not symbolized_line:
+      if original_binary != binary:
+        # NOTE(review): this retry passes the filtered |binary| again rather
+        # than |original_binary|, so it repeats the same lookup — looks like
+        # a bug; confirm against upstream before changing (this file is
+        # documented as a verbatim LLVM copy).
+        symbolized_line = self.symbolize_address(addr, binary, offset)
+    return self.get_symbolized_lines(symbolized_line)
+
+
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser(
+      formatter_class=argparse.RawDescriptionHelpFormatter,
+      description='ASan symbolization script',
+      epilog='Example of use:\n'
+             'asan_symbolize.py -c "$HOME/opt/cross/bin/arm-linux-gnueabi-" '
+             '-s "$HOME/SymbolFiles" < asan.log')
+  parser.add_argument('path_to_cut', nargs='*',
+      help='pattern to be cut from the result file path ')
+  parser.add_argument('-d','--demangle', action='store_true',
+      help='demangle function names')
+  parser.add_argument('-s', metavar='SYSROOT',
+      help='set path to sysroot for sanitized binaries')
+  parser.add_argument('-c', metavar='CROSS_COMPILE',
+      help='set prefix for binutils')
+  parser.add_argument('-l','--logfile', default=sys.stdin,
+      type=argparse.FileType('r'),
+      help='set log file name to parse, default is stdin')
+  args = parser.parse_args()
+  # These assignments run at module scope, so they rebind the module-level
+  # configuration globals defined near the top of this file.
+  if args.path_to_cut:
+    fix_filename_patterns = args.path_to_cut
+  if args.demangle:
+    demangle = True
+  if args.s:
+    binary_name_filter = sysroot_path_filter
+    sysroot_path = args.s
+  if args.c:
+    binutils_prefix = args.c
+  # NOTE(review): args.logfile defaults to sys.stdin, so the else branch
+  # below is unreachable; harmless.
+  if args.logfile:
+    logfile = args.logfile
+  else:
+    logfile = sys.stdin
+  loop = SymbolizationLoop(binary_name_filter)
+  loop.process_logfile()
diff --git a/chromium/tools/valgrind/browser_wrapper_win.py b/chromium/tools/valgrind/browser_wrapper_win.py
new file mode 100644
index 00000000000..0023ca7dfb7
--- /dev/null
+++ b/chromium/tools/valgrind/browser_wrapper_win.py
@@ -0,0 +1,49 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import os
+import re
+import sys
+import subprocess
+
+# TODO(timurrrr): we may use it on POSIX too to avoid code duplication once we
+# support layout_tests, remove Dr. Memory specific code and verify it works
+# on a "clean" Mac.
+
+# Extract the test name from a --gtest_filter flag, if exactly one was given.
+testcase_name = None
+for arg in sys.argv:
+  m = re.match("\-\-gtest_filter=(.*)", arg)
+  if m:
+    assert testcase_name is None
+    testcase_name = m.groups()[0]
+
+# arg #0 is the path to this python script
+cmd_to_run = sys.argv[1:]
+
+# TODO(timurrrr): this is Dr. Memory-specific
+# Usually, we pass "-logdir" "foo\bar\spam path" args to Dr. Memory.
+# To group reports per UI test, we want to put the reports for each test into a
+# separate directory. This code can be simplified when we have
+# https://github.com/DynamoRIO/drmemory/issues/684 fixed.
+logdir_idx = cmd_to_run.index("-logdir")
+old_logdir = cmd_to_run[logdir_idx + 1]
+
+wrapper_pid = str(os.getpid())
+
+# On Windows, there is a chance of PID collision. We avoid it by appending the
+# number of entries in the logdir at the end of wrapper_pid.
+# This number is monotonic and we can't have two simultaneously running wrappers
+# with the same PID.
+wrapper_pid += "_%d" % len(glob.glob(old_logdir + "\\*"))
+
+# Redirect this test's reports into its own per-test subdirectory.
+cmd_to_run[logdir_idx + 1] += "\\testcase.%s.logs" % wrapper_pid
+os.makedirs(cmd_to_run[logdir_idx + 1])
+
+# Record the test name next to the logs so reports can be attributed later.
+if testcase_name:
+  f = open(old_logdir + "\\testcase.%s.name" % wrapper_pid, "w")
+  print >>f, testcase_name
+  f.close()
+
+# Run the actual tool command line and propagate its exit status.
+exit(subprocess.call(cmd_to_run))
diff --git a/chromium/tools/valgrind/chrome_tests.bat b/chromium/tools/valgrind/chrome_tests.bat
new file mode 100755
index 00000000000..9d4c8ca8d34
--- /dev/null
+++ b/chromium/tools/valgrind/chrome_tests.bat
@@ -0,0 +1,53 @@
+@echo off
+:: Copyright (c) 2011 The Chromium Authors. All rights reserved.
+:: Use of this source code is governed by a BSD-style license that can be
+:: found in the LICENSE file.
+
+:: Windows wrapper: unpacks Dr. Memory and forwards all arguments to
+:: chrome_tests.py with the environment that script expects.
+setlocal
+
+set THISDIR=%~dp0
+set TOOL_NAME="unknown"
+
+:: Get the tool name and put it into TOOL_NAME {{{1
+:: NB: SHIFT command doesn't modify %*
+:PARSE_ARGS_LOOP
+  if %1 == () GOTO:TOOLNAME_NOT_FOUND
+  if %1 == --tool GOTO:TOOLNAME_FOUND
+  SHIFT
+  goto :PARSE_ARGS_LOOP
+
+:TOOLNAME_NOT_FOUND
+echo "Please specify a tool (e.g. drmemory) by using --tool flag"
+exit /B 1
+
+:TOOLNAME_FOUND
+SHIFT
+set TOOL_NAME=%1
+:: }}}
+:: Only the Dr. Memory family of tools is supported on Windows.
+if "%TOOL_NAME%" == "drmemory" GOTO :SETUP_DRMEMORY
+if "%TOOL_NAME%" == "drmemory_light" GOTO :SETUP_DRMEMORY
+if "%TOOL_NAME%" == "drmemory_full" GOTO :SETUP_DRMEMORY
+if "%TOOL_NAME%" == "drmemory_pattern" GOTO :SETUP_DRMEMORY
+echo "Unknown tool: `%TOOL_NAME%`! Only drmemory is supported right now"
+exit /B 1
+
+:SETUP_DRMEMORY
+:: Set up DRMEMORY_COMMAND to invoke Dr. Memory {{{1
+set DRMEMORY_PATH=%THISDIR%..\..\third_party\drmemory
+set DRMEMORY_SFX=%DRMEMORY_PATH%\drmemory-windows-sfx.exe
+if EXIST %DRMEMORY_SFX% GOTO DRMEMORY_BINARY_OK
+echo "Can't find Dr. Memory executables."
+echo "See http://www.chromium.org/developers/how-tos/using-valgrind/dr-memory"
+echo "for the instructions on how to get them."
+exit /B 1
+
+:DRMEMORY_BINARY_OK
+:: Self-extract the Dr. Memory archive into DRMEMORY_PATH\unpacked.
+%DRMEMORY_SFX% -o%DRMEMORY_PATH%\unpacked -y
+set DRMEMORY_COMMAND=%DRMEMORY_PATH%\unpacked\bin\drmemory.exe
+:: }}}
+goto :RUN_TESTS
+
+:RUN_TESTS
+set PYTHONPATH=%THISDIR%../python/google
+set RUNNING_ON_VALGRIND=yes
+python %THISDIR%/chrome_tests.py %*
diff --git a/chromium/tools/valgrind/chrome_tests.py b/chromium/tools/valgrind/chrome_tests.py
new file mode 100755
index 00000000000..e108384f1b1
--- /dev/null
+++ b/chromium/tools/valgrind/chrome_tests.py
@@ -0,0 +1,798 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+''' Runs various chrome tests through valgrind_test.py.'''
+
+import glob
+import logging
+import multiprocessing
+import optparse
+import os
+import stat
+import subprocess
+import sys
+
+import logging_utils
+import path_utils
+
+import common
+import valgrind_test
+
+class TestNotFound(Exception): pass
+
+class MultipleGTestFiltersSpecified(Exception): pass
+
+class BuildDirNotFound(Exception): pass
+
+class BuildDirAmbiguous(Exception): pass
+
+class ExecutableNotFound(Exception): pass
+
+class BadBinary(Exception): pass
+
+class ChromeTests:
+  """Looks up and runs one Chromium test suite under a dynamic-analysis tool."""
+
+  # Tools slow enough that the extra ".gtest.txt" exclusion files are applied
+  # to them (see _AppendGtestFilter).
+  SLOW_TOOLS = ["memcheck", "drmemory"]
+  # Default chunk size for layout tests — presumably consumed by the
+  # layout-test runner further down this class; confirm before relying on it.
+  LAYOUT_TESTS_DEFAULT_CHUNK_SIZE = 300
+
+  def __init__(self, options, args, test):
+    """Set up a single test run.
+
+    Args:
+      options: parsed optparse options (gtest_filter, build_dir, ...).
+      args: extra arguments forwarded to the tool command line.
+      test: test name, optionally 'name:gtest_filter'.
+    Raises:
+      TestNotFound: test is not a key of _test_list.
+      MultipleGTestFiltersSpecified: a filter was given both ways.
+      BuildDirAmbiguous: more than one default build dir exists.
+    """
+    if ':' in test:
+      (self._test, self._gtest_filter) = test.split(':', 1)
+    else:
+      self._test = test
+      self._gtest_filter = options.gtest_filter
+
+    if self._test not in self._test_list:
+      raise TestNotFound("Unknown test: %s" % test)
+
+    if options.gtest_filter and options.gtest_filter != self._gtest_filter:
+      raise MultipleGTestFiltersSpecified("Can not specify both --gtest_filter "
+                                          "and --test %s" % test)
+
+    self._options = options
+    self._args = args
+
+    script_dir = path_utils.ScriptDir()
+    # Compute the top of the tree (the "source dir") from the script dir (where
+    # this script lives). We assume that the script dir is in tools/valgrind/
+    # relative to the top of the tree.
+    self._source_dir = os.path.dirname(os.path.dirname(script_dir))
+    # since this path is used for string matching, make sure it's always
+    # an absolute Unix-style path
+    self._source_dir = os.path.abspath(self._source_dir).replace('\\', '/')
+    # NOTE(review): valgrind_test_script appears unused in this constructor.
+    valgrind_test_script = os.path.join(script_dir, "valgrind_test.py")
+    self._command_preamble = ["--source-dir=%s" % (self._source_dir)]
+
+    if not self._options.build_dir:
+      # Probe the conventional output locations for a Debug build.
+      dirs = [
+        os.path.join(self._source_dir, "xcodebuild", "Debug"),
+        os.path.join(self._source_dir, "out", "Debug"),
+        os.path.join(self._source_dir, "build", "Debug"),
+      ]
+      build_dir = [d for d in dirs if os.path.isdir(d)]
+      if len(build_dir) > 1:
+        raise BuildDirAmbiguous("Found more than one suitable build dir:\n"
+                                "%s\nPlease specify just one "
+                                "using --build-dir" % ", ".join(build_dir))
+      elif build_dir:
+        self._options.build_dir = build_dir[0]
+      else:
+        self._options.build_dir = None
+
+    if self._options.build_dir:
+      build_dir = os.path.abspath(self._options.build_dir)
+      self._command_preamble += ["--build-dir=%s" % (self._options.build_dir)]
+
+ def _EnsureBuildDirFound(self):
+ if not self._options.build_dir:
+ raise BuildDirNotFound("Oops, couldn't find a build dir, please "
+ "specify it manually using --build-dir")
+
+  def _DefaultCommand(self, tool, exe=None, valgrind_test_args=None):
+    '''Generates the default command array that most tests will use.
+
+    Args:
+      tool: the valgrind_test tool object (queried for its name).
+      exe: optional test binary name, resolved inside the build dir.
+      valgrind_test_args: extra flags passed through to valgrind_test.py.
+    Returns:
+      The argument list for valgrind_test.py.
+    Raises:
+      ExecutableNotFound: exe does not exist in the build dir.
+      BadBinary: exe is ASan-instrumented (checked via nm).
+    '''
+    if exe and common.IsWindows():
+      exe += '.exe'
+
+    cmd = list(self._command_preamble)
+
+    # Find all suppressions matching the following pattern:
+    # tools/valgrind/TOOL/suppressions[_PLATFORM].txt
+    # and list them with --suppressions= prefix.
+    script_dir = path_utils.ScriptDir()
+    tool_name = tool.ToolName();
+    suppression_file = os.path.join(script_dir, tool_name, "suppressions.txt")
+    if os.path.exists(suppression_file):
+      cmd.append("--suppressions=%s" % suppression_file)
+    # Platform-specific suppression
+    for platform in common.PlatformNames():
+      platform_suppression_file = \
+          os.path.join(script_dir, tool_name, 'suppressions_%s.txt' % platform)
+      if os.path.exists(platform_suppression_file):
+        cmd.append("--suppressions=%s" % platform_suppression_file)
+
+    if tool_name == "drmemory":
+      if self._options.drmemory_ops:
+        # prepending " " to avoid Dr. Memory's option confusing optparse
+        cmd += ["--drmemory_ops", " " + self._options.drmemory_ops]
+
+    if self._options.valgrind_tool_flags:
+      cmd += self._options.valgrind_tool_flags.split(" ")
+    if self._options.keep_logs:
+      cmd += ["--keep_logs"]
+    if valgrind_test_args != None:
+      for arg in valgrind_test_args:
+        cmd.append(arg)
+    if exe:
+      self._EnsureBuildDirFound()
+      exe_path = os.path.join(self._options.build_dir, exe)
+      if not os.path.exists(exe_path):
+        raise ExecutableNotFound("Couldn't find '%s'" % exe_path)
+
+      # Make sure we don't try to test ASan-built binaries
+      # with other dynamic instrumentation-based tools.
+      # TODO(timurrrr): also check TSan and MSan?
+      # `nm` might not be available, so use try-except.
+      try:
+        # Do not perform this check on OS X, as 'nm' on 10.6 can't handle
+        # binaries built with Clang 3.5+.
+        if not common.IsMac():
+          nm_output = subprocess.check_output(["nm", exe_path])
+          if nm_output.find("__asan_init") != -1:
+            raise BadBinary("You're trying to run an executable instrumented "
+                            "with AddressSanitizer under %s. Please provide "
+                            "an uninstrumented executable." % tool_name)
+      except OSError:
+        pass
+
+      cmd.append(exe_path)
+      # Valgrind runs tests slowly, so slow tests hurt more; show elapased time
+      # so we can find the slowpokes.
+      cmd.append("--gtest_print_time")
+      # Built-in test launcher for gtest-based executables runs tests using
+      # multiple process by default. Force the single-process mode back.
+      cmd.append("--single-process-tests")
+    if self._options.gtest_repeat:
+      cmd.append("--gtest_repeat=%s" % self._options.gtest_repeat)
+    if self._options.gtest_shuffle:
+      cmd.append("--gtest_shuffle")
+    if self._options.gtest_break_on_failure:
+      cmd.append("--gtest_break_on_failure")
+    if self._options.test_launcher_bot_mode:
+      cmd.append("--test-launcher-bot-mode")
+    if self._options.test_launcher_total_shards is not None:
+      cmd.append("--test-launcher-total-shards=%d" % self._options.test_launcher_total_shards)
+    if self._options.test_launcher_shard_index is not None:
+      cmd.append("--test-launcher-shard-index=%d" % self._options.test_launcher_shard_index)
+    return cmd
+
+ def Run(self):
+ ''' Runs the test specified by command-line argument --test '''
+ logging.info("running test %s" % (self._test))
+ return self._test_list[self._test](self)
+
+  def _AppendGtestFilter(self, tool, name, cmd):
+    '''Append an appropriate --gtest_filter flag to the googletest binary
+    invocation.
+    If the user passed his own filter mentioning only one test, just use it.
+    Otherwise, filter out tests listed in the appropriate gtest_exclude files.
+    '''
+    # A filter with no wildcards or colons targets a single test: honor it.
+    if (self._gtest_filter and
+        ":" not in self._gtest_filter and
+        "?" not in self._gtest_filter and
+        "*" not in self._gtest_filter):
+      cmd.append("--gtest_filter=%s" % self._gtest_filter)
+      return
+
+    filters = []
+    gtest_files_dir = os.path.join(path_utils.ScriptDir(), "gtest_exclude")
+
+    gtest_filter_files = [
+        os.path.join(gtest_files_dir, name + ".gtest-%s.txt" % tool.ToolName())]
+    # Use ".gtest.txt" files only for slow tools, as they now contain
+    # Valgrind- and Dr.Memory-specific filters.
+    # TODO(glider): rename the files to ".gtest_slow.txt"
+    if tool.ToolName() in ChromeTests.SLOW_TOOLS:
+      gtest_filter_files += [os.path.join(gtest_files_dir, name + ".gtest.txt")]
+    for platform_suffix in common.PlatformNames():
+      gtest_filter_files += [
+          os.path.join(gtest_files_dir, name + ".gtest_%s.txt" % platform_suffix),
+          os.path.join(gtest_files_dir, name + ".gtest-%s_%s.txt" % \
+              (tool.ToolName(), platform_suffix))]
+    logging.info("Reading gtest exclude filter files:")
+    for filename in gtest_filter_files:
+      # strip the leading absolute path (may be very long on the bot)
+      # and the following / or \.
+      readable_filename = filename.replace("\\", "/")  # '\' on Windows
+      readable_filename = readable_filename.replace(self._source_dir, "")[1:]
+      if not os.path.exists(filename):
+        logging.info(" \"%s\" - not found" % readable_filename)
+        continue
+      logging.info(" \"%s\" - OK" % readable_filename)
+      # NOTE(review): the file handle is never closed; fine for a short-lived
+      # script but worth tidying.
+      f = open(filename, 'r')
+      for line in f.readlines():
+        if line.startswith("#") or line.startswith("//") or line.isspace():
+          continue
+        line = line.rstrip()
+        test_prefixes = ["FLAKY", "FAILS"]
+        for p in test_prefixes:
+          # Strip prefixes from the test names.
+          line = line.replace(".%s_" % p, ".")
+        # Exclude the original test name.
+        filters.append(line)
+        if line[-2:] != ".*":
+          # List all possible prefixes if line doesn't end with ".*".
+          for p in test_prefixes:
+            filters.append(line.replace(".", ".%s_" % p))
+    # Get rid of duplicates.
+    filters = set(filters)
+    gtest_filter = self._gtest_filter
+    if len(filters):
+      if gtest_filter:
+        gtest_filter += ":"
+        if gtest_filter.find("-") < 0:
+          gtest_filter += "-"
+      else:
+        gtest_filter = "-"
+      gtest_filter += ":".join(filters)
+    if gtest_filter:
+      cmd.append("--gtest_filter=%s" % gtest_filter)
+
+  @staticmethod
+  def ShowTests():
+    """Print all runnable test names, grouping aliases of the same test."""
+    # Group all aliases that map to the same test function.
+    test_to_names = {}
+    for name, test_function in ChromeTests._test_list.iteritems():
+      test_to_names.setdefault(test_function, []).append(name)
+
+    # The shortest name is treated as canonical; the rest are aliases.
+    name_to_aliases = {}
+    for names in test_to_names.itervalues():
+      names.sort(key=lambda name: len(name))
+      name_to_aliases[names[0]] = names[1:]
+
+    print
+    print "Available tests:"
+    print "----------------"
+    for name, aliases in sorted(name_to_aliases.iteritems()):
+      if aliases:
+        print " {} (aka {})".format(name, ', '.join(aliases))
+      else:
+        print " {}".format(name)
+
+ def SetupLdPath(self, requires_build_dir):
+ if requires_build_dir:
+ self._EnsureBuildDirFound()
+ elif not self._options.build_dir:
+ return
+
+ # Append build_dir to LD_LIBRARY_PATH so external libraries can be loaded.
+ if (os.getenv("LD_LIBRARY_PATH")):
+ os.putenv("LD_LIBRARY_PATH", "%s:%s" % (os.getenv("LD_LIBRARY_PATH"),
+ self._options.build_dir))
+ else:
+ os.putenv("LD_LIBRARY_PATH", self._options.build_dir)
+
+ def SimpleTest(self, module, name, valgrind_test_args=None, cmd_args=None):
+ tool = valgrind_test.CreateTool(self._options.valgrind_tool)
+ cmd = self._DefaultCommand(tool, name, valgrind_test_args)
+ self._AppendGtestFilter(tool, name, cmd)
+ cmd.extend(['--test-tiny-timeout=1000'])
+ if cmd_args:
+ cmd.extend(cmd_args)
+
+ self.SetupLdPath(True)
+ return tool.Run(cmd, module)
+
+ def RunCmdLine(self):
+ tool = valgrind_test.CreateTool(self._options.valgrind_tool)
+ cmd = self._DefaultCommand(tool, None, self._args)
+ self.SetupLdPath(False)
+ return tool.Run(cmd, None)
+
+  # --- Per-suite wrappers ---------------------------------------------------
+  # Each method below maps one test alias to a gtest binary via SimpleTest;
+  # the first argument selects the suppression/exclude-file subdirectory.
+  def TestAccessibility(self):
+    return self.SimpleTest("accessibility", "accessibility_unittests")
+
+  def TestAddressInput(self):
+    return self.SimpleTest("addressinput", "libaddressinput_unittests")
+
+  def TestAngle(self):
+    return self.SimpleTest("angle", "angle_unittests")
+
+  def TestAppList(self):
+    return self.SimpleTest("app_list", "app_list_unittests")
+
+  def TestAsh(self):
+    return self.SimpleTest("ash", "ash_unittests")
+
+  def TestAura(self):
+    return self.SimpleTest("aura", "aura_unittests")
+
+  def TestBase(self):
+    return self.SimpleTest("base", "base_unittests")
+
+  def TestBlinkHeap(self):
+    return self.SimpleTest("blink_heap", "blink_heap_unittests")
+
+  def TestBlinkPlatform(self):
+    return self.SimpleTest("blink_platform", "blink_platform_unittests")
+
+  def TestCacheInvalidation(self):
+    return self.SimpleTest("cacheinvalidation", "cacheinvalidation_unittests")
+
+  def TestCast(self):
+    return self.SimpleTest("chrome", "cast_unittests")
+
+  def TestCC(self):
+    return self.SimpleTest("cc", "cc_unittests")
+
+  def TestChromeApp(self):
+    return self.SimpleTest("chrome_app", "chrome_app_unittests")
+
+  def TestChromeElf(self):
+    return self.SimpleTest("chrome_elf", "chrome_elf_unittests")
+
+  def TestChromeDriver(self):
+    return self.SimpleTest("chromedriver", "chromedriver_unittests")
+
+  def TestChromeOS(self):
+    return self.SimpleTest("chromeos", "chromeos_unittests")
+
+  def TestComponents(self):
+    return self.SimpleTest("components", "components_unittests")
+
+  def TestCompositor(self):
+    return self.SimpleTest("compositor", "compositor_unittests")
+
+  def TestContent(self):
+    return self.SimpleTest("content", "content_unittests")
+
+  def TestCourgette(self):
+    return self.SimpleTest("courgette", "courgette_unittests")
+
+  def TestCrypto(self):
+    return self.SimpleTest("crypto", "crypto_unittests")
+
+  def TestDevice(self):
+    return self.SimpleTest("device", "device_unittests")
+
+  def TestDisplay(self):
+    return self.SimpleTest("display", "display_unittests")
+
+  def TestEvents(self):
+    return self.SimpleTest("events", "events_unittests")
+
+  def TestExtensions(self):
+    return self.SimpleTest("extensions", "extensions_unittests")
+
+  def TestFFmpegRegressions(self):
+    return self.SimpleTest("chrome", "ffmpeg_regression_tests")
+
+  def TestGCM(self):
+    return self.SimpleTest("gcm", "gcm_unit_tests")
+
+  def TestGfx(self):
+    return self.SimpleTest("gfx", "gfx_unittests")
+
+  def TestGin(self):
+    return self.SimpleTest("gin", "gin_unittests")
+
+  def TestGoogleApis(self):
+    return self.SimpleTest("google_apis", "google_apis_unittests")
+
+  def TestGPU(self):
+    return self.SimpleTest("gpu", "gpu_unittests")
+
+  def TestIpc(self):
+    # IPC tests spawn child processes, so the tool must follow them.
+    return self.SimpleTest("ipc", "ipc_tests",
+                           valgrind_test_args=["--trace_children"])
+
+  def TestInstallerUtil(self):
+    return self.SimpleTest("installer_util", "installer_util_unittests")
+
+  def TestJingle(self):
+    return self.SimpleTest("chrome", "jingle_unittests")
+
+  def TestKeyboard(self):
+    return self.SimpleTest("keyboard", "keyboard_unittests")
+
+  def TestMedia(self):
+    return self.SimpleTest("chrome", "media_unittests")
+
+  def TestMessageCenter(self):
+    return self.SimpleTest("message_center", "message_center_unittests")
+
+  def TestMidi(self):
+    return self.SimpleTest("chrome", "midi_unittests")
+
+  def TestMojoCommon(self):
+    return self.SimpleTest("mojo_common", "mojo_common_unittests")
+
+  def TestMojoPublicBindings(self):
+    return self.SimpleTest("mojo_public_bindings",
+                           "mojo_public_bindings_unittests")
+
+ def TestMojoPublicSystem(self):
+ return self.SimpleTest("mojo_public_system",
+ "mojo_public_system_unittests")
+
+ def TestMojoPublicSysPerf(self):
+ return self.SimpleTest("mojo_public_sysperf",
+ "mojo_public_system_perftests")
+
+ def TestMojoSystem(self):
+ return self.SimpleTest("mojo_system", "mojo_system_unittests")
+
+ def TestNet(self):
+ return self.SimpleTest("net", "net_unittests")
+
+ def TestNetPerf(self):
+ return self.SimpleTest("net", "net_perftests")
+
+ def TestPhoneNumber(self):
+ return self.SimpleTest("phonenumber", "libphonenumber_unittests")
+
+ def TestPPAPI(self):
+ return self.SimpleTest("chrome", "ppapi_unittests")
+
+ def TestPrinting(self):
+ return self.SimpleTest("chrome", "printing_unittests")
+
+ def TestRemoting(self):
+ return self.SimpleTest("chrome", "remoting_unittests",
+ cmd_args=[
+ "--ui-test-action-timeout=60000",
+ "--ui-test-action-max-timeout=150000"])
+
+ def TestSkia(self):
+ return self.SimpleTest("skia", "skia_unittests")
+
+ def TestSql(self):
+ return self.SimpleTest("chrome", "sql_unittests")
+
+ def TestSync(self):
+ return self.SimpleTest("chrome", "sync_unit_tests")
+
+ def TestLinuxSandbox(self):
+ return self.SimpleTest("sandbox", "sandbox_linux_unittests")
+
+ def TestUnit(self):
+ # http://crbug.com/51716
+ # Disabling all unit tests
+ # Problems reappeared after r119922
+ if common.IsMac() and (self._options.valgrind_tool == "memcheck"):
+ logging.warning("unit_tests are disabled for memcheck on MacOS.")
+ return 0;
+ return self.SimpleTest("chrome", "unit_tests")
+
+ def TestUIBaseUnit(self):
+ return self.SimpleTest("chrome", "ui_base_unittests")
+
+ def TestUIChromeOS(self):
+ return self.SimpleTest("chrome", "ui_chromeos_unittests")
+
+ def TestURL(self):
+ return self.SimpleTest("chrome", "url_unittests")
+
+ def TestViews(self):
+ return self.SimpleTest("views", "views_unittests")
+
+
+ # Valgrind timeouts are in seconds.
+ UI_VALGRIND_ARGS = ["--timeout=14400", "--trace_children", "--indirect"]
+ # UI test timeouts are in milliseconds.
+ UI_TEST_ARGS = ["--ui-test-action-timeout=60000",
+ "--ui-test-action-max-timeout=150000",
+ "--no-sandbox"]
+
+ # TODO(thestig) fine-tune these values.
+ # Valgrind timeouts are in seconds.
+ BROWSER_VALGRIND_ARGS = ["--timeout=50000", "--trace_children", "--indirect"]
+ # Browser test timeouts are in milliseconds.
+ BROWSER_TEST_ARGS = ["--ui-test-action-timeout=400000",
+ "--ui-test-action-max-timeout=800000",
+ "--no-sandbox"]
+
+ def TestBrowser(self):
+ return self.SimpleTest("chrome", "browser_tests",
+ valgrind_test_args=self.BROWSER_VALGRIND_ARGS,
+ cmd_args=self.BROWSER_TEST_ARGS)
+
+ def TestContentBrowser(self):
+ return self.SimpleTest("content", "content_browsertests",
+ valgrind_test_args=self.BROWSER_VALGRIND_ARGS,
+ cmd_args=self.BROWSER_TEST_ARGS)
+
+ def TestInteractiveUI(self):
+ return self.SimpleTest("chrome", "interactive_ui_tests",
+ valgrind_test_args=self.UI_VALGRIND_ARGS,
+ cmd_args=self.UI_TEST_ARGS)
+
+ def TestSyncIntegration(self):
+ return self.SimpleTest("chrome", "sync_integration_tests",
+ valgrind_test_args=self.UI_VALGRIND_ARGS,
+ cmd_args=(["--ui-test-action-max-timeout=450000"]))
+
+ def TestLayoutChunk(self, chunk_num, chunk_size):
+ # Run tests [chunk_num*chunk_size .. (chunk_num+1)*chunk_size) from the
+ # list of tests. Wrap around to beginning of list at end.
+ # If chunk_size is zero, run all tests in the list once.
+ # If a text file is given as argument, it is used as the list of tests.
+ assert((chunk_size == 0) != (len(self._args) == 0))
+ # Build the ginormous commandline in 'cmd'.
+ # It's going to be roughly
+ # python valgrind_test.py ...
+ # but we'll use the --indirect flag to valgrind_test.py
+ # to avoid valgrinding python.
+ # Start by building the valgrind_test.py commandline.
+ tool = valgrind_test.CreateTool(self._options.valgrind_tool)
+ cmd = self._DefaultCommand(tool)
+ cmd.append("--trace_children")
+ cmd.append("--indirect_webkit_layout")
+ cmd.append("--ignore_exit_code")
+ # Now build script_cmd, the run-webkits-tests commandline.
+ # Store each chunk in its own directory so that we can find the data later
+ chunk_dir = os.path.join("layout", "chunk_%05d" % chunk_num)
+ out_dir = os.path.join(path_utils.ScriptDir(), "latest")
+ out_dir = os.path.join(out_dir, chunk_dir)
+ if os.path.exists(out_dir):
+ old_files = glob.glob(os.path.join(out_dir, "*.txt"))
+ for f in old_files:
+ os.remove(f)
+ else:
+ os.makedirs(out_dir)
+ script = os.path.join(self._source_dir, "third_party", "WebKit", "Tools",
+ "Scripts", "run-webkit-tests")
+ # http://crbug.com/260627: After the switch to content_shell from DRT, each
+ # test now brings up 3 processes. Under Valgrind, they become memory bound
+ # and can eventually OOM if we don't reduce the total count.
+ # It'd be nice if content_shell automatically throttled the startup of new
+ # tests if we're low on memory.
+ jobs = max(1, int(multiprocessing.cpu_count() * 0.3))
+ script_cmd = ["python", script, "-v",
+ # run a separate DumpRenderTree for each test
+ "--batch-size=1",
+ "--fully-parallel",
+ "--child-processes=%d" % jobs,
+ "--time-out-ms=800000",
+ "--no-retry-failures", # retrying takes too much time
+ # http://crbug.com/176908: Don't launch a browser when done.
+ "--no-show-results",
+ "--nocheck-sys-deps",
+ "--additional-driver-flag=--no-sandbox"]
+ # Pass build mode to run-webkit-tests. We aren't passed it directly,
+ # so parse it out of build_dir. run-webkit-tests can only handle
+ # the two values "Release" and "Debug".
+ # TODO(Hercules): unify how all our scripts pass around build mode
+ # (--mode / --target / --build-dir / --debug)
+ if self._options.build_dir:
+ build_root, mode = os.path.split(self._options.build_dir)
+ script_cmd.extend(["--build-directory", build_root, "--target", mode])
+ if (chunk_size > 0):
+ script_cmd.append("--run-chunk=%d:%d" % (chunk_num, chunk_size))
+ if len(self._args):
+ # if the arg is a txt file, then treat it as a list of tests
+ if os.path.isfile(self._args[0]) and self._args[0][-4:] == ".txt":
+ script_cmd.append("--test-list=%s" % self._args[0])
+ else:
+ script_cmd.extend(self._args)
+ self._AppendGtestFilter(tool, "layout", script_cmd)
+ # Now run script_cmd with the wrapper in cmd
+ cmd.extend(["--"])
+ cmd.extend(script_cmd)
+
+ # Layout tests often times fail quickly, but the buildbot remains green.
+ # Detect this situation when running with the default chunk size.
+ if chunk_size == self.LAYOUT_TESTS_DEFAULT_CHUNK_SIZE:
+ min_runtime_in_seconds=120
+ else:
+ min_runtime_in_seconds=0
+ ret = tool.Run(cmd, "layout", min_runtime_in_seconds=min_runtime_in_seconds)
+ return ret
+
+
+ def TestLayout(self):
+ # A "chunk file" is maintained in the local directory so that each test
+ # runs a slice of the layout tests of size chunk_size that increments with
+ # each run. Since tests can be added and removed from the layout tests at
+ # any time, this is not going to give exact coverage, but it will allow us
+ # to continuously run small slices of the layout tests under valgrind rather
+ # than having to run all of them in one shot.
+ chunk_size = self._options.num_tests
+ if chunk_size == 0 or len(self._args):
+ return self.TestLayoutChunk(0, 0)
+ chunk_num = 0
+ chunk_file = os.path.join("valgrind_layout_chunk.txt")
+ logging.info("Reading state from " + chunk_file)
+ try:
+ f = open(chunk_file)
+ if f:
+ chunk_str = f.read()
+ if len(chunk_str):
+ chunk_num = int(chunk_str)
+ # This should be enough so that we have a couple of complete runs
+ # of test data stored in the archive (although note that when we loop
+      # that we are almost guaranteed not to be at the end of the test list)
+ if chunk_num > 10000:
+ chunk_num = 0
+ f.close()
+ except IOError, (errno, strerror):
+ logging.error("error reading from file %s (%d, %s)" % (chunk_file,
+ errno, strerror))
+ # Save the new chunk size before running the tests. Otherwise if a
+ # particular chunk hangs the bot, the chunk number will never get
+ # incremented and the bot will be wedged.
+ logging.info("Saving state to " + chunk_file)
+ try:
+ f = open(chunk_file, "w")
+ chunk_num += 1
+ f.write("%d" % chunk_num)
+ f.close()
+ except IOError, (errno, strerror):
+ logging.error("error writing to file %s (%d, %s)" % (chunk_file, errno,
+ strerror))
+ # Since we're running small chunks of the layout tests, it's important to
+ # mark the ones that have errors in them. These won't be visible in the
+ # summary list for long, but will be useful for someone reviewing this bot.
+ return self.TestLayoutChunk(chunk_num, chunk_size)
+
+ # The known list of tests.
+ # Recognise the original abbreviations as well as full executable names.
+ _test_list = {
+ "cmdline" : RunCmdLine,
+ "addressinput": TestAddressInput,
+ "libaddressinput_unittests": TestAddressInput,
+ "accessibility": TestAccessibility,
+ "angle": TestAngle, "angle_unittests": TestAngle,
+ "app_list": TestAppList, "app_list_unittests": TestAppList,
+ "ash": TestAsh, "ash_unittests": TestAsh,
+ "aura": TestAura, "aura_unittests": TestAura,
+ "base": TestBase, "base_unittests": TestBase,
+ "blink_heap": TestBlinkHeap,
+ "blink_platform": TestBlinkPlatform,
+ "browser": TestBrowser, "browser_tests": TestBrowser,
+ "cacheinvalidation": TestCacheInvalidation,
+ "cacheinvalidation_unittests": TestCacheInvalidation,
+ "cast": TestCast, "cast_unittests": TestCast,
+ "cc": TestCC, "cc_unittests": TestCC,
+ "chrome_app": TestChromeApp,
+ "chrome_elf": TestChromeElf,
+ "chromedriver": TestChromeDriver,
+ "chromeos": TestChromeOS, "chromeos_unittests": TestChromeOS,
+ "components": TestComponents,"components_unittests": TestComponents,
+ "compositor": TestCompositor,"compositor_unittests": TestCompositor,
+ "content": TestContent, "content_unittests": TestContent,
+ "content_browsertests": TestContentBrowser,
+ "courgette": TestCourgette, "courgette_unittests": TestCourgette,
+ "crypto": TestCrypto, "crypto_unittests": TestCrypto,
+ "device": TestDevice, "device_unittests": TestDevice,
+ "display": TestDisplay, "display_unittests": TestDisplay,
+ "events": TestEvents, "events_unittests": TestEvents,
+ "extensions": TestExtensions, "extensions_unittests": TestExtensions,
+ "ffmpeg_regression_tests": TestFFmpegRegressions,
+ "gcm": TestGCM, "gcm_unit_tests": TestGCM,
+ "gin": TestGin, "gin_unittests": TestGin,
+ "gfx": TestGfx, "gfx_unittests": TestGfx,
+ "google_apis": TestGoogleApis,
+ "gpu": TestGPU, "gpu_unittests": TestGPU,
+ "ipc": TestIpc, "ipc_tests": TestIpc,
+ "installer_util": TestInstallerUtil,
+ "installer_util_unittests": TestInstallerUtil,
+ "interactive_ui": TestInteractiveUI,
+ "jingle": TestJingle, "jingle_unittests": TestJingle,
+ "keyboard": TestKeyboard, "keyboard_unittests": TestKeyboard,
+ "layout": TestLayout, "layout_tests": TestLayout,
+ "media": TestMedia, "media_unittests": TestMedia,
+ "message_center": TestMessageCenter,
+ "message_center_unittests" : TestMessageCenter,
+ "midi": TestMidi, "midi_unittests": TestMidi,
+ "mojo_common": TestMojoCommon,
+ "mojo_common_unittests": TestMojoCommon,
+ "mojo_system": TestMojoSystem,
+ "mojo_system_unittests": TestMojoSystem,
+ "mojo_public_system": TestMojoPublicSystem,
+ "mojo_public_system_unittests": TestMojoPublicSystem,
+ "mojo_public_bindings": TestMojoPublicBindings,
+ "mojo_public_bindings_unittests": TestMojoPublicBindings,
+ "mojo_public_sysperf": TestMojoPublicSysPerf,
+ "net": TestNet, "net_unittests": TestNet,
+ "net_perf": TestNetPerf, "net_perftests": TestNetPerf,
+ "phonenumber": TestPhoneNumber,
+ "libphonenumber_unittests": TestPhoneNumber,
+ "ppapi": TestPPAPI, "ppapi_unittests": TestPPAPI,
+ "printing": TestPrinting, "printing_unittests": TestPrinting,
+ "remoting": TestRemoting, "remoting_unittests": TestRemoting,
+ "sandbox": TestLinuxSandbox, "sandbox_linux_unittests": TestLinuxSandbox,
+ "skia": TestSkia, "skia_unittests": TestSkia,
+ "sql": TestSql, "sql_unittests": TestSql,
+ "sync": TestSync, "sync_unit_tests": TestSync,
+ "sync_integration_tests": TestSyncIntegration,
+ "sync_integration": TestSyncIntegration,
+ "ui_base_unit": TestUIBaseUnit, "ui_base_unittests": TestUIBaseUnit,
+ "ui_chromeos": TestUIChromeOS, "ui_chromeos_unittests": TestUIChromeOS,
+ "unit": TestUnit, "unit_tests": TestUnit,
+ "url": TestURL, "url_unittests": TestURL,
+ "views": TestViews, "views_unittests": TestViews,
+ "webkit": TestLayout,
+ }
+
+
+def _main():
+ parser = optparse.OptionParser("usage: %prog -b <dir> -t <test> "
+ "[-t <test> ...]")
+
+ parser.add_option("--help-tests", dest="help_tests", action="store_true",
+ default=False, help="List all available tests")
+ parser.add_option("-b", "--build-dir",
+ help="the location of the compiler output")
+ parser.add_option("--target", help="Debug or Release")
+ parser.add_option("-t", "--test", action="append", default=[],
+ help="which test to run, supports test:gtest_filter format "
+ "as well.")
+ parser.add_option("--baseline", action="store_true", default=False,
+ help="generate baseline data instead of validating")
+ parser.add_option("--gtest_filter",
+ help="additional arguments to --gtest_filter")
+ parser.add_option("--gtest_repeat", help="argument for --gtest_repeat")
+ parser.add_option("--gtest_shuffle", action="store_true", default=False,
+ help="Randomize tests' orders on every iteration.")
+ parser.add_option("--gtest_break_on_failure", action="store_true",
+ default=False,
+ help="Drop in to debugger on assertion failure. Also "
+ "useful for forcing tests to exit with a stack dump "
+ "on the first assertion failure when running with "
+ "--gtest_repeat=-1")
+ parser.add_option("-v", "--verbose", action="store_true", default=False,
+ help="verbose output - enable debug log messages")
+ parser.add_option("--tool", dest="valgrind_tool", default="memcheck",
+ help="specify a valgrind tool to run the tests under")
+ parser.add_option("--tool_flags", dest="valgrind_tool_flags", default="",
+ help="specify custom flags for the selected valgrind tool")
+ parser.add_option("--keep_logs", action="store_true", default=False,
+ help="store memory tool logs in the <tool>.logs directory "
+ "instead of /tmp.\nThis can be useful for tool "
+ "developers/maintainers.\nPlease note that the <tool>"
+ ".logs directory will be clobbered on tool startup.")
+ parser.add_option("-n", "--num_tests", type="int",
+ default=ChromeTests.LAYOUT_TESTS_DEFAULT_CHUNK_SIZE,
+ help="for layout tests: # of subtests per run. 0 for all.")
+ parser.add_option("--test-launcher-bot-mode", action="store_true",
+ help="run the tests with --test-launcher-bot-mode")
+ parser.add_option("--test-launcher-total-shards", type=int,
+ help="run the tests with --test-launcher-total-shards")
+ parser.add_option("--test-launcher-shard-index", type=int,
+ help="run the tests with --test-launcher-shard-index")
+ parser.add_option("--drmemory_ops",
+ help="extra options passed to Dr. Memory")
+
+ options, args = parser.parse_args()
+
+ # Bake target into build_dir.
+ if options.target and options.build_dir:
+ assert (options.target !=
+ os.path.basename(os.path.dirname(options.build_dir)))
+ options.build_dir = os.path.join(os.path.abspath(options.build_dir),
+ options.target)
+
+ if options.verbose:
+ logging_utils.config_root(logging.DEBUG)
+ else:
+ logging_utils.config_root()
+
+ if options.help_tests:
+ ChromeTests.ShowTests()
+ return 0
+
+ if not options.test:
+ parser.error("--test not specified")
+
+ if len(options.test) != 1 and options.gtest_filter:
+ parser.error("--gtest_filter and multiple tests don't make sense together")
+
+ for t in options.test:
+ tests = ChromeTests(options, args, t)
+ ret = tests.Run()
+ if ret: return ret
+ return 0
+
+
+if __name__ == "__main__":
+ sys.exit(_main())
diff --git a/chromium/tools/valgrind/chrome_tests.sh b/chromium/tools/valgrind/chrome_tests.sh
new file mode 100755
index 00000000000..479138e7ab4
--- /dev/null
+++ b/chromium/tools/valgrind/chrome_tests.sh
@@ -0,0 +1,90 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Set up some paths and re-direct the arguments to chrome_tests.py
+
+export THISDIR=`dirname $0`
+ARGV_COPY="$@"
+
+# We need to set CHROME_VALGRIND iff using Memcheck:
+# tools/valgrind/chrome_tests.sh --tool memcheck
+# or
+# tools/valgrind/chrome_tests.sh --tool=memcheck
+tool="memcheck" # Default to memcheck.
+while (( "$#" ))
+do
+ if [[ "$1" == "--tool" ]]
+ then
+ tool="$2"
+ shift
+ elif [[ "$1" =~ --tool=(.*) ]]
+ then
+ tool="${BASH_REMATCH[1]}"
+ fi
+ shift
+done
+
+NEEDS_VALGRIND=0
+NEEDS_DRMEMORY=0
+
+case "$tool" in
+ "memcheck")
+ NEEDS_VALGRIND=1
+ ;;
+ "drmemory" | "drmemory_light" | "drmemory_full" | "drmemory_pattern")
+ NEEDS_DRMEMORY=1
+ ;;
+esac
+
+if [ "$NEEDS_VALGRIND" == "1" ]
+then
+ export CHROME_VALGRIND=`sh $THISDIR/locate_valgrind.sh`
+ if [ "$CHROME_VALGRIND" = "" ]
+ then
+ # locate_valgrind.sh failed
+ exit 1
+ fi
+ echo "Using valgrind binaries from ${CHROME_VALGRIND}"
+
+ PATH="${CHROME_VALGRIND}/bin:$PATH"
+ # We need to set these variables to override default lib paths hard-coded into
+ # Valgrind binary.
+ export VALGRIND_LIB="$CHROME_VALGRIND/lib/valgrind"
+ export VALGRIND_LIB_INNER="$CHROME_VALGRIND/lib/valgrind"
+
+ # Clean up some /tmp directories that might be stale due to interrupted
+ # chrome_tests.py execution.
+ # FYI:
+ # -mtime +1 <- only print files modified more than 24h ago,
+ # -print0/-0 are needed to handle possible newlines in the filenames.
+ echo "Cleanup /tmp from Valgrind stuff"
+ find /tmp -maxdepth 1 \(\
+ -name "vgdb-pipe-*" -or -name "vg_logs_*" -or -name "valgrind.*" \
+ \) -mtime +1 -print0 | xargs -0 rm -rf
+fi
+
+if [ "$NEEDS_DRMEMORY" == "1" ]
+then
+ if [ -z "$DRMEMORY_COMMAND" ]
+ then
+ DRMEMORY_PATH="$THISDIR/../../third_party/drmemory"
+ DRMEMORY_SFX="$DRMEMORY_PATH/drmemory-windows-sfx.exe"
+ if [ ! -f "$DRMEMORY_SFX" ]
+ then
+ echo "Can't find Dr. Memory executables."
+ echo "See http://www.chromium.org/developers/how-tos/using-valgrind/dr-memory"
+ echo "for the instructions on how to get them."
+ exit 1
+ fi
+
+ chmod +x "$DRMEMORY_SFX" # Cygwin won't run it without +x.
+ "$DRMEMORY_SFX" -o"$DRMEMORY_PATH/unpacked" -y
+ export DRMEMORY_COMMAND="$DRMEMORY_PATH/unpacked/bin/drmemory.exe"
+ fi
+fi
+
+PYTHONPATH=$THISDIR/../python/google python \
+ "$THISDIR/chrome_tests.py" $ARGV_COPY
diff --git a/chromium/tools/valgrind/common.py b/chromium/tools/valgrind/common.py
new file mode 100644
index 00000000000..7e163e3c602
--- /dev/null
+++ b/chromium/tools/valgrind/common.py
@@ -0,0 +1,252 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import platform
+import os
+import signal
+import subprocess
+import sys
+import time
+
+
+class NotImplementedError(Exception):
+ pass
+
+
+class TimeoutError(Exception):
+ pass
+
+
+def RunSubprocessInBackground(proc):
+ """Runs a subprocess in the background. Returns a handle to the process."""
+ logging.info("running %s in the background" % " ".join(proc))
+ return subprocess.Popen(proc)
+
+
+def RunSubprocess(proc, timeout=0):
+ """ Runs a subprocess, until it finishes or |timeout| is exceeded and the
+ process is killed with taskkill. A |timeout| <= 0 means no timeout.
+
+ Args:
+ proc: list of process components (exe + args)
+ timeout: how long to wait before killing, <= 0 means wait forever
+ """
+
+ logging.info("running %s, timeout %d sec" % (" ".join(proc), timeout))
+ sys.stdout.flush()
+ sys.stderr.flush()
+
+ # Manually read and print out stdout and stderr.
+ # By default, the subprocess is supposed to inherit these from its parent,
+ # however when run under buildbot, it seems unable to read data from a
+ # grandchild process, so we have to read the child and print the data as if
+ # it came from us for buildbot to read it. We're not sure why this is
+ # necessary.
+ # TODO(erikkay): should we buffer stderr and stdout separately?
+ p = subprocess.Popen(proc, universal_newlines=True,
+ bufsize=0, # unbuffered
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+
+ logging.info("started subprocess")
+
+ did_timeout = False
+ if timeout > 0:
+ wait_until = time.time() + timeout
+ while p.poll() is None and not did_timeout:
+ # Have to use readline rather than readlines() or "for line in p.stdout:",
+ # otherwise we get buffered even with bufsize=0.
+ line = p.stdout.readline()
+ while line and not did_timeout:
+ sys.stdout.write(line)
+ sys.stdout.flush()
+ line = p.stdout.readline()
+ if timeout > 0:
+ did_timeout = time.time() > wait_until
+
+ if did_timeout:
+ logging.info("process timed out")
+ else:
+ logging.info("process ended, did not time out")
+
+ if did_timeout:
+ if IsWindows():
+ subprocess.call(["taskkill", "/T", "/F", "/PID", str(p.pid)])
+ else:
+ # Does this kill all children, too?
+ os.kill(p.pid, signal.SIGINT)
+ logging.error("KILLED %d" % p.pid)
+ # Give the process a chance to actually die before continuing
+ # so that cleanup can happen safely.
+ time.sleep(1.0)
+ logging.error("TIMEOUT waiting for %s" % proc[0])
+ raise TimeoutError(proc[0])
+ else:
+ for line in p.stdout:
+ sys.stdout.write(line)
+ if not IsMac(): # stdout flush fails on Mac
+ logging.info("flushing stdout")
+ sys.stdout.flush()
+
+ logging.info("collecting result code")
+ result = p.poll()
+ if result:
+ logging.error("%s exited with non-zero result code %d" % (proc[0], result))
+ return result
+
+
+def IsLinux():
+ return sys.platform.startswith('linux')
+
+
+def IsMac():
+ return sys.platform.startswith('darwin')
+
+
+def IsWindows():
+ return sys.platform == 'cygwin' or sys.platform.startswith('win')
+
+
+def WindowsVersionName():
+ """Returns the name of the Windows version if it is known, or None.
+
+ Possible return values are: xp, vista, 7, 8, or None
+ """
+ if sys.platform == 'cygwin':
+ # Windows version number is hiding in system name. Looks like:
+ # CYGWIN_NT-6.1-WOW64
+ try:
+ version_str = platform.uname()[0].split('-')[1]
+ except:
+ return None
+ elif sys.platform.startswith('win'):
+ # Normal Windows version string. Mine: 6.1.7601
+ version_str = platform.version()
+ else:
+ return None
+
+ parts = version_str.split('.')
+ try:
+ major = int(parts[0])
+ minor = int(parts[1])
+ except:
+ return None # Can't parse, unknown version.
+
+ if major == 5:
+ return 'xp'
+ elif major == 6 and minor == 0:
+ return 'vista'
+ elif major == 6 and minor == 1:
+ return '7'
+ elif major == 6 and minor == 2:
+ return '8' # Future proof. ;)
+ return None
+
+
+def PlatformNames():
+  """Return an array of strings to be used in paths for the platform
+ (e.g. suppressions, gtest filters, ignore files etc.)
+ The first element of the array describes the 'main' platform
+ """
+ if IsLinux():
+ return ['linux']
+ if IsMac():
+ return ['mac']
+ if IsWindows():
+ names = ['win32']
+ version_name = WindowsVersionName()
+ if version_name is not None:
+ names.append('win-%s' % version_name)
+ return names
+ raise NotImplementedError('Unknown platform "%s".' % sys.platform)
+
+
+def PutEnvAndLog(env_name, env_value):
+ os.putenv(env_name, env_value)
+ logging.info('export %s=%s', env_name, env_value)
+
+def BoringCallers(mangled, use_re_wildcards):
+  """Return a list of 'boring' function names (optionally mangled)
+ with */? wildcards (optionally .*/.).
+ Boring = we drop off the bottom of stack traces below such functions.
+ """
+
+ need_mangling = [
+ # Don't show our testing framework:
+ ("testing::Test::Run", "_ZN7testing4Test3RunEv"),
+ ("testing::TestInfo::Run", "_ZN7testing8TestInfo3RunEv"),
+ ("testing::internal::Handle*ExceptionsInMethodIfSupported*",
+ "_ZN7testing8internal3?Handle*ExceptionsInMethodIfSupported*"),
+
+ # Depend on scheduling:
+ ("MessageLoop::Run", "_ZN11MessageLoop3RunEv"),
+ ("MessageLoop::RunTask", "_ZN11MessageLoop7RunTask*"),
+ ("RunnableMethod*", "_ZN14RunnableMethod*"),
+ ("DispatchToMethod*", "_Z*16DispatchToMethod*"),
+ ("base::internal::Invoker*::DoInvoke*",
+ "_ZN4base8internal8Invoker*DoInvoke*"), # Invoker{1,2,3}
+ ("base::internal::RunnableAdapter*::Run*",
+ "_ZN4base8internal15RunnableAdapter*Run*"),
+ ]
+
+ ret = []
+ for pair in need_mangling:
+ ret.append(pair[1 if mangled else 0])
+
+ ret += [
+ # Also don't show the internals of libc/pthread.
+ "start_thread",
+ "main",
+ "BaseThreadInitThunk",
+ ]
+
+ if use_re_wildcards:
+ for i in range(0, len(ret)):
+ ret[i] = ret[i].replace('*', '.*').replace('?', '.')
+
+ return ret
+
+def NormalizeWindowsPath(path):
+ """If we're using Cygwin Python, turn the path into a Windows path.
+
+ Don't turn forward slashes into backslashes for easier copy-pasting and
+ escaping.
+
+ TODO(rnk): If we ever want to cut out the subprocess invocation, we can use
+ _winreg to get the root Cygwin directory from the registry key:
+ HKEY_LOCAL_MACHINE\SOFTWARE\Cygwin\setup\rootdir.
+ """
+ if sys.platform.startswith("cygwin"):
+ p = subprocess.Popen(["cygpath", "-m", path],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ (out, err) = p.communicate()
+ if err:
+ logging.warning("WARNING: cygpath error: %s", err)
+ return out.strip()
+ else:
+ return path
+
+############################
+# Common output format code
+
+def PrintUsedSuppressionsList(suppcounts):
+ """ Prints out the list of used suppressions in a format common to all the
+ memory tools. If the list is empty, prints nothing and returns False,
+ otherwise True.
+
+ suppcounts: a dictionary of used suppression counts,
+ Key -> name, Value -> count.
+ """
+ if not suppcounts:
+ return False
+
+ print "-----------------------------------------------------"
+ print "Suppressions used:"
+ print " count name"
+ for (name, count) in sorted(suppcounts.items(), key=lambda (k,v): (v,k)):
+ print "%7d %s" % (count, name)
+ print "-----------------------------------------------------"
+ sys.stdout.flush()
+ return True
diff --git a/chromium/tools/valgrind/drmemory.bat b/chromium/tools/valgrind/drmemory.bat
new file mode 100755
index 00000000000..fe911e4c328
--- /dev/null
+++ b/chromium/tools/valgrind/drmemory.bat
@@ -0,0 +1,5 @@
+@echo off
+:: Copyright (c) 2011 The Chromium Authors. All rights reserved.
+:: Use of this source code is governed by a BSD-style license that can be
+:: found in the LICENSE file.
+%~dp0\chrome_tests.bat -t cmdline --tool drmemory %*
diff --git a/chromium/tools/valgrind/drmemory/OWNERS b/chromium/tools/valgrind/drmemory/OWNERS
new file mode 100644
index 00000000000..72e8ffc0db8
--- /dev/null
+++ b/chromium/tools/valgrind/drmemory/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/chromium/tools/valgrind/drmemory/PRESUBMIT.py b/chromium/tools/valgrind/drmemory/PRESUBMIT.py
new file mode 100644
index 00000000000..2e56b63d464
--- /dev/null
+++ b/chromium/tools/valgrind/drmemory/PRESUBMIT.py
@@ -0,0 +1,39 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details on the presubmit API built into depot_tools.
+"""
+
+
+def CheckChange(input_api, output_api):
+ """Checks the DrMemory suppression files for bad suppressions."""
+
+ # TODO(timurrrr): find out how to do relative imports
+ # and remove this ugly hack. Also, the CheckChange function won't be needed.
+ tools_vg_path = input_api.os_path.join(input_api.PresubmitLocalPath(), '..')
+ import sys
+ old_path = sys.path
+ try:
+ sys.path = sys.path + [tools_vg_path]
+ import suppressions
+ return suppressions.PresubmitCheck(input_api, output_api)
+ finally:
+ sys.path = old_path
+
+
+def CheckChangeOnUpload(input_api, output_api):
+ return CheckChange(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+ return CheckChange(input_api, output_api)
+
+
+def GetPreferredTryMasters(project, change):
+ return {
+ 'tryserver.chromium.win': {
+ 'win_drmemory': set(['defaulttests']),
+ }
+ }
diff --git a/chromium/tools/valgrind/drmemory/suppressions.txt b/chromium/tools/valgrind/drmemory/suppressions.txt
new file mode 100644
index 00000000000..36f69910dae
--- /dev/null
+++ b/chromium/tools/valgrind/drmemory/suppressions.txt
@@ -0,0 +1,812 @@
+# This file contains suppressions for the Dr.Memory tool, see
+# http://dev.chromium.org/developers/how-tos/using-drmemory
+#
+# This file contains suppressions for the DrMemory reports happening
+# in the 'light' mode (a.k.a. drmemory_light) as well as in the 'full' mode.
+# Please use suppressions_full.txt for all the reports that can happen only
+# in the full mode (drmemory_full).
+
+############################
+# Known reports in third-party code we have no control over.
+
+# Reports from Sophos antivirus
+UNADDRESSABLE ACCESS
+name=Sophos UNADDR
+...
+sophos*.dll!*
+
+UNINITIALIZED READ
+name=Sophos UNINIT
+...
+sophos*.dll!*
+
+LEAK
+name=Sophos LEAK
+...
+sophos*.dll!*
+
+# Reports from Microsoft RDP ActiveX control (mstscax.dll)
+
+GDI USAGE ERROR
+name=crbug.com/177832: mstscax.dll causes "GDI USAGE ERROR" errors.
+...
+mstscax.dll!*
+
+UNADDRESSABLE ACCESS
+name=crbug.com/177832: mstscax.dll causes "UNADDRESSABLE ACCESS" errors.
+...
+mstscax.dll!*
+
+############################
+# Suppress some false reports due to bugs in Dr.Memory like wrong analysis
+# assumptions or unhandled syscalls
+
+# Please note: the following suppressions were written in the absence of
+# private symbols so may need to be updated when we switch to auto-loading PDBs
+
+UNADDRESSABLE ACCESS
+name=https://github.com/DynamoRIO/drmemory/issues/12 UNADDR
+...
+SHELL32.dll!SHFileOperation*
+
+UNADDRESSABLE ACCESS
+name=https://github.com/DynamoRIO/drmemory/issues/40 UNADDR
+...
+WINSPOOL.DRV!*
+
+INVALID HEAP ARGUMENT
+name=https://github.com/DynamoRIO/drmemory/issues/40 INVALID HEAP
+...
+WINSPOOL.DRV!*
+
+UNADDRESSABLE ACCESS
+name=https://github.com/DynamoRIO/drmemory/issues/59
+...
+*!SetEnvironmentVariable*
+
+UNADDRESSABLE ACCESS
+name=https://github.com/DynamoRIO/drmemory/issues/68 (UNADDR 1)
+...
+MSWSOCK.dll!WSPStartup
+
+UNADDRESSABLE ACCESS
+name=https://github.com/DynamoRIO/drmemory/issues/68 (UNADDR 2)
+...
+ntdll.dll!RtlValidateUnicodeString
+
+############################
+# TODO(timurrrr): investigate these
+UNADDRESSABLE ACCESS
+name=TODO SHParseDisplayName
+...
+*!SHParseDisplayName
+
+UNADDRESSABLE ACCESS
+name=TODO GetCanonicalPathInfo
+...
+*!GetCanonicalPathInfo*
+
+UNADDRESSABLE ACCESS
+name=TODO CreateDC
+...
+GDI32.dll!CreateDC*
+
+# This one looks interesting
+INVALID HEAP ARGUMENT
+name=TODO ExitProcess
+...
+KERNEL32.dll!ExitProcess
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/103365 (a)
+ppapi_tests.dll!*
+...
+ppapi_tests.dll!*
+*!base::internal::RunnableAdapter<*>::Run
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/103365 (b)
+ppapi_tests.dll!*
+...
+ppapi_tests.dll!*
+*!PP_RunCompletionCallback
+...
+*!base::internal::RunnableAdapter<*>::Run
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/107567 intentional mismatch in _DebugHeapDelete, no frame
+*!std::numpunct<*>::_Tidy
+*!std::numpunct<*>::~numpunct<*>
+
+############################
+# Intentional errors in Chromium tests (ToolsSanityTests)
+LEAK
+name=sanity test 01 (memory leak)
+base_unittests.exe!operator new
+base_unittests.exe!operator new[]
+base_unittests.exe!base::ToolsSanityTest_MemoryLeak_Test::TestBody
+
+# "..." is needed due to https://github.com/DynamoRIO/drmemory/issues/666
+UNADDRESSABLE ACCESS
+name=sanity test 02 (malloc/read left)
+base_unittests.exe!*ReadValueOutOfArrayBoundsLeft
+...
+base_unittests.exe!base::ToolsSanityTest_AccessesToMallocMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 03 (malloc/read right)
+base_unittests.exe!*ReadValueOutOfArrayBoundsRight
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToMallocMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 04 (malloc/write left)
+base_unittests.exe!*WriteValueOutOfArrayBoundsLeft
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToMallocMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 05 (malloc/write right)
+base_unittests.exe!*WriteValueOutOfArrayBoundsRight
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToMallocMemory_Test::TestBody
+
+# "..." is needed due to https://github.com/DynamoRIO/drmemory/issues/666
+UNADDRESSABLE ACCESS
+name=sanity test 06 (new/read left)
+base_unittests.exe!*ReadValueOutOfArrayBoundsLeft
+...
+base_unittests.exe!base::ToolsSanityTest_AccessesToNewMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 07 (new/read right)
+base_unittests.exe!*ReadValueOutOfArrayBoundsRight
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToNewMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 08 (new/write left)
+base_unittests.exe!*WriteValueOutOfArrayBoundsLeft
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToNewMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 09 (new/write right)
+base_unittests.exe!*WriteValueOutOfArrayBoundsRight
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToNewMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 10 (write after free)
+base_unittests.exe!base::ToolsSanityTest_AccessesToMallocMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=sanity test 11 (write after delete)
+base_unittests.exe!base::ToolsSanityTest_AccessesToNewMemory_Test::TestBody
+
+INVALID HEAP ARGUMENT
+name=sanity test 12 (array deleted without [])
+base_unittests.exe!base::ToolsSanityTest_ArrayDeletedWithoutBraces_Test::TestBody
+
+INVALID HEAP ARGUMENT
+name=sanity test 13 (single element deleted with [])
+base_unittests.exe!base::ToolsSanityTest_SingleElementDeletedWithBraces_Test::TestBody
+
+UNINITIALIZED READ
+name=sanity test 14 (malloc/read uninit)
+base_unittests.exe!*ReadUninitializedValue
+...
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToMallocMemory_Test::TestBody
+
+UNINITIALIZED READ
+name=sanity test 15 (new/read uninit)
+base_unittests.exe!*ReadUninitializedValue
+...
+base_unittests.exe!*MakeSomeErrors
+base_unittests.exe!base::ToolsSanityTest_AccessesToNewMemory_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=AboutHandler::AboutCrash deliberate crash
+# TODO(bruening): switch to annotation once have support for that
+chrome.dll!AboutHandler::AboutCrash
+
+UNADDRESSABLE ACCESS
+name=NPAPITesterBase.NoHangIfInitCrashes deliberate crash
+# function is small, little risk for false negative in rest of it
+# TODO(bruening): switch to annotation once have support for that
+npapi_test_plugin.dll!NPAPIClient::PluginClient::Initialize
+
+# Deliberate NULL deref to crash the child process
+UNADDRESSABLE ACCESS
+name=CrashingChildProcess deliberate crash
+*!CrashingChildProcess
+
+UNADDRESSABLE ACCESS
+name=::Crasher::Run deliberate crash
+*!base::`anonymous namespace'::Crasher::Run
+
+############################
+# Benign issues in Chromium
+
+WARNING
+name=http://crbug.com/72463 - prefetches in generated MemCopy
+instruction=prefetch*
+<not in a module>
+chrome.dll!v8::internal::CopyChars*
+
+WARNING
+name=prefetches in NVD3DUM.dll
+instruction=prefetch*
+NVD3DUM.dll!*
+
+WARNING
+name=prefetches in igdumd32.dll
+instruction=prefetch*
+igdumd32.dll!*
+
+UNADDRESSABLE ACCESS
+name=https://github.com/DynamoRIO/drmemory/issues/582 bizarre cl-generated read-beyond-TOS
+instruction=mov 0xfffffffc(%esp) -> %eax
+chrome.dll!blink::RenderStyle::resetBorder*
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/101717 (1)
+*!sandbox::PolicyBase::~PolicyBase
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/101717 (2)
+*!scoped_ptr<>::~scoped_ptr<>
+*!sandbox::GetHandleName
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/101717 (3)
+*!scoped_ptr<>::~scoped_ptr<>
+*!sandbox::GetPathFromHandle
+
+GDI USAGE ERROR
+name=https://github.com/DynamoRIO/drmemory/issues/899 deleting bitmap which is probably safe
+system call NtGdiDeleteObjectApp
+*!skia::`anonymous namespace'::Bitmap::~Bitmap
+*!skia::`anonymous namespace'::Bitmap::`scalar deleting destructor'
+
+############################
+# Real issues in Chromium
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/88213
+*!base::win::ObjectWatcher::StopWatching
+*!base::win::ObjectWatcher::WillDestroyCurrentMessageLoop
+*!MessageLoop::~MessageLoop
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/96010
+*!TestingProfile::FinishInit
+*!TestingProfile::TestingProfile
+*!BrowserAboutHandlerTest_WillHandleBrowserAboutURL_Test::TestBody
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/106522
+npapi_test_plugin.dll!NPAPIClient::PluginTest::id
+npapi_test_plugin.dll!NPAPIClient::ExecuteGetJavascriptUrlTest::TimerProc
+
+GDI USAGE ERROR
+name=http://crbug.com/109963 c
+system call NtGdiDeleteObjectApp
+GDI32.dll!DeleteDC
+content.dll!*
+
+GDI USAGE ERROR
+name=http://crbug.com/109963 d
+system call NtGdiDeleteObjectApp
+GDI32.dll!DeleteDC
+*!base::internal::RunnableAdapter*
+
+# GDI usage errors in 3rd-party components
+GDI USAGE ERROR
+name=http://crbug.com/119552 a
+system call NtGdiDeleteObjectApp
+...
+*!OmniboxViewWin::*
+
+GDI USAGE ERROR
+name=http://crbug.com/119552 b
+system call Nt*
+...
+*!ATL::*
+
+GDI USAGE ERROR
+name=http://crbug.com/119552 c
+# optional gdi32.dll frame followed by user32.dll
+# TODO(bruening): once have
+# https://github.com/DynamoRIO/drmemory/issues/846
+# I would do "gdi32.dll!...\nuser32.dll!*"
+*32.dll!*
+...
+shell32.dll!SHGetFileInfoW
+*!IconLoader::ReadIcon
+
+GDI USAGE ERROR
+name=http://crbug.com/119552 d
+system call NtGdiDeleteObjectApp
+gdi32.dll!DeleteObject
+riched20.dll!*
+riched20.dll!*
+riched20.dll!*
+
+GDI USAGE ERROR
+name=http://crbug.com/120157
+# "ReleaseDC called from different thread than GetDC"
+system call NtUserCallOneParam.RELEASEDC
+*!*FontCache::CacheElement::~CacheElement
+
+GDI USAGE ERROR
+name=http://crbug.com/158090
+# "DC created by one thread and used by another"
+...
+content.dll!content::*::FontCache::PreCacheFont
+content.dll!content::FontCacheDispatcher::OnPreCacheFont
+content.dll!DispatchToMethod<>
+
+GDI USAGE ERROR
+name=http://crbug.com/158090 c#4
+# ReleaseDC for DC called from different thread than the thread that called GetDC
+system call NtUserCallOneParam.RELEASEDC
+ui.dll!gfx::ReadColorProfile
+ui.dll!gfx::GetColorProfile
+content.dll!content::RenderMessageFilter::OnGetMonitorColorProfile
+content.dll!DispatchToMethod*
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/158350
+# allocated with operator new[], freed with operator delete
+*!*
+*!*
+*!*
+*!*
+*!*
+content.dll!*
+content.dll!*
+content.dll!*
+content.dll!*
+content.dll!*
+*!*
+*!*
+*!*
+*!*
+*!*
+KERNEL32.dll!*
+ntdll.dll!*
+ntdll.dll!*
+
+WARNING
+name=Security test (new oveflow)
+MSVCR100D.dll!operator new
+*!operator new
+*!operator new[]
+*!`anonymous namespace'::SecurityTest_NewOverflow_Test::TestBody
+
+WARNING
+name=Security test (calloc overflow)
+*!`anonymous namespace'::CallocReturnsNull
+*!`anonymous namespace'::SecurityTest_CallocOverflow_Test::TestBody
+
+GDI USAGE ERROR
+name=http://crbug.com/234484
+# "DC created by one thread and used by another"
+...
+*!chrome::`anonymous namespace'::SetOverlayIcon
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/262088
+drmemorylib.dll!av_dup_packet
+msvcrt.dll!wcsrchr
+ntdll.dll!RtlIsCurrentThreadAttachExempt
+ntdll.dll!LdrShutdownThread
+ntdll.dll!RtlExitUserThread
+
+GDI USAGE ERROR
+name=http://crbug.com/266484
+skia.dll!HDCOffscreen::draw
+...
+skia.dll!SkScalerContext::getImage
+skia.dll!SkGlyphCache::findImage
+skia.dll!DrawOneGlyph::getImageData
+
+HANDLE LEAK
+name=http://crbug.com/346842
+system call NtGdiCreateDIBSection
+*!CreateDIBSection
+*!HDCOffscreen::draw
+*!SkScalerContext_GDI::generateImage
+*!SkScalerContext::getImage
+*!SkGlyphCache::findImage
+*!DrawOneGlyph::getImageData
+
+HANDLE LEAK
+name=http://crbug.com/346993
+system call NtDuplicateObject
+KERNELBASE.dll!DuplicateHandle
+KERNEL32.dll!DuplicateHandle*
+base.dll!base::`anonymous namespace'::ThreadFunc
+KERNEL32.dll!BaseThreadInitThunk
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/42043-uninit
+...
+QuickTime.qts!*
+
+GDI USAGE ERROR
+name=http://crbug.com/42043-gdi
+...
+QuickTime.qts!*
+
+UNADDRESSABLE ACCESS
+name=IntentionalCrash
+*!content::*::*Crash*
+*!content::*::MaybeHandleDebugURL
+
+HANDLE LEAK
+name=http://crbug.com/371357
+system call NtCreateEvent
+KERNELBASE.dll!CreateEventExW
+KERNELBASE.dll!CreateEventW
+
+HANDLE LEAK
+name=http://crbug.com/371368
+system call NtCreateNamedPipeFile
+KERNELBASE.dll!CreateNamedPipeW
+ipc.dll!IPC::ChannelWin::CreatePipe
+ipc.dll!IPC::ChannelWin::ChannelWin
+
+HANDLE LEAK
+name=http://crbug.com/371942
+system call NtCreateThreadEx
+KERNELBASE.dll!CreateRemoteThreadEx
+KERNEL32.dll!CreateThread
+
+HANDLE LEAK
+name=http://crbug.com/371946
+system call NtUserWindowFromPoint
+content.dll!content::LegacyRenderWidgetHostHWND::OnMouseLeave
+content.dll!content::LegacyRenderWidgetHostHWND::_ProcessWindowMessage
+content.dll!content::LegacyRenderWidgetHostHWND::ProcessWindowMessage
+content.dll!ATL::CWindowImplBaseT<>::WindowProc
+USER32.dll!gapfnScSendMessage
+USER32.dll!GetThreadDesktop
+USER32.dll!CharPrevW
+USER32.dll!DispatchMessageW
+base.dll!base::MessagePumpForUI::ProcessMessageHelper
+base.dll!base::MessagePumpForUI::ProcessNextWindowsMessage
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/372177
+NPCTRL.dll!DllGetClassObject
+NPCTRL.dll!DllGetClassObject
+NPCTRL.dll!DllGetClassObject
+NPCTRL.dll!DllGetClassObject
+
+HANDLE LEAK
+name=http://crbug.com/373333
+system call NtGdiCreateCompatibleDC
+*!CreateCompatibleDC
+*!SkScalerContext_GDI::SkScalerContext_GDI
+*!LogFontTypeface::onCreateScalerContext
+*!SkTypeface::createScalerContext
+*!SkGlyphCache::VisitCache
+...
+*!SkPaint::descriptorProc
+...
+*!blink::RenderBlockFlow::layoutBlockFlow
+*!blink::RenderBlockFlow::layoutBlock
+*!blink::RenderBlock::layout
+
+HANDLE LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/1545
+system call NtGdiCreateCompatibleDC
+GDI32.dll!CreateCompatibleDC
+skia.dll!LogFontTypeface::onGetTableData
+
+HANDLE LEAK
+name=http://crbug.com/379000
+system call NtCreate*
+...
+*!disk_cache::MappedFile::Init
+*!disk_cache::BlockFiles::OpenBlockFile
+*!disk_cache::BlockFiles::Init
+*!disk_cache::BackendImpl::SyncInit
+*!disk_cache::BackendIO::ExecuteBackendOperation
+
+GDI USAGE ERROR
+name=379774
+system call NtUserCallOneParam.RELEASEDC
+USER32.dll!ReleaseDC
+*!std::_Tree<>::_Erase
+*!std::_Tree<>::erase
+*!base::DefaultSingletonTraits<>::Delete
+*!base::Singleton<>::OnExit
+*!base::AtExitManager::ProcessCallbacksNow
+*!base::AtExitManager::~AtExitManager
+*!base::TestSuite::~TestSuite
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/382784
+*!content::PepperMediaStreamAudioTrackHost::AudioSink::OnData
+*!content::MediaStreamAudioSinkOwner::OnData
+*!content::WebRtcLocalAudioTrack::Capture
+*!content::WebRtcAudioCapturer::Capture
+*!media::AudioInputDevice::AudioThreadCallback::Process
+*!media::AudioDeviceThread::Thread::Run
+*!media::AudioDeviceThread::Thread::ThreadMain
+
+UNADDRESSABLE ACCESS
+name=IntentionalCrashPluginTest.plugin_client.cc
+npapi_test_plugin.dll!NP_Initialize
+...
+*!content::PluginLib::NP_Initialize
+*!content::PluginThread::PluginThread
+*!content::PluginMain
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/313788
+*!InProcessBrowserTest::AddBlankTabAndShow
+*!InProcessBrowserTest::CreateBrowser
+*!*::RunTestOnMainThread
+*!InProcessBrowserTest::RunTestOnMainThreadLoop
+*!ChromeBrowserMainParts::PreMainMessageLoopRunImpl
+*!ChromeBrowserMainParts::PreMainMessageLoopRun
+
+# This suppression is deliberately general, as bugs reported in
+# v8 generated code are difficult to track down. Xref Dr. Memory issue
+# https://github.com/DynamoRIO/drmemory/issues/1582
+UNADDRESSABLE ACCESS
+name=https://github.com/DynamoRIO/drmemory/issues/1582
+...
+*!v8::internal::Invoke
+*!v8::internal::Execution::Call
+*!v8::Function::Call
+
+UNADDRESSABLE ACCESS
+name=https://github.com/DynamoRIO/dynamorio/issues/1443
+dynamorio.dll!*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/dynamorio/issues/1443 (another instance)
+dynamorio.dll!*
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/400495
+aura.dll!aura::Window::Contains
+aura.dll!aura::WindowEventDispatcher::OnWindowHidden
+aura.dll!aura::WindowEventDispatcher::OnPostNotifiedWindowDestroying
+aura.dll!aura::Window::~Window
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/400511
+content.dll!content::WebThreadBase::TaskObserverAdapter::WillProcessTask
+base.dll!base::MessageLoop::RunTask
+base.dll!base::Thread::StopSoon
+base.dll!base::MessageLoop::DeferOrRunPendingTask
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/414675
+blink_web.dll!blink::toCoreFrame
+blink_web.dll!blink::RemoteFrameClient::firstChild
+blink_web.dll!blink::WebRemoteFrameImpl::~WebRemoteFrameImpl
+blink_web.dll!blink::WebRemoteFrameImpl::close
+content.dll!content::RenderFrameProxy::~RenderFrameProxy
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/420311
+blink_web.dll!blink::Frame::deprecatedLocalOwner
+blink_web.dll!blink::Document::topDocument
+blink_web.dll!blink::Document::axObjectCacheOwner
+blink_web.dll!blink::Document::existingAXObjectCache
+blink_web.dll!blink::FrameView::removeFromAXObjectCache
+blink_web.dll!blink::FrameView::prepareForDetach
+blink_web.dll!blink::LocalFrame::setView
+blink_web.dll!blink::FrameTree::~FrameTree
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/432070
+blink_web.dll!blink::Frame::detach
+content.dll!content::RenderFrameProxy::OnDeleteProxy
+content.dll!content::RenderFrameProxy::OnMessageReceived
+content.dll!content::MessageRouter::RouteMessage
+
+HANDLE LEAK
+name=http://crbug.com/441785
+system call NtCreateSection
+*!CreateFileMappingW
+*!base::SharedMemory::Create
+*!content::ChildThreadImpl::AllocateSharedMemory
+...
+*!content::ChildSharedBitmapManager::AllocateSharedBitmap
+
+HANDLE LEAK
+name=http://crbug.com/449989
+system call NtOpenProcess
+KERNELBASE.dll!OpenProcess
+base.dll!base::Process::OpenWithExtraPrivileges
+content.dll!content::BrowserMessageFilter::Internal::OnChannelConnected
+
+UNADDRESSABLE ACCESS
+name=http://crbug.com/455060
+*!content::FrameAccessibility::GetParent
+*!content::RenderFrameHostImpl::AccessibilityGetParentFrame
+*!content::BrowserAccessibilityManager::GetDelegateFromRootManager
+*!content::BrowserAccessibilityManager::OnWindowBlurred
+...
+*!content::RenderWidgetHostViewAura::Destroy
+
+INVALID HEAP ARGUMENT
+name=http://crbug.com/455994
+drmemorylib.dll!replace_operator_delete
+*!IPC::Listener::`vector deleting destructor'
+*!content::RenderFrameImpl::~RenderFrameImpl
+*!content::RenderFrameImpl::`vector deleting destructor'
+*!content::RenderViewImpl::~RenderViewImpl
+*!content::RenderViewImpl::`vector deleting destructor'
+*!scoped_refptr<>::Release
+*!base::internal::BindState<>::`scalar deleting destructor'
+*!scoped_refptr<>::Release
+*!base::internal::CallbackBase::~CallbackBase
+*!base::MessagePumpDefault::Run
+*!base::MessageLoop::RunHandler
+*!base::MessageLoop::Run
+*!content::RendererMain
+*!content::RunNamedProcessTypeMain
+*!content::ContentMainRunnerImpl::Run
+*!content::ContentMain
+*!content::LaunchTests
+
+UNINITIALIZED READ
+name=http://crbug.com/463204
+skia.dll!_ltod3
+skia.dll!SkPaint::measure_text
+skia.dll!SkPaint::measureText
+content.dll!content::DoPreSandboxWarmupForTypeface
+content.dll!content::`anonymous namespace'::WarmupDirectWrite
+
+HANDLE LEAK
+name=http://crbug.com/487500
+system call NtCreateEvent
+ntdll.dll!RtlDosPathNameToNtPathName_U_WithStatus
+ntdll.dll!RtlDosPathNameToNtPathName_U_WithStatus
+ntdll.dll!RtlIntegerToUnicodeString
+
+HANDLE LEAK
+name=http://crbug.com/487500b
+system call NtCreateEvent
+ntdll.dll!RtlDosPathNameToNtPathName_U_WithStatus
+ntdll.dll!RtlDosPathNameToNtPathName_U_WithStatus
+ntdll.dll!RtlEncodePointer
+
+UNADDRESSABLE ACCESS
+name=bug_522049
+...
+*!`anonymous namespace'::ConvertInputMode
+*!content::RenderWidget::UpdateTextInputState
+*!content::RenderWidget::WillBeginCompositorFrame
+*!content::`anonymous namespace'::RenderWidgetCompositorOutputSurface::SynchronousComposite
+*!base::internal::Invoker<>::Run
+*!base::debug::TaskAnnotator::RunTask
+*!base::MessageLoop::RunTask
+*!base::MessageLoop::DeferOrRunPendingTask
+*!base::MessageLoop::DoWork
+*!base::MessagePumpDefault::Run
+*!base::MessageLoop::RunHandler
+*!base::MessageLoop::Run
+
+UNADDRESSABLE ACCESS
+name=bug_536803
+*!blink::SecurityOrigin::addSuborigin
+*!blink::SecurityOriginTest_Suborigins_Test::TestBody
+
+HANDLE LEAK
+name=bug_555058_a
+system call NtCreateUserProcess
+KERNEL32.dll!CreateProcessInternalW
+KERNEL32.dll!CreateProcessW
+base.dll!base::LaunchProcess
+base.dll!base::LaunchProcess
+content.dll!content::StartSandboxedProcess
+content.dll!content::`anonymous namespace'::LaunchOnLauncherThread
+content.dll!base::internal::InvokeHelper<>::MakeItSo
+content.dll!base::internal::Invoker<>::Run
+base.dll!base::debug::TaskAnnotator::RunTask
+base.dll!base::MessageLoop::RunTask
+base.dll!base::MessageLoop::DeferOrRunPendingTask
+base.dll!base::MessageLoop::DoWork
+base.dll!base::MessagePumpDefault::Run
+base.dll!base::MessageLoop::RunHandler
+base.dll!base::MessageLoop::Run
+base.dll!base::Thread::Run
+content.dll!content::BrowserThreadImpl::ProcessLauncherThreadRun
+content.dll!content::BrowserThreadImpl::Run
+base.dll!base::Thread::ThreadMain
+base.dll!base::`anonymous namespace'::ThreadFunc
+KERNEL32.dll!BaseThreadInitThunk
+
+HANDLE LEAK
+name=bug_555058_b
+system call NtDuplicateObject
+KERNELBASE.dll!DuplicateHandle
+KERNEL32.dll!DuplicateHandle
+*!sandbox::BrokerServicesBase::AddTargetPeer
+content.dll!content::StartSandboxedProcess
+content.dll!content::`anonymous namespace'::LaunchOnLauncherThread
+content.dll!base::internal::InvokeHelper<>::MakeItSo
+content.dll!base::internal::Invoker<>::Run
+base.dll!base::debug::TaskAnnotator::RunTask
+base.dll!base::MessageLoop::RunTask
+base.dll!base::MessageLoop::DeferOrRunPendingTask
+base.dll!base::MessageLoop::DoWork
+base.dll!base::MessagePumpDefault::Run
+base.dll!base::MessageLoop::RunHandler
+base.dll!base::MessageLoop::Run
+base.dll!base::Thread::Run
+content.dll!content::BrowserThreadImpl::ProcessLauncherThreadRun
+content.dll!content::BrowserThreadImpl::Run
+base.dll!base::Thread::ThreadMain
+base.dll!base::`anonymous namespace'::ThreadFunc
+
+HANDLE LEAK
+name=bug_561803_a
+system call NtCreateNamedPipeFile
+*!CreateNamedPipeW
+*!mojo::edk::PlatformChannelPair::PlatformChannelPair
+...
+*!mojo::edk::ChildProcessLaunched
+*!content::*::OnProcessLaunched
+*!content::ChildProcessLauncher::Notify
+*!content::ChildProcessLauncher::DidLaunch
+*!base::internal::InvokeHelper<>::MakeItSo
+
+GDI USAGE ERROR
+name=bug_573352
+system call NtGdiDeleteObjectApp
+GDI32.dll!...
+SHELL32.dll!...
+ui_base.dll!drag_utils::SetDragImageOnDataObject
+
+HANDLE LEAK
+name=bug_580636_a
+system call NtDuplicateObject
+KERNELBASE.dll!DuplicateHandle
+...
+*!IPC::internal::HandleAttachmentWin::HandleAttachmentWin
+
+HANDLE LEAK
+name=bug_586668_a
+system call NtDuplicateObject
+KERNELBASE.dll!DuplicateHandle
+KERNEL32.dll!DuplicateHandle
+mojo_system_impl.dll!mojo::edk::Channel::Message::RewriteHandles
+mojo_system_impl.dll!mojo::edk::NodeController::OnRelayPortsMessage
+mojo_system_impl.dll!mojo::edk::NodeChannel::OnChannelMessage
+mojo_system_impl.dll!mojo::edk::Channel::OnReadComplete
+
+HANDLE LEAK
+name=bug_586668_b
+system call NtDuplicateObject
+KERNELBASE.dll!DuplicateHandle
+KERNEL32.dll!DuplicateHandle
+base.dll!base::SharedMemory::DuplicateHandle
+mojo_system_impl.dll!mojo::edk::PlatformSharedBuffer::DuplicatePlatformHandle
+mojo_system_impl.dll!mojo::edk::SharedBufferDispatcher::EndSerialize
+mojo_system_impl.dll!mojo::edk::MessagePipeDispatcher::WriteMessage
+mojo_system_impl.dll!mojo::edk::Core::WriteMessage
+
+HANDLE LEAK
+name=bug_586996_b
+system call NtDuplicateObject
+KERNELBASE.dll!DuplicateHandle
+KERNEL32.dll!DuplicateHandle
+ipc.dll!IPC::AttachmentBrokerPrivilegedWin::DuplicateWinHandle
+ipc.dll!IPC::AttachmentBrokerPrivilegedWin::OnDuplicateWinHandle
+ipc.dll!IPC::AttachmentBrokerPrivilegedWin::OnMessageReceived
+ipc.dll!IPC::internal::ChannelReader::HandleTranslatedMessage
+ipc.dll!IPC::internal::ChannelReader::TranslateInputData
+ipc.dll!IPC::internal::ChannelReader::AsyncReadComplete
+ipc.dll!IPC::ChannelWin::OnIOCompleted
diff --git a/chromium/tools/valgrind/drmemory/suppressions_full.txt b/chromium/tools/valgrind/drmemory/suppressions_full.txt
new file mode 100644
index 00000000000..a03efb6cb78
--- /dev/null
+++ b/chromium/tools/valgrind/drmemory/suppressions_full.txt
@@ -0,0 +1,2236 @@
+# This file contains suppressions for the Dr.Memory tool, see
+# http://dev.chromium.org/developers/how-tos/using-drmemory
+#
+# This file should contain suppressions only for the reports happening
+# in the 'full' mode (drmemory_full).
+# For the reports that can happen in the light mode (a.k.a. drmemory_light),
+# please use suppressions.txt instead.
+
+###############################################################
+# Known reports in third-party code we have no control over.
+
+UNINITIALIZED READ
+name=http://crbug.com/116277
+...
+*!MOZ_Z_deflate
+
+# TODO(timurrrr): check if these frames change when NT_SYMBOLS are present.
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/406
+ADVAPI32.dll!WmiOpenBlock
+ADVAPI32.dll!WmiOpenBlock
+
+# Leaks inside GoogleDesktop - it injects into our processes for some reason
+LEAK
+name=GoogleDesktop LEAK
+...
+GoogleDesktopNetwork3.DLL!DllUnregisterServer
+
+# They deliberately use uninit local var in sqlite random generator.
+# Random bytes may mess up the call stack between randomByte and
+# sqlite3_* frames (https://github.com/DynamoRIO/drmemory/issues/1514)
+# so we also look for randomByte in form B.
+UNINITIALIZED READ
+name=sqlite3_randomness A
+*.dll!sqlite3_randomness
+
+UNINITIALIZED READ
+name=sqlite3_randomness B
+*.dll!randomByte
+
+# Intentional leak in WebKit Template Framework for ThreadData.
+LEAK
+name=intentional WTF ThreadData leak
+...
+*!WTF::wtfThreadData
+
+# Happens when winhttp returns ERROR_WINHTTP_UNABLE_TO_DOWNLOAD_SCRIPT.
+LEAK
+name=http://crbug.com/125558 a
+KERNELBASE.dll!LocalAlloc
+SECHOST.dll!...
+SECHOST.dll!NotifyServiceStatusChange
+WINHTTP.dll!...
+WINHTTP.dll!WinHttpDetectAutoProxyConfigUrl
+*!net::ProxyResolverWinHttp::GetProxyForURL
+
+# Tiny locale-related leaks in ntdll. Probably system bug.
+LEAK
+name=http://crbug.com/125558 b
+ntdll.dll!...
+ntdll.dll!*
+KERNELBASE.dll!...
+KERNELBASE.dll!GetCPInfoExW
+webio.dll!*
+webio.dll!*
+webio.dll!*
+WINHTTP.dll!...
+WINHTTP.dll!WinHttpGetIEProxyConfigForCurrentUser
+*!net::ProxyConfigServiceWin::GetCurrentProxyConfig
+
+UNINITIALIZED READ
+name=http://crbug.com/30704 #f
+libpng.dll!wk_png_write_find_filter
+libpng.dll!wk_png_write_row
+
+###############################################################
+# Suppress some false reports due to bugs in Dr.Memory like wrong analysis
+# assumptions or unhandled syscalls
+
+# Please note: the following suppressions were written in the absence of
+# private symbols so may need to be updated when we switch to auto-loading PDBs
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/12 (1)
+ntdll.dll!Rtl*
+ntdll.dll!Rtl*
+ntdll.dll!RtlFindActivationContextSectionString
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/12 (2)
+...
+SHELL32.dll!SHFileOperation*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/12 (3)
+...
+SHELL32.dll!SHGetFolderPath*
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/12 (4)
+...
+SHELL32.dll!SHGetFolderPath*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/12 (5)
+...
+SHELL32.dll!SHCreateDirectory*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/12 (6)
+...
+SHELL32.dll!ILLoadFromStream*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/12 (7)
+...
+SHELL32.dll!ILSaveToStream*
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/12 (8)
+...
+SHELL32.dll!SHFileOperation*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/12 (9)
+...
+SHELL32.dll!SHGetItemFromDataObject
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/12 (10)
+...
+SHELL32.dll!SHGetItemFromDataObject
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/12 (11)
+...
+ole32.dll!*
+SHELL32.dll!SHChangeNotifySuspendResume
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/14 (1)
+...
+*!CreateProcess*
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/14 (2)
+...
+*!CreateProcess*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/14 (3)
+...
+*!base::LaunchApp*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/17 (1)
+...
+*!CreateWindow*
+
+POSSIBLE LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/17 (2)
+GDI32.dll!*
+GDI32.dll!CreateFontIndirectExW
+GDI32.dll!CreateFontIndirectW
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/17 (3)
+KERNELBASE.dll!LocalAlloc
+...
+USER32.dll!CreateWindow*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/18 a
+...
+*!CoInitialize*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/18 b
+...
+*!CoCreateInstance*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/18 c
+...
+*!CoUninitialize*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/18 d
+...
+UxTheme.dll!*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/40 a
+...
+WINSPOOL.DRV!*
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/40 b
+...
+WINSPOOL.DRV!*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/48 a
+system call NtContinue
+...
+*!*SetThreadName
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/48 b
+system call NtContinue
+*!WTF::initializeCurrentThreadInternal
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/52 a
+...
+DBGHELP.dll!SymInitialize
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/52 b
+...
+DBGHELP.dll!SymEnumSourceFiles
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/52 c
+...
+msvcrt.dll!_RTDynamicCast
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/52 bit-level fp in dbghelp
+instruction=test 0x*(%*) $0x??
+DBGHELP.dll!SymUnloadModule64
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/53
+ADVAPI32.dll!WmiMofEnumerateResourcesA
+ADVAPI32.dll!WmiMofEnumerateResourcesA
+ADVAPI32.dll!Sta*TraceW
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/58
+...
+*!_cfltcvt_l
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/60
+USP10.dll!*
+...
+USP10.dll!ScriptStringAnalyse
+
+LEAK
+IMM32.dll!ImmGetIMCCSize
+IMM32.dll!ImmLockClientImc
+IMM32.dll!ImmDisableIME
+IMM32.dll!ImmSetActiveContext
+USER32.dll!IMPSetIMEA
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/65 a
+...
+*!SystemFunction036
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/65 b
+...
+*!talk_base::CreateRandomString
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/68 a
+...
+WS2_32.dll!*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/68 b
+...
+ADVAPI32.dll!SetSecurityDescriptorDacl
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/68 c
+...
+MSWSOCK.dll!WSPStartup
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/68 d
+...
+ntdll.dll!RtlValidateUnicodeString
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/256
+*!_mtinit
+*!__tmainCRTStartup
+*!mainCRTStartup
+
+POSSIBLE LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/274 a
+...
+GDI32.dll!CreateDCW
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/274 b
+...
+GDI32.dll!CreateDCW
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/275
+...
+*!_getptd*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/276
+...
+ntdll.dll!RtlConvertUlongToLargeInteger
+ntdll.dll!RtlConvertUlongToLargeInteger
+ntdll.dll!KiUserExceptionDispatcher
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/305
+*!free
+*!free
+*!operator new
+...
+*!MiniDumpWriteDump
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/346 a
+...
+GDI32.dll!CloseEnhMetaFile
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/346 b
+GDI32.dll!SetPolyFillMode
+GDI32.dll!CreateICW
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/362
+USER32.dll!UnregisterClass*
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/382
+...
+ntdll.dll!CsrNewThread
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/397
+system call NtDeviceIoControlFile InputBuffer
+ADVAPI32.dll!ImpersonateAnonymousToken
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/407 a
+system call NtRequestWaitReplyPort
+RPCRT4.dll!I_RpcSendReceive
+RPCRT4.dll!NdrSendReceive
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/407 b
+IMM32.dll!*
+ntdll.dll!LdrInitializeThunk
+ntdll.dll!LdrShutdownThread
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/412 a
+ADVAPI32.dll!RegDeleteValue*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/412 b
+...
+ADVAPI32.dll!Crypt*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/412 c
+...
+RPCRT4.dll!NdrClientCall2
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/412 d
+RSAENH.dll!DllUnregisterServer
+...
+ADVAPI32.dll!CryptAcquireContextA
+CRYPT32.dll!CryptEnumOIDFunction
+...
+CRYPT32.dll!CertFindCertificateInStore
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/412 e
+...
+RSAENH.dll!CPGenRandom
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/412 f
+...
+CRYPT??.dll!Crypt*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/412 g
+*!replace_memcmp
+...
+*!testing::internal::CmpHelperEQ*
+...
+*!SymmetricKeyTest_ImportGeneratedKey_Test::TestBody
+
+# We get these sometimes from AesEncrypt and AesExpandKey. AesEncrypt doesn't
+# have frame pointers, and we have trouble unwinding from it. Therefore, we use
+# this broad suppression, effectively disabling uninit checks in rsaenh.dll.
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/412 h
+RSAENH.dll!*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/425 a
+CLBCatQ.DLL!DestroyStgDatabase
+CLBCatQ.DLL!PostError
+CLBCatQ.DLL!PostError
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/425 b
+RPCRT4.dll!I_RpcBCacheFree
+RPCRT4.dll!I_RpcBCacheFree
+...
+RPCRT4.dll!NdrClientCall2
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/425 c
+msdmo.dll!*
+msdmo.dll!*
+DEVENUM.DLL!*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/435 a
+...
+ntdll.dll!RtlSetSecurityObject
+ntdll.dll!RtlNewSecurityObjectEx
+ADVAPI32.dll!CreatePrivateObjectSecurityEx
+NTMARTA.dll!AccRewriteSetNamedRights
+
+POSSIBLE LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/435 b
+WLDAP32.dll!Ordinal325
+...
+WLDAP32.dll!Ordinal325
+ntdll.dll!LdrInitializeThunk
+ntdll.dll!LdrFindResourceDirectory_U
+ntdll.dll!RtlValidateUnicodeString
+ntdll.dll!LdrLoadDll
+KERNEL32.dll!LoadLibraryExW
+
+# mod+offs suppression because the symbolic makes no sense and changes
+# completely in the presence of WS2_32.dll symbols.
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/438
+<WS2_32.dll+0x260c>
+<WS2_32.dll+0x2b76>
+<WS2_32.dll+0x2c61>
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/454 a
+...
+WINMM.dll!wave*GetNumDevs
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/454 b
+...
+WINMM.dll!wave*GetNumDevs
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/466
+ntdll.dll!RtlRunOnceBeginInitialize
+ntdll.dll!RtlInitializeCriticalSectionAndSpinCount
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/473 a
+system call NtDeviceIoControlFile InputBuffer
+...
+iphlpapi.dll!GetAdaptersAddresses
+
+POSSIBLE LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/473 b
+ESENT.dll!*
+ESENT.dll!*
+ESENT.dll!*
+ntdll.dll!Ldr*Init*
+ntdll.dll!Ldr*
+ntdll.dll!*
+ntdll.dll!LdrLoadDll
+...
+iphlpapi.dll!GetPerAdapterInfo
+...
+iphlpapi.dll!GetAdaptersAddresses
+
+POSSIBLE LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/473 c
+RPCRT4.dll!*
+RPCRT4.dll!*
+...
+IPHLPAPI.DLL!GetAdaptersAddresses
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/475
+...
+ADVAPI32.dll!CryptAcquireContextA
+...
+CRYPT32.dll!CryptMsgOpenToDecode
+...
+CRYPT32.dll!CryptQueryObject
+
+# Lots of leaks from our interactions with the system certificate store. May be
+# worth reviewing our use of their API.
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/476 a
+KERNEL*.dll!LocalAlloc
+...
+CRYPT32.dll!CertGetCRLContextProperty
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/476 b
+KERNEL*.dll!LocalAlloc
+...
+CRYPT32.dll!CertAddCRLContextToStore
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/476 c
+KERNEL*.dll!LocalAlloc
+...
+CRYPT32.dll!CertOpenStore
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/476 d
+...
+CRYPT32.dll!CertOpenSystemStore?
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/476 e
+...
+CRYPT32.dll!CertGetCertificateChain
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/476 f
+...
+CRYPT32.dll!CertCompareIntegerBlob
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/476 g
+...
+CRYPT32.dll!CryptUnprotectData
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/476 h
+KERNEL*.dll!LocalAlloc
+...
+CRYPT32.dll!CertEnumCertificatesInStore
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/476 i
+...
+CRYPT32.dll!CryptProtectData
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/476 j
+...
+CRYPT32.dll!CryptExportPublicKeyInfoEx
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/502 a
+system call NtSecureConnectPort parameter #3
+GDI32.dll!*
+GDI32.dll!*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/502 b
+system call NtGdiEnumFonts parameter #6
+GDI32.dll!*
+GDI32.dll!*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/511 a
+RPCRT4.dll!...
+ole32.dll!*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/511 b
+ole32.dll!*
+ole32.dll!*
+ole32.dll!StringFromGUID2
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/512 a
+...
+*!browser_sync::Cryptographer::PackBootstrapToken
+*!browser_sync::Cryptographer::GetBootstrapToken
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/512 b
+...
+*!Encrypt*
+
+# TODO(bruening): remove these once we have v8 bitfields handled
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/513 a
+*!v8*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/513 b
+*!*
+*!v8*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/513 c
+<not in a module>
+...
+*!v8*
+
+# We have seen some cases (not yet understood: crbug.com/364146) where v8.dll
+# has no symbols. These are all on the bots using component build, so we use
+# v8.dll. TODO(bruening): remove these once we've fixed the symbol issue.
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/513 d
+v8.dll!*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/513 e
+<not in a module>
+...
+v8.dll!*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/546
+...
+mscms.dll!*
+...
+GDI32.dll!*
+*!IconUtil::Create*HICON*
+
+LEAK
+name=http://crbug.com/92152
+...
+USER32.dll!CreateWindowExW
+*!views::TooltipManagerWin::Init
+*!views::TooltipManagerWin::TooltipManagerWin
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/567 a
+dbghelp.dll!*
+...
+dbghelp.dll!StackWalk64
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/567 b
+*!*
+dbghelp.dll!*
+...
+dbghelp.dll!StackWalk64
+
+# Symbols w/o PDB make no sense, first ntdll frame is TpSetTimer w/o syms and
+# TppWorkerThread w/ syms. We used to use mod+offs here, but that was too
+# brittle, so we switched to RPCRT4.dll!*.
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/569
+RPCRT4.dll!...
+ntdll.dll!*
+ntdll.dll!*
+KERNEL*.dll!BaseThreadInitThunk
+
+# TODO(timurrrr): investigate these
+UNINITIALIZED READ
+name=http://crbug.com/TODO a
+...
+*!win_util::GetLogonSessionOnlyDACL
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO b
+...
+ntshrui.dll!IsPathSharedW
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO c
+...
+*!NetApiBufferFree
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO d
+...
+*!ShellExecute*
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO e
+...
+*!SHParseDisplayName
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO f
+...
+*!GetCanonicalPathInfo*
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO g
+...
+SHELL32.dll!Ordinal*
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO h
+...
+GDI32.dll!GetTextExtentPoint32*
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO i
+...
+*!SyncSocketClientListener::OnMsgClassResponse
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO j
+...
+*!*NSPRInitSingleton*
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO k
+*!NdrSimpleStructFree
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO l
+ntdll.dll!RtlpNtOpenKey
+ntdll.dll!RtlMakeSelfRelativeSD
+ntdll.dll!RtlAbsoluteToSelfRelativeSD
+ADVAPI32.dll!MakeSelfRelativeSD
+
+UNINITIALIZED READ
+name=http://crbug.com/TODO m
+...
+CRYPT32.dll!I_CertSyncStore
+
+# This matches the same stack as DrMem i#751, but it's an uninit read instead of
+# a leak. Must be some early thread initialization. Doesn't look like
+# bit-level though.
+UNINITIALIZED READ
+name=http://crbug.com/TODO n
+RPCRT4.dll!*
+RPCRT4.dll!*
+RPCRT4.dll!*
+ntdll.dll!*
+ntdll.dll!*
+KERNEL*.dll!BaseThreadInitThunk
+
+# No idea where this is from, but Chrome isn't even on the stack.
+POSSIBLE LEAK
+name=http://crbug.com/TODO o
+RPCRT4.dll!...
+ole32.dll!OleInitialize
+ole32.dll!...
+KERNEL32.dll!BaseThreadInitThunk
+
+# Matches lots of RPC related leaks. So far RPC handles have been mostly owned
+# by system libraries and are not something we can fix easily.
+POSSIBLE LEAK
+name=http://crbug.com/TODO p
+RPCRT4.dll!*
+RPCRT4.dll!*
+RPCRT4.dll!NDRCContextBinding
+
+# No idea, but all system code, not interesting.
+POSSIBLE LEAK
+name=http://crbug.com/TODO q
+RPCRT4.dll!...
+RPCRT4.dll!*
+RPCRT4.dll!*
+ole32.dll!...
+ole32.dll!*
+ole32.dll!*
+...
+SHELL32.dll!*
+
+LEAK
+name=http://crbug.com/109278 video device COM leaks
+...
+*!media::VideoCaptureDevice::*
+
+LEAK
+name=http://crbug.com/109278 audio device COM leaks
+...
+*!media::GetInputDeviceNamesWin
+
+# False pos uninit in shell32 when resolving links.
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/745
+SHELL*.dll!*
+...
+SHELL*.dll!*
+*!file_util::ResolveShortcut
+
+# Probable false pos uninit in ffmpeg. Probably due to running off the end of a
+# buffer with SSE/MMX instructions whose results are then masked out later.
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/747 a
+*!ff_pred4x4_vertical_vp8_mmxext
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/747 b
+*!ff_pred4x4_down_left_mmxext
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/747 c
+*!ff_vorbis_floor1_render_list
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/747 d
+*!ff_put_vp8_epel8_h6_ssse3
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/747 e
+*!ff_put_vp8_epel8_h4_ssse3
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/747 f
+*!ff_fft_permute_sse
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/747 g
+*!ff_simple_idct_add_mmx
+
+# ffmpeg seems to leak a pthread condition variable.
+LEAK
+name=http://crbug.com/110042
+*!ptw32_new
+*!pthread_self
+*!sem_wait
+*!pthread_cond_wait
+*!ff_thread_decode_frame
+*!avcodec_decode_video2
+
+# Improperly handled ioctl in bcrypt.
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/748
+system call NtDeviceIoControlFile InputBuffer
+...
+bcrypt.dll!BCryptUnregisterConfigChangeNotify
+bcrypt.dll!BCryptGetFipsAlgorithmMode
+ntdll.dll!RtlQueryEnvironmentVariable
+
+# Not sure what this is.
+POSSIBLE LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/749
+...
+fwpuclnt.dll!*
+...
+RPCRT4.dll!*
+...
+fwpuclnt.dll!*
+...
+WS2_32.dll!*
+*!talk_base::SafeGetHostByName
+*!talk_base::SocketAddress::GetLocalIPs
+*!talk_base::SocketAddress::IsLocalIP
+*!cricket::Transport::VerifyCandidate
+*!cricket::Session::OnRemoteCandidates
+*!cricket::Session::OnTransportInfoMessage
+*!cricket::Session::OnIncomingMessage
+*!cricket::SessionManager::OnIncomingMessage
+
+# More uninit false pos in rpcrt4.dll not caught by default suppressions.
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/529
+RPCRT4.dll!*
+...
+*!base::LaunchProcess
+
+# System leak from CreateEnvironmentBlock.
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/757
+...
+USERENV.dll!CreateEnvironmentBlock
+
+# Looks like another instance of 753
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/753
+...
+ntdll.dll!RtlLoadString
+
+# More bit manip fps
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/493
+USP10.dll!ScriptPositionSingleGlyph
+
+# Various TLS leaks that we don't understand yet. We should be finding a root
+# for these.
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/778 a
+KERNELBASE.dll!TlsSetValue
+
+# Originally filed as: http://crbug.com/109281
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/778 b
+*!operator new
+*!operator new[]
+*!*::ConstructTlsVector
+*!base::ThreadLocalStorage::StaticSlot::Get
+
+# This is an NSS PRThread object installed in TLS. Why isn't this detected as a
+# root? See also http://crbug.com/32624
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/778 c
+*!PR_Calloc
+*!_PR_AttachThread
+*!_PRI_AttachThread
+
+# Bit-level fps in rich edit layer.
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/791
+RICHED20.dll!*
+RICHED20.dll!*
+
+# Already suppressed by drmemory default supp we don't have yet.
+LEAK
+name=i#757: RPC binding leaks in sspicli.dll
+RPCRT4.dll!*
+...
+SspiCli.dll!*
+SspiCli.dll!Cre*
+
+# Async NtReadFile false positives. This was fixed in drmemory r772, remove
+# this supp when we pull that rev.
+UNADDRESSABLE ACCESS
+name=https://github.com/DynamoRIO/drmemory/issues/798
+system call NtReadFile parameter #5
+KERNEL32.dll!ReadFile
+
+# Probable syscall false positive.
+UNADDRESSABLE ACCESS
+name=https://github.com/DynamoRIO/drmemory/issues/809
+system call NtGdiPolyPolyDraw parameter #1
+*!gfx::Path::CreateNativeRegion
+
+# Very wide suppression for all uninits in rpcrt4.dll. We get bad stack traces
+# coming out of this module (sometimes only one frame), which makes it hard to
+# write precise suppressions. Until we have bit-level tracking (DRMi#113) we
+# should keep this.
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/113 rpcrt4.dll wildcard
+RPCRT4.dll!*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/841 a
+...
+CRYPTNET.dll!I_CryptNetGetConnectivity
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/841 b
+...
+webio.dll!*
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/841 c
+...
+winhttp.dll!*
+
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/841 d
+...
+CRYPTNET.dll!I_CryptNetGetConnectivity
+
+# Often missing a ntdll.dll!KiUserCallbackDispatcher frame.
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/810
+instruction=test %edx %edx
+USER32.dll!GetClassLongW
+...
+*!ui::CenterAndSizeWindow
+
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/815
+KERNEL*.dll!...
+dxgi.dll!*
+USER32.dll!GetMonitorInfoA
+ntdll.dll!KiUserCallbackDispatcher
+dxgi.dll!*
+WinSATAPI.DLL!*
+
+# Suppress anything in cmd.exe. It's safer to suppress these than disable
+# following, since someone might launch a Chrome process via cmd.exe.
+LEAK
+name=cmd.exe
+...
+cmd.exe!*
+
+# Possible true system use after free.
+UNADDRESSABLE ACCESS
+name=https://github.com/DynamoRIO/drmemory/issues/623
+KERNELBASE.dll!TlsGetValue
+OLEAUT32.dll!SysFreeString
+OLEAUT32.dll!SysAllocStringByteLen
+OLEACC.dll!*
+OLEACC.dll!*
+OLEACC.dll!*
+OLEACC.dll!*
+
+# basic_streambuf seems to leak something in creating a std::_Mutex
+LEAK
+name=https://github.com/DynamoRIO/drmemory/issues/857
+ntdll.dll!...
+ntdll.dll!RtlInitializeCriticalSection
+*!_Mtxinit
+*!std::_Mutex::_Mutex
+*!std::basic_streambuf<>
+
+# Seems to create a DC, sometimes. GetTextMetrics returns no pointers, though.
+LEAK
+name=GDI SetBrushOrgEx leak
+GDI32.dll!...
+GDI32.dll!GetTextMetricsW
+*!gfx::PlatformFontWin::CreateHFontRef
+*!gfx::PlatformFontWin::GetBaseFontRef
+
+###############################################################
+# Benign issues in Chromium
+
+# This test intentionally leaks an object and checks that it's never deleted.
+LEAK
+name=BrowserThreadTest.NotReleasedIfTargetThreadNonExistent leak
+...
+*!BrowserThreadTest_NotReleasedIfTargetThreadNonExistent_Test::TestBody
+
+LEAK
+name=deliberate histogram leak
+...
+*!replace_operator_new
+...
+*!*::*Histogram::FactoryGet
+
+LEAK
+name=deliberate leak for SampleMap
+...
+*!base::SampleMap::Accumulate
+*!base::SparseHistogram::Add
+
+LEAK
+name=deliberate LazyInstance leak
+...
+*!*LeakyLazyInstance*
+...
+*!base::LazyInstance*::Pointer
+
+# Test intentionally leaks an object.
+LEAK
+name=http://crbug.com/86301
+*!replace_operator_new
+...
+*!*_DeadReplyLoopDoesNotDelete_Test::TestBody
+
+# Leak in a binary copy of Firefox 3's NSS dll. Not much we can do about it.
+LEAK
+name=Firefox 3 NSS dll leak
+nspr4.dll!*
+...
+*!NSSDecryptor::~NSSDecryptor
+
+# We get uninit reports inside GMock when it prints the bytes of references to
+# partially initialized objects passed to unexpected method calls.
+UNINITIALIZED READ
+name=http://crbug.com/64887 (GMock printing uninit data)
+...
+*!testing::*::PrintBytesInObjectTo*
+
+# This is an EXPECT_DEATH() that hits a RELEASE_ASSERT(),
+# which intentionally writes to NULL in order to crash.
+UNADDRESSABLE ACCESS
+name=https://crbug.com/497344
+blink_platform.dll!blink::SecurityOrigin::addSuborigin
+*!blink::SecurityOriginTest_Suborigins_Test::TestBody
+
+###############################################################
+# Proactively borrowed from memcheck/suppressions.txt.
+# We have not yet seen these, but we are expanding the sets of tests
+# we're running, and we've hit redness in the past that could have
+# been avoided by already having the Memcheck suppressions.
+# TODO(bruening): review the entire file (not just these) once we're
+# at the peak set of tests we plan to run and remove the unused ones.
+
+UNINITIALIZED READ
+name=bug_101781
+*!encode_one_block
+*!encode_mcu_huff
+*!compress_data
+*!process_data_simple_main
+*!chromium_jpeg_write_scanlines
+*!gfx::JPEGCodec::Encode
+*!gfx::JPEGEncodedDataFromImage
+*!history::TopSites::EncodeBitmap
+*!history::TopSites::SetPageThumbnail
+*!history::ExpireHistoryTest::AddExampleData
+*!history::ExpireHistoryTest::*
+
+UNINITIALIZED READ
+name=bug_101781_d
+*!testing::AssertionResult testing::internal::CmpHelperGE<>
+*!gfx::JPEGCodec_EncodeDecodeRGBA_Test::TestBody
+
+UNINITIALIZED READ
+name=bug_105907
+...
+*!skia::BGRAConvolve2D
+*!skia::ImageOperations::ResizeBasic*
+*!skia::ImageOperations::Resize*
+
+UNINITIALIZED READ
+name=bug_112278
+*!fetch_texel_2d_f_rgba8888
+*!sample_2d_linear
+*!sample_linear_2d
+*!fetch_texel_lod
+*!fetch_texel
+*!_mesa_execute_program
+*!run_program
+*!_swrast_exec_fragment_program
+*!shade_texture_span
+*!_swrast_write_rgba_span
+*!general_triangle
+*!_swrast_validate_triangle
+*!_swrast_Triangle
+*!triangle_rgba
+*!_tnl_render_triangles_elts
+*!run_render
+*!_tnl_run_pipeline
+*!_tnl_draw_prims
+*!_tnl_vbo_draw_prims
+*!vbo_validated_drawrangeelements
+*!vbo_exec_DrawElements
+*!neutral_DrawElements
+
+UNINITIALIZED READ
+name=bug_112278b
+*!fetch_texel_2d_f_rgba8888
+*!sample_2d_nearest
+*!sample_nearest_2d
+*!fetch_texel_lod
+*!fetch_texel
+*!_mesa_execute_program
+*!run_program
+*!_swrast_exec_fragment_program
+*!shade_texture_span
+*!_swrast_write_rgba_span
+*!general_triangle
+...
+*!_swrast_Triangle
+*!triangle_rgba
+...
+*!run_render
+*!_tnl_run_pipeline
+*!_tnl_draw_prims
+*!_tnl_vbo_draw_prims
+
+UNINITIALIZED READ
+name=bug_115419_1
+*!fetch_texel_2d_f_rgba8888
+*!texture_get_row
+*!fast_read_rgba_pixels
+*!read_rgba_pixels
+*!_swrast_ReadPixels
+*!_mesa_ReadPixels
+*!glReadPixels
+*!gpu::gles2::GLES2DecoderImpl::HandleReadPixels
+*!gpu::gles2::GLES2DecoderImpl::DoCommand
+*!gpu::CommandParser::ProcessCommand
+*!gpu::CommandExecutor::PutChanged
+*!webkit::gpu::GLInProcessContext::PumpCommands
+
+UNINITIALIZED READ
+name=bug_115419_2
+*!get_src_arg_mask
+*!_mesa_remove_extra_move_use
+*!_mesa_optimize_program
+*!get_mesa_program
+*!_mesa_ir_link_shader
+*!_mesa_glsl_link_shader
+*!link_program
+*!_mesa_LinkProgramARB
+*!glLinkProgram
+...
+*!gpu::gles2::GLES2DecoderImpl::DoLinkProgram*
+*!gpu::gles2::GLES2DecoderImpl::HandleLinkProgram*
+*!gpu::gles2::GLES2DecoderImpl::DoCommand
+*!gpu::CommandParser::ProcessCommand
+*!gpu::CommandExecutor::PutChanged
+*!webkit::gpu::GLInProcessContext::PumpCommands
+
+UNINITIALIZED READ
+name=bug_138058
+...
+*!blink::WebVTTParser::constructTreeFromToken
+*!blink::WebVTTParser::createDocumentFragmentFromCueText
+*!blink::TextTrackCue::getCueAsHTML
+*!blink::TextTrackCue::updateDisplayTree
+*!blink::HTMLMediaElement::updateActiveTextTrackCues
+
+UNINITIALIZED READ
+name=bug_138220_a
+*!blink::HTMLInputElement::dataList
+*!blink::HTMLInputElement::list
+*!blink::RenderSliderContainer::layout
+*!blink::RenderBlock::layoutBlockChild
+*!blink::RenderBlock::layoutBlockChildren
+*!blink::RenderBlock::layoutBlock
+*!blink::RenderBlock::layout
+*!blink::RenderSlider::layout
+
+UNINITIALIZED READ
+name=bug_138220_b
+*!blink::HTMLInputElement::dataList
+*!blink::HTMLInputElement::list
+*!blink::RenderTheme::paintSliderTicks
+*!blink::RenderThemeChromiumLinux::paintSliderTrack
+*!blink::RenderTheme::paint
+*!blink::RenderBox::paintBoxDecorations
+*!blink::RenderBlock::paintObject
+
+UNINITIALIZED READ
+name=bug_162825
+*!bcmp
+*!gpu::gles2::ShaderTranslatorCache::ShaderTranslatorInitParams::operator<
+*!std::less<>::operator
+...
+*!std::map<>::find
+...
+*!gpu::gles2::GLES2DecoderImpl::InitializeShaderTranslator
+
+UNINITIALIZED READ
+name=bug_176616_a
+*!WebTestRunner::WebTestProxyBase::didCreateDataSource
+*!WebTestRunner::WebTestProxy<>::didCreateDataSource
+*!blink::FrameLoaderClientImpl::createDocumentLoader
+*!blink::FrameLoader::init
+*!blink::Frame::init
+*!blink::WebFrameImpl::initializeAsMainFrame
+*!blink::WebViewImpl::initializeMainFrame
+*!TestShell::createNewWindow
+*!TestShell::createMainWindow
+*!TestShell::initialize
+
+UNINITIALIZED READ
+name=bug_176616_b
+*!WebTestRunner::TestRunner::reset
+*!WebTestRunner::TestInterfaces::resetAll
+*!WebTestRunner::WebTestInterfaces::resetAll
+*!TestShell::resetTestController
+*!runTest
+
+UNINITIALIZED READ
+name=bug_222883
+*!v8::internal::ScavengeVisitor::ScavengePointer
+*!v8::internal::ScavengeVisitor::VisitPointers
+*!v8::internal::StandardFrame::IterateExpressions
+...
+*!v8::internal::Heap::Scavenge
+*!v8::internal::Heap::PerformGarbageCollection*
+
+UNINITIALIZED READ
+name=bug_238170a
+*!blink::ElementRuleCollector::collectMatchingRules
+*!blink::ElementRuleCollector::hasAnyMatchingRules
+
+UNINITIALIZED READ
+name=bug_238170b
+*!blink::ElementRuleCollector::collectMatchingRules
+*!blink::StyleResolver::matchAuthorRules
+
+UNINITIALIZED READ
+name=bug_238170c
+*!blink::ReplaceSelectionCommand::doApply
+*!blink::CompositeEditCommand::apply
+*!blink::applyCommand
+
+UNINITIALIZED READ
+name=bug_259789
+*!blink::::adjustAttributes
+*!blink::WebGLRenderingContext::maybeRestoreContext
+*!blink::Timer<>::fired
+*!blink::ThreadTimers::sharedTimerFiredInternal
+*!blink::ThreadTimers::sharedTimerFired
+*!content::BlinkPlatformImpl::DoTimeout
+
+UNINITIALIZED READ
+name=bug_290405
+*!GrGradientEffect::onIsEqual
+*!GrEffect::isEqual
+*!GrEffectStage::DeferredStage::isEqual
+*!GrDrawState::DeferredState::isEqual
+*!GrInOrderDrawBuffer::needsNewState
+*!GrInOrderDrawBuffer::onDraw
+*!GrDrawTarget::drawIndexedInstances
+*!GrTextContext::flushGlyphs
+
+UNINITIALIZED READ
+name=bug_290435
+*!blink::AudioContext::scheduleNodeDeletion
+*!blink::AudioContext::handlePostRenderTasks
+*!blink::AudioDestinationNode::render
+*!blink::OfflineAudioDestinationNode::offlineRender
+
+UNINITIALIZED READ
+name=bug_364724
+*!base::MD5DigestToBase16
+*!content::WebKitTestRunner::CaptureDumpPixels
+
+UNINITIALIZED READ
+name=bug_298143
+...
+*!blink::TypeConversionsV8Internal*AttributeGetter*
+
+UNINITIALIZED READ
+name=bug_299804
+*!GrConfigConversionEffect::TestForPreservingPMConversions
+*!::test_pm_conversions
+*!GrContext::createPMToUPMEffect
+*!GrContext::readRenderTargetPixels
+*!SkGpuDevice::onReadPixels
+*!SkBaseDevice::readPixels
+*!SkCanvas::readPixels
+*!DeferredDevice::onReadPixels
+*!SkBaseDevice::readPixels
+*!SkCanvas::readPixels
+*!blink::GraphicsContext::readPixels
+*!WTF::PassRefPtr<>
+*!blink::ImageBuffer::getUnmultipliedImageData
+*!blink::CanvasRenderingContext2D::getImageData
+*!blink::CanvasRenderingContext2D::getImageData
+*!blink::CanvasRenderingContext2DV8Internal::getImageDataMethod
+*!blink::CanvasRenderingContext2DV8Internal::getImageDataMethodCallback
+*!v8::internal::FunctionCallbackArguments::Call
+*!v8::internal::HandleApiCallHelper<>
+
+UNINITIALIZED READ
+name=bug_309477
+*!WebTestRunner::EventSender::reset
+*!WebTestRunner::TestInterfaces::resetTestHelperControllers
+*!WebTestRunner::TestInterfaces::resetAll
+...
+*!content::ShellRenderProcessObserver::WebKitInitialized
+*!content::RenderThreadImpl::EnsureWebKitInitialized
+*!content::RenderThreadImpl::OnCreateNewView
+
+###############################################################
+# Real issues in Chromium
+
+LEAK
+name=http://crbug.com/32085
+...
+chrome.dll!NotificationRegistrar::Add
+
+UNINITIALIZED READ
+name=http://crbug.com/57266 (1)
+...
+*!remoting::EncoderVp8::Encode
+
+UNINITIALIZED READ
+name=http://crbug.com/57266 (2)
+...
+*!vp8_*
+
+LEAK
+name=http://crbug.com/70062
+*!PR_Calloc
+*!PR_NewLock
+...
+*!InitSessionCacheLocks
+*!initSessionCacheLocksLazily
+*!PR_CallOnce
+*!ssl_InitSessionCacheLocks
+*!lock_cache
+*!ssl_LookupSID
+*!ssl2_BeginClientHandshake
+*!ssl_Do1stHandshake
+*!SSL_ForceHandshake
+*!net::SSL*SocketNSS::DoHandshake
+*!net::SSL*SocketNSS::DoHandshakeLoop
+
+LEAK
+name=http://crbug.com/74417 a
+*!replace_operator_new
+*!disk_cache::BackendImpl::CreateEntryImpl
+
+LEAK
+name=http://crbug.com/74417 b
+*!replace_operator_new
+*!disk_cache::BackendImpl::NewEntry
+
+# One more disk_cache::BackendImpl leak. See also http://crbug.com/87500.
+LEAK
+name=http://crbug.com/74417 c
+*!replace_operator_new
+...
+*!disk_cache::EntryImpl::UserBuffer::UserBuffer
+
+LEAK
+name=http://crbug.com/75247
+...
+*!replace_operator_new
+*!AutofillDownloadTestHelper::AutofillDownloadTestHelper
+
+LEAK
+name=http://crbug.com/78784
+*!generic_cpp_alloc
+*!operator new
+*!TestingProfile::CreateRequestContext
+*!*ProfileSyncService*::SetUp
+
+LEAK
+name=http://crbug.com/80550 (1)
+...
+*!RenderWidgetHost::WasHidden
+
+LEAK
+name=http://crbug.com/80550 (2)
+...
+*!RenderWidgetHost::WasRestored
+
+LEAK
+name=http://crbug.com/87612
+...
+*!SSL_ConfigSecureServer
+*!net::SSLServerSocketNSS::InitializeSSLOptions
+*!net::SSLServerSocketNSS::Handshake
+
+LEAK
+name=http://crbug.com/88640
+*!generic_cpp_alloc
+*!operator new
+*!ProfileImpl::InitRegisteredProtocolHandlers
+*!ProfileImpl::DoFinalInit
+*!ProfileImpl::OnPrefsLoaded
+
+LEAK
+name=http://crbug.com/91465
+*!generic_cpp_alloc
+*!operator new
+*!browser_sync::internal::WeakHandleCore<>::*
+*!browser_sync::WeakHandle<>::*
+*!syncer::SyncManager::SyncInternal::Init
+
+LEAK
+name=http://crbug.com/91491
+...
+*!CrxUpdateService::ProcessPendingItems
+
+UNINITIALIZED READ
+name=http://crbug.com/92026 (1)
+softokn3.dll!FC_GetFunctionList
+...
+softokn3.dll!NSC_ModuleDBFunc
+
+UNINITIALIZED READ
+name=http://crbug.com/92026 (2)
+freebl3.dll!FREEBL_GetVector
+...
+softokn3.dll!NSC_ModuleDBFunc
+
+# Possible real Chromium issue in DoCrossfade.
+UNINITIALIZED READ
+name=http://crbug.com/110049
+*!media::DoCrossfade<>
+*!media::Crossfade
+*!media::AudioRendererAlgorithmBase::FillBuffer
+
+# Known sqlite3 leaks.
+LEAK
+name=http://crbug.com/113847 (1)
+...
+*!sqlite3MemMalloc
+*!mallocWithAlarm
+*!sqlite3Malloc
+...
+*!yy_reduce
+
+LEAK
+name=http://crbug.com/113847 (2)
+...
+*!openDatabase
+*!sqlite3_open
+
+LEAK
+name=http://crbug.com/115328
+...
+*!GenericInfoViewTest_GenericInfoView_Test::TestBody
+
+# IE frame possible leak of COM object.
+LEAK
+name=http://crbug.com/122399
+ole32.dll!...
+ole32.dll!CoTaskMemAlloc
+urlmon.dll!...
+urlmon.dll!CreateUri
+IEFRAME.dll!*
+
+# RenderWidgetHelper leak in DelayProfileDestruction test.
+LEAK
+name=http://crbug.com/125565
+*!generic_cpp_alloc
+*!operator new
+*!RenderProcessHostImpl::RenderProcessHostImpl
+*!SiteInstanceImpl::GetProcess
+*!BrowserTestOffTheRecord_DelayProfileDestruction_Test::TestBody
+
+LEAK
+name=http://crbug.com/125807
+*!generic_cpp_alloc
+*!operator new
+*!TransportSecurityPersister::TransportSecurityPersister
+*!TransportSecurityPersisterTest::TransportSecurityPersisterTest
+
+UNINITIALIZED READ
+name=bug_113076
+*!media::ConvertYUVToRGB32_C
+*!media::LinearScaleYUVToRGB32RowWithRange_C
+*!media::ScaleYUVToRGB32WithRect
+
+UNINITIALIZED READ
+name=bug_343248
+osmesa.dll!LINTERP
+osmesa.dll!INTERP_4F
+osmesa.dll!_tnl_generic_interp
+
+UNINITIALIZED READ
+name=bug_340752
+...
+*!*::ThreadState::visitStack
+
+UNINITIALIZED READ
+name=bug_343663
+blink_web.dll!blink::RenderBlock::computeInlinePreferredLogicalWidths
+
+UNINITIALIZED READ
+name=bug_343797
+...
+blink_web.dll!blink::MediaQueryExp::create
+blink_web.dll!blink::BisonCSSParser::createFloatingMediaQueryExp
+blink_web.dll!cssyyparse
+
+UNINITIALIZED READ
+name=bug_343915
+blink_web.dll!blink::BisonCSSParser::parseFlex
+blink_web.dll!blink::BisonCSSParser::parseValue
+blink_web.dll!cssyyparse
+
+UNADDRESSABLE ACCESS
+name=BUG_343958
+blink_web.dll!blink::Node::getFlag
+blink_web.dll!blink::Node::inDocument
+blink_web.dll!blink::Node::isDescendantOf
+blink_web.dll!blink::CompositeEditCommand::cloneParagraphUnderNewElement
+blink_web.dll!blink::CompositeEditCommand::moveParagraphWithClones
+
+UNINITIALIZED READ
+name=BUG_344076
+blink_web.dll!blink::Editor::canSmartReplaceWithPasteboard
+blink_web.dll!blink::Editor::pasteAsPlainTextWithPasteboard
+blink_web.dll!blink::Editor::pasteAsPlainText
+blink_web.dll!blink::executePasteAndMatchStyle
+blink_web.dll!blink::Editor::Command::execute
+blink_web.dll!blink::Document::execCommand
+
+UNINITIALIZED READ
+name=BUG_349128
+content.dll!std::*
+...
+content.dll!content::BrowserAccessibilityManagerWin::*
+...
+*!*::UpdateNode
+
+# There are so many osmesa errors we have to suppress (mostly the unpack_RGB*
+# variety) that it's a performance hit. We avoid that by requesting
+# whole-module suppression
+# (see https://github.com/DynamoRIO/drmemory/issues/1529).
+UNINITIALIZED READ
+name=bug_347967_all_osmesa
+osmesa.dll!*
+
+UNINITIALIZED READ
+name=bug_347967
+osmesa.dll!unpack_RGB*888
+osmesa.dll!_mesa_unpack_rgba_row
+osmesa.dll!slow_read_rgba_pixels
+osmesa.dll!read_rgba_pixels
+osmesa.dll!_mesa_readpixels
+...
+gpu.dll!gpu::gles2::GLES2DecoderImpl::DoCommand
+gpu.dll!gpu::CommandParser::ProcessCommand
+gpu.dll!gpu::CommandExecutor::PutChanged
+
+UNINITIALIZED READ
+name=bug_347967,bug_348357
+osmesa.dll!clip_span
+osmesa.dll!_swrast_write_rgba_span
+osmesa.dll!general_triangle
+...
+gpu.dll!gpu::gles2::GLES2DecoderImpl::DoDrawElements
+gpu.dll!gpu::gles2::GLES2DecoderImpl::HandleDrawElements
+gpu.dll!gpu::gles2::GLES2DecoderImpl::DoCommand
+gpu.dll!gpu::CommandParser::ProcessCommand
+
+UNINITIALIZED READ
+name=bug_361594
+...
+skia.dll!SkA8_Shader_Blitter::blitH
+skia.dll!SkBlitter::blitRect
+skia.dll!blitrect
+skia.dll!SkScan::FillIRect
+...
+skia.dll!SkDraw::drawRect
+skia.dll!SkDraw::drawBitmap
+skia.dll!SkBitmapDevice::drawBitmap
+skia.dll!SkCanvas::internalDrawBitmap
+skia.dll!SkCanvas::drawBitmap
+content.dll!content::ScreenshotData::EncodeOnWorker
+
+UNINITIALIZED READ
+name=bug_363487
+blink_web.dll!blink::RenderLayerCompositor::updateIfNeeded
+blink_web.dll!blink::RenderLayerCompositor::updateIfNeededRecursive
+blink_web.dll!blink::FrameView::updateLayoutAndStyleForPainting
+blink_web.dll!blink::PageAnimator::updateLayoutAndStyleForPainting
+
+UNINITIALIZED READ
+name=bug_365101
+*!device::BluetoothAdapterWin::AdapterStateChanged
+
+UNINITIALIZED READ
+name=bug_364146
+...
+v8.dll!*
+net_with_v8.dll!net::ProxyResolverV8::Context::*
+
+UNINITIALIZED READ
+name=bug_334448
+*!CLD2::UTF8GenericReplaceInternal
+*!CLD2::UTF8GenericReplace
+*!CLD2::ScriptScanner::LowerScriptSpan
+*!CLD2::ScriptScanner::GetOneScriptSpanLower
+*!CLD2::DetectLanguageSummaryV2
+*!CLD2::DetectLanguageSummary
+
+UNINITIALIZED READ
+name=bug_42043
+...
+QuickTime.qts!*
+
+UNINITIALIZED READ
+name=bug_369141
+...
+*!blink::RenderLayerClipper::updateClipRects
+*!blink::RenderLayerClipper::parentClipRects
+*!blink::RenderLayerClipper::backgroundClipRect
+
+HANDLE LEAK
+name=bug_370178
+system call NtCreateEvent
+KERNELBASE.dll!CreateEventExW
+KERNELBASE.dll!CreateEventW
+base.dll!base::WaitableEvent::WaitableEvent
+gpu.dll!gpu::InProcessCommandBuffer::Initialize
+gl_in_process_context.dll!gpu::`anonymous namespace'::GLInProcessContextImpl::Initialize
+gl_in_process_context.dll!gpu::GLInProcessContext::CreateContext
+
+UNINITIALIZED READ
+name=bug_371844
+*!content::GamepadProvider::PadState::Match
+*!content::GamepadProvider::DoPoll
+
+UNINITIALIZED READ
+name=bug_371950
+media.dll!ConvertYUVToRGB32Row_SSE
+media.dll!media::ConvertYUVToRGB32_SSE
+media.dll!media::ConvertVideoFrameToBitmap
+media.dll!media::SkCanvasVideoRenderer::Paint
+cc.dll!cc::VideoResourceUpdater::CreateForSoftwarePlanes
+cc.dll!cc::VideoResourceUpdater::CreateExternalResourcesFromVideoFrame
+cc.dll!cc::VideoLayerImpl::WillDraw
+
+UNINITIALIZED READ
+name=bug_371959
+content.dll!webrtc::PeerConnection::DoInitialize
+content.dll!webrtc::PeerConnection::Initialize
+content.dll!webrtc::PeerConnectionFactory::CreatePeerConnection_s
+content.dll!webrtc::PeerConnectionFactory::OnMessage
+content.dll!jingle_glue::JingleThreadWrapper::Send
+content.dll!webrtc::PeerConnectionFactory::CreatePeerConnection
+content.dll!webrtc::PeerConnectionFactoryInterface::CreatePeerConnection
+
+# This suppression is deliberately general, as bugs reported in
+# ProcessOutgoingMessages are difficult to track down until we
+# get our annotations in place.
+# TODO(bruening): add annotations once we have the infrastructure.
+UNINITIALIZED READ
+name=bug_371991
+system call NtWriteFile parameter #5
+KERNELBASE.dll!WriteFile
+KERNEL32.dll!WriteFile*
+*!IPC::Channel*::ProcessOutgoingMessages
+
+UNINITIALIZED READ
+name=bug_372254
+*!content::*::NotifyPluginProcessHostHelper
+
+UNINITIALIZED READ
+name=drm_i#1546
+*!testing::internal::CmpHelperEQ<>
+...
+*!content::BrowserPluginTest_ResizeFlowControl_Test::TestBody
+
+UNINITIALIZED READ
+name=bug_374410
+*!ui::NativeThemeWin::PaintScrollbarTrack
+*!ui::NativeThemeWin::PaintDirect
+
+UNINITIALIZED READ
+name=bug_377728
+...
+*!Hunspell::suggest
+*!HunspellEngine::FillSuggestionList
+*!SpellcheckLanguage::SpellCheckWord
+*!SpellCheck::SpellCheckWord
+
+UNINITIALIZED READ
+name=bug_387373
+*!blink::WebEmbeddedWorkerImpl::startWorkerContext
+*!content::EmbeddedWorkerDispatcher::OnStartWorker
+*!EmbeddedWorkerMsg_StartWorker::Dispatch<>
+
+UNADDRESSABLE ACCESS
+name=bug_389132
+content.dll!crypto_kernel_do_load_cipher_type
+content.dll!crypto_kernel_load_cipher_type
+content.dll!crypto_kernel_init
+content.dll!cricket::SrtpSession::SetKey
+content.dll!cricket::SrtpSession::SetSend
+content.dll!cricket::SrtpFilter::SetRtpParams
+content.dll!cricket::BaseChannel::SetupDtlsSrtp
+content.dll!cricket::BaseChannel::ChannelWritable_w
+content.dll!cricket::BaseChannel::OnWritableState
+
+UNINITIALIZED READ
+name=bug_392585
+system call NtCreateFile parameter #9
+*!testing::internal::HandleExceptionsInMethodIfSupported<>
+
+UNADDRESSABLE ACCESS
+name=bug_398850
+...
+wtf.dll!WTF::PlatformCondition::timedWait
+
+UNINITIALIZED READ
+name=bug_399293
+blink_web.dll!blink::InputMethodController::extendSelectionAndDelete
+blink_web.dll!blink::WebLocalFrameImpl::extendSelectionAndDelete
+content.dll!content::RenderFrameImpl::OnExtendSelectionAndDelete
+*!content::RenderViewImplTest_OnExtendSelectionAndDelete_Test::TestBody
+*!testing::internal::HandleExceptionsInMethodIfSupported<>
+
+UNINITIALIZED READ
+name=bug_398547
+system call NtWriteFile parameter #5
+KERNELBASE.dll!WriteFile
+KERNEL32.dll!WriteFile
+mojo_system_impl.dll!mojo::system::`anonymous namespace'::RawChannelWin::WriteNoLock
+
+UNINITIALIZED READ
+name=bug_399842
+skia.dll!S32A_Opaque_BlitRow32_SSE4
+
+HANDLE LEAK
+name=bug_403544
+system call NtCreateSemaphore
+KERNELBASE.dll!CreateSemaphoreExW
+...
+v8.dll!v8::internal::SweeperThread::SweeperThread
+...
+blink_web.dll!blink::WorkerScriptController::WorkerScriptController
+blink_web.dll!blink::WorkerGlobalScope::WorkerGlobalScope
+blink_web.dll!blink::ServiceWorkerGlobalScope::ServiceWorkerGlobalScope
+
+UNINITIALIZED READ
+name=bug_414268
+pdf.dll!chrome_pdf::PDFiumEngine::OnMouseMove
+pdf.dll!chrome_pdf::PDFiumEngine::HandleEvent
+pdf.dll!chrome_pdf::OutOfProcessInstance::HandleInputEvent
+pdf.dll!pp::InputEvent_HandleEvent
+ppapi_proxy.dll!ppapi::CallWhileUnlocked<>
+ppapi_proxy.dll!ppapi::proxy::PPP_InputEvent_Proxy::OnMsgHandleFilteredInputEvent
+
+UNADDRESSABLE ACCESS
+name=bug_425097
+...
+*!ash::test::ShelfViewTest_CheckDragAndDropFromOverflowBubbleToShelf_Test::TestBody
+
+UNINITIALIZED READ
+name=bug_432067
+system call NtCreateFile parameter #9
+MSWSOCK.dll!*
+content.dll!content::AppCacheStorageImpl::DatabaseTask::CallRun
+
+UNADDRESSABLE ACCESS
+name=bug_436131
+*!ash::test::ShelfViewTestAPI::RunMessageLoopUntilAnimationsDone
+*!ash::test::ShelfViewTest_OverflowBubbleSize_Test::TestBody
+*!testing::internal::HandleExceptionsInMethodIfSupported<>
+
+UNADDRESSABLE ACCESS
+name=bug_436131_b
+views.dll!base::ObserverListBase<>::RemoveObserver
+*!ash::test::ShelfViewTestAPI::RunMessageLoopUntilAnimationsDone
+*!testing::internal::HandleExceptionsInMethodIfSupported<>
+
+UNADDRESSABLE ACCESS
+name=bug_436131_c
+*!ash::test::ShelfViewTestAPI::GetPreferredSize
+*!ash::test::ShelfViewTest_OverflowBubbleSize_Test::TestBody
+*!testing::internal::HandleExceptionsInMethodIfSupported<>
+
+UNINITIALIZED READ
+name=bug_455417
+*!std::char_traits<>::compare
+...
+*!*::*URLRequest*::TestBody
+
+UNINITIALIZED READ
+name=bug_468169
+...
+*!encode_nonrd_sb_row
+*!vp9_encode_tile
+
+UNINITIALIZED READ
+name=bug_470848
+blink_platform.dll!blink::Heap::RegionTree::lookup
+*!testing::internal::HandleExceptionsInMethodIfSupported<>
+
+HANDLE LEAK
+name=https://crbug.com/480741
+system call NtDuplicateObject
+KERNELBASE.dll!DuplicateHandle
+KERNEL32.dll!DuplicateHandle
+base.dll!base::SharedMemory::ShareToProcessCommon
+gl_wrapper.dll!gl::GLImageSharedMemory::Initialize
+content.dll!content::GpuChannel::CreateImageForGpuMemoryBuffer
+content.dll!content::GpuCommandBufferStub::OnCreateImage
+
+HANDLE LEAK
+name=https://crbug.com/481305
+system call NtCreateSection
+KERNELBASE.dll!CreateFileMappingW
+base.dll!base::SharedMemory::Create
+base.dll!base::SharedMemory::CreateAnonymous
+content.dll!content::ChildThreadImpl::AllocateSharedMemory
+content.dll!content::RenderThreadImpl::HostAllocateSharedMemoryBuffer
+content.dll!content::RenderThreadImpl::AllocateSharedMemory
+...
+content.dll!content::WebGraphicsContext3DCommandBufferImpl::CreateContext
+
+HANDLE LEAK
+name=https://crbug.com/489779
+system call NtUserCreateWindowEx
+USER32.dll!UnregisterClassW
+USER32.dll!UnregisterClassW
+USER32.dll!CreateWindowExW
+gfx.dll!gfx::WindowImpl::Init
+win_window.dll!ui::WinWindow::WinWindow
+aura.dll!aura::WindowTreeHostPlatform::WindowTreeHostPlatform
+aura.dll!aura::WindowTreeHost::Create
+
+UNINITIALIZED READ
+name=bug_492821
+*!blink::CSSPropertyParser::validUnit
+...
+*!blink::CSSPropertyParser::parseValue
+*!blink::CSSPropertyParser::parseValue
+*!blink::CSSParserImpl::consumeDeclarationValue
+*!blink::CSSParserImpl::consumeDeclaration
+*!blink::CSSParserImpl::consumeDeclarationList
+*!blink::CSSParserImpl::consumeStyleRule
+*!blink::CSSParserImpl::consumeQualifiedRule
+*!blink::CSSParserImpl::consumeRuleList<>
+*!blink::CSSParserImpl::parseStyleSheet
+*!blink::CSSParser::parseSheet
+*!blink::StyleSheetContents::parseStringAtPosition
+*!blink::StyleEngine::parseSheet
+*!blink::StyleEngine::createSheet
+*!blink::StyleElement::createSheet
+*!blink::StyleElement::process
+
+UNINITIALIZED READ
+name=bug_493167
+system call NtWriteFile parameter #5
+KERNELBASE.dll!WriteFile
+KERNEL32.dll!WriteFile
+*!base::File::WriteAtCurrentPos
+*!sessions::SessionBackend::AppendCommandsToFile
+*!sessions::SessionBackend::AppendCommands
+
+UNADDRESSABLE ACCESS
+name=bug_505734
+webcore_shared.dll!blink::FocusController::focusedOrMainFrame
+content.dll!content::RenderWidget::GetSelectionBounds
+content.dll!content::RenderWidget::UpdateSelectionBounds
+content.dll!content::RenderWidget::WillBeginCompositorFrame
+cc.dll!base::internal::InvokeHelper<>::MakeItSo
+cc.dll!base::internal::Invoker<>::Run
+
+UNADDRESSABLE ACCESS
+name=bug_506557_a
+drmemorylib.dll!replace_memmove
+base.dll!base::trace_event::TraceLog::RemoveEnabledStateObserver
+base.dll!base::trace_event::TraceEventSystemStatsMonitor::~TraceEventSystemStatsMonitor
+content.dll!content::BrowserMainLoop::ShutdownThreadsAndCleanUp
+content.dll!content::BrowserMainRunnerImpl::Shutdown
+content.dll!content::BrowserMain
+content.dll!content::RunNamedProcessTypeMain
+content.dll!content::ContentMainRunnerImpl::Run
+content.dll!content::ContentMain
+*!content::BrowserTestBase::SetUp
+*!InProcessBrowserTest::SetUp
+*!testing::internal::HandleExceptionsInMethodIfSupported<>
+
+UNINITIALIZED READ
+name=bug_506557_b
+base.dll!base::trace_event::TraceLog::RemoveEnabledStateObserver
+content.dll!content::V8SamplingProfiler::~V8SamplingProfiler
+content.dll!content::RenderThreadImpl::~RenderThreadImpl
+content.dll!content::RenderThreadImpl::`vector deleting destructor'
+content.dll!content::ChildProcess::~ChildProcess
+content.dll!content::RenderProcessImpl::`scalar deleting destructor'
+content.dll!content::InProcessRendererThread::CleanUp
+base.dll!base::`anonymous namespace'::ThreadFunc
+KERNEL32.dll!BaseThreadInitThunk
+
+UNADDRESSABLE ACCESS
+name=bug_506557_c
+base.dll!base::trace_event::TraceLog::RemoveEnabledStateObserver
+content.dll!content::V8SamplingProfiler::~V8SamplingProfiler
+content.dll!content::RenderThreadImpl::~RenderThreadImpl
+content.dll!content::RenderThreadImpl::`vector deleting destructor'
+content.dll!content::ChildProcess::~ChildProcess
+content.dll!content::RenderProcessImpl::`scalar deleting destructor'
+content.dll!content::InProcessRendererThread::CleanUp
+base.dll!base::`anonymous namespace'::ThreadFunc
+KERNEL32.dll!BaseThreadInitThunk
+
+UNADDRESSABLE ACCESS
+name=bug_506557_d
+base.dll!std::vector<>::push_back
+base.dll!base::trace_event::TraceLog::AddEnabledStateObserver
+base.dll!base::trace_event::MemoryDumpManager::Initialize
+content.dll!content::ChildThreadImpl::Init
+content.dll!content::ChildThreadImpl::ChildThreadImpl
+content.dll!content::RenderThreadImpl::RenderThreadImpl
+content.dll!content::InProcessRendererThread::Init
+base.dll!base::`anonymous namespace'::ThreadFunc
+KERNEL32.dll!BaseThreadInitThunk
+
+UNADDRESSABLE ACCESS
+name=bug_506557_e
+base.dll!std::vector<>::push_back
+base.dll!base::trace_event::TraceLog::AddEnabledStateObserver
+content.dll!content::V8SamplingProfiler::V8SamplingProfiler
+content.dll!content::RenderThreadImpl::EnsureWebKitInitialized
+content.dll!content::RenderThreadImpl::OnCreateNewView
+content.dll!ViewMsg_New::Dispatch<>
+content.dll!content::RenderThreadImpl::OnControlMessageReceived
+content.dll!content::ChildThreadImpl::OnMessageReceived
+ipc.dll!IPC::ChannelProxy::Context::OnDispatchMessage
+ipc.dll!base::internal::Invoker<>::Run
+base.dll!base::debug::TaskAnnotator::RunTask
+scheduler.dll!scheduler::TaskQueueManager::ProcessTaskFromWorkQueue
+scheduler.dll!scheduler::TaskQueueManager::DoWork
+scheduler.dll!base::internal::Invoker<>::Run
+base.dll!base::debug::TaskAnnotator::RunTask
+base.dll!base::MessageLoop::RunTask
+base.dll!base::MessageLoop::DeferOrRunPendingTask
+base.dll!base::MessageLoop::DoWork
+base.dll!base::MessagePumpForUI::DoRunLoop
+base.dll!base::MessagePumpWin::Run
+base.dll!base::MessageLoop::RunHandler
+base.dll!base::MessageLoop::Run
+base.dll!base::Thread::Run
+base.dll!base::Thread::ThreadMain
+base.dll!base::`anonymous namespace'::ThreadFunc
+KERNEL32.dll!BaseThreadInitThunk
+
+UNINITIALIZED READ
+name=bug_508794a
+...
+ucrtbase.dll!ismbblead
+ucrtbase.dll!free_base
+ucrtbase.dll!_stdio_common_vsnwprintf_s
+ucrtbase.dll!_stdio_common_vsnwprintf_s
+*!base::vswprintf
+*!base::`anonymous namespace'::StringAppendVT<>
+*!base::StringPrintf
+*!content::AccessibilityTreeFormatterWin::ToString
+*!content::AccessibilityTreeFormatter::RecursiveFormatAccessibilityTree
+
+UNADDRESSABLE ACCESS
+name=bug_508794b
+ucrtbase.dll!wcsnlen
+ucrtbase.dll!ismbblead
+ucrtbase.dll!free_base
+ucrtbase.dll!_stdio_common_vsnwprintf_s
+ucrtbase.dll!_stdio_common_vsnwprintf_s
+*!base::vswprintf
+*!base::`anonymous namespace'::StringAppendVT<>
+*!base::StringPrintf
+*!content::AccessibilityTreeFormatterWin::ToString
+*!content::AccessibilityTreeFormatter::RecursiveFormatAccessibilityTree
+
+UNADDRESSABLE ACCESS
+name=bug_534881
+...
+v8.dll!v8::internal::`anonymous namespace'::Invoke
+v8.dll!v8::internal::Execution::Call
+v8.dll!v8::Function::Call
+webcore_shared.dll!blink::V8ScriptRunner::callFunction
+webcore_shared.dll!blink::ScriptController::callFunction
+...
+webcore_shared.dll!blink::LocalDOMWindow::dispatchEvent
+
+UNINITIALIZED READ
+name=bug_519041_a
+ntdll.dll!RtlSetLastWin32ErrorAndNtStatusFromNtStatus
+
+UNINITIALIZED READ
+name=bug_519041_b
+ntdll.dll!RtlDecodePointer
+
+UNADDRESSABLE ACCESS
+name=bug_545273
+v8.dll!v8::internal::LookupIterator::LookupIterator
+v8.dll!v8::internal::LookupIterator::PropertyOrElement
+v8.dll!v8::internal::Runtime::GetObjectProperty
+v8.dll!v8::internal::KeyedLoadIC::Load
+v8.dll!v8::internal::Runtime_KeyedLoadIC_Miss
+v8.dll!v8::internal::`anonymous namespace'::Invoke
+v8.dll!v8::internal::Execution::Call
+...
+net_with_v8.dll!net::ProxyResolverV8::Context::*
+
+HANDLE LEAK
+name=bug_548039
+system call NtCreateSection
+KERNELBASE.dll!CreateFileMappingW
+base.dll!base::SharedMemory::Create
+content.dll!content::ChildThreadImpl::AllocateSharedMemory
+content.dll!content::RenderThreadImpl::HostAllocateSharedMemoryBuffer
+content.dll!content::RenderThreadImpl::AllocateSharedMemory
+...
+content.dll!content::WebGraphicsContext3DCommandBufferImpl::CreateContext
+content.dll!content::WebGraphicsContext3DCommandBufferImpl::MaybeInitializeGL
+
+# Dr. Memory does not yet propagate uninits through ymm registers so we
+# avoid any false positives in the meantime that might show up in
+# libyuv and other code:
+UNINITIALIZED READ
+name=https://github.com/DynamoRIO/drmemory/issues/1485
+instruction=*ymm*
+media.dll!*
+
+HANDLE LEAK
+name=bug_562701
+system call NtCreateSection
+KERNELBASE.dll!CreateFileMappingW
+base.dll!base::SharedMemory::Create
+content.dll!content::ChildThreadImpl::AllocateSharedMemory
+content.dll!content::RenderThreadImpl::HostAllocateSharedMemoryBuffer
+content.dll!content::ImageDataPlatformBackend::Init
+content.dll!content::PPB_ImageData_Impl::Init
+content.dll!content::PPB_ImageData_Impl::Create
+content.dll!content::ResourceCreationImpl::CreateImageData
+ppapi_proxy.dll!ppapi::proxy::PPB_ImageData_Proxy::CreateImageData
+ppapi_proxy.dll!ppapi::proxy::PPB_ImageData_Proxy::OnHostMsgCreatePlatform
+
+INVALID HEAP ARGUMENT
+name=bug_571551_a
+...
+media.dll!mkvmuxer::Segment::DoNewClusterProcessing
+media.dll!mkvmuxer::Segment::AddGenericFrame
+media.dll!mkvmuxer::Segment::AddFrame
+media.dll!media::WebmMuxer::AddFrame
+media.dll!media::WebmMuxer::OnEncodedVideo
+content.dll!content::MediaRecorderHandler::OnEncodedVideo
+
+UNADDRESSABLE ACCESS
+name=bug_571551_b
+...
+media.dll!mkvmuxer::Segment::DoNewClusterProcessing
+media.dll!mkvmuxer::Segment::AddGenericFrame
+media.dll!mkvmuxer::Segment::AddFrame
+media.dll!media::WebmMuxer::AddFrame
+media.dll!media::WebmMuxer::OnEncodedVideo
+content.dll!content::MediaRecorderHandler::OnEncodedVideo
+
+HANDLE LEAK
+name=bug_571553
+system call NtUserWindowFromPoint
+content.dll!content::RenderWidgetHostViewAura::UpdateCursorIfOverSelf
+content.dll!content::RenderWidgetHostViewAura::SetIsLoading
+content.dll!content::RenderWidgetHostImpl::SetIsLoading
+content.dll!content::RenderFrameHostManager::SetIsLoading
+
+HANDLE LEAK
+name=bug_571554_a
+system call NtGdiCreateDIBSection
+GDI32.dll!CreateDIBSection
+skia.dll!`anonymous namespace'::CreateHBitmap
+skia.dll!skia::BitmapPlatformDevice::Create
+skia.dll!skia::CreatePlatformCanvas
+surface.dll!TransportDIB::GetPlatformCanvas
+content.dll!content::ImageDataPlatformBackend::Map
+content.dll!content::PepperGraphics2DHost::Init
+content.dll!content::PepperGraphics2DHost::Create
+content.dll!content::ContentRendererPepperHostFactory::CreateResourceHost
+
+HANDLE LEAK
+name=bug_571554_b
+system call NtCreateSection
+KERNELBASE.dll!CreateFileMappingW
+base.dll!base::SharedMemory::Create
+content.dll!content::ChildThreadImpl::AllocateSharedMemory
+content.dll!content::RenderThreadImpl::HostAllocateSharedMemoryBuffer
+content.dll!content::ImageDataPlatformBackend::Init
+content.dll!content::PPB_ImageData_Impl::Init
+content.dll!content::PepperGraphics2DHost::Init
+content.dll!content::PepperGraphics2DHost::Create
+content.dll!content::ContentRendererPepperHostFactory::CreateResourceHost
+
+UNINITIALIZED READ
+name=bug_591092
+webcore_shared.dll!std::_Equal<>
+webcore_shared.dll!WTF::operator==<>
+webcore_shared.dll!blink::CSSVariableData::operator==
+webcore_shared.dll!blink::dataEquivalent<>
+webcore_shared.dll!blink::dataEquivalent<>
+...
+webcore_shared.dll!blink::Document::updateStyle
+webcore_shared.dll!blink::Document::updateLayoutTree
+
+UNINITIALIZED READ
+name=bug_593594
+blink_platform.dll!qcms_transform_data_rgba_out_lut_sse2
+blink_platform.dll!qcms_transform_data_type
+blink_platform.dll!blink::JPEGImageDecoder::outputScanlines
+blink_platform.dll!blink::JPEGImageReader::decode
+
+UNINITIALIZED READ
+name=bug_594657
+*!std::_Tree<>::_Eqrange<>
+*!courgette::adjustment_method_2::AssignmentProblem::Declassify
+*!courgette::adjustment_method_2::AssignmentProblem::AssignOne
+*!courgette::adjustment_method_2::AssignmentProblem::FindAndAssignBestLeader
+*!courgette::adjustment_method_2::AssignmentProblem::Solve
+*!courgette::adjustment_method_2::Adjuster::Solve
+*!courgette::adjustment_method_2::Adjuster::Finish
+*!courgette::adjustment_method_2::Adjuster::Adjust
+*!courgette::Adjust
+
+UNINITIALIZED READ
+name=bug_594781a
+chromium_sqlite3.dll!PSCreateMemoryPropertyStore
+...
+chromium_sqlite3.dll!StgDeserializePropVariant
+
+UNADDRESSABLE ACCESS
+name=bug_594781b
+...
+chromium_sqlite3.dll!PropVariantToGUID
+sql.dll!sql::Connection::GetUniqueStatement
+
+HANDLE LEAK
+name=bug_594829a
+system call NtDuplicateObject
+KERNELBASE.dll!DuplicateHandle
+KERNEL32.dll!DuplicateHandle
+*!sandbox::BrokerServicesBase::AddTargetPeer
+content.dll!content::StartSandboxedProcess
+
+HANDLE LEAK
+name=bug_594829b
+system call NtCreateUserProcess
+KERNEL32.dll!CreateProcessInternalW
+KERNEL32.dll!CreateProcessW
+base.dll!base::LaunchProcess
+base.dll!base::LaunchProcess
+content.dll!content::StartSandboxedProcess
+
+HANDLE LEAK
+name=bug_595093a
+system call NtCreateFile
+KERNELBASE.dll!CreateFileW
+KERNEL32.dll!CreateFileW
+net.dll!disk_cache::File::Init
+net.dll!disk_cache::MappedFile::Init
+net.dll!disk_cache::BackendImpl::InitBackingStore
+net.dll!disk_cache::BackendImpl::SyncInit
+net.dll!disk_cache::BackendIO::ExecuteBackendOperation
+
+HANDLE LEAK
+name=bug_595093b
+system call NtCreateSection
+KERNELBASE.dll!CreateFileMappingW
+net.dll!disk_cache::MappedFile::Init
+net.dll!disk_cache::BackendImpl::InitBackingStore
+net.dll!disk_cache::BackendImpl::SyncInit
+net.dll!disk_cache::BackendIO::ExecuteBackendOperation
+
+HANDLE LEAK
+name=bug_595105
+system call NtDuplicateObject
+KERNELBASE.dll!DuplicateHandle
+KERNEL32.dll!DuplicateHandle
+base.dll!`anonymous namespace'::CreateFileMappingWithReducedPermissions
+base.dll!base::SharedMemory::Create
+base.dll!base::SharedMemory::CreateAndMapAnonymous
+content.dll!content::GpuMemoryBufferImplSharedMemory::Create
+content.dll!content::BrowserGpuMemoryBufferManager::HandleCreateGpuMemoryBufferOnIO
+
+HANDLE LEAK
+name=bug_595146
+system call NtCreateFile
+KERNELBASE.dll!CreateFileW
+KERNEL32.dll!CreateFileW
+base.dll!base::File::DoInitialize
+base.dll!base::File::Initialize
+net.dll!net::FileStream::Context::OpenFileImpl
+
+HANDLE LEAK
+name=bug_595149
+system call NtCreateIoCompletion
+KERNELBASE.dll!CreateIoCompletionPort
+base.dll!base::MessagePumpForIO::MessagePumpForIO
+
+HANDLE LEAK
+name=bug_598364
+system call NtGdiCreateDIBSection
+GDI32.dll!CreateDIBSection
+skia.dll!`anonymous namespace'::CreateHBitmap
+skia.dll!skia::BitmapPlatformDevice::Create
+...
+webcore_shared.dll!blink::ScriptController::createPluginWrapper
+webcore_shared.dll!blink::HTMLPlugInElement::pluginWrapper
diff --git a/chromium/tools/valgrind/drmemory_analyze.py b/chromium/tools/valgrind/drmemory_analyze.py
new file mode 100755
index 00000000000..29fc0ed4b0c
--- /dev/null
+++ b/chromium/tools/valgrind/drmemory_analyze.py
@@ -0,0 +1,202 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# drmemory_analyze.py
+
+''' Given a Dr. Memory output file, parses errors and uniques them.'''
+
+from collections import defaultdict
+import common
+import hashlib
+import logging
+import optparse
+import os
+import re
+import subprocess
+import sys
+import time
+
class DrMemoryError:
  """One Dr. Memory error report paired with its (normalized) suppression.

  Instances hash and compare by the normalized suppression text, so reports
  that differ only in ids/module names but share a suppression are treated
  as duplicates when collected into sets.
  """

  def __init__(self, report, suppression, testcase):
    """Args:
      report: str, the full error report text from the results file.
      suppression: str, the Dr. Memory-generated suppression for this error.
      testcase: str or None, name of the test the report came from.
    """
    self._report = report
    self._testcase = testcase

    # Chromium-specific transformations of the suppressions:
    # Replace 'any_test.exe' and 'chrome.dll' with '*', then remove the
    # Dr.Memory-generated error ids from the name= lines as they don't
    # make sense in a multiprocess report.
    supp_lines = suppression.split("\n")
    # range() instead of Python-2-only xrange() so this runs on Python 3 too.
    for l in range(len(supp_lines)):
      if supp_lines[l].startswith("name="):
        supp_lines[l] = "name=<insert_a_suppression_name_here>"
      if supp_lines[l].startswith("chrome.dll!"):
        supp_lines[l] = supp_lines[l].replace("chrome.dll!", "*!")
      # Generalize '<something>.exe!frame' to '*!frame'.
      bang_index = supp_lines[l].find("!")
      d_exe_index = supp_lines[l].find(".exe!")
      if bang_index >= 4 and d_exe_index + 4 == bang_index:
        supp_lines[l] = "*" + supp_lines[l][bang_index:]
    self._suppression = "\n".join(supp_lines)

  def __str__(self):
    """Render the report, its source test and its suppression for bot logs."""
    output = ""
    output += "### BEGIN MEMORY TOOL REPORT (error hash=#%016X#)\n" % \
        self.ErrorHash()
    output += self._report + "\n"
    if self._testcase:
      output += "The report came from the `%s` test.\n" % self._testcase
    output += "Suppression (error hash=#%016X#):\n" % self.ErrorHash()
    output += (" For more info on using suppressions see "
        "http://dev.chromium.org/developers/how-tos/using-drmemory#TOC-Suppressing-error-reports-from-the-\n")
    output += "{\n%s\n}\n" % self._suppression
    output += "### END MEMORY TOOL REPORT (error hash=#%016X#)\n" % \
        self.ErrorHash()
    return output

  # This is a device-independent hash identifying the suppression.
  # By printing out this hash we can find duplicate reports between tests and
  # different shards running on multiple buildbots
  def ErrorHash(self):
    """Return a 64-bit int hash of the normalized suppression text."""
    # Encode explicitly: hashlib.md5() requires bytes on Python 3, and for
    # ASCII suppressions this produces the same digest as the Python 2 code.
    return int(hashlib.md5(self._suppression.encode("utf-8")).hexdigest()[:16],
               16)

  def __hash__(self):
    return hash(self._suppression)

  def __eq__(self, rhs):
    # Compare suppression text to suppression text. The previous code
    # compared a str against the whole rhs object and only worked through
    # Python's reflected-== fallback; comparing to a plain string is still
    # supported for backward compatibility.
    if isinstance(rhs, DrMemoryError):
      return self._suppression == rhs._suppression
    return self._suppression == rhs

  def __ne__(self, rhs):
    # Needed on Python 2, where __ne__ is not derived from __eq__.
    return not self.__eq__(rhs)
+
+
class DrMemoryAnalyzer:
  """Given a set of Dr.Memory output files, parse all the errors out of
  them, unique them and output the results."""

  def __init__(self):
    # Reports already printed across all previous Report() calls.
    self.known_errors = set()
    self.error_count = 0
    # Initialized here as well so ParseReportFile() can be called on its own;
    # previously it was only created inside Report(), so a direct
    # ParseReportFile() call raised AttributeError. Report() resets it per run.
    self.used_suppressions = defaultdict(int)

  def ReadLine(self):
    """Advance self.line_ to the next line of the current results file."""
    self.line_ = self.cur_fd_.readline()

  def ReadSection(self):
    """Collect lines starting at self.line_ up to the first blank line."""
    result = [self.line_]
    self.ReadLine()
    while len(self.line_.strip()) > 0:
      result.append(self.line_)
      self.ReadLine()
    return result

  def ParseReportFile(self, filename, testcase):
    """Parse one Dr. Memory results file.

    Args:
      filename: path to a '...results...' file; the matching suppressions
          file is derived by replacing 'results' with 'suppress'.
      testcase: str or None, the test the reports belong to.

    Returns:
      A list of DrMemoryError objects (plus plain strings for tool
      'ASSERT FAILURE' lines). Also tallies self.used_suppressions.
    """
    ret = []

    # First, read the generated suppressions file so we can easily lookup a
    # suppression for a given error.
    generated_suppressions = {}  # Key -> Error #, Value -> Suppression text.
    # 'with' guarantees the file is closed even if parsing raises.
    with open(filename.replace("results", "suppress"), 'r') as supp_fd:
      for line in supp_fd:
        # NOTE: this regexp looks fragile. Might break if the generated
        # suppression format slightly changes.
        m = re.search(r"# Suppression for Error #([0-9]+)", line.strip())
        if not m:
          continue
        error_id = int(m.groups()[0])
        assert error_id not in generated_suppressions
        # OK, now read the next suppression (the nested loop advances the
        # same file iterator until a comment or blank line).
        cur_supp = ""
        for supp_line in supp_fd:
          if supp_line.startswith("#") or supp_line.strip() == "":
            break
          cur_supp += supp_line
        generated_suppressions[error_id] = cur_supp.strip()

    self.cur_fd_ = open(filename, 'r')
    while True:
      self.ReadLine()
      if self.line_ == '':
        break

      match = re.search(r"^Error #([0-9]+): (.*)", self.line_)
      if match:
        error_id = int(match.groups()[0])
        self.line_ = match.groups()[1].strip() + "\n"
        report = "".join(self.ReadSection()).strip()
        suppression = generated_suppressions[error_id]
        ret.append(DrMemoryError(report, suppression, testcase))

      if re.search("SUPPRESSIONS USED:", self.line_):
        self.ReadLine()
        while self.line_.strip() != "":
          line = self.line_.strip()
          (count, name) = re.match(r" *([0-9\?]+)x(?: \(.*?\))?: (.*)",
                                   line).groups()
          if count == "?":
            # Whole-module suppressions have no count available: assume 1.
            count = 1
          else:
            count = int(count)
          self.used_suppressions[name] += count
          self.ReadLine()

      if self.line_.startswith("ASSERT FAILURE"):
        ret.append(self.line_.strip())

    self.cur_fd_.close()
    return ret

  def Report(self, filenames, testcase, check_sanity):
    """Parse all |filenames| and print the de-duplicated reports.

    Returns:
      0 when no error reports were found, -1 otherwise.
    """
    sys.stdout.flush()
    # TODO(timurrrr): support positive tests / check_sanity==True
    self.used_suppressions = defaultdict(int)

    to_report = []
    reports_for_this_test = set()
    for f in filenames:
      cur_reports = self.ParseReportFile(f, testcase)

      # Filter out the reports that were there in previous tests.
      for r in cur_reports:
        if r in reports_for_this_test:
          # A similar report is about to be printed for this test.
          pass
        elif r in self.known_errors:
          # A similar report has already been printed in one of the prev tests.
          to_report.append("This error was already printed in some "
                           "other test, see 'hash=#%016X#'" % r.ErrorHash())
          reports_for_this_test.add(r)
        else:
          self.known_errors.add(r)
          reports_for_this_test.add(r)
          to_report.append(r)

    common.PrintUsedSuppressionsList(self.used_suppressions)

    if not to_report:
      logging.info("PASS: No error reports found")
      return 0

    sys.stdout.flush()
    sys.stderr.flush()
    logging.info("Found %i error reports" % len(to_report))
    for report in to_report:
      self.error_count += 1
      logging.info("Report #%d\n%s" % (self.error_count, report))
    logging.info("Total: %i error reports" % len(to_report))
    sys.stdout.flush()
    return -1
+
+
def main():
  """For testing only. The DrMemoryAnalyzer class should be imported instead.

  Parses file names from the command line and returns the exit status of
  DrMemoryAnalyzer.Report(): 0 when no reports were found, -1 otherwise.
  """
  parser = optparse.OptionParser("usage: %prog <files to analyze>")

  (options, args) = parser.parse_args()
  if not args:
    parser.error("no filename specified")
  filenames = args

  logging.getLogger().setLevel(logging.INFO)
  return DrMemoryAnalyzer().Report(filenames, None, False)
+
+
# Allow running this module as a standalone script; see main()'s docstring.
if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/valgrind/fixed_suppressions.sh b/chromium/tools/valgrind/fixed_suppressions.sh
new file mode 100755
index 00000000000..d2aae911fd2
--- /dev/null
+++ b/chromium/tools/valgrind/fixed_suppressions.sh
@@ -0,0 +1,15 @@
#!/bin/bash
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Scans the memcheck suppressions file for bug_NNN suppression names and
# reports which of those bugs already look closed on the issue tracker, so
# their suppressions can be deleted. Run from the Chromium src/ directory.

# Distinct bug numbers referenced by the suppressions file.
bugs=$(grep -E -o 'bug_[0-9]+' tools/valgrind/memcheck/suppressions.txt |
  sed -e 's/bug_//' | sort -n | uniq)

# A bug page rendering one of these states inside a </span> means closed.
fixed_status='(Fixed|Verified|Duplicate|FixUnreleased|WontFix|Invalid|IceBox)'
fixed_status="${fixed_status}</span>"

for bug in $bugs; do
  echo "Checking bug #$bug"
  # Test the pipeline status directly instead of inspecting $? afterwards,
  # and quote the pattern so the shell doesn't glob/word-split the regex.
  if curl -s "http://code.google.com/p/chromium/issues/detail?id=$bug" |
      grep -E -q "$fixed_status"; then
    echo "Bug #$bug seems to be closed (http://crbug.com/$bug)"
  fi
done
diff --git a/chromium/tools/valgrind/gdb_helper.py b/chromium/tools/valgrind/gdb_helper.py
new file mode 100644
index 00000000000..548ee9474e6
--- /dev/null
+++ b/chromium/tools/valgrind/gdb_helper.py
@@ -0,0 +1,87 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+''' A bunch of helper functions for querying gdb.'''
+
+import logging
+import os
+import re
+import tempfile
+
+GDB_LINE_RE = re.compile(r'Line ([0-9]*) of "([^"]*)".*')
+
+def _GdbOutputToFileLine(output_line):
+ ''' Parse the gdb output line, return a pair (file, line num) '''
+ match = GDB_LINE_RE.match(output_line)
+ if match:
+ return match.groups()[1], match.groups()[0]
+ else:
+ return None
+
def ResolveAddressesWithinABinary(binary_name, load_address, address_list):
  """Batch-resolve addresses in one binary to (file, line num) pairs.

  Runs a single gdb session with the binary's symbols loaded at
  |load_address| and issues one 'info line' query per address.

  Returns:
    A dict mapping each resolvable address to a (file, line num) pair
    (a (None, None) pair when gdb reported 'No line').
  """
  # mode='w' so the str commands below can be written on Python 3 — the
  # NamedTemporaryFile default is binary mode, which rejects str.
  commands = tempfile.NamedTemporaryFile(mode='w')
  commands.write('add-symbol-file "%s" %s\n' % (binary_name, load_address))
  for addr in address_list:
    commands.write('info line *%s\n' % addr)
  commands.write('quit\n')
  commands.flush()
  gdb_commandline = 'gdb -batch -x %s 2>/dev/null' % commands.name
  gdb_pipe = os.popen(gdb_commandline)
  result = gdb_pipe.readlines()

  # gdb emits one 'Line ...' or 'No line ...' answer per query, in order;
  # pair the answers back up with address_list positionally.
  address_count = 0
  ret = {}
  for line in result:
    if line.startswith('Line'):
      ret[address_list[address_count]] = _GdbOutputToFileLine(line)
      address_count += 1
    if line.startswith('No line'):
      ret[address_list[address_count]] = (None, None)
      address_count += 1
  gdb_pipe.close()
  commands.close()
  return ret
+
class AddressTable(object):
  """Object to do batched line number lookups.

  Usage: register binaries with AddBinaryAt() and addresses with Add(),
  run all the gdb queries at once with ResolveAll(), then read results
  back with GetFileLine().
  """

  def __init__(self):
    self._load_addresses = {}  # binary name -> load address
    self._binaries = {}        # binary name -> list of addresses to resolve
    self._all_resolved = False

  def AddBinaryAt(self, binary, load_address):
    """Register a new shared library or executable."""
    self._load_addresses[binary] = load_address

  def Add(self, binary, address):
    """Register a lookup request."""
    if binary == '':
      # logging.warn is a deprecated alias; use warning() with lazy args.
      logging.warning('adding address %s in empty binary?', address)
    # setdefault replaces the explicit membership check of the old code.
    self._binaries.setdefault(binary, []).append(address)
    self._all_resolved = False

  def ResolveAll(self):
    """Carry out all lookup requests."""
    self._translation = {}
    for binary, addresses in self._binaries.items():
      if binary != '' and binary in self._load_addresses:
        load_address = self._load_addresses[binary]
        self._translation[binary] = ResolveAddressesWithinABinary(
            binary, load_address, addresses)
    self._all_resolved = True

  def GetFileLine(self, binary, addr):
    """Get the (filename, linenum) result of a previously-registered lookup
    request.

    Always returns a 2-tuple: (None, None) when the lookup is unknown or
    ResolveAll() has not been run. (The old code fell off the end and
    returned a bare None in the unresolved case, crashing callers that
    unpack the pair.)
    """
    if self._all_resolved:
      if binary in self._translation:
        if addr in self._translation[binary]:
          return self._translation[binary][addr]
    return (None, None)
diff --git a/chromium/tools/valgrind/gtest_exclude/OWNERS b/chromium/tools/valgrind/gtest_exclude/OWNERS
new file mode 100644
index 00000000000..72e8ffc0db8
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/chromium/tools/valgrind/gtest_exclude/ash_unittests.gtest-memcheck.txt b/chromium/tools/valgrind/gtest_exclude/ash_unittests.gtest-memcheck.txt
new file mode 100644
index 00000000000..524292a01a7
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/ash_unittests.gtest-memcheck.txt
@@ -0,0 +1,14 @@
+# http://crbug.com/336351
+AutoclickTest.UserInputCancelsAutoclick
+# http://crbug.com/337149
+AutoclickTest.SynthesizedMouseMovesIgnored
+# http://crbug.com/383384
+SystemGestureEventFilterTest.TwoFingerDragDelayed
+# http://crbug.com/421888
+WebNotificationTrayTest.*TouchFeedback*
+# http://crbug.com/504071
+MagnificationControllerTest.CenterTextCaretNotInsideViewport
+# https://crbug.com/516898
+TrayIMETest.PerformActionOnDetailedView
+# https://crbug.com/598950
+ToastManagerTest.QueueMessage
diff --git a/chromium/tools/valgrind/gtest_exclude/aura_unittests.gtest.txt b/chromium/tools/valgrind/gtest_exclude/aura_unittests.gtest.txt
new file mode 100644
index 00000000000..69ce8dcba52
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/aura_unittests.gtest.txt
@@ -0,0 +1,2 @@
+# Flaky under Valgrind, see http://crbug.com/348331
+WindowEventDispatcherTest.TouchMovesHeld
diff --git a/chromium/tools/valgrind/gtest_exclude/base_unittests.gtest-drmemory_win32.txt b/chromium/tools/valgrind/gtest_exclude/base_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 00000000000..58486452974
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/base_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,45 @@
+# TODO(timurrrr) investigate the failures and enable these tests one-by-one.
+RSA*
+GmockTest.*
+EtwTrace*
+StatsTableTest.*
+ProcessMemoryTest.EnableLFH
+ScopedNativeLibrary.Basic
+# TODO(zhaoqin) investigate the failures and enable it later, 106043
+ConditionVariableTest.LargeFastTaskTest
+# Next test creates a child that crashes, which naturally generates an
+# unaddressable report as well as a handful of leak reports that we don't need
+# to see.
+ProcessUtilTest.GetTerminationStatusCrash
+# See crbug.com/130668
+ProcessUtilTest.GetTerminationStatusKill
+ProcessUtilTest.KillSlowChild
+ProcessUtilTest.SpawnChild
+ScopedProcessInformationTest.Duplicate
+ScopedProcessInformationTest.Swap
+ScopedProcessInformationTest.TakeBoth
+ScopedProcessInformationTest.TakeProcess
+ScopedProcessInformationTest.TakeWholeStruct
+SharedMemoryProcessTest.Tasks
+
+# crbug/144018
+StartupInformationTest.InheritStdOut
+
+# http://crbug.com/308273
+# This only fails occasionally under full mode -- we perturb it enough that
+# it hits deadlocks that it also hits under tsan?
+TraceEventTestFixture.TraceContinuousSampling
+
+# http://crbug.com/93843
+# This fails only under full mode.
+# TODO(bruening): add a gtest_exclude distinction for light vs full mode
+# so we can avoid excluding this for light mode.
+MessageLoopTestTypeUI.RecursiveDenial3
+
+# https://crbug.com/577417
+# Too slow under Dr. Memory (times out tests).
+AllocationRegisterTest.OverflowDeathTest
+
+# https://crbug.com/592753
+# TODO(wfh): Times out under Dr. Memory. Creates 200 threads in child process.
+ScopedHandleTest.MultiProcess
diff --git a/chromium/tools/valgrind/gtest_exclude/base_unittests.gtest.txt b/chromium/tools/valgrind/gtest_exclude/base_unittests.gtest.txt
new file mode 100644
index 00000000000..989459dcb55
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/base_unittests.gtest.txt
@@ -0,0 +1,36 @@
+# This test currently times out in valgrind, see http://crbug.com/9194
+WatchdogTest.AlarmTest
+
+# These tests occasionally hang under Valgrind on Mac. valgrind-darwin r9573
+# Revisit with better valgrind.
+# Valgrind bug: https://bugs.kde.org/show_bug.cgi?id=189661
+TimerTest.RepeatingTimer
+TimerTest.RepeatingTimer_Cancel
+
+# Crashes occasionally, see http://crbug.com/7477
+base::ObserverListThreadSafeTest.CrossThreadObserver
+base::ObserverListThreadSafeTest.CrossThreadNotifications
+
+# Hangs sometimes on linux, see http://crbug.com/22138
+ClipboardTest.*
+
+# These tests trigger a CHECK so they will leak memory. They don't test
+# anything else, so just disable them on valgrind. Bug 28179.
+OutOfMemoryDeathTest.*
+
+# Flaky under slow tools or just when the VM is under load.
+# See http://crbug.com/43972
+ConditionVariableTest.LargeFastTaskTest
+
+# Flaky under Valgrind, see http://crbug.com/55517
+PlatformFile.TouchGetInfoPlatformFile
+
+# Crashes under Valgrind, see http://crbug.com/355436
+OutOfMemoryHandledTest.Unchecked*
+
+# Running under Valgrind breaks TCMalloc tests, see https://crbug.com/567315
+TCMallocTest.*
+TCMallocFreeTest.*
+
+# https://crbug.com/582398#c4
+SecurityTest.NewOverflow
diff --git a/chromium/tools/valgrind/gtest_exclude/base_unittests.gtest_win-8.txt b/chromium/tools/valgrind/gtest_exclude/base_unittests.gtest_win-8.txt
new file mode 100644
index 00000000000..1d24cdf8e52
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/base_unittests.gtest_win-8.txt
@@ -0,0 +1,2 @@
+# Fails natively as well: http://crbug.com/251517
+PEImageTest.EnumeratesPE
diff --git a/chromium/tools/valgrind/gtest_exclude/base_unittests.gtest_win32.txt b/chromium/tools/valgrind/gtest_exclude/base_unittests.gtest_win32.txt
new file mode 100644
index 00000000000..dca1b4228a9
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/base_unittests.gtest_win32.txt
@@ -0,0 +1,11 @@
+# Too slow under Valgrind/Wine and TSan/Windows
+TimeTicks.WinRollover
+
+# Very sensitive to slowdown
+TimeTicks.Deltas
+TimerTest.RepeatingTimer*
+
+# This Windows-native sampling profiler test does not work under our tools
+# because it assumes the original code runs, not the modified version
+# with instrumentation. See http://crbug.com/106829
+SamplingProfilerTest.Sample
diff --git a/chromium/tools/valgrind/gtest_exclude/blink_heap_unittests.gtest-drmemory_win32.txt b/chromium/tools/valgrind/gtest_exclude/blink_heap_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 00000000000..1a858a950c8
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/blink_heap_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,5 @@
+# crbug.com/396172: flaky under Dr. Memory
+HeapTest.ThreadedWeakness
+
+# crbug.com/420699: Crash under Dr. Memory
+HeapTest.CheckAndMarkPointer
diff --git a/chromium/tools/valgrind/gtest_exclude/blink_platform_unittests.gtest_win32.txt b/chromium/tools/valgrind/gtest_exclude/blink_platform_unittests.gtest_win32.txt
new file mode 100644
index 00000000000..8a9c9188ea5
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/blink_platform_unittests.gtest_win32.txt
@@ -0,0 +1,4 @@
+# https://crbug.com/480650: fails
+HarfBuzzShaperTest.ResolveCandidateRunsLatin
+HarfBuzzShaperTest.ResolveCandidateRunsLeadingCommon
+HarfBuzzShaperTest.ResolveCandidateRunsUnicodeVariants
diff --git a/chromium/tools/valgrind/gtest_exclude/browser_tests.gtest-drmemory.txt b/chromium/tools/valgrind/gtest_exclude/browser_tests.gtest-drmemory.txt
new file mode 100644
index 00000000000..80514c28ab2
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/browser_tests.gtest-drmemory.txt
@@ -0,0 +1,159 @@
+# http://crbug.com/400503
+InterstitialUITest.OpenInterstitial
+
+# http://crbug.com/400509
+CustomLauncherPageBrowserTest.LoadPageAndOpenLauncher
+
+# http://crbug.com/403628
+RangeHistoryWebUITest.monthViewGrouped
+
+# http://crbug.com/403687
+ContentScriptCssInjectionTest.ContentScriptInjectsStyles
+MediaFileValidatorTest.UnsupportedExtension
+MessageCenterNotificationsTest.RetrieveBaseParts
+RequirementsCheckerBrowserTest.Check3DExtension
+SSLUITest.TestBadFrameNavigation
+SSLUITest.TestInterstitialJavaScriptProceeds
+SSLUITest.TestRefNavigation
+SSLUITest.TestWSSInvalidCertAndGoForward
+
+# http://crbug.com/403007
+WorkerDevToolsSanityTest.InspectSharedWorker
+
+# http://crbug.com/432444
+WorkerDevToolsSanityTest.PauseInSharedWorkerInitialization
+
+# http://crbug.com/451991
+AppViewTest.TestAppViewMultipleConnects
+BrowserViewTest.DevToolsUpdatesBrowserWindow
+
+# http://crbug.com/451992
+ExtensionLoadingTest.UpgradeAfterNavigatingFromOverriddenNewTabPage
+
+# http://crbug.com/459000
+ClipboardApiTest.HostedAppNoPermission
+
+# http://crbug.com/475172
+BasicExtensionSettingsWebUITest.testUninstall
+
+# https://crbug.com/504192
+NaClBrowserTestGLibc.ExitStatus0
+NaClBrowserTestGLibc.ExitStatusNeg2
+NaClBrowserTestGLibc.PPAPIPPBInstance
+NaClBrowserTestGLibc.PPAPIPPPInstance
+NaClBrowserTestGLibc.SimpleLoad
+NaClBrowserTestGLibc.SuccessfulLoadUMA
+
+# https://crbug.com/512140
+PluginPowerSaverBrowserTest.BackgroundTabPlugins
+PluginPowerSaverBrowserTest.LargeCrossOriginObscured
+
+# https://crbug.com/504885
+# The test takes too long. Excluding with and without PRE_ to crbug.com/581105.
+SmartSessionRestoreMRUTest.PRE_CorrectLoadingOrder
+SmartSessionRestoreMRUTest.CorrectLoadingOrder
+SmartSessionRestoreTest.PRE_CorrectLoadingOrder
+SmartSessionRestoreTest.CorrectLoadingOrder
+
+# https://crbug.com/516368
+PushMessagingBrowserTest.PushEventSuccess
+
+# https://crbug.com/519039
+AppViewTest.KillGuestCommunicatingWithWrongAppView
+
+# https://crbug.com/519087
+ExtensionTabsTest.GetAllWindowsAllTypes
+
+# https://crbug.com/530404
+WakeEventPageTest.ClosedEventPage
+WakeEventPageTest.OpenEventPage
+
+# https://crbug.com/530696
+PolicyTest.ForceSafeSearch
+PolicyTest.URLBlacklist
+
+# https://crbug.com/530727
+MaterialPDFExtensionTest.Bookmark
+MaterialPDFExtensionTest.WhitespaceTitle
+MaterialPDFExtensionTest.ZoomManager
+PDFExtensionTest.WhitespaceTitle
+PDFExtensionTest.ToolbarManager
+
+# https://crbug.com/530729
+SiteDetailsBrowserTest.ManyCrossSiteIframes
+
+# https://crbug.com/534718
+CrSettingsBrowserTest.CrSettingsTest
+
+# https://crbug.com/550653
+CrashRecoveryBrowserTest.LoadInNewTab
+
+# https://crbug.com/561085
+DistillablePageUtilsBrowserTestAdaboost.TestDelegate
+
+# https://crbug.com/561088
+CrExtensionsBrowserTestWithMultipleExtensionTypesInstalled.ExtensionManagerSplitSectionsTest
+CrExtensionsBrowserTestWithMultipleExtensionTypesInstalled.ExtensionManagerAppSectionVisibilityTest
+
+# https://crbug.com/562434
+AppearanceSettingsBrowserTest.uiTests
+
+# https://crbug.com/571263
+DistillablePageUtilsBrowserTestAlways.TestDelegate
+DistillablePageUtilsBrowserTestOG.TestDelegate
+
+# https://crbug.com/577850
+LoginPromptBrowserTest.ShouldReplaceExistingInterstitialWhenNavigated
+
+# https://crbug.com/577853
+OnStartupSettingsBrowserTest.uiTests
+
+# https://crbug.com/579666
+SearchEngineManagerWebUITest.testOpenSearchEngineManager
+
+# https://crbug.com/580668
+ZoomControllerBrowserTest.SettingsZoomAfterSigninWorks
+
+# https://crbug.com/581368
+TracingBrowserTest.TestMemoryInfra
+
+# https://crbug.com/581575
+PredictorBrowserTest.PreconnectCORSAndFetchNonCORS
+
+# https://crbug.com/587669
+ClipboardApiTest.ExtensionNoPermission
+
+# https://crbug.com/1310994
+MetricsServiceBrowserTest.CrashRenderers
+
+# https://crbug.com/592900: Timeouts
+AppViewTest.TestAppViewWithUndefinedDataShouldSucceed
+BasicExtensionSettingsWebUITest.testNonEmptyExtensionList
+BookmarkBubbleSignInDelegateTest.OnSignInLinkClickedReusesBlank
+BrowserOptionsOverlayWebUITest.testNavigationInBackground
+ChromeSitePerProcessTest*
+CrashRecoveryBrowserTest*
+CrExtensions*
+CrSettings*
+DeferredMediaBrowserTest*
+DownloadTestWithShelf.IncognitoDownload
+DurableStorageBrowserTest.Incognito
+ExtensionTabUtil*
+ExtensionLoadingTest*
+HostedAppTest*
+ImageWriterPrivateApiTest.TestWriteFromFile
+InlineInstallPrivateApiTestApp*
+InstallGoodExtensionSettingsWebUITest.showOptions
+MaterialHistoryBrowserTest*
+MimeHandlerViewTest.Abort
+MimeHandlerViewTest.EmbeddedDataUrlObject
+MultilanguageOptionsWebUI*
+PDFExtensionTest*
+PasswordsPrivateApiTest.RequestPlaintextPassword
+PolicyPref*
+ProfileWindow*
+PushMessagingBrowserTest.SubscribeWithoutKeySuccessNotificationsGranted
+SubframeTaskBrowserTest.TaskManagerShowsSubframeTasks
+SyncSetupWebUITestAsync.RestoreSyncDataTypes
+WebUIWebView*
+ZoomControllerBrowserTest.NavigationResetsManualMode
diff --git a/chromium/tools/valgrind/gtest_exclude/browser_tests.gtest-drmemory_win32.txt b/chromium/tools/valgrind/gtest_exclude/browser_tests.gtest-drmemory_win32.txt
new file mode 100644
index 00000000000..00294391623
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/browser_tests.gtest-drmemory_win32.txt
@@ -0,0 +1,211 @@
+# TODO(zhaoqin): File bugs for those failing browser tests.
+
+# Dr.Memory i#1052: https://github.com/DynamoRIO/drmemory/issues/1052
+#
+# The list is too long for gtest_filter, so we exclude the whole
+# test case if any of its tests failed.
+*FLAKY*
+
+# http://crbug.com/450221
+ProfileChooserViewExtensionsTest.LockProfileNoBlockOtherProfileExtensions
+
+# it takes too long to run all browser_tests with Dr.Memory,
+# and we only select subset to run
+# A*
+Act*
+Ad*
+All*
+AppA*
+AppB*
+AppL*
+AppW*
+Auto*
+# B*
+BaseD*
+Bac*App*.R*
+Blue*
+Bit*
+Bo*sT*
+Br*erA*
+Br*erC*
+Br*erN*
+Br*erT*
+# C*
+Cal*
+Cap*
+Cas*
+Clou*cy
+Clear*
+Click*
+Com*
+ChromeA*
+ChromeC*
+ChromeR*
+Col*
+Con*UITest.*
+Con*.Re*
+Con*Se*
+Cont*ns
+Context*
+Coo*
+# D*
+Dia*E*
+Dec*
+Dev*
+Dns*
+DoNot*
+Down*Ext*
+Down*UI*
+Do*adTest.*
+Do*tTest.*All
+Drive*
+# E*
+ECK*
+Ed*
+Enc*
+Ep*
+Er*
+Exe*
+Ext*.D*
+Ext*.P*
+Ext*.N*
+Ext*.RS*
+Ext*Api*
+Ext*Man*
+Ext*Se*
+Ext*re*
+Ext*Re*
+Ext*nB*
+Ext*Crx*
+# F*
+FileSys*
+Find*
+Fl*
+Font*gs
+Ftp*
+# G*
+GcdPri*.*Remove
+GcdPri*.*Query
+Gcm*.R*
+GetA*
+Ge*Br*
+# H*
+Hot*
+His*
+HostR*
+Ht*
+# I*
+IE*
+Ide*
+Inv*
+Insp*
+Inl*Safe*
+# J*
+# K*
+# L*
+Lan*
+Lau*
+Laz*
+LoadT*
+Loc*
+Log*hs
+# M*
+MDn*
+Med*B*
+Med*Se*
+Med*Ge*
+Mock*
+MSC_C*
+MSE*
+Man*t.*
+# N*
+NaCl*lib*
+NaCl*Pn*
+NaCl*Vc*
+NetIn*
+Noti*
+NoSe*
+# O*
+Opt*WebUI*
+Out*PPAPITe*
+# P*
+Pag*
+Pas*erBro*
+Pe*
+Plat*Bro*
+Policy*H*
+# PopupBlockerBrowserTest.TapGestureWithCtrlKey
+Pop*
+Port*
+Prefe*
+PrefsF*
+Print*
+Prox*
+Prof*ta
+Prof*t
+Prer*
+PPA*ib*
+PPA*lT*.*
+PPAPIT*.F*
+PPAPIT*.W*
+PPAPIT*.U*3
+# Q*
+QU*
+# R*
+# RangeHistoryWebUITest.*
+Ran*
+Red*
+Ref*
+Reg*
+Rem*De*
+Rem*Ad*
+Res*
+# RequestContentScriptAPITest.PermissionMatcherAgreementInjection
+Req*.Per*ion
+# S*
+Sa*
+Sea*Pr*
+# SerialApiTest.SerialFakeHardware_1 (http://crbug.com/372452)
+Ser*
+Ses*
+Set*
+Scr*
+# SiteDetailsBrowserTest.ManyIframes times out
+Si*
+Soc*
+Spe*
+SRC*.Pl*
+SRC*.Fr*
+SSL*ed
+Sup*Mo*
+# StartupBrowserCreatorTest.ProfilesLaunchedAfterCrash
+St*orTest.*
+SyncF*
+SyncInt*
+Sys*Di*
+Sys*In*
+Sys*Ne*
+# T*
+Tab*
+Tas*
+Tran*Bu*
+Tran*Up*
+Tran*ag
+# U*
+Us*
+Un*.Br*
+# V*
+# W*
+WebViewT*
+WebRtcB*
+WebUIA*
+WebUIBr*
+WebC*
+WebN*
+WebstoreIn*
+WebstoreSt*
+WebSo*
+Win*.Cl*
+# X*
+# Y*
+# Z*
diff --git a/chromium/tools/valgrind/gtest_exclude/browser_tests.gtest-memcheck.txt b/chromium/tools/valgrind/gtest_exclude/browser_tests.gtest-memcheck.txt
new file mode 100644
index 00000000000..337b3673896
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/browser_tests.gtest-memcheck.txt
@@ -0,0 +1,58 @@
+# Don't run FLAKY or FAILS ui tests under Valgrind.
+# They tend to generate way too many flaky Valgrind reports.
+*FLAKY_*
+*FAILS_*
+
+# NaCl tests fail with Data Execution Prevention error http://crbug.com/104517
+NaClGdbTest.Empty
+PPAPINaClGLibcTest.*
+PPAPINaClNewlibTest.*
+PPAPINaClTest*
+
+# http://crbug.com/109336
+OutOfProcessPPAPITest.View_PageHideShow
+
+# TODO(thestig) File bugs for these failing browser tests.
+AllUrlsApiTest.WhitelistedExtension
+AppBackgroundPageApiTest.NoJsManifestBackgroundPage
+BrowserCloseTest.DownloadsCloseCheck_2
+BrowserCloseTest.DownloadsCloseCheck_5
+BrowserEncodingTest.SLOW_TestEncodingAliasMapping
+BrowserNavigatorTest.Disposition_Bookmarks_DoNothingIfIncognitoIsForced
+BrowserNavigatorTest.Disposition_Incognito
+BrowserNavigatorTest.Disposition_SyncPromo_DoNothingIfIncognitoIsForced
+BrowserTest.ForwardDisabledOnForward
+ClickToPlayPluginTest.Basic
+ClickToPlayPluginTest.LoadAllBlockedPlugins
+ClickToPlayPluginTest.NoCallbackAtLoad
+DevToolsExperimentalExtensionTest.TestDevToolsExperimentalExtensionAPI
+DevToolsExtensionTest.TestDevToolsExtensionMessaging
+DownloadExtensionTest.DownloadExtensionTest_FileIcon_Active
+DownloadExtensionTest.DownloadExtensionTest_FileIcon_History
+DownloadExtensionTest.DownloadExtensionTest_SearchPauseResumeCancelGetFileIconIncognito
+DownloadExtensionTestIncognito.DownloadExtensionTest_SearchPauseResumeCancelGetFileIconIncognito
+ErrorPageTest.DNSError_Basic
+ErrorPageTest.DNSError_GoBack1
+ExecuteScriptApiTest.ExecuteScriptPermissions
+ExtensionApiTest.FontSettingsIncognito
+ExtensionApiTest.PopupBlockingExtension
+ExtensionApiTest.PopupBlockingHostedApp
+FastShutdown.SlowTermination
+IndexedDBLayoutTest.IndexTests
+NetInternalsTest.netInternalsPrerenderViewFail
+NewTabUIBrowserTest.LoadNTPInExistingProcess
+OutOfProcessPPAPITest.NetAddressPrivate_GetAnyAddress
+OutOfProcessPPAPITest.NetAddressPrivate_ReplacePort
+PPAPITest.ImeInputEvent
+PrerenderBrowserTest.*
+PrerenderBrowserTestWithNaCl.PrerenderNaClPluginEnabled
+PrintPreviewWebUITest.TestPrinterList
+PrintPreviewWebUITest.TestPrinterListCloudEmpty
+PrintPreviewWebUITest.TestSectionsDisabled
+PrintWebViewHelperTest.BlockScriptInitiatedPrinting
+SafeBrowsingInterstitialVersions/SafeBrowsingBlockingPageBrowserTest.MalwareDontProceed*
+SafeBrowsingInterstitialVersions/SafeBrowsingBlockingPageBrowserTest.ProceedDisabled*
+SocketApiTest.SocketTCPExtension
+SocketApiTest.SocketUDPExtension
+SSLUITest.TestWSSInvalidCertAndGoForward
+WebViewTest.Shim
diff --git a/chromium/tools/valgrind/gtest_exclude/cast_unittests.gtest-drmemory.txt b/chromium/tools/valgrind/gtest_exclude/cast_unittests.gtest-drmemory.txt
new file mode 100644
index 00000000000..d8511283fe5
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/cast_unittests.gtest-drmemory.txt
@@ -0,0 +1,2 @@
+# https://crbug.com/582177
+End2EndTest.ShoveHighFrameRateDownYerThroat
diff --git a/chromium/tools/valgrind/gtest_exclude/cc_unittests.gtest-drmemory_win32.txt b/chromium/tools/valgrind/gtest_exclude/cc_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 00000000000..563f0c76243
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/cc_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,54 @@
+# DR-i#1476
+LayerTreeHostAnimationTestNoBackgroundTickingWithoutActiveTree.RunSingleThread_DirectRenderer
+LayerTreeHostBlendingPixelTest.*
+LayerTreeHostFiltersPixelTest.*
+SchedulerTest.*
+LayerTreeHostTestContinuousPainting.*
+LayerTreeHostAnimationTestContinuousAnimate.RunMultiThread_DirectRenderer_ImplSidePaint
+LayerTreeHostAnimationTestContinuousAnimate.RunMultiThread_DirectRenderer_MainThreadPaint
+
+# http://crbug.com/416643
+LayerTreeHostCopyRequestTestMultipleRequests.GLRenderer_RunSingleThread
+LayerTreeHostCopyRequestTestMultipleRequests.SoftwareRenderer_RunSingleThread
+LayerTreeHostTestReadyToDrawNonEmpty.*
+LayerTreeHostTestReadyToDrawVisibility.*
+LayerTreeHostTestMaxTransferBufferUsageBytes.*
+LayerTreeHostTestReadyToActivateNonEmpty.*
+
+# http://crbug.com/430400
+PixelResourceTest*
+
+# https://crbug.com/460581
+LayerTreeHostPictureTestRSLLMembershipWithScale.RunMultiThread_DirectRenderer_ImplSidePaint
+
+# https://crbug.com/486619
+LayerTreeHostTestWillBeginImplFrameHasDidFinishImplFrame.RunSingleThread*
+
+# https://crbug.com/516384
+LayerTreeHostTestContinuousDrawWhenCreatingVisibleTiles.RunMultiThread_DelegatingRenderer
+LayerTreeHostTestContinuousDrawWhenCreatingVisibleTiles.RunMultiThread_DirectRenderer
+LayerTreeHostTestCrispUpAfterPinchEnds.RunMultiThread_DelegatingRenderer
+LayerTreeHostTestCrispUpAfterPinchEnds.RunMultiThread_DirectRenderer
+LayerTreeHostTestCrispUpAfterPinchEndsWithOneCopy.RunMultiThread_DelegatingRenderer
+LayerTreeHostTestCrispUpAfterPinchEndsWithOneCopy.RunMultiThread_DirectRenderer
+
+# https://crbug.com/523689
+ImageBackgroundFilter.BackgroundFilterRotated_GL
+
+# https://crbug.com/537621
+LayerTreeHostTestBeginMainFrameTimeIsAlsoImplTime.RunMultiThread_DelegatingRenderer
+LayerTreeHostTestBeginMainFrameTimeIsAlsoImplTime.RunMultiThread_DirectRenderer
+
+# https://crbug.com/567439
+LayerTreeHostCopyRequestCompletionCausesCommit.RunMultiThread_DirectRenderer
+LayerTreeHostCopyRequestTestLostOutputSurface.RunMultiThread_DirectRenderer
+LayerTreeHostTestStartPageScaleAnimation.RunMultiThread_DelegatingRenderer
+
+# https://crbug.com/571268
+LayerTreeHostTestGpuRasterDeviceSizeChanged.*
+
+# https://crbug.com/577781
+GpuRasterizationRasterizesBorderTiles.RunMultiThread_DelegatingRenderer
+GpuRasterizationRasterizesBorderTiles.RunMultiThread_DirectRenderer
+GpuRasterizationRasterizesBorderTiles.RunSingleThread_DelegatingRenderer
+GpuRasterizationRasterizesBorderTiles.RunSingleThread_DirectRenderer
diff --git a/chromium/tools/valgrind/gtest_exclude/chromeos_unittests.gtest.txt b/chromium/tools/valgrind/gtest_exclude/chromeos_unittests.gtest.txt
new file mode 100644
index 00000000000..ee21d7ec472
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/chromeos_unittests.gtest.txt
@@ -0,0 +1,3 @@
+# crbug.com/437847
+AutoConnectHandlerTest.ReconnectOnCertPatternResolved
+NetworkConnectionHandlerTest.ConnectWithCertificateSuccess
diff --git a/chromium/tools/valgrind/gtest_exclude/components_unittests.gtest-drmemory_win32.txt b/chromium/tools/valgrind/gtest_exclude/components_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 00000000000..82265dc5a22
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/components_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,2 @@
+# http://crbug.com/514421
+WatcherMetricsProviderWinTest.DoesNotReportOwnProcessId
diff --git a/chromium/tools/valgrind/gtest_exclude/components_unittests.gtest.txt b/chromium/tools/valgrind/gtest_exclude/components_unittests.gtest.txt
new file mode 100644
index 00000000000..9289e21187b
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/components_unittests.gtest.txt
@@ -0,0 +1,11 @@
+# Flaky, see http://crbug.com/420023
+WebDataServiceAutofillTest.ProfileUpdate
+
+# Fails under valgrind, see http://crbug.com/444516
+AudioDirectiveListTest.*
+
+# http://crbug.com/523462
+PluginsFieldTrialTest.NoPrefLeftBehind
+
+# Fails under valgrind Linux, see http://crbug.com/587664
+DataReductionProxyConfigServiceClientTest.HTTPRequests
diff --git a/chromium/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory.txt b/chromium/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory.txt
new file mode 100644
index 00000000000..676da4d82b3
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory.txt
@@ -0,0 +1,66 @@
+# http://crbug.com/293125
+WebRtcBrowserTest.CanMakeEmptyCallThenAddStreamsAndRenegotiate
+
+# https://crbug.com/376668
+TouchActionBrowserTest.DefaultAuto
+
+# crbug.com/395172
+WebRtcBrowserTest.CanForwardRemoteStream
+WebRtcBrowserTest.CanForwardRemoteStream720p
+
+# crbug.com/399289
+WebRtcBrowserTest.NoCrashWhenConnectChromiumSinkToRemoteTrack
+
+# crbug.com/400490
+PluginTest.OpenPopupWindowWithPlugin
+PluginTest.PluginSingleRangeRequest
+PluginTest.PluginThreadAsyncCall
+PluginTest.PrivateDisabled
+PluginTest.ScheduleTimer
+PluginTest.SelfDelete*
+
+# crbug.com/419396
+WebRtcBrowserTest.CallWithDataAndMedia
+
+# http://crbug.com/456131
+BrowserSideNavigationBrowserTest.BrowserInitiatedNavigations
+BrowserSideNavigationBrowserTest.FailedNavigation
+
+# crbug.com/461489
+ServiceWorkerVersionBrowserTest.TimeoutStartingWorker
+
+# http://crbug.com/464029
+WebRtcBrowserTest.CallAndModifyStream
+
+# http://crbug.com/470507
+SRC_ClearKey/EncryptedMediaTest.Playback_VideoClearAudio_WebM/0
+
+# http://crbug.com/484351
+PluginTest.VerifyPluginWindowRect
+
+# http://crbug.com/484852
+PluginTest.NPObjectIdentity
+
+# http://crbug.com/500070
+DumpAccessibilityEventsTest.AccessibilityEventsListboxNext
+
+# https://crbug.com/500576
+NavigationControllerBrowserTest.DontIgnoreBackAfterNavEntryLimit
+
+# https://crbug.com/504186
+WebRtcBrowserTest.CallInsideIframe
+
+# https://crbug.com/536881
+IndexedDBBrowserTest.DiskFullOnCommit
+
+# https://crbug.com/571264
+WebRtcMediaRecorderTest.MediaRecorderPausePreventsDataavailableFromBeingFired
+
+# https://crbug.com/571548
+RenderWidgetHostViewAuraCopyRequestTest.DedupeFrameSubscriberRequests
+
+# https://crbug.com/577796
+WebRtcMediaRecorderTest.MediaRecorderPeerConnection
+
+# https://crbug.com/592320
+SitePerProcessBrowserTest.SubframeGestureEventRouting
diff --git a/chromium/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory_win32.txt b/chromium/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory_win32.txt
new file mode 100644
index 00000000000..a4eb1709073
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/content_browsertests.gtest-drmemory_win32.txt
@@ -0,0 +1,69 @@
+# crbug.com/389132
+WebRtcAecDumpBrowserTest.*WithAecDump*
+
+# crbug.com/448876
+ScreenOrientationBrowserTest.LockSmoke
+
+# crbug.com/450217
+DumpAccessibilityTreeTest.AccessibilityIframeCoordinates
+
+# https://github.com/DynamoRIO/drmemory/issues/1528
+# Un-analyzed test failures:
+DeviceInertialSensorBrowserTest.MotionNullTestWithAlert
+DeviceInertialSensorBrowserTest.OrientationNullTestWithAlert
+DumpAccessibilityEventsTest.AccessibilityEventsMenuListNext
+File/MediaTest.VideoTulipWebm/0
+Http/MediaTest.VideoBearTheora/0
+Http/MediaTest.VideoTulipWebm/0
+IndexedDBBrowserTest.DeleteCompactsBackingStore
+MSE_ClearKey/EncryptedMediaTest.FrameSizeChangeVideo/0
+MSE_ClearKey/EncryptedMediaTest.Playback_VideoAudio_WebM/0
+OutOfProcessPPAPITest.MediaStreamAudioTrack
+OutOfProcessPPAPITest.NetworkProxy
+OutOfProcessPPAPITest.VideoDecoder
+RenderViewImplTest.DontIgnoreBackAfterNavEntryLimit
+RenderViewImplTest.ReloadWhileSwappedOut
+RenderViewImplTest.SendSwapOutACK
+RenderViewImplTest.StaleNavigationsIgnored
+RenderFrameHostManagerTest.RestoreFileAccessForHistoryNavigation
+RenderFrameHostManagerTest.RestoreSubframeFileAccessForHistoryNavigation
+ResourceFetcherTests.ResourceFetcher404
+ResourceFetcherTests.ResourceFetcherDidFail
+ResourceFetcherTests.ResourceFetcherDownload
+ResourceFetcherTests.ResourceFetcherPost
+ResourceFetcherTests.ResourceFetcherSetHeader
+SitePerProcessBrowserTest.CrashSubframe
+SitePerProcessBrowserTest.RenderViewHostPendingDeletionIsNotReused
+SRC_ClearKey/EncryptedMediaTest.FrameSizeChangeVideo/0
+SRC_ClearKey/EncryptedMediaTest.Playback_AudioClearVideo_WebM/0
+SRC_ClearKey/EncryptedMediaTest.Playback_VideoAudio_WebM/0
+WebContentsViewAuraTest.WebContentsViewReparent
+WebRtcBrowserTest.CallAndVerifyVideoMutingWorks
+WebRtcBrowserTest.CallWithAecDump
+WebRtcBrowserTest.CallWithAecDumpEnabledThenDisabled
+WebRtcBrowserTest.CallWithDataOnly
+WebRtcBrowserTest.CallWithNewVideoMediaStream
+WebRtcBrowserTest.CallWithSctpDataAndMedia
+WebRtcBrowserTest.CallWithSctpDataOnly
+WebRtcBrowserTest.CanSetupAudioAndVideoCall
+WebRtcBrowserTest.CanSetupAudioAndVideoCallWithoutMsidAndBundle
+WebRtcBrowserTest.CanSetupDefaultVideoCall
+WebRtcBrowserTest.CanSetupVideoCallAndDisableLocalVideo
+WebRtcBrowserTest.CanSetupLegacyCall
+WebRtcBrowserTest.CanSetupVideoCallWith16To9AspectRatio
+WebRtcBrowserTest.CanSetupVideoCallWith1To1AspectRatio
+WebRtcBrowserTest.CanSetupVideoCallWith4To3AspectRatio
+WebRtcBrowserTest.NegotiateOfferWithBLine
+WebRtcBrowserTest.CanMakeAudioCallAndThenRenegotiateToVideo
+
+# https://crbug.com/480750
+*_ClearKey/EncryptedMediaTest.Playback_*
+
+# https://crbug.com/487501
+*/MediaTest.VideoBearSilentWebm/0
+
+# https://crbug.com/505714
+NaClBrowserTestGLibc.PPAPICore
+
+# https://crbug.com/589174
+WebContentsImplBrowserTest.SetTitleOnUnload
diff --git a/chromium/tools/valgrind/gtest_exclude/content_unittests.gtest-drmemory_win32.txt b/chromium/tools/valgrind/gtest_exclude/content_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 00000000000..4cc5686e176
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/content_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,13 @@
+# Crashes: http://crbug.com/383054
+BrowserAccessibilityManagerWinTest.TestAccessibleHWND
+
+# http://crbug.com/391513
+GamepadServiceTest.ConnectionsTest
+
+# http://crbug.com/522049
+RenderWidgetCompositorOutputSurfaceTest.SucceedTwice
+RenderWidgetCompositorOutputSurfaceTest.FallbackSuccessNormalSuccess
+
+# http://crbug.com/554665
+WebContentsVideoCaptureDeviceTest.VariableResolution_AnyWithinLimits
+WebContentsVideoCaptureDeviceTest.VariableResolution_FixedAspectRatio
diff --git a/chromium/tools/valgrind/gtest_exclude/content_unittests.gtest.txt b/chromium/tools/valgrind/gtest_exclude/content_unittests.gtest.txt
new file mode 100644
index 00000000000..60e98f2e6ac
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/content_unittests.gtest.txt
@@ -0,0 +1,16 @@
+# False positive, according to crbug.com/386080
+WebLayerImplFixedBoundsTest.CompareToWebLayerImplComplex
+WebLayerImplFixedBoundsTest.CompareToWebLayerImplSimple
+
+# http://crbug.com/418258
+DevToolsManagerTest.TestObserver
+
+# http://crbug.com/430391
+WebDataConsumerHandleImplTest.*
+
+# https://crbug.com/449103
+WebInputEventAuraTest.TestMakeWebKeyboardEventWindowsKeyCode
+
+# Flaky: https://crbug.com/460578
+DesktopCaptureDeviceTest.InvertedFrame
+DesktopCaptureDeviceTest.UnpackedFrame
diff --git a/chromium/tools/valgrind/gtest_exclude/extensions_unittests.gtest-drmemory.txt b/chromium/tools/valgrind/gtest_exclude/extensions_unittests.gtest-drmemory.txt
new file mode 100644
index 00000000000..a4b1dcbb45a
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/extensions_unittests.gtest-drmemory.txt
@@ -0,0 +1,2 @@
+# https://crbug.com/571266
+URLRequestThrottlerSimulation.HelpsInAttack
diff --git a/chromium/tools/valgrind/gtest_exclude/extensions_unittests.gtest-memcheck.txt b/chromium/tools/valgrind/gtest_exclude/extensions_unittests.gtest-memcheck.txt
new file mode 100644
index 00000000000..de74c1f57c4
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/extensions_unittests.gtest-memcheck.txt
@@ -0,0 +1,4 @@
+# These contain un-analyzed test failures and leaks
+# http://crbug.com/402257
+ApiTestBaseTest.*
+SerialApiTest.*
diff --git a/chromium/tools/valgrind/gtest_exclude/gin_unittests.gtest-drmemory.txt b/chromium/tools/valgrind/gtest_exclude/gin_unittests.gtest-drmemory.txt
new file mode 100644
index 00000000000..9fcc7cc637e
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/gin_unittests.gtest-drmemory.txt
@@ -0,0 +1,2 @@
+# http://crbug.com/398549
+GinShellTest.HelloWorld
diff --git a/chromium/tools/valgrind/gtest_exclude/installer_util_unittests.gtest-drmemory_win32.txt b/chromium/tools/valgrind/gtest_exclude/installer_util_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 00000000000..3c956b78c67
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/installer_util_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,6 @@
+# https://crbug.com/460584
+CopyTreeWorkItemTest.NewNameAndCopyTest
+
+# https://crbug.com/504069
+InstallUtilTest.IsPerUserInstall
+SystemLevelChrome/DefaultBrowserBeaconTest.All/0
diff --git a/chromium/tools/valgrind/gtest_exclude/interactive_ui_tests.gtest.txt b/chromium/tools/valgrind/gtest_exclude/interactive_ui_tests.gtest.txt
new file mode 100644
index 00000000000..6ae761f0a91
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/interactive_ui_tests.gtest.txt
@@ -0,0 +1,34 @@
+# These tests fail due to mmap Valgrind failures, see http://crbug.com/66677
+CollectedCookiesTest.DoubleDisplay
+CollectedCookiesTest.NavigateAway
+InfoBarsUITest.TestInfoBarsCloseOnNewTheme
+FastShutdown.SlowTermination
+MouseLeaveTest.TestOnMouseOut
+NotificationsPermissionTest.TestNoUserGestureInfobar
+NotificationsPermissionTest.TestUserGestureInfobar
+
+# These tests fail due to timeouts or limited buildslave support;
+# http://crbug.com/67301
+BrowserFocusTest.InterstitialFocus
+BrowserFocusTest.FindFocusTest
+BrowserFocusTest.FocusTraversalOnInterstitial
+
+# Don't run FLAKY or FAILS tests under Valgrind and TSan
+# as they tend to generate too many reports, see http://crbug.com/67959
+# NB: Can't use FAILS_/FLAKY_ as it will be turned into *.* by chrome_tests.py!
+*.FLAKY*
+*.FAILS*
+
+# Fails under Valgrind, see http://crbug.com/68068
+DevToolsSanityTest.TestPauseWhenScriptIsRunning
+
+# These tests time out under Valgrind, see http://crbug.com/163880
+BrowserFocusTest.FocusOnReload
+CommandsApiTest.Basic
+ExtensionApiTest.NotificationsHasPermissionManifest
+ExtensionCrashRecoveryTest.ReloadTabsWithBackgroundPage
+ExtensionCrashRecoveryTest.TwoExtensionsCrashBothAtOnce
+ExtensionCrashRecoveryTest.TwoExtensionsCrashFirst
+ExtensionCrashRecoveryTest.TwoExtensionsOneByOne
+FullscreenControllerInteractiveTest.TestTabExitsMouseLockOnNavigation
+OmniboxViewTest.Escape
diff --git a/chromium/tools/valgrind/gtest_exclude/ipc_tests.gtest-drmemory_win32.txt b/chromium/tools/valgrind/gtest_exclude/ipc_tests.gtest-drmemory_win32.txt
new file mode 100644
index 00000000000..ac62a9a6e95
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/ipc_tests.gtest-drmemory_win32.txt
@@ -0,0 +1,2 @@
+# TODO(timurrrr): investigate
+IPCSyncChannelTest.*
diff --git a/chromium/tools/valgrind/gtest_exclude/ipc_tests.gtest.txt b/chromium/tools/valgrind/gtest_exclude/ipc_tests.gtest.txt
new file mode 100644
index 00000000000..30a1f89323b
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/ipc_tests.gtest.txt
@@ -0,0 +1,6 @@
+# Takes 27-40 seconds to run.
+IPCSyncChannelTest.ChattyServer
+# Hangs on Linux sometimes. See http://crbug.com/22141
+IPCChannelTest.ChannelTest
+# Crashes under Valgrind. See http://crbug.com/46782
+IPCSyncChannelTest.Multiple
diff --git a/chromium/tools/valgrind/gtest_exclude/libphonenumber_unittests.gtest-drmemory_win32.txt b/chromium/tools/valgrind/gtest_exclude/libphonenumber_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 00000000000..4d2b324bbf8
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/libphonenumber_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,3 @@
+# These fail in a native (uninstrumented) run as well
+PhoneNumberMatcherTest.MatchesWithStrictGroupingLeniency
+PhoneNumberMatcherTest.MatchesWithExactGroupingLeniency
diff --git a/chromium/tools/valgrind/gtest_exclude/media_unittests.gtest-drmemory.txt b/chromium/tools/valgrind/gtest_exclude/media_unittests.gtest-drmemory.txt
new file mode 100644
index 00000000000..d73c5724936
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/media_unittests.gtest-drmemory.txt
@@ -0,0 +1,2 @@
+# http://crbug.com/470517
+VideoRendererImplTest.Underflow
diff --git a/chromium/tools/valgrind/gtest_exclude/media_unittests.gtest-drmemory_win32.txt b/chromium/tools/valgrind/gtest_exclude/media_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 00000000000..8ee264d52f2
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/media_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,4 @@
+# Hangs under Dr. Memory
+# https://github.com/DynamoRIO/drmemory/issues/978
+WinAudioTest.SyncSocketBasic
+AudioBusTest.CopyTo
diff --git a/chromium/tools/valgrind/gtest_exclude/media_unittests.gtest.txt b/chromium/tools/valgrind/gtest_exclude/media_unittests.gtest.txt
new file mode 100644
index 00000000000..26504745420
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/media_unittests.gtest.txt
@@ -0,0 +1,19 @@
+# This test tries to record fake audio in real-time.
+# This appears to be too sensitive to slowdown, see http://crbug.com/49497
+FakeAudioInputTest.BasicCallbacks
+
+# Flaky under all Valgrind-based tools, see http://crbug.com/298771
+PipelineIntegrationTest.MediaSource_Opus_Seeking_WebM
+
+# Flaky under valgrind, http://crbug.com/492882
+PipelineIntegrationTest.BasicPlaybackHashed
+PipelineIntegrationTest.BasicPlaybackLive
+
+# crbug.com/409485, cannot revert due to git migration,
+# exclude the test now
+AudioInputTest.Record
+MacAudioInputTest.AUAudioInputStreamVerifyStereoRecording
+AUHALStreamTest.CreateOpenStartStopClose
+
+# Too slow, takes over 10 minutes to run.
+PipelineIntegrationTest.HD_VP9_WebM
diff --git a/chromium/tools/valgrind/gtest_exclude/message_center_unittests.gtest.txt b/chromium/tools/valgrind/gtest_exclude/message_center_unittests.gtest.txt
new file mode 100644
index 00000000000..a9863781166
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/message_center_unittests.gtest.txt
@@ -0,0 +1,4 @@
+# Fails http://crbug.com/256911
+MessageCenterImplTest.PopupTimersControllerResetTimer
+MessageCenterImplTest.PopupTimersControllerStartMultipleTimers
+MessageCenterImplTest.PopupTimersControllerStartMultipleTimersPause
diff --git a/chromium/tools/valgrind/gtest_exclude/mojo_system_unittests.gtest-drmemory.txt b/chromium/tools/valgrind/gtest_exclude/mojo_system_unittests.gtest-drmemory.txt
new file mode 100644
index 00000000000..5149d6cdd1e
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/mojo_system_unittests.gtest-drmemory.txt
@@ -0,0 +1,6 @@
+# crbug.com/372452
+RemoteMessagePipeTest.HandlePassing
+# crbug.com/440828
+EmbedderTest.MultiprocessChannels
+# crbug.com/464019
+DataPipeImplTest*.TwoPhaseAllOrNone
diff --git a/chromium/tools/valgrind/gtest_exclude/net_unittests.gtest-drmemory_win32.txt b/chromium/tools/valgrind/gtest_exclude/net_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 00000000000..c865baf0e4f
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/net_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,36 @@
+# Times out. See http://crbug.com/134313
+URLRequestTestHTTP.GetTest_ManyCookies
+
+# Dr. Memory hits an assertion:
+# https://github.com/DynamoRIO/drmemory/issues/422
+HttpAuthTest.*
+HttpAuthHandlerFactoryTest.*
+X509CertificateTest.*
+
+# Too many uninits and too slow. TODO(timurrrr): investigate uninits
+ProxyResolverV8Test.*
+
+# Slow
+CookieMonsterTest.GarbageCollectionTriggers
+
+# Flaky, see http://crbug.com/108422
+SSLClientSocketTest.*
+
+# DrM-i#1465
+URLRequestTest.ResolveShortcutTest
+
+# http://crbug.com/400521
+HttpServerTest.RequestWithTooLargeBody
+
+# http://crbug.com/504198
+DiskCacheEntryTest.ExternalAsyncIONoBuffer
+
+# http://crbug.com/554545
+MDnsTest.RefreshQuery
+
+# http://crbug.com/557993
+ProxyResolverV8TracingTest.*
+ProxyResolverV8TracingWrapperTest.*
+
+# https://crbug.com/598953
+*QuicConnectionTest.TooManyReceivedPackets*
diff --git a/chromium/tools/valgrind/gtest_exclude/net_unittests.gtest-memcheck.txt b/chromium/tools/valgrind/gtest_exclude/net_unittests.gtest-memcheck.txt
new file mode 100644
index 00000000000..c3a1b6a65b3
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/net_unittests.gtest-memcheck.txt
@@ -0,0 +1,23 @@
+# These tests leak data intentionally, so are inappropriate for Valgrind tests.
+# Similar list in ../purify/net_unittests.exe.gtest.txt
+# TODO(dkegel): either merge the two files or keep them in sync,
+# see http://code.google.com/p/chromium/issues/detail?id=8951
+DiskCacheBackendTest.AppCacheInvalidEntry
+DiskCacheBackendTest.AppCacheInvalidEntryRead
+DiskCacheBackendTest.AppCacheInvalidEntryWithLoad
+DiskCacheBackendTest.InvalidEntry
+DiskCacheBackendTest.InvalidEntryRead
+DiskCacheBackendTest.InvalidEntryWithLoad
+DiskCacheBackendTest.TrimInvalidEntry
+DiskCacheBackendTest.TrimInvalidEntry2
+DiskCacheBackendTest.InvalidEntryEnumeration
+DiskCacheBackendTest.NewEvictionInvalidEntry
+DiskCacheBackendTest.NewEvictionInvalidEntryRead
+DiskCacheBackendTest.NewEvictionInvalidEntryWithLoad
+DiskCacheBackendTest.NewEvictionTrimInvalidEntry
+DiskCacheBackendTest.NewEvictionTrimInvalidEntry2
+DiskCacheBackendTest.NewEvictionInvalidEntryEnumeration
+DiskCacheBackendTest.ShutdownWithPendingCreate_Fast
+DiskCacheBackendTest.ShutdownWithPendingFileIO_Fast
+DiskCacheBackendTest.ShutdownWithPendingIO_Fast
+EndToEndTests/EndToEndTest.*
diff --git a/chromium/tools/valgrind/gtest_exclude/net_unittests.gtest.txt b/chromium/tools/valgrind/gtest_exclude/net_unittests.gtest.txt
new file mode 100644
index 00000000000..df71748f53c
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/net_unittests.gtest.txt
@@ -0,0 +1,7 @@
+# Very slow under Valgrind.
+KeygenHandlerTest.*SmokeTest
+KeygenHandlerTest.*ConcurrencyTest
+CTLogVerifierTest.VerifiesValidConsistencyProofsFromReferenceGenerator
+
+# Hangs, see http://crbug.com/61908
+DirectoryListerTest.BigDirRecursiveTest
diff --git a/chromium/tools/valgrind/gtest_exclude/net_unittests.gtest_linux.txt b/chromium/tools/valgrind/gtest_exclude/net_unittests.gtest_linux.txt
new file mode 100644
index 00000000000..691167d7484
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/net_unittests.gtest_linux.txt
@@ -0,0 +1,7 @@
+# Flaky. crbug.com/234776
+DiskCacheEntryTest.SimpleCacheStreamAccess
+DiskCacheEntryTest.SimpleCacheGrowData
+DiskCacheEntryTest.SimpleCacheSizeChanges
+
+# Flaky. crbug.com/548006
+Spdy_SpdyNetworkTransactionTest.StartTransactionOnReadCallback_0
diff --git a/chromium/tools/valgrind/gtest_exclude/printing_unittests.gtest-drmemory_win32.txt b/chromium/tools/valgrind/gtest_exclude/printing_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 00000000000..58a6a8da51d
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/printing_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,3 @@
+# CreateDC returns NULL, see http://crbug.com/73652
+PrintingContextTest.Base
+PrintingContextTest.PrintAll
diff --git a/chromium/tools/valgrind/gtest_exclude/remoting_unittests.gtest-drmemory_win32.txt b/chromium/tools/valgrind/gtest_exclude/remoting_unittests.gtest-drmemory_win32.txt
new file mode 100644
index 00000000000..276d6acb4d8
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/remoting_unittests.gtest-drmemory_win32.txt
@@ -0,0 +1,18 @@
+# This test fails on an assertion, see http://crbug.com/57266
+EncoderVp8Test.TestEncoder
+DecoderVp8Test.EncodeAndDecode
+
+# These tests intentionally generate exceptions to verify that a dump is
+# generated during the crash.
+BreakpadWinDeathTest.TestAccessViolation
+BreakpadWinDeathTest.TestInvalidParameter
+BreakpadWinDeathTest.TestDebugbreak
+
+# DrM-i#1465
+RdpClientTest.Basic
+
+# https://crbug.com/581519
+Webrtc/ConnectionTest.Video/0
+
+# https://crbug.com/599769
+RemoteSecurityKeyMessageReaderTest.MultipleMessages
diff --git a/chromium/tools/valgrind/gtest_exclude/remoting_unittests.gtest.txt b/chromium/tools/valgrind/gtest_exclude/remoting_unittests.gtest.txt
new file mode 100644
index 00000000000..4f136a9911f
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/remoting_unittests.gtest.txt
@@ -0,0 +1,5 @@
+# http://crbug.com/241856
+VideoSchedulerTest.StartAndStop
+
+# https://crbug.com/496910
+BackoffTimer.Basic
diff --git a/chromium/tools/valgrind/gtest_exclude/remoting_unittests.gtest_win-8.txt b/chromium/tools/valgrind/gtest_exclude/remoting_unittests.gtest_win-8.txt
new file mode 100644
index 00000000000..eaf36f8f047
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/remoting_unittests.gtest_win-8.txt
@@ -0,0 +1,2 @@
+# Fails natively as well: http://crbug.com/251517
+RdpClientTest.Basic
diff --git a/chromium/tools/valgrind/gtest_exclude/sandbox_linux_unittests.gtest.txt b/chromium/tools/valgrind/gtest_exclude/sandbox_linux_unittests.gtest.txt
new file mode 100644
index 00000000000..ffb28578098
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/sandbox_linux_unittests.gtest.txt
@@ -0,0 +1,5 @@
+# This test intentionally crashes with a NULL deref.
+UnitTests.SEGVDeathWithMessage
+# http://crbug.com/407357
+SandboxBPF.StartSingleThreadedAsMultiThreaded
+SandboxBPF.StartMultiThreadedAsSingleThreaded
diff --git a/chromium/tools/valgrind/gtest_exclude/suppressions.txt b/chromium/tools/valgrind/gtest_exclude/suppressions.txt
new file mode 100644
index 00000000000..e8cc21038a2
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/suppressions.txt
@@ -0,0 +1,39 @@
+{
+ Test DiskCacheBackendTest.InvalidEntryEnumeration leaks.
+ Memcheck:Leak
+ fun:_Znwj
+ fun:_ZN10disk_cache12StorageBlockINS_12RankingsNodeEE12AllocateDataEv
+ fun:_ZN10disk_cache12StorageBlockINS_12RankingsNodeEE4LoadEv
+ fun:_ZN10disk_cache9EntryImpl15LoadNodeAddressEv
+ fun:_ZN10disk_cache11BackendImpl8NewEntryENS_4AddrEPPNS_9EntryImplEPb
+ fun:_ZN10disk_cache11BackendImpl10MatchEntryERKSsjb
+ fun:_ZN10disk_cache11BackendImpl9OpenEntryERKSsPPNS_5EntryE
+ fun:_ZN49DiskCacheBackendTest_InvalidEntryEnumeration_Test8TestBodyEv
+ fun:_ZN7testing4Test3RunEv
+}
+{
+ Test DiskCacheBackendTest.InvalidEntryRead leaks.
+ Memcheck:Leak
+ fun:_Znwj
+ fun:_ZN10disk_cache11BackendImpl8NewEntryENS_4AddrEPPNS_9EntryImplEPb
+ fun:_ZN10disk_cache11BackendImpl10MatchEntryERKSsjb
+ fun:_ZN10disk_cache11BackendImpl9OpenEntryERKSsPPNS_5EntryE
+ fun:_ZN42DiskCacheBackendTest_InvalidEntryRead_Test8TestBodyEv
+ fun:_ZN7testing4Test3RunEv
+}
+{
+ Test DiskCacheBackendTest.InvalidEntryWithLoad leaks.
+ Memcheck:Leak
+ fun:_Znwj
+ fun:_ZN10disk_cache11BackendImpl11CreateEntryERKSsPPNS_5EntryE
+ fun:_ZN46DiskCacheBackendTest_InvalidEntryWithLoad_Test8TestBodyEv
+ fun:_ZN7testing4Test3RunEv
+}
+{
+ Test FlipNetworkTransactionTest.WriteError Bug 29004
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN3net26FlipNetworkTransactionTest17TransactionHelperERKNS_15HttpRequestInfoEPNS_17DelayedSocketDataE
+ fun:_ZN3net42FlipNetworkTransactionTest_WriteError_Test8TestBodyEv
+}
diff --git a/chromium/tools/valgrind/gtest_exclude/sync_unit_tests.gtest-asan.txt b/chromium/tools/valgrind/gtest_exclude/sync_unit_tests.gtest-asan.txt
new file mode 100644
index 00000000000..fc2cc8ec845
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/sync_unit_tests.gtest-asan.txt
@@ -0,0 +1,2 @@
+# Flaky, see http://crbug.com/118370
+SyncSchedulerTest.TransientPollFailure
diff --git a/chromium/tools/valgrind/gtest_exclude/ui_base_unittests.gtest-memcheck.txt b/chromium/tools/valgrind/gtest_exclude/ui_base_unittests.gtest-memcheck.txt
new file mode 100644
index 00000000000..7c5a4bea34c
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/ui_base_unittests.gtest-memcheck.txt
@@ -0,0 +1,8 @@
+# http://crbug.com/222606
+RenderTextTest.DisplayRectShowsCursorLTR
+
+# http://crbug.com/370168
+TouchExplorationTest.*
+
+# http://crbug.com/414191
+MenuControllerTest.OpenClose
diff --git a/chromium/tools/valgrind/gtest_exclude/ui_unittests.gtest-memcheck_linux.txt b/chromium/tools/valgrind/gtest_exclude/ui_unittests.gtest-memcheck_linux.txt
new file mode 100644
index 00000000000..282de6b7762
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/ui_unittests.gtest-memcheck_linux.txt
@@ -0,0 +1,2 @@
+# http://crbug.com/431708
+TouchExplorationTest.TwoFingerTapAndHold
diff --git a/chromium/tools/valgrind/gtest_exclude/unit_tests.gtest-drmemory_win-xp.txt b/chromium/tools/valgrind/gtest_exclude/unit_tests.gtest-drmemory_win-xp.txt
new file mode 100644
index 00000000000..18a4d4508a3
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/unit_tests.gtest-drmemory_win-xp.txt
@@ -0,0 +1,7 @@
+# Crashing (!) since forever, needs analysis.
+BookmarkNodeDataTest.*
+
+# https://github.com/DynamoRIO/drmemory/issues/842
+# Fails assertion. App data corrupted by DrMemory?
+JsonSchemaTest.TestType
+JsonSchemaTest.TestNumber
diff --git a/chromium/tools/valgrind/gtest_exclude/unit_tests.gtest-drmemory_win32.txt b/chromium/tools/valgrind/gtest_exclude/unit_tests.gtest-drmemory_win32.txt
new file mode 100644
index 00000000000..06fd527ed91
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/unit_tests.gtest-drmemory_win32.txt
@@ -0,0 +1,86 @@
+##################################################
+# known Dr. Memory bugs:
+
+# https://github.com/DynamoRIO/drmemory/issues/318
+AudioRendererHostTest.*
+
+##################################################
+# un-analyzed Dr. Memory bugs:
+
+# https://github.com/DynamoRIO/drmemory/issues/979
+FirefoxProfileImporterTest.Firefox35Importer
+
+# https://github.com/DynamoRIO/drmemory/issues/980
+MetricsLogManagerTest.*
+
+# http://crbug.com/514921
+TaskManagerTest.*
+
+##################################################
+# Chromium bugs:
+
+# times out on the bot
+# http://crbug.com/87887
+VideoCaptureHostTest.*
+
+# crashes due to use-after-free's, http://crbug.com/90980
+FirefoxImporterTest.Firefox*NSS3Decryptor
+
+# fails http://crbug.com/92144
+ServiceProcessStateTest.ForceShutdown
+
+# fails sporadically: http://crbug.com/108205
+MultiProcessLockTest.RecursiveLock
+
+# fails to create thread
+# http://crbug.com/144087
+DesktopNotificationServiceTest.SettingsForSchemes
+TemplateURLFetcherTest.*
+
+# times out on the bot.
+# http://crbug.com/148644
+GAIAInfoUpdateServiceTest.*
+ProfileManagerTest.*
+ProfileInfoCacheTest.*
+
+# Failing on the bot. http://crbug.com/168882
+UserCloudPolicyStoreTest.LoadWithInvalidFile
+UserCloudPolicyStoreTest.LoadWithNoFile
+UserCloudPolicyStoreTest.Store
+UserCloudPolicyStoreTest.StoreThenClear
+UserCloudPolicyStoreTest.StoreThenLoad
+UserCloudPolicyStoreTest.StoreTwoTimes
+UserCloudPolicyStoreTest.StoreValidationError
+
+# Failing on the bot. crbug.com/266972
+OneClickSigninBubbleViewTest.ShowBubble
+
+# http://crbug.com/292960
+SyncBackendHostTest.DownloadControlTypes
+SyncBackendHostTest.SilentlyFailToDownloadControlTypes
+
+# DrM-i#1339: https://github.com/DynamoRIO/drmemory/issues/1339
+ExtensionServiceTest.InstallTheme
+
+# http://crbug.com/302156
+TabStripModelTest.FastShutdown
+
+# http://crbug.com/336349
+NTPUserDataLoggerTest.TestLogging
+
+# http://crbug.com/340837
+ChromeBlacklistTrialTest.*
+
+# http://crbug.com/349778
+TranslateManagerRenderViewHostTest.*
+
+# http://crbug.com/403073
+SafeBrowsingModuleVerifierWinTest.VerifyModuleExportModified
+SafeBrowsingModuleVerifierWinTest.VerifyModuleModified
+SafeBrowsingModuleVerifierWinTest.VerifyModuleUnmodified
+
+# http://crbug.com/467004
+SigninErrorNotifierTest.NoErrorAuthStatusProviders
+
+# https://crbug.com/577410
+PluginInfoMessageFilterTest.FindEnabledPlugin
diff --git a/chromium/tools/valgrind/gtest_exclude/unit_tests.gtest-memcheck.txt b/chromium/tools/valgrind/gtest_exclude/unit_tests.gtest-memcheck.txt
new file mode 100644
index 00000000000..ee8e10a3f24
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/unit_tests.gtest-memcheck.txt
@@ -0,0 +1,37 @@
+# Timing issues. http://crbug.com/241051
+ExtensionAlarmsTest.*
+
+# SEGV_MAPERR. http://crbug.com/245797
+ClientSideDetectionHostTest.NavigationCancelsShouldClassifyUrl
+
+# Flaky. http://crbug.com/308309
+CloudPrintURLFetcherBasicTest.HandleRawData
+CloudPrintURLFetcherOverloadTest.Protect
+
+# Test fail: crbug.com/314216
+ExtensionIconManagerTest.LoadComponentExtensionResource
+
+# Test fail: crbug.com/476731
+PluginInfoMessageFilterTest.FindEnabledPlugin
+
+# Test fail: crbug.com/492705
+StatusUploaderTest.NoUploadAfterVideoCapture
+
+# Flaky test: crbug.com/493187
+DesktopMediaListAshTest.WindowOnly
+
+# Crashing test: https://crbug.com/515031
+NetErrorTabHelperTest.*
+
+# Test fail: https://crbug.com/515031
+ChromeBrowserMainExtraPartsMetricsTest.VerifyTouchEventsEnabledIsNotRecordedAfterPostBrowserStart
+
+# Test fail: https://crbug.com/349778
+# Valgrind bot purple: https://crbug.com/522692
+TranslateManagerRenderViewHostTest.FetchLanguagesFromTranslateServer*
+
+# Test fail: https://crbug.com/537064
+ExtensionMessageBubbleTest.*
+DesktopMediaListAshTest.*
+ExtensionTestMessageListenerUnittest.*
+SigninErrorNotifierTest.*
diff --git a/chromium/tools/valgrind/gtest_exclude/unit_tests.gtest.txt b/chromium/tools/valgrind/gtest_exclude/unit_tests.gtest.txt
new file mode 100644
index 00000000000..92fce224a6c
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/unit_tests.gtest.txt
@@ -0,0 +1,23 @@
+# Hangs sometimes; see http://crbug.com/22146
+VisitedLinkEventsTest.Coalescense
+# Hangs (or takes forever?) reliably on bots; see http://crbug.com/23580
+RenderViewTest.ImeComposition
+# Hangs sometimes; see http://crbug.com/52844
+PredictorTest.MassiveConcurrentLookupTest
+
+# Timing out all over the place. Disabling for now. http://crbug.com/149715
+ExtensionWebRequestTest.*
+# Timing out all over the place. Disabling for now. http://crbug.com/149882
+NativeMessagingTest.*
+
+# Failing, see http://crbug.com/408443
+SyncFileSystemServiceTest.SimpleLocalSyncFlow
+
+# Failing on CrOS, see http://crbug.com/408013
+ProxyConfigServiceImplTest.*
+
+# Failing gMock expectations on both Valgrind and Dr. Memory. Possibly timeouts?
+# https://crbug.com/567866
+ExtensionServiceTestSupervised.UpdateWithPermissionIncreaseApprovalMatchingVersion
+ExtensionServiceTestSupervised.UpdateWithPermissionIncreaseApprovalNewVersion
+ExtensionServiceTestSupervised.UpdateWithPermissionIncreaseApprovalOldVersion
diff --git a/chromium/tools/valgrind/gtest_exclude/unit_tests.gtest_linux.txt b/chromium/tools/valgrind/gtest_exclude/unit_tests.gtest_linux.txt
new file mode 100644
index 00000000000..43d190b8188
--- /dev/null
+++ b/chromium/tools/valgrind/gtest_exclude/unit_tests.gtest_linux.txt
@@ -0,0 +1,29 @@
+# Fails under Valgrind; see http://crbug.com/44552
+RenderViewTest.OnHandleKeyboardEvent
+
+# http://crbug.com/139652
+BackgroundApplicationListModelTest.RandomTest
+
+# http://crbug.com/179427
+ExtensionPrefsDelayedInstallInfo.DelayedInstallInfo
+ExtensionServiceTest.*
+
+# http://crbug.com/180335
+AutocompleteActionPredictorTest.RecommendActionURL
+
+# http://crbug.com/238964
+CpuInfoProviderTest.*
+
+# http://crbug.com/336349
+NTPUserDataLoggerTest.TestLogging
+
+# http://crbug.com/403533
+ExtensionPathUtilTest.BasicPrettifyPathTest
+
+# http://crbug.com/483642
+MultiUserWindowManagerChromeOSTest.FullUserSwitchAnimationTests
+
+# http://crbug.com/523600
+ClientCertStoreChromeOSTest.Filter
+ClientCertStoreChromeOSTest.RequestsAfterNSSInitSucceed
+ClientCertStoreChromeOSTest.CertRequestMatching
diff --git a/chromium/tools/valgrind/locate_valgrind.sh b/chromium/tools/valgrind/locate_valgrind.sh
new file mode 100755
index 00000000000..11d4d83d9c7
--- /dev/null
+++ b/chromium/tools/valgrind/locate_valgrind.sh
@@ -0,0 +1,69 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Prints a path to Valgrind binaries to be used for Chromium.
+# Select the valgrind from third_party/valgrind by default,
+# but allow users to override this default without editing scripts and
+# without specifying a commandline option
+
+export THISDIR=`dirname $0`
+
+# Users may supply their own Valgrind by giving its path in the CHROME_VALGRIND env var.
+if [ "$CHROME_VALGRIND" = "" ]
+then
+ # Guess which binaries we should use by uname
+ case "$(uname -a)" in
+ *Linux*x86_64*)
+ PLATFORM="linux_x64"
+ ;;
+ *Linux*86*)
+ PLATFORM="linux_x86"
+ ;;
+ *Darwin*9.[678].[01]*i386*)
+ # Didn't test other kernels.
+ PLATFORM="mac"
+ ;;
+ *Darwin*10.[0-9].[0-9]*i386*)
+ PLATFORM="mac_10.6"
+ ;;
+ *Darwin*10.[0-9].[0-9]*x86_64*)
+ PLATFORM="mac_10.6"
+ ;;
+ *Darwin*11.[0-9].[0-9]*x86_64*)
+ PLATFORM="mac_10.7"
+ ;;
+ *)
+ (echo "Sorry, your platform is not supported:" &&
+ uname -a
+ echo
+ echo "If you're on Mac OS X, please see http://crbug.com/441425") >&2
+ exit 42
+ esac
+
+ # The binaries should be in third_party/valgrind
+ # (checked out from deps/third_party/valgrind/binaries).
+ CHROME_VALGRIND="$THISDIR/../../third_party/valgrind/$PLATFORM"
+
+ # TODO(timurrrr): readlink -f is not present on Mac...
+ if [ "$PLATFORM" != "mac" ] && \
+ [ "$PLATFORM" != "mac_10.6" ] && \
+ [ "$PLATFORM" != "mac_10.7" ]
+ then
+ # Get rid of all "../" dirs
+ CHROME_VALGRIND=$(readlink -f $CHROME_VALGRIND)
+ fi
+fi
+
+if ! test -x $CHROME_VALGRIND/bin/valgrind
+then
+ echo "Oops, could not find Valgrind binaries in your checkout." >&2
+ echo "Please see" >&2
+ echo " http://dev.chromium.org/developers/how-tos/using-valgrind/get-valgrind" >&2
+ echo "for the instructions on how to download pre-built binaries." >&2
+ exit 1
+fi
+
+echo $CHROME_VALGRIND
diff --git a/chromium/tools/valgrind/memcheck/OWNERS b/chromium/tools/valgrind/memcheck/OWNERS
new file mode 100644
index 00000000000..72e8ffc0db8
--- /dev/null
+++ b/chromium/tools/valgrind/memcheck/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/chromium/tools/valgrind/memcheck/PRESUBMIT.py b/chromium/tools/valgrind/memcheck/PRESUBMIT.py
new file mode 100644
index 00000000000..99be83497fd
--- /dev/null
+++ b/chromium/tools/valgrind/memcheck/PRESUBMIT.py
@@ -0,0 +1,78 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details on the presubmit API built into depot_tools.
+"""
+
+import re
+
+def CheckChange(input_api, output_api):
+ """Checks the memcheck suppressions files for bad data."""
+ sup_regex = re.compile('suppressions.*\.txt$')
+ suppressions = {}
+ errors = []
+ check_for_memcheck = False
+ # skip_next_line has 3 possible values:
+ # - False: don't skip the next line.
+ # - 'skip_suppression_name': the next line is a suppression name, skip.
+ # - 'skip_param': the next line is a system call parameter error, skip.
+ skip_next_line = False
+ for f in filter(lambda x: sup_regex.search(x.LocalPath()),
+ input_api.AffectedFiles()):
+ for line, line_num in zip(f.NewContents(),
+ xrange(1, len(f.NewContents()) + 1)):
+ line = line.lstrip()
+ if line.startswith('#') or not line:
+ continue
+
+ if skip_next_line:
+ if skip_next_line == 'skip_suppression_name':
+ if 'insert_a_suppression_name_here' in line:
+ errors.append('"insert_a_suppression_name_here" is not a valid '
+ 'suppression name')
+ if suppressions.has_key(line):
+ if f.LocalPath() == suppressions[line][1]:
+ errors.append('suppression with name "%s" at %s line %s '
+ 'has already been defined at line %s' %
+ (line, f.LocalPath(), line_num,
+ suppressions[line][1]))
+ else:
+ errors.append('suppression with name "%s" at %s line %s '
+ 'has already been defined at %s line %s' %
+ (line, f.LocalPath(), line_num,
+ suppressions[line][0], suppressions[line][1]))
+ else:
+ suppressions[line] = (f, line_num)
+ check_for_memcheck = True;
+ skip_next_line = False
+ continue
+ if check_for_memcheck:
+ if not line.startswith('Memcheck:'):
+ errors.append('"%s" should be "Memcheck:..." in %s line %s' %
+ (line, f.LocalPath(), line_num))
+ check_for_memcheck = False;
+ if line == '{':
+ skip_next_line = 'skip_suppression_name'
+ continue
+ if line == "Memcheck:Param":
+ skip_next_line = 'skip_param'
+ continue
+
+ if (line.startswith('fun:') or line.startswith('obj:') or
+ line.startswith('Memcheck:') or line == '}' or
+ line == '...'):
+ continue
+ errors.append('"%s" is probably wrong: %s line %s' % (line, f.LocalPath(),
+ line_num))
+ if errors:
+ return [output_api.PresubmitError('\n'.join(errors))]
+ return []
+
+def CheckChangeOnUpload(input_api, output_api):
+ return CheckChange(input_api, output_api)
+
+def CheckChangeOnCommit(input_api, output_api):
+ return CheckChange(input_api, output_api)
diff --git a/chromium/tools/valgrind/memcheck/suppressions.txt b/chromium/tools/valgrind/memcheck/suppressions.txt
new file mode 100644
index 00000000000..9d5b4ad330d
--- /dev/null
+++ b/chromium/tools/valgrind/memcheck/suppressions.txt
@@ -0,0 +1,3158 @@
+# There are four kinds of suppressions in this file.
+# 1. third party stuff we have no control over
+#
+# 2. intentional unit test errors, or stuff that is somehow a false positive
+# in our own code, or stuff that is so trivial it's not worth fixing
+#
+# 3. Suppressions for real chromium bugs that are not yet fixed.
+# These should all be in chromium's bug tracking system (but a few aren't yet).
+# Periodically we should sweep this file and the bug tracker clean by
+# running overnight and removing outdated bugs/suppressions.
+#-----------------------------------------------------------------------
+
+# 1. third party stuff we have no control over
+{
+ Uninitialized value in deflate (Third Party)
+ Memcheck:Uninitialized
+ ...
+ fun:MOZ_Z_deflate
+}
+{
+ #gtk developers don't like cleaning up one-time leaks. See http://mail.gnome.org/archives/gtk-devel-list/2004-April/msg00230.html
+ gtk_init_check leak (Third Party)
+ Memcheck:Leak
+ ...
+ fun:gtk_init_check
+}
+{
+ Fontconfig leak?
+ Memcheck:Leak
+ ...
+ fun:XML_ParseBuffer
+ fun:FcConfigParseAndLoad
+}
+{
+ bug_9245_FcConfigAppFontAddFile_leak
+ Memcheck:Leak
+ ...
+ fun:FcConfigAppFontAddFile
+}
+{
+ pango_font_leak_todo_3
+ Memcheck:Leak
+ ...
+ fun:FcFontRenderPrepare
+ ...
+ fun:pango_itemize_with_base_dir
+}
+{
+ pango_font_leak_todo_4
+ Memcheck:Leak
+ ...
+ fun:FcFontRenderPrepare
+ ...
+ fun:pango_ot_buffer_output
+}
+{
+ pango_font_leak_todo_5
+ Memcheck:Leak
+ ...
+ fun:FcFontRenderPrepare
+ ...
+ fun:pango_context_get_metrics
+}
+{
+ pango_font_leak_todo_6
+ Memcheck:Leak
+ ...
+ fun:FcDefaultSubstitute
+ ...
+ fun:pango_itemize_with_base_dir
+}
+{
+ # Similar to fontconfig_bug_8428 below. Reported in
+ # https://bugs.freedesktop.org/show_bug.cgi?id=8215
+ fontconfig_bug_8215
+ Memcheck:Leak
+ fun:malloc
+ fun:FcPatternObjectInsertElt
+ fun:FcPatternObjectAddWithBinding
+}
+{
+ # Fontconfig leak, seen in shard 16 of 20 of ui_tests
+ # See https://bugs.freedesktop.org/show_bug.cgi?id=8428
+ # and http://www.gnome.org/~johan/gtk.suppression
+ fontconfig_bug_8428
+ Memcheck:Leak
+ ...
+ fun:realloc
+ fun:FcPatternObjectInsertElt
+ fun:FcPatternObjectAddWithBinding
+}
+{
+ bug_18590 (Third Party)
+ Memcheck:Leak
+ ...
+ fun:malloc
+ fun:FcConfigValues
+ fun:FcConfigValues
+ ...
+ fun:FcConfigValues
+ fun:FcConfigValues
+}
+{
+ # dlopen leak on error. Chromium issues 268368,273385. See http://sourceware.org/bugzilla/show_bug.cgi?id=12878.
+ bug_268368_273385a
+ Memcheck:Leak
+ fun:calloc
+ fun:_dlerror_run
+ fun:dlopen@@GLIBC_2.2.5
+}
+{
+ bug_268368_273385b
+ Memcheck:Leak
+ fun:calloc
+ fun:_dlerror_run
+ fun:dlsym
+}
+{
+ bug_58730_libc.so_value8 (Third Party)
+ Memcheck:Uninitialized
+ obj:/lib/libc-2.11.1.so
+}
+# net::SniffXML() clearly tries to read < 8 bytes, but strncasecmp() reads 8.
+{
+ bug_58730_strncasecmp_uninit (Third Party)
+ Memcheck:Uninitialized
+ ...
+ fun:strncasecmp
+ fun:_ZN4base11strncasecmpEPKcS1_m
+ fun:_ZN3netL8SniffXMLEPKcmPbPSs
+}
+{
+ bug_76386a (Third Party)
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNSs4_Rep9_S_createE*RKSaIcE
+ ...
+ fun:_ZNSsC1*KS*
+}
+{
+ bug_76386b (Third Party)
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNSs4_Rep9_S_createE*RKSaIcE
+ fun:_ZNSs4_Rep8_M_cloneERKSaIcE*
+}
+{
+ getpwuid_and_getgrouplist
+ Memcheck:Leak
+ fun:malloc
+ fun:nss_parse_service_list
+ fun:__nss_database_lookup
+ obj:*
+ ...
+ fun:get*
+}
+
+# XRandRInfo object seems to be leaking inside XRRFindDisplay. This happens the
+# first time it is called, no matter who the caller is. We have observed this
+# problem with both XRRSelectInput and XRRQueryExtension.
+{
+ bug_119677
+ Memcheck:Leak
+ fun:malloc
+ fun:XRRFindDisplay
+}
+{
+ Ubuntu_Precise_Fontconfig_Optimized_Code
+ Memcheck:Unaddressable
+ fun:FcConfigFileExists
+}
+{
+ Ubuntu_Precise_Itoa_Optimized_Code
+ Memcheck:Uninitialized
+ fun:_itoa_word
+ fun:vfprintf
+ fun:__vsnprintf_chk
+ fun:__snprintf_chk
+}
+{
+ Ubuntu_Precise_Wcscmp_Optimized_Code_In_Tests
+ Memcheck:Uninitialized
+ fun:wcscmp
+ fun:_ZN7testing8internal6String17WideCStringEqualsEPKwS3_
+}
+{
+ mesa_glsl_compile_shader
+ Memcheck:Leak
+ ...
+ fun:_mesa_glsl_compile_shader
+ fun:compile_shader
+ fun:_mesa_CompileShaderARB
+ fun:shared_dispatch_stub_529
+}
+{
+ bug_515618
+ Memcheck:Unaddressable
+ fun:do_lookup_x
+ obj:*
+ fun:_dl_lookup_symbol_x
+}
+
+#-----------------------------------------------------------------------
+# 2. intentional unit test errors, or stuff that is somehow a false positive
+# in our own code, or stuff that is so trivial it's not worth fixing
+
+# See tools/valgrind/memcheck_analyze.py before modifying sanity tests.
+{
+ Memcheck sanity test 01 (memory leak).
+ Memcheck:Leak
+ fun:_Zna*
+ fun:_ZN4base31ToolsSanityTest_MemoryLeak_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 02 (malloc/read left).
+ Memcheck:Unaddressable
+ fun:*ReadValueOutOfArrayBoundsLeft*
+ ...
+ fun:_ZN4base43ToolsSanityTest_AccessesToMallocMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 03 (malloc/read right).
+ Memcheck:Unaddressable
+ fun:*ReadValueOutOfArrayBoundsRight*
+ ...
+ fun:_ZN4base43ToolsSanityTest_AccessesToMallocMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 04 (malloc/write left).
+ Memcheck:Unaddressable
+ fun:*WriteValueOutOfArrayBoundsLeft*
+ ...
+ fun:_ZN4base43ToolsSanityTest_AccessesToMallocMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 05 (malloc/write right).
+ Memcheck:Unaddressable
+ fun:*WriteValueOutOfArrayBoundsRight*
+ ...
+ fun:_ZN4base43ToolsSanityTest_AccessesToMallocMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 06 (new/read left).
+ Memcheck:Unaddressable
+ fun:*ReadValueOutOfArrayBoundsLeft*
+ ...
+ fun:_ZN4base40ToolsSanityTest_AccessesToNewMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 07 (new/read right).
+ Memcheck:Unaddressable
+ fun:*ReadValueOutOfArrayBoundsRight*
+ ...
+ fun:_ZN4base40ToolsSanityTest_AccessesToNewMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 08 (new/write left).
+ Memcheck:Unaddressable
+ fun:*WriteValueOutOfArrayBoundsLeft*
+ ...
+ fun:_ZN4base40ToolsSanityTest_AccessesToNewMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 09 (new/write right).
+ Memcheck:Unaddressable
+ fun:*WriteValueOutOfArrayBoundsRight*
+ ...
+ fun:_ZN4base40ToolsSanityTest_AccessesToNewMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 10 (write after free).
+ Memcheck:Unaddressable
+ fun:_ZN4base43ToolsSanityTest_AccessesToMallocMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 11 (write after delete).
+ Memcheck:Unaddressable
+ fun:_ZN4base40ToolsSanityTest_AccessesToNewMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 12 (array deleted without []).
+ Memcheck:Free
+ ...
+ fun:_ZN4base46ToolsSanityTest_ArrayDeletedWithoutBraces_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 13 (single element deleted with []).
+ Memcheck:Free
+ ...
+ fun:_ZN4base51ToolsSanityTest_SingleElementDeletedWithBraces_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 14 (malloc/read uninit).
+ Memcheck:Uninitialized
+ fun:*ReadUninitializedValue*
+ ...
+ fun:_ZN4base43ToolsSanityTest_AccessesToMallocMemory_Test8TestBodyEv
+}
+{
+ Memcheck sanity test 15 (new/read uninit).
+ Memcheck:Uninitialized
+ fun:*ReadUninitializedValue*
+ ...
+ fun:_ZN4base40ToolsSanityTest_AccessesToNewMemory_Test8TestBodyEv
+}
+{
+ bug_86301 This test explicitly verifies PostTaskAndReply leaks the task if the originating MessageLoop has been deleted.
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base8internal20PostTaskAndReplyImpl16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvELNS0_8CopyMode*
+ fun:_ZN4base10TaskRunner16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvELNS_8internal8CopyMode*
+ fun:_ZN4base74MessageLoopTaskRunnerTest_PostTaskAndReply_DeadReplyLoopDoesNotDelete_Test8TestBodyEv
+}
+{
+ # Non-joinable thread doesn't clean up all state on program exit
+ # very common in ui tests
+ bug_16096 (WontFix)
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNSs4_Rep9_S_createE*RKSaIcE
+ fun:_ZNSs4_Rep8_M_cloneERKSaIcE*
+ fun:_ZNSs7reserveE*
+ fun:_ZNSs6appendEPKc*
+ fun:*StringAppendV*
+ ...
+ fun:_ZN4base12StringPrintfEPKcz
+}
+{
+ # According to dglazkov, these are one-time leaks and intentional.
+ # They may go away if the change to move these off the heap lands.
+ bug_17996 (Intentional)
+ Memcheck:Leak
+ ...
+ fun:_ZN5blink8SVGNames4initEv
+}
+{
+ # This is an on demand initialization which is done and then intentionally
+ # kept around (not freed) while the process is running.
+ intentional_blink_XMLNames_init_leak
+ Memcheck:Leak
+ ...
+ fun:_ZN5blink8XMLNames4initEv
+}
+{
+ # Intentionally leaking NSS to prevent shutdown crashes
+ bug_61585a (Intentional)
+ Memcheck:Leak
+ fun:calloc
+ ...
+ fun:error_get_my_stack
+}
+{
+ FileStream::Context can leak through WorkerPool by design
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3net10FileStreamC1EPNS_6NetLogE
+}
+{
+ # Histograms are used on un-joined threads, and can't be deleted atexit.
+ Histograms via FactoryGet including Linear Custom Boolean and Basic
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN4base*Histogram10FactoryGet*
+}
+{
+ Intentional leak for SampleMap (stores SparseHistogram counts).
+ Memcheck:Leak
+ ...
+ fun:_ZN4base9SampleMap10AccumulateEii
+ ...
+ fun:_ZN4base15SparseHistogram*
+}
+{
+ Intentional leak for PersistentSampleMap (stores SparseHistogram counts).
+ Memcheck:Leak
+ ...
+ fun:_ZN4base19PersistentSampleMap10AccumulateEii
+ ...
+ fun:_ZN4base15SparseHistogram*
+}
+{
+ Intentional leak for BucketRanges.
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base12_GLOBAL__N_120CreateRangesFromDataEPijm
+ fun:_ZN4base28PersistentHistogramAllocator15CreateHistogramEPNS0_23PersistentHistogramDataE
+}
+{
+ bug_73299 (Intentional)
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content17WorkerProcessHost20CreateMessageFiltersEi
+ fun:_ZN7content17WorkerProcessHost4InitE*
+ fun:_ZN7content17WorkerServiceImpl24CreateWorkerFromInstanceENS_17WorkerProcessHost14WorkerInstanceE
+ fun:_ZN7content17WorkerServiceImpl12CreateWorkerE*
+ fun:_ZN7content19WorkerMessageFilter14OnCreateWorkerERK31ViewHostMsg_CreateWorker_ParamsPi
+}
+{
+ bug_83345 (Needs_Annotation)
+ Memcheck:Leak
+ ...
+ fun:_ZN4base*23LeakyLazyInstanceTraits*NewEPv
+ fun:_ZN4base12LazyInstance*LeakyLazyInstanceTraits*PointerEv
+}
+{
+ bug_87500_a (Intentional)
+ Memcheck:Leak
+ ...
+ fun:_ZN10disk_cache9BackendIO23ExecuteBackendOperationEv
+ fun:_ZN10disk_cache9BackendIO16ExecuteOperationEv
+}
+{
+ bug_79322 (Intentional)
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN4base*StatisticsRecorderTest_*_Test8TestBodyEv
+}
+{
+ # According to dglazkov, UA style sheets are intentionally leaked.
+ # As such, treat any leaks originating from parseUASheet as intentional.
+ bug_121729 (Intentional)
+ Memcheck:Leak
+ ...
+ fun:_ZN5blinkL12parseUASheetEPKcj
+}
+{
+ bug_121729_b (Intentional)
+ Memcheck:Leak
+ ...
+ fun:_ZN5blinkL12parseUASheetERKN3WTF6StringE
+}
+{
+ intentional_see_bug_156466
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3ash5ShellC1EPNS_13ShellDelegateE
+ fun:_ZN3ash5Shell14CreateInstanceEPNS_13ShellDelegateE
+}
+
+# According to http://crbug.com/242672, this is a false positive caused by
+# BitVector storing the pointer shifted right by 1. Leak checkers aren't smart
+# enough to comprehend that.
+{
+ bug_242672
+ Memcheck:Leak
+ fun:malloc
+ ...
+ fun:_ZN3WTF9BitVector13OutOfLineBits6createEm
+ fun:_ZN3WTF9BitVector15resizeOutOfLineEm
+ fun:_ZN3WTF9BitVector10ensureSizeEm
+ fun:_ZN5blink10UseCounterC1Ev
+ fun:_ZN5blink4PageC1ERNS0_11PageClientsE
+}
+
+# http://crbug.com/269278 causes really widespread, flaky leaks in
+# value objects that own some memory. These suppressions will cover
+# all such objects, even though it's possible to get real leaks that
+# look the same way (e.g. by allocating such an object in an arena).
+{
+ bug_269278a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base4Bind*Callback*BindState*
+}
+{
+ bug_269278b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocator*allocate*
+ fun:_ZNSt12_Vector_base*_M_allocate*
+}
+
+# Externally allocated objects referenced by V8 objects can currently
+# be diagnosed as (false) leaks, since memcheck does not know how to
+# handle V8 leaks. More detailed discussion in http://crbug.com/328552
+{
+ bug_328552
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10StringImpl19createUninitializedEjRPh
+}
+{
+ bug_364821 (WontFix)
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN3WTF10RefCountedIN5blink11ScriptStateEEnwEm
+ fun:_ZN5blink11ScriptState6createEN2v85LocalINS1_7ContextEEEN3WTF10PassRefPtrINS_15DOMWrapperWorldEEE
+ ...
+ fun:_ZN3WTF15FunctionWrapperIMN5blink12WorkerThread*
+}
+{
+ bug_383956
+ Memcheck:Leak
+ fun:calloc
+ fun:_ZN18hb_object_header_t6createEj
+ fun:_Z*hb_object_createI9hb_face_tEPT_v
+ fun:hb_face_create_for_tables
+ fun:_ZN3gfx12_GLOBAL__N_118CreateHarfBuzzFaceEP10SkTypeface
+ fun:_ZN3gfx12_GLOBAL__N_118CreateHarfBuzzFontEP10SkTypefacei
+ fun:_ZN3gfx18RenderTextHarfBuzz8ShapeRunEPNS_8internal15TextRunHarfBuzzE
+ fun:_ZN3gfx18RenderTextHarfBuzz12EnsureLayoutEv
+}
+
+{
+ bug_391510
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base21CancelableTaskTracker16NewTrackedTaskId*
+ fun:_ZN14HistoryService14ScheduleDBTask*
+ fun:_ZN7history19URLIndexPrivateData26ScheduleUpdateRecentVisits*
+}
+{
+ bug_399852_a
+ Memcheck:Uninitialized
+ fun:_ZN5blink14DateComponents9parseTimeERKN3WTF6StringEjRj
+ fun:_ZNK5blink13TimeInputType29parseToDateComponentsInternalERKN3WTF6StringEPNS_14DateComponentsE
+ fun:_ZNK5blink24BaseDateAndTimeInputType21parseToDateComponentsERKN3WTF6StringEPNS_14DateComponentsE
+}
+{
+ bug_399852_b
+ Memcheck:Uninitialized
+ fun:_ZN5blink12_GLOBAL__N_117parseJSONInternalIhEEN3WTF10PassRefPtrINS_9JSONValueEEEPKT_j
+ fun:_ZN5blink9parseJSONERKN3WTF6StringE
+ fun:_ZN5blink*InspectorBackendDispatcher*
+ ...
+ fun:_ZN5blink*WebDevToolsAgent*
+ fun:_ZN7content*DevToolsAgent*
+}
+{
+ bug_399852_c
+ Memcheck:Uninitialized
+ fun:_ZN5blinkL21extractRangeComponentEPN3WTF6StringERKNS0_6RefPtrINS_10JSONObjectEEERKS1_Rj
+ fun:_ZN5blinkL22jsonRangeToSourceRangeEPN3WTF6StringEPNS_23InspectorStyleSheetBase*
+ fun:_ZN5blink17InspectorCSSAgent*
+ fun:_ZThn40_N5blink17InspectorCSSAgent*
+ fun:_ZN5blink30InspectorBackendDispatcherImpl*
+ fun:_ZN5blink30InspectorBackendDispatcherImpl8dispatchERKN3WTF6StringE
+ fun:_ZN5blink19InspectorController27dispatchMessageFromFrontendERKN3WTF6StringE
+ fun:_ZN5blink20WebDevToolsAgentImpl26dispatchOnInspectorBackendERKNS_9WebStringE
+ fun:_ZN7content13DevToolsAgent28OnDispatchOnInspectorBackendERKSs
+}
+{
+ bug_399852_d
+ Memcheck:Uninitialized
+ fun:_ZN5blink14DateComponents10parseMonthERKN3WTF6StringEjRj
+ ...
+ fun:_ZNK5blink24BaseDateAndTimeInputType21parseToDateComponentsERKN3WTF6StringEPNS_14DateComponentsE
+ fun:_ZNK5blink24BaseDateAndTimeInputType15typeMismatchForERKN3WTF6StringE
+ fun:_ZNK5blink24BaseDateAndTimeInputType13sanitizeValueERKN3WTF6StringE
+}
+{
+ bug_418234
+ Memcheck:Uninitialized
+ fun:_ZN10extensions19ExtensionManagement7RefreshEv
+ fun:_ZN10extensions19ExtensionManagement22OnExtensionPrefChangedEv
+}
+{
+ bug_464462
+ Memcheck:Uninitialized
+ fun:_ZN7content14ManifestParser16ParseIconDensityERKN4base15DictionaryValueE
+ fun:_ZN7content14ManifestParser10ParseIconsERKN4base15DictionaryValueE
+ fun:_ZN7content14ManifestParser5ParseEv
+ fun:_ZN7content18ManifestParserTest21ParseManifestWithURLsERKN4base16BasicStringPieceISsEERK4GURLS8_
+ fun:_ZN7content18ManifestParserTest13ParseManifestERKN4base16BasicStringPieceISsEE
+ fun:_ZN7content45ManifestParserTest_IconDensityParseRules_Test8TestBodyEv
+}
+{
+ bug_415092
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base22PosixDynamicThreadPool7AddTaskEPNS_11PendingTaskE
+ fun:_ZN4base22PosixDynamicThreadPool8PostTaskERKN15tracked_objects8LocationERKNS_8CallbackIF*
+ fun:_ZN4base12_GLOBAL__N_114WorkerPoolImpl8PostTaskERKN15tracked_objects8LocationERKNS_8CallbackIF*
+ fun:_ZN4base10WorkerPool8PostTaskERKN15tracked_objects8LocationERKNS_8CallbackIF*
+}
+
+
+#-----------------------------------------------------------------------
+# 3. Suppressions for real chromium bugs that are not yet fixed.
+# These should all be in chromium's bug tracking system (but a few aren't yet).
+
+{
+ # webkit leak? See http://crbug.com/9503
+ bug_9503
+ Memcheck:Leak
+ ...
+ fun:_ZN19TestWebViewDelegate24UpdateSelectionClipboardEb
+}
+{
+ # very common in ui tests
+ bug_16091
+ Memcheck:Leak
+ ...
+ fun:_ZN4base11MessageLoop22AddDestructionObserverEPNS0_19DestructionObserverE
+ ...
+ fun:_ZN3IPC11SyncChannel11SyncContext15OnChannelOpenedEv
+}
+{
+ # very common in ui tests
+ bug_16092
+ Memcheck:Leak
+ fun:*
+ fun:_ZN4base11MessageLoopC1ENS0_4TypeE
+ fun:_ZN4base6Thread10ThreadMainEv
+}
+{
+ # very common in ui tests
+ bug_16092b
+ Memcheck:Leak
+ ...
+ fun:_ZNSt11_Deque_baseIN4base11PendingTaskESaIS1_EE17_M_initialize_mapE*
+ ...
+ fun:_ZN4base11MessageLoopC1ENS0_4TypeE
+}
+{
+ # very common in ui tests
+ bug_16092c
+ Memcheck:Leak
+ ...
+ fun:_ZNSt14priority_queueIN11MessageLoop11PendingTaskESt6vectorIS1_SaIS1_EESt4lessIS1_EEC1ERKS6_RKS4_
+ fun:_ZN4base11MessageLoopC1ENS0_4TypeE
+ fun:_ZN4base6Thread10ThreadMainEv
+}
+{
+ # also bug 17979. It's a nest of leaks.
+ bug_17385
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN3IPC12ChannelProxy7Context13CreateChannel*Channel4ModeE
+ fun:_ZN3IPC12ChannelProxy4Init*
+ ...
+ fun:_ZN3IPC11SyncChannel*Channel4Mode*Listener*
+}
+{
+ bug_17540_16661
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base19MessagePumpLibevent19WatchFileDescriptor*FileDescriptorWatcherEPNS0_7WatcherE
+ fun:_ZN4base16MessageLoopForIO19WatchFileDescriptor*MessagePumpLibevent21FileDescriptorWatcherEPNS2_7WatcherE
+ ...
+ fun:_ZN3IPC*Channel*ConnectEv
+ fun:_ZN3IPC12ChannelProxy7Context15OnChannelOpenedEv
+}
+{
+ bug_16661
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base19MessagePumpLibevent3RunEPNS_11MessagePump8DelegateE
+ fun:_ZN4base11MessageLoop10RunHandlerEv
+}
+{
+ # slight variant of the above
+ bug_19371a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN4base13WaitableEvent7EnqueueEPNS0_6WaiterE
+ fun:_ZN4base13WaitableEvent9TimedWaitERKNS_9TimeDeltaE
+ fun:_ZN4base18MessagePumpDefault3RunEPNS_11MessagePump8DelegateE
+}
+{
+ bug_19775_a
+ Memcheck:Leak
+ ...
+ fun:malloc
+ fun:sqlite3MemMalloc
+ fun:mallocWithAlarm
+ fun:sqlite3Malloc
+ ...
+ fun:sqlite3VdbeExec
+ fun:sqlite3Step
+ fun:sqlite3_step
+ fun:sqlite3_exec
+ fun:_ZN3sql10Connection7Execute*
+ ...
+ fun:_ZN7history*Database*Create*
+}
+{
+ bug_19775_c
+ Memcheck:Leak
+ ...
+ fun:openDatabase
+ fun:sqlite3_open
+ fun:_ZN3sql10Connection12OpenInternalERKSs
+}
+{
+ bug_19775_g
+ Memcheck:Leak
+ fun:malloc
+ fun:sqlite3MemMalloc
+ fun:mallocWithAlarm
+ fun:sqlite3Malloc
+ fun:sqlite3ParserAlloc
+ fun:sqlite3RunParser
+ fun:sqlite3Prepare
+ fun:sqlite3LockAndPrepare
+ fun:sqlite3_prepare*
+}
+{
+ bug_19775_h
+ Memcheck:Leak
+ ...
+ fun:malloc
+ fun:sqlite3MemMalloc
+ fun:mallocWithAlarm
+ fun:sqlite3Malloc
+ ...
+ fun:yy_reduce
+}
+# The following three suppressions are related to the workers code.
+{
+ bug_27837
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN19WebSharedWorkerStub9OnConnectEii
+}
+{
+ bug_32085
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorIN7content21NotificationRegistrar6RecordEE8allocate*
+ fun:_ZNSt12_Vector_baseIN7content21NotificationRegistrar6RecordESaIS*
+ fun:_ZNSt6vectorIN7content21NotificationRegistrar6RecordESaIS2_EE13_M_insert_auxEN9__gnu_cxx17__normal_iteratorIPS2_S*
+ fun:_ZNSt6vectorIN7content21NotificationRegistrar6RecordESaIS*
+ fun:_ZN7content21NotificationRegistrar3Add*
+}
+{
+ bug_32273_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3IPC12ChannelProxy4SendEPNS_7MessageE
+ fun:_ZN3IPC11SyncChannel15SendWithTimeoutEPNS_7MessageEi
+ fun:_ZN3IPC11SyncChannel4SendEPNS_7MessageE
+ fun:_ZN11ChildThread4SendEPN3IPC7MessageE
+ fun:_ZN12RenderThread4SendEPN3IPC7MessageE
+ fun:_ZN12RenderWidget4SendEPN3IPC7MessageE
+ fun:_ZN12RenderWidget16DoDeferredUpdateEv
+ fun:_ZN12RenderWidget20CallDoDeferredUpdateEv
+}
+{
+ bug_32273_b
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN24BrowserRenderProcessHost4SendEPN3IPC7MessageE
+ fun:_ZN16RenderWidgetHost4SendEPN3IPC7MessageE
+}
+{
+ bug_32624_b
+ Memcheck:Leak
+ fun:malloc
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ obj:*
+ fun:secmod_ModuleInit
+}
+{
+ bug_32624_c
+ Memcheck:Leak
+ ...
+ fun:malloc
+ ...
+ fun:PORT_Alloc_Util
+ ...
+ fun:PK11_InitPin
+}
+{
+ bug_32624_f
+ Memcheck:Leak
+ ...
+ fun:CERT_PKIXVerifyCert
+ fun:_ZN3net12_GLOBAL__N_114PKIXVerifyCertE*
+}
+{
+ bug_32624_g
+ Memcheck:Leak
+ ...
+ fun:CERT_VerifySignedData
+ fun:cert_VerifyCertChain
+ fun:CERT_VerifyCertChain
+ fun:CERT_VerifyCert
+}
+{
+ bug_64887_a
+ Memcheck:Uninitialized
+ ...
+ fun:*vfprintf
+ ...
+ fun:_ZN7testing*PrintByteSegmentInObjectTo*
+ ...
+ fun:_ZN7testing*PrintBytesInObjectTo*
+ fun:_ZN7testing9internal220PrintBytesInObjectToEPKh*
+ fun:_ZN7testing9internal220TypeWithoutFormatter*
+}
+{
+ bug_64887_b
+ Memcheck:Uninitialized
+ ...
+ fun:_ZNSolsEx
+ fun:_ZN7testing9internal220TypeWithoutFormatterIN5media7PreloadELNS0_8TypeKindE1EE10PrintValueERKS3_PSo
+ fun:_ZN7testing9internal2lsIcSt11char_traitsIcEN5media7PreloadEEERSt13basic_ostreamIT_T0_ESA_RKT1_
+ fun:_ZN16testing_internal26DefaultPrintNonContainerToIN5media7PreloadEEEvRKT_PSo
+ fun:_ZN7testing8internal14DefaultPrintToIN5media7PreloadEEEvcNS0_13bool_constantILb0EEERKT_PSo
+ fun:_ZN7testing8internal7PrintToIN5media7PreloadEEEvRKT_PSo
+ fun:_ZN7testing8internal16UniversalPrinterIN5media7PreloadEE5PrintERKS3_PSo
+ fun:_ZN7testing8internal18TuplePrefixPrinter*
+ fun:_ZN7testing8internal12PrintTupleToINSt3tr15tupleIN5media7PreloadENS2*
+ fun:_ZN7testing8internal7PrintToIN5media7PreloadEEEvRKNSt3tr15tupleIT*
+ fun:_ZN7testing8internal16UniversalPrinterINSt3tr15tupleIN5media7PreloadENS2*
+ fun:_ZN7testing8internal14UniversalPrintINSt3tr15tupleIN5media7PreloadENS2*
+ fun:_ZNK7testing8internal18FunctionMockerBaseIFvN5media7PreloadEEE32UntypedDescribeUninterestingCallEPKvPSo
+ fun:_ZN7testing8internal25UntypedFunctionMockerBase17UntypedInvokeWithEPKv
+ fun:_ZN7testing8internal18FunctionMockerBaseIFvN5media7PreloadEEE10InvokeWithERKNSt3tr15tupleIS3*
+ fun:_ZN7testing8internal14FunctionMockerIFvN5media7PreloadEEE6InvokeES3_
+ fun:_ZN5media11MockDemuxer10SetPreloadENS_7PreloadE
+}
+{
+ bug_64887_c
+ Memcheck:Uninitialized
+ ...
+ fun:_ZNSolsEx
+ fun:_ZN7testing9internal220TypeWithoutFormatterIN5media7PreloadELNS0_8TypeKindE1EE10PrintValueERKS3_PSo
+ fun:_ZN7testing9internal2lsIcSt11char_traitsIcEN5media7PreloadEEERSt13basic_ostreamIT_T0_ESA_RKT1_
+ fun:_ZN16testing_internal26DefaultPrintNonContainerToIN5media7PreloadEEEvRKT_PSo
+ fun:_ZN7testing8internal14DefaultPrintToIN5media7PreloadEEEvcNS0_13bool_constantILb0EEERKT_PSo
+ fun:_ZN7testing8internal7PrintToIN5media7PreloadEEEvRKT_PSo
+ fun:_ZN7testing8internal16UniversalPrinterIN5media7PreloadEE5PrintERKS3_PSo
+ fun:_ZN7testing8internal18TuplePrefixPrinter*
+ fun:_ZN7testing8internal12PrintTupleToINSt3tr15tupleIN5media7PreloadENS2*
+ fun:_ZN7testing8internal7PrintToIN5media7PreloadEEEvRKNSt3tr15tupleIT*
+ fun:_ZN7testing8internal16UniversalPrinterINSt3tr15tupleIN5media7PreloadENS2*
+ fun:_ZN7testing8internal14UniversalPrintINSt3tr15tupleIN5media7PreloadENS2*
+ fun:_ZNK7testing8internal18FunctionMockerBaseIFvN5media7PreloadEEE32UntypedDescribeUninterestingCallEPKvPSo
+ fun:_ZN7testing8internal25UntypedFunctionMockerBase17UntypedInvokeWithEPKv
+ fun:_ZN7testing8internal18FunctionMockerBaseIFvN5media7PreloadEEE10InvokeWithERKNSt3tr15tupleIS3*
+ fun:_ZN7testing8internal14FunctionMockerIFvN5media7PreloadEEE6InvokeES3_
+ fun:_ZN5media11MockDemuxer10SetPreloadENS_7PreloadE
+}
+{
+ bug_65940_a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN3IPC12ChannelProxy7Context13CreateChannelERKNS_13ChannelHandleERKNS_7Channel4ModeE
+ fun:_ZN3IPC12ChannelProxy4InitERKNS_13ChannelHandleENS_7Channel4ModeEP11MessageLoopb
+ fun:_ZN3IPC12ChannelProxyC2ERKNS_13ChannelHandleENS_7Channel4ModeEP11MessageLoopPNS0_7ContextEb
+ fun:_ZN3IPC11SyncChannelC1ERKNS_13ChannelHandleENS_7Channel4ModeEPNS4_8ListenerEP11MessageLoopbPN4base13WaitableEventE
+}
+{
+ bug_65940_b
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN3IPC11SyncChannelC1ERKNS_13ChannelHandleENS_7Channel4ModeEPNS_8ListenerEPN4base22SingleThreadTaskRunnerEbPNS8_13WaitableEventE
+ fun:_ZN7content11ChildThread4InitEv
+ fun:_ZN7content11ChildThreadC2ERKSs
+}
+{
+ bug_65940_c
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorI13scoped_refptrIN3IPC12ChannelProxy13MessageFilterEEE8allocateEmPKv
+ fun:_ZNSt12_Vector_baseI13scoped_refptrIN3IPC12ChannelProxy13MessageFilterEESaIS4_EE11_M_allocateEm
+ fun:_ZNSt6vectorI13scoped_refptrIN3IPC12ChannelProxy13MessageFilterEESaIS4_EE13_M_insert_auxEN9__gnu_cxx17__normal_iteratorIPS4_S6_EERKS4_
+ fun:_ZNSt6vectorI13scoped_refptrIN3IPC12ChannelProxy13MessageFilterEESaIS4_EE9push_backERKS4_
+ fun:_ZN3IPC12ChannelProxy7Context11OnAddFilterEv
+}
+{
+ bug_65940_d
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content11ChildThread4InitEv
+ fun:_ZN7content11ChildThreadC*
+ ...
+ fun:_ZN7content21WebRTCAudioDeviceTest5SetUpEv
+}
+{
+ bug_65940_e
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content16RenderThreadImpl4InitEv
+ fun:_ZN7content16RenderThreadImplC*
+ ...
+ fun:_ZN7content21WebRTCAudioDeviceTest5SetUpEv
+}
+{
+ bug_66853_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN11ProfileImpl14GetHostZoomMapEv
+ ...
+ fun:_ZNK17ProfileImplIOData6Handle27GetMainRequestContextGetterEv
+ fun:_ZN11ProfileImpl17GetRequestContextEv
+ fun:_ZN19SafeBrowsingService5StartEv
+ fun:_ZN19SafeBrowsingService10InitializeEv
+ fun:_ZN22ResourceDispatcherHost10InitializeEv
+ fun:_ZN18BrowserProcessImpl28CreateResourceDispatcherHostEv
+ fun:_ZN18BrowserProcessImpl24resource_dispatcher_hostEv
+ fun:_ZN16ExtensionService4InitEv
+ fun:_ZN11ProfileImpl14InitExtensionsE*
+ fun:_ZN14ProfileManager10AddProfileEP7Profileb
+}
+{
+ bug_67142
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN16ChildProcessHost13CreateChannelEv
+ fun:_ZN14GpuProcessHost4InitEv
+}
+{
+ bug_67261a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN3sql10Connection18GetUniqueStatementEPKc
+ fun:_ZN3sql10Connection18GetCachedStatementERKNS_11StatementIDEPKc
+ fun:_ZN8appcache16AppCacheDatabase22PrepareCachedStatementERKN3sql11StatementIDEPKcPNS1_9StatementE
+}
+{
+ bug_67261b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3sql10Connection18GetUniqueStatementEPKc
+ fun:_ZN3sql10Connection18GetCachedStatementERKNS_11StatementIDEPKc
+ fun:_ZN3sql9MetaTable19PrepareGetStatementEPNS_9StatementEPKc
+ ...
+ fun:_ZN7storage13QuotaDatabase28IsOriginDatabaseBootstrappedEv
+}
+{
+ bug_67553
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZNSt3mapISs13scoped_refptrIK9ExtensionESt4lessISsESaISt4pairIKSsS3_EEEixERS7_
+ fun:_ZN16ExtensionInfoMap12AddExtensionEPK9Extension
+}
+{
+ Bug_69934_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN*NPObjectProxy10NPAllocateEP4_NPPP7NPClass
+ fun:_NPN_CreateObject
+ fun:_ZN5blink11WebBindings12createObjectEP4_NPPP7NPClass
+}
+{
+ Bug_69934_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3IPC11SyncMessage13GenerateReplyEPKNS_7MessageE
+ fun:_ZN3IPC17SyncMessageSchema*
+}
+{
+ bug_71728
+ Memcheck:Leak
+ fun:_Znw*
+ fun:*DownloadFileTest5SetUpEv
+}
+{
+ bug_72698_a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN13ProfileIOData20InitializeOnUIThreadEP7Profile
+}
+{
+ bug_73415
+ Memcheck:Unaddressable
+ fun:_ZN23AccessibilityController36shouldDumpAccessibilityNotificationsEv
+ fun:_ZN11WebViewHost29postAccessibilityNotificationERKN5blink22WebAccessibilityObjectENS0_28WebAccessibilityNotificationE
+ fun:_ZN5blink16ChromeClientImpl29postAccessibilityNotificationEPN7blink19AccessibilityObjectENS1_13AXObjectCache14AXNotificationE
+ fun:_ZN5blink13AXObjectCache24postPlatformNotificationEPNS_19AccessibilityObjectENS0_14AXNotificationE
+}
+{
+ bug_73675
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN20LayoutTestController13waitUntilDoneERKN3WTF6VectorI10CppVariantLj0EEEPS2_
+ fun:_ZN13CppBoundClass14MemberCallbackI20LayoutTestControllerE3runERKN3WTF6VectorI10CppVariantLj0EEEPS5_
+ fun:_ZN13CppBoundClass6invokeEPvPK10_NPVariantjPS1_
+ fun:_ZN11CppNPObject6invokeEP8NPObjectPvPK10_NPVariantjPS3_
+ fun:_ZN5blink18npObjectInvokeImplERKN2v89ArgumentsENS_18InvokeFunctionTypeE
+ fun:_ZN5blink21npObjectMethodHandlerERKN2v89ArgumentsE
+ fun:_ZN2v88internal19HandleApiCallHelperILb0EEEPNS0_11MaybeObjectENS0_47_GLOBAL__N_v8_src_builtins.cc_*BuiltinArgumentsILNS0_21BuiltinExtraArgumentsE1EEE
+ obj:*
+}
+{
+ bug_75019
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN14GpuDataManagerC1Ev
+ fun:_ZN22DefaultSingletonTraitsI14GpuDataManagerE3NewEv
+ fun:_ZN9SingletonI14GpuDataManager22DefaultSingletonTraitsIS0_ES0_E3getEv
+ fun:_ZN14GpuDataManager11GetInstanceEv
+ fun:_Z11BrowserMainRK18MainFunctionParams
+ fun:_ZN20InProcessBrowserTest5SetUpEv
+}
+{
+ bug_76197a
+ Memcheck:Unaddressable
+ fun:sqlite3DbFree
+ fun:releaseMemArray
+ fun:sqlite3VdbeDelete
+ fun:sqlite3VdbeFinalize
+ fun:sqlite3_finalize
+ fun:_ZN3sql10Connection12StatementRef5CloseEv
+ fun:_ZN3sql10Connection12StatementRefD2Ev
+ fun:_ZN3sql10Connection12StatementRefD1Ev
+ fun:_ZNK4base10RefCountedIN3sql10Connection12StatementRefEE7ReleaseEv
+ fun:_ZN13scoped_refptrIN3sql10Connection12StatementRefEED2Ev
+ fun:_ZN13scoped_refptrIN3sql10Connection12StatementRefEED1Ev
+ fun:_ZNSt4pairIKN3sql11StatementIDE13scoped_refptrINS0_10Connection12StatementRefEEED2Ev
+ fun:_ZNSt4pairIKN3sql11StatementIDE13scoped_refptrINS0_10Connection12StatementRefEEED1Ev
+ fun:_ZN9__gnu_cxx13new_allocatorISt4pairIKN3sql11StatementIDE13scoped_refptrINS2_10Connection12StatementRefEEEE7destroyEPS9_
+ fun:_ZNSt8_Rb_treeIN3sql11StatementIDESt4pairIKS1_13scoped_refptrINS0_10Connection12StatementRefEEESt10_Select1stIS8_ESt4lessIS1_ESaIS8_EE12destroy_nodeEPSt13_Rb_tree_nodeIS8_E
+ fun:_ZNSt8_Rb_treeIN3sql11StatementIDESt4pairIKS1_13scoped_refptrINS0_10Connection12StatementRefEEESt10_Select1stIS8_ESt4lessIS1_ESaIS8_EE8_M_eraseEPSt13_Rb_tree_nodeIS8_E
+ fun:_ZNSt8_Rb_treeIN3sql11StatementIDESt4pairIKS1_13scoped_refptrINS0_10Connection12StatementRefEEESt10_Select1stIS8_ESt4lessIS1_ESaIS8_EE5clearEv
+ fun:_ZNSt3mapIN3sql11StatementIDE13scoped_refptrINS0_10Connection12StatementRefEESt4lessIS1_ESaISt4pairIKS1_S5_EEE5clearEv
+ fun:_ZN3sql10Connection5CloseEv
+ fun:_ZN3sql10ConnectionD2Ev
+ fun:_ZN3sql10ConnectionD1Ev
+ fun:_ZN7history16InMemoryDatabaseD0Ev
+}
+{
+ bug_76197b
+ Memcheck:Unaddressable
+ ...
+ fun:sqlite3_step
+ fun:sqlite3_exec
+ fun:_ZN3sql10Connection7ExecuteEPKc
+ fun:_ZN7history11URLDatabase31CreateKeywordSearchTermsIndicesEv
+ fun:_ZN7history16InMemoryDatabase12InitFromDiskE*
+ fun:_ZN7history22InMemoryHistoryBackend4InitE*
+}
+{
+ bug_79654_a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZNSt3setIP16RenderWidgetHostSt4lessIS1_ESaIS1_EE6insertERKS1_
+ fun:_ZN*9TabLoader12TabIsLoadingEP24NavigationControllerImpl
+ fun:_ZN*18SessionRestoreImpl21ProcessSessionWindowsEPSt6vectorIP13SessionWindowSaIS3_EE
+ fun:_ZN*18SessionRestoreImpl12OnGotSessionEiPSt6vectorIP13SessionWindowSaIS3_EE
+}
+{
+ bug_79654_b
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:*RenderWidgetHost*
+ ...
+ fun:_ZNSt3setIP16RenderWidgetHostSt4lessIS1_ESaIS1_EE6insertERKS1_
+ fun:*TabLoader7ObserveEiRKN7content18NotificationSourceERKNS1_19NotificationDetailsE
+ fun:_ZN23NotificationServiceImpl*
+ fun:_ZN15WebContentsImpl12SetIsLoading*
+ fun:_ZN15WebContentsImpl14RenderViewGone*
+}
+{
+ bug_84265
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN12LoginHandler6CreateEPN3net17AuthChallengeInfoEPNS0_10URLRequestE
+ fun:_Z17CreateLoginPromptPN3net17AuthChallengeInfoEPNS_10URLRequestE
+ fun:_ZN22ResourceDispatcherHost14OnAuthRequiredEPN3net10URLRequestEPNS0_17AuthChallengeInfoE
+ fun:_ZN3net13URLRequestJob21NotifyHeadersCompleteEv
+}
+{
+ bug_84770_a
+ Memcheck:Unaddressable
+ fun:_ZN5blink21FrameLoaderClientImpl12allowPluginsEb
+ fun:_ZN5blink14SubframeLoader12allowPluginsENS_28ReasonForCallingAllowPluginsE
+}
+{
+ bug_84770_b
+ Memcheck:Unaddressable
+ fun:_ZN5blink21FrameLoaderClientImpl15allowJavaScriptEb
+ fun:_ZN5blink16ScriptController17canExecuteScriptsENS_33ReasonForCallingCanExecuteScriptsE
+}
+{
+ bug_84770_c
+ Memcheck:Unaddressable
+ fun:_ZN5blink21FrameLoaderClientImpl20allowScriptExtensionERKN3WTF6StringEi
+ fun:_ZN5blink16V8DOMWindowShell16createNewContextEN2v86HandleINS1_6ObjectEEEi
+}
+{
+ bug_86481
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocator*FilePath*allocate*
+ fun:_ZNSt11_Deque_base*FilePath*_M_allocate_map*
+ fun:_ZNSt11_Deque_base*FilePath*_M_initialize_map*
+ fun:_ZNSt11_Deque_baseI*FilePath*
+ fun:_ZNSt5dequeI*FilePath*
+ fun:_ZNSt5stackI*FilePath*deque*
+ fun:_ZN9file_util14FileEnumeratorC1E*
+ fun:_ZN7history20ExpireHistoryBackend25DoExpireHistoryIndexFilesEv
+}
+{
+ bug_90215_c
+ Memcheck:Leak
+ ...
+ fun:_ZN3net13URLRequestJob21NotifyRestartRequiredEv
+ fun:_ZN8appcache21AppCacheURLRequestJob13BeginDeliveryEv
+}
+{
+ bug_90215_d
+ Memcheck:Leak
+ ...
+ fun:_ZN8appcache19AppCacheStorageImpl23RunOnePendingSimpleTaskEv
+}
+{
+ bug_90215_e
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN8appcache15AppCacheService10InitializeE*
+ fun:_ZN21ChromeAppCacheService20InitializeOnIOThreadE*
+}
+{
+ bug_90215_f
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN26TransportSecurityPersisterC1EPN3net22TransportSecurityStateERKN4base8FilePathEb
+ fun:_ZNK13ProfileIOData4InitEPSt3mapISs10linked_ptrIN3net20URLRequestJobFactory15ProtocolHandlerEESt4lessISsESaISt4pairIKSsS5_EEE
+ fun:_ZN12_GLOBAL__N_114FactoryForMain6CreateEv
+ fun:_ZN29ChromeURLRequestContextGetter20GetURLRequestContextEv
+ fun:_ZN7content21ChromeAppCacheService20InitializeOnIOThreadERKN4base8FilePathEPNS_15ResourceContextEPN3net23URLRequestContextGetterE13scoped_refptrIN5quota20SpecialStoragePolicyEE
+}
+{
+ bug_90240
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN2pp5proxy26PPP_Instance_Private_Proxy22OnMsgGetInstanceObjectEiNS0_24SerializedVarReturnValueE
+}
+{
+ bug_93730_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN14ServiceProcess10InitializeEP16MessageLoopForUIRK11CommandLineP19ServiceProcessState
+ fun:_Z18ServiceProcessMainRK18MainFunctionParams
+ ...
+ fun:ChromeMain
+ fun:main
+}
+{
+ bug_93730_b
+ Memcheck:Leak
+ fun:_Zna*
+ fun:_ZN4base13LaunchProcessERKSt6vectorISsSaISsEERKNS_13LaunchOptionsEPi
+ fun:_ZN4base13LaunchProcessERK11CommandLineRKNS_13LaunchOptionsEPi
+ fun:_ZN21ServiceProcessControl8Launcher5DoRunEv
+}
+{
+ bug_93730_c
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_Z17NewRunnableMethodIN21ServiceProcessControl8LauncherEMS1_FvvEEP14CancelableTaskPT_T0_
+ fun:_ZN21ServiceProcessControl8Launcher5DoRunEv
+}
+{
+ bug_94764
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN8remoting13ClientSession11UnpressKeysEv
+ fun:_ZN8remoting34ClientSessionTest_UnpressKeys_Test8TestBodyEv
+}
+{
+ bug_95448
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorISt13_Rb_tree_nodeISt4pairIKSsPN4base5ValueEEEE8allocateEjPKv
+ fun:_ZNSt8_Rb_treeISsSt4pairIKSsPN4base5ValueEESt10_Select1stIS5_ESt4lessISsESaIS5_EE11_M_get_nodeEv
+ fun:_ZNSt8_Rb_treeISsSt4pairIKSsPN4base5ValueEESt10_Select1stIS5_ESt4lessISsESaIS5_EE14_M_create_nodeERKS5_
+ fun:_ZNSt8_Rb_treeISsSt4pairIKSsPN4base5ValueEESt10_Select1stIS5_ESt4lessISsESaIS5_EE10_M_insert_EPKSt18_Rb_tree_node_baseSE_RKS5_
+ fun:_ZNSt8_Rb_treeISsSt4pairIKSsPN4base5ValueEESt10_Select1stIS5_ESt4lessISsESaIS5_EE17_M_insert_unique_ESt23_Rb_tree_const_iteratorIS5_ERKS5_
+ fun:_ZNSt3mapISsPN4base5ValueESt4lessISsESaISt4pairIKSsS2_EEE6insertESt17_Rb_tree_iteratorIS7_ERKS7_
+ fun:_ZNSt3mapISsPN4base5ValueESt4lessISsESaISt4pairIKSsS2_EEEixERS6_
+ fun:_ZN4base15DictionaryValue23SetWithoutPathExpansionERKSsPNS_5ValueE
+ fun:_ZN4base15DictionaryValue3SetERKSsPNS_5ValueE
+ fun:_ZN4base15DictionaryValue9SetStringERKSsRKSbItNS_20string16_char_traitsESaItEE
+ fun:_ZN11PluginPrefs23CreatePluginFileSummaryERKN6webkit13WebPluginInfoE
+ fun:_ZN11PluginPrefs19OnUpdatePreferencesESt6vectorIN6webkit13WebPluginInfoESaIS2_EES0_INS1_5npapi11PluginGroupESaIS6_EE
+}
+{
+ bug_98867
+ Memcheck:Jump
+ obj:*
+ obj:*
+ obj:*
+}
+{
+ bug_100982
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN5blink12RenderRegion22setRenderBoxRegionInfoEPKNS_9RenderBoxEiib
+ fun:_ZNK7blink9RenderBox19renderBoxRegionInfoEPNS_12RenderRegionEiNS0_24RenderBoxRegionInfoFlagsE
+ ...
+ fun:_ZN5blink11RenderBlock5paintERNS_9PaintInfoERKNS_8IntPointE
+}
+{
+ bug_101750
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEj
+ fun:_ZN3WTF7HashSetIPN7blink16SVGStyledElementENS_7PtrHashIS3_EENS_10HashTraitsIS3_EEEnwEj
+ fun:_ZN5blink21SVGDocumentExtensions18addPendingResourceERKN3WTF12AtomicStringEPNS_16SVGStyledElementE
+}
+{
+ bug_101781_d
+ Memcheck:Uninitialized
+ fun:_ZN7testing8internal11CmpHelperGEIddEENS_15AssertionResultEPKcS4_RKT_RKT0_
+ fun:_ZN3gfx31JPEGCodec_EncodeDecodeRGBA_Test8TestBodyEv
+}
+{
+ bug_102327a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN15tracked_objects10ThreadData10InitializeEv
+ fun:_ZN15tracked_objects10ThreadData30InitializeAndSetTrackingStatusEb
+ fun:_ZN15tracked_objects10ThreadData29ShutdownSingleThreadedCleanupEb
+}
+{
+ bug_102327d
+ Memcheck:Uninitialized
+ fun:_ZN15tracked_objects9DeathData11RecordDeathEiii
+ fun:_ZN15tracked_objects10ThreadData11TallyADeathERKNS_6BirthsEii
+ fun:_ZN15tracked_objects10ThreadData31TallyRunOnNamedThreadIfTrackingERKN4base12TrackingInfoERKNS_11TrackedTimeES7_
+}
+{
+ Intentional leak of stl map during thread cleanup in profiler
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNK15tracked_objects10ThreadData26OnThreadTerminationCleanupEv
+}
+{
+ bug_102831_a
+ Memcheck:Leak
+ ...
+ fun:_ZN17PluginLoaderPosix19LoadPluginsInternalEv
+}
+{
+ bug_104447
+ Memcheck:Leak
+ ...
+ fun:HB_OpenTypeShape
+ fun:arabicSyriacOpenTypeShape
+ fun:HB_ArabicShape
+ fun:HB_ShapeItem
+ fun:_ZN5blink21ComplexTextController11shapeGlyphsEv
+ fun:_ZN5blink21ComplexTextController13nextScriptRunEv
+ fun:_ZN5blink21ComplexTextController14widthOfFullRunEv
+ fun:_ZNK7blink4Font24floatWidthForComplexTextERKNS_7TextRunEPN3WTF7HashSetIPKNS_14SimpleFontDataENS4_7PtrHashIS8_EENS4_10HashTraitsIS8_EEEEPNS_13GlyphOverflowE
+ fun:_ZNK7blink4Font5widthERKNS_7TextRunERiRN3WTF6StringE
+ fun:_ZN5blink14SVGTextMetricsC1EPNS_19RenderSVGInlineTextERKNS_7TextRunE
+ fun:_ZN5blink14SVGTextMetrics21measureCharacterRangeEPNS_19RenderSVGInlineTextEjj
+ fun:_ZNK7blink30SVGTextLayoutAttributesBuilder25propagateLayoutAttributesEPNS_12RenderObjectERN3WTF6VectorINS_23SVGTextLayoutAttributesELm0EEERjRt
+}
+{
+ bug_104806_a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:*tracked_objects*ThreadData*TallyABirth*
+}
+{
+ bug_104806_b
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:*tracked_objects*ThreadData*TallyADeath*
+}
+{
+ bug_105744b
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZNSt6vector*9push_back*
+ fun:_ZN4skia19ConvolutionFilter1D9AddFilterEiPKsi
+ fun:_ZN4skia12_GLOBAL__N_112ResizeFilter14ComputeFiltersEiiiffPNS_19ConvolutionFilter1DE
+ fun:_ZN4skia12_GLOBAL__N_112ResizeFilterC1ENS_15ImageOperations12ResizeMethodEiiiiRK7SkIRect
+ fun:_ZN4skia15ImageOperations11ResizeBasicERK8SkBitmapNS0_12ResizeMethodEiiRK7SkIRect
+ fun:_ZN4skia15ImageOperations6ResizeERK8SkBitmapNS0_12ResizeMethodEiiRK7SkIRect
+ fun:_ZN4skia15ImageOperations6ResizeERK8SkBitmapNS0_12ResizeMethodEii
+ fun:_ZN24ChromeRenderViewObserver21CaptureFrameThumbnailEPN5blink7WebViewEiiP8SkBitmapP14ThumbnailScore
+ fun:_ZN24ChromeRenderViewObserver16CaptureThumbnailEv
+ fun:_ZN24ChromeRenderViewObserver15CapturePageInfoEib
+}
+{
+ bug_105907
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN4skia14BGRAConvolve2DEPKhibRKNS_19ConvolutionFilter1DES4_iPhb
+ fun:_ZN4skia15ImageOperations11ResizeBasicE*
+ fun:_ZN4skia15ImageOperations6ResizeE*
+}
+{
+ bug_106912
+ Memcheck:Leak
+ ...
+ fun:*tracked_objects*ThreadData*InitializeThreadContext*
+ fun:*base*PlatformThread*SetName*
+}
+{
+ bug_112278
+ Memcheck:Uninitialized
+ fun:fetch_texel_2d_f_rgba8888
+ ...
+ fun:sample_nearest_2d
+ fun:fetch_texel_lod
+ fun:fetch_texel
+ fun:_mesa_execute_program
+ fun:run_program*
+ fun:_swrast_exec_fragment_program
+ fun:shade_texture_span
+ fun:_swrast_write_rgba_span
+ fun:general_triangle
+ ...
+ fun:_swrast_Triangle
+ fun:triangle_rgba
+ ...
+ fun:run_render
+ fun:_tnl_run_pipeline
+ fun:_tnl_draw_prims
+ fun:_tnl_vbo_draw_prims
+}
+{
+ bug_122717_use_after_free
+ Memcheck:Unaddressable
+ fun:__pthread_mutex_unlock_usercnt
+ fun:_ZN4base8internal8LockImpl6UnlockEv
+ fun:_ZN4base4Lock7ReleaseEv
+ fun:_ZN4base8AutoLockD1Ev
+ fun:_ZN5gdata15GDataFileSystem21RunTaskOnIOThreadPoolERKN4base8CallbackIFvvEEE
+}
+{
+ bug_122717_leak
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base8internal20PostTaskAndReplyImpl16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEESA_
+ fun:_ZN4base10TaskRunner16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEES9_
+ fun:_ZN7content13BrowserThread16PostTaskAndReplyENS0_2IDERKN15tracked_objects8LocationERKN4base8CallbackIFvvEEESB_
+}
+{
+ bug_123307
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN3WTF16fastZeroedMallocEm
+ ...
+ fun:_ZN5blink12_GLOBAL__N_111V8ObjectMapIN2v86ObjectEjE3setERKNS2_6HandleIS3_EERKj
+ fun:_ZN5blink12_GLOBAL__N_110Serializer10greyObjectERKN2v86HandleINS2_6ObjectEEE
+ fun:_ZN5blink12_GLOBAL__N_110Serializer11doSerializeEN2v86HandleINS2_5ValueEEEPNS1_9StateBaseE
+ fun:_ZN5blink12_GLOBAL__N_110Serializer9serializeEN2v86HandleINS2_5ValueEEE
+ fun:_ZN5blink21SerializedScriptValueC1EN2v86HandleINS1_5ValueEEEPN3WTF6VectorINS5_6RefPtrINS_11MessagePortEEELm1EEEPNS6_INS7_INS5_11ArrayBufferEEELm1EEERb
+ fun:_ZN5blink21SerializedScriptValue6createEN2v86HandleINS1_5ValueEEEPN3WTF6VectorINS5_6RefPtrINS_11MessagePortEEELm1EEEPNS6_INS7_INS5_11ArrayBufferEEELm1EEERb
+ fun:_ZN5blinkL25handlePostMessageCallbackERKN2v89ArgumentsEb
+ fun:_ZN5blink11V8DOMWindow19postMessageCallbackERKN2v89ArgumentsE
+ fun:_ZN2v88internalL19HandleApiCallHelperILb0EEEPNS0_11MaybeObjectENS0_12_GLOBAL__N_116BuiltinArgumentsILNS0_21BuiltinExtraArgumentsE1EEEPNS0_7IsolateE
+ fun:_ZN2v88internalL21Builtin_HandleApiCallENS0_12_GLOBAL__N_116BuiltinArgumentsILNS0_21BuiltinExtraArgumentsE1EEEPNS0_7IsolateE
+}
+{
+ bug_124488
+ Memcheck:Leak
+ fun:malloc
+ fun:strdup
+ ...
+ fun:_ZN34CopyTextureCHROMIUMResourceManager10InitializeEv
+ fun:_ZN3gpu5gles216GLES2DecoderImpl10InitializeERK13scoped_refptrIN3gfx9GLSurfaceEERKS2_INS3_9GLContextEERKNS3_4SizeERKNS0_18DisallowedFeaturesEPKcRKSt6vectorIiSaIiEE
+ fun:_ZN6webkit3gpu18GLInProcessContext10InitializeERKN3gfx4SizeEPS1_PKcPKiNS2_13GpuPreferenceE
+ fun:_ZN6webkit3gpu18GLInProcessContext22CreateOffscreenContextEPS1_RKN3gfx4SizeES2_PKcPKiNS3_13GpuPreferenceE
+ fun:_ZN6webkit3gpu46WebGraphicsContext3DInProcessCommandBufferImpl10InitializeEN5blink20WebGraphicsContext3D10AttributesEPS3_
+}
+{
+ bug_124496
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN8notifier26ProxyResolvingClientSocket23ProcessProxyResolveDoneEi
+}
+{
+ bug_127716
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3gfx5ImageC1ERK8SkBitmap
+ fun:_ZN16BrowserThemePack16LoadRawBitmapsToE*
+ fun:_ZN16BrowserThemePack18BuildFromExtensionEPK9Extension
+ fun:_ZN45BrowserThemePackTest_CanBuildAndReadPack_Test8TestBodyEv
+}
+{
+ bug_130362
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN12invalidation20NewPermanentCallbackINS_22InvalidationClientImplES1_St4pairINS_6StatusESsEEEPN4base8CallbackIFvT1_EEEPT_MT0_FvS7_E
+ fun:_ZN12invalidation22InvalidationClientImpl34ScheduleStartAfterReadingStateBlobEv
+ fun:_ZN12invalidation22InvalidationClientImpl5StartEv
+ fun:_ZN6syncer24SyncInvalidationListener5StartERKSsS2_S2_RKSt3mapIN8syncable9ModelTypeElSt4lessIS5_ESaISt4pairIKS5_lEEERKN12browser_sync10WeakHandleINS_24InvalidationStateTrackerEEEPNS0_8ListenerEPNS_11StateWriterE
+ fun:_ZN6syncer20InvalidationNotifier17UpdateCredentialsERKSsS2_
+ fun:_ZN6syncer31NonBlockingInvalidationNotifier4Core17UpdateCredentialsERKSsS3_
+}
+{
+ bug_130449
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN12invalidation20NewPermanentCallbackINS_22InvalidationClientImplES1_St4pairINS_6StatusESsEEEPN4base8CallbackIFvT1_EEEPT_MT0_FvS7_E
+ fun:_ZN12invalidation22InvalidationClientImpl34ScheduleStartAfterReadingStateBlobEv
+ fun:_ZN12invalidation22InvalidationClientImpl5StartEv
+ fun:_ZN6syncer24SyncInvalidationListener5StartERKSsS2_S2_RKSt3mapIN8syncable9ModelTypeElSt4lessIS5_ESaISt4pairIKS5_lEEERKN12browser_sync10WeakHandleINS_24InvalidationStateTrackerEEEPNS0_8ListenerE
+ fun:_ZN6syncer20InvalidationNotifier17UpdateCredentialsERKSsS2_
+ fun:_ZN6syncer31NonBlockingInvalidationNotifier4Core17UpdateCredentialsERKSsS3_
+}
+{
+ bug_130619
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN5blink9ClipRects6createERKS0_
+ fun:_ZN5blink11RenderLayer15updateClipRectsEPKS0_PNS_12RenderRegionENS_13ClipRectsTypeENS_29OverlayScrollbarSizeRelevancyE
+ ...
+ fun:_ZNK7blink11RenderLayer15parentClipRectsEPKS0_PNS_12RenderRegionENS_13ClipRectsTypeERNS_9ClipRectsENS_29OverlayScrollbarSizeRelevancyE
+ fun:_ZNK7blink11RenderLayer18backgroundClipRectEPKS0_PNS_12RenderRegionENS_13ClipRectsTypeENS_29OverlayScrollbarSizeRelevancyE
+}
+{
+ bug_138058
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink12WebVTTParser22constructTreeFromTokenEPNS_8DocumentE
+ fun:_ZN5blink12WebVTTParser33createDocumentFragmentFromCueTextERKN3WTF6StringE
+ fun:_ZN5blink12TextTrackCue12getCueAsHTMLEv
+ fun:_ZN5blink12TextTrackCue17updateDisplayTreeEf
+ fun:_ZN5blink16HTMLMediaElement25updateActiveTextTrackCuesEf
+}
+{
+ bug_138060
+ Memcheck:Uninitialized
+ fun:_NPN_EvaluateHelper
+ fun:_NPN_Evaluate
+ fun:_ZN5blink11WebBindings8evaluateEP4_NPPP8NPObjectP9_NPStringP10_NPVariant
+ fun:_ZL13executeScriptPK12PluginObjectPKc
+ fun:NPP_Destroy
+ fun:_ZN6webkit5npapi14PluginInstance11NPP_DestroyEv
+ fun:_ZN6webkit5npapi21WebPluginDelegateImpl15DestroyInstanceEv
+ fun:_ZN6webkit5npapi21WebPluginDelegateImplD0Ev
+ fun:_ZN6webkit5npapi21WebPluginDelegateImpl15PluginDestroyedEv
+ fun:_ZN6webkit5npapi13WebPluginImpl22TearDownPluginInstanceEPN5blink12WebURLLoaderE
+ fun:_ZN6webkit5npapi13WebPluginImpl12SetContainerEPN5blink18WebPluginContainerE
+ fun:_ZN6webkit5npapi13WebPluginImpl7destroyEv
+ fun:_ZN5blink22WebPluginContainerImplD0Ev
+ fun:_ZN3WTF10RefCountedIN7blink6WidgetEE5derefEv
+ fun:_ZNSt4pairIN3WTF6RefPtrIN7blink6WidgetEEEPNS2_9FrameViewEED1Ev
+ fun:_ZN3WTF9HashTableINS_6RefPtrIN7blink6WidgetEEESt4pairIS4_PNS2_9FrameViewEENS_18PairFirstExtractorIS8_EENS_7PtrHashIS4_EENS_14PairHashTraitsINS_10HashTraitsIS4_EENSE_IS7_EEEESF_E15deallocateTableEPS8_i
+ fun:_ZN3WTF9HashTableINS_6RefPtrIN7blink6WidgetEEESt4pairIS4_PNS2_9FrameViewEENS_18PairFirstExtractorIS8_EENS_7PtrHashIS4_EENS_14PairHashTraitsINS_10HashTraitsIS4_EENSE_IS7_EEEESF_ED1Ev
+ fun:_ZN3WTF7HashMapINS_6RefPtrIN7blink6WidgetEEEPNS2_9FrameViewENS_7PtrHashIS4_EENS_10HashTraitsIS4_EENS9_IS6_EEED1Ev
+ fun:_ZN5blink12RenderWidget28resumeWidgetHierarchyUpdatesEv
+ fun:_ZN5blink7Element6detachEv
+ fun:_ZN5blink13ContainerNode14detachChildrenEv
+ fun:_ZN5blink13ContainerNode6detachEv
+}
+{
+ bug_138220_a
+ Memcheck:Uninitialized
+ fun:_ZNK7blink16HTMLInputElement8dataListEv
+ fun:_ZNK7blink16HTMLInputElement4listEv
+ fun:_ZN5blink21RenderSliderContainer6layoutEv
+ fun:_ZN5blink11RenderBlock16layoutBlockChildEPNS_9RenderBoxERNS0_10MarginInfoERNS_20FractionalLayoutUnitES6_
+ fun:_ZN5blink11RenderBlock19layoutBlockChildrenEbRNS_20FractionalLayoutUnitE
+ fun:_ZN5blink11RenderBlock11layoutBlockEbNS_20FractionalLayoutUnitE
+ fun:_ZN5blink11RenderBlock6layoutEv
+ fun:_ZN5blink12RenderSlider6layoutEv
+}
+{
+ bug_138220_b
+ Memcheck:Uninitialized
+ fun:_ZNK7blink16HTMLInputElement8dataListEv
+ fun:_ZNK7blink16HTMLInputElement4listEv
+ fun:_ZN5blink11RenderTheme16paintSliderTicksEPNS_12RenderObjectERKNS_9PaintInfoERKNS_7IntRectE
+ fun:_ZN5blink24RenderThemeChromiumLinux16paintSliderTrackEPNS_12RenderObjectERKNS_9PaintInfoERKNS_7IntRectE
+ fun:_ZN5blink11RenderTheme5paintEPNS_12RenderObjectERKNS_9PaintInfoERKNS_7IntRectE
+ fun:_ZN5blink9RenderBox19paintBoxDecorationsERNS_9PaintInfoERKNS_21FractionalLayoutPointE
+ fun:_ZN5blink11RenderBlock11paintObjectERNS_9PaintInfoERKNS_21FractionalLayoutPointE
+}
+{
+ bug_138233_a
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN3WTF10RefCountedIN7blink17ScriptProfileNodeEEnwEm
+ fun:_ZN5blink17ScriptProfileNode6createEPKN2v814CpuProfileNodeE
+ fun:_ZNK7blink13ScriptProfile4headEv
+ fun:_ZN5blink23ScriptProfileV8InternalL14headAttrGetterEN2v85LocalINS1_6StringEEERKNS1_12AccessorInfoE
+ fun:_ZN2v88internal8JSObject23GetPropertyWithCallbackEPNS0_6ObjectES3_PNS0_6StringE
+ fun:_ZN2v88internal6Object11GetPropertyEPS1_PNS0_12LookupResultEPNS0_6StringEP18PropertyAttributes
+ fun:_ZN2v88internal6LoadIC4LoadENS0_16InlineCacheStateENS0_6HandleINS0_6ObjectEEENS3_INS0_6StringEEE
+ fun:_ZN2v88internal11LoadIC_MissENS0_9ArgumentsEPNS0_7IsolateE
+}
+{
+ bug_138233_b
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN3WTF10RefCountedIN7blink17ScriptProfileNodeEEnwEm
+ fun:_ZN5blink17ScriptProfileNode6createEPKN2v814CpuProfileNodeE
+ fun:_ZNK7blink17ScriptProfileNode8childrenEv
+ fun:_ZN5blink27ScriptProfileNodeV8InternalL16childrenCallbackERKN2v89ArgumentsE
+}
+{
+ bug_138712
+ Memcheck:Uninitialized
+ fun:_ZN7testing8internal11CmpHelperGEIddEENS_15AssertionResultEPKcS4_RKT_RKT0_
+ fun:_ZN3gfx30JPEGCodec_EncodeDecodeRGB_Test8TestBodyEv
+}
+{
+ bug_144118_b
+ Memcheck:Unaddressable
+ fun:_ZNK3WTF6OwnPtrIN5blink14ScrollbarGroupEEcvMS3_PS2_Ev
+ fun:_ZN5blink22WebPluginContainerImpl14reportGeometryEv
+ fun:_ZN5blink22WebPluginContainerImpl12setFrameRectERKN7blink7IntRectE
+ ...
+ fun:_ZN9TestShell4dumpEv
+}
+{
+ bug_144913_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN8chromeos17DBusThreadManager10InitializeEv
+ fun:_ZN8chromeos23KioskModeIdleLogoutTest5SetUpEv
+}
+{
+ bug_144913_c
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN8chromeos21DBusThreadManagerImplC1ENS_28DBusClientImplementationTypeE
+ fun:_ZN8chromeos17DBusThreadManager10InitializeEv
+ fun:_ZN8chromeos23KioskModeIdleLogoutTest5SetUpEv
+}
+{
+ bug_144930_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZL21cachedDeviceLuminancef
+}
+{
+ bug_145650a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN14WebDataService10AddKeywordERK15TemplateURLData
+ fun:_ZN18TemplateURLService11AddNoNotifyEP11TemplateURLb
+}
+{
+ bug_145650b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN14WebDataService13RemoveKeywordEl
+ fun:_ZN18TemplateURLService14RemoveNoNotifyEP11TemplateURL
+ fun:_ZN18TemplateURLService6RemoveEP11TemplateURL
+ fun:_ZN9protector71DefaultSearchProviderChangeTest_CurrentSearchProviderRemovedByUser_Test19RunTestOnMainThreadEv
+}
+{
+ bug_145650c
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN14WebDataService13UpdateKeywordERK15TemplateURLData
+ fun:_ZN18TemplateURLService32SetDefaultSearchProviderNoNotifyEP11TemplateURL
+}
+{
+ bug_125692a
+ Memcheck:Uninitialized
+ fun:_ZN2v88internal11StoreBuffer28IteratePointersInStoreBufferEPFvPPNS0_10HeapObjectES3_E
+ fun:_ZN2v88internal11StoreBuffer25IteratePointersToNewSpaceEPFvPPNS0_10HeapObjectES3_E
+ fun:_ZN2v88internal20MarkCompactCollector29EvacuateNewSpaceAndCandidatesEv
+ fun:_ZN2v88internal20MarkCompactCollector11SweepSpacesEv
+ fun:_ZN2v88internal20MarkCompactCollector14CollectGarbageEv
+ fun:_ZN2v88internal4Heap11MarkCompactEPNS0_8GCTracerE
+}
+{
+ bug_125692b
+ Memcheck:Uninitialized
+ fun:_ZN2v88internal11StoreBuffer7CompactEv
+ fun:_ZN2v88internal11StoreBuffer19PrepareForIterationEv
+ fun:_ZN2v88internal11StoreBuffer25IteratePointersToNewSpaceEPFvPPNS0_10HeapObjectES3_E
+ fun:_ZN2v88internal20MarkCompactCollector29EvacuateNewSpaceAndCandidatesEv
+ fun:_ZN2v88internal20MarkCompactCollector11SweepSpacesEv
+ fun:_ZN2v88internal20MarkCompactCollector14CollectGarbageEv
+ fun:_ZN2v88internal4Heap11MarkCompactEPNS0_8GCTracerE
+ fun:_ZN2v88internal4Heap24PerformGarbageCollectionENS0_16GarbageCollectorEPNS0_8GCTracerE
+ fun:_ZN2v88internal4Heap14CollectGarbageENS0_15AllocationSpaceENS0_16GarbageCollectorEPKcS5_
+ fun:_ZN2v88internal4Heap14CollectGarbageENS0_15AllocationSpaceEPKc
+ fun:_ZN2v88internal4Heap17CollectAllGarbageEiPKc
+ fun:_ZN2v88internal4Heap16IdleNotificationEi
+ fun:_ZN2v88internal2V816IdleNotificationEi
+ fun:_ZN2v82V816IdleNotificationEi
+ fun:_ZN16RenderThreadImpl11IdleHandlerEv
+}
+{
+ bug_145693
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN10extensions18PermissionsUpdater17RecordOAuth2GrantEPKNS_9ExtensionE
+ fun:_ZN10extensions18PermissionsUpdater22GrantActivePermissionsEPKNS_9ExtensionEb
+ fun:_ZN10extensions12CrxInstaller25ReportSuccessFromUIThreadEv
+}
+{
+ bug_145695
+ Memcheck:Leak
+ fun:malloc
+ fun:NaClDescImcBoundDescAcceptConn
+ fun:RevRpcHandlerBase
+ fun:NaClThreadInterfaceStart
+}
+{
+ bug_162825
+ Memcheck:Uninitialized
+ fun:bcmp
+ fun:_ZNK3gpu5gles221ShaderTranslatorCache26ShaderTranslatorInitParamsltERKS2_
+ fun:_ZNKSt4lessIN3gpu5gles221ShaderTranslatorCache26ShaderTranslatorInitParams*
+ ...
+ fun:*ShaderTranslatorInitParams*
+ ...
+ fun:_ZN3gpu5gles216GLES2DecoderImpl26InitializeShaderTranslatorEv
+}
+{
+ bug_163924
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN28JSONAsynchronousUnpackerImpl22StartProcessOnIOThreadEN7content13BrowserThread2IDERKSs
+}
+{
+ bug_164176
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN18BrowserProcessImpl21PreMainMessageLoopRunEv
+ fun:_ZN22ChromeBrowserMainParts25PreMainMessageLoopRunImplEv
+ fun:_ZN22ChromeBrowserMainParts21PreMainMessageLoopRunEv
+ fun:_ZN7content15BrowserMainLoop13CreateThreadsEv
+ fun:_ZN7content21BrowserMainRunnerImpl10InitializeERKNS_18MainFunctionParamsE
+ fun:_ZN7content11BrowserMainERKNS_18MainFunctionParamsE
+ fun:_ZN7content23RunNamedProcessTypeMainERKSsRKNS_18MainFunctionParamsEPNS_19ContentMainDelegateE
+ fun:_ZN7content21ContentMainRunnerImpl3RunEv
+ fun:_ZN7content11ContentMainEiPPKcPNS_19ContentMainDelegateE
+ fun:ChromeMain
+}
+{
+ bug_164179
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN3net10URLFetcher6CreateERK4GURLNS0_11RequestTypeEPNS_18URLFetcherDelegateE
+ fun:_ZN18WebResourceService10StartFetchEv
+}
+{
+ bug_166819
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNK3sql10Connection21GetUntrackedStatementEPKc
+ fun:_ZNK3sql10Connection21DoesTableOrIndexExistEPKcS2_
+ fun:_ZNK3sql10Connection14DoesTableExistEPKc
+ fun:_ZN3sql9MetaTable14DoesTableExistEPNS_10ConnectionE
+ ...
+ fun:_ZN7history16TopSitesDatabase4InitE*
+ fun:_ZN7history15TopSitesBackend16InitDBOnDBThreadE*
+}
+{
+ bug_166819b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNK3sql10Connection21GetUntrackedStatementEPKc
+ fun:_ZNK3sql10Connection21DoesTableOrIndexExistEPKcS2_
+ fun:_ZNK3sql10Connection14DoesTableExistEPKc
+ fun:_ZN7history17ShortcutsDatabase11EnsureTableEv
+ fun:_ZN7history17ShortcutsDatabase4InitEv
+ fun:_ZN7history16ShortcutsBackend12InitInternalEv
+}
+{
+ bug_167175a
+ Memcheck:Leak
+ ...
+ fun:g_*
+ ...
+ fun:_ZN16BrowserWindowGtk11InitWidgetsEv
+ fun:_ZN16BrowserWindowGtk4InitEv
+ fun:_ZN13BrowserWindow19CreateBrowserWindowEP7Browser
+}
+{
+ bug_167175b
+ Memcheck:Leak
+ fun:malloc
+ obj:/lib/libpng12.so.0.42.0
+ fun:png_create_read_struct_2
+ ...
+ fun:_ZN15ReloadButtonGtkC1EP18LocationBarViewGtkP7Browser
+ fun:_ZN17BrowserToolbarGtk4InitEP10_GtkWindow
+ fun:_ZN16BrowserWindowGtk11InitWidgetsEv
+ fun:_ZN16BrowserWindowGtk4InitEv
+ fun:_ZN13BrowserWindow19CreateBrowserWindowEP7Browser
+}
+{
+ bug_167175d
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorISbItN4base20string16_char_traitsESaItEEE8allocateEmPKv
+ fun:_ZNSt12_Vector_baseISbItN4base20string16_char_traitsESaItEESaIS3_EE11_M_allocateEm
+ ...
+ fun:_ZN15WrenchMenuModel5BuildEbb
+ fun:_ZN15WrenchMenuModelC1EPN2ui19AcceleratorProviderEP7Browserbb
+ fun:_ZN17BrowserToolbarGtkC1EP7BrowserP16BrowserWindowGtk
+ fun:_ZN16BrowserWindowGtk11InitWidgetsEv
+ fun:_ZN16BrowserWindowGtk4InitEv
+ fun:_ZN13BrowserWindow19CreateBrowserWindowEP7Browser
+}
+{
+ bug_172005
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7leveldb10VersionSet11LogAndApplyEPNS_11VersionEditEPNS_4port5MutexE
+ fun:_ZN7leveldb2DB4OpenERKNS_7OptionsERKSsPPS0_
+ fun:_ZN11dom_storage22SessionStorageDatabase9TryToOpenEPPN7leveldb2DBE
+ fun:_ZN11dom_storage22SessionStorageDatabase8LazyOpenEb
+ fun:_ZN11dom_storage22SessionStorageDatabase24ReadNamespacesAndOriginsEPSt3mapISsSt6vectorI4GURLSaIS3_EESt4lessISsESaISt4pairIKSsS5_EEE
+ fun:_ZN11dom_storage17DomStorageContext36FindUnusedNamespacesInCommitSequenceERKSt3setISsSt4lessISsESaISsEES7_
+}
+{
+ bug_172005b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7leveldb6DBImplC1ERKNS_7OptionsERKSs
+ fun:_ZN7leveldb2DB4OpenERKNS_7OptionsERKSsPPS0_
+ fun:_ZN11dom_storage22SessionStorageDatabase9TryToOpenEPPN7leveldb2DBE
+ fun:_ZN11dom_storage22SessionStorageDatabase8LazyOpenEb
+ fun:_ZN11dom_storage22SessionStorageDatabase24ReadNamespacesAndOriginsEPSt3mapISsSt6vectorI4GURLSaIS3_EESt4lessISsESaISt4pairIKSsS5_EEE
+ fun:_ZN11dom_storage17DomStorageContext36FindUnusedNamespacesInCommitSequenceERKSt3setISsSt4lessISsESaISsEES7_
+}
+{
+ bug_175823
+ Memcheck:Leak
+ ...
+ fun:_ZN18ValueStoreFrontend*
+}
+{
+ bug_176619_a
+ Memcheck:Uninitialized
+ fun:_ZN3WTF6StringC1EPKt
+ fun:_ZN5blink12WebVTTParser22constructTreeFromTokenEPNS_8DocumentE
+ fun:_ZN5blink12WebVTTParser33createDocumentFragmentFromCueTextERKN3WTF6StringE
+ fun:_ZN5blink12TextTrackCue20createWebVTTNodeTreeEv
+ fun:_ZN5blink12TextTrackCue22createCueRenderingTreeEv
+ fun:_ZN5blink12TextTrackCue17updateDisplayTreeEf
+}
+{
+ bug_176619_b
+ Memcheck:Uninitialized
+ fun:_ZN5blink12WebVTTParser13collectDigitsERKN3WTF6StringEPj
+ fun:_ZN5blink12WebVTTParser16collectTimeStampERKN3WTF6StringEPj
+ fun:_ZN5blink12WebVTTParser22constructTreeFromTokenEPNS_8DocumentE
+ fun:_ZN5blink12WebVTTParser33createDocumentFragmentFromCueTextERKN3WTF6StringE
+ fun:_ZN5blink12TextTrackCue20createWebVTTNodeTreeEv
+ fun:_ZN5blink12TextTrackCue22createCueRenderingTreeEv
+ fun:_ZN5blink12TextTrackCue17updateDisplayTreeEf
+}
+{
+ bug_176891a
+ Memcheck:Leak
+ fun:calloc
+ fun:nss_ZAlloc
+ fun:nssCryptokiObject_Create
+ fun:create_objects_from_handles
+ fun:find_objects
+ fun:find_objects_by_template
+ fun:nssToken_FindCertificateByEncodedCertificate
+ fun:PK11_FindCertFromDERCertItem
+ fun:_ZN24mozilla_security_manager12_GLOBAL__N_125nsPKCS12Blob_ImportHelper*
+}
+{
+ bug_176891b
+ Memcheck:Leak
+ ...
+ fun:nssPKIObject_Create
+ fun:nssTrustDomain_FindTrustForCertificate
+ fun:STAN_DeleteCertTrustMatchingSlot
+ fun:SEC_DeletePermCertificate
+}
+{
+ bug_252241_a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN7content19BlinkTestController20PrepareForLayoutTestERK4GURLRKN4base8FilePathEbRKSs
+ fun:_Z16ShellBrowserMainRKN7content18MainFunctionParams*
+ fun:_ZN7content17ShellMainDelegate10RunProcessERKSsRKNS_18MainFunctionParamsE
+ fun:_ZN7content23RunNamedProcessTypeMainERKSsRKNS_18MainFunctionParamsEPNS_19ContentMainDelegateE
+ fun:_ZN7content21ContentMainRunnerImpl3RunEv
+ fun:_ZN7content11ContentMainEiPPKcPNS_19ContentMainDelegateE
+}
+{
+ bug_252241_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content19ContentMainDelegate26CreateContentUtilityClientEv
+ fun:_ZN7content24ContentClientInitializer3SetERKSsPNS_19ContentMainDelegateE
+ fun:_ZN7content21ContentMainRunnerImpl10InitializeEiPPKcPNS_19ContentMainDelegateE
+ fun:_ZN7content11ContentMainEiPPKcPNS_19ContentMainDelegateE
+}
+{
+ bug_252641_a
+ Memcheck:Uninitialized
+ fun:pthread_rwlock_init$UNIX2003
+ fun:_ZN3re25MutexC2Ev
+ fun:_ZN3re25MutexC1Ev
+ ...
+ fun:_ZN11leveldb_env19ParseMethodAndErrorEPKcPNS_8MethodIDEPi
+}
+{
+ bug_252641_b
+ Memcheck:Uninitialized
+ fun:pthread_rwlock_init$UNIX2003
+ fun:_ZN3re25MutexC2Ev
+ fun:_ZN3re25MutexC1Ev
+ ...
+ fun:_ZN3gpu12_GLOBAL__N_114StringMismatchERKSsS2_
+}
+{
+ bug_258132a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN5ppapi5proxy15PPP_Class_Proxy19CreateProxiedObjectEPK18PPB_Var_DeprecatedPNS0_10DispatcherEill
+ fun:_ZN5ppapi5proxy24PPB_Var_Deprecated_Proxy27OnMsgCreateObjectDeprecatedEillNS0_24SerializedVarReturnValueE
+}
+{
+ bug_258132b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN5ppapi5proxy26PluginProxyMultiThreadTest7RunTestEv
+ fun:_ZN5ppapi*ThreadAwareCallback*Test_*
+}
+{
+ bug_259357d
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN3gpu5gles239ShaderTranslatorTest_OptionsString_Test8TestBodyEv
+}
+{
+ bug_259357f
+ Memcheck:Uninitialized
+ fun:_ZNK3gpu12AsyncAPIMock6IsArgsclEPKv
+ fun:_ZNK7testing8internal12TrulyMatcherIN3gpu12AsyncAPIMock6IsArgsEE15MatchAndExplainIPKvEEbRT_PNS_19MatchResultListenerE
+ fun:_ZNK7testing18PolymorphicMatcherINS_8internal12TrulyMatcherIN3gpu12AsyncAPIMock6IsArgsEEEE15MonomorphicImplIPKvE15MatchAndExplainESA_PNS_19MatchResultListenerE
+ fun:_ZNK7testing8internal11MatcherBaseIPKvE15MatchAndExplainES3_PNS_19MatchResultListenerE
+ fun:_ZNK7testing8internal11MatcherBaseIPKvE7MatchesES3_
+ fun:_ZN7testing8internal11TuplePrefixILm3EE7MatchesINSt3tr15tupleIINS_7MatcherIjEES7_NS6_IPKvEEEEENS5_IIjjS9_EEEEEbRKT_RKT0_
+ fun:_ZN7testing8internal12TupleMatchesINSt3tr15tupleIINS_7MatcherIjEES5_NS4_IPKvEEEEENS3_IIjjS7_EEEEEbRKT_RKT0_
+ fun:_ZNK7testing8internal16TypedExpectationIFN3gpu5error5ErrorEjjPKvEE7MatchesERKNSt3tr15tupleIIjjS6_EEE
+ fun:_ZNK7testing8internal16TypedExpectationIFN3gpu5error5ErrorEjjPKvEE21ShouldHandleArgumentsERKNSt3tr15tupleIIjjS6_EEE
+ fun:_ZNK7testing8internal18FunctionMockerBaseIFN3gpu5error5ErrorEjjPKvEE29FindMatchingExpectationLockedERKNSt3tr15tupleIIjjS6_EEE
+ fun:_ZN7testing8internal18FunctionMockerBaseIFN3gpu5error5ErrorEjjPKvEE30UntypedFindMatchingExpectationES6_PS6_PbPSoSB_
+ fun:_ZN7testing8internal25UntypedFunctionMockerBase17UntypedInvokeWithEPKv
+ fun:_ZN7testing8internal18FunctionMockerBaseIFN3gpu5error5ErrorEjjPKvEE10InvokeWithERKNSt3tr15tupleIIjjS6_EEE
+ fun:_ZN7testing8internal14FunctionMockerIFN3gpu5error5ErrorEjjPKvEE6InvokeEjjS6_
+ fun:_ZN3gpu12AsyncAPIMock9DoCommandEjjPKv
+ fun:_ZN3gpu13CommandParser14ProcessCommandEv
+ fun:_ZN3gpu12CommandExecutor10PutChangedEv
+}
+{
+ bug_259789b
+ Memcheck:Uninitialized
+ fun:_ZN5blink12_GLOBAL__N_116adjustAttributesERKNS_17GraphicsContext3D10AttributesEPNS_8SettingsE
+ fun:_ZN5blink21WebGLRenderingContext6createEPNS_17HTMLCanvasElementEPNS_22WebGLContextAttributesE
+ fun:_ZN5blink17HTMLCanvasElement10getContextERKN3WTF6StringEPNS_23CanvasContextAttributesE
+}
+{
+ bug_273398
+ Memcheck:Leak
+ ...
+ fun:_ZN6Pickle6ResizeEm
+ fun:_ZN6PickleC1Ev
+ fun:_ZN7content14ZygoteHostImpl20GetTerminationStatusEibPi
+ fun:_ZN7content20ChildProcessLauncher25GetChildTerminationStatusEbPi
+}
+{
+ bug_290407
+ Memcheck:Leak
+ fun:calloc
+ fun:_swrast_new_soft_renderbuffer
+ fun:_mesa_BindRenderbufferEXT
+ fun:shared_dispatch_stub_939
+ fun:_ZN3gfx9GLApiBase23glBindRenderbufferEXTFnEjj
+ fun:_ZN3gpu5gles216GLES2DecoderImpl18DoBindRenderbufferEjj
+}
+{
+ bug_293024_b
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN3WTF24ThreadSafeRefCountedBasenwEm
+ fun:_ZN5blink12_GLOBAL__N_131AllowFileSystemMainThreadBridge6createEPN7blink17WorkerGlobalScopeEPNS_13WebWorkerBaseERKN3WTF6StringE
+ fun:_ZN5blink22WorkerFileSystemClient15allowFileSystem*
+ ...
+ fun:_ZN2v88internal25FunctionCallbackArguments4CallEPFvRKNS_20FunctionCallbackInfoINS_5ValueEEEE
+}
+{
+ bug_298143
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink25TypeConversionsV8Internal*AttributeGetterE*
+}
+{
+ bug_298788
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN10extensions19TestExtensionSystem22CreateExtensionServiceEPKN4base11CommandLineERKNS1_8FilePathEb
+ fun:_ZN10extensions12_GLOBAL__N_130ExtensionActionIconFactoryTest5SetUpEv
+}
+{
+ bug_299804
+ Memcheck:Uninitialized
+ fun:_ZN24GrConfigConversionEffect30TestForPreservingPMConversionsEP9GrContextPNS_12PMConversionES3_
+ fun:_ZN12_GLOBAL__N_119test_pm_conversionsEP9GrContextPiS2_
+ fun:_ZN9GrContext19createPMToUPMEffectEP9GrTexturebRK8SkMatrix
+ fun:_ZN9GrContext22readRenderTargetPixelsEP14GrRenderTargetiiii13GrPixelConfigPvmj
+ fun:_ZN11SkGpuDevice12onReadPixelsE*
+ fun:_ZN12SkBaseDevice10readPixelsE*
+ fun:_ZN8SkCanvas10readPixelsE*
+ fun:_ZN*DeferredDevice12onReadPixelsE*
+ fun:_ZN12SkBaseDevice10readPixelsE*
+ fun:_ZN8SkCanvas10readPixelsE*
+ fun:_ZN5blink15GraphicsContext10readPixelsE*
+ ...
+ fun:_ZN*blink24CanvasRenderingContext2D12getImageDataE*
+ ...
+ fun:_ZN5blink34CanvasRenderingContext2DV8InternalL18getImageDataMethodERKN2v820FunctionCallbackInfoINS1_5ValueEEE
+ fun:_ZN5blink34CanvasRenderingContext2DV8InternalL26getImageDataMethodCallbackERKN2v820FunctionCallbackInfoINS1_5ValueEEE
+}
+{
+ bug_317166
+ Memcheck:Leak
+ fun:malloc
+ fun:_dl_close_worker
+ fun:_dl_close
+ fun:_dl_catch_error
+ fun:_dlerror_run
+ fun:dlclose
+ obj:/usr/lib/x86_64-linux-gnu/libasound.so.2.0.0
+ fun:snd_config_searcha_hooks
+ fun:snd_config_searchva_hooks
+ obj:/usr/lib/x86_64-linux-gnu/libasound.so.2.0.0
+ fun:snd_config_search_definition
+ obj:/usr/lib/x86_64-linux-gnu/libasound.so.2.0.0
+ fun:_ZN5media11AlsaWrapper7PcmOpenEPP8_snd_pcmPKc15_snd_pcm_streami
+ fun:_ZN9alsa_utilL10OpenDeviceEPN5media11AlsaWrapperEPKc15_snd_pcm_streamii15_snd_pcm_formati
+ fun:_ZN9alsa_util18OpenPlaybackDeviceEPN5media11AlsaWrapperEPKcii15_snd_pcm_formati
+ fun:_ZN5media19AlsaPcmOutputStream16AutoSelectDeviceEj
+ fun:_ZN5media19AlsaPcmOutputStream4OpenEv
+ fun:_ZN5media25AudioOutputDispatcherImpl19CreateAndOpenStreamEv
+ fun:_ZN5media25AudioOutputDispatcherImpl10OpenStreamEv
+ fun:_ZN5media20AudioOutputResampler10OpenStreamEv
+ fun:_ZN5media16AudioOutputProxy4OpenEv
+ fun:_ZN5media18AudioStreamHandler20AudioStreamContainer4PlayEv
+}
+{
+ bug_318221
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base23EnsureProcessTerminatedEi
+}
+{
+ bug_321976
+ Memcheck:Leak
+ ...
+ fun:nssList_Create
+ fun:nssTrustDomain_UpdateCachedTokenCerts
+}
+{
+ bug_331925
+ Memcheck:Leak
+ ...
+ fun:_ZN3net27TestURLRequestContextGetter20GetURLRequestContextEv
+ fun:_ZN3net14URLFetcherCore30StartURLRequestWhenAppropriateEv
+ fun:_ZN3net14URLFetcherCore19DidInitializeWriterEi
+ fun:_ZN3net14URLFetcherCore15StartOnIOThreadEv
+}
+{
+ bug_332330
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN5blink8ResourcenwEm
+ fun:_ZN5blinkL14createResourceENS_8Resource4TypeERKNS_15ResourceRequestERKN3WTF6StringE
+ fun:_ZN5blink15ResourceFetcher12loadResourceENS_8Resource4TypeERNS_12FetchRequestERKN3WTF6StringE
+ fun:_ZN5blink15ResourceFetcher15requestResourceENS_8Resource4TypeERNS_12FetchRequestE
+ fun:_ZN5blink15ResourceFetcher16fetchRawResourceERNS_12FetchRequestE
+ fun:_ZN5blink24DocumentThreadableLoader11loadRequestERKNS_15ResourceRequestENS_19SecurityCheckPolicyE
+}
+{
+ bug_340952
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN5blink15DOMWrapperWorldC2Eii
+ fun:_ZN5blink15DOMWrapperWorldC1Eii
+ fun:_ZN5blink15DOMWrapperWorld6createEii
+ fun:_ZN5blink15DOMWrapperWorld9mainWorldEv
+ ...
+ fun:_ZN7content22BufferedDataSourceTestC2Ev
+}
+{
+ bug_340752
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink4Heap19checkAndMarkPointerEPNS_7VisitorEPh
+ fun:_ZN5blink11ThreadState10visitStackEPNS_7VisitorE
+ ...
+ fun:_ZN5blink4Heap14collectGarbageENS_7BlinkGC10StackState*
+}
+{
+ bug_342591
+ Memcheck:Param
+ write(buf)
+ obj:*libpthread*
+ fun:_ZN3IPC7Channel11ChannelImpl23ProcessOutgoingMessagesEv
+ fun:_ZN3IPC7Channel11ChannelImpl4SendEPNS_7MessageE
+ fun:_ZN3IPC7Channel4SendEPNS_7MessageE
+ fun:_ZN3IPC12ChannelProxy7Context13OnSendMessageE10scoped_ptrINS_7MessageEN4base14DefaultDeleterIS3_EEE
+}
+{
+ bug_345432
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3IPC11SyncChannel23CreateSyncMessageFilterEv
+ fun:_ZN7content14GpuChannelHost7ConnectERKN3IPC13ChannelHandleEPN4base13WaitableEventE
+ fun:_ZN7content14GpuChannelHost6CreateEPNS_21GpuChannelHostFactoryEiRKN3gpu7GPUInfoERKN3IPC13ChannelHandleEPN4base13WaitableEventEPNS3_22GpuMemoryBufferManagerE
+ fun:_ZN7content28BrowserGpuChannelHostFactory21GpuChannelEstablishedEv
+ fun:_ZN7content28BrowserGpuChannelHostFactory16EstablishRequest12FinishOnMainEv
+}
+{
+ bug_346336_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content16SiteInstanceImpl10GetProcessEv
+ fun:_ZN7content22RenderFrameHostManager21CreateRenderFrameHostEPNS_12SiteInstanceEiibb
+ fun:_ZN7content22RenderFrameHostManager4InitEPNS_14BrowserContextEPNS_12SiteInstanceEii
+ fun:_ZN7content15WebContentsImpl4InitERKNS_11WebContents12CreateParamsE
+ fun:_ZN7content15WebContentsImpl16CreateWithOpenerERKNS_11WebContents12CreateParamsEPS0_
+ fun:_ZN7content11WebContents6CreateERKNS0_12CreateParamsE
+ fun:_ZN7content5Shell15CreateNewWindowEPNS_14BrowserContextERK4GURLPNS_12SiteInstanceEiRKN3gfx4SizeE
+ fun:_ZN7content19BlinkTestController20PrepareForLayoutTestERK4GURLRKN4base8FilePathEbRKSs
+ fun:_ZN12_GLOBAL__N_110RunOneTestERKSsPbRK10scoped_ptrIN7content17BrowserMainRunnerEN4base14DefaultDeleterIS5_EEE
+ fun:_Z16ShellBrowserMainRKN7content18MainFunctionParamsERK10scoped_ptrINS_17BrowserMainRunnerEN4base14DefaultDeleterIS4_EEE
+ fun:_ZN7content17ShellMainDelegate10RunProcessERKSsRKNS_18MainFunctionParamsE
+ fun:_ZN7content23RunNamedProcessTypeMainERKSsRKNS_18MainFunctionParamsEPNS_19ContentMainDelegateE
+ fun:_ZN7content21ContentMainRunnerImpl3RunEv
+ fun:_ZN7content11ContentMainEiPPKcPNS_19ContentMainDelegateE
+}
+{
+ bug_347683
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNK13LoginDatabase9GetLoginsERKN8autofill12PasswordFormEPSt6vectorIPS1_SaIS5_EE
+ fun:_ZN50LoginDatabaseTest_UpdateIncompleteCredentials_Test8TestBodyEv
+}
+{
+ bug_347967
+ Memcheck:Uninitialized
+ fun:unpack_RGB*888
+ fun:_mesa_unpack_rgba_row
+ fun:slow_read_rgba_pixels
+ fun:read_rgba_pixels
+ fun:_mesa_readpixels
+ ...
+ fun:shared_dispatch_stub_*
+ ...
+ fun:*gpu*gles*GLES2DecoderImpl*DoCommand*
+ fun:*gpu*CommandParser*ProcessCommand*
+ fun:*gpu*CommandExecutor*PutChanged*
+}
+{
+ bug_348863
+ Memcheck:Unaddressable
+ fun:_ZNK7blink32PlatformSpeechSynthesisUtterance6clientEv
+ fun:_ZN5blink15SpeechSynthesis17didFinishSpeakingEN3WTF10PassRefPtrINS_32PlatformSpeechSynthesisUtteranceEEE
+ fun:_ZN5blink29PlatformSpeechSynthesizerMock16speakingFinishedEPNS_5TimerIS0_EE
+ fun:_ZN5blink5TimerINS_29PlatformSpeechSynthesizerMockEE5firedEv
+ fun:_ZN5blink12ThreadTimers24sharedTimerFiredInternalEv
+ fun:_ZN5blink12ThreadTimers16sharedTimerFiredEv
+ fun:_ZN7content17BlinkPlatformImpl9DoTimeoutEv
+}
+{
+ bug_350809
+ Memcheck:Uninitialized
+ fun:_ZN5blink23ReplaceSelectionCommand7doApplyEv
+ fun:_ZN5blink20CompositeEditCommand5applyEv
+ fun:_ZN5blink6Editor28replaceSelectionWithFragmentEN3WTF10PassRefPtrINS_16DocumentFragmentEEEbbb
+ fun:_ZN5blink6Editor24replaceSelectionWithTextERKN3WTF6StringEbb
+}
+{
+ bug_361594
+ Memcheck:Uninitialized
+ ...
+ fun:*SkA8_Shader_Blitter*blitH*
+ ...
+ fun:*content*ScreenshotData*EncodeOnWorker*
+}
+{
+ bug_364274
+ Memcheck:Uninitialized
+ fun:_ZN5blink21RenderLayerCompositor14updateIfNeededEv
+}
+{
+ bug_364724
+ Memcheck:Param
+ write(buf)
+ obj:/lib/x86_64-linux-gnu/libpthread-2.15.so
+ fun:_ZN3IPC12ChannelPosix23ProcessOutgoingMessagesEv
+ fun:_ZN3IPC12ChannelPosix29OnFileCanWriteWithoutBlockingEi
+ ...
+ fun:_ZN4base19MessagePumpLibevent21FileDescriptorWatcher29OnFileCanWriteWithoutBlockingEiPS0_
+ ...
+ fun:event_process_active
+ fun:event_base_loop
+}
+{
+ bug_364724b
+ Memcheck:Uninitialized
+ fun:_ZN4base17MD5DigestToBase16ERKNS_9MD5DigestE
+ fun:_ZN7content15BlinkTestRunner17CaptureDumpPixelsERK8SkBitmap
+}
+{
+ bug_364724c
+ Memcheck:Param
+ write(buf)
+ obj:/lib/x86_64-linux-gnu/libpthread-2.15.so
+ fun:_ZN3IPC12ChannelPosix23ProcessOutgoingMessagesEv
+ fun:_ZN3IPC12ChannelPosix4SendEPNS_7MessageE
+ fun:_ZN3IPC12ChannelProxy7Context13OnSendMessageE10scoped_ptrINS_7MessageEN4base14DefaultDeleterIS3_EEE
+}
+{
+ bug_365258
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10fastMallocEm
+ fun:_ZN5blink8ResourcenwEm
+ fun:_ZN5blinkL14createResourceENS_8Resource4TypeERKNS_15ResourceRequestERKN3WTF6StringE
+ fun:_ZN5blink15ResourceFetcher18revalidateResourceERKNS_12FetchRequestEPNS_8ResourceE
+ fun:_ZN5blink15ResourceFetcher15requestResourceENS_8Resource4TypeERNS_12FetchRequestE
+ fun:_ZN5blink15ResourceFetcher11fetchScriptERNS_12FetchRequestE
+ fun:_ZN5blink12ScriptLoader11fetchScriptERKN3WTF6StringE
+ fun:_ZN5blink12ScriptLoader13prepareScriptERKN3WTF12TextPositionENS0_17LegacyTypeSupportE
+ fun:_ZN5blink16HTMLScriptRunner9runScriptEPNS_7ElementERKN3WTF12TextPositionE
+ fun:_ZN5blink16HTMLScriptRunner7executeEN3WTF10PassRefPtrINS_7ElementEEERKNS1_12TextPositionE
+ fun:_ZN5blink18HTMLDocumentParser30runScriptsForPausedTreeBuilderEv
+ fun:_ZN5blink18HTMLDocumentParser38processParsedChunkFromBackgroundParserEN3WTF10PassOwnPtrINS0_11ParsedChunkEEE
+ fun:_ZN5blink18HTMLDocumentParser23pumpPendingSpeculationsEv
+ fun:_ZN5blink18HTMLDocumentParser41didReceiveParsedChunkFromBackgroundParserEN3WTF10PassOwnPtrINS0_11ParsedChunkEEE
+ fun:_ZN3WTF15FunctionWrapperIMN7blink18HTMLDocumentParserEFvNS_10PassOwnPtrINS2_11ParsedChunkEEEEEclERKNS_7WeakPtrIS2_EES5_
+ fun:_ZN3WTF17BoundFunctionImplINS_15FunctionWrapperIMN7blink18HTMLDocumentParserEFvNS_10PassOwnPtrINS3_11ParsedChunkEEEEEEFvNS_7WeakPtrIS3_EES6_EEclEv
+ fun:_ZNK3WTF8FunctionIFvvEEclEv
+ fun:_ZN3WTFL18callFunctionObjectEPv
+}
+{
+ bug_367809_d
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN7content21RenderProcessHostImpl4InitEv
+ ...
+ fun:_ZN7content5Shell7LoadURLERK4GURL
+ fun:_ZN7content19BlinkTestController20PrepareForLayoutTestERK4GURLRKN4base8FilePathEbRKSs
+ fun:_ZN12_GLOBAL__N_110RunOneTestERKSsPbRK10scoped_ptrIN7content17BrowserMainRunnerEN4base14DefaultDeleterIS5_EEE
+}
+{
+ bug_369843
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content27ServiceWorkerContextWrapper12InitInternalE*
+}
+{
+ bug_371844
+ Memcheck:Uninitialized
+ fun:bcmp
+ fun:_ZNK7content15GamepadProvider8PadState5MatchERKN5blink10WebGamepadE
+ fun:_ZN7content15GamepadProvider6DoPollEv
+}
+{
+ bug_371860
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN8feedback16FeedbackDataTestC1Ev
+ fun:_ZN8feedback*FeedbackDataTest*
+ fun:_ZN7testing8internal15TestFactoryImplIN8feedback*
+}
+{
+ bug_379943
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN7content20StoragePartitionImpl6CreateEPNS_14BrowserContextEbRKN4base8FilePathE
+ fun:_ZN7content23StoragePartitionImplMap3GetERKSsS2_b
+ fun:_ZN7content12_GLOBAL__N_129GetStoragePartitionFromConfigEPNS_14BrowserContext*
+ fun:_ZN7content14BrowserContext19GetStoragePartitionEPS0_PNS_12SiteInstanceE
+ fun:_ZN7content14BrowserContext26GetDefaultStoragePartitionEPS0_
+ ...
+ fun:_ZN7content21ShellBrowserMainParts21PreMainMessageLoopRunEv
+ fun:_ZN7content15BrowserMainLoop21PreMainMessageLoopRunEv
+}
+{
+ bug_380575
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorISt13_Rb_tree_nodeISt4pairIKSsPN3net20URLRequestJobFactory15ProtocolHandlerEEEE8allocateEmPKv
+ ...
+ fun:_ZNSt3mapISsPN3net20URLRequestJobFactory15ProtocolHandlerESt4lessISsESaISt4pairIKSsS3_EEEixERS7_
+ fun:_ZN3net24URLRequestJobFactoryImpl18SetProtocolHandlerERKSsPNS_20URLRequestJobFactory15ProtocolHandlerE
+ ...
+ fun:_ZN7content28ShellURLRequestContextGetter20GetURLRequestContextEv
+}
+{
+ bug_381065
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN5blink18ModulesInitializer4initEv
+ fun:_ZN5blink19initializeWithoutV8EPNS_8PlatformE
+ fun:_ZN5blink10initializeEPNS_8PlatformE
+ fun:_ZN7content27TestBlinkWebUnitTestSupportC2Ev
+ fun:_ZN7content27TestBlinkWebUnitTestSupportC1Ev
+ fun:_ZN7content17UnitTestTestSuiteC2EPN4base9TestSuiteE
+ fun:_ZN7content17UnitTestTestSuiteC1EPN4base9TestSuiteE
+}
+{
+ bug_385381
+ Memcheck:Unaddressable
+ fun:_ZN5blink23FrameLoaderStateMachine9advanceToENS0_5StateE
+ fun:_ZN5blink11FrameLoader4initEv
+ fun:_ZN5blink10LocalFrame4initEv
+ fun:_ZN5blink17WebLocalFrameImpl22initializeAsChildFrameEPN7blink9FrameHostEPNS1_10FrameOwnerERKN3WTF12AtomicStringES9_
+ fun:_ZN5blink17WebLocalFrameImpl16createChildFrameERKN7blink16FrameLoadRequestEPNS1_21HTMLFrameOwnerElementE
+}
+{
+ bug_385396a
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink11RenderLayer7hitTestERKNS_14HitTestRequestERKNS_15HitTestLocationERNS_13HitTestResultE
+ fun:_ZN5blink10RenderView7hitTestERKNS_14HitTestRequestERKNS_15HitTestLocationERNS_13HitTestResultE
+ fun:_ZN5blink10RenderView7hitTestERKNS_14HitTestRequestERNS_13HitTestResultE
+ ...
+ fun:_ZN5blink12EventHandler18handleGestureEventERKNS_20PlatformGestureEventE
+ fun:_ZN5blink11WebViewImpl18handleGestureEventERKNS_15WebGestureEventE
+}
+{
+ bug_385396b
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink11LayoutPointC*ERKNS_8IntPointE
+ ...
+ fun:_ZN5blink12EventHandler18handleGestureEventERKNS_20PlatformGestureEventE
+ fun:_ZN5blink11WebViewImpl18handleGestureEventERKNS_15WebGestureEventE
+}
+{
+ bug_385396c
+ Memcheck:Uninitialized
+ fun:_ZNK5blink7IntRect8containsEii
+ ...
+ fun:_ZN5blink12EventHandler18handleGestureEventERKNS_20PlatformGestureEventE
+ fun:_ZN5blink11WebViewImpl18handleGestureEventERKNS_15WebGestureEventE
+}
+{
+ bug_385396d
+ Memcheck:Uninitialized
+ fun:_ZNK7blink10LayoutUnit5floorEv
+ ...
+ fun:_ZN5blink12EventHandler18handleGestureEventERKNS_20PlatformGestureEventE
+ fun:_ZN5blink11WebViewImpl18handleGestureEventERKNS_15WebGestureEventE
+ fun:_ZN5blink18PageWidgetDelegate16handleInputEventEPN7blink4PageERNS_22PageWidgetEventHandlerERKNS_13WebInputEventE
+}
+{
+ bug_385396e
+ Memcheck:Uninitialized
+ fun:_ZN5blink15roundedIntPointERKNS_11LayoutPointE
+ fun:_ZNK5blink15HitTestLocation12roundedPointEv
+ fun:_ZN5blink10RenderView7hitTestERKNS_14HitTestRequestERKNS_15HitTestLocationERNS_13HitTestResultE
+ fun:_ZN5blink10RenderView7hitTestERKNS_14HitTestRequestERNS_13HitTestResultE
+ fun:_ZN5blink12EventHandler20hitTestResultAtPointERKNS_11LayoutPointEjRKNS_10LayoutSizeE
+ fun:_ZN5blink12EventHandler18targetGestureEventERKNS_20PlatformGestureEventEb
+}
+{
+ bug_387435
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content16WebURLLoaderImplC1Ev
+ fun:_ZN7content17BlinkPlatformImpl15createURLLoaderEv
+ fun:_ZN5blink10PingLoaderC1EPNS_10LocalFrameERNS_15ResourceRequestERKNS_18FetchInitiatorInfoENS_17StoredCredentialsE
+ fun:_ZN5blink10PingLoader5startEPNS_10LocalFrameERNS_15ResourceRequestERKNS_18FetchInitiatorInfoENS_17StoredCredentialsE
+ fun:_ZN5blink10PingLoader9loadImageEPNS_10LocalFrameERKNS_4KURLE
+ fun:_ZN5blink15ResourceFetcher10fetchImageERNS_12FetchRequestE
+ fun:_ZN5blink11ImageLoader19doUpdateFromElementEb
+ fun:_ZN5blink11ImageLoader4Task3runEv
+}
+{
+ bug_386418
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base8internal20PostTaskAndReplyImpl16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIF*
+ fun:_ZN4base10WorkerPool16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIF*
+ fun:_ZN3net16HostResolverImpl16LoopbackProbeJob*
+ fun:_ZN3net16HostResolverImpl*
+}
+{
+ bug_388668
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN20data_reduction_proxy69DataReductionProxyBypassStatsTest_isDataReductionProxyUnreachable_Test8TestBodyEv
+}
+{
+ bug_392912
+ Memcheck:Uninitialized
+ fun:_ZNK8SkStroke10strokePathERK6SkPathPS0_
+ fun:_ZNK11SkStrokeRec11applyToPathEP6SkPathRKS0_
+ fun:_ZNK7SkPaint11getFillPathERK6SkPathPS0_PK6SkRectf
+}
+{
+ bug_394558
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN32ProfileSyncComponentsFactoryMockC1Ev
+}
+{
+ bug_394624
+ Memcheck:Leak
+ ...
+ fun:_ZN3net24URLRequestJobFactoryImpl18SetProtocolHandlerERKSsPNS_20URLRequestJobFactory15ProtocolHandlerE
+ ...
+ fun:_ZN7content28ShellURLRequestContextGetter20GetURLRequestContextEv
+ fun:_ZN7content21ChromeAppCacheService20InitializeOnIOThreadERKN4base8FilePathEPNS_15ResourceContextEPN3net23URLRequestContextGetterE13scoped_refptrIN5quota20SpecialStoragePolicyEE
+}
+{
+ bug_396658
+ Memcheck:Uninitialized
+ ...
+ fun:wk_png_write_find_filter
+ fun:wk_png_write_row
+}
+{
+ bug_397066_a
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink13InlineFlowBox9addToLineEPNS_9InlineBoxE
+ ...
+ fun:_ZN5blink15RenderBlockFlow13constructLineERNS_11BidiRunListINS_7BidiRunEEERKNS_8LineInfoE
+ fun:_ZN5blink15RenderBlockFlow27createLineBoxesFromBidiRunsEjRNS_11BidiRunListINS_7BidiRunEEERKNS_14InlineIteratorERNS_8LineInfoERNS_21VerticalPositionCacheEPS2_RN3WTF6VectorINS_15WordMeasurementELm64ENSD_16DefaultAllocatorEEE
+ fun:_ZN5blink15RenderBlockFlow26layoutRunsAndFloatsInRangeERNS_15LineLayoutStateERNS_12BidiResolverINS_14InlineIteratorENS_7BidiRunEEERKS4_RKNS_10BidiStatusE
+ fun:_ZN5blink15RenderBlockFlow19layoutRunsAndFloatsERNS_15LineLayoutStateE
+ fun:_ZN5blink15RenderBlockFlow20layoutInlineChildrenEbRNS_10LayoutUnitES2_S1_
+ ...
+ fun:_ZN5blink15RenderBlockFlow11layoutBlockEb
+}
+{
+ bug_397066_b
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink13InlineFlowBox24computeLogicalBoxHeightsEPNS_13RootInlineBoxERNS_10LayoutUnitES4_RiS5_RbS6_bRN3WTF7HashMapIPKNS_13InlineTextBoxESt4pairINS7_6VectorIPKNS_14SimpleFontDataELm0ENS7_16DefaultAllocatorEEENS_13GlyphOverflowEENS7_7PtrHashISB_EENS7_10HashTraitsISB_EENSN_ISK_EESH_EENS_12FontBaselineERNS_21VerticalPositionCacheE
+ fun:_ZN5blink13RootInlineBox26alignBoxesInBlockDirectionENS_10LayoutUnitERN3WTF7HashMapIPKNS_13InlineTextBoxESt4pairINS2_6VectorIPKNS_14SimpleFontDataELm0ENS2_16DefaultAllocatorEEENS_13GlyphOverflowEENS2_7PtrHashIS6_EENS2_10HashTraitsIS6_EENSI_ISF_EESC_EERNS_21VerticalPositionCacheE
+ ...
+ fun:_ZN5blink15RenderBlockFlow19layoutRunsAndFloatsERNS_15LineLayoutStateE
+ fun:_ZN5blink15RenderBlockFlow20layoutInlineChildrenEbRNS_10LayoutUnitES2_S1_
+ fun:_ZN5blink15RenderBlockFlow15layoutBlockFlowEbRNS_10LayoutUnitERNS_18SubtreeLayoutScopeE
+ fun:_ZN5blink15RenderBlockFlow11layoutBlockEb
+}
+{
+ bug_397066_c
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink13InlineFlowBox26placeBoxesInBlockDirectionENS_10LayoutUnitES1_ibRS1_S2_S2_RbS2_S2_S3_S3_NS_12FontBaselineE
+ fun:_ZN5blink13RootInlineBox26alignBoxesInBlockDirectionENS_10LayoutUnitERN3WTF7HashMapIPKNS_13InlineTextBoxESt4pairINS2_6VectorIPKNS_14SimpleFontDataELm0ENS2_16DefaultAllocatorEEENS_13GlyphOverflowEENS2_7PtrHashIS6_EENS2_10HashTraitsIS6_EENSI_ISF_EESC_EERNS_21VerticalPositionCacheE
+ ...
+ fun:_ZN5blink15RenderBlockFlow19layoutRunsAndFloatsERNS_15LineLayoutStateE
+ fun:_ZN5blink15RenderBlockFlow20layoutInlineChildrenEbRNS_10LayoutUnitES2_S1_
+ fun:_ZN5blink15RenderBlockFlow15layoutBlockFlowEbRNS_10LayoutUnitERNS_18SubtreeLayoutScopeE
+ fun:_ZN5blink15RenderBlockFlow11layoutBlockEb
+}
+{
+ bug_397066_d
+ Memcheck:Uninitialized
+ fun:_ZN5blink13InlineFlowBox45clearDescendantsHaveSameLineHeightAndBaselineEv
+ fun:_ZN5blink13InlineFlowBox9addToLineEPNS_9InlineBoxE
+ ...
+ fun:_ZN5blink15RenderBlockFlow13constructLineERNS_11BidiRunListINS_7BidiRunEEERKNS_8LineInfoE
+ fun:_ZN5blink15RenderBlockFlow27createLineBoxesFromBidiRuns*
+}
+{
+ bug_397066_e
+ Memcheck:Uninitialized
+ fun:_ZN5blink13InlineFlowBox9addToLineEPNS_9InlineBox*
+ fun:_ZN5blink15RenderBlockFlow*
+ ...
+ fun:_ZN5blink15RenderBlockFlow27createLineBoxesFromBidiRunsEjRNS*
+ fun:_ZN5blink15RenderBlockFlow26layoutRunsAndFloatsInRangeERNS_15LineLayoutStateERNS_12BidiResolver*
+ fun:_ZN5blink15RenderBlockFlow19layoutRunsAndFloatsERNS_15LineLayoutState*
+}
+{
+ bug_397066_f
+ Memcheck:Uninitialized
+ fun:_ZNK5blink13InlineFlowBox35constrainToLineTopAndBottomIfNeededERNS_10LayoutRectE
+ fun:_ZN5blink13InlineFlowBox28paintBoxDecorationBackgroundERNS_9PaintInfoERKNS_11LayoutPointE
+ fun:_ZN5blink13InlineFlowBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZN5blink13InlineFlowBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZN5blink13RootInlineBox5paintERNS_9PaintInfoERKNS_11LayoutPointENS_10LayoutUnitES6_
+ fun:_ZNK5blink17RenderLineBoxList5paintEPNS_20RenderBoxModelObjectERNS_9PaintInfoERKNS_11LayoutPointE
+}
+{
+ bug_397075_a
+ Memcheck:Uninitialized
+ fun:_ZN2v88internal6Object11SetPropertyEPNS0_14LookupIteratorENS0_6HandleIS1_EENS0_10StrictModeENS1_14StoreFromKeyedE
+ fun:_ZN2v88internal6Object11SetPropertyENS0_6HandleIS1_EENS2_INS0_4NameEEES3_NS0_10StrictModeENS1_14StoreFromKeyedE
+ fun:_ZN2v88internal7Runtime17SetObjectPropertyEPNS0_7IsolateENS0_6HandleINS0_6ObjectEEES6_S6_NS0_10StrictModeE
+ fun:_ZN2v86Object3SetENS_6HandleINS_5ValueEEES3_
+ fun:_ZN18WebCoreTestSupport21injectInternalsObjectEN2v85LocalINS0_7ContextEEE
+ fun:_ZN5blink17WebTestingSupport21injectInternalsObjectEPNS_13WebLocalFrameE
+ fun:_ZN7content15BlinkTestRunner20DidClearWindowObjectEPN5blink13WebLocalFrameE
+ fun:_ZN7content14RenderViewImpl20didClearWindowObjectEPN5blink13WebLocalFrameE
+ fun:_ZN7content15RenderFrameImpl20didClearWindowObjectEPN5blink13WebLocalFrameE
+ fun:_ZThn16_N7content15RenderFrameImpl20didClearWindowObjectEPN5blink13WebLocalFrameE
+ fun:_ZN5blink21FrameLoaderClientImpl39dispatchDidClearWindowObjectInMainWorldEv
+ fun:_ZN5blink11FrameLoader39dispatchDidClearWindowObjectInMainWorldEv
+ fun:_ZN5blink16ScriptController11windowShellERNS_15DOMWrapperWorldE
+ fun:_ZN5blink11toV8ContextEPNS_10LocalFrameERNS_15DOMWrapperWorldE
+ fun:_ZNK5blink17WebLocalFrameImpl22mainWorldScriptContextEv
+ fun:_ZN5blink17WebTestingSupport20resetInternalsObjectEPNS_13WebLocalFrameE
+ fun:_ZN7content15BlinkTestRunner5ResetEv
+ fun:_ZN7content26ShellContentRendererClient17RenderViewCreatedEPNS_10RenderViewE
+ fun:_ZN7content14RenderViewImpl10InitializeEPNS_20RenderViewImplParamsE
+ fun:_ZN7content14RenderViewImpl6CreateEibRKNS_19RendererPreferencesERKNS_14WebPreferencesEiiilRKSbItN4base20string16_char_traitsESaItEEbbibbiRKN5blink13WebScreenInfoE17AccessibilityMode
+ fun:_ZN7content16RenderThreadImpl15OnCreateNewViewERK18ViewMsg_New_Params
+}
+{
+ bug_397075_b
+ Memcheck:Uninitialized
+ fun:_ZN2v88internal6Object11SetPropertyEPNS0_14LookupIteratorENS0_6HandleIS1_EENS0_10StrictModeENS1_14StoreFromKeyedE
+ fun:_ZN2v88internal6Object11SetPropertyENS0_6HandleIS1_EENS2_INS0_4NameEEES3_NS0_10StrictModeENS1_14StoreFromKeyedE
+ ...
+ fun:_ZN2v88internalL6InvokeEbNS0_6HandleINS0_10JSFunctionEEENS1_INS0_6ObjectEEEiPS5_
+ fun:_ZN2v88internal9Execution4CallEPNS0_7IsolateENS0_6HandleINS0_6ObjectEEES6_iPS6_b
+}
+{
+ bug_398349_a
+ Memcheck:Uninitialized
+ fun:_ZN2v88internal19JSObjectWalkVisitorINS0_29AllocationSiteCreationContextEE13StructureWalkENS0_6HandleINS0_8JSObjectEEE
+ fun:_ZN2v88internal8JSObject8DeepWalkENS0_6HandleIS1_EEPNS0_29AllocationSiteCreationContextE
+ fun:_ZN2v88internal27Runtime_CreateObjectLiteralEiPPNS0_6ObjectEPNS0_7IsolateE
+}
+{
+ bug_398349_b
+ Memcheck:Uninitialized
+ fun:_ZN2v88internal19JSObjectWalkVisitorINS0_26AllocationSiteUsageContextEE13StructureWalkENS0_6HandleINS0_8JSObjectEEE
+ fun:_ZN2v88internal8JSObject8DeepCopyENS0_6HandleIS1_EEPNS0_26AllocationSiteUsageContextENS1_13DeepCopyHintsE
+ fun:_ZN2v88internal27Runtime_CreateObjectLiteralEiPPNS0_6ObjectEPNS0_7IsolateE
+}
+{
+ bug_399853_a
+ Memcheck:Uninitialized
+ fun:_ZNK8SkStroke10strokePathERK6SkPathPS0_
+ fun:_ZNK11SkStrokeRec11applyToPathEP6SkPathRKS0_
+ fun:_ZNK7SkPaint11getFillPathERK6SkPathPS0_PK6SkRect
+}
+{
+ bug_399853_b
+ Memcheck:Uninitialized
+ fun:_ZNK8SkStroke10strokePathERK6SkPathPS0_
+ fun:_ZNK11SkStrokeRec11applyToPathEP6SkPathRKS0_
+ fun:_ZN15SkScalerContext15internalGetPathERK7SkGlyphP6SkPathS4_P8SkMatrix
+}
+{
+ bug_417119
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN7storage27TaskRunnerBoundObserverListINS_18FileUpdateObserverEPS1_EaSERKS3_
+ fun:_ZN7storage26FileSystemOperationContext20set_update_observersERKNS_27TaskRunnerBoundObserverListINS_18FileUpdateObserverEPS2_EE
+ fun:_ZNK7storage32SandboxFileSystemBackendDelegate32CreateFileSystemOperationContextERKNS_13FileSystemURLEPNS_17FileSystemContextEPN4base4File5ErrorE
+ fun:_ZNK7storage24SandboxFileSystemBackend25CreateFileSystemOperationERKNS_13FileSystemURLEPNS_17FileSystemContextEPN4base4File5ErrorE
+ fun:_ZN7storage17FileSystemContext25CreateFileSystemOperationERKNS_13FileSystemURLEPN4base4File5ErrorE
+}
+{
+ bug_417526
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN16sync_file_system13drive_backend14SyncEngineTest5SetUpEv
+}
+{
+ bug_431209a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN8remoting13ClipboardAuraC1E13scoped_refptrIN4base22SingleThreadTaskRunnerEE
+ fun:_ZN8remoting17ClipboardAuraTest5SetUpEv
+}
+{
+ bug_431209b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN2ui9Clipboard6CreateEv
+ fun:_ZN2ui9Clipboard19GetForCurrentThreadEv
+ fun:_ZN2ui21ScopedClipboardWriterD1Ev
+ fun:_ZN8remoting13ClipboardAura4Core20InjectClipboardEventERKNS_8protocol14ClipboardEventE
+}
+{
+ bug_431213_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3gin22CreateFunctionTemplateIF*LocalINS6_16FunctionTemplateEEEPNS6_7IsolateEN4base8CallbackIT*
+ fun:_ZN3gin12_GLOBAL__N_114CallbackTraitsIMN*CreateTemplateEPN2v87IsolateES6_
+ fun:_ZN3gin21ObjectTemplateBuilder9SetMethodIMN*0_RKN4base16BasicStringPieceISsEERKT_
+ fun:_ZN*24GetObjectTemplateBuilderEPN2v87IsolateE
+}
+{
+ bug_431213_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3gin22CreateFunctionTemplateIF*IsolateEN4base8CallbackIT*
+ fun:_ZN3gin12_GLOBAL__N_114CallbackTraitsIF*
+ fun:_ZN3gin21ObjectTemplateBuilder9SetMethod*RKN4base16BasicStringPieceISsEERKT_
+ fun:_ZN4mojo2js*GetModuleEPN2v87IsolateE
+}
+{
+ bug_431213_c
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3gin22CreateFunctionTemplateIF*IsolateEN4base8CallbackIT*
+ fun:_ZN3gin12_GLOBAL__N_114CallbackTraitsI*
+ fun:_ZN3gin21ObjectTemplateBuilder9SetMethod*RKN4base16BasicStringPieceISsEERKT_
+ fun:_ZN10extensions19TestServiceProvider24GetObjectTemplateBuilderEPN2v87IsolateE
+}
+{
+ bug_431213_d
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3gin22CreateFunctionTemplateIF*IsolateEN4base8CallbackIT*
+ fun:_ZN3gin12_GLOBAL__N_114CallbackTraitsI*
+ fun:_ZN3gin21ObjectTemplateBuilder9SetMethod*RKN4base16BasicStringPieceISsEERKT_
+ fun:_ZN10extensions12_GLOBAL__N_111TestNatives24GetObjectTemplateBuilderEPN2v87IsolateE
+}
+{
+ bug_431213_e
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3gin22CreateFunctionTemplateIF*IsolateEN4base8CallbackIT*
+ fun:_ZN3gin12_GLOBAL__N_114CallbackTraitsIF*
+ fun:_ZN3gin21ObjectTemplateBuilder9SetMethodIF*
+ fun:_ZN4mojo3edk2js4Core9GetModuleEPN2v87IsolateE
+ fun:_ZN10extensions18ApiTestEnvironment15RegisterModulesEv
+ fun:_ZN10extensions18ApiTestEnvironmentC1EPNS_27ModuleSystemTestEnvironmentE
+ fun:_ZN10extensions11ApiTestBase5SetUpEv
+}
+{
+ bug_431213_f
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3gin22CreateFunctionTemplateIF*IsolateEN4base8CallbackIT*
+ fun:_ZN3gin12_GLOBAL__N_114CallbackTraitsIF*
+ fun:_ZN3gin21ObjectTemplateBuilder9SetMethodIF*
+ fun:_ZN4mojo3edk2js7Support9GetModuleEPN2v87IsolateE
+ fun:_ZN10extensions18ApiTestEnvironment15RegisterModulesEv
+ fun:_ZN10extensions18ApiTestEnvironmentC1EPNS_27ModuleSystemTestEnvironmentE
+ fun:_ZN10extensions11ApiTestBase5SetUpEv
+}
+{
+ bug_436172
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3IPC11SyncChannelC1EPNS_8ListenerERK13scoped_refptrIN4base22SingleThreadTaskRunnerEEPNS4_13WaitableEventE
+ ...
+ fun:_ZN7content28BrowserGpuChannelHostFactory16EstablishRequest12FinishOnMainEv
+}
+{
+ bug_436172_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3IPC7Channel6CreateERKNS_13ChannelHandleENS0_4ModeEPNS_8ListenerE*
+ fun:_ZN3IPC12_GLOBAL__N_122PlatformChannelFactory12BuildChannelEPNS_8ListenerE
+ fun:_ZN3IPC12ChannelProxy7Context13CreateChannelE10scoped_ptrINS_14ChannelFactoryEN4base14DefaultDeleterIS3_EEE
+ ...
+ fun:_ZN7content28BrowserGpuChannelHostFactory16EstablishRequest12FinishOnMainEv
+}
+{
+ bug_436172_c
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3IPC12ChannelProxy7ContextC1EPNS_8ListenerERK13scoped_refptrIN4base22SingleThreadTaskRunnerEE
+ fun:_ZN3IPC11SyncChannel11SyncContextC1EPNS_8ListenerERK13scoped_refptrIN4base22SingleThreadTaskRunnerEEPNS5_13WaitableEventE
+ fun:_ZN3IPC11SyncChannelC1EPNS_8ListenerERK13scoped_refptrIN4base22SingleThreadTaskRunnerEEPNS4_13WaitableEventE
+ fun:_ZN3IPC11SyncChannel6CreateEPNS_8ListenerERK13scoped_refptrIN4base22SingleThreadTaskRunnerEEPNS4_13WaitableEventE
+ ...
+ fun:_ZN7content28BrowserGpuChannelHostFactory16EstablishRequest12FinishOnMainEv
+}
+{
+ bug_436172_d
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content14GpuChannelHost7ConnectERKN3IPC13ChannelHandleEPN4base13WaitableEventE
+ fun:_ZN7content14GpuChannelHost6CreateEPNS_21GpuChannelHostFactoryERKN3gpu7GPUInfoERKN3IPC13ChannelHandleEPN4base13WaitableEventEPNS3_22GpuMemoryBufferManagerE
+ fun:_ZN7content28BrowserGpuChannelHostFactory21GpuChannelEstablishedEv
+ fun:_ZN7content28BrowserGpuChannelHostFactory16EstablishRequest12FinishOnMainEv
+}
+{
+ Expected_leak_due_to_gpu_thread_leaked_by_lazy_instance
+ Memcheck:Leak
+ fun:calloc
+ fun:pthread_setspecific
+ ...
+ fun:_ZN3gpu22InProcessCommandBuffer21InitializeOnGpuThreadERKNS0_27InitializeOnGpuThreadParamsE
+}
+{
+ bug_441333
+ Memcheck:Uninitialized
+ fun:av_packet_unpack_dictionary
+ fun:add_metadata_from_side_data
+}
+{
+ bug_448700_a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN10extensions11ApiTestBase15RegisterModulesEv
+ fun:_ZN10extensions11ApiTestBase5SetUpEv
+}
+{
+ bug_448700_b
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN10extensions12_GLOBAL__N_111TestNatives24GetObjectTemplateBuilderEPN2v87IsolateE
+ fun:_ZN3gin13WrappableBase14GetWrapperImplEPN2v87IsolateEPNS_11WrapperInfoE
+ fun:_ZN3gin9WrappableIN10extensions12_GLOBAL__N_111TestNativesEE10GetWrapperEPN2v87IsolateE
+ fun:_ZN3gin12CreateHandleIN10extensions12_GLOBAL__N_111TestNativesEEENS_6HandleIT_EEPN2v87IsolateEPS5_
+ fun:_ZN10extensions12_GLOBAL__N_111TestNatives6CreateEPN2v87IsolateERKN4base8CallbackIFvvEEE
+ fun:_ZN10extensions11ApiTestBase7RunTestERKSsS2_
+ fun:_ZN10extensions30MojoPrivateApiTest_Define_Test8TestBodyEv
+}
+{
+ bug_449156_a
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN7leveldb2DB4OpenERKNS_7OptionsERKSsPPS0_
+ fun:_ZN7storage21SandboxOriginDatabase4InitENS0_10InitOptionENS0_14RecoveryOptionE
+ fun:_ZN7storage21SandboxOriginDatabase16GetPathForOriginERKSsPN4base8FilePathE
+ fun:_ZN7storage32SandboxPrioritizedOriginDatabase16GetPathForOriginERKSsPN4base8FilePathE
+ fun:_ZN7storage18ObfuscatedFileUtil21GetDirectoryForOriginERK4GURLbPN4base4File5ErrorE
+ fun:_ZN7storage18ObfuscatedFileUtil28GetDirectoryForOriginAndTypeERK4GURLRKSsbPN4base4File5ErrorE
+ fun:_ZN7storage12_GLOBAL__N_130OpenFileSystemOnFileTaskRunnerEPNS_18ObfuscatedFileUtilERK4GURLNS_14FileSystemTypeENS_18OpenFileSystemModeEPN4base4File5ErrorE
+}
+{
+ bug_449156_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7storage18ObfuscatedFileUtil8MarkUsedEv
+ fun:_ZN7storage18ObfuscatedFileUtil20GetDirectoryDatabaseERKNS_13FileSystemURLEb
+}
+{
+ bug_449175_b
+ Memcheck:Leak
+ fun:calloc
+ fun:pthread_setspecific
+ fun:_ZN4base8internal19ThreadLocalPlatform14SetValueInSlotEjPv
+ fun:_ZN4base18ThreadLocalPointerIN4mojo6common15MessagePumpMojoEE3SetEPS3_
+ fun:_ZN4mojo6common15MessagePumpMojoC1Ev
+ fun:_ZN4mojo6common15MessagePumpMojo6CreateEv
+}
+{
+ bug_476940
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF9BitVector13OutOfLineBits6createEm
+ fun:_ZN3WTF9BitVector15resizeOutOfLineEm
+ fun:_ZN3WTF9BitVector10ensureSizeEm
+ fun:_ZN3WTF9BitVectorC2Em
+ fun:_ZN5blink10UseCounter9CountBitsC2Ev
+ fun:_ZN5blink10UseCounterC1Ev
+ fun:_ZN5blink4PageC1ERNS0_11PageClientsE
+}
+{
+ bug_484456
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content26GpuProcessTransportFactory23CreatePerCompositorDataEPN2ui10CompositorE
+ fun:_ZN7content26GpuProcessTransportFactory19CreateOutputSurfaceEN4base7WeakPtrIN2ui10CompositorEEE
+ fun:_ZN2ui10Compositor23RequestNewOutputSurfaceEv
+ fun:_ZN2cc13LayerTreeHost23RequestNewOutputSurfaceEv
+ fun:_ZN2cc17SingleThreadProxy23RequestNewOutputSurfaceEv
+}
+{
+ bug_484459
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content17ResourceScheduler15OnClientCreatedEiibb
+ fun:_ZN7content26ResourceDispatcherHostImpl23OnRenderViewHostCreatedEiibb
+}
+{
+ bug_492821
+ Memcheck:Uninitialized
+ fun:_ZN5blink17CSSPropertyParser9validUnitEPNS_14CSSParserValueENS0_5UnitsENS_13CSSParserModeENS0_31ReleaseParsedCalcValueConditionE
+ fun:_ZN5blink17CSSPropertyParser9validUnitEPNS_14CSSParserValueENS0_5UnitsENS0_31ReleaseParsedCalcValueConditionE
+ ...
+ fun:_ZN5blink17CSSPropertyParser10parseValueENS_13CSSPropertyIDEb
+ fun:_ZN5blink17CSSPropertyParser10parseValueENS_13CSSPropertyID*
+ fun:_ZN5blink13CSSParserImpl23consumeDeclarationValueENS_19CSSParserTokenRangeENS_13CSSPropertyIDEbNS_13StyleRuleBase4TypeE
+}
+{
+ bug_514434
+ Memcheck:Leak
+ fun:malloc
+ fun:__netlink_request
+ fun:getifaddrs_internal
+ fun:getifaddrs
+ fun:_ZN3net25HaveOnlyLoopbackAddressesEv
+ fun:_ZN3net16HostResolverImpl16LoopbackProbeJob7DoProbeEv
+}
+{
+ bug_514443
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZNSs4_Rep9_S_createEmmRKSaIcE
+ fun:_ZNSs9_M_mutateEmmm
+ fun:_ZNSs15_M_replace_safeEmmPKcm
+ fun:_ZN4base8internal13CopyToStringTISsEEvRKNS_16BasicStringPieceIT_EEPS3_
+ fun:_ZN4base8internal12CopyToStringERKNS_16BasicStringPieceISsEEPSs
+ fun:_ZNK4base16BasicStringPieceISsE12CopyToStringEPSs
+ fun:_ZN4base8FilePathC1ENS_16BasicStringPieceISsEE
+}
+{
+ bug_514868
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base12_GLOBAL__N_112CreateThreadEmbPNS_14PlatformThread8DelegateEPNS_20PlatformThreadHandleENS_14ThreadPriorityE
+ fun:_ZN4base14PlatformThread18CreateWithPriorityEmPNS0_8DelegateEPNS_20PlatformThreadHandleENS_14ThreadPriorityE
+ fun:_ZN4base6Thread16StartWithOptionsERKNS0_7OptionsE
+ fun:_ZN7content17BrowserThreadImpl16StartWithOptionsERKN4base6Thread7OptionsE
+ fun:_ZN7content17TestBrowserThread13StartIOThreadEv
+ fun:_ZN22BrowserProcessImplTest21StartSecondaryThreadsEv
+ fun:_ZN37BrowserProcessImplTest_LifeCycle_Test8TestBodyEv
+}
+{
+ bug_515263
+ Memcheck:Uninitialized
+ fun:_ZNK5blink6MemberINS_15ResourceFetcherEE3getEv
+ fun:_ZN5blink13VisitorHelperINS_27InlinedGlobalMarkingVisitorEE5traceINS_15ResourceFetcherEEEvRKNS_6MemberIT_EE
+ fun:_ZN5blink14ResourceLoader9traceImplINS_27InlinedGlobalMarkingVisitorEEEvT_
+ fun:_ZN5blink14ResourceLoader5traceENS_27InlinedGlobalMarkingVisitorE
+ fun:_ZN5blink10TraceTraitINS_14ResourceLoaderEE5traceEPNS_7VisitorEPv
+ fun:_ZN5blink13CallbackStack4Item4callEPNS_7VisitorE
+}
+{
+ bug_515266
+ Memcheck:Uninitialized
+ fun:_ZN3WTF16VectorBufferBaseIN5blink6MemberINS1_11MessagePortEEELb1ENS1_13HeapAllocatorEE6bufferEv
+ fun:_ZN3WTF6VectorIN5blink6MemberINS1_11MessagePortEEELm1ENS1_13HeapAllocatorEE5traceINS1_27InlinedGlobalMarkingVisitorEEEvT_
+ fun:_ZN5blink10TraceTraitIN3WTF6VectorINS_6MemberINS_11MessagePortEEELm1ENS_13HeapAllocatorEEEE5traceEPNS_7VisitorEPv
+ fun:_ZN5blink13CallbackStack4Item4callEPNS_7VisitorE
+ fun:_ZN5blink4Heap25popAndInvokeTraceCallbackEPNS_7VisitorE
+ fun:_ZN5blink4Heap19processMarkingStackEPNS_7VisitorE
+}
+{
+ bug_515330
+ Memcheck:Leak
+ fun:malloc
+ ...
+ fun:_ZN3net13CTLogVerifier4InitERKN4base16BasicStringPieceISsEE
+ fun:_ZN3net13CTLogVerifier6CreateERKN4base16BasicStringPieceISsEES5_S5_
+ fun:_ZN3net2ct30CreateLogVerifiersForKnownLogsEv
+}
+{
+ bug_522049
+ Memcheck:Unaddressable
+ ...
+ fun:_ZNKSt8_Rb_treeISsSt4pairIKSsN2ui13TextInputModeEESt10_Select1stIS4_ESt4lessISsESaIS4_EE4findERS1_
+ fun:_ZNKSt3mapISsN2ui13TextInputModeESt4lessISsESaISt4pairIKSsS1_EEE4findERS5_
+ fun:_ZN12_GLOBAL__N_116ConvertInputModeERKN5blink9WebStringE
+ fun:_ZN7content12RenderWidget20UpdateTextInputStateENS0_7ShowImeENS0_12ChangeSourceE
+ fun:_ZN7content12RenderWidget24WillBeginCompositorFrameEv
+ fun:_ZN7content22RenderWidgetCompositor18WillBeginMainFrameEv
+ fun:_ZThn8_N7content22RenderWidgetCompositor18WillBeginMainFrameEv
+ fun:_ZN2cc13LayerTreeHost18WillBeginMainFrameEv
+ fun:_ZN2cc17SingleThreadProxy16DoBeginMainFrameERKNS_14BeginFrameArgsE
+ fun:_ZN2cc17SingleThreadProxy20CompositeImmediatelyEN4base9TimeTicksE
+ fun:_ZN2cc13LayerTreeHost9CompositeEN4base9TimeTicksE
+ fun:_ZN7content12_GLOBAL__N_135RenderWidgetCompositorOutputSurface20SynchronousCompositeEv
+}
+{
+ bug_525328
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorISt13_Rb_tree_nodeISt4pairIKSsPN3net20URLRequestJobFactory15ProtocolHandlerEEEE8allocateEmPKv
+ fun:_ZNSt8_Rb_treeISsSt4pairIKSsPN3net20URLRequestJobFactory15ProtocolHandlerEESt10_Select1stIS6_ESt4lessISsESaIS6_EE11_M_get_nodeEv
+ fun:_ZNSt8_Rb_treeISsSt4pairIKSsPN3net20URLRequestJobFactory15ProtocolHandlerEESt10_Select1stIS6_ESt4lessISsESaIS6_EE14_M_create_nodeIJS6_EEEPSt13_Rb_tree_nodeIS6_EDpOT_
+ fun:_ZNSt8_Rb_treeISsSt4pairIKSsPN3net20URLRequestJobFactory15ProtocolHandlerEESt10_Select1stIS6_ESt4lessISsESaIS6_EE10_M_insert_IS6_EESt17_Rb_tree_iteratorIS6_EPKSt18_Rb_tree_node_baseSI_OT_
+}
+{
+ bug_536907_a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content26PushMessagingMessageFilterC1EiPNS_27ServiceWorkerContextWrapperE
+ fun:_ZN7content21RenderProcessHostImpl20CreateMessageFiltersEv
+ fun:_ZN7content21RenderProcessHostImpl4InitEv
+ fun:_ZN7content22RenderFrameHostManager14InitRenderViewEPNS_18RenderViewHostImplEi
+ fun:_ZN7content22RenderFrameHostManager8NavigateERK4GURLRKNS_20FrameNavigationEntryERKNS_19NavigationEntryImplE
+ fun:_ZN7content13NavigatorImpl15NavigateToEntryEPNS_13FrameTreeNodeERKNS_20FrameNavigationEntryERKNS_19NavigationEntryImplENS_20NavigationController10ReloadTypeEb
+ fun:_ZN7content13NavigatorImpl22NavigateToPendingEntryEPNS_13FrameTreeNodeERKNS_20FrameNavigationEntryENS_20NavigationController10ReloadTypeEb
+ fun:_ZN7content24NavigationControllerImpl30NavigateToPendingEntryInternalENS_20NavigationController10ReloadTypeE
+ fun:_ZN7content24NavigationControllerImpl22NavigateToPendingEntryENS_20NavigationController10ReloadTypeE
+ fun:_ZN7content24NavigationControllerImpl9LoadEntryE10scoped_ptrINS_19NavigationEntryImplEN4base14DefaultDeleterIS2_EEE
+ fun:_ZN7content24NavigationControllerImpl17LoadURLWithParamsERKNS_20NavigationController13LoadURLParamsE
+ fun:_ZN7content5Shell15LoadURLForFrameERK4GURLRKSs
+}
+{
+ bug_542543
+ Memcheck:Leak
+ fun:_Znw*
+ ...
+ fun:_ZN8IOThread4InitEv
+ fun:_ZN7content17BrowserThreadImpl4InitEv
+ fun:_ZN7content21TestBrowserThreadImpl4InitEv
+ fun:_ZN4base6Thread10ThreadMainEv
+ fun:_ZN4base12_GLOBAL__N_110ThreadFuncEPv
+}
+{
+ bug_545259a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content21RenderProcessHostImpl4InitEv
+ fun:_ZN7content22RenderFrameHostManager14InitRenderViewEPNS_18RenderViewHostImplEPNS_20RenderFrameProxyHostE
+}
+{
+ bug_545259b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content26PushMessagingMessageFilterC1EiPNS_27ServiceWorkerContextWrapperE
+ fun:_ZN7content21RenderProcessHostImpl20CreateMessageFiltersEv
+ fun:_ZN7content21RenderProcessHostImpl4InitEv
+ fun:_ZN7content22RenderFrameHostManager14InitRenderViewEPNS_18RenderViewHostImplEPNS_20RenderFrameProxyHostE
+}
+{
+ bug_557778
+ Memcheck:Uninitialized
+ fun:vp9_pick_inter_mode
+ fun:nonrd_pick_sb_modes
+}
+{
+ bug_558179
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7content27ServiceWorkerContextWrapperC1EPNS_14BrowserContextE
+ fun:_ZN7content20StoragePartitionImpl6CreateEPNS_14BrowserContextEbRKN4base8FilePathE
+ fun:_ZN7content23StoragePartitionImplMap3GetERKSsS2_b
+ fun:_ZN7content12_GLOBAL__N_129GetStoragePartitionFromConfigEPNS_14BrowserContextERKSsS4_b
+ fun:_ZN7content14BrowserContext26GetStoragePartitionForSiteEPS0_RK4GURL
+}
+{
+ bug_562703
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN7storage12QuotaManager14LazyInitializeEv
+ fun:_ZN7storage12QuotaManager26GetUsageAndQuotaForWebAppsERK4GURLNS_11StorageTypeERKN4base8CallbackIFvNS_15QuotaStatusCodeEllELNS5_8internal8CopyModeE1EEE
+}
+{
+ bug_562712
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base8internal20PostTaskAndReplyImpl16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEESA_
+ fun:_ZN4base10TaskRunner16PostTaskAndReplyERKN15tracked_objects8LocationERKNS_8CallbackIFvvEEES9_
+ fun:_ZN10disk_cache15SimpleEntryImpl13CloseInternalEv
+ fun:_ZN10disk_cache15SimpleEntryImpl24RunNextOperationIfNeededEv
+ fun:_ZN10disk_cache15SimpleEntryImpl5CloseEv
+}
+{
+ bug_562718a
+ Memcheck:Uninitialized
+ fun:_ZN5blink10PaintLayer24removeFilterInfoIfNeededEv
+ fun:_ZN5blink10PaintLayerD1Ev
+ fun:_ZN5blink10PaintLayerD0Ev
+ fun:_ZN3WTF15OwnedPtrDeleterIN5blink10PaintLayerEE9deletePtrEPS2_
+ fun:_ZN3WTF6OwnPtrIN5blink10PaintLayerEE5clearEv
+ fun:_ZN3WTF6OwnPtrIN5blink10PaintLayerEEaSEDn
+}
+{
+ bug_562718b
+ Memcheck:Uninitialized
+ fun:_ZN5blink10PaintLayer31setNeedsCompositingInputsUpdateEv
+}
+{
+ bug_562718c
+ Memcheck:Uninitialized
+ fun:_ZN5blink10PaintLayer30updateDescendantDependentFlagsEv
+}
+{
+ bug_562718d
+ Memcheck:Uninitialized
+ fun:_ZN7GrGLGpu18createCopyProgramsEv
+ fun:_ZN7GrGLGpuC1EP11GrGLContextP9GrContext
+ fun:_ZN7GrGLGpu6CreateElRK16GrContextOptionsP9GrContext
+ fun:_ZN5GrGpu6CreateE9GrBackendlRK16GrContextOptionsP9GrContext
+ fun:_ZN9GrContext4initE9GrBackendlRK16GrContextOptions
+}
+{
+ bug_562718e
+ Memcheck:Uninitialized
+ fun:_ZN8GrGLCaps8initGLSLERK15GrGLContextInfo
+ fun:_ZN8GrGLCaps4initERK16GrContextOptionsRK15GrGLContextInfoPK13GrGLInterface
+ fun:_ZN8GrGLCapsC1ERK16GrContextOptionsRK15GrGLContextInfoPK13GrGLInterface
+ fun:_ZN15GrGLContextInfoC1ERKNS_15ConstructorArgsE
+ fun:_ZN11GrGLContextC2ERKN15GrGLContextInfo15ConstructorArgsE
+}
+{
+ bug_562718f
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink17PaintLayerPainter13paintChildrenEjRNS_15GraphicsContextERKNS_22PaintLayerPaintingInfoEj
+ fun:_ZN5blink17PaintLayerPainter18paintLayerContentsERNS_15GraphicsContextERKNS_22PaintLayerPaintingInfoEjNS0_14FragmentPolicyE
+}
+{
+ bug_562718g
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink20PaintLayerCompositor14updateIfNeededEv
+ fun:_ZN5blink20PaintLayerCompositor23updateIfNeededRecursiveEv
+ ...
+ fun:_ZN5blink9FrameView29updateLifecyclePhasesInternalENS0_21LifeCycleUpdateOptionE
+}
+{
+ bug_562718h
+ Memcheck:Uninitialized
+ fun:_ZN5blink10PaintLayer35update3DTransformedDescendantStatusEv
+ fun:_ZN5blink10PaintLayer12hitTestLayerEPS0_S1_RNS_13HitTestResultERKNS_10LayoutRectERKNS_15HitTestLocationEbPKNS_24HitTestingTransformStateEPd
+ fun:_ZN5blink10PaintLayer7hitTestERNS_13HitTestResultE
+ fun:_ZN5blink10LayoutView24hitTestNoLifecycleUpdateERNS_13HitTestResultE
+}
+{
+ bug_562718i
+ Memcheck:Uninitialized
+ ...
+ fun:_ZNK5blink10PaintLayer37containingLayerForOutOfFlowPositionedEPKS0_Pb
+ fun:_ZN5blink10PaintLayer19updateLayerPositionEv
+ ...
+ fun:_ZN5blink10PaintLayer28updateLayerPositionRecursiveEv
+ fun:_ZN5blink10PaintLayer31updateLayerPositionsAfterLayoutEv
+ fun:_ZN5blink9FrameView6layoutEv
+}
+{
+ bug_571272
+ Memcheck:Overlap
+ fun:memcpy*
+ fun:_ZN3net10QuicFramer14EncryptPayloadENS_15EncryptionLevel*
+ fun:_ZN3net17QuicPacketCreator15SerializePacketEPcm
+}
+{
+ bug_571543
+ Memcheck:Leak
+ ...
+ fun:_ZN4base11MessageLoopC1ENS0_4TypeENS_8Callback*
+ fun:_ZN4base11MessageLoop13CreateUnboundENS0_4TypeENS_8Callback*
+ fun:_ZN4base6Thread16StartWithOptionsERKNS0_7OptionsE
+ fun:_ZN4base6Thread5StartEv
+ fun:_ZN4base6Thread22StartAndWaitForTestingEv
+ fun:_ZN12browser_sync12_GLOBAL__N_124SyncBackendRegistrarTest5SetUpEv
+}
+{
+ bug_576259_a
+ Memcheck:Uninitialized
+ fun:_ZN7GrGLGpu11bindTextureEiRK15GrTextureParamsbP11GrGLTexture
+ fun:_ZN7GrGLGpu12flushGLStateERK10GrPipelineRK20GrPrimitiveProcessor
+ fun:_ZN7GrGLGpu6onDrawERK10GrPipelineRK20GrPrimitiveProcessorPK6GrMeshi
+ fun:_ZN5GrGpu4drawERK10GrPipelineRK20GrPrimitiveProcessorPK6GrMeshi
+ fun:_ZN13GrVertexBatch6onDrawEP17GrBatchFlushState
+ ...
+ fun:_ZN2cc17SingleThreadProxy36ScheduledActionDrawAndSwapIfPossibleEv
+}
+{
+ bug_576259_b
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN8GrGLCaps4initERK16GrContextOptionsRK15GrGLContextInfoPK13GrGLInterface
+ fun:_ZN8GrGLCapsC1ERK16GrContextOptionsRK15GrGLContextInfoPK13GrGLInterface
+ fun:_ZN15GrGLContextInfoC1ERKNS_15ConstructorArgsE
+ fun:_ZN11GrGLContextC2ERKN15GrGLContextInfo15ConstructorArgsE
+ ...
+ fun:_ZN7GrGLGpu6CreateElRK16GrContextOptionsP9GrContext
+ fun:_ZN5GrGpu6CreateE9GrBackendlRK16GrContextOptionsP9GrContext
+ fun:_ZN9GrContext4initE9GrBackendlRK16GrContextOptions
+ fun:_ZN9GrContext6CreateE9GrBackendlRK16GrContextOptions
+ fun:_ZN9GrContext6CreateE9GrBackendl
+ ...
+ fun:_ZN2cc10GLRenderer18DrawRenderPassQuadEPNS_14DirectRenderer12DrawingFrameEPKNS_18RenderPassDrawQuadEPKN3gfx5QuadFE
+ fun:_ZN2cc10GLRenderer10DoDrawQuadEPNS_14DirectRenderer12DrawingFrameEPKNS_8DrawQuadEPKN3gfx5QuadFE
+}
+{
+ bug_562431_a
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10Partitions10fastMallocEmPKc
+ fun:_ZN5blink14PersistentBaseINS_15StyleFilterDataELNS_31WeaknessPersistentConfigurationE0ELNS_38CrossThreadnessPersistentConfigurationE0EEnwEm
+ fun:_ZN5blink26RefCountedGarbageCollectedINS_15StyleFilterDataEE13makeKeepAliveEv
+ fun:_ZN5blink26RefCountedGarbageCollectedINS_15StyleFilterDataEE3refEv
+ fun:_ZN3WTF6RefPtrIN5blink15StyleFilterDataEEC2IS2_EERKNS_6RawPtrIT_EEPNSt9enable_ifIXsr3std10is_base_ofIS2_S6_EE5valueEvE4typeE
+ fun:_ZN5blink7DataRefINS_15StyleFilterDataEE4initEv
+ fun:_ZN5blink13ComputedStyle18createInitialStyleEv
+ fun:_ZN5blink13ComputedStyle19mutableInitialStyleEv
+ fun:_ZN5blink13ComputedStyle12initialStyleEv
+ fun:_ZN5blink13ComputedStyle6createEv
+ fun:_ZN5blink13StyleResolver16styleForDocumentERNS_8DocumentE
+ ...
+ fun:_ZN5blink14DocumentLoader15finishedLoadingEd
+ fun:_ZN5blink14DocumentLoader14maybeLoadEmptyEv
+ fun:_ZN5blink14DocumentLoader24startLoadingMainResourceEv
+ fun:_ZN5blink11FrameLoader4initEv
+ fun:_ZN5blink10LocalFrame4initEv
+}
+{
+ bug_562431_b
+ Memcheck:Leak
+ fun:malloc
+ fun:_ZN3WTF10Partitions10fastMallocEmPKc
+ ...
+ fun:_ZN5blink15ContextFeatures13defaultSwitchEv
+ fun:_ZN5blink8DocumentC2ERKNS_12DocumentInitEh
+ fun:_ZN5blink12HTMLDocumentC1ERKNS_12DocumentInitEh
+ fun:_ZN5blink12HTMLDocument6createERKNS_12DocumentInitE
+ ...
+ fun:_ZN5blink14DocumentLoader15finishedLoadingEd
+ fun:_ZN5blink14DocumentLoader14maybeLoadEmptyEv
+ fun:_ZN5blink14DocumentLoader24startLoadingMainResourceEv
+ fun:_ZN5blink11FrameLoader4initEv
+ fun:_ZN5blink10LocalFrame4initEv
+}
+{
+ bug_581092_a
+ Memcheck:Leak
+ ...
+ fun:_ZN5blink15ContextFeatures13defaultSwitchEv
+ fun:_ZN5blink8DocumentC2ERKNS_12DocumentInitEh
+ fun:_ZN5blink12HTMLDocumentC1ERKNS_12DocumentInitEh
+ fun:_ZN5blink12HTMLDocument6createERKNS_12DocumentInitE
+ fun:_ZN5blink17DOMImplementation14createDocumentERKN3WTF6StringERKNS_12DocumentInitEb
+ fun:_ZN5blink14LocalDOMWindow14createDocumentERKN3WTF6StringERKNS_12DocumentInitEb
+ fun:_ZN5blink14LocalDOMWindow18installNewDocumentERKN3WTF6StringERKNS_12DocumentInitEb
+ fun:_ZN5blink14DocumentLoader15createWriterForEPKNS_*
+}
+{
+ bug_581092_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN5blink26RefCountedGarbageCollectedINS_15StyleFilterDataEE13makeKeepAliveEv
+ fun:_ZN5blink26RefCountedGarbageCollectedINS_15StyleFilterDataEE3refEv
+ ...
+ fun:_ZN5blink7DataRefINS_15StyleFilterDataEE4initEv
+ ...
+ fun:_ZN5blink13ComputedStyle6createEv
+ fun:_ZN5blink13StyleResolver16styleForDocumentERNS_8DocumentE
+ fun:_ZN5blink8Document6attachERKNS_4Node13AttachContextE
+ fun:_ZN5blink14LocalDOMWindow18installNewDocumentERKN3WTF6StringERKNS_12DocumentInitEb
+ fun:_ZN5blink14DocumentLoader15createWriterForEPKNS_*
+}
+{
+ bug_581959
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN9__gnu_cxx13new_allocatorIPN10disk_cache20SimpleEntryOperationEE8allocateEmPKv
+ fun:_ZNSt11_Deque_baseIN10disk_cache20SimpleEntryOperationESaIS1_EE15_M_allocate_mapEm
+ fun:_ZNSt11_Deque_baseIN10disk_cache20SimpleEntryOperationESaIS1_EE17_M_initialize_mapEm
+ fun:_ZNSt11_Deque_baseIN10disk_cache20SimpleEntryOperationESaIS1_EEC2Ev
+ fun:_ZNSt5dequeIN10disk_cache20SimpleEntryOperationESaIS1_EEC2Ev
+ fun:_ZN10disk_cache15SimpleEntryImplC1EN3net9CacheTypeERKN4base8FilePathEmNS0_14OperationsModeEPNS_17SimpleBackendImplEPNS1_6NetLogE
+ fun:_ZN10disk_cache17SimpleBackendImpl23CreateOrFindActiveEntryEmRKSs
+ fun:_ZN10disk_cache17SimpleBackendImpl11CreateEntryERKSsPPNS_5EntryERKN4base8CallbackIFviEEE
+}
+{
+ bug_586203
+ Memcheck:Unaddressable
+ fun:XInternAtoms
+ fun:_ZN2ui12X11AtomCacheC1EP9_XDisplayPPKc
+ fun:_ZN2ui20DeviceDataManagerX11C1Ev
+ fun:_ZN2ui20DeviceDataManagerX1114CreateInstanceEv
+ fun:_ZN2ui14X11EventSourceC1EPNS_22X11EventSourceDelegateEP9_XDisplay
+ fun:_ZN2ui18X11EventSourceGlibC1EP9_XDisplay
+}
+{
+ bug_586206
+ Memcheck:Leak
+ fun:calloc
+ fun:_XConnectXCB
+ fun:XOpenDisplay
+}
+{
+ bug_587270
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink21LayoutObjectChildList15removeChildNodeEPNS_12LayoutObjectES2_b
+ fun:_ZN5blink12LayoutObject11removeChildEPS0_
+ ...
+ fun:_ZN5blink12LayoutObject6removeEv
+ fun:_ZN5blink12LayoutObject15willBeDestroyedEv
+ ...
+ fun:_ZN5blink12LayoutObject34destroyAndCleanupAnonymousWrappersEv
+ fun:_ZN5blink4Node6detachERKNS0_13AttachContextE
+}
+{
+ bug_587283
+ Memcheck:Uninitialized
+ fun:_ZNK5blink12LayoutObject29invalidatePaintUsingContainerERKNS_20LayoutBoxModelObjectERKNS_10LayoutRectENS_23PaintInvalidationReasonE
+}
+{
+ bug_587283_b
+ Memcheck:Uninitialized
+ ...
+ fun:_ZN5blink15LayoutBlockFlow11layoutBlockEb
+ fun:_ZN5blink11LayoutBlock6layoutEv
+}
+{
+ bug_587544
+ Memcheck:Leak
+ ...
+ fun:_ZN7content28ShellURLRequestContextGetter20GetURLRequestContextEv
+ fun:_ZN7content21ChromeAppCacheService20InitializeOnIOThreadERKN4base8FilePathEPNS_15ResourceContextEPN3net23URLRequestContextGetterE13scoped_refptrIN7storage20SpecialStoragePolicyEE
+}
+{
+ bug_587544_b
+ Memcheck:Leak
+ ...
+ fun:_ZN4base4BindIMN7content21ChromeAppCacheServiceEFvRKNS_8FilePathEPNS1_15ResourceContextEPN3net23URLRequestContextGetterE13scoped_refptrIN7storage20SpecialStoragePolicyEEEJPS2_S3_S7_SB_IS9_ESE_EEENS_8CallbackINS_8internal22MakeUnboundRunTypeImplIT_JDpT0_EE4TypeEEESM_DpOSN_
+}
+{
+ bug_600484
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4mojo3edk2js13HandleWrapper6Create*
+ fun:_ZN3gin9ConverterIN4mojo6Handle*
+ fun:_ZN3gin11ConvertToV8IN4mojo6Handle*
+ fun:_ZN3gin10ToV8TraitsIN4mojo6HandleELb0EE14TryConvertToV8*
+ fun:_ZN3gin14TryConvertToV8IN4mojo6Handle*
+ fun:_ZN3gin10Dictionary3SetIN4mojo6Handle*
+ fun:_ZN4mojo3edk2js12_GLOBAL__N_114CreateDataPipe*
+}
diff --git a/chromium/tools/valgrind/memcheck/suppressions_linux.txt b/chromium/tools/valgrind/memcheck/suppressions_linux.txt
new file mode 100644
index 00000000000..870cee2d360
--- /dev/null
+++ b/chromium/tools/valgrind/memcheck/suppressions_linux.txt
@@ -0,0 +1,143 @@
+# There are three kinds of suppressions in this file:
+# 1. Third party stuff we have no control over.
+#
+# 2. Intentional unit test errors, stuff that is somehow a false positive
+# in our own code, or stuff that is so trivial it's not worth fixing.
+#
+# 3. Suppressions for real chromium bugs that are not yet fixed.
+# These should all be in chromium's bug tracking system.
+# Periodically we should sweep this file and the bug tracker clean by
+# running overnight and removing outdated bugs/suppressions.
+#
+# TODO(rnk): Should we move all of the Linux-only system library suppressions
+# over from suppressions.txt? We'd avoid wasting time parsing and matching
+# suppressions on non-Linux, which is basically just Mac.
+#
+#-----------------------------------------------------------------------
+
+# 1. Third party stuff we have no control over.
+
+# 2. Intentional unit test errors, stuff that is somehow a false positive
+# in our own code, or stuff that is so trivial it's not worth fixing.
+
+# 3. Suppressions for real chromium bugs that are not yet fixed.
+{
+ bug_436253
+ Memcheck:Overlap
+ fun:memcpy@@GLIBC_2.14
+ fun:_ZN7content14WebPreferencesaSERKS0_
+ fun:_ZN7content14RenderViewImpl22OnUpdateWebPreferencesERKNS_14WebPreferencesE
+ fun:_ZN7content14RenderViewImpl20SetWebkitPreferencesERKNS_14WebPreferencesE
+ fun:_ZThn*_N7content14RenderViewImpl20SetWebkitPreferencesERKNS_14WebPreferencesE
+ fun:_ZN7content15BlinkTestRunner5ResetEv
+}
+{
+ bug_436292
+ Memcheck:Param
+ sendmsg(msg.msg_iov[0])
+ ...
+ fun:MojoWriteMessage
+ fun:_ZN3IPC8internal13MessageReader4SendE10scoped_ptrINS_7MessageEN4base14DefaultDeleterIS3_EEE
+ fun:_ZN3IPC11ChannelMojo4SendEPNS_7MessageE
+ fun:_ZN7content20BrowserMessageFilter4SendEPN3IPC7MessageE
+ fun:_ZN7content17AudioRendererHost18DoCompleteCreationEi
+}
+{
+ bug_436292_b
+ Memcheck:Param
+ sendmsg(msg.msg_iov[0])
+ ...
+ fun:MojoWriteMessage
+ fun:_ZN3IPC8internal13MessageReader4SendE10scoped_ptrINS_7MessageEN4base14DefaultDeleterIS3_EEE
+ fun:_ZN3IPC11ChannelMojo17InitMessageReaderEN4mojo16ScopedHandleBaseINS1_17MessagePipeHandleEEEi
+ fun:_ZN3IPC12_GLOBAL__N_117ServerChannelMojo27ClientChannelWasInitializedEi
+}
+{
+ bug_452002
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN11leveldb_env11ChromiumEnv11StartThreadEPFvPvES1_
+ fun:_ZN11leveldb_env11ChromiumEnv8ScheduleEPFvPvES1_
+ fun:_ZN7leveldb6DBImpl23MaybeScheduleCompactionEv
+ fun:_ZN7leveldb6DBImpl16MakeRoomForWriteEb
+ fun:_ZN7leveldb6DBImpl5WriteERKNS_12WriteOptionsEPNS_10WriteBatchE
+ fun:_ZN17LeveldbValueStore9WriteToDbEPN7leveldb10WriteBatchE
+ fun:_ZN17LeveldbValueStore3SetEiRKSsRKN4base5ValueE
+ fun:_ZN10extensions28SettingsStorageQuotaEnforcer3SetEiRKSsRKN4base5ValueE
+ fun:_ZN10extensions28WeakUnlimitedSettingsStorage3SetEiRKSsRKN4base5ValueE
+ fun:_ZN10extensions12_GLOBAL__N_1L33UnlimitedLocalStorageTestCallbackEP10ValueStore
+}
+{
+ bug_512204
+ Memcheck:Leak
+ fun:malloc
+ fun:CRYPTO_set_thread_local
+ fun:err_get_state
+ fun:ERR_clear_error
+ fun:_ZN6crypto20ClearOpenSSLERRStackERKN15tracked_objects8LocationE
+ fun:_ZN6crypto21OpenSSLErrStackTracerD2Ev
+}
+{
+ bug_512204_b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN3net13CTLogVerifier6CreateERKN4base16BasicStringPieceISsEES5_S5_
+ fun:_ZN3net2ct30CreateLogVerifiersForKnownLogsEv
+ fun:_ZN8IOThread4InitEv
+ fun:_ZN7content17BrowserThreadImpl4InitEv
+ fun:_ZN7content21TestBrowserThreadImpl4InitEv
+ fun:_ZN4base6Thread10ThreadMainEv
+ fun:_ZN4base12_GLOBAL__N_110ThreadFuncEPv
+}
+{
+ bug_555798_a
+ Memcheck:Leak
+ fun:malloc
+ fun:strdup
+ obj:*
+ ...
+ fun:*LoadNativeLibrary*
+ fun:*ExternalClearKeyTestHelper*
+}
+{
+ bug_555798_b
+ Memcheck:Leak
+ fun:_Znw*
+ obj:*
+ ...
+ fun:_dl_init
+ fun:dl_open_worker
+}
+{
+ bug_569736
+ Memcheck:Leak
+ fun:_Znw*
+ obj:*
+ ...
+ fun:_ZN5media14CdmWrapperImplIN3cdm25ContentDecryptionModule_8EE7DecryptERKNS1_11InputBufferEPNS1_14DecryptedBlockE
+}
+{
+ bug_588788
+ Memcheck:Unaddressable
+ ...
+ fun:_ZN5views14AXAuraObjCache14GetFocusedViewEv
+ fun:_ZN5views14AXAuraObjCache8GetFocusEv
+ fun:_ZNK16AXTreeSourceAura11GetTreeDataEv
+ fun:_ZN2ui16AXTreeSerializerIPN5views16AXAuraObjWrapperENS_10AXNodeDataENS_10AXTreeDataEE16SerializeChangesES3_PNS_16AXTreeUpdateBaseIS4_S5_EE
+ fun:_ZN35AXTreeSourceAuraTest_Serialize_Test8TestBodyEv
+}
+{
+ bug_588849a
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN4base4BindIMN6syncer15ModelSafeWorkerEFvNS_8CallbackIFvNS1_14ModelSafeGroupEELNS_8internal8CopyModeE1EEEEJPS2_RS8_EEENS3_INS6_22MakeUnboundRunTypeImplIT_JDpT0_EE4TypeELS7_1EEESE_DpOSF_
+ fun:_ZN6syncer15ModelSafeWorker28UnregisterForLoopDestructionEN4base8CallbackIFvNS_14ModelSafeGroupEELNS1_8internal8CopyModeE1EEE
+ fun:_ZN12browser_sync20SyncBackendRegistrar8ShutdownEv
+}
+{
+ bug_588849b
+ Memcheck:Leak
+ fun:_Znw*
+ fun:_ZN30ProfileSyncServiceAutofillTest24CreateDataTypeControllerEN6syncer9ModelTypeE
+ fun:_ZN30ProfileSyncServiceAutofillTest16StartSyncServiceERKN4base8CallbackIF*syncer9ModelTypeE
+}
diff --git a/chromium/tools/valgrind/memcheck_analyze.py b/chromium/tools/valgrind/memcheck_analyze.py
new file mode 100755
index 00000000000..a5c78c144de
--- /dev/null
+++ b/chromium/tools/valgrind/memcheck_analyze.py
@@ -0,0 +1,640 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# memcheck_analyze.py
+
+''' Given a valgrind XML file, parses errors and uniques them.'''
+
+import gdb_helper
+
+from collections import defaultdict
+import hashlib
+import logging
+import optparse
+import os
+import re
+import subprocess
+import sys
+import time
+from xml.dom.minidom import parse
+from xml.parsers.expat import ExpatError
+
+import common
+
+# Global symbol table (yuck)
+TheAddressTable = None
+
+# These are regexps that define functions (using C++ mangled names)
+# we don't want to see in stack traces while pretty printing
+# or generating suppressions.
+# Just stop printing the stack/suppression frames when the current one
+# matches any of these.
+_BORING_CALLERS = common.BoringCallers(mangled=True, use_re_wildcards=True)
+
+def getTextOf(top_node, name):
+ ''' Returns all text in all DOM nodes with a certain |name| that are children
+ of |top_node|.
+ '''
+
+ text = ""
+ for nodes_named in top_node.getElementsByTagName(name):
+ text += "".join([node.data for node in nodes_named.childNodes
+ if node.nodeType == node.TEXT_NODE])
+ return text
+
+def getCDATAOf(top_node, name):
+ ''' Returns all CDATA in all DOM nodes with a certain |name| that are children
+ of |top_node|.
+ '''
+
+ text = ""
+ for nodes_named in top_node.getElementsByTagName(name):
+ text += "".join([node.data for node in nodes_named.childNodes
+ if node.nodeType == node.CDATA_SECTION_NODE])
+ if (text == ""):
+ return None
+ return text
+
+def shortenFilePath(source_dir, directory):
+ '''Returns a string with the string prefix |source_dir| removed from
+ |directory|.'''
+ prefixes_to_cut = ["build/src/", "valgrind/coregrind/", "out/Release/../../"]
+
+ if source_dir:
+ prefixes_to_cut.append(source_dir)
+
+ for p in prefixes_to_cut:
+ index = directory.rfind(p)
+ if index != -1:
+ directory = directory[index + len(p):]
+
+ return directory
+
+# Constants that give real names to the abbreviations in valgrind XML output.
+INSTRUCTION_POINTER = "ip"
+OBJECT_FILE = "obj"
+FUNCTION_NAME = "fn"
+SRC_FILE_DIR = "dir"
+SRC_FILE_NAME = "file"
+SRC_LINE = "line"
+
+def gatherFrames(node, source_dir):
+ frames = []
+ for frame in node.getElementsByTagName("frame"):
+ frame_dict = {
+ INSTRUCTION_POINTER : getTextOf(frame, INSTRUCTION_POINTER),
+ OBJECT_FILE : getTextOf(frame, OBJECT_FILE),
+ FUNCTION_NAME : getTextOf(frame, FUNCTION_NAME),
+ SRC_FILE_DIR : shortenFilePath(
+ source_dir, getTextOf(frame, SRC_FILE_DIR)),
+ SRC_FILE_NAME : getTextOf(frame, SRC_FILE_NAME),
+ SRC_LINE : getTextOf(frame, SRC_LINE)
+ }
+
+ # Ignore this frame and all the following if it's a "boring" function.
+ enough_frames = False
+ for regexp in _BORING_CALLERS:
+ if re.match("^%s$" % regexp, frame_dict[FUNCTION_NAME]):
+ enough_frames = True
+ break
+ if enough_frames:
+ break
+
+ frames += [frame_dict]
+
+ global TheAddressTable
+ if TheAddressTable != None and frame_dict[SRC_LINE] == "":
+ # Try using gdb
+ TheAddressTable.Add(frame_dict[OBJECT_FILE],
+ frame_dict[INSTRUCTION_POINTER])
+ return frames
+
+class ValgrindError:
+ ''' Takes a <DOM Element: error> node and reads all the data from it. A
+ ValgrindError is immutable and is hashed on its pretty printed output.
+ '''
+
+ def __init__(self, source_dir, error_node, commandline, testcase):
+ ''' Copies all the relevant information out of the DOM and into object
+ properties.
+
+ Args:
+ error_node: The <error></error> DOM node we're extracting from.
+ source_dir: Prefix that should be stripped from the <dir> node.
+ commandline: The command that was run under valgrind
+ testcase: The test case name, if known.
+ '''
+
+ # Valgrind errors contain one <what><stack> pair, plus an optional
+ # <auxwhat><stack> pair, plus an optional <origin><what><stack></origin>,
+ # plus (since 3.5.0) a <suppression></suppression> pair.
+ # (Origin is nicely enclosed; too bad the other two aren't.)
+ # The most common way to see all three in one report is
+ # a syscall with a parameter that points to uninitialized memory, e.g.
+ # Format:
+ # <error>
+ # <unique>0x6d</unique>
+ # <tid>1</tid>
+ # <kind>SyscallParam</kind>
+ # <what>Syscall param write(buf) points to uninitialised byte(s)</what>
+ # <stack>
+ # <frame>
+ # ...
+ # </frame>
+ # </stack>
+ # <auxwhat>Address 0x5c9af4f is 7 bytes inside a block of ...</auxwhat>
+ # <stack>
+ # <frame>
+ # ...
+ # </frame>
+ # </stack>
+ # <origin>
+ # <what>Uninitialised value was created by a heap allocation</what>
+ # <stack>
+ # <frame>
+ # ...
+ # </frame>
+ # </stack>
+ # </origin>
+ # <suppression>
+ # <sname>insert_a_suppression_name_here</sname>
+ # <skind>Memcheck:Param</skind>
+ # <skaux>write(buf)</skaux>
+ # <sframe> <fun>__write_nocancel</fun> </sframe>
+ # ...
+ # <sframe> <fun>main</fun> </sframe>
+ # <rawtext>
+ # <![CDATA[
+ # {
+ # <insert_a_suppression_name_here>
+ # Memcheck:Param
+ # write(buf)
+ # fun:__write_nocancel
+ # ...
+ # fun:main
+ # }
+ # ]]>
+ # </rawtext>
+ # </suppression>
+ # </error>
+ #
+ # Each frame looks like this:
+ # <frame>
+ # <ip>0x83751BC</ip>
+ # <obj>/data/dkegel/chrome-build/src/out/Release/base_unittests</obj>
+ # <fn>_ZN7testing8internal12TestInfoImpl7RunTestEPNS_8TestInfoE</fn>
+ # <dir>/data/dkegel/chrome-build/src/testing/gtest/src</dir>
+ # <file>gtest-internal-inl.h</file>
+ # <line>655</line>
+ # </frame>
+ # although the dir, file, and line elements are missing if there is
+ # no debug info.
+
+ self._kind = getTextOf(error_node, "kind")
+ self._backtraces = []
+ self._suppression = None
+ self._commandline = commandline
+ self._testcase = testcase
+ self._additional = []
+
+ # Iterate through the nodes, parsing <what|auxwhat><stack> pairs.
+ description = None
+ for node in error_node.childNodes:
+ if node.localName == "what" or node.localName == "auxwhat":
+ description = "".join([n.data for n in node.childNodes
+ if n.nodeType == n.TEXT_NODE])
+ elif node.localName == "xwhat":
+ description = getTextOf(node, "text")
+ elif node.localName == "stack":
+ assert description
+ self._backtraces.append([description, gatherFrames(node, source_dir)])
+ description = None
+ elif node.localName == "origin":
+ description = getTextOf(node, "what")
+ stack = node.getElementsByTagName("stack")[0]
+ frames = gatherFrames(stack, source_dir)
+ self._backtraces.append([description, frames])
+ description = None
+ stack = None
+ frames = None
+ elif description and node.localName != None:
+      # The latest description has no stack, e.g. "Address 0x28 is unknown"
+ self._additional.append(description)
+ description = None
+
+ if node.localName == "suppression":
+ self._suppression = getCDATAOf(node, "rawtext");
+
+ def __str__(self):
+ ''' Pretty print the type and backtrace(s) of this specific error,
+ including suppression (which is just a mangled backtrace).'''
+ output = ""
+ output += "\n" # Make sure the ### is at the beginning of line.
+ output += "### BEGIN MEMORY TOOL REPORT (error hash=#%016X#)\n" % \
+ self.ErrorHash()
+ if (self._commandline):
+ output += self._commandline + "\n"
+
+ output += self._kind + "\n"
+ for backtrace in self._backtraces:
+ output += backtrace[0] + "\n"
+ filter = subprocess.Popen("c++filt -n", stdin=subprocess.PIPE,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ shell=True,
+ close_fds=True)
+ buf = ""
+ for frame in backtrace[1]:
+ buf += (frame[FUNCTION_NAME] or frame[INSTRUCTION_POINTER]) + "\n"
+ (stdoutbuf, stderrbuf) = filter.communicate(buf.encode('latin-1'))
+ demangled_names = stdoutbuf.split("\n")
+
+ i = 0
+ for frame in backtrace[1]:
+ output += (" " + demangled_names[i])
+ i = i + 1
+
+ global TheAddressTable
+ if TheAddressTable != None and frame[SRC_FILE_DIR] == "":
+ # Try using gdb
+ foo = TheAddressTable.GetFileLine(frame[OBJECT_FILE],
+ frame[INSTRUCTION_POINTER])
+ if foo[0] != None:
+ output += (" (" + foo[0] + ":" + foo[1] + ")")
+ elif frame[SRC_FILE_DIR] != "":
+ output += (" (" + frame[SRC_FILE_DIR] + "/" + frame[SRC_FILE_NAME] +
+ ":" + frame[SRC_LINE] + ")")
+ else:
+ output += " (" + frame[OBJECT_FILE] + ")"
+ output += "\n"
+
+ for additional in self._additional:
+ output += additional + "\n"
+
+ assert self._suppression != None, "Your Valgrind doesn't generate " \
+ "suppressions - is it too old?"
+
+ if self._testcase:
+ output += "The report came from the `%s` test.\n" % self._testcase
+ output += "Suppression (error hash=#%016X#):\n" % self.ErrorHash()
+ output += (" For more info on using suppressions see "
+ "http://dev.chromium.org/developers/tree-sheriffs/sheriff-details-chromium/memory-sheriff#TOC-Suppressing-memory-reports")
+
+ # Widen suppression slightly to make portable between mac and linux
+ # TODO(timurrrr): Oops, these transformations should happen
+ # BEFORE calculating the hash!
+ supp = self._suppression;
+ supp = supp.replace("fun:_Znwj", "fun:_Znw*")
+ supp = supp.replace("fun:_Znwm", "fun:_Znw*")
+ supp = supp.replace("fun:_Znaj", "fun:_Zna*")
+ supp = supp.replace("fun:_Znam", "fun:_Zna*")
+
+ # Make suppressions even less platform-dependent.
+ for sz in [1, 2, 4, 8]:
+ supp = supp.replace("Memcheck:Addr%d" % sz, "Memcheck:Unaddressable")
+ supp = supp.replace("Memcheck:Value%d" % sz, "Memcheck:Uninitialized")
+ supp = supp.replace("Memcheck:Cond", "Memcheck:Uninitialized")
+
+ # Split into lines so we can enforce length limits
+ supplines = supp.split("\n")
+ supp = None # to avoid re-use
+
+ # Truncate at line 26 (VG_MAX_SUPP_CALLERS plus 2 for name and type)
+ # or at the first 'boring' caller.
+ # (https://bugs.kde.org/show_bug.cgi?id=199468 proposes raising
+ # VG_MAX_SUPP_CALLERS, but we're probably fine with it as is.)
+ newlen = min(26, len(supplines));
+
+ # Drop boring frames and all the following.
+ enough_frames = False
+ for frameno in range(newlen):
+ for boring_caller in _BORING_CALLERS:
+ if re.match("^ +fun:%s$" % boring_caller, supplines[frameno]):
+ newlen = frameno
+ enough_frames = True
+ break
+ if enough_frames:
+ break
+ if (len(supplines) > newlen):
+ supplines = supplines[0:newlen]
+ supplines.append("}")
+
+ for frame in range(len(supplines)):
+ # Replace the always-changing anonymous namespace prefix with "*".
+ m = re.match("( +fun:)_ZN.*_GLOBAL__N_.*\.cc_" +
+ "[0-9a-fA-F]{8}_[0-9a-fA-F]{8}(.*)",
+ supplines[frame])
+ if m:
+ supplines[frame] = "*".join(m.groups())
+
+ output += "\n".join(supplines) + "\n"
+ output += "### END MEMORY TOOL REPORT (error hash=#%016X#)\n" % \
+ self.ErrorHash()
+
+ return output
+
+ def UniqueString(self):
+ ''' String to use for object identity. Don't print this, use str(obj)
+ instead.'''
+ rep = self._kind + " "
+ for backtrace in self._backtraces:
+ for frame in backtrace[1]:
+ rep += frame[FUNCTION_NAME]
+
+ if frame[SRC_FILE_DIR] != "":
+ rep += frame[SRC_FILE_DIR] + "/" + frame[SRC_FILE_NAME]
+ else:
+ rep += frame[OBJECT_FILE]
+
+ return rep
+
+ # This is a device-independent hash identifying the suppression.
+ # By printing out this hash we can find duplicate reports between tests and
+ # different shards running on multiple buildbots
+ def ErrorHash(self):
+ return int(hashlib.md5(self.UniqueString()).hexdigest()[:16], 16)
+
+ def __hash__(self):
+ return hash(self.UniqueString())
+ def __eq__(self, rhs):
+ return self.UniqueString() == rhs
+
+def log_is_finished(f, force_finish):
+ f.seek(0)
+ prev_line = ""
+ while True:
+ line = f.readline()
+ if line == "":
+ if not force_finish:
+ return False
+ # Okay, the log is not finished but we can make it up to be parseable:
+ if prev_line.strip() in ["</error>", "</errorcounts>", "</status>"]:
+ f.write("</valgrindoutput>\n")
+ return True
+ return False
+ if '</valgrindoutput>' in line:
+ # Valgrind often has garbage after </valgrindoutput> upon crash.
+ f.truncate()
+ return True
+ prev_line = line
+
+class MemcheckAnalyzer:
+ ''' Given a set of Valgrind XML files, parse all the errors out of them,
+ unique them and output the results.'''
+
+ SANITY_TEST_SUPPRESSIONS = {
+ "Memcheck sanity test 01 (memory leak).": 1,
+ "Memcheck sanity test 02 (malloc/read left).": 1,
+ "Memcheck sanity test 03 (malloc/read right).": 1,
+ "Memcheck sanity test 04 (malloc/write left).": 1,
+ "Memcheck sanity test 05 (malloc/write right).": 1,
+ "Memcheck sanity test 06 (new/read left).": 1,
+ "Memcheck sanity test 07 (new/read right).": 1,
+ "Memcheck sanity test 08 (new/write left).": 1,
+ "Memcheck sanity test 09 (new/write right).": 1,
+ "Memcheck sanity test 10 (write after free).": 1,
+ "Memcheck sanity test 11 (write after delete).": 1,
+ "Memcheck sanity test 12 (array deleted without []).": 1,
+ "Memcheck sanity test 13 (single element deleted with []).": 1,
+ "Memcheck sanity test 14 (malloc/read uninit).": 1,
+ "Memcheck sanity test 15 (new/read uninit).": 1,
+ }
+
+ # Max time to wait for memcheck logs to complete.
+ LOG_COMPLETION_TIMEOUT = 180.0
+
+ def __init__(self, source_dir, show_all_leaks=False, use_gdb=False):
+ '''Create a parser for Memcheck logs.
+
+ Args:
+ source_dir: Path to top of source tree for this build
+ show_all_leaks: Whether to show even less important leaks
+ use_gdb: Whether to use gdb to resolve source filenames and line numbers
+ in the report stacktraces
+ '''
+ self._source_dir = source_dir
+ self._show_all_leaks = show_all_leaks
+ self._use_gdb = use_gdb
+
+ # Contains the set of unique errors
+ self._errors = set()
+
+ # Contains the time when the we started analyzing the first log file.
+ # This variable is used to skip incomplete logs after some timeout.
+ self._analyze_start_time = None
+
+
+ def Report(self, files, testcase, check_sanity=False):
+ '''Reads in a set of files and prints Memcheck report.
+
+ Args:
+ files: A list of filenames.
+ check_sanity: if true, search for SANITY_TEST_SUPPRESSIONS
+ '''
+ # Beyond the detailed errors parsed by ValgrindError above,
+    # the xml file contains records describing suppressions that were used:
+ # <suppcounts>
+ # <pair>
+ # <count>28</count>
+ # <name>pango_font_leak_todo</name>
+ # </pair>
+ # <pair>
+ # <count>378</count>
+ # <name>bug_13243</name>
+ # </pair>
+    # </suppcounts>
+ # Collect these and print them at the end.
+ #
+ # With our patch for https://bugs.kde.org/show_bug.cgi?id=205000 in,
+ # the file also includes records of the form
+ # <load_obj><obj>/usr/lib/libgcc_s.1.dylib</obj><ip>0x27000</ip></load_obj>
+ # giving the filename and load address of each binary that was mapped
+ # into the process.
+
+ global TheAddressTable
+ if self._use_gdb:
+ TheAddressTable = gdb_helper.AddressTable()
+ else:
+ TheAddressTable = None
+ cur_report_errors = set()
+ suppcounts = defaultdict(int)
+ badfiles = set()
+
+ if self._analyze_start_time == None:
+ self._analyze_start_time = time.time()
+ start_time = self._analyze_start_time
+
+ parse_failed = False
+ for file in files:
+ # Wait up to three minutes for valgrind to finish writing all files,
+ # but after that, just skip incomplete files and warn.
+ f = open(file, "r+")
+ pid = re.match(".*\.([0-9]+)$", file)
+ if pid:
+ pid = pid.groups()[0]
+ found = False
+ running = True
+ firstrun = True
+ skip = False
+ origsize = os.path.getsize(file)
+ while (running and not found and not skip and
+ (firstrun or
+ ((time.time() - start_time) < self.LOG_COMPLETION_TIMEOUT))):
+ firstrun = False
+ f.seek(0)
+ if pid:
+ # Make sure the process is still running so we don't wait for
+ # 3 minutes if it was killed. See http://crbug.com/17453
+ ps_out = subprocess.Popen("ps p %s" % pid, shell=True,
+ stdout=subprocess.PIPE).stdout
+ if len(ps_out.readlines()) < 2:
+ running = False
+ else:
+ skip = True
+ running = False
+ found = log_is_finished(f, False)
+ if not running and not found:
+ logging.warn("Valgrind process PID = %s is not running but its "
+ "XML log has not been finished correctly.\n"
+ "Make it up by adding some closing tags manually." % pid)
+ found = log_is_finished(f, not running)
+ if running and not found:
+ time.sleep(1)
+ f.close()
+ if not found:
+ badfiles.add(file)
+ else:
+ newsize = os.path.getsize(file)
+ if origsize > newsize+1:
+ logging.warn(str(origsize - newsize) +
+ " bytes of junk were after </valgrindoutput> in %s!" %
+ file)
+ try:
+ parsed_file = parse(file);
+ except ExpatError, e:
+ parse_failed = True
+ logging.warn("could not parse %s: %s" % (file, e))
+ lineno = e.lineno - 1
+ context_lines = 5
+ context_start = max(0, lineno - context_lines)
+ context_end = lineno + context_lines + 1
+ context_file = open(file, "r")
+ for i in range(0, context_start):
+ context_file.readline()
+ for i in range(context_start, context_end):
+ context_data = context_file.readline().rstrip()
+ if i != lineno:
+ logging.warn(" %s" % context_data)
+ else:
+ logging.warn("> %s" % context_data)
+ context_file.close()
+ continue
+ if TheAddressTable != None:
+ load_objs = parsed_file.getElementsByTagName("load_obj")
+ for load_obj in load_objs:
+ obj = getTextOf(load_obj, "obj")
+ ip = getTextOf(load_obj, "ip")
+ TheAddressTable.AddBinaryAt(obj, ip)
+
+ commandline = None
+ preamble = parsed_file.getElementsByTagName("preamble")[0];
+ for node in preamble.getElementsByTagName("line"):
+ if node.localName == "line":
+ for x in node.childNodes:
+ if x.nodeType == node.TEXT_NODE and "Command" in x.data:
+ commandline = x.data
+ break
+
+ raw_errors = parsed_file.getElementsByTagName("error")
+ for raw_error in raw_errors:
+ # Ignore "possible" leaks for now by default.
+ if (self._show_all_leaks or
+ getTextOf(raw_error, "kind") != "Leak_PossiblyLost"):
+ error = ValgrindError(self._source_dir,
+ raw_error, commandline, testcase)
+ if error not in cur_report_errors:
+ # We haven't seen such errors doing this report yet...
+ if error in self._errors:
+ # ... but we saw it in earlier reports, e.g. previous UI test
+ cur_report_errors.add("This error was already printed in "
+ "some other test, see 'hash=#%016X#'" % \
+ error.ErrorHash())
+ else:
+ # ... and we haven't seen it in other tests as well
+ self._errors.add(error)
+ cur_report_errors.add(error)
+
+ suppcountlist = parsed_file.getElementsByTagName("suppcounts")
+ if len(suppcountlist) > 0:
+ suppcountlist = suppcountlist[0]
+ for node in suppcountlist.getElementsByTagName("pair"):
+ count = getTextOf(node, "count");
+ name = getTextOf(node, "name");
+ suppcounts[name] += int(count)
+
+ if len(badfiles) > 0:
+ logging.warn("valgrind didn't finish writing %d files?!" % len(badfiles))
+ for file in badfiles:
+ logging.warn("Last 20 lines of %s :" % file)
+ os.system("tail -n 20 '%s' 1>&2" % file)
+
+ if parse_failed:
+ logging.error("FAIL! Couldn't parse Valgrind output file")
+ return -2
+
+ common.PrintUsedSuppressionsList(suppcounts)
+
+ retcode = 0
+ if cur_report_errors:
+ logging.error("FAIL! There were %s errors: " % len(cur_report_errors))
+
+ if TheAddressTable != None:
+ TheAddressTable.ResolveAll()
+
+ for error in cur_report_errors:
+ logging.error(error)
+
+ retcode = -1
+
+ # Report tool's insanity even if there were errors.
+ if check_sanity:
+ remaining_sanity_supp = MemcheckAnalyzer.SANITY_TEST_SUPPRESSIONS
+ for (name, count) in suppcounts.iteritems():
+ # Workaround for http://crbug.com/334074
+ if (name in remaining_sanity_supp and
+ remaining_sanity_supp[name] <= count):
+ del remaining_sanity_supp[name]
+ if remaining_sanity_supp:
+ logging.error("FAIL! Sanity check failed!")
+ logging.info("The following test errors were not handled: ")
+ for (name, count) in remaining_sanity_supp.iteritems():
+ logging.info(" * %dx %s" % (count, name))
+ retcode = -3
+
+ if retcode != 0:
+ return retcode
+
+ logging.info("PASS! No errors found!")
+ return 0
+
+
def _main():
  '''For testing only. The MemcheckAnalyzer class should be imported instead.'''
  parser = optparse.OptionParser("usage: %prog [options] <files to analyze>")
  parser.add_option("", "--source-dir",
                    help="path to top of source tree for this build"
                         "(used to normalize source paths in baseline)")

  options, args = parser.parse_args()
  if not args:
    parser.error("no filename specified")

  # Stand-alone runs always symbolize with gdb for readable stacks.
  analyzer = MemcheckAnalyzer(options.source_dir, use_gdb=True)
  return analyzer.Report(args, None)


if __name__ == "__main__":
  sys.exit(_main())
diff --git a/chromium/tools/valgrind/regrind.sh b/chromium/tools/valgrind/regrind.sh
new file mode 100755
index 00000000000..0f90ba737f0
--- /dev/null
+++ b/chromium/tools/valgrind/regrind.sh
@@ -0,0 +1,138 @@
+#!/bin/sh
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Scape errors from the valgrind bots, reproduce them locally,
+# save logs as regrind-TESTNAME.log, and display any errors found.
+# Also save files regrind-failed.txt listing failed tests,
+# and regrind-failed-map.txt showing which bot URLs have which failed tests
+# (handy when filing bugs).
+#
+# Only scrapes linux layout bot at the moment.
+# TODO: handle layout tests that don't have obvious path to test file
+# TODO: extend script to handle more kinds of errors and more tests
+
+# where the valgrind layout bot results live
+LAYOUT_URL="http://build.chromium.org/p/chromium.memory.fyi/builders/Webkit%20Linux%20(valgrind%20layout)"
+# how many builds back to check
+LAYOUT_COUNT=250
+
+# regexp to match valgrind errors
+PATTERN="are definitely|uninitialised|Unhandled exception|\
+Invalid read|Invalid write|Invalid free|Source and desti|Mismatched free|\
+unaddressable byte|vex x86|the 'impossible' happened|\
+valgrind:.*: Assertion.*failed|VALGRIND INTERNAL ERROR"
+
# Print usage help and terminate with a nonzero status.
usage() {
  cat <<USAGE_EOF
Usage: regrind.sh [--noscrape][--norepro][--keep]
--noscrape: don't scrape bots, just use old regrind-failed.txt
--norepro: don't reproduce locally
--keep: keep temp files
USAGE_EOF
  exit 1
}
+
# Given a log on stdin, list all the tests that failed in that log.
layout_list_failed_tests() {
  grep "Command:.*LayoutTests" |          # lines naming the test being run
    sed 's/<.*>//' |                      # strip any trailing HTML tags
    sed 's/.*LayoutTests/LayoutTests/' |  # keep the path from LayoutTests/ on
    sort -u |                             # de-duplicate across retries
    tr -d '\015'                          # drop CRs from CRLF bot logs
}
+
# Generate a list of failed tests in regrind-failed.txt by scraping bot.
# Scrape most recent first, so if user interrupts, he is left with fresh-ish data.
scrape_layout() {
  rm -f regrind-*.tmp* regrind-failed.txt regrind-failed-map.txt
  touch regrind-failed.txt

  # First, grab the number of the latest complete build.
  wget -q -O regrind-builds.html "$LAYOUT_URL"
  latest=`grep "<li><font .*" < regrind-builds.html | head -1 | sed 's/.*#//;s/<.*//'`

  echo "Fetching $LAYOUT_COUNT logs from bot"
  # Scrape the desired number of runs (150 is about one cycle)
  first=`expr $latest - $LAYOUT_COUNT`
  i=$latest
  while test $i -ge $first
  do
    url="$LAYOUT_URL/builds/$i/steps/valgrind%20test:%20layout/logs/stdio"
    wget -q -O regrind-$i.tmp "$url"
    # Did any tests fail in this file?
    layout_list_failed_tests < regrind-$i.tmp > regrind-$i.tmp.failed
    if test -s regrind-$i.tmp.failed
    then
      # Yes. Log them to stdout,
      echo "$url"
      cat regrind-$i.tmp.failed
      # to the table regrind-failed-map.txt,
      cat regrind-$i.tmp.failed | sed "s,^,$url ," >> regrind-failed-map.txt
      # and, if not already there, to regrind-failed.txt.
      for test in `cat regrind-$i.tmp.failed`
      do
        fgrep "$test" regrind-failed.txt > /dev/null 2>&1 || echo "$test" >> regrind-failed.txt
      done
    else
      rm regrind-$i.tmp.failed
    fi
    # Sleep 1/3 sec per fetch (throttle: pause on every build number
    # ending in 0, 3, or 6).
    case $i in
    *[036]) sleep 1;;
    esac
    i=`expr $i - 1`
  done

  # Finally, munge the logs to identify tests that probably failed.
  # NOTE(review): depends on a helper script "c.sh" in the current
  # directory — confirm it exists; it is not part of this file.
  sh c.sh -l regrind-*.tmp > regrind-errfiles.txt
  cat `cat regrind-errfiles.txt` | layout_list_failed_tests > regrind-failed.txt
}
+
# Run the tests identified in regrind-failed.txt locally under valgrind.
# Save logs in regrind-$TESTNAME.log.
repro_layout() {
  echo Running `wc -l < regrind-failed.txt` layout tests.
  for test in `cat regrind-failed.txt`
  do
    # Slashes in the test path would break the log filename; flatten them.
    logname="`echo $test | tr / _`"
    echo "sh tools/valgrind/valgrind_webkit_tests.sh $test"
    sh tools/valgrind/valgrind_webkit_tests.sh "$test" > regrind-"$logname".log 2>&1
    # Surface any matching valgrind errors, minus the ==PID== prefix.
    egrep "$PATTERN" < regrind-"$logname".log | sed 's/==.*==//'
  done
}
+
# Defaults: scrape the bots, reproduce locally, and clean up temp files.
do_repro=1
do_scrape=1
do_cleanup=1
while test ! -z "$1"
do
  case "$1" in
  --noscrape) do_scrape=0;;
  --norepro) do_repro=0;;
  --keep) do_cleanup=0;;
  *) usage;;
  esac
  shift
done

echo "WARNING: This script is not supported and may be out of date"

# Disabling both phases leaves nothing to do; treat it as a usage error.
if test $do_scrape = 0 && test $do_repro = 0
then
  usage
fi

if test $do_scrape = 1
then
  scrape_layout
fi

if test $do_repro = 1
then
  repro_layout
fi

# Remove only scratch files; regrind-failed*.txt and per-test logs are the
# script's output and are kept.
if test $do_cleanup = 1
then
  rm -f regrind-errfiles.txt regrind-*.tmp*
fi
diff --git a/chromium/tools/valgrind/reliability/url_list.txt b/chromium/tools/valgrind/reliability/url_list.txt
new file mode 100644
index 00000000000..ac531225602
--- /dev/null
+++ b/chromium/tools/valgrind/reliability/url_list.txt
@@ -0,0 +1,11 @@
+www.google.com
+maps.google.com
+news.google.com
+www.youtube.com
+build.chromium.org/p/chromium/waterfall
+build.chromium.org/p/chromium.memory/console
+build.chromium.org/f/chromium/perf/dashboard/overview.html
+www.slashdot.org
+www.ibanez.co.jp/japan/index.html
+www.bbc.co.uk/arabic/
+www.uni.edu/becker/chinese2.html
diff --git a/chromium/tools/valgrind/scan-build.py b/chromium/tools/valgrind/scan-build.py
new file mode 100755
index 00000000000..b117d1ea58a
--- /dev/null
+++ b/chromium/tools/valgrind/scan-build.py
@@ -0,0 +1,246 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import errno
+import json
+import os
+import re
+import sys
+import urllib
+import urllib2
+
+# Where all the data lives.
+ROOT_URL = "http://build.chromium.org/p/chromium.memory.fyi/builders"
+
+# TODO(groby) - support multi-line search from the command line. Useful when
+# scanning for classes of failures, see below.
+SEARCH_STRING = """<p class=\"failure result\">
+Failed memory test: content
+</p>"""
+
+# Location of the log cache.
+CACHE_DIR = "buildlogs.tmp"
+
+# If we don't find anything after searching |CUTOFF| logs, we're probably done.
+CUTOFF = 200
+
def EnsurePath(path):
  """Makes sure |path| does exist, tries to create it if it doesn't."""
  if os.path.isdir(path):
    return
  try:
    os.makedirs(path)
  except OSError as err:
    # A concurrent creator is fine; anything else is a real failure.
    if err.errno != errno.EEXIST:
      raise
+
+
class Cache(object):
  """A write-through cache of fetched URLs, stored as files under a root dir.

  Keys are relative paths (or absolute paths under the root); values are the
  fetched page contents, returned as lists of lines.
  """

  def __init__(self, root_dir):
    # All cache entries live under this absolute directory.
    self._root_dir = os.path.abspath(root_dir)

  def _LocalName(self, name):
    """If name is a relative path, treat it as relative to cache root.
       If it is absolute and under cache root, pass it through.
       Otherwise, raise error.
    """
    if os.path.isabs(name):
      assert os.path.commonprefix([name, self._root_dir]) == self._root_dir
    else:
      name = os.path.join(self._root_dir, name)
    return name

  def _FetchLocal(self, local_name):
    """Return the cached lines for local_name, or None on a cache miss."""
    local_name = self._LocalName(local_name)
    EnsurePath(os.path.dirname(local_name))
    if os.path.exists(local_name):
      # NOTE(review): the file handle is never explicitly closed here —
      # relies on refcounting; confirm acceptable for this tool.
      f = open(local_name, 'r')
      return f.readlines();
    return None

  def _FetchRemote(self, remote_name):
    """Fetch remote_name over HTTP and return the raw response body."""
    try:
      response = urllib2.urlopen(remote_name)
    except:
      print "Could not fetch", remote_name
      raise
    return response.read()

  def Update(self, local_name, remote_name):
    """Force-fetch remote_name, overwrite the cache entry, return lines."""
    local_name = self._LocalName(local_name)
    EnsurePath(os.path.dirname(local_name))
    blob = self._FetchRemote(remote_name)
    # NOTE(review): handle not closed/flushed explicitly — see _FetchLocal.
    f = open(local_name, "w")
    f.write(blob)
    return blob.splitlines()

  def FetchData(self, local_name, remote_name):
    """Return cached lines for local_name, fetching remote_name on a miss."""
    result = self._FetchLocal(local_name)
    if result:
      return result
    # If we get here, the local cache does not exist yet. Fetch, and store.
    return self.Update(local_name, remote_name)
+
+
class Builder(object):
  """One bot on the waterfall, identified by name.

  All log access goes through the owning Waterfall's cache.
  """

  def __init__(self, waterfall, name):
    self._waterfall = waterfall
    self._name = name

  def Name(self):
    return self._name

  def LatestBuild(self):
    return self._waterfall.GetLatestBuild(self._name)

  def GetBuildPath(self, build_num):
    """URL of the given build number for this builder."""
    return "%s/%s/builds/%d" % (
        self._waterfall._root_url, urllib.quote(self._name), build_num)

  def _FetchBuildLog(self, build_num):
    # Cache layout: builds/<builder-name>/<build-num>.log
    local_file = os.path.join("builds/%s" % self._name,
                              "%d.log" % build_num)
    return self._waterfall._cache.FetchData(local_file,
                                            self.GetBuildPath(build_num))

  def _CheckLog(self, build_num, tester):
    # True iff any line of the build log satisfies the tester.
    return any(tester(line) for line in self._FetchBuildLog(build_num))

  def ScanLogs(self, tester):
    """Walk builds backwards from the latest, collecting matching builds.

    Stops at build 0 or after CUTOFF consecutive-or-not misses.
    """
    hits = []
    build = self.LatestBuild()
    misses = 0
    while build != 0 and misses < CUTOFF:
      if self._CheckLog(build, tester):
        hits.append(build)
      else:
        misses += 1
      build -= 1
    return hits
+
+
class Waterfall(object):
  """Scrapes a buildbot waterfall page for its builders and latest builds."""

  def __init__(self, root_url, cache_dir):
    self._root_url = root_url
    self._builders = {}       # builder name -> Builder
    self._top_revision = {}   # builder name -> latest build number
    self._cache = Cache(cache_dir)

  def Builders(self):
    """Return all Builder objects found on the waterfall page."""
    return self._builders.values()

  def Update(self):
    """Re-fetch the waterfall page, then (re)parse builder info."""
    self._cache.Update("builders", self._root_url)
    self.FetchInfo()

  def FetchInfo(self):
    """Parse builders and latest builds from the (cached) waterfall HTML.

    No-op if already parsed once.
    """
    if self._top_revision:
      return

    html = self._cache.FetchData("builders", self._root_url)

    """ Search for both builders and latest build number in HTML
    <td class="box"><a href="builders/<builder-name>"> identifies a builder
    <a href="builders/<builder-name>/builds/<build-num>"> is the latest build.
    """
    box_matcher = re.compile('.*a href[^>]*>([^<]*)\<')
    build_matcher = re.compile('.*a href=\"builders/(.*)/builds/([0-9]+)\".*')
    last_builder = ""
    for line in html:
      if 'a href="builders/' in line:
        if 'td class="box"' in line:
          # New builder row; remember its name for the build link below.
          last_builder = box_matcher.match(line).group(1)
          self._builders[last_builder] = Builder(self, last_builder)
        else:
          # Build link; must belong to the most recently seen builder.
          result = build_matcher.match(line)
          builder = result.group(1)
          assert builder == urllib.quote(last_builder)
          self._top_revision[last_builder] = int(result.group(2))

  def GetLatestBuild(self, name):
    """Latest build number for builder |name| (parses lazily on demand)."""
    self.FetchInfo()
    assert self._top_revision
    return self._top_revision[name]
+
+
class MultiLineChange(object):
  """Callable that detects a fixed multi-line sequence in a line stream.

  Feed lines one at a time; the call returns True on the line that
  completes the tracked sequence.
  """

  def __init__(self, lines):
    self._tracked_lines = lines
    self._current = 0

  def __call__(self, line):
    """Advance the matcher with one line; True iff the sequence completed.

    Returns False on a mismatch. While part-way through a match the
    return value is None (falsy), matching the original behavior —
    callers only rely on truthiness.
    """
    if self._tracked_lines[self._current] not in line:
      # Mismatch resets the state machine.
      self._current = 0
      return False
    self._current += 1
    if self._current == len(self._tracked_lines):
      self._current = 0
      return True
+
+
def main(argv):
  """Entry point: --update refreshes the log cache, --find scans for a
  multi-line search term; --json switches --find output to JSON."""
  # Create argument parser.
  parser = argparse.ArgumentParser()
  commands = parser.add_mutually_exclusive_group(required=True)
  commands.add_argument("--update", action='store_true')
  commands.add_argument("--find", metavar='search term')
  parser.add_argument("--json", action='store_true',
                      help="Output in JSON format")
  args = parser.parse_args()

  # Cache lives next to this script.
  path = os.path.abspath(os.path.dirname(argv[0]))
  cache_path = os.path.join(path, CACHE_DIR)

  fyi = Waterfall(ROOT_URL, cache_path)

  if args.update:
    fyi.Update()
    for builder in fyi.Builders():
      print "Updating", builder.Name()
      # A tester that never matches still fetches every log into the cache.
      builder.ScanLogs(lambda x:False)

  if args.find:
    result = []
    tester = MultiLineChange(args.find.splitlines())
    fyi.FetchInfo()

    if not args.json:
      print "SCANNING FOR ", args.find
    for builder in fyi.Builders():
      if not args.json:
        print "Scanning", builder.Name()
      occurrences = builder.ScanLogs(tester)
      if occurrences:
        min_build = min(occurrences)
        path = builder.GetBuildPath(min_build)
        if args.json:
          data = {}
          data['builder'] = builder.Name()
          data['first_affected'] = min_build
          data['last_affected'] = max(occurrences)
          data['last_build'] = builder.LatestBuild()
          # NOTE(review): integer division under Python 2 — frequency is
          # truncated to a whole number of builds per occurrence.
          data['frequency'] = ((int(builder.LatestBuild()) - int(min_build)) /
                               len(occurrences))
          data['total'] = len(occurrences)
          data['first_url'] = path
          result.append(data)
        else:
          print "Earliest occurrence in build %d" % min_build
          print "Latest occurrence in build %d" % max(occurrences)
          print "Latest build: %d" % builder.LatestBuild()
          print path
          print "%d total" % len(occurrences)
    if args.json:
      json.dump(result, sys.stdout, indent=2, sort_keys=True)

if __name__ == "__main__":
  sys.exit(main(sys.argv))
+
diff --git a/chromium/tools/valgrind/suppressions.py b/chromium/tools/valgrind/suppressions.py
new file mode 100755
index 00000000000..e066bedead1
--- /dev/null
+++ b/chromium/tools/valgrind/suppressions.py
@@ -0,0 +1,945 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# suppressions.py
+
+"""Post-process Valgrind suppression matcher.
+
+Suppressions are defined as follows:
+
+# optional one-line comments anywhere in the suppressions file.
+{
+ <Short description of the error>
+ Toolname:Errortype
+ fun:function_name
+ obj:object_filename
+ fun:wildcarded_fun*_name
+ # an ellipsis wildcards zero or more functions in a stack.
+ ...
+ fun:some_other_function_name
+}
+
+If ran from the command line, suppressions.py does a self-test
+of the Suppression class.
+"""
+
+import os
+import re
+import sys
+
+sys.path.insert(0, os.path.join(os.path.dirname(__file__),
+ '..', 'python', 'google'))
+import path_utils
+
+
+ELLIPSIS = '...'
+
+
def GetSuppressions():
  """Load every known suppression file, keyed by a descriptive name.

  Missing files are treated as empty by ReadSuppressionsFromFile.
  """
  root = path_utils.ScriptDir()

  def _Load(*parts):
    # One suppressions file, addressed relative to the script directory.
    return ReadSuppressionsFromFile(os.path.join(root, *parts))

  return {
      'common_suppressions': _Load("memcheck", "suppressions.txt"),
      'linux_suppressions': _Load("memcheck", "suppressions_linux.txt"),
      'mac_suppressions': _Load("memcheck", "suppressions_mac.txt"),
      'drmem_suppressions': _Load("drmemory", "suppressions.txt"),
      'drmem_full_suppressions': _Load("drmemory", "suppressions_full.txt"),
  }
+
+
def GlobToRegex(glob_pattern, ignore_case=False):
  """Translate glob wildcards (*?) into regex syntax. Escape the rest.

  Args:
    glob_pattern: pattern possibly containing '*' and '?' wildcards.
    ignore_case: if True, match ASCII letters case-insensitively by
        emitting a [xX] character class for each letter.

  Returns:
    An (unanchored, uncompiled) regex source string.
  """
  # Accumulate fragments in a list and join once: the original grew a str
  # with += and then called ''.join() on the finished string, a no-op.
  parts = []
  for char in glob_pattern:
    if char == '*':
      parts.append('.*')
    elif char == '?':
      parts.append('.')
    elif ignore_case and char.isalpha():
      parts.append('[%s%s]' % (char.lower(), char.upper()))
    else:
      parts.append(re.escape(char))
  return ''.join(parts)
+
+
def StripAndSkipCommentsIterator(lines):
  """Generator of (line_no, line) pairs that strips comments and whitespace."""
  for index, raw in enumerate(lines):
    text = raw.strip()
    if text.startswith('#'):
      # Comment lines vanish entirely. Empty lines are kept, because a
      # blank line terminates a suppression.
      continue
    # Report 1-based line numbers, matching editor conventions.
    yield (index + 1, text)
+
+
class Suppression(object):
  """This class represents a single stack trace suppression.

  Attributes:
    description: A string representing the error description.
    type: A string representing the error type, e.g. Memcheck:Leak.
    stack: The lines comprising the stack trace for the suppression.
    regex: The actual regex used to match against scraped reports.
  """

  def __init__(self, description, type, stack, defined_at, regex):
    """Inits Suppression.

    description, type, stack, regex: same as class attributes
    defined_at: file:line identifying where the suppression was defined
    """
    self.description = description
    self.type = type
    self.stack = stack
    self.defined_at = defined_at
    self.regex = re.compile(regex, re.MULTILINE)

  def Match(self, suppression_from_report):
    """Whether this suppression matches a generated suppression.

    We match against suppressions generated from reports — not the raw
    XML reports — because generated suppressions share our format, while
    the XML reports carry filenames and demangled symbols and are much
    harder to parse.

    Args:
      suppression_from_report: list of strings (function names).
    Returns:
      True if the suppression is not empty and matches the report.
    """
    if not self.stack:
      return False
    normalized = '\n'.join(frame.strip() for frame in suppression_from_report)
    return self.regex.match(normalized + '\n') is not None
+
+
def FilenameToTool(filename):
  """Return the name of the tool that a file is related to, or None.

  Example mappings:
    tools/valgrind/drmemory/suppressions.txt -> drmemory
    tools/valgrind/drmemory/suppressions_full.txt -> drmemory
    tools/valgrind/memcheck/suppressions.txt -> memcheck
    tools/valgrind/memcheck/suppressions_mac.txt -> memcheck
  """
  # The tool is simply the name of the directory containing the file.
  parent_dir = os.path.abspath(filename).split(os.sep)[-2]
  return parent_dir if parent_dir in ('drmemory', 'memcheck') else None
+
+
def ReadSuppressionsFromFile(filename):
  """Read suppressions from the given file and return them as a list.

  The parent directory name selects the parser (see FilenameToTool).

  Args:
    filename: path to a suppressions file.
  Returns:
    A list of Suppression objects; [] if the file does not exist.
  Raises:
    SuppressionError: if the file contents are malformed.
  """
  tool_to_parser = {
      "drmemory": ReadDrMemorySuppressions,
      "memcheck": ReadValgrindStyleSuppressions,
  }
  tool = FilenameToTool(filename)
  assert tool in tool_to_parser, (
      "unknown tool %s for filename %s" % (tool, filename))
  parse_func = tool_to_parser[tool]

  # Consider non-existent files to be empty.
  if not os.path.exists(filename):
    return []

  # Use a context manager so the handle is closed on every path — the
  # original used the Python-2-only file() builtin and closed the handle
  # only when parsing raised, leaking it on success.
  with open(filename, 'r') as input_file:
    return parse_func(input_file, filename)
+
+
class ValgrindStyleSuppression(Suppression):
  """A suppression using the Valgrind syntax.

  Most tools, even ones that are not Valgrind-based, use this syntax.

  Attributes:
    Same as Suppression.
  """

  def __init__(self, description, type, stack, defined_at):
    """Creates a suppression using the Memcheck syntax."""
    # Regex matches a whole "{ ... }" suppression block: the first ".*"
    # skips the description line, then the type line, then the frames.
    regex = '{\n.*\n%s\n' % type
    for line in stack:
      if line == ELLIPSIS:
        # "..." matches zero or more whole frames.
        regex += '(.*\n)*'
      else:
        regex += GlobToRegex(line)
        regex += '\n'
    # Anything left in the report's stack below our frames is allowed.
    regex += '(.*\n)*'
    regex += '}'

    # In the recent version of valgrind-variant we've switched
    # from memcheck's default Addr[1248]/Value[1248]/Cond suppression types
    # to simply Unaddressable/Uninitialized.
    # The suppression generator no longer gives us "old" types thus
    # for the "new-type" suppressions:
    #  * Memcheck:Unaddressable should also match Addr* reports,
    #  * Memcheck:Uninitialized should also match Cond and Value reports,
    #
    # We also want to support legacy suppressions (e.g. copied from
    # upstream bugs etc), so:
    #  * Memcheck:Addr[1248] suppressions should match Unaddressable reports,
    #  * Memcheck:Cond and Memcheck:Value[1248] should match Uninitialized.
    # Please note the latest two rules only apply to the
    # tools/valgrind/waterfall.sh suppression matcher and the real
    # valgrind-variant Memcheck will not suppress
    # e.g. Addr1 printed as Unaddressable with Addr4 suppression.
    # Be careful to check the access size while copying legacy suppressions!
    for sz in [1, 2, 4, 8]:
      regex = regex.replace("\nMemcheck:Addr%d\n" % sz,
                            "\nMemcheck:(Addr%d|Unaddressable)\n" % sz)
      regex = regex.replace("\nMemcheck:Value%d\n" % sz,
                            "\nMemcheck:(Value%d|Uninitialized)\n" % sz)
    regex = regex.replace("\nMemcheck:Cond\n",
                          "\nMemcheck:(Cond|Uninitialized)\n")
    regex = regex.replace("\nMemcheck:Unaddressable\n",
                          "\nMemcheck:(Addr.|Unaddressable)\n")
    regex = regex.replace("\nMemcheck:Uninitialized\n",
                          "\nMemcheck:(Cond|Value.|Uninitialized)\n")

    # Returning __init__'s result (None) is harmless; kept as-is.
    return super(ValgrindStyleSuppression, self).__init__(
        description, type, stack, defined_at, regex)

  def __str__(self):
    """Stringify."""
    lines = [self.description, self.type] + self.stack
    return "{\n %s\n}\n" % "\n ".join(lines)
+
+
class SuppressionError(Exception):
  """Raised when a suppressions file cannot be parsed."""

  def __init__(self, message, happened_at):
    self._message = message
    self._happened_at = happened_at

  def __str__(self):
    where, what = self._happened_at, self._message
    return 'Error reading suppressions at %s!\n%s' % (where, what)
+
+
def ReadValgrindStyleSuppressions(lines, supp_descriptor):
  """Given a list of lines, returns a list of suppressions.

  Args:
    lines: a list of lines containing suppressions.
    supp_descriptor: should typically be a filename.
        Used only when printing errors.
  Returns:
    A list of ValgrindStyleSuppression objects.
  Raises:
    SuppressionError: on any syntax violation.
  """
  result = []
  # Parser state for the suppression currently being accumulated.
  cur_descr = ''
  cur_type = ''
  cur_stack = []
  in_suppression = False
  nline = 0
  for line in lines:
    nline += 1
    line = line.strip()
    if line.startswith('#'):
      continue
    if not in_suppression:
      if not line:
        # empty lines between suppressions
        pass
      elif line.startswith('{'):
        in_suppression = True
        pass
      else:
        raise SuppressionError('Expected: "{"',
                               "%s:%d" % (supp_descriptor, nline))
    elif line.startswith('}'):
      # End of block: emit the accumulated suppression and reset state.
      result.append(
          ValgrindStyleSuppression(cur_descr, cur_type, cur_stack,
                                   "%s:%d" % (supp_descriptor, nline)))
      cur_descr = ''
      cur_type = ''
      cur_stack = []
      in_suppression = False
    elif not cur_descr:
      # First line inside "{" is the free-form description.
      cur_descr = line
      continue
    elif not cur_type:
      # Second line must be "Memcheck:<type>" with a known type.
      if not line.startswith("Memcheck:"):
        raise SuppressionError(
            'Expected "Memcheck:TYPE", got "%s"' % line,
            "%s:%d" % (supp_descriptor, nline))
      supp_type = line.split(':')[1]
      if not supp_type in ["Addr1", "Addr2", "Addr4", "Addr8",
                           "Cond", "Free", "Jump", "Leak", "Overlap", "Param",
                           "Value1", "Value2", "Value4", "Value8",
                           "Unaddressable", "Uninitialized"]:
        raise SuppressionError('Unknown suppression type "%s"' % supp_type,
                               "%s:%d" % (supp_descriptor, nline))
      cur_type = line
      continue
    elif re.match("^fun:.*|^obj:.*|^\.\.\.$", line):
      cur_stack.append(line.strip())
    elif len(cur_stack) == 0 and cur_type == "Memcheck:Param":
      # Param suppressions name the syscall parameter on their first
      # stack line, which doesn't match the fun:/obj: pattern.
      cur_stack.append(line.strip())
    else:
      raise SuppressionError(
          '"fun:function_name" or "obj:object_file" or "..." expected',
          "%s:%d" % (supp_descriptor, nline))
  return result
+
+
def PresubmitCheckSuppressions(supps):
  """Check a list of suppressions and return a list of SuppressionErrors.

  Mostly useful for separating the checking logic from the Presubmit API for
  testing.
  """
  errors = []
  seen = {}  # description -> first Suppression carrying it.
  placeholder = re.compile("<.*suppression.name.here>")
  for supp in supps:
    # The templates ship with <insert_a_suppression_name_here> (Memcheck)
    # or name=<insert_a_suppression_name_here> (DrMemory); both must be
    # replaced with a real name before landing.
    if placeholder.search(supp.description):
      errors.append(
          SuppressionError(
              "You've forgotten to put a suppression name like bug_XXX",
              supp.defined_at))
      continue
    previous = seen.get(supp.description)
    if previous is None:
      seen[supp.description] = supp
    else:
      errors.append(
          SuppressionError(
              'Suppression named "%s" is defined more than once, '
              'see %s' % (supp.description, previous.defined_at),
              supp.defined_at))
  return errors
+
+
def PresubmitCheck(input_api, output_api):
  """A helper function useful in PRESUBMIT.py
  Returns a list of errors or [].
  """
  sup_regex = re.compile('suppressions.*\.txt$')
  errors = []
  for affected in input_api.AffectedFiles():
    # Only suppressions*.txt files are interesting here.
    if not sup_regex.search(affected.LocalPath()):
      continue
    try:
      supps = ReadSuppressionsFromFile(affected.AbsoluteLocalPath())
      errors.extend(PresubmitCheckSuppressions(supps))
    except SuppressionError as e:
      errors.append(e)

  return [output_api.PresubmitError(str(e)) for e in errors]
+
+
class DrMemorySuppression(Suppression):
  """A suppression using the DrMemory syntax.

  Attributes:
    instr: The instruction to match.
    Rest inherited from Suppression.
  """

  def __init__(self, name, report_type, instr, stack, defined_at):
    """Constructor.

    name: suppression name (goes into self.description).
    report_type: one of DRMEMORY_ERROR_TYPES.
    instr: optional instruction line, or None.
    stack: list of frame lines.
    defined_at: file:line identifying where the suppression was defined.
    """
    self.instr = instr

    # Construct the regex.
    regex = '{\n'
    if report_type == 'LEAK':
      # A plain LEAK suppression also covers POSSIBLE LEAK reports.
      regex += '(POSSIBLE )?LEAK'
    else:
      regex += report_type
    regex += '\nname=.*\n'

    # TODO(rnk): Implement http://crbug.com/107416#c5 .
    # drmemory_analyze.py doesn't generate suppressions with an instruction in
    # them, so these suppressions will always fail to match.  We should override
    # Match to fetch the instruction from the report and try to match against
    # that.
    if instr:
      regex += 'instruction=%s\n' % GlobToRegex(instr)

    for line in stack:
      if line == ELLIPSIS:
        # "..." matches zero or more whole frames.
        regex += '(.*\n)*'
      elif '!' in line:
        (mod, func) = line.split('!')
        if func == ELLIPSIS:  # mod!ellipsis frame: one or more frames in mod
          regex += '(%s\!.*\n)+' % GlobToRegex(mod, ignore_case=True)
        else:  # mod!func frame
          # Ignore case for the module match, but not the function match.
          regex += '%s\!%s\n' % (GlobToRegex(mod, ignore_case=True),
                                 GlobToRegex(func, ignore_case=False))
      else:
        regex += GlobToRegex(line)
        regex += '\n'
    regex += '(.*\n)*'  # Match anything left in the stack.
    regex += '}'
    return super(DrMemorySuppression, self).__init__(name, report_type, stack,
                                                     defined_at, regex)

  def __str__(self):
    """Stringify."""
    text = self.type + "\n"
    if self.description:
      text += "name=%s\n" % self.description
    if self.instr:
      text += "instruction=%s\n" % self.instr
    text += "\n".join(self.stack)
    text += "\n"
    return text
+
+
# Possible DrMemory error report types. Keep consistent with suppress_name
# array in drmemory/drmemory/report.c.
DRMEMORY_ERROR_TYPES = [
    'UNADDRESSABLE ACCESS',
    'UNINITIALIZED READ',
    'INVALID HEAP ARGUMENT',
    'GDI USAGE ERROR',
    'HANDLE LEAK',
    'LEAK',
    'POSSIBLE LEAK',
    'WARNING',
    ]


# Regexes to match valid drmemory frames.
DRMEMORY_FRAME_PATTERNS = [
    re.compile(r"^.*\!.*$"),               # mod!func
    re.compile(r"^.*!\.\.\.$"),            # mod!ellipsis
    re.compile(r"^\<.*\+0x.*\>$"),         # <mod+0xoffs>
    re.compile(r"^\<not in a module\>$"),  # frame with no module info
    re.compile(r"^system call .*$"),       # syscall frame
    re.compile(r"^\*$"),                   # wildcard (any one frame)
    re.compile(r"^\.\.\.$"),               # ellipsis (any frames)
    ]
+
+
def ReadDrMemorySuppressions(lines, supp_descriptor):
  """Given a list of lines, returns a list of DrMemory suppressions.

  Args:
    lines: a list of lines containing suppressions.
    supp_descriptor: should typically be a filename.
        Used only when parsing errors happen.
  Returns:
    A list of DrMemorySuppression objects.
  Raises:
    SuppressionError: on any syntax violation.
  """
  # NOTE: the outer and inner for-loops deliberately share this single
  # generator — the inner loop consumes a suppression's body, and the
  # outer loop resumes at the line after it.
  lines = StripAndSkipCommentsIterator(lines)
  suppressions = []
  for (line_no, line) in lines:
    if not line:
      continue
    if line not in DRMEMORY_ERROR_TYPES:
      raise SuppressionError('Expected a DrMemory error type, '
                             'found %r instead\n  Valid error types: %s' %
                             (line, ' '.join(DRMEMORY_ERROR_TYPES)),
                             "%s:%d" % (supp_descriptor, line_no))

    # Suppression starts here.
    report_type = line
    name = ''
    instr = None
    stack = []
    defined_at = "%s:%d" % (supp_descriptor, line_no)
    found_stack = False
    for (line_no, line) in lines:
      # Optional "name=" and "instruction=" headers precede the stack.
      if not found_stack and line.startswith('name='):
        name = line.replace('name=', '')
      elif not found_stack and line.startswith('instruction='):
        instr = line.replace('instruction=', '')
      else:
        # Unrecognized prefix indicates start of stack trace.
        found_stack = True
        if not line:
          # Blank line means end of suppression.
          break
        if not any([regex.match(line) for regex in DRMEMORY_FRAME_PATTERNS]):
          raise SuppressionError(
              ('Unexpected stack frame pattern at line %d\n' +
               'Frames should be one of the following:\n' +
               ' module!function\n' +
               ' module!...\n' +
               ' <module+0xhexoffset>\n' +
               ' <not in a module>\n' +
               ' system call Name\n' +
               ' *\n' +
               ' ...\n') % line_no, defined_at)
        stack.append(line)

    if len(stack) == 0:  # In case we hit EOF or blank without any stack frames.
      raise SuppressionError('Suppression "%s" has no stack frames, ends at %d'
                             % (name, line_no), defined_at)
    if stack[-1] == ELLIPSIS:
      raise SuppressionError('Suppression "%s" ends in an ellipsis on line %d' %
                             (name, line_no), defined_at)

    suppressions.append(
        DrMemorySuppression(name, report_type, instr, stack, defined_at))

  return suppressions
+
+
def ParseSuppressionOfType(lines, supp_descriptor, def_line_no, report_type):
  """Parse the suppression starting on this line.

  Suppressions start with a type, have an optional name and instruction, and a
  stack trace that ends in a blank line.
  """
  # NOTE(review): this function has no body and always returns None; it looks
  # like unfinished or dead code (ReadDrMemorySuppressions above does the
  # actual parsing inline). Confirm it is unused before deleting or relying
  # on it.
+
+
+
def TestStack(stack, positive, negative, suppression_parser=None):
  """A helper function for SelfTest() that checks a single stack.

  Args:
    stack: the stack to match the suppressions.
    positive: the list of suppressions that must match the given stack.
    negative: the list of suppressions that should not match.
    suppression_parser: optional arg for the suppression parser, default is
      ReadValgrindStyleSuppressions.
  """
  parse = (suppression_parser if suppression_parser
           else ReadValgrindStyleSuppressions)
  for supp_text in positive:
    suppression = parse(supp_text.split("\n"), "positive_suppression")[0]
    assert suppression.Match(stack.split("\n")), (
        "Suppression:\n%s\ndidn't match stack:\n%s" % (supp_text, stack))
  for supp_text in negative:
    suppression = parse(supp_text.split("\n"), "negative_suppression")[0]
    assert not suppression.Match(stack.split("\n")), (
        "Suppression:\n%s\ndid match stack:\n%s" % (supp_text, stack))
+
+
def TestFailPresubmit(supp_text, error_text, suppression_parser=None):
  """A helper function for SelfTest() that verifies a presubmit check fires.

  Args:
    supp_text: suppression text to parse.
    error_text: text of the presubmit error we expect to find.
    suppression_parser: optional arg for the suppression parser, default is
      ReadValgrindStyleSuppressions.
  """
  if not suppression_parser:
    suppression_parser = ReadValgrindStyleSuppressions
  try:
    supps = suppression_parser(supp_text.split("\n"), "<presubmit suppression>")
  # 'except E as e' works on Python 2.6+ and 3.x; the original
  # 'except SuppressionError, e' form is a syntax error on Python 3.
  except SuppressionError as e:
    # If parsing raised an exception, match the error text here.
    assert error_text in str(e), (
        "presubmit text %r not in SuppressionError:\n%r" %
        (error_text, str(e)))
  else:
    # Otherwise, run the presubmit checks over the supps. We expect a single
    # error that has text matching error_text.
    errors = PresubmitCheckSuppressions(supps)
    assert len(errors) == 1, (
        "expected exactly one presubmit error, got:\n%s" % errors)
    assert error_text in str(errors[0]), (
        "presubmit text %r not in SuppressionError:\n%r" %
        (error_text, str(errors[0])))
+
+
def SelfTest():
  """Tests the Suppression.Match() capabilities.

  Exercises Memcheck-style and DrMemory-style suppression matching, the
  presubmit checks, FilenameToTool, and the __str__ round-trips.  Raises
  AssertionError (via TestStack/TestFailPresubmit or directly) on failure.
  """

  # Four Memcheck stacks with identical frames but different error types.
  test_memcheck_stack_1 = """{
    test
    Memcheck:Leak
    fun:absolutly
    fun:brilliant
    obj:condition
    fun:detection
    fun:expression
  }"""

  test_memcheck_stack_2 = """{
    test
    Memcheck:Uninitialized
    fun:absolutly
    fun:brilliant
    obj:condition
    fun:detection
    fun:expression
  }"""

  test_memcheck_stack_3 = """{
    test
    Memcheck:Unaddressable
    fun:absolutly
    fun:brilliant
    obj:condition
    fun:detection
    fun:expression
  }"""

  test_memcheck_stack_4 = """{
    test
    Memcheck:Addr4
    fun:absolutly
    fun:brilliant
    obj:condition
    fun:detection
    fun:expression
  }"""

  positive_memcheck_suppressions_1 = [
    "{\nzzz\nMemcheck:Leak\nfun:absolutly\n}",
    "{\nzzz\nMemcheck:Leak\nfun:ab*ly\n}",
    "{\nzzz\nMemcheck:Leak\nfun:absolutly\nfun:brilliant\n}",
    "{\nzzz\nMemcheck:Leak\n...\nfun:brilliant\n}",
    "{\nzzz\nMemcheck:Leak\n...\nfun:detection\n}",
    "{\nzzz\nMemcheck:Leak\nfun:absolutly\n...\nfun:detection\n}",
    "{\nzzz\nMemcheck:Leak\nfun:ab*ly\n...\nfun:detection\n}",
    "{\nzzz\nMemcheck:Leak\n...\nobj:condition\n}",
    "{\nzzz\nMemcheck:Leak\n...\nobj:condition\nfun:detection\n}",
    "{\nzzz\nMemcheck:Leak\n...\nfun:brilliant\nobj:condition\n}",
  ]

  positive_memcheck_suppressions_2 = [
    "{\nzzz\nMemcheck:Uninitialized\nfun:absolutly\n}",
    "{\nzzz\nMemcheck:Uninitialized\nfun:ab*ly\n}",
    "{\nzzz\nMemcheck:Uninitialized\nfun:absolutly\nfun:brilliant\n}",
    # Legacy suppression types
    "{\nzzz\nMemcheck:Value1\n...\nfun:brilliant\n}",
    "{\nzzz\nMemcheck:Cond\n...\nfun:detection\n}",
    "{\nzzz\nMemcheck:Value8\nfun:absolutly\nfun:brilliant\n}",
  ]

  positive_memcheck_suppressions_3 = [
    "{\nzzz\nMemcheck:Unaddressable\nfun:absolutly\n}",
    "{\nzzz\nMemcheck:Unaddressable\nfun:absolutly\nfun:brilliant\n}",
    "{\nzzz\nMemcheck:Unaddressable\nfun:absolutly\nfun:brilliant\n}",
    # Legacy suppression types
    "{\nzzz\nMemcheck:Addr1\n...\nfun:brilliant\n}",
    "{\nzzz\nMemcheck:Addr8\n...\nfun:detection\n}",
  ]

  positive_memcheck_suppressions_4 = [
    "{\nzzz\nMemcheck:Addr4\nfun:absolutly\n}",
    "{\nzzz\nMemcheck:Unaddressable\nfun:absolutly\n}",
    "{\nzzz\nMemcheck:Addr4\nfun:absolutly\nfun:brilliant\n}",
    "{\nzzz\nMemcheck:Unaddressable\n...\nfun:brilliant\n}",
    "{\nzzz\nMemcheck:Addr4\n...\nfun:detection\n}",
  ]

  negative_memcheck_suppressions_1 = [
    "{\nzzz\nMemcheck:Leak\nfun:abnormal\n}",
    "{\nzzz\nMemcheck:Leak\nfun:ab*liant\n}",
    "{\nzzz\nMemcheck:Leak\nfun:brilliant\n}",
    "{\nzzz\nMemcheck:Leak\nobj:condition\n}",
    "{\nzzz\nMemcheck:Addr8\nfun:brilliant\n}",
  ]

  negative_memcheck_suppressions_2 = [
    "{\nzzz\nMemcheck:Cond\nfun:abnormal\n}",
    "{\nzzz\nMemcheck:Value2\nfun:abnormal\n}",
    "{\nzzz\nMemcheck:Uninitialized\nfun:ab*liant\n}",
    "{\nzzz\nMemcheck:Value4\nfun:brilliant\n}",
    "{\nzzz\nMemcheck:Leak\nobj:condition\n}",
    "{\nzzz\nMemcheck:Addr8\nfun:brilliant\n}",
    "{\nzzz\nMemcheck:Unaddressable\nfun:brilliant\n}",
  ]

  negative_memcheck_suppressions_3 = [
    "{\nzzz\nMemcheck:Addr1\nfun:abnormal\n}",
    "{\nzzz\nMemcheck:Uninitialized\nfun:absolutly\n}",
    "{\nzzz\nMemcheck:Addr2\nfun:ab*liant\n}",
    "{\nzzz\nMemcheck:Value4\nfun:brilliant\n}",
    "{\nzzz\nMemcheck:Leak\nobj:condition\n}",
    "{\nzzz\nMemcheck:Addr8\nfun:brilliant\n}",
  ]

  negative_memcheck_suppressions_4 = [
    "{\nzzz\nMemcheck:Addr1\nfun:abnormal\n}",
    "{\nzzz\nMemcheck:Addr4\nfun:abnormal\n}",
    "{\nzzz\nMemcheck:Unaddressable\nfun:abnormal\n}",
    "{\nzzz\nMemcheck:Addr1\nfun:absolutly\n}",
    "{\nzzz\nMemcheck:Addr2\nfun:ab*liant\n}",
    "{\nzzz\nMemcheck:Value4\nfun:brilliant\n}",
    "{\nzzz\nMemcheck:Leak\nobj:condition\n}",
    "{\nzzz\nMemcheck:Addr8\nfun:brilliant\n}",
  ]

  TestStack(test_memcheck_stack_1,
            positive_memcheck_suppressions_1,
            negative_memcheck_suppressions_1)
  TestStack(test_memcheck_stack_2,
            positive_memcheck_suppressions_2,
            negative_memcheck_suppressions_2)
  TestStack(test_memcheck_stack_3,
            positive_memcheck_suppressions_3,
            negative_memcheck_suppressions_3)
  TestStack(test_memcheck_stack_4,
            positive_memcheck_suppressions_4,
            negative_memcheck_suppressions_4)

  # TODO(timurrrr): add TestFailPresubmit tests.

  ### DrMemory self tests.

  # http://crbug.com/96010 suppression.
  stack_96010 = """{
    UNADDRESSABLE ACCESS
    name=<insert_a_suppression_name_here>
    *!TestingProfile::FinishInit
    *!TestingProfile::TestingProfile
    *!BrowserAboutHandlerTest_WillHandleBrowserAboutURL_Test::TestBody
    *!testing::Test::Run
  }"""

  suppress_96010 = [
    "UNADDRESSABLE ACCESS\nname=zzz\n...\n*!testing::Test::Run\n",
    ("UNADDRESSABLE ACCESS\nname=zzz\n...\n" +
     "*!BrowserAboutHandlerTest_WillHandleBrowserAboutURL_Test::TestBody\n"),
    "UNADDRESSABLE ACCESS\nname=zzz\n...\n*!BrowserAboutHandlerTest*\n",
    "UNADDRESSABLE ACCESS\nname=zzz\n*!TestingProfile::FinishInit\n",
    # No name should be needed
    "UNADDRESSABLE ACCESS\n*!TestingProfile::FinishInit\n",
    # Whole trace
    ("UNADDRESSABLE ACCESS\n" +
     "*!TestingProfile::FinishInit\n" +
     "*!TestingProfile::TestingProfile\n" +
     "*!BrowserAboutHandlerTest_WillHandleBrowserAboutURL_Test::TestBody\n" +
     "*!testing::Test::Run\n"),
  ]

  negative_96010 = [
    # Wrong type
    "UNINITIALIZED READ\nname=zzz\n*!TestingProfile::FinishInit\n",
    # No ellipsis
    "UNADDRESSABLE ACCESS\nname=zzz\n*!BrowserAboutHandlerTest*\n",
  ]

  TestStack(stack_96010, suppress_96010, negative_96010,
            suppression_parser=ReadDrMemorySuppressions)

  # Invalid heap arg
  stack_invalid = """{
    INVALID HEAP ARGUMENT
    name=asdf
    *!foo
  }"""
  suppress_invalid = [
    "INVALID HEAP ARGUMENT\n*!foo\n",
  ]
  negative_invalid = [
    "UNADDRESSABLE ACCESS\n*!foo\n",
  ]

  TestStack(stack_invalid, suppress_invalid, negative_invalid,
            suppression_parser=ReadDrMemorySuppressions)

  # Suppress only ntdll
  stack_in_ntdll = """{
    UNADDRESSABLE ACCESS
    name=<insert_a_suppression_name_here>
    ntdll.dll!RtlTryEnterCriticalSection
  }"""
  stack_not_ntdll = """{
    UNADDRESSABLE ACCESS
    name=<insert_a_suppression_name_here>
    notntdll.dll!RtlTryEnterCriticalSection
  }"""

  suppress_in_ntdll = [
    "UNADDRESSABLE ACCESS\nntdll.dll!RtlTryEnterCriticalSection\n",
  ]
  suppress_in_any = [
    "UNADDRESSABLE ACCESS\n*!RtlTryEnterCriticalSection\n",
  ]

  TestStack(stack_in_ntdll, suppress_in_ntdll + suppress_in_any, [],
            suppression_parser=ReadDrMemorySuppressions)
  # Make sure we don't wildcard away the "not" part and match ntdll.dll by
  # accident.
  TestStack(stack_not_ntdll, suppress_in_any, suppress_in_ntdll,
            suppression_parser=ReadDrMemorySuppressions)

  # Suppress a POSSIBLE LEAK with LEAK.
  stack_foo_possible = """{
    POSSIBLE LEAK
    name=foo possible
    *!foo
  }"""
  suppress_foo_possible = [ "POSSIBLE LEAK\n*!foo\n" ]
  suppress_foo_leak = [ "LEAK\n*!foo\n" ]
  TestStack(stack_foo_possible, suppress_foo_possible + suppress_foo_leak, [],
            suppression_parser=ReadDrMemorySuppressions)

  # Don't suppress LEAK with POSSIBLE LEAK.
  stack_foo_leak = """{
    LEAK
    name=foo leak
    *!foo
  }"""
  TestStack(stack_foo_leak, suppress_foo_leak, suppress_foo_possible,
            suppression_parser=ReadDrMemorySuppressions)

  # Test case insensitivity of module names.
  stack_user32_mixed_case = """{
    LEAK
    name=<insert>
    USER32.dll!foo
    user32.DLL!bar
    user32.dll!baz
  }"""
  suppress_user32 = [ # Module name case doesn't matter.
    "LEAK\nuser32.dll!foo\nuser32.dll!bar\nuser32.dll!baz\n",
    "LEAK\nUSER32.DLL!foo\nUSER32.DLL!bar\nUSER32.DLL!baz\n",
  ]
  no_suppress_user32 = [ # Function name case matters.
    "LEAK\nuser32.dll!FOO\nuser32.dll!BAR\nuser32.dll!BAZ\n",
    "LEAK\nUSER32.DLL!FOO\nUSER32.DLL!BAR\nUSER32.DLL!BAZ\n",
  ]
  TestStack(stack_user32_mixed_case, suppress_user32, no_suppress_user32,
            suppression_parser=ReadDrMemorySuppressions)

  # Test mod!... frames.
  stack_kernel32_through_ntdll = """{
    LEAK
    name=<insert>
    kernel32.dll!foo
    KERNEL32.dll!bar
    kernel32.DLL!baz
    ntdll.dll!quux
  }"""
  suppress_mod_ellipsis = [
    "LEAK\nkernel32.dll!...\nntdll.dll!quux\n",
    "LEAK\nKERNEL32.DLL!...\nntdll.dll!quux\n",
  ]
  no_suppress_mod_ellipsis = [
    # Need one or more matching frames, not zero, unlike regular ellipsis.
    "LEAK\nuser32.dll!...\nkernel32.dll!...\nntdll.dll!quux\n",
  ]
  TestStack(stack_kernel32_through_ntdll, suppress_mod_ellipsis,
            no_suppress_mod_ellipsis,
            suppression_parser=ReadDrMemorySuppressions)

  # Test that the presubmit checks work.
  forgot_to_name = """
    UNADDRESSABLE ACCESS
    name=<insert_a_suppression_name_here>
    ntdll.dll!RtlTryEnterCriticalSection
    """
  TestFailPresubmit(forgot_to_name, 'forgotten to put a suppression',
                    suppression_parser=ReadDrMemorySuppressions)

  named_twice = """
    UNADDRESSABLE ACCESS
    name=http://crbug.com/1234
    *!foo

    UNADDRESSABLE ACCESS
    name=http://crbug.com/1234
    *!bar
    """
  TestFailPresubmit(named_twice, 'defined more than once',
                    suppression_parser=ReadDrMemorySuppressions)

  forgot_stack = """
    UNADDRESSABLE ACCESS
    name=http://crbug.com/1234
    """
  TestFailPresubmit(forgot_stack, 'has no stack frames',
                    suppression_parser=ReadDrMemorySuppressions)

  ends_in_ellipsis = """
    UNADDRESSABLE ACCESS
    name=http://crbug.com/1234
    ntdll.dll!RtlTryEnterCriticalSection
    ...
    """
  TestFailPresubmit(ends_in_ellipsis, 'ends in an ellipsis',
                    suppression_parser=ReadDrMemorySuppressions)

  bad_stack_frame = """
    UNADDRESSABLE ACCESS
    name=http://crbug.com/1234
    fun:memcheck_style_frame
    """
  TestFailPresubmit(bad_stack_frame, 'Unexpected stack frame pattern',
                    suppression_parser=ReadDrMemorySuppressions)

  # Test FilenameToTool.
  filenames_to_tools = {
    "tools/valgrind/drmemory/suppressions.txt": "drmemory",
    "tools/valgrind/drmemory/suppressions_full.txt": "drmemory",
    "tools/valgrind/memcheck/suppressions.txt": "memcheck",
    "tools/valgrind/memcheck/suppressions_mac.txt": "memcheck",
    "asdf/tools/valgrind/memcheck/suppressions_mac.txt": "memcheck",
    "foo/bar/baz/tools/valgrind/memcheck/suppressions_mac.txt": "memcheck",
    "foo/bar/baz/tools/valgrind/suppressions.txt": None,
    "tools/valgrind/suppressions.txt": None,
  }
  for (filename, expected_tool) in filenames_to_tools.items():
    # NOTE(review): str.replace returns a new string and the result is
    # discarded here, so this line is a no-op -- FilenameToTool always sees
    # '/'-separated paths. Confirm whether native separators were intended.
    filename.replace('/', os.sep) # Make the path look native.
    tool = FilenameToTool(filename)
    assert tool == expected_tool, (
        "failed to get expected tool for filename %r, expected %s, got %s" %
        (filename, expected_tool, tool))

  # Test ValgrindStyleSuppression.__str__.
  supp = ValgrindStyleSuppression("http://crbug.com/1234", "Memcheck:Leak",
                                  ["...", "fun:foo"], "supp.txt:1")
  # Intentional 3-space indent. =/
  supp_str = ("{\n"
              "   http://crbug.com/1234\n"
              "   Memcheck:Leak\n"
              "   ...\n"
              "   fun:foo\n"
              "}\n")
  assert str(supp) == supp_str, (
      "str(supp) != supp_str:\nleft: %s\nright: %s" % (str(supp), supp_str))

  # Test DrMemorySuppression.__str__.
  supp = DrMemorySuppression(
      "http://crbug.com/1234", "LEAK", None, ["...", "*!foo"], "supp.txt:1")
  supp_str = ("LEAK\n"
              "name=http://crbug.com/1234\n"
              "...\n"
              "*!foo\n")
  assert str(supp) == supp_str, (
      "str(supp) != supp_str:\nleft: %s\nright: %s" % (str(supp), supp_str))

  supp = DrMemorySuppression(
      "http://crbug.com/1234", "UNINITIALIZED READ", "test 0x08(%eax) $0x01",
      ["ntdll.dll!*", "*!foo"], "supp.txt:1")
  supp_str = ("UNINITIALIZED READ\n"
              "name=http://crbug.com/1234\n"
              "instruction=test 0x08(%eax) $0x01\n"
              "ntdll.dll!*\n"
              "*!foo\n")
  assert str(supp) == supp_str, (
      "str(supp) != supp_str:\nleft: %s\nright: %s" % (str(supp), supp_str))
+
+
if __name__ == '__main__':
  # Run the self-checks when invoked directly; any failure raises
  # AssertionError before PASS is printed.
  SelfTest()
  # Parenthesized form prints identically on Python 2 and is required syntax
  # on Python 3 (the original 'print' statement is a Py3 syntax error).
  print('PASS')
diff --git a/chromium/tools/valgrind/test_suppressions.py b/chromium/tools/valgrind/test_suppressions.py
new file mode 100755
index 00000000000..3ad374d86ad
--- /dev/null
+++ b/chromium/tools/valgrind/test_suppressions.py
@@ -0,0 +1,195 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+from collections import defaultdict
+import json
+import os
+import re
+import subprocess
+import sys
+
+import suppressions
+
+
def ReadReportsFromFile(filename):
  """Returns a list of (report_hash, report) pairs and the URL of the report
  on the waterfall.

  Suppressions in the build-log dump are delimited by '{' and '}' lines; the
  last line of the file is assumed to hold the report's URL.

  Args:
    filename: path of the build-log text dump to parse.
  Returns:
    (reports, url): reports is a list of [error_hash, report_text] pairs;
    url is the (stripped) last line of the file, or "" for an empty file.
  """
  # reports is a list of (error hash, report) pairs.
  reports = []
  in_suppression = False
  cur_supp = []
  # This stores the last error hash found while reading the file.
  last_hash = ""
  # Initialize so an empty input file returns "" instead of raising
  # NameError at the final return.
  line = ""
  # with + open() closes the handle even on error; the original used the
  # Python-2-only file() builtin and leaked the descriptor.
  with open(filename, 'r') as input_file:
    for line in input_file:
      line = line.strip()
      # Strip the HTML span markup and entities that buildbot log pages
      # embed in the raw text.
      line = line.replace("</span><span class=\"stdout\">", "")
      line = line.replace("</span><span class=\"stderr\">", "")
      line = line.replace("&lt;", "<")
      line = line.replace("&gt;", ">")
      if in_suppression:
        if line == "}":
          cur_supp += ["}"]
          reports += [[last_hash, "\n".join(cur_supp)]]
          in_suppression = False
          cur_supp = []
          last_hash = ""
        else:
          cur_supp += [" "*3 + line]
      elif line == "{":
        in_suppression = True
        cur_supp = ["{"]
      elif line.find("Suppression (error hash=#") == 0:
        # The 16-character hash sits between the '#' delimiters.
        last_hash = line[25:41]
  # The line at the end of the file is assumed to store the URL of the report.
  return reports, line
+
def Demangle(names):
  """ Demangle a list of C++ symbols, return a list of human-readable symbols.

  Args:
    names: list of mangled symbol strings.
  Returns:
    A list of demangled strings, parallel to |names|.
  """
  # -n is not the default on Mac.
  args = ['c++filt', '-n']
  # universal_newlines makes communicate() accept and return str on
  # Python 3 (text-mode pipes) and is a no-op for the bytes-as-str
  # behavior on Python 2.
  pipe = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                          universal_newlines=True)
  stdout, _ = pipe.communicate(input='\n'.join(names))
  # c++filt emits one newline-terminated line per input symbol.  splitlines()
  # drops the trailing empty entry that split("\n") would keep, so the result
  # really is parallel to |names|.  (The original split on "\n" and would
  # have tripped its own length assert on any input, as its comment about
  # the trailing '' itself pointed out.)
  demangled = stdout.splitlines()
  assert len(demangled) == len(names)
  return demangled
+
def GetSymbolsFromReport(report):
  """Extract all symbols from a suppression report.

  Returns the text after the first 'fun:' marker of every line that contains
  one, in the order the lines appear in |report|.
  """
  prefix = "fun:"
  return [line[line.find(prefix) + len(prefix):]
          for line in report.splitlines()
          if line.find(prefix) != -1]
+
def PrintTopSymbols(symbol_reports, top_count):
  """Print the |top_count| symbols with the most occurrences.

  Args:
    symbol_reports: mapping of mangled symbol -> list of report hashes the
      symbol appeared in (duplicates allowed).
    top_count: how many top-ranked symbols to print.
  """
  # Symbols too generic to be interesting in a ranking.
  boring_symbols=['malloc', '_Znw*', 'TestBody']
  # Rank by the number of *unique* reports containing each symbol.
  sorted_reports = sorted(filter(lambda x:x[0] not in boring_symbols,
                                 symbol_reports.iteritems()),
                          key=lambda x:len(set(x[1])), reverse=True)
  # Demangle all keys once and build a mangled -> demangled display table.
  symbols = symbol_reports.keys()
  demangled = Demangle(symbols)
  assert len(demangled) == len(symbols)
  symboltable = dict(zip(symbols, demangled))

  print "\n"
  print "Top %d symbols" % top_count
  for (symbol, suppressions) in sorted_reports[:top_count]:
    print "%4d occurrences : %s" % (len(set(suppressions)), symboltable[symbol])
+
def ReadHashExclusions(exclusions):
  """Read the list of excluded error hashes from a JSON file.

  Args:
    exclusions: path to a JSON file of the form {"hashes": ["<hash>", ...]}.
  Returns:
    The list stored under the "hashes" key.
  Raises:
    KeyError: if the file has no "hashes" key.
  """
  # with + open() closes the handle promptly; the original used the
  # Python-2-only file() builtin and never closed it.
  with open(exclusions, 'r') as input_file:
    contents = json.load(input_file)
  return contents['hashes']
+
+
+def main(argv):
+ supp = suppressions.GetSuppressions()
+
+ # all_reports is a map {report: list of urls containing this report}
+ all_reports = defaultdict(list)
+ report_hashes = {}
+ symbol_reports = defaultdict(list)
+
+ # Create argument parser.
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--top-symbols', type=int, default=0,
+ help='Print a list of the top <n> symbols')
+ parser.add_argument('--symbol-filter', action='append',
+ help='Filter out all suppressions not containing the specified symbol(s). '
+ 'Matches against the mangled names.')
+ parser.add_argument('--exclude-symbol', action='append',
+ help='Filter out all suppressions containing the specified symbol(s). '
+ 'Matches against the mangled names.')
+ parser.add_argument('--exclude-hashes', action='append',
+ help='Specify a .json file with a list of hashes to exclude.')
+
+ parser.add_argument('reports', metavar='report file', nargs='+',
+ help='List of report files')
+ args = parser.parse_args(argv)
+
+ # exclude_hashes is a list of strings, each string an error hash.
+ exclude_hashes = []
+
+ exclude_hashes = []
+ if args.exclude_hashes:
+ for excl in args.exclude_hashes:
+ print "reading exclusion", excl
+ exclude_hashes += ReadHashExclusions(excl)
+
+ for f in args.reports:
+ f_reports, url = ReadReportsFromFile(f)
+ for (hash, report) in f_reports:
+ if hash in exclude_hashes:
+ continue
+ all_reports[report] += [url]
+ report_hashes[report] = hash
+
+ reports_count = 0
+ for r in all_reports:
+ cur_supp = supp['common_suppressions']
+ if all([re.search("%20Mac%20|mac_valgrind", url)
+ for url in all_reports[r]]):
+ # Include mac suppressions if the report is only present on Mac
+ cur_supp += supp['mac_suppressions']
+ elif all([re.search("Linux%20", url) for url in all_reports[r]]):
+ cur_supp += supp['linux_suppressions']
+ if all(["DrMemory" in url for url in all_reports[r]]):
+ cur_supp += supp['drmem_suppressions']
+ if all(["DrMemory%20full" in url for url in all_reports[r]]):
+ cur_supp += supp['drmem_full_suppressions']
+
+ # Test if this report is already suppressed
+ skip = False
+ for s in cur_supp:
+ if s.Match(r.split("\n")):
+ skip = True
+ break
+
+ # Skip reports if none of the symbols are in the report.
+ if args.symbol_filter and all(not s in r for s in args.symbol_filter):
+ skip = True
+ if args.exclude_symbol and any(s in r for s in args.exclude_symbol):
+ skip = True
+
+ if not skip:
+ reports_count += 1
+ print "==================================="
+ print "This report observed at"
+ for url in all_reports[r]:
+ print " %s" % url
+ print "didn't match any suppressions:"
+ print "Suppression (error hash=#%s#):" % (report_hashes[r])
+ print r
+ print "==================================="
+
+ if args.top_symbols > 0:
+ symbols = GetSymbolsFromReport(r)
+ for symbol in symbols:
+ symbol_reports[symbol].append(report_hashes[r])
+
+ if reports_count > 0:
+ print ("%d unique reports don't match any of the suppressions" %
+ reports_count)
+ if args.top_symbols > 0:
+ PrintTopSymbols(symbol_reports, args.top_symbols)
+
+ else:
+ print "Congratulations! All reports are suppressed!"
+ # TODO(timurrrr): also make sure none of the old suppressions
+ # were narrowed too much.
+
+
if __name__ == "__main__":
  # Exit status is always 0; unmatched reports are only printed, not fatal.
  main(sys.argv[1:])
diff --git a/chromium/tools/valgrind/tsan_v2/suppressions.txt b/chromium/tools/valgrind/tsan_v2/suppressions.txt
new file mode 100644
index 00000000000..80c43986f47
--- /dev/null
+++ b/chromium/tools/valgrind/tsan_v2/suppressions.txt
@@ -0,0 +1,2 @@
+# This file is deprecated; please add new suppressions to
+# build/sanitizers/tsan_suppressions.cc.
diff --git a/chromium/tools/valgrind/unused_suppressions.py b/chromium/tools/valgrind/unused_suppressions.py
new file mode 100755
index 00000000000..0f336f136d0
--- /dev/null
+++ b/chromium/tools/valgrind/unused_suppressions.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+import urllib2
+
+import suppressions
+
+
def main():
  """POST all known suppression names to the chromium-build-logs app and
  write the server's reply (the unused ones) to stdout.

  Returns:
    0 on success.
  """
  supp = suppressions.GetSuppressions()

  # Flatten the per-tool suppression lists into one list of descriptions.
  all_supps = [s.description for supps in supp.values() for s in supps]
  response = urllib2.urlopen(
      'http://chromium-build-logs.appspot.com/unused_suppressions',
      '\n'.join(all_supps))
  sys.stdout.write(response.read())
  return 0
+
if __name__ == "__main__":
  # Propagate main()'s return value (0 on success) as the exit status.
  sys.exit(main())
diff --git a/chromium/tools/valgrind/valgrind.sh b/chromium/tools/valgrind/valgrind.sh
new file mode 100755
index 00000000000..59b40719419
--- /dev/null
+++ b/chromium/tools/valgrind/valgrind.sh
@@ -0,0 +1,106 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a small script for manually launching valgrind, along with passing
+# it the suppression file, and some helpful arguments (automatically attaching
+# the debugger on failures, etc). Run it from your repo root, something like:
+# $ sh ./tools/valgrind/valgrind.sh ./out/Debug/chrome
+#
+# This is mostly intended for running the chrome browser interactively.
+# To run unit tests, you probably want to run chrome_tests.sh instead.
+# That's the script used by the valgrind buildbot.
+
+export THISDIR=`dirname $0`
+
# Configure RUN_COMMAND and DEFAULT_TOOL_FLAGS for the Memcheck tool.
# Memcheck is valgrind's default tool, so plain "valgrind" suffices.
setup_memcheck() {
  RUN_COMMAND="valgrind"

  # Prompt to attach gdb when there was an error detected.
  DEFAULT_TOOL_FLAGS=("--db-command=gdb -nw %f %p" "--db-attach=yes" \
                      # Keep the registers in gdb in sync with the code.
                      "--vex-iropt-register-updates=allregs-at-mem-access" \
                      # Overwrite newly allocated or freed objects
                      # with 0x41 to catch inproper use.
                      "--malloc-fill=41" "--free-fill=41" \
                      # Increase the size of stacks being tracked.
                      "--num-callers=30")
}
+
# Fallback when --tool names a tool we have no dedicated flag set for:
# warn and run it with no extra flags.
setup_unknown() {
  echo "Unknown tool \"$TOOL_NAME\" specified, the result is not guaranteed"
  DEFAULT_TOOL_FLAGS=()
}
+
set -e

if [ $# -eq 0 ]; then
  echo "usage: <command to run> <arguments ...>"
  exit 1
fi

TOOL_NAME="memcheck"
# Pre-declare the flags variable as an array; the setup_* helpers fill it in.
declare -a DEFAULT_TOOL_FLAGS[0]

# Select a tool different from memcheck with --tool=TOOL as a first argument
TMP_STR=`echo $1 | sed 's/^\-\-tool=//'`
if [ "$TMP_STR" != "$1" ]; then
  TOOL_NAME="$TMP_STR"
  shift
fi

# Reject --tool anywhere but the first position to avoid silently passing it
# through to the program under test.
if echo "$@" | grep "\-\-tool" ; then
  echo "--tool=TOOL must be the first argument" >&2
  exit 1
fi

case $TOOL_NAME in
  memcheck*) setup_memcheck "$1";;
  *) setup_unknown;;
esac


SUPPRESSIONS="$THISDIR/$TOOL_NAME/suppressions.txt"

CHROME_VALGRIND=`sh $THISDIR/locate_valgrind.sh`
if [ "$CHROME_VALGRIND" = "" ]
then
  # locate_valgrind.sh failed
  exit 1
fi
echo "Using valgrind binaries from ${CHROME_VALGRIND}"

set -x
PATH="${CHROME_VALGRIND}/bin:$PATH"
# We need to set these variables to override default lib paths hard-coded into
# Valgrind binary.
export VALGRIND_LIB="$CHROME_VALGRIND/lib/valgrind"
export VALGRIND_LIB_INNER="$CHROME_VALGRIND/lib/valgrind"

# G_SLICE=always-malloc: make glib use system malloc
# NSS_DISABLE_UNLOAD=1: make nss skip dlclosing dynamically loaded modules,
# which would result in "obj:*" in backtraces.
# NSS_DISABLE_ARENA_FREE_LIST=1: make nss use system malloc
# G_DEBUG=fatal_warnings: make GTK abort on any critical or warning assertions.
# If it crashes on you in the Options menu, you hit bug 19751,
# comment out the G_DEBUG=fatal_warnings line.
#
# GTEST_DEATH_TEST_USE_FORK=1: make gtest death tests valgrind-friendly
#
# When everyone has the latest valgrind, we might want to add
# --show-possibly-lost=no
# to ignore possible but not definite leaks.

# The env assignments apply only to this one valgrind invocation.
G_SLICE=always-malloc \
NSS_DISABLE_UNLOAD=1 \
NSS_DISABLE_ARENA_FREE_LIST=1 \
G_DEBUG=fatal_warnings \
GTEST_DEATH_TEST_USE_FORK=1 \
$RUN_COMMAND \
  --trace-children=yes \
  --leak-check=yes \
  --suppressions="$SUPPRESSIONS" \
  "${DEFAULT_TOOL_FLAGS[@]}" \
  "$@"
diff --git a/chromium/tools/valgrind/valgrind_test.py b/chromium/tools/valgrind/valgrind_test.py
new file mode 100644
index 00000000000..7c2de9948bc
--- /dev/null
+++ b/chromium/tools/valgrind/valgrind_test.py
@@ -0,0 +1,846 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs an exe through Valgrind and puts the intermediate files in a
+directory.
+"""
+
+import datetime
+import glob
+import logging
+import optparse
+import os
+import re
+import shutil
+import stat
+import subprocess
+import sys
+import tempfile
+
+import common
+
+import drmemory_analyze
+import memcheck_analyze
+
class BaseTool(object):
  """Abstract class for running dynamic error detection tools.

  Always subclass this and implement ToolCommand with framework- and
  tool-specific stuff.
  """

  def __init__(self):
    # temp_parent_dir stays None (-> system temp dir) except on Windows,
    # where logs must live somewhere the Low Integrity gpu process can write.
    temp_parent_dir = None
    self.log_parent_dir = ""
    if common.IsWindows():
      # gpu process on Windows Vista+ runs at Low Integrity and can only
      # write to certain directories (http://crbug.com/119131)
      #
      # TODO(bruening): if scripts die in middle and don't clean up temp
      # dir, we'll accumulate files in profile dir. should remove
      # really old files automatically.
      profile = os.getenv("USERPROFILE")
      if profile:
        self.log_parent_dir = profile + "\\AppData\\LocalLow\\"
        if os.path.exists(self.log_parent_dir):
          self.log_parent_dir = common.NormalizeWindowsPath(self.log_parent_dir)
          temp_parent_dir = self.log_parent_dir
    # Generated every time (even when overridden)
    self.temp_dir = tempfile.mkdtemp(prefix="vg_logs_", dir=temp_parent_dir)
    self.log_dir = self.temp_dir # overridable by --keep_logs
    # Callbacks registered by subclasses to extend the option parser.
    self.option_parser_hooks = []
    # Environment exported before running the test binary (see Execute()).
    # TODO(glider): we may not need some of the env vars on some of the
    # platforms.
    self._env = {
      "G_SLICE" : "always-malloc",
      "NSS_DISABLE_UNLOAD" : "1",
      "NSS_DISABLE_ARENA_FREE_LIST" : "1",
      "GTEST_DEATH_TEST_USE_FORK": "1",
    }

  def ToolName(self):
    """Returns the short tool name used in log file and directory names."""
    raise NotImplementedError, "This method should be implemented " \
                               "in the tool-specific subclass"

  def Analyze(self, check_sanity=False):
    """Parses the tool's logs; returns 0 on success, non-zero on reports."""
    raise NotImplementedError, "This method should be implemented " \
                               "in the tool-specific subclass"

  def RegisterOptionParserHook(self, hook):
    # Frameworks and tools can add their own flags to the parser.
    self.option_parser_hooks.append(hook)

  def CreateOptionParser(self):
    # Defines Chromium-specific flags.
    self._parser = optparse.OptionParser("usage: %prog [options] <program to "
                                         "test>")
    self._parser.disable_interspersed_args()
    self._parser.add_option("-t", "--timeout",
                      dest="timeout", metavar="TIMEOUT", default=10000,
                      help="timeout in seconds for the run (default 10000)")
    self._parser.add_option("", "--build-dir",
                            help="the location of the compiler output")
    self._parser.add_option("", "--source-dir",
                            help="path to top of source tree for this build"
                                 "(used to normalize source paths in baseline)")
    self._parser.add_option("", "--gtest_filter", default="",
                            help="which test case to run")
    self._parser.add_option("", "--gtest_repeat",
                            help="how many times to run each test")
    self._parser.add_option("", "--gtest_print_time", action="store_true",
                            default=False,
                            help="show how long each test takes")
    self._parser.add_option("", "--ignore_exit_code", action="store_true",
                            default=False,
                            help="ignore exit code of the test "
                                 "(e.g. test failures)")
    self._parser.add_option("", "--keep_logs", action="store_true",
                            default=False,
                            help="store memory tool logs in the <tool>.logs "
                                 "directory instead of /tmp.\nThis can be "
                                 "useful for tool developers/maintainers.\n"
                                 "Please note that the <tool>.logs directory "
                                 "will be clobbered on tool startup.")

    # To add framework- or tool-specific flags, please add a hook using
    # RegisterOptionParserHook in the corresponding subclass.
    # See ValgrindTool for an example.
    for hook in self.option_parser_hooks:
      hook(self, self._parser)

  def ParseArgv(self, args):
    """Splits |args| into our own flags, tool flags, and the test command.

    Unknown flags appearing before the program name are collected into
    self._tool_flags and forwarded to the tool verbatim.  Returns True.
    """
    self.CreateOptionParser()

    # self._tool_flags will store those tool flags which we don't parse
    # manually in this script.
    self._tool_flags = []
    known_args = []

    """ We assume that the first argument not starting with "-" is a program
    name and all the following flags should be passed to the program.
    TODO(timurrrr): customize optparse instead
    """
    while len(args) > 0 and args[0][:1] == "-":
      arg = args[0]
      if (arg == "--"):
        break
      if self._parser.has_option(arg.split("=")[0]):
        known_args += [arg]
      else:
        self._tool_flags += [arg]
      args = args[1:]

    if len(args) > 0:
      known_args += args

    self._options, self._args = self._parser.parse_args(known_args)

    self._timeout = int(self._options.timeout)
    self._source_dir = self._options.source_dir
    if self._options.keep_logs:
      # log_parent_dir has trailing slash if non-empty
      self.log_dir = self.log_parent_dir + "%s.logs" % self.ToolName()
      if os.path.exists(self.log_dir):
        shutil.rmtree(self.log_dir)
      os.mkdir(self.log_dir)
      logging.info("Logs are in " + self.log_dir)

    self._ignore_exit_code = self._options.ignore_exit_code
    # Forward the gtest_* convenience flags to the test binary itself.
    if self._options.gtest_filter != "":
      self._args.append("--gtest_filter=%s" % self._options.gtest_filter)
    if self._options.gtest_repeat:
      self._args.append("--gtest_repeat=%s" % self._options.gtest_repeat)
    if self._options.gtest_print_time:
      self._args.append("--gtest_print_time")

    return True

  def Setup(self, args):
    return self.ParseArgv(args)

  def ToolCommand(self):
    """Returns the full command line (list) to run the test under the tool."""
    raise NotImplementedError, "This method should be implemented " \
                               "in the tool-specific subclass"

  def Cleanup(self):
    # You may override it in the tool-specific subclass
    pass

  def Execute(self):
    """ Execute the app to be tested after successful instrumentation.
    Full execution command-line provided by subclassers via proc."""
    logging.info("starting execution...")
    proc = self.ToolCommand()
    for var in self._env:
      common.PutEnvAndLog(var, self._env[var])
    return common.RunSubprocess(proc, self._timeout)

  def RunTestsAndAnalyze(self, check_sanity):
    """Runs the test, then the log analysis; analysis failures win."""
    exec_retcode = self.Execute()
    analyze_retcode = self.Analyze(check_sanity)

    if analyze_retcode:
      logging.error("Analyze failed.")
      logging.info("Search the log for '[ERROR]' to see the error reports.")
      return analyze_retcode

    if exec_retcode:
      if self._ignore_exit_code:
        logging.info("Test execution failed, but the exit code is ignored.")
      else:
        logging.error("Test execution failed.")
        return exec_retcode
    else:
      logging.info("Test execution completed successfully.")

    if not analyze_retcode:
      logging.info("Analysis completed successfully.")

    return 0

  def Main(self, args, check_sanity, min_runtime_in_seconds):
    """Call this to run through the whole process: Setup, Execute, Analyze"""
    start_time = datetime.datetime.now()
    retcode = -1
    if self.Setup(args):
      retcode = self.RunTestsAndAnalyze(check_sanity)
      shutil.rmtree(self.temp_dir, ignore_errors=True)
      self.Cleanup()
    else:
      logging.error("Setup failed")
    end_time = datetime.datetime.now()
    # Break the elapsed seconds into h:m:s (Python 2 integer division).
    runtime_in_seconds = (end_time - start_time).seconds
    hours = runtime_in_seconds / 3600
    seconds = runtime_in_seconds % 3600
    minutes = seconds / 60
    seconds = seconds % 60
    logging.info("elapsed time: %02d:%02d:%02d" % (hours, minutes, seconds))
    # A suspiciously quick run usually means the harness exited early.
    if (min_runtime_in_seconds > 0 and
        runtime_in_seconds < min_runtime_in_seconds):
      logging.error("Layout tests finished too quickly. "
                    "It should have taken at least %d seconds. "
                    "Something went wrong?" % min_runtime_in_seconds)
      retcode = -1
    return retcode

  def Run(self, args, module, min_runtime_in_seconds=0):
    """Entry point: run |args| for |module|, sanity-checking only 'base'."""
    MODULES_TO_SANITY_CHECK = ["base"]

    check_sanity = module in MODULES_TO_SANITY_CHECK
    return self.Main(args, check_sanity, min_runtime_in_seconds)
+
+
class ValgrindTool(BaseTool):
  """Abstract class for running Valgrind tools.

  Always subclass this and implement ToolSpecificFlags() and
  ExtendOptionParser() for tool-specific stuff.
  """
  def __init__(self):
    super(ValgrindTool, self).__init__()
    self.RegisterOptionParserHook(ValgrindTool.ExtendOptionParser)

  def UseXML(self):
    # Override if tool prefers nonxml output
    return True

  def ExtendOptionParser(self, parser):
    """Adds flags common to all Valgrind-based tools to |parser|."""
    parser.add_option("", "--suppressions", default=[],
                      action="append",
                      help="path to a valgrind suppression file")
    parser.add_option("", "--indirect", action="store_true",
                      default=False,
                      help="set BROWSER_WRAPPER rather than "
                           "running valgrind directly")
    parser.add_option("", "--indirect_webkit_layout", action="store_true",
                      default=False,
                      help="set --wrapper rather than running Dr. Memory "
                           "directly.")
    parser.add_option("", "--trace_children", action="store_true",
                      default=False,
                      help="also trace child processes")
    parser.add_option("", "--num-callers",
                      dest="num_callers", default=30,
                      help="number of callers to show in stack traces")
    parser.add_option("", "--generate_dsym", action="store_true",
                      default=False,
                      help="Generate .dSYM file on Mac if needed. Slow!")

  def Setup(self, args):
    if not BaseTool.Setup(self, args):
      return False
    if common.IsMac():
      self.PrepareForTestMac()
    return True

  def PrepareForTestMac(self):
    """Runs dsymutil if needed.

    Valgrind for Mac OS X requires that debugging information be in a .dSYM
    bundle generated by dsymutil.  It is not currently able to chase DWARF
    data into .o files like gdb does, so executables without .dSYM bundles or
    with the Chromium-specific "fake_dsym" bundles generated by
    build/mac/strip_save_dsym won't give source file and line number
    information in valgrind.

    This function will run dsymutil if the .dSYM bundle is missing or if
    it looks like a fake_dsym.  A non-fake dsym that already exists is assumed
    to be up-to-date.
    """
    test_command = self._args[0]
    dsym_bundle = self._args[0] + '.dSYM'
    dsym_file = os.path.join(dsym_bundle, 'Contents', 'Resources', 'DWARF',
                             os.path.basename(test_command))
    dsym_info_plist = os.path.join(dsym_bundle, 'Contents', 'Info.plist')

    needs_dsymutil = True
    saved_test_command = None

    if os.path.exists(dsym_file) and os.path.exists(dsym_info_plist):
      # Look for the special fake_dsym tag in dsym_info_plist.
      dsym_info_plist_contents = open(dsym_info_plist).read()

      if not re.search('^\s*<key>fake_dsym</key>$', dsym_info_plist_contents,
                       re.MULTILINE):
        # fake_dsym is not set, this is a real .dSYM bundle produced by
        # dsymutil.  dsymutil does not need to be run again.
        needs_dsymutil = False
      else:
        # fake_dsym is set.  dsym_file is a copy of the original test_command
        # before it was stripped.  Copy it back to test_command so that
        # dsymutil has unstripped input to work with.  Move the stripped
        # test_command out of the way, it will be restored when this is
        # done.
        saved_test_command = test_command + '.stripped'
        os.rename(test_command, saved_test_command)
        shutil.copyfile(dsym_file, test_command)
        shutil.copymode(saved_test_command, test_command)

    if needs_dsymutil:
      if self._options.generate_dsym:
        # Remove the .dSYM bundle if it exists.
        shutil.rmtree(dsym_bundle, True)

        dsymutil_command = ['dsymutil', test_command]

        # dsymutil is crazy slow.  Ideally we'd have a timeout here,
        # but common.RunSubprocess' timeout is only checked
        # after each line of output; dsymutil is silent
        # until the end, and is then killed, which is silly.
        common.RunSubprocess(dsymutil_command)

        if saved_test_command:
          os.rename(saved_test_command, test_command)
      else:
        logging.info("No real .dSYM for test_command.  Line numbers will "
                     "not be shown.  Either tell xcode to generate .dSYM "
                     "file, or use --generate_dsym option to this tool.")

  def ToolCommand(self):
    """Get the valgrind command to run."""
    # Note that self._args begins with the exe to be run.
    tool_name = self.ToolName()

    # Construct the valgrind command.
    if 'CHROME_VALGRIND' in os.environ:
      path = os.path.join(os.environ['CHROME_VALGRIND'], "bin", "valgrind")
    else:
      path = "valgrind"
    proc = [path, "--tool=%s" % tool_name]

    proc += ["--num-callers=%i" % int(self._options.num_callers)]

    if self._options.trace_children:
      proc += ["--trace-children=yes"]
      # Skip wrapper/helper processes that are uninteresting to instrument.
      proc += ["--trace-children-skip='*dbus-daemon*'"]
      proc += ["--trace-children-skip='*dbus-launch*'"]
      proc += ["--trace-children-skip='*perl*'"]
      proc += ["--trace-children-skip='*python*'"]
      # This is really Python, but for some reason Valgrind follows it.
      proc += ["--trace-children-skip='*lsb_release*'"]

    proc += self.ToolSpecificFlags()
    proc += self._tool_flags

    suppression_count = 0
    for suppression_file in self._options.suppressions:
      if os.path.exists(suppression_file):
        suppression_count += 1
        proc += ["--suppressions=%s" % suppression_file]

    if not suppression_count:
      logging.warning("WARNING: NOT USING SUPPRESSIONS!")

    # "%p" is expanded by Valgrind itself to the PID of the traced process.
    logfilename = self.log_dir + ("/%s." % tool_name) + "%p"
    if self.UseXML():
      proc += ["--xml=yes", "--xml-file=" + logfilename]
    else:
      proc += ["--log-file=" + logfilename]

    # The Valgrind command is constructed.

    # Handle --indirect_webkit_layout separately.
    if self._options.indirect_webkit_layout:
      # Need to create the wrapper before modifying |proc|.
      wrapper = self.CreateBrowserWrapper(proc, webkit=True)
      proc = self._args
      proc.append("--wrapper")
      proc.append(wrapper)
      return proc

    if self._options.indirect:
      wrapper = self.CreateBrowserWrapper(proc)
      os.environ["BROWSER_WRAPPER"] = wrapper
      logging.info('export BROWSER_WRAPPER=' + wrapper)
      proc = []
    proc += self._args
    return proc

  def ToolSpecificFlags(self):
    """Returns extra Valgrind flags specific to the concrete tool."""
    raise NotImplementedError, "This method should be implemented " \
                               "in the tool-specific subclass"

  def CreateBrowserWrapper(self, proc, webkit=False):
    """The program being run invokes Python or something else that can't stand
    to be valgrinded, and also invokes the Chrome browser. In this case, use a
    magic wrapper to only valgrind the Chrome browser. Build the wrapper here.
    Returns the path to the wrapper. It's up to the caller to use the wrapper
    appropriately.
    """
    command = " ".join(proc)
    # Add the PID of the browser wrapper to the logfile names so we can
    # separate log files for different UI tests at the analyze stage.
    command = command.replace("%p", "$$.%p")

    (fd, indirect_fname) = tempfile.mkstemp(dir=self.log_dir,
                                            prefix="browser_wrapper.",
                                            text=True)
    f = os.fdopen(fd, "w")
    f.write('#!/bin/bash\n'
            'echo "Started Valgrind wrapper for this test, PID=$$" >&2\n')

    f.write('DIR=`dirname $0`\n'
            'TESTNAME_FILE=$DIR/testcase.$$.name\n\n')

    if webkit:
      # Webkit layout_tests pass the URL as the first line of stdin.
      f.write('tee $TESTNAME_FILE | %s "$@"\n' % command)
    else:
      # Try to get the test case name by looking at the program arguments.
      # i.e. Chromium ui_tests used --test-name arg.
      # TODO(timurrrr): This doesn't handle "--test-name Test.Name"
      # TODO(timurrrr): ui_tests are dead. Where do we use the non-webkit
      # wrapper now? browser_tests? What do they do?
      f.write('for arg in $@\ndo\n'
              '  if [[ "$arg" =~ --test-name=(.*) ]]\n  then\n'
              '    echo ${BASH_REMATCH[1]} >$TESTNAME_FILE\n'
              '  fi\n'
              'done\n\n'
              '%s "$@"\n' % command)

    f.close()
    os.chmod(indirect_fname, stat.S_IRUSR|stat.S_IXUSR)
    return indirect_fname

  def CreateAnalyzer(self):
    """Returns an analyzer object able to parse this tool's log files."""
    raise NotImplementedError, "This method should be implemented " \
                               "in the tool-specific subclass"

  def GetAnalyzeResults(self, check_sanity=False):
    """Feeds all collected log files to the analyzer; returns its status."""
    # Glob all the files in the log directory
    filenames = glob.glob(self.log_dir + "/" + self.ToolName() + ".*")

    # If we have browser wrapper, the logfiles are named as
    # "toolname.wrapper_PID.valgrind_PID".
    # Let's extract the list of wrapper_PIDs and name it ppids
    ppids = set([int(f.split(".")[-2]) \
        for f in filenames if re.search("\.[0-9]+\.[0-9]+$", f)])

    analyzer = self.CreateAnalyzer()
    if len(ppids) == 0:
      # Fast path - no browser wrapper was set.
      return analyzer.Report(filenames, None, check_sanity)

    ret = 0
    for ppid in ppids:
      # The wrapper script wrote the test name into testcase.<ppid>.name;
      # missing file just means we can't label the report.
      testcase_name = None
      try:
        f = open(self.log_dir + ("/testcase.%d.name" % ppid))
        testcase_name = f.read().strip()
        f.close()
        wk_layout_prefix="third_party/WebKit/LayoutTests/"
        wk_prefix_at = testcase_name.rfind(wk_layout_prefix)
        if wk_prefix_at != -1:
          testcase_name = testcase_name[wk_prefix_at + len(wk_layout_prefix):]
      except IOError:
        pass
      print "====================================================="
      print " Below is the report for valgrind wrapper PID=%d." % ppid
      if testcase_name:
        print " It was used while running the `%s` test." % testcase_name
      else:
        print " You can find the corresponding test"
        print " by searching the above log for 'PID=%d'" % ppid
      sys.stdout.flush()

      ppid_filenames = [f for f in filenames \
                        if re.search("\.%d\.[0-9]+$" % ppid, f)]
      # check_sanity won't work with browser wrappers
      assert check_sanity == False
      ret |= analyzer.Report(ppid_filenames, testcase_name)
      print "====================================================="
      sys.stdout.flush()

    if ret != 0:
      print ""
      print "The Valgrind reports are grouped by test names."
      print "Each test has its PID printed in the log when the test was run"
      print "and at the beginning of its Valgrind report."
      print "Hint: you can search for the reports by Ctrl+F -> `=#`"
      sys.stdout.flush()

    return ret
+
+
+# TODO(timurrrr): Split into a separate file.
class Memcheck(ValgrindTool):
  """Memcheck
  Dynamic memory error detector for Linux & Mac

  http://valgrind.org/info/tools.html#memcheck
  """

  def __init__(self):
    super(Memcheck, self).__init__()
    self.RegisterOptionParserHook(Memcheck.ExtendOptionParser)

  def ToolName(self):
    """Short tool name used for logs and suppression lookup."""
    return "memcheck"

  def ExtendOptionParser(self, parser):
    """Registers Memcheck-only command line flags on |parser|."""
    parser.add_option("--leak-check", "--leak_check", type="string",
                      default="yes",  # --leak-check=yes is equivalent of =full
                      help="perform leak checking at the end of the run")
    parser.add_option("", "--show_all_leaks", action="store_true",
                      default=False,
                      help="also show less blatant leaks")
    parser.add_option("", "--track_origins", action="store_true",
                      default=False,
                      help="Show whence uninitialized bytes came. 30% slower.")

  def ToolSpecificFlags(self):
    """Assembles the Memcheck command-line flags from the parsed options."""
    flags = ["--gen-suppressions=all",
             "--demangle=no",
             "--leak-check=%s" % self._options.leak_check]

    if self._options.show_all_leaks:
      flags.append("--show-reachable=yes")
    else:
      flags.append("--show-possibly-lost=no")

    if self._options.track_origins:
      flags.append("--track-origins=yes")

    # TODO(glider): this is a temporary workaround for http://crbug.com/51716
    # Let's see whether it helps.
    if common.IsMac():
      flags.append("--smc-check=all")

    return flags

  def CreateAnalyzer(self):
    """Builds a MemcheckAnalyzer; gdb-based symbolization is Mac-only."""
    return memcheck_analyze.MemcheckAnalyzer(self._source_dir,
                                             self._options.show_all_leaks,
                                             use_gdb=common.IsMac())

  def Analyze(self, check_sanity=False):
    """Parses the logs and points at the docs when reports were found."""
    result = self.GetAnalyzeResults(check_sanity)

    if result:
      logging.info("Please see http://dev.chromium.org/developers/how-tos/"
                   "using-valgrind for the info on Memcheck/Valgrind")
    return result
+
+
class DrMemory(BaseTool):
  """Dr.Memory
  Dynamic memory error detector for Windows.

  http://dev.chromium.org/developers/how-tos/using-drmemory
  It is not very mature at the moment, some things might not work properly.
  """

  def __init__(self, full_mode, pattern_mode):
    """full_mode enables full leak/uninit checking; pattern_mode uses
    pattern-based redzone detection instead."""
    super(DrMemory, self).__init__()
    self.full_mode = full_mode
    self.pattern_mode = pattern_mode
    self.RegisterOptionParserHook(DrMemory.ExtendOptionParser)

  def ToolName(self):
    """Short tool name used for logs and suppression lookup."""
    return "drmemory"

  def ExtendOptionParser(self, parser):
    """Registers Dr. Memory-only command line flags on |parser|."""
    parser.add_option("", "--suppressions", default=[],
                      action="append",
                      help="path to a drmemory suppression file")
    parser.add_option("", "--follow_python", action="store_true",
                      default=False, dest="follow_python",
                      help="Monitor python child processes.  If off, neither "
                           "python children nor any children of python children "
                           "will be monitored.")
    parser.add_option("", "--indirect", action="store_true",
                      default=False,
                      help="set BROWSER_WRAPPER rather than "
                           "running Dr. Memory directly on the harness")
    parser.add_option("", "--indirect_webkit_layout", action="store_true",
                      default=False,
                      help="set --wrapper rather than running valgrind "
                           "directly.")
    parser.add_option("", "--use_debug", action="store_true",
                      default=False, dest="use_debug",
                      help="Run Dr. Memory debug build")
    parser.add_option("", "--trace_children", action="store_true",
                      default=True,
                      help="TODO: default value differs from Valgrind")
    parser.add_option("", "--drmemory_ops",
                      help="Extra options passed to Dr. Memory")

  def ToolCommand(self):
    """Get the tool command to run.

    Builds the drmemory.exe invocation: environment tweaks, drconfig
    registration for python children, suppressions, logging/symcache
    directories, mode flags, and finally the test command after "--".
    """
    # WINHEAP is what Dr. Memory supports as there are issues w/ both
    # jemalloc (https://github.com/DynamoRIO/drmemory/issues/320) and
    # tcmalloc (https://github.com/DynamoRIO/drmemory/issues/314)
    add_env = {
      "CHROME_ALLOCATOR" : "WINHEAP",
      "JSIMD_FORCEMMX"   : "1",  # https://github.com/DynamoRIO/drmemory/issues/540
    }
    for k, v in add_env.items():
      logging.info("export %s=%s", k, v)
      os.putenv(k, v)

    drmem_cmd = os.getenv("DRMEMORY_COMMAND")
    if not drmem_cmd:
      raise RuntimeError("Please set DRMEMORY_COMMAND environment variable "
                         "with the path to drmemory.exe")
    proc = drmem_cmd.split(" ")

    # By default, don't run python (this will exclude python's children as well)
    # to reduce runtime.  We're not really interested in spending time finding
    # bugs in the python implementation.
    # With file-based config we must update the file every time, and
    # it will affect simultaneous drmem uses by this user.  While file-based
    # config has many advantages, here we may want this-instance-only
    # (https://github.com/DynamoRIO/drmemory/issues/334).
    drconfig_cmd = [ proc[0].replace("drmemory.exe", "drconfig.exe") ]
    drconfig_cmd += ["-quiet"] # suppress errors about no 64-bit libs
    run_drconfig = True
    if self._options.follow_python:
      logging.info("Following python children")
      # -unreg fails if not already registered so query for that first
      query_cmd = drconfig_cmd + ["-isreg", "python.exe"]
      query_proc = subprocess.Popen(query_cmd, stdout=subprocess.PIPE,
                                    shell=True)
      (query_out, query_err) = query_proc.communicate()
      if re.search("exe not registered", query_out):
        run_drconfig = False # all set
      else:
        drconfig_cmd += ["-unreg", "python.exe"]
    else:
      logging.info("Excluding python children")
      drconfig_cmd += ["-reg", "python.exe", "-norun"]
    if run_drconfig:
      drconfig_retcode = common.RunSubprocess(drconfig_cmd, self._timeout)
      if drconfig_retcode:
        logging.error("Configuring whether to follow python children failed " \
                      "with %d.", drconfig_retcode)
        raise RuntimeError("Configuring python children failed ")

    suppression_count = 0
    supp_files = self._options.suppressions
    if self.full_mode:
      supp_files += [s.replace(".txt", "_full.txt") for s in supp_files]
    for suppression_file in supp_files:
      if os.path.exists(suppression_file):
        suppression_count += 1
        proc += ["-suppress", common.NormalizeWindowsPath(suppression_file)]

    if not suppression_count:
      logging.warning("WARNING: NOT USING SUPPRESSIONS!")

    # Un-comment to dump Dr.Memory events on error
    #proc += ["-dr_ops", "-dumpcore_mask", "-dr_ops", "0x8bff"]

    # Un-comment and comment next line to debug Dr.Memory
    #proc += ["-dr_ops", "-no_hide"]
    #proc += ["-dr_ops", "-msgbox_mask", "-dr_ops", "15"]
    #proc += ["-dr_ops", "-stderr_mask", "-dr_ops", "15"]
    # Ensure we see messages about Dr. Memory crashing!
    proc += ["-dr_ops", "-stderr_mask", "-dr_ops", "12"]

    if self._options.use_debug:
      proc += ["-debug"]

    proc += ["-logdir", common.NormalizeWindowsPath(self.log_dir)]

    # Initialize to None so the "if symcache_dir:" check below cannot hit a
    # NameError when neither branch applies (no LocalLow dir, no --build-dir).
    symcache_dir = None
    if self.log_parent_dir:
      # gpu process on Windows Vista+ runs at Low Integrity and can only
      # write to certain directories (http://crbug.com/119131)
      symcache_dir = os.path.join(self.log_parent_dir, "drmemory.symcache")
    elif self._options.build_dir:
      # The other case is only possible with -t cmdline.
      # Anyways, if we omit -symcache_dir the -logdir's value is used which
      # should be fine.
      symcache_dir = os.path.join(self._options.build_dir, "drmemory.symcache")
    if symcache_dir:
      if not os.path.exists(symcache_dir):
        try:
          os.mkdir(symcache_dir)
        except OSError:
          logging.warning("Can't create symcache dir?")
      if os.path.exists(symcache_dir):
        proc += ["-symcache_dir", common.NormalizeWindowsPath(symcache_dir)]

    # Use -no_summary to suppress DrMemory's summary and init-time
    # notifications.  We generate our own with drmemory_analyze.py.
    proc += ["-batch", "-no_summary"]

    # Un-comment to disable interleaved output.  Will also suppress error
    # messages normally printed to stderr.
    #proc += ["-quiet", "-no_results_to_stderr"]

    proc += ["-callstack_max_frames", "40"]

    # disable leak scan for now
    proc += ["-no_count_leaks", "-no_leak_scan"]

    # disable warnings about unaddressable prefetches
    proc += ["-no_check_prefetch"]

    # crbug.com/413215, no heap mismatch check for Windows release build binary
    # (guard against build_dir being None when --build-dir was not passed).
    if common.IsWindows() and self._options.build_dir and \
        "Release" in self._options.build_dir:
      proc += ["-no_check_delete_mismatch"]

    # We are seeing false positive invalid heap args on 64-bit, so we are
    # disabling the feature for now (xref
    # https://github.com/DynamoRIO/drmemory/issues/1839).
    if common.IsWindows() and self._options.build_dir and \
        "Release_x64" in self._options.build_dir:
      proc += ["-no_check_heap_mismatch"]

    # make callstacks easier to read
    proc += ["-callstack_srcfile_prefix",
             "build\\src,chromium\\src,crt_build\\self_x86"]
    proc += ["-callstack_modname_hide",
             "*drmemory*,chrome.dll"]

    boring_callers = common.BoringCallers(mangled=False, use_re_wildcards=False)
    # TODO(timurrrr): In fact, we want "starting from .." instead of "below .."
    proc += ["-callstack_truncate_below", ",".join(boring_callers)]

    # crbug.com/155839: extra Dr. Memory options passed via --drmemory_ops
    if self._options.drmemory_ops:
      proc.extend(self._options.drmemory_ops.split())

    if self.pattern_mode:
      proc += ["-pattern", "0xf1fd", "-no_count_leaks", "-redzone_size", "0x20"]
    elif not self.full_mode:
      proc += ["-light"]

    proc += self._tool_flags

    # Dr.Memory requires -- to separate tool flags from the executable name.
    proc += ["--"]

    if self._options.indirect or self._options.indirect_webkit_layout:
      wrapper_path = os.path.join(self._source_dir,
                                  "tools", "valgrind", "browser_wrapper_win.py")
      wrapper = " ".join(["python", wrapper_path] + proc)
      self.CreateBrowserWrapper(wrapper)
      logging.info("browser wrapper = " + " ".join(proc))
      if self._options.indirect_webkit_layout:
        proc = self._args
        # Layout tests want forward slashes.
        wrapper = wrapper.replace('\\', '/')
        proc += ["--wrapper", wrapper]
        return proc
      else:
        proc = []

    # Note that self._args begins with the name of the exe to be run.
    self._args[0] = common.NormalizeWindowsPath(self._args[0])
    proc += self._args
    return proc

  def CreateBrowserWrapper(self, command):
    """Exports |command| for browser_wrapper_win.py to pick up."""
    os.putenv("BROWSER_WRAPPER", command)

  def Analyze(self, check_sanity=False):
    """Parses Dr. Memory results.txt files; returns non-zero on reports."""
    # Use one analyzer for all the log files to avoid printing duplicate reports
    #
    # TODO(timurrrr): unify this with Valgrind and other tools when we have
    # https://github.com/DynamoRIO/drmemory/issues/684
    analyzer = drmemory_analyze.DrMemoryAnalyzer()

    ret = 0
    if not self._options.indirect and not self._options.indirect_webkit_layout:
      filenames = glob.glob(self.log_dir + "/*/results.txt")

      ret = analyzer.Report(filenames, None, check_sanity)
    else:
      testcases = glob.glob(self.log_dir + "/testcase.*.logs")
      # If we have browser wrapper, the per-test logdirs are named as
      # "testcase.wrapper_PID.name".
      # Let's extract the list of wrapper_PIDs and name it ppids.
      # NOTE: ppids may contain '_', i.e. they are not ints!
      ppids = set([f.split(".")[-2] for f in testcases])

      for ppid in ppids:
        testcase_name = None
        try:
          f = open("%s/testcase.%s.name" % (self.log_dir, ppid))
          testcase_name = f.read().strip()
          f.close()
        except IOError:
          pass
        print("=====================================================")
        print(" Below is the report for drmemory wrapper PID=%s." % ppid)
        if testcase_name:
          print(" It was used while running the `%s` test." % testcase_name)
        else:
          # TODO(timurrrr): hm, the PID line is suppressed on Windows...
          print(" You can find the corresponding test")
          print(" by searching the above log for 'PID=%s'" % ppid)
        sys.stdout.flush()
        ppid_filenames = glob.glob("%s/testcase.%s.logs/*/results.txt" %
                                   (self.log_dir, ppid))
        ret |= analyzer.Report(ppid_filenames, testcase_name, False)
        print("=====================================================")
        sys.stdout.flush()

    logging.info("Please see http://dev.chromium.org/developers/how-tos/"
                 "using-drmemory for the info on Dr. Memory")
    return ret
+
+
class ToolFactory:
  """Maps a command-line tool name onto a concrete BaseTool instance."""

  def Create(self, tool_name):
    """Returns a new tool object for |tool_name|; raises on unknown names."""
    if tool_name == "memcheck":
      return Memcheck()
    if tool_name == "drmemory" or tool_name == "drmemory_light":
      # TODO(timurrrr): remove support for "drmemory" when buildbots are
      # switched to drmemory_light OR make drmemory==drmemory_full the default
      # mode when the tool is mature enough.
      return DrMemory(False, False)
    if tool_name == "drmemory_full":
      return DrMemory(True, False)
    if tool_name == "drmemory_pattern":
      return DrMemory(False, True)
    try:
      platform_name = common.PlatformNames()[0]
    # NOTE(review): this assumes common.py defines its own NotImplementedError
    # attribute; if it does not, this except clause itself would raise
    # AttributeError -- verify against tools/valgrind/common.py.
    except common.NotImplementedError:
      platform_name = sys.platform + "(Unknown)"
    raise RuntimeError, "Unknown tool (tool=%s, platform=%s)" % (tool_name,
                                                                 platform_name)
+
def CreateTool(tool):
  """Convenience wrapper: builds the tool named |tool| via ToolFactory."""
  factory = ToolFactory()
  return factory.Create(tool)
diff --git a/chromium/tools/valgrind/waterfall.sh b/chromium/tools/valgrind/waterfall.sh
new file mode 100755
index 00000000000..8bb90028b7f
--- /dev/null
+++ b/chromium/tools/valgrind/waterfall.sh
@@ -0,0 +1,244 @@
+#!/bin/bash
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script can be used by waterfall sheriffs to fetch the status
+# of Valgrind bots on the memory waterfall and test if their local
+# suppressions match the reports on the waterfall.
+
set -e  # Abort the script on the first failing command.

THISDIR=$(dirname "${0}")
# All downloaded builder pages and reports land here; fetch_logs recreates
# the directory on every run.
LOGS_DIR=$THISDIR/waterfall.tmp
WATERFALL_PAGE="https://build.chromium.org/p/chromium.memory/builders"
WATERFALL_FYI_PAGE="https://build.chromium.org/p/chromium.memory.fyi/builders"
+
download() {
  # Download a file with curl or wget, whichever is available.
  # $1 = URL to download
  # $2 = Path to the output file
  # {{{1
  # `command -v` is the POSIX way to probe for a tool; `which` is an external
  # program that may be absent and whose output format varies by platform.
  if command -v curl >/dev/null 2>&1
  then
    if ! curl -s -o "$2" "$1"
    then
      echo
      echo "Failed to download '$1'... aborting"
      exit 1
    fi
  elif command -v wget >/dev/null 2>&1
  then
    if ! wget "$1" -O "$2" -q
    then
      echo
      echo "Failed to download '$1'... aborting"
      exit 1
    fi
  else
    echo "Need either curl or wget to download stuff... aborting"
    exit 1
  fi
  # }}}
}
+
fetch_logs() {
  # Fetch Valgrind logs from the waterfall {{{1
  # $1 = builders page URL
  # $2 = optional "layout_only" to restrict to the valgrind layout bot.

  # TODO(timurrrr,maruel): use JSON, see
  # http://build.chromium.org/p/chromium.memory/json/help

  rm -rf "$LOGS_DIR" # Delete old logs
  mkdir "$LOGS_DIR"

  URL=$1
  LAYOUT_ONLY=0
  if [ $# != 0 ]; then
    shift
    if [ "$1" = "layout_only" ]; then
      LAYOUT_ONLY=1
    fi
  fi

  echo "Fetching the list of builders..."
  download "$URL" "$LOGS_DIR/builders"
  # Scrape builder names out of the HTML table; names stay URL-encoded
  # because they are reused as URL components below.
  SLAVES=$(grep "<a href=\"builders\/" "$LOGS_DIR/builders" | \
           grep 'td class="box"' | \
           sed "s/.*<a href=\"builders\///" | sed "s/\".*//" | \
           sort | uniq)

  for S in $SLAVES
  do
    if [ "$LAYOUT_ONLY" = "1" ]; then
      if [ "$S" != "Webkit%20Linux%20%28valgrind%20layout%29" ]; then
        continue;
      fi
    fi

    SLAVE_URL="$URL/$S"
    # Decode %20/%28/%29 for display purposes only.
    SLAVE_NAME=$(echo $S | sed -e "s/%20/ /g" -e "s/%28/(/g" -e "s/%29/)/g")
    echo -n "Fetching builds by slave '${SLAVE_NAME}'"
    # NUMBUILDS is a global set by the option parsing at the bottom of this
    # script.
    download $SLAVE_URL?numbuilds=${NUMBUILDS} "$LOGS_DIR/slave_${S}"

    # We speed up the 'fetch' step by skipping the builds/tests which succeeded.
    # TODO(timurrrr): OTOH, we won't be able to check
    # if some suppression is not used anymore.
    #
    # The awk script here joins the lines ending with </td> to make it possible
    # to find the failed builds.
    LIST_OF_BUILDS=$(cat "$LOGS_DIR/slave_$S" | \
      awk 'BEGIN { buf = "" }
           {
             if ($0 ~ /<\/td>/) { buf = (buf $0); }
             else {
               if (buf) { print buf; buf="" }
               print $0
             }
           }
           END {if (buf) print buf}' | \
      grep "success\|failure" | \
      head -n $NUMBUILDS | \
      grep "failure" | \
      grep -v "failed compile" | \
      sed "s/.*\/builds\///" | sed "s/\".*//")

    for BUILD in $LIST_OF_BUILDS
    do
      # We'll fetch a few tiny URLs now, let's use a temp file.
      TMPFILE=$(mktemp -t memory_waterfall.XXXXXX)
      download $SLAVE_URL/builds/$BUILD "$TMPFILE"

      REPORT_FILE="$LOGS_DIR/report_${S}_${BUILD}"
      rm -f $REPORT_FILE 2>/dev/null || true # make sure it doesn't exist

      # Memory-step log pages have 16-hex-digit names; anything else under
      # logs/ (except stdio) is a failed test name.
      REPORT_URLS=$(grep -o "[0-9]\+/steps/memory.*/logs/[0-9A-F]\{16\}" \
                    "$TMPFILE" \
                    || true) # `true` is to succeed on empty output
      FAILED_TESTS=$(grep -o "[0-9]\+/steps/memory.*/logs/[A-Za-z0-9_.]\+" \
                     "$TMPFILE" | grep -v "[0-9A-F]\{16\}" \
                     | grep -v "stdio" || true)

      for REPORT in $REPORT_URLS
      do
        download "$SLAVE_URL/builds/$REPORT/text" "$TMPFILE"
        echo "" >> "$TMPFILE" # Add a newline at the end
        cat "$TMPFILE" | tr -d '\r' >> "$REPORT_FILE"
      done

      for FAILURE in $FAILED_TESTS
      do
        echo -n "FAILED:" >> "$REPORT_FILE"
        echo "$FAILURE" | sed -e "s/.*\/logs\///" -e "s/\/.*//" \
          >> "$REPORT_FILE"
      done

      rm "$TMPFILE"
      echo $SLAVE_URL/builds/$BUILD >> "$REPORT_FILE"
    done
    echo " DONE"
  done
  # }}}
}
+
match_suppressions() {
  # Forward any extra CLI arguments ($@) plus every downloaded report file to
  # test_suppressions.py. PYTHONPATH points at the bundled google python
  # helpers so the script can import them.
  PYTHONPATH=$THISDIR/../python/google \
  python "$THISDIR/test_suppressions.py" $@ "$LOGS_DIR/report_"*
}
+
match_gtest_excludes() {
  # Summarize failed tests per platform from the downloaded report files.
  for PLATFORM in "Linux" "Chromium%20Mac" "Chromium%20OS" "Windows"
  do
    echo
    echo "Test failures on ${PLATFORM}:" | sed "s/%20/ /"
    grep -h -o "^FAILED:.*" -R "$LOGS_DIR"/*${PLATFORM}* | \
      grep -v "FAILS\|FLAKY" | sort | uniq | \
      sed -e "s/^FAILED://" -e "s/^/ /"
    # Don't put any operators between "grep | sed" and "RESULT=$PIPESTATUS"
    # $PIPESTATUS without an index is the exit status of the *first* command
    # in the pipeline above (the grep -h); grep exits 1 when nothing matched,
    # so RESULT==1 means no failures were found for this platform.
    RESULT=$PIPESTATUS

    if [ "$RESULT" == 1 ]
    then
      echo " None!"
    else
      echo
      echo " Note: we don't check for failures already excluded locally yet"
      echo " TODO(timurrrr): don't list tests we've already excluded locally"
    fi
  done
  echo
  echo "Note: we don't print FAILS/FLAKY tests and 1200s-timeout failures"
}
+
usage() {
  # Print the help text. The heredoc delimiter is unquoted, so $0 expands to
  # the script path.
  cat <<EOF
usage: $0 fetch|match options

This script can be used by waterfall sheriffs to fetch the status
of Valgrind bots on the memory waterfall and test if their local
suppressions match the reports on the waterfall.

OPTIONS:
  -h Show this message
  -n N Fetch N builds from each slave.

COMMANDS:
  fetch Fetch Valgrind logs from the memory waterfall
  fetch_layout Fetch many Valgrind logs from the layout test bot only
  match Test the local suppression files against the downloaded logs

EOF
}
+
CMD=$1
if [ $# != 0 ]; then
  shift
fi

# Default number of builds to fetch per slave; the layout-only mode looks
# further back because that bot cycles faster.
NUMBUILDS=3
if [ "$CMD" = "fetch_layout" ]; then
  NUMBUILDS=30
fi

# Arguments for "match" are handled in match_suppressions
if [ "$CMD" != "match" ]; then
  # FIX: the optstring was mis-encoded smart quotes (“hn:â€), so getopts
  # never recognized -h or -n; it must be a plain double-quoted string.
  while getopts "hn:" OPTION
  do
    case $OPTION in
      h)
        usage
        exit
        ;;
      n)
        NUMBUILDS=$OPTARG
        ;;
      ?)
        usage
        exit
        ;;
    esac
  done
  shift $((OPTIND-1))
  if [ $# != 0 ]; then
    usage
    exit 1
  fi
fi

if [ "$CMD" = "fetch" ]; then
  echo "Fetching $NUMBUILDS builds"
  fetch_logs "$WATERFALL_PAGE"
  fetch_logs "$WATERFALL_FYI_PAGE"
elif [ "$CMD" = "fetch_layout" ]; then
  echo "Fetching $NUMBUILDS builds"
  fetch_logs "$WATERFALL_FYI_PAGE" layout_only
elif [ "$CMD" = "match" ]; then
  match_suppressions $@
  match_gtest_excludes
elif [ "$CMD" = "blame" ]; then
  echo The blame command died of bitrot. If you need it, please reimplement it.
  echo Reimplementation is blocked on http://crbug.com/82688
else
  usage
  exit 1
fi
diff --git a/chromium/tools/variations/OWNERS b/chromium/tools/variations/OWNERS
new file mode 100644
index 00000000000..5468518ba75
--- /dev/null
+++ b/chromium/tools/variations/OWNERS
@@ -0,0 +1,2 @@
+asvitkine@chromium.org
+danduong@chromium.org
diff --git a/chromium/tools/variations/fieldtrial_to_struct.py b/chromium/tools/variations/fieldtrial_to_struct.py
new file mode 100755
index 00000000000..dcaed3ab412
--- /dev/null
+++ b/chromium/tools/variations/fieldtrial_to_struct.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os.path
+import sys
+import optparse
_script_path = os.path.realpath(__file__)

# json_comment_eater and json_to_struct live in sibling tool directories that
# are not on sys.path. Temporarily prepend each directory for the import,
# then restore sys.path so the hack does not leak into other modules.
sys.path.insert(0, os.path.normpath(_script_path + "/../../json_comment_eater"))
try:
  import json_comment_eater
finally:
  sys.path.pop(0)

sys.path.insert(0, os.path.normpath(_script_path + "/../../json_to_struct"))
try:
  import json_to_struct
finally:
  sys.path.pop(0)
+
def _Load(filename):
  """Loads a JSON file into a Python object and return this object.
  """
  with open(filename, 'r') as handle:
    raw_text = handle.read()
  # Strip '//'-style comments before handing the text to the JSON parser.
  return json.loads(json_comment_eater.Nom(raw_text))
+
def _LoadFieldTrialConfig(filename):
  """Loads a field trial config JSON and converts it into a format that can be
  used by json_to_struct.
  """
  raw_config = _Load(filename)
  return _FieldTrialConfigToDescription(raw_config)
+
+def _FieldTrialConfigToDescription(config):
+ element = {'groups': []}
+ for study in sorted(config.keys()):
+ group_data = config[study][0]
+ group = {
+ 'study': study,
+ 'group_name': group_data['group_name']
+ }
+ params_data = group_data.get('params')
+ if (params_data):
+ params = []
+ for param in sorted(params_data.keys()):
+ params.append({'key': param, 'value': params_data[param]})
+ group['params'] = params
+ enable_features_data = group_data.get('enable_features')
+ if enable_features_data:
+ group['enable_features'] = enable_features_data
+ disable_features_data = group_data.get('disable_features')
+ if disable_features_data:
+ group['disable_features'] = disable_features_data
+ element['groups'].append(group)
+ return {'elements': {'kFieldTrialConfig': element}}
+
def main(arguments):
  """Generates a C++ struct file pair from a JSON field trial description.

  Args:
    arguments: Command-line arguments: the option flags below plus one
        positional path to the field trial config JSON description.
  """
  parser = optparse.OptionParser(
      description='Generates a C++ array of struct from a JSON description.',
      usage='usage: %prog [option] -s schema description')
  parser.add_option('-b', '--destbase',
      help='base directory of generated files.')
  parser.add_option('-d', '--destdir',
      help='directory to output generated files, relative to destbase.')
  parser.add_option('-n', '--namespace',
      help='C++ namespace for generated files. e.g search_providers.')
  parser.add_option('-s', '--schema', help='path to the schema file, '
      'mandatory.')
  parser.add_option('-o', '--output', help='output filename, '
      'mandatory.')
  parser.add_option('-y', '--year',
      help='year to put in the copy-right.')
  (opts, args) = parser.parse_args(args=arguments)

  if not opts.schema:
    parser.error('You must specify a --schema.')
  # --output is documented as mandatory; without this check a missing value
  # propagated None into the output path computation below.
  if not opts.output:
    parser.error('You must specify an --output.')
  # A missing positional description previously raised a bare IndexError.
  if not args:
    parser.error('You must specify a description file.')

  description_filename = os.path.normpath(args[0])
  shortroot = opts.output
  if opts.destdir:
    output_root = os.path.join(os.path.normpath(opts.destdir), shortroot)
  else:
    output_root = shortroot

  if opts.destbase:
    basepath = os.path.normpath(opts.destbase)
  else:
    basepath = ''

  schema = _Load(opts.schema)
  description = _LoadFieldTrialConfig(description_filename)
  json_to_struct.GenerateStruct(
      basepath, output_root, opts.namespace, schema, description,
      os.path.split(description_filename)[1], os.path.split(opts.schema)[1],
      opts.year)

if __name__ == '__main__':
  main(sys.argv[1:])
diff --git a/chromium/tools/variations/fieldtrial_to_struct_unittest.py b/chromium/tools/variations/fieldtrial_to_struct_unittest.py
new file mode 100644
index 00000000000..7575e833ebf
--- /dev/null
+++ b/chromium/tools/variations/fieldtrial_to_struct_unittest.py
@@ -0,0 +1,82 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import fieldtrial_to_struct
+import os
+
+
class FieldTrialToStruct(unittest.TestCase):
  """Unit tests for the fieldtrial_to_struct conversion tool."""

  def test_FieldTrialToDescription(self):
    """Checks the config-dict -> json_to_struct description conversion."""
    config = {
      'Study1': [
        {
          'group_name': 'Group1',
          'params': {
            'x': '1',
            'y': '2'
          },
          'enable_features': ['A', 'B'],
          'disable_features': ['C']
        }
      ],
      'Study2': [{'group_name': 'OtherGroup'}]
    }
    result = fieldtrial_to_struct._FieldTrialConfigToDescription(config)
    expected = {
      'elements': {
        'kFieldTrialConfig': {
          'groups': [
            {
              'study': 'Study1',
              'group_name': 'Group1',
              'params': [
                {'key': 'x', 'value': '1'},
                {'key': 'y', 'value': '2'}
              ],
              'enable_features': ['A',
                                  'B'],
              'disable_features': ['C']
            },
            {
              'study': 'Study2',
              'group_name': 'OtherGroup'
            }
          ]
        }
      }
    }
    self.maxDiff = None
    self.assertEqual(expected, result)

  def test_FieldTrialToStructMain(self):
    """End-to-end: runs main() and byte-compares the generated .h/.cc pair
    against the checked-in expected_output files."""
    schema = (
        '../../chrome/common/variations/fieldtrial_testing_config_schema.json')
    # NOTE: 'test_ouput' (sic) must keep this spelling -- it is embedded in
    # the checked-in expected_output files (e.g. their #include line).
    test_ouput_filename = 'test_ouput'
    fieldtrial_to_struct.main([
        '--schema=' + schema,
        '--output=' + test_ouput_filename,
        '--year=2015',
        'unittest_data/test_config.json'
    ])
    header_filename = test_ouput_filename + '.h'
    with open(header_filename, 'r') as header:
      test_header = header.read()
    with open('unittest_data/expected_output.h', 'r') as expected:
      expected_header = expected.read()
    self.assertEqual(expected_header, test_header)
    os.unlink(header_filename)

    cc_filename = test_ouput_filename + '.cc'
    with open(cc_filename, 'r') as cc:
      test_cc = cc.read()
    with open('unittest_data/expected_output.cc', 'r') as expected:
      expected_cc = expected.read()
    self.assertEqual(expected_cc, test_cc)
    os.unlink(cc_filename)

if __name__ == '__main__':
  unittest.main()
diff --git a/chromium/tools/variations/fieldtrial_util.py b/chromium/tools/variations/fieldtrial_util.py
new file mode 100644
index 00000000000..ae029aa45b0
--- /dev/null
+++ b/chromium/tools/variations/fieldtrial_util.py
@@ -0,0 +1,101 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import sys
+
+
+def _hex(ch):
+ hv = hex(ord(ch)).replace('0x', '')
+ hv.zfill(2)
+ return hv.upper()
+
# URL escapes the delimiter characters from the output. urllib.quote is not
# used because it cannot escape '.'.
def _escape(str):
  escaped = str
  # '%' is escaped first; otherwise the '%XX' sequences produced for the
  # remaining delimiters would themselves get re-escaped.
  escaped = escaped.replace('%', '%' + _hex('%'))
  for delimiter in ':/.,':
    escaped = escaped.replace(delimiter, '%' + _hex(delimiter))
  return escaped
+
+def _FindDuplicates(entries):
+ seen = set()
+ duplicates = set()
+ for entry in entries:
+ if entry in seen:
+ duplicates.add(entry)
+ else:
+ seen.add(entry)
+ return duplicates
+
+def _CheckForDuplicateFeatures(enable_features, disable_features):
+ enable_features_set = set(enable_features)
+ if len(enable_features_set) != len(enable_features):
+ raise Exception('Duplicate feature(s) in enable_features: ' +
+ ', '.join(_FindDuplicates(enable_features)))
+ disable_features_set = set(disable_features)
+ if len(disable_features_set) != len(disable_features):
+ raise Exception('Duplicate feature(s) in disable_features: ' +
+ ', '.join(_FindDuplicates(disable_features)))
+ features_in_both = enable_features_set.intersection(disable_features_set)
+ if len(features_in_both) > 0:
+ raise Exception('Conflicting features set as both enabled and disabled: ' +
+ ', '.join(features_in_both))
+
# Generate a list of command-line switches to enable field trials defined in
# fieldtrial_testing_config_*.json.
def GenerateArgs(config_path):
  """Returns browser switches for the trials described in |config_path|.

  Args:
    config_path: Path to a fieldtrial testing config JSON file mapping trial
        names to lists of group dicts.

  Returns:
    A list of switches: '--force-fieldtrials=...' always, plus
    '--force-fieldtrial-params=...', '--enable-features=...' and
    '--disable-features=...' when non-empty. Returns [] if the file is
    missing/unparseable or defines no trials.
  """
  try:
    with open(config_path, 'r') as base_file:
      variations = json.load(base_file)
  except (IOError, ValueError):
    # A missing or malformed config deliberately means "no extra switches".
    return []

  field_trials = []
  params = []
  enable_features = []
  disable_features = []
  for trial, groups in variations.iteritems():
    if not len(groups):
      continue
    # For now, only take the first group.
    group = groups[0]
    trial_group = [trial, group['group_name']]
    field_trials.extend(trial_group)
    param_list = []
    if 'params' in group:
      for key, value in group['params'].iteritems():
        param_list.append(key)
        param_list.append(value)
    if len(param_list):
      # Escape the variables for the command-line.
      trial_group = [_escape(x) for x in trial_group]
      param_list = [_escape(x) for x in param_list]
      # Format: <trial>.<group>:<key1>/<val1>/<key2>/<val2>...
      param = '%s:%s' % ('.'.join(trial_group), '/'.join(param_list))
      params.append(param)
    if 'enable_features' in group:
      enable_features.extend(group['enable_features'])
    if 'disable_features' in group:
      disable_features.extend(group['disable_features'])
  if not len(field_trials):
    return []
  # Raises if a feature appears twice or is both enabled and disabled.
  _CheckForDuplicateFeatures(enable_features, disable_features)
  args = ['--force-fieldtrials=%s' % '/'.join(field_trials)]
  if len(params):
    args.append('--force-fieldtrial-params=%s' % ','.join(params))
  if len(enable_features):
    args.append('--enable-features=%s' % ','.join(enable_features))
  if len(disable_features):
    args.append('--disable-features=%s' % ','.join(disable_features))
  return args
+
+def main():
+ if len(sys.argv) < 3:
+ print 'Usage: fieldtrial_util.py [base_config_path] [platform_config_path]'
+ exit(-1)
+ print GenerateArgs(sys.argv[1], sys.argv[2])
+
+if __name__ == '__main__':
+ main()
diff --git a/chromium/tools/variations/fieldtrial_util_unittest.py b/chromium/tools/variations/fieldtrial_util_unittest.py
new file mode 100644
index 00000000000..9d53a7f3ea8
--- /dev/null
+++ b/chromium/tools/variations/fieldtrial_util_unittest.py
@@ -0,0 +1,121 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+import fieldtrial_util
+import os
+import tempfile
+
+
class FieldTrialUtilUnittest(unittest.TestCase):
  """Unit tests for fieldtrial_util.GenerateArgs."""

  def runGenerateArgs(self, config):
    """Writes |config| to a temp file, runs GenerateArgs on it and returns
    the result; the temp file is deleted even when GenerateArgs raises."""
    result = None
    with tempfile.NamedTemporaryFile('w', delete=False) as base_file:
      try:
        base_file.write(config)
        base_file.close()
        result = fieldtrial_util.GenerateArgs(base_file.name)
      finally:
        os.unlink(base_file.name)
    return result

  def test_GenArgsEmptyPaths(self):
    """A nonexistent config path yields no switches."""
    args = fieldtrial_util.GenerateArgs('')
    self.assertEqual([], args)

  def test_GenArgsOneConfig(self):
    """Exercises trials, URL-escaped params, and feature switches together."""
    config = '''{
      "BrowserBlackList": [
        { "group_name": "Enabled" }
      ],
      "c": [
        {
          "group_name": "d.",
          "params": {"url": "http://www.google.com"},
          "enable_features": ["x"],
          "disable_features": ["y"]
        }
      ],
      "SimpleParams": [
        {
          "group_name": "Default",
          "params": {"id": "abc"},
          "enable_features": ["a", "b"]
        }
      ]
    }'''
    result = self.runGenerateArgs(config)
    self.assertEqual(['--force-fieldtrials='
                      'BrowserBlackList/Enabled/c/d./SimpleParams/Default',
                      '--force-fieldtrial-params='
                      'c.d%2E:url/http%3A%2F%2Fwww%2Egoogle%2Ecom,'
                      'SimpleParams.Default:id/abc',
                      '--enable-features=x,a,b',
                      '--disable-features=y'], result)

  def test_DuplicateEnableFeatures(self):
    """A feature enabled by two different trials raises."""
    config = '''{
      "X": [
        {
          "group_name": "x",
          "enable_features": ["x"]
        }
      ],
      "Y": [
        {
          "group_name": "Default",
          "enable_features": ["x", "y"]
        }
      ]
    }'''
    with self.assertRaises(Exception) as raised:
      self.runGenerateArgs(config)
    self.assertEqual('Duplicate feature(s) in enable_features: x',
                     str(raised.exception))

  def test_DuplicateDisableFeatures(self):
    """Duplicated features raise.

    NOTE(review): despite the method name, this config duplicates entries in
    enable_features (y and z), and the expected message says so.
    """
    config = '''{
      "X": [
        {
          "group_name": "x",
          "enable_features": ["y", "z"]
        }
      ],
      "Y": [
        {
          "group_name": "Default",
          "enable_features": ["z", "x", "y"]
        }
      ]
    }'''
    with self.assertRaises(Exception) as raised:
      self.runGenerateArgs(config)
    self.assertEqual('Duplicate feature(s) in enable_features: y, z',
                     str(raised.exception))


  def test_DuplicateEnableDisable(self):
    """A feature both enabled and disabled raises."""
    config = '''{
      "X": [
        {
          "group_name": "x",
          "enable_features": ["x"]
        }
      ],
      "Y": [
        {
          "group_name": "Default",
          "disable_features": ["x", "y"]
        }
      ]
    }'''
    with self.assertRaises(Exception) as raised:
      self.runGenerateArgs(config)
    self.assertEqual('Conflicting features set as both enabled and disabled: x',
                     str(raised.exception))
+
+if __name__ == '__main__':
+ unittest.main() \ No newline at end of file
diff --git a/chromium/tools/variations/unittest_data/expected_output.cc b/chromium/tools/variations/unittest_data/expected_output.cc
new file mode 100644
index 00000000000..d2f5cacb069
--- /dev/null
+++ b/chromium/tools/variations/unittest_data/expected_output.cc
@@ -0,0 +1,68 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// GENERATED FROM THE SCHEMA DEFINITION AND DESCRIPTION IN
+// fieldtrial_testing_config_schema.json
+// test_config.json
+// DO NOT EDIT.
+
+#include "test_ouput.h"
+
+
+const char* const array_kFieldTrialConfig_enable_features_0[] = {
+ "X",
+};
+const char* const array_kFieldTrialConfig_disable_features[] = {
+ "C",
+};
+const char* const array_kFieldTrialConfig_enable_features[] = {
+ "A",
+ "B",
+};
+const FieldTrialGroupParams array_kFieldTrialConfig_params[] = {
+ {
+ "x",
+ "1",
+ },
+ {
+ "y",
+ "2",
+ },
+};
+const FieldTrialTestingGroup array_kFieldTrialConfig_groups[] = {
+ {
+ "TestStudy1",
+ "TestGroup1",
+ NULL,
+ 0,
+ NULL,
+ 0,
+ NULL,
+ 0,
+ },
+ {
+ "TestStudy2",
+ "TestGroup2",
+ array_kFieldTrialConfig_params,
+ 2,
+ array_kFieldTrialConfig_enable_features,
+ 2,
+ array_kFieldTrialConfig_disable_features,
+ 1,
+ },
+ {
+ "TestStudy3",
+ "TestGroup3",
+ NULL,
+ 0,
+ array_kFieldTrialConfig_enable_features_0,
+ 1,
+ NULL,
+ 0,
+ },
+};
+const FieldTrialTestingConfig kFieldTrialConfig = {
+ array_kFieldTrialConfig_groups,
+ 3,
+};
diff --git a/chromium/tools/variations/unittest_data/expected_output.h b/chromium/tools/variations/unittest_data/expected_output.h
new file mode 100644
index 00000000000..3ecaddc3f8a
--- /dev/null
+++ b/chromium/tools/variations/unittest_data/expected_output.h
@@ -0,0 +1,40 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// GENERATED FROM THE SCHEMA DEFINITION AND DESCRIPTION IN
+// fieldtrial_testing_config_schema.json
+// test_config.json
+// DO NOT EDIT.
+
+#ifndef TEST_OUPUT_H_
+#define TEST_OUPUT_H_
+
+#include <cstddef>
+
+
+struct FieldTrialGroupParams {
+ const char* const key;
+ const char* const value;
+};
+
+struct FieldTrialTestingGroup {
+ const char* const study;
+ const char* const group_name;
+ const FieldTrialGroupParams * params;
+ const size_t params_size;
+ const char* const * enable_features;
+ const size_t enable_features_size;
+ const char* const * disable_features;
+ const size_t disable_features_size;
+};
+
+struct FieldTrialTestingConfig {
+ const FieldTrialTestingGroup * groups;
+ const size_t groups_size;
+};
+
+
+extern const FieldTrialTestingConfig kFieldTrialConfig;
+
+#endif // TEST_OUPUT_H_
diff --git a/chromium/tools/variations/unittest_data/test_config.json b/chromium/tools/variations/unittest_data/test_config.json
new file mode 100644
index 00000000000..fca6883c806
--- /dev/null
+++ b/chromium/tools/variations/unittest_data/test_config.json
@@ -0,0 +1,19 @@
+{
+ "TestStudy1": [
+ {"group_name": "TestGroup1"}
+ ],
+ "TestStudy2": [
+ {
+ "group_name": "TestGroup2",
+ "params": {
+ "x": "1",
+ "y": "2"
+ },
+ "enable_features": ["A", "B"],
+ "disable_features": ["C"]
+ }
+ ],
+ "TestStudy3": [
+ {"group_name": "TestGroup3", "enable_features": ["X"]}
+ ]
+} \ No newline at end of file
diff --git a/chromium/tools/vim/OWNERS b/chromium/tools/vim/OWNERS
new file mode 100644
index 00000000000..4480539fab6
--- /dev/null
+++ b/chromium/tools/vim/OWNERS
@@ -0,0 +1,3 @@
+asanka@chromium.org
+eroman@chromium.org
+scottmg@chromium.org
diff --git a/chromium/tools/vim/PRESUBMIT.py b/chromium/tools/vim/PRESUBMIT.py
new file mode 100644
index 00000000000..33cdd6d82bc
--- /dev/null
+++ b/chromium/tools/vim/PRESUBMIT.py
@@ -0,0 +1,31 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Presubmit tests for /tools/vim.
+
+Runs Python unit tests in /tools/vim/tests on upload.
+"""
+
def CheckChangeOnUpload(input_api, output_api):
  """Runs the YCM config unit tests when files relevant to them change."""
  local_root = input_api.PresubmitLocalPath()
  # Paths of changed files, relative to the directory containing this
  # PRESUBMIT.py.
  affected = [input_api.os_path.relpath(path, local_root)
              for path in input_api.AbsoluteLocalPaths()]

  # chromium.ycm_extra_conf_unittest requires access to 'ninja' and hasn't
  # been tested on platforms other than Linux, so only run it when the YCM
  # config, the ninja helper, or anything under tests/ is touched.
  touched_config = ('chromium.ycm_extra_conf.py' in affected or
                    'ninja_output.py' in affected)
  touched_tests = any(input_api.re.match(r'tests(/|\\)', path)
                      for path in affected)

  results = []
  if touched_config or touched_tests:
    results += input_api.RunTests(
        input_api.canned_checks.GetUnitTests(
            input_api, output_api,
            ['tests/chromium.ycm_extra_conf_unittest.py']))
  return results
diff --git a/chromium/tools/vim/chromium.ycm_extra_conf.py b/chromium/tools/vim/chromium.ycm_extra_conf.py
new file mode 100644
index 00000000000..97897b2d951
--- /dev/null
+++ b/chromium/tools/vim/chromium.ycm_extra_conf.py
@@ -0,0 +1,362 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Autocompletion config for YouCompleteMe in Chromium.
+#
+# USAGE:
+#
+# 1. Install YCM [https://github.com/Valloric/YouCompleteMe]
+# (Googlers should check out [go/ycm])
+#
+# 2. Create a symbolic link to this file called .ycm_extra_conf.py in the
+# directory above your Chromium checkout (i.e. next to your .gclient file).
+#
+# cd src
+# ln -rs tools/vim/chromium.ycm_extra_conf.py ../.ycm_extra_conf.py
+#
+# 3. (optional) Whitelist the .ycm_extra_conf.py from step #2 by adding the
+# following to your .vimrc:
+#
+# let g:ycm_extra_conf_globlist=['<path to .ycm_extra_conf.py>']
+#
+# You can also add other .ycm_extra_conf.py files you want to use to this
+# list to prevent excessive prompting each time you visit a directory
+# covered by a config file.
+#
+# 4. Profit
+#
+#
+# Usage notes:
+#
+# * You must use ninja & clang to build Chromium.
+#
+# * You must have run gyp_chromium and built Chromium recently.
+#
+#
+# Hacking notes:
+#
+# * The purpose of this script is to construct an accurate enough command line
+# for YCM to pass to clang so it can build and extract the symbols.
+#
+# * Right now, we only pull the -I and -D flags. That seems to be sufficient
+# for everything I've used it for.
+#
+# * That whole ninja & clang thing? We could support other configs if someone
+# were willing to write the correct commands and a parser.
+#
+# * This has only been tested on gPrecise.
+
+
+import os
+import os.path
+import re
+import shlex
+import subprocess
+import sys
+
# Flags from YCM's default config.
_default_flags = [
  '-DUSE_CLANG_COMPLETER',
  '-std=c++11',
  '-x',
  'c++',  # '-x c++': treat the input as C++ regardless of file extension.
]
+
+
def PathExists(*args):
  """True iff the path formed by joining |args| exists on disk."""
  candidate = os.path.join(*args)
  return os.path.exists(candidate)
+
+
def FindChromeSrcFromFilename(filename):
  """Searches for the root of the Chromium checkout.

  Simply checks parent directories until it finds .gclient and src/.

  Args:
    filename: (String) Path to source file being edited.

  Returns:
    (String) Path of 'src/', or None if unable to find.
  """
  def _Exists(*parts):
    return os.path.exists(os.path.join(*parts))

  candidate = os.path.normpath(os.path.dirname(filename))
  while True:
    looks_like_src = (os.path.basename(os.path.realpath(candidate)) == 'src'
                      and _Exists(candidate, 'DEPS')
                      and (_Exists(candidate, '..', '.gclient')
                           or _Exists(candidate, '.git')))
    if looks_like_src:
      return candidate
    parent = os.path.normpath(os.path.join(candidate, '..'))
    if parent == candidate:
      # Reached the filesystem root without finding src/.
      return None
    candidate = parent
+
+
def GetDefaultSourceFile(chrome_root, filename):
  """Returns the default source file to use as an alternative to |filename|.

  Compile flags used to build the default source file is assumed to be a
  close-enough approximation for building |filename|.

  Args:
    chrome_root: (String) Absolute path to the root of Chromium checkout.
    filename: (String) Absolute path to the source file.

  Returns:
    (String) Absolute path to substitute source file.
  """
  blink_root = os.path.join(chrome_root, 'third_party', 'WebKit')
  if not filename.startswith(blink_root):
    return os.path.join(chrome_root, 'base', 'logging.cc')
  return os.path.join(blink_root, 'Source', 'core', 'Init.cpp')
+
+
def GetBuildableSourceFile(chrome_root, filename):
  """Returns a buildable source file corresponding to |filename|.

  A buildable source file is one which is likely to be passed into clang as a
  source file during the build. For .h files, returns the closest matching .cc,
  .cpp or .c file. If no such file is found, returns the same as
  GetDefaultSourceFile().

  Args:
    chrome_root: (String) Absolute path to the root of Chromium checkout.
    filename: (String) Absolute path to the target source file.

  Returns:
    (String) Absolute path to source file.
  """
  if not filename.endswith('.h'):
    # Non-headers are assumed to be buildable as-is.
    return filename

  # Header files can't be built. Instead, try to match a header file to its
  # corresponding source file.
  for alt_extension in ('.cc', '.cpp', '.c'):
    candidate = filename[:-2] + alt_extension
    if os.path.exists(candidate):
      return candidate
  return GetDefaultSourceFile(chrome_root, filename)
+
+
def GetNinjaBuildOutputsForSourceFile(out_dir, filename):
  """Returns a list of build outputs for filename.

  The list is generated by invoking 'ninja -t query' tool to retrieve a list of
  inputs and outputs of |filename|. This list is then filtered to only include
  .o and .obj outputs.

  Args:
    out_dir: (String) Absolute path to ninja build output directory.
    filename: (String) Absolute path to source file.

  Returns:
    (List of Strings) List of target names. Will return [] if |filename|
    doesn't yield any .o or .obj outputs.
  """
  # Ninja needs the path to the source file relative to the output build
  # directory.
  rel_filename = os.path.relpath(os.path.realpath(filename), out_dir)

  proc = subprocess.Popen(['ninja', '-C', out_dir, '-t', 'query', rel_filename],
                          stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
  query_output, _ = proc.communicate()
  if proc.returncode:
    return []

  # The output looks like:
  #   ../../relative/path/to/source.cc:
  #     outputs:
  #       obj/reative/path/to/target.source.o
  #       obj/some/other/target2.source.o
  #       another/target.txt
  # Keep only the object-file outputs after the 'outputs:' marker.
  outputs_section = query_output.partition('\n  outputs:\n')[2]
  targets = []
  for raw_line in outputs_section.split('\n'):
    target = raw_line.strip()
    if target.endswith(('.o', '.obj')):
      targets.append(target)
  return targets
+
+
def GetClangCommandLineForNinjaOutput(out_dir, build_target):
  """Returns the Clang command line for building |build_target|

  Asks ninja for the list of commands used to build |filename| and returns the
  final Clang invocation.

  Args:
    out_dir: (String) Absolute path to ninja build output directory.
    build_target: (String) A build target understood by ninja

  Returns:
    (String or None) Clang command line or None if a Clang command line
    couldn't be determined.
  """
  proc = subprocess.Popen(['ninja', '-v', '-C', out_dir,
                           '-t', 'commands', build_target],
                          stdout=subprocess.PIPE)
  commands_output, _ = proc.communicate()
  if proc.returncode:
    return None

  # Ninja will return multiple build steps for all dependencies up to
  # |build_target|. The build step we want is the last Clang invocation, which
  # is expected to be the one that outputs |build_target|.
  clang_lines = [line for line in commands_output.split('\n')
                 if 'clang' in line]
  if clang_lines:
    return clang_lines[-1]
  return None
+
+
def GetClangCommandLineFromNinjaForSource(out_dir, filename):
  """Returns a Clang command line used to build |filename|.

  The same source file could be built multiple times using different tool
  chains. In such cases, this command returns the first Clang invocation. We
  currently don't prefer one toolchain over another. Hopefully the tool chain
  corresponding to the Clang command line is compatible with the Clang build
  used by YCM.

  Args:
    out_dir: (String) Absolute path to Chromium checkout.
    filename: (String) Absolute path to source file.

  Returns:
    (String or None): Command line for Clang invocation using |filename| as a
    source. Returns None if no such command line could be found.
  """
  for target in GetNinjaBuildOutputsForSourceFile(out_dir, filename):
    clang_line = GetClangCommandLineForNinjaOutput(out_dir, target)
    if clang_line:
      return clang_line
  return None
+
+
+def GetClangOptionsFromCommandLine(clang_commandline, out_dir,
+ additional_flags):
+ """Extracts relevant command line options from |clang_commandline|
+
+ Args:
+ clang_commandline: (String) Full Clang invocation.
+ out_dir: (String) Absolute path to ninja build directory. Relative paths in
+ the command line are relative to |out_dir|.
+ additional_flags: (List of String) Additional flags to return.
+
+ Returns:
+ (List of Strings) The list of command line flags for this source file. Can
+ be empty.
+ """
+ clang_flags = [] + additional_flags
+
+ # Parse flags that are important for YCM's purposes.
+ clang_tokens = shlex.split(clang_commandline)
+ for flag_index, flag in enumerate(clang_tokens):
+ if flag.startswith('-I'):
+ # Relative paths need to be resolved, because they're relative to the
+ # output dir, not the source.
+ if flag[2] == '/':
+ clang_flags.append(flag)
+ else:
+ abs_path = os.path.normpath(os.path.join(out_dir, flag[2:]))
+ clang_flags.append('-I' + abs_path)
+ elif flag.startswith('-std'):
+ clang_flags.append(flag)
+ elif flag.startswith('-') and flag[1] in 'DWFfmO':
+ if flag == '-Wno-deprecated-register' or flag == '-Wno-header-guard':
+ # These flags cause libclang (3.3) to crash. Remove them until things
+ # are fixed.
+ continue
+ clang_flags.append(flag)
+ elif flag == '-isysroot':
+ # On Mac -isysroot <path> is used to find the system headers.
+ # Copy over both flags.
+ if flag_index + 1 < len(clang_tokens):
+ clang_flags.append(flag)
+ clang_flags.append(clang_tokens[flag_index + 1])
+ elif flag.startswith('--sysroot='):
+ # On Linux we use a sysroot image.
+ sysroot_path = flag.lstrip('--sysroot=')
+ if sysroot_path.startswith('/'):
+ clang_flags.append(flag)
+ else:
+ abs_path = os.path.normpath(os.path.join(out_dir, sysroot_path))
+ clang_flags.append('--sysroot=' + abs_path)
+ return clang_flags
+
+
+def GetClangOptionsFromNinjaForFilename(chrome_root, filename):
+ """Returns the Clang command line options needed for building |filename|.
+
+ Command line options are based on the command used by ninja for building
+ |filename|. If |filename| is a .h file, uses its companion .cc or .cpp file.
+ If a suitable companion file can't be located or if ninja doesn't know about
+ |filename|, then uses default source files in Blink and Chromium for
+ determining the commandline.
+
+ Args:
+ chrome_root: (String) Path to src/.
+ filename: (String) Absolute path to source file being edited.
+
+ Returns:
+ (List of Strings) The list of command line flags for this source file. Can
+ be empty.
+ """
+ if not chrome_root:
+ return []
+
+ # Generally, everyone benefits from including Chromium's src/, because all of
+ # Chromium's includes are relative to that.
+ additional_flags = ['-I' + os.path.join(chrome_root)]
+
+ # Version of Clang used to compile Chromium can be newer than version of
+ # libclang that YCM uses for completion. So it's possible that YCM's libclang
+ # doesn't know about some used warning options, which causes compilation
+ # warnings (and errors, because of '-Werror');
+ additional_flags.append('-Wno-unknown-warning-option')
+
+ sys.path.append(os.path.join(chrome_root, 'tools', 'vim'))
+ from ninja_output import GetNinjaOutputDirectory
+ out_dir = os.path.realpath(GetNinjaOutputDirectory(chrome_root))
+
+ clang_line = GetClangCommandLineFromNinjaForSource(
+ out_dir, GetBuildableSourceFile(chrome_root, filename))
+ if not clang_line:
+ # If ninja didn't know about filename or its companion files, then try a
+ # default build target. It is possible that the file is new, or build.ninja
+ # is stale.
+ clang_line = GetClangCommandLineFromNinjaForSource(
+ out_dir, GetDefaultSourceFile(chrome_root, filename))
+
+ if not clang_line:
+ return (additional_flags, [])
+
+ return GetClangOptionsFromCommandLine(clang_line, out_dir, additional_flags)
+
+
+def FlagsForFile(filename):
+ """This is the main entry point for YCM. Its interface is fixed.
+
+ Args:
+ filename: (String) Path to source file being edited.
+
+ Returns:
+ (Dictionary)
+ 'flags': (List of Strings) Command line flags.
+ 'do_cache': (Boolean) True if the result should be cached.
+ """
+ abs_filename = os.path.abspath(filename)
+ chrome_root = FindChromeSrcFromFilename(abs_filename)
+ clang_flags = GetClangOptionsFromNinjaForFilename(chrome_root, abs_filename)
+
+ # If clang_flags could not be determined, then assume that was due to a
+ # transient failure. Preventing YCM from caching the flags allows us to try to
+ # determine the flags again.
+ should_cache_flags_for_file = bool(clang_flags)
+
+ final_flags = _default_flags + clang_flags
+
+ return {
+ 'flags': final_flags,
+ 'do_cache': should_cache_flags_for_file
+ }
diff --git a/chromium/tools/vim/clang-format.vim b/chromium/tools/vim/clang-format.vim
new file mode 100644
index 00000000000..982b8d2ed0f
--- /dev/null
+++ b/chromium/tools/vim/clang-format.vim
@@ -0,0 +1,19 @@
+" Copyright (c) 2014 The Chromium Authors. All rights reserved.
+" Use of this source code is governed by a BSD-style license that can be
+" found in the LICENSE file.
+
+" Binds cmd-shift-i (on Mac) or ctrl-shift-i (elsewhere) to invoking
+" clang-format.py.
+" It will format the current selection (and if there's no selection, the
+" current line.)
+
+let s:script = expand('<sfile>:p:h') .
+ \'/../../buildtools/clang_format/script/clang-format.py'
+
+if has('mac')
+ execute "map <D-I> :pyf " . s:script . "<CR>"
+ execute "imap <D-I> <ESC>:pyf " . s:script . "<CR>i"
+else
+ execute "map <C-I> :pyf " . s:script . "<CR>"
+ execute "imap <C-I> <ESC>:pyf " . s:script . "<CR>i"
+endif
diff --git a/chromium/tools/vim/filetypes.vim b/chromium/tools/vim/filetypes.vim
new file mode 100644
index 00000000000..3e7c8f9eada
--- /dev/null
+++ b/chromium/tools/vim/filetypes.vim
@@ -0,0 +1,9 @@
+" To get syntax highlighting and tab settings for gyp(i) and DEPS files,
+" add the following to your .vimrc file:
+" so /path/to/src/tools/vim/filetypes.vim
+
+augroup filetype
+ au! BufRead,BufNewFile *.gyp set filetype=python expandtab tabstop=2 shiftwidth=2
+ au! BufRead,BufNewFile *.gypi set filetype=python expandtab tabstop=2 shiftwidth=2
+ au! BufRead,BufNewFile DEPS set filetype=python expandtab tabstop=2 shiftwidth=2
+augroup END
diff --git a/chromium/tools/vim/mojom/ftdetect/mojomfiletype.vim b/chromium/tools/vim/mojom/ftdetect/mojomfiletype.vim
new file mode 100644
index 00000000000..cff7ce6d192
--- /dev/null
+++ b/chromium/tools/vim/mojom/ftdetect/mojomfiletype.vim
@@ -0,0 +1,28 @@
+" Copyright 2015 The Chromium Authors. All rights reserved.
+" Use of this source code is governed by a BSD-style license that can be
+" found in the LICENSE file.
+
+" We take care to preserve the user's fileencodings and fileformats,
+" because those settings are global (not buffer local), yet we want
+" to override them for loading mojom files, which should be UTF-8.
+
+let s:current_fileformats = ''
+let s:current_fileencodings = ''
+
+" define fileencodings to open as utf-8 encoding even if it's ascii.
+function! s:mojomfiletype_pre()
+ let s:current_fileformats = &g:fileformats
+ let s:current_fileencodings = &g:fileencodings
+ set fileencodings=utf-8 fileformats=unix
+ setlocal filetype=mojom
+endfunction
+
+" restore the original fileencodings and fileformats
+function! s:mojomfiletype_post()
+ let &g:fileformats = s:current_fileformats
+ let &g:fileencodings = s:current_fileencodings
+endfunction
+
+au BufNewFile *.mojom setlocal filetype=mojom fileencoding=utf-8 fileformat=unix
+au BufRead *.mojom call s:mojomfiletype_pre()
+au BufReadPost *.mojom call s:mojomfiletype_post()
diff --git a/chromium/tools/vim/mojom/syntax/mojom.vim b/chromium/tools/vim/mojom/syntax/mojom.vim
new file mode 100644
index 00000000000..cdd3f7e723e
--- /dev/null
+++ b/chromium/tools/vim/mojom/syntax/mojom.vim
@@ -0,0 +1,48 @@
+" Copyright 2015 The Chromium Authors. All rights reserved.
+" Use of this source code is governed by a BSD-style license that can be
+" found in the LICENSE file.
+
+" Vim syntax file. Language: Mojom
+" To get syntax highlighting for .mojom files, add the following to your .vimrc
+" file:
+" set runtimepath^=/path/to/src/tools/vim/mojom
+
+syn case match
+
+syntax region mojomFold start="{" end="}" transparent fold
+
+" keyword definitions
+syntax keyword mojomType bool int8 int16 int32 int64 uint8 uint16 uint32 uint64 float double array
+syntax match mojomImport "^\(import\)\s"
+syntax keyword mojomKeyword const module interface enum struct union
+syntax match mojomOperator /=>/
+syntax match mojomOperator /?/
+
+" Comments
+syntax keyword mojomTodo contained TODO FIXME XXX
+syntax region mojomComment start="/\*" end="\*/" contains=mojomTodo,mojomDocLink,@Spell
+syntax match mojomLineComment "//.*" contains=mojomTodo,@Spell
+syntax match mojomLineDocComment "///.*" contains=mojomTodo,mojomDocLink,@Spell
+syntax region mojomDocLink contained start=+\[+ end=+\]+
+
+" Strings
+syn region mojomString start=+L\="+ skip=+\\\\\|\\"+ end=+"+ contains=@Spell
+hi def link mojomString String
+
+" The default highlighting.
+highlight default link mojomTodo Todo
+highlight default link mojomComment Comment
+highlight default link mojomLineComment Comment
+highlight default link mojomLineDocComment Comment
+highlight default link mojomDocLink SpecialComment
+highlight default link mojomType Type
+highlight default link mojomImport Include
+highlight default link mojomKeyword Keyword
+highlight default link mojomOperator Operator
+
+let b:current_syntax = "mojom"
+let b:spell_options = "contained"
+
+syn sync minlines=500
+
+let b:current_syntax = "mojom"
diff --git a/chromium/tools/vim/ninja-build.vim b/chromium/tools/vim/ninja-build.vim
new file mode 100644
index 00000000000..70e5a83cf21
--- /dev/null
+++ b/chromium/tools/vim/ninja-build.vim
@@ -0,0 +1,119 @@
+" Copyright (c) 2012 The Chromium Authors. All rights reserved.
+" Use of this source code is governed by a BSD-style license that can be
+" found in the LICENSE file.
+"
+" Adds a "Compile this file" function, using ninja. On Mac, binds Cmd-k to
+" this command. On Windows, Ctrl-F7 (which is the same as the VS default).
+" On Linux, <Leader>o, which is \o by default ("o"=creates .o files)
+"
+" Adds a "Build this target" function, using ninja. This is not bound
+" to any key by default, but can be used via the :CrBuild command.
+" It builds 'chrome' by default, but :CrBuild target1 target2 etc works as well.
+"
+" Requires that gyp has already generated build.ninja files, and that ninja is
+" in your path (which it is automatically if depot_tools is in your path).
+"
+" Add the following to your .vimrc file:
+" so /path/to/src/tools/vim/ninja-build.vim
+
+python << endpython
+import os
+import vim
+
+
+def path_to_current_buffer():
+ """Returns the absolute path of the current buffer."""
+ return vim.current.buffer.name
+
+
+def path_to_source_root():
+ """Returns the absolute path to the chromium source root."""
+ candidate = os.path.dirname(path_to_current_buffer())
+ # This is a list of files that need to identify the src directory. The shorter
+ # it is, the more likely it's wrong (checking for just "build/common.gypi"
+ # would find "src/v8" for files below "src/v8", as "src/v8/build/common.gypi"
+ # exists). The longer it is, the more likely it is to break when we rename
+ # directories.
+ fingerprints = ['chrome', 'net', 'v8', 'build', 'skia']
+ while candidate and not all(
+ [os.path.isdir(os.path.join(candidate, fp)) for fp in fingerprints]):
+ candidate = os.path.dirname(candidate)
+ return candidate
+
+
+def path_to_build_dir(configuration):
+ """Returns <chrome_root>/<output_dir>/(Release|Debug)."""
+
+ chrome_root = path_to_source_root()
+ sys.path.append(os.path.join(chrome_root, 'tools', 'vim'))
+ from ninja_output import GetNinjaOutputDirectory
+ return GetNinjaOutputDirectory(chrome_root, configuration)
+
+def compute_ninja_command_for_current_buffer(configuration=None):
+ """Returns the shell command to compile the file in the current buffer."""
+ build_dir = path_to_build_dir(configuration)
+
+ # ninja needs filepaths for the ^ syntax to be relative to the
+ # build directory.
+ file_to_build = path_to_current_buffer()
+ file_to_build = os.path.relpath(file_to_build, build_dir)
+
+ build_cmd = ' '.join(['ninja', '-C', build_dir, file_to_build + '^'])
+ if sys.platform == 'win32':
+ # Escape \ for Vim, and ^ for both Vim and shell.
+ build_cmd = build_cmd.replace('\\', '\\\\').replace('^', '^^^^')
+ vim.command('return "%s"' % build_cmd)
+
+
+def compute_ninja_command_for_targets(targets='', configuration=None):
+ build_cmd = ' '.join(['ninja', '-C', path_to_build_dir(configuration),
+ targets])
+ vim.command('return "%s"' % build_cmd)
+endpython
+
+fun! s:MakeWithCustomCommand(build_cmd)
+ let l:oldmakepgr = &makeprg
+ let &makeprg=a:build_cmd
+ if exists(':Make') == 2
+ Make
+ else
+ silent make | cwindow
+ endif
+ if !has('gui_running')
+ redraw!
+ endif
+ let &makeprg = l:oldmakepgr
+endfun
+
+fun! s:NinjaCommandForCurrentBuffer()
+ python compute_ninja_command_for_current_buffer()
+endfun
+
+fun! s:NinjaCommandForTargets(targets)
+ python compute_ninja_command_for_targets(vim.eval('a:targets'))
+endfun
+
+fun! CrCompileFile()
+ call s:MakeWithCustomCommand(s:NinjaCommandForCurrentBuffer())
+endfun
+
+fun! CrBuild(...)
+ let l:targets = a:0 > 0 ? join(a:000, ' ') : ''
+ if (l:targets !~ '\i')
+ let l:targets = 'chrome'
+ endif
+ call s:MakeWithCustomCommand(s:NinjaCommandForTargets(l:targets))
+endfun
+
+command! CrCompileFile call CrCompileFile()
+command! -nargs=* CrBuild call CrBuild(<q-args>)
+
+if has('mac')
+ map <D-k> :CrCompileFile<cr>
+ imap <D-k> <esc>:CrCompileFile<cr>
+elseif has('win32')
+ map <C-F7> :CrCompileFile<cr>
+ imap <C-F7> <esc>:CrCompileFile<cr>
+elseif has('unix')
+ map <Leader>o :CrCompileFile<cr>
+endif
diff --git a/chromium/tools/vim/ninja_output.py b/chromium/tools/vim/ninja_output.py
new file mode 100644
index 00000000000..5bb638ab6d9
--- /dev/null
+++ b/chromium/tools/vim/ninja_output.py
@@ -0,0 +1,72 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import sys
+import os
+import exceptions
+import itertools
+import re
+
+
+def GetNinjaOutputDirectory(chrome_root, configuration=None):
+ """Returns <chrome_root>/<output_dir>/(Release|Debug).
+
+ If either of the following environment variables are set, their
+ value is used to determine the output directory:
+ 1. CHROMIUM_OUT_DIR environment variable.
+ 2. GYP_GENERATOR_FLAGS environment variable output_dir property.
+
+ Otherwise, all directories starting with the word out are examined.
+
+ The output directory must contain {configuration}/build.ninja (if
+ configuration is None, both Debug and Release will be checked).
+
+ The configuration chosen is the one most recently generated/built,
+ but can be overridden via the <configuration> parameter.
+ """
+
+ output_dirs = []
+ if ('CHROMIUM_OUT_DIR' in os.environ and
+ os.path.isdir(os.path.join(chrome_root, os.environ['CHROMIUM_OUT_DIR']))):
+ output_dirs = [os.environ['CHROMIUM_OUT_DIR']]
+ if not output_dirs:
+ generator_flags = os.getenv('GYP_GENERATOR_FLAGS', '').split(' ')
+ for flag in generator_flags:
+ name_value = flag.split('=', 1)
+ if (len(name_value) == 2 and name_value[0] == 'output_dir' and
+ os.path.isdir(os.path.join(chrome_root, name_value[1]))):
+ output_dirs = [name_value[1]]
+ if not output_dirs:
+ for f in os.listdir(chrome_root):
+ if re.match(r'out(\b|_)', f):
+ out = os.path.realpath(os.path.join(chrome_root, f))
+ if os.path.isdir(out):
+ output_dirs.append(os.path.relpath(out, start = chrome_root))
+
+ configs = ['Debug', 'Release', 'Default']
+ if configuration:
+ configs = [configuration]
+
+ def generate_paths():
+ for out_dir, config in itertools.product(output_dirs, configs):
+ path = os.path.join(chrome_root, out_dir, config)
+ if os.path.exists(os.path.join(path, 'build.ninja')):
+ yield path
+
+ def approx_directory_mtime(path):
+ # This is a heuristic; don't recurse into subdirectories.
+ paths = [path] + [os.path.join(path, f) for f in os.listdir(path)]
+ return max(os.path.getmtime(p) for p in paths)
+
+ try:
+ return max(generate_paths(), key=approx_directory_mtime)
+ except ValueError:
+ raise exceptions.RuntimeError(
+ 'Unable to find a valid ninja output directory.')
+
+if __name__ == '__main__':
+ if len(sys.argv) != 2:
+ raise exceptions.RuntimeError('Expected a single path argument.')
+ print GetNinjaOutputDirectory(sys.argv[1])
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug.sln b/chromium/tools/win/ChromeDebug/ChromeDebug.sln
new file mode 100644
index 00000000000..ade84fdd90f
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug.sln
@@ -0,0 +1,26 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio 2012
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ChromeDebug", "ChromeDebug\ChromeDebug.csproj", "{4CC60BED-569D-481A-B56B-6ECBC23CBC16}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "LowLevel", "LowLevel\LowLevel.csproj", "{998C0725-F123-4ED3-9D44-12C1945F00D1}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Any CPU = Debug|Any CPU
+ Release|Any CPU = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {4CC60BED-569D-481A-B56B-6ECBC23CBC16}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {4CC60BED-569D-481A-B56B-6ECBC23CBC16}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {4CC60BED-569D-481A-B56B-6ECBC23CBC16}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {4CC60BED-569D-481A-B56B-6ECBC23CBC16}.Release|Any CPU.Build.0 = Release|Any CPU
+ {998C0725-F123-4ED3-9D44-12C1945F00D1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {998C0725-F123-4ED3-9D44-12C1945F00D1}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {998C0725-F123-4ED3-9D44-12C1945F00D1}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {998C0725-F123-4ED3-9D44-12C1945F00D1}.Release|Any CPU.Build.0 = Release|Any CPU
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+EndGlobal
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/AttachDialog.Designer.cs b/chromium/tools/win/ChromeDebug/ChromeDebug/AttachDialog.Designer.cs
new file mode 100644
index 00000000000..3465e3e5fd6
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/AttachDialog.Designer.cs
@@ -0,0 +1,201 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+namespace ChromeDebug
+{
+ partial class AttachDialog
+ {
+ /// <summary>
+ /// Required designer variable.
+ /// </summary>
+ private System.ComponentModel.IContainer components = null;
+
+ /// <summary>
+ /// Clean up any resources being used.
+ /// </summary>
+ /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
+ protected override void Dispose(bool disposing)
+ {
+ if (disposing && (components != null))
+ {
+ components.Dispose();
+ }
+ base.Dispose(disposing);
+ }
+
+ #region Windows Form Designer generated code
+
+ /// <summary>
+ /// Required method for Designer support - do not modify
+ /// the contents of this method with the code editor.
+ /// </summary>
+ private void InitializeComponent()
+ {
+ this.listViewProcesses = new System.Windows.Forms.ListView();
+ this.columnHeaderProcess = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));
+ this.columnHeaderPid = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));
+ this.columnHeaderTitle = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));
+ this.columnHeaderType = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));
+ this.columnHeaderSession = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));
+ this.columnHeaderCmdLine = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));
+ this.buttonAttach = new System.Windows.Forms.Button();
+ this.buttonCancel = new System.Windows.Forms.Button();
+ this.groupBox1 = new System.Windows.Forms.GroupBox();
+ this.buttonRefresh = new System.Windows.Forms.Button();
+ this.checkBoxOnlyChrome = new System.Windows.Forms.CheckBox();
+ this.groupBox1.SuspendLayout();
+ this.SuspendLayout();
+ //
+ // listViewProcesses
+ //
+ this.listViewProcesses.AllowColumnReorder = true;
+ this.listViewProcesses.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
+ | System.Windows.Forms.AnchorStyles.Left)
+ | System.Windows.Forms.AnchorStyles.Right)));
+ this.listViewProcesses.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] {
+ this.columnHeaderProcess,
+ this.columnHeaderPid,
+ this.columnHeaderTitle,
+ this.columnHeaderType,
+ this.columnHeaderSession,
+ this.columnHeaderCmdLine});
+ this.listViewProcesses.FullRowSelect = true;
+ this.listViewProcesses.Location = new System.Drawing.Point(14, 27);
+ this.listViewProcesses.Name = "listViewProcesses";
+ this.listViewProcesses.Size = new System.Drawing.Size(884, 462);
+ this.listViewProcesses.TabIndex = 0;
+ this.listViewProcesses.UseCompatibleStateImageBehavior = false;
+ this.listViewProcesses.View = System.Windows.Forms.View.Details;
+ //
+ // columnHeaderProcess
+ //
+ this.columnHeaderProcess.Text = "Executable";
+ this.columnHeaderProcess.Width = 65;
+ //
+ // columnHeaderPid
+ //
+ this.columnHeaderPid.Text = "PID";
+ this.columnHeaderPid.Width = 30;
+ //
+ // columnHeaderTitle
+ //
+ this.columnHeaderTitle.Text = "Title";
+ this.columnHeaderTitle.Width = 32;
+ //
+ // columnHeaderType
+ //
+ this.columnHeaderType.Text = "Type";
+ this.columnHeaderType.Width = 36;
+ //
+ // columnHeaderSession
+ //
+ this.columnHeaderSession.Text = "Session";
+ this.columnHeaderSession.Width = 49;
+ //
+ // columnHeaderCmdLine
+ //
+ this.columnHeaderCmdLine.Text = "Command Line";
+ this.columnHeaderCmdLine.Width = 668;
+ //
+ // buttonAttach
+ //
+ this.buttonAttach.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
+ this.buttonAttach.DialogResult = System.Windows.Forms.DialogResult.OK;
+ this.buttonAttach.Location = new System.Drawing.Point(684, 603);
+ this.buttonAttach.Name = "buttonAttach";
+ this.buttonAttach.Size = new System.Drawing.Size(118, 41);
+ this.buttonAttach.TabIndex = 2;
+ this.buttonAttach.Text = "Attach";
+ this.buttonAttach.UseVisualStyleBackColor = true;
+ this.buttonAttach.Click += new System.EventHandler(this.buttonAttach_Click);
+ //
+ // buttonCancel
+ //
+ this.buttonCancel.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
+ this.buttonCancel.DialogResult = System.Windows.Forms.DialogResult.Cancel;
+ this.buttonCancel.Location = new System.Drawing.Point(808, 603);
+ this.buttonCancel.Name = "buttonCancel";
+ this.buttonCancel.Size = new System.Drawing.Size(118, 41);
+ this.buttonCancel.TabIndex = 3;
+ this.buttonCancel.Text = "Cancel";
+ this.buttonCancel.UseVisualStyleBackColor = true;
+ //
+ // groupBox1
+ //
+ this.groupBox1.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
+ | System.Windows.Forms.AnchorStyles.Left)
+ | System.Windows.Forms.AnchorStyles.Right)));
+ this.groupBox1.Controls.Add(this.listViewProcesses);
+ this.groupBox1.Location = new System.Drawing.Point(12, 27);
+ this.groupBox1.Name = "groupBox1";
+ this.groupBox1.Size = new System.Drawing.Size(914, 511);
+ this.groupBox1.TabIndex = 5;
+ this.groupBox1.TabStop = false;
+ this.groupBox1.Text = "Available Processes";
+ //
+ // buttonRefresh
+ //
+ this.buttonRefresh.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
+ this.buttonRefresh.Location = new System.Drawing.Point(808, 552);
+ this.buttonRefresh.Name = "buttonRefresh";
+ this.buttonRefresh.Size = new System.Drawing.Size(117, 33);
+ this.buttonRefresh.TabIndex = 6;
+ this.buttonRefresh.Text = "Refresh";
+ this.buttonRefresh.UseVisualStyleBackColor = true;
+ this.buttonRefresh.Click += new System.EventHandler(this.buttonRefresh_Click);
+ //
+ // checkBoxOnlyChrome
+ //
+ this.checkBoxOnlyChrome.AutoSize = true;
+ this.checkBoxOnlyChrome.Checked = true;
+ this.checkBoxOnlyChrome.CheckState = System.Windows.Forms.CheckState.Checked;
+ this.checkBoxOnlyChrome.Location = new System.Drawing.Point(12, 561);
+ this.checkBoxOnlyChrome.Name = "checkBoxOnlyChrome";
+ this.checkBoxOnlyChrome.Size = new System.Drawing.Size(165, 17);
+ this.checkBoxOnlyChrome.TabIndex = 7;
+ this.checkBoxOnlyChrome.Text = "Only show Chrome processes";
+ this.checkBoxOnlyChrome.UseVisualStyleBackColor = true;
+ this.checkBoxOnlyChrome.CheckedChanged += new System.EventHandler(this.checkBoxOnlyChrome_CheckedChanged);
+ //
+ // AttachDialog
+ //
+ this.AcceptButton = this.buttonAttach;
+ this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
+ this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
+ this.CancelButton = this.buttonCancel;
+ this.ClientSize = new System.Drawing.Size(940, 656);
+ this.ControlBox = false;
+ this.Controls.Add(this.checkBoxOnlyChrome);
+ this.Controls.Add(this.buttonRefresh);
+ this.Controls.Add(this.groupBox1);
+ this.Controls.Add(this.buttonCancel);
+ this.Controls.Add(this.buttonAttach);
+ this.MaximizeBox = false;
+ this.MinimizeBox = false;
+ this.Name = "AttachDialog";
+ this.ShowInTaskbar = false;
+ this.Text = "Attach to Chrome";
+ this.Load += new System.EventHandler(this.AttachDialog_Load);
+ this.groupBox1.ResumeLayout(false);
+ this.ResumeLayout(false);
+ this.PerformLayout();
+
+ }
+
+ #endregion
+
+ private System.Windows.Forms.ListView listViewProcesses;
+ private System.Windows.Forms.Button buttonAttach;
+ private System.Windows.Forms.Button buttonCancel;
+ private System.Windows.Forms.ColumnHeader columnHeaderProcess;
+ private System.Windows.Forms.ColumnHeader columnHeaderPid;
+ private System.Windows.Forms.ColumnHeader columnHeaderTitle;
+ private System.Windows.Forms.ColumnHeader columnHeaderCmdLine;
+ private System.Windows.Forms.ColumnHeader columnHeaderType;
+ private System.Windows.Forms.ColumnHeader columnHeaderSession;
+ private System.Windows.Forms.GroupBox groupBox1;
+ private System.Windows.Forms.Button buttonRefresh;
+ private System.Windows.Forms.CheckBox checkBoxOnlyChrome;
+ }
+} \ No newline at end of file
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/AttachDialog.cs b/chromium/tools/win/ChromeDebug/ChromeDebug/AttachDialog.cs
new file mode 100644
index 00000000000..2c38983b0d4
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/AttachDialog.cs
@@ -0,0 +1,263 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+using System;
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.Data;
+using System.Diagnostics;
+using System.Drawing;
+using System.IO;
+using System.Linq;
+using System.Management;
+using System.Text;
+using System.Threading.Tasks;
+using System.Windows.Forms;
+
+using ChromeDebug.LowLevel;
+
+namespace ChromeDebug {
+ // The form that is displayed to allow the user to select processes to attach to. Note that we
+ // cannot interact with the DTE object from here (I assume this is because the dialog is running
+ // on a different thread, although I don't fully understand), so any access to the DTE object
+ // will have to be done through events that get posted back to the main package thread.
+ public partial class AttachDialog : Form {
+ private class ProcessViewItem : ListViewItem {
+ public ProcessViewItem() {
+ Category = ProcessCategory.Other;
+ MachineType = LowLevelTypes.MachineType.UNKNOWN;
+ }
+
+ public string Exe;
+ public int ProcessId;
+ public int SessionId;
+ public string Title;
+ public string DisplayCmdLine;
+ public string[] CmdLineArgs;
+ public ProcessCategory Category;
+ public LowLevelTypes.MachineType MachineType;
+
+ public ProcessDetail Detail;
+ }
+
+ private Dictionary<ProcessCategory, List<ProcessViewItem>> loadedProcessTable = null;
+ private Dictionary<ProcessCategory, ListViewGroup> processGroups = null;
+ private List<int> selectedProcesses = null;
+
+ public AttachDialog() {
+ InitializeComponent();
+
+ loadedProcessTable = new Dictionary<ProcessCategory, List<ProcessViewItem>>();
+ processGroups = new Dictionary<ProcessCategory, ListViewGroup>();
+ selectedProcesses = new List<int>();
+
+ // Create and initialize the groups and process lists only once. On a reset
+ // we don't clear the groups manually, clearing the list view should clear the
+ // groups. And we don't clear the entire processes_ dictionary, only the
+ // individual buckets inside the dictionary.
+ foreach (object value in Enum.GetValues(typeof(ProcessCategory))) {
+ ProcessCategory category = (ProcessCategory)value;
+
+ ListViewGroup group = new ListViewGroup(category.ToGroupTitle());
+ processGroups[category] = group;
+ listViewProcesses.Groups.Add(group);
+
+ loadedProcessTable[category] = new List<ProcessViewItem>();
+ }
+ }
+
+ // Provides an iterator that evaluates to the process ids of the entries that are selected
+ // in the list view.
+ public IEnumerable<int> SelectedItems {
+ get {
+ foreach (ProcessViewItem item in listViewProcesses.SelectedItems)
+ yield return item.ProcessId;
+ }
+ }
+
+ private void AttachDialog_Load(object sender, EventArgs e) {
+ RepopulateListView();
+ }
+
+ // Remove command line arguments that we aren't interested in displaying as part of the command
+ // line of the process.
+ private string[] FilterCommandLine(string[] args) {
+ Func<string, int, bool> AllowArgument = delegate(string arg, int index) {
+ if (index == 0)
+ return false;
+ return !arg.StartsWith("--force-fieldtrials", StringComparison.CurrentCultureIgnoreCase);
+ };
+
+ // The force-fieldtrials command line option makes the command line view useless, so remove
+ // it. Also remove args[0] since that is the process name.
+ args = args.Where(AllowArgument).ToArray();
+ return args;
+ }
+
+ private void ReloadNativeProcessInfo() {
+ foreach (List<ProcessViewItem> list in loadedProcessTable.Values) {
+ list.Clear();
+ }
+
+ Process[] processes = Process.GetProcesses();
+ foreach (Process p in processes) {
+ ProcessViewItem item = new ProcessViewItem();
+ try {
+ item.Detail = new ProcessDetail(p.Id);
+ if (item.Detail.CanReadPeb && item.Detail.CommandLine != null) {
+ item.CmdLineArgs = Utility.SplitArgs(item.Detail.CommandLine);
+ item.DisplayCmdLine = GetFilteredCommandLineString(item.CmdLineArgs);
+ }
+ item.MachineType = item.Detail.MachineType;
+ }
+ catch (Exception) {
+ // Generally speaking, an exception here means the process is privileged and we cannot
+ // get any information about the process. For those processes, we will just display the
+ // information that the Framework gave us in the Process structure.
+ }
+
+ // If we don't have the machine type, its privilege level is high enough that we won't be
+ // able to attach a debugger to it anyway, so skip it.
+ if (item.MachineType == LowLevelTypes.MachineType.UNKNOWN)
+ continue;
+
+ item.ProcessId = p.Id;
+ item.SessionId = p.SessionId;
+ item.Title = p.MainWindowTitle;
+ item.Exe = p.ProcessName;
+ if (item.CmdLineArgs != null)
+ item.Category = DetermineProcessCategory(item.Detail.Win32ProcessImagePath,
+ item.CmdLineArgs);
+
+ Icon icon = item.Detail.SmallIcon;
+ List<ProcessViewItem> items = loadedProcessTable[item.Category];
+ item.Group = processGroups[item.Category];
+ items.Add(item);
+ }
+ }
+
+ // Filter the command line arguments to remove extraneous arguments that we don't wish to
+ // display.
+ private string GetFilteredCommandLineString(string[] args) {
+ if (args == null || args.Length == 0)
+ return string.Empty;
+
+ args = FilterCommandLine(args);
+ return string.Join(" ", args, 0, args.Length);
+ }
+
+ // Using a heuristic based on the command line, tries to determine what type of process this
+ // is.
+ private ProcessCategory DetermineProcessCategory(string imagePath, string[] cmdline) {
+ if (cmdline == null || cmdline.Length == 0)
+ return ProcessCategory.Other;
+
+ string file = Path.GetFileName(imagePath);
+ if (file.Equals("delegate_execute.exe", StringComparison.CurrentCultureIgnoreCase))
+ return ProcessCategory.DelegateExecute;
+ else if (file.Equals("chrome.exe", StringComparison.CurrentCultureIgnoreCase)) {
+ if (cmdline.Contains("--type=renderer"))
+ return ProcessCategory.Renderer;
+ else if (cmdline.Contains("--type=plugin") || cmdline.Contains("--type=ppapi"))
+ return ProcessCategory.Plugin;
+ else if (cmdline.Contains("--type=gpu-process"))
+ return ProcessCategory.Gpu;
+ else if (cmdline.Contains("--type=service"))
+ return ProcessCategory.Service;
+ else if (cmdline.Any(arg => arg.StartsWith("-ServerName")))
+ return ProcessCategory.MetroViewer;
+ else
+ return ProcessCategory.Browser;
+ } else
+ return ProcessCategory.Other;
+ }
+
+ private void InsertCategoryItems(ProcessCategory category) {
+ foreach (ProcessViewItem item in loadedProcessTable[category]) {
+ item.Text = item.Exe;
+ item.SubItems.Add(item.ProcessId.ToString());
+ item.SubItems.Add(item.Title);
+ item.SubItems.Add(item.MachineType.ToString());
+ item.SubItems.Add(item.SessionId.ToString());
+ item.SubItems.Add(item.DisplayCmdLine);
+ listViewProcesses.Items.Add(item);
+
+ Icon icon = item.Detail.SmallIcon;
+ if (icon != null) {
+ item.ImageList.Images.Add(icon);
+ item.ImageIndex = item.ImageList.Images.Count - 1;
+ }
+ }
+ }
+
+ private void AutoResizeColumns() {
+ // First adjust to the width of the headers, since it's fast.
+ listViewProcesses.AutoResizeColumns(ColumnHeaderAutoResizeStyle.HeaderSize);
+
+ // Save the widths so we can use them again later.
+ List<int> widths = new List<int>();
+ foreach (ColumnHeader header in listViewProcesses.Columns)
+ widths.Add(header.Width);
+
+ // Now let Windows do the slow adjustment based on the content.
+ listViewProcesses.AutoResizeColumns(ColumnHeaderAutoResizeStyle.ColumnContent);
+
+ // Finally, iterate over each column, and resize those columns that just got smaller.
+ int total = 0;
+ for (int i = 0; i < listViewProcesses.Columns.Count; ++i) {
+ // Resize to the largest of the two, but don't let it go over a pre-defined maximum.
+ int max = Math.Max(listViewProcesses.Columns[i].Width, widths[i]);
+ int capped = Math.Min(max, 300);
+
+ // We do still want to fill up the available space in the list view however, so if we're
+ // under then we can fill.
+ int globalMinWidth = listViewProcesses.Width - SystemInformation.VerticalScrollBarWidth;
+ if (i == listViewProcesses.Columns.Count - 1 && (total + capped) < (globalMinWidth - 4))
+ capped = globalMinWidth - total - 4;
+
+ total += capped;
+ listViewProcesses.Columns[i].Width = capped;
+ }
+ }
+
+ private void RepopulateListView() {
+ listViewProcesses.Items.Clear();
+ listViewProcesses.SmallImageList = new ImageList();
+ listViewProcesses.SmallImageList.ImageSize = new Size(16, 16);
+
+ ReloadNativeProcessInfo();
+
+ InsertCategoryItems(ProcessCategory.Browser);
+ InsertCategoryItems(ProcessCategory.Renderer);
+ InsertCategoryItems(ProcessCategory.Gpu);
+ InsertCategoryItems(ProcessCategory.Plugin);
+ InsertCategoryItems(ProcessCategory.MetroViewer);
+ InsertCategoryItems(ProcessCategory.Service);
+ InsertCategoryItems(ProcessCategory.DelegateExecute);
+ if (!checkBoxOnlyChrome.Checked)
+ InsertCategoryItems(ProcessCategory.Other);
+
+ AutoResizeColumns();
+ }
+
+ private void buttonRefresh_Click(object sender, EventArgs e) {
+ RepopulateListView();
+ }
+
+ private void buttonAttach_Click(object sender, EventArgs e) {
+ System.Diagnostics.Debug.WriteLine("Closing dialog.");
+ this.Close();
+ }
+
+ private void checkBoxOnlyChrome_CheckedChanged(object sender, EventArgs e) {
+ if (!checkBoxOnlyChrome.Checked)
+ InsertCategoryItems(ProcessCategory.Other);
+ else {
+ foreach (ProcessViewItem item in loadedProcessTable[ProcessCategory.Other]) {
+ listViewProcesses.Items.Remove(item);
+ }
+ }
+ }
+ }
+}
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/AttachDialog.resx b/chromium/tools/win/ChromeDebug/ChromeDebug/AttachDialog.resx
new file mode 100644
index 00000000000..1af7de150c9
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/AttachDialog.resx
@@ -0,0 +1,120 @@
+<?xml version="1.0" encoding="utf-8"?>
+<root>
+ <!--
+ Microsoft ResX Schema
+
+ Version 2.0
+
+ The primary goals of this format is to allow a simple XML format
+ that is mostly human readable. The generation and parsing of the
+ various data types are done through the TypeConverter classes
+ associated with the data types.
+
+ Example:
+
+ ... ado.net/XML headers & schema ...
+ <resheader name="resmimetype">text/microsoft-resx</resheader>
+ <resheader name="version">2.0</resheader>
+ <resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
+ <resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
+ <data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
+ <data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
+ <data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
+ <value>[base64 mime encoded serialized .NET Framework object]</value>
+ </data>
+ <data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
+ <value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
+ <comment>This is a comment</comment>
+ </data>
+
+ There are any number of "resheader" rows that contain simple
+ name/value pairs.
+
+ Each data row contains a name, and value. The row also contains a
+ type or mimetype. Type corresponds to a .NET class that support
+ text/value conversion through the TypeConverter architecture.
+ Classes that don't support this are serialized and stored with the
+ mimetype set.
+
+ The mimetype is used for serialized objects, and tells the
+ ResXResourceReader how to depersist the object. This is currently not
+ extensible. For a given mimetype the value must be set accordingly:
+
+ Note - application/x-microsoft.net.object.binary.base64 is the format
+ that the ResXResourceWriter will generate, however the reader can
+ read any of the formats listed below.
+
+ mimetype: application/x-microsoft.net.object.binary.base64
+ value : The object must be serialized with
+ : System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.soap.base64
+ value : The object must be serialized with
+ : System.Runtime.Serialization.Formatters.Soap.SoapFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.bytearray.base64
+ value : The object must be serialized into a byte array
+ : using a System.ComponentModel.TypeConverter
+ : and then encoded with base64 encoding.
+ -->
+ <xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
+ <xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
+ <xsd:element name="root" msdata:IsDataSet="true">
+ <xsd:complexType>
+ <xsd:choice maxOccurs="unbounded">
+ <xsd:element name="metadata">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" />
+ </xsd:sequence>
+ <xsd:attribute name="name" use="required" type="xsd:string" />
+ <xsd:attribute name="type" type="xsd:string" />
+ <xsd:attribute name="mimetype" type="xsd:string" />
+ <xsd:attribute ref="xml:space" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="assembly">
+ <xsd:complexType>
+ <xsd:attribute name="alias" type="xsd:string" />
+ <xsd:attribute name="name" type="xsd:string" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="data">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ <xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
+ <xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
+ <xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
+ <xsd:attribute ref="xml:space" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="resheader">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" />
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:choice>
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:schema>
+ <resheader name="resmimetype">
+ <value>text/microsoft-resx</value>
+ </resheader>
+ <resheader name="version">
+ <value>2.0</value>
+ </resheader>
+ <resheader name="reader">
+ <value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+ <resheader name="writer">
+ <value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+</root> \ No newline at end of file
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/ChromeDebug.csproj b/chromium/tools/win/ChromeDebug/ChromeDebug/ChromeDebug.csproj
new file mode 100644
index 00000000000..97bd38d4a4c
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/ChromeDebug.csproj
@@ -0,0 +1,211 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003" ToolsVersion="4.0">
+ <PropertyGroup>
+ <MinimumVisualStudioVersion>11.0</MinimumVisualStudioVersion>
+ <VisualStudioVersion Condition="'$(VisualStudioVersion)' == ''">11.0</VisualStudioVersion>
+ <VSToolsPath Condition="'$(VSToolsPath)' == ''">$(MSBuildExtensionsPath32)\Microsoft\VisualStudio\v$(VisualStudioVersion)</VSToolsPath>
+ <FileUpgradeFlags>
+ </FileUpgradeFlags>
+ <UpgradeBackupLocation>
+ </UpgradeBackupLocation>
+ <OldToolsVersion>4.0</OldToolsVersion>
+ </PropertyGroup>
+ <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
+ <PropertyGroup>
+ <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+ <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+ <SchemaVersion>2.0</SchemaVersion>
+ <ProjectGuid>{4CC60BED-569D-481A-B56B-6ECBC23CBC16}</ProjectGuid>
+ <ProjectTypeGuids>{82b43b9b-a64c-4715-b499-d71e9ca2bd60};{60dc8134-eba5-43b8-bcc9-bb4bc16c2548};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}</ProjectTypeGuids>
+ <OutputType>Library</OutputType>
+ <AppDesignerFolder>Properties</AppDesignerFolder>
+ <RootNamespace>ChromeDebug</RootNamespace>
+ <AssemblyName>ChromeDebug</AssemblyName>
+ <SignAssembly>True</SignAssembly>
+ <AssemblyOriginatorKeyFile>
+ </AssemblyOriginatorKeyFile>
+ <TargetFrameworkVersion>v4.5</TargetFrameworkVersion>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+ <DebugSymbols>true</DebugSymbols>
+ <DebugType>full</DebugType>
+ <Optimize>false</Optimize>
+ <OutputPath>bin\Debug\</OutputPath>
+ <DefineConstants>DEBUG;TRACE</DefineConstants>
+ <ErrorReport>prompt</ErrorReport>
+ <WarningLevel>4</WarningLevel>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+ <DebugType>pdbonly</DebugType>
+ <Optimize>true</Optimize>
+ <OutputPath>bin\Release\</OutputPath>
+ <DefineConstants>TRACE</DefineConstants>
+ <ErrorReport>prompt</ErrorReport>
+ <WarningLevel>4</WarningLevel>
+ <RunCodeAnalysis>true</RunCodeAnalysis>
+ </PropertyGroup>
+ <ItemGroup>
+ <Reference Include="Microsoft.CSharp" />
+ <Reference Include="Microsoft.VisualStudio.OLE.Interop" />
+ <Reference Include="Microsoft.VisualStudio.Shell.Interop" />
+ <Reference Include="Microsoft.VisualStudio.Shell.Interop.8.0" />
+ <Reference Include="Microsoft.VisualStudio.Shell.Interop.9.0" />
+ <Reference Include="Microsoft.VisualStudio.Shell.Interop.10.0" />
+ <Reference Include="Microsoft.VisualStudio.Shell.Interop.11.0">
+ <EmbedInteropTypes>true</EmbedInteropTypes>
+ </Reference>
+ <Reference Include="Microsoft.VisualStudio.TextManager.Interop" />
+ <Reference Include="Microsoft.VisualStudio.Shell.11.0" />
+ <Reference Include="Microsoft.VisualStudio.Shell.Immutable.10.0" />
+ <Reference Include="Microsoft.VisualStudio.Shell.Immutable.11.0" />
+ <Reference Include="System" />
+ <Reference Include="System.Core" />
+ <Reference Include="System.Data" />
+ <Reference Include="System.Design" />
+ <Reference Include="System.Drawing" />
+ <Reference Include="System.Management" />
+ <Reference Include="System.Windows.Forms" />
+ <Reference Include="System.Xml" />
+ </ItemGroup>
+ <ItemGroup>
+ <COMReference Include="EnvDTE">
+ <Guid>{80CC9F66-E7D8-4DDD-85B6-D9E6CD0E93E2}</Guid>
+ <VersionMajor>8</VersionMajor>
+ <VersionMinor>0</VersionMinor>
+ <Lcid>0</Lcid>
+ <WrapperTool>primary</WrapperTool>
+ <Isolated>False</Isolated>
+ <EmbedInteropTypes>False</EmbedInteropTypes>
+ </COMReference>
+ <COMReference Include="EnvDTE100">
+ <Guid>{26AD1324-4B7C-44BC-84F8-B86AED45729F}</Guid>
+ <VersionMajor>10</VersionMajor>
+ <VersionMinor>0</VersionMinor>
+ <Lcid>0</Lcid>
+ <WrapperTool>primary</WrapperTool>
+ <Isolated>False</Isolated>
+ <EmbedInteropTypes>False</EmbedInteropTypes>
+ </COMReference>
+ <COMReference Include="EnvDTE80">
+ <Guid>{1A31287A-4D7D-413E-8E32-3B374931BD89}</Guid>
+ <VersionMajor>8</VersionMajor>
+ <VersionMinor>0</VersionMinor>
+ <Lcid>0</Lcid>
+ <WrapperTool>primary</WrapperTool>
+ <Isolated>False</Isolated>
+ <EmbedInteropTypes>False</EmbedInteropTypes>
+ </COMReference>
+ <COMReference Include="EnvDTE90">
+ <Guid>{2CE2370E-D744-4936-A090-3FFFE667B0E1}</Guid>
+ <VersionMajor>9</VersionMajor>
+ <VersionMinor>0</VersionMinor>
+ <Lcid>0</Lcid>
+ <WrapperTool>primary</WrapperTool>
+ <Isolated>False</Isolated>
+ <EmbedInteropTypes>False</EmbedInteropTypes>
+ </COMReference>
+ <COMReference Include="EnvDTE90a1">
+ <Guid>{64A96FE8-CCCF-4EDF-B341-FF7C528B60C9}</Guid>
+ <VersionMajor>9</VersionMajor>
+ <VersionMinor>0</VersionMinor>
+ <Lcid>0</Lcid>
+ <WrapperTool>primary</WrapperTool>
+ <Isolated>False</Isolated>
+ <EmbedInteropTypes>False</EmbedInteropTypes>
+ </COMReference>
+ <COMReference Include="Microsoft.VisualStudio.CommandBars">
+ <Guid>{1CBA492E-7263-47BB-87FE-639000619B15}</Guid>
+ <VersionMajor>8</VersionMajor>
+ <VersionMinor>0</VersionMinor>
+ <Lcid>0</Lcid>
+ <WrapperTool>primary</WrapperTool>
+ <Isolated>False</Isolated>
+ <EmbedInteropTypes>False</EmbedInteropTypes>
+ </COMReference>
+ <COMReference Include="stdole">
+ <Guid>{00020430-0000-0000-C000-000000000046}</Guid>
+ <VersionMajor>2</VersionMajor>
+ <VersionMinor>0</VersionMinor>
+ <Lcid>0</Lcid>
+ <WrapperTool>primary</WrapperTool>
+ <Isolated>False</Isolated>
+ <EmbedInteropTypes>False</EmbedInteropTypes>
+ </COMReference>
+ </ItemGroup>
+ <ItemGroup>
+ <Compile Include="AttachDialog.cs">
+ <SubType>Form</SubType>
+ </Compile>
+ <Compile Include="AttachDialog.Designer.cs">
+ <DependentUpon>AttachDialog.cs</DependentUpon>
+ </Compile>
+ <Compile Include="ProcessCategory.cs" />
+ <Compile Include="ProcessDetail.cs" />
+ <Compile Include="Utility.cs" />
+ <Compile Include="Guids.cs" />
+ <Compile Include="Resources.Designer.cs">
+ <AutoGen>True</AutoGen>
+ <DesignTime>True</DesignTime>
+ <DependentUpon>Resources.resx</DependentUpon>
+ </Compile>
+ <Compile Include="GlobalSuppressions.cs" />
+ <Compile Include="ChromeDebugPackage.cs" />
+ <Compile Include="Properties\AssemblyInfo.cs" />
+ <Compile Include="PkgCmdID.cs" />
+ </ItemGroup>
+ <ItemGroup>
+ <EmbeddedResource Include="AttachDialog.resx">
+ <DependentUpon>AttachDialog.cs</DependentUpon>
+ </EmbeddedResource>
+ <EmbeddedResource Include="Resources.resx">
+ <Generator>ResXFileCodeGenerator</Generator>
+ <LastGenOutput>Resources.Designer.cs</LastGenOutput>
+ <SubType>Designer</SubType>
+ </EmbeddedResource>
+ <EmbeddedResource Include="VSPackage.resx">
+ <MergeWithCTO>true</MergeWithCTO>
+ <ManifestResourceName>VSPackage</ManifestResourceName>
+ <SubType>Designer</SubType>
+ </EmbeddedResource>
+ </ItemGroup>
+ <ItemGroup>
+ <None Include="Key.snk" />
+ <Content Include="LICENSE">
+ <CopyToOutputDirectory>Always</CopyToOutputDirectory>
+ <IncludeInVSIX>true</IncludeInVSIX>
+ </Content>
+ <None Include="source.extension.vsixmanifest">
+ <SubType>Designer</SubType>
+ </None>
+ </ItemGroup>
+ <ItemGroup>
+ <VSCTCompile Include="ChromeDebug.vsct">
+ <ResourceName>Menus.ctmenu</ResourceName>
+ <SubType>Designer</SubType>
+ </VSCTCompile>
+ </ItemGroup>
+ <ItemGroup>
+ <None Include="Resources\Images.png" />
+ </ItemGroup>
+ <ItemGroup>
+ <Content Include="Resources\Package.ico" />
+ </ItemGroup>
+ <ItemGroup>
+ <ProjectReference Include="..\LowLevel\LowLevel.csproj">
+ <Project>{998c0725-f123-4ed3-9d44-12c1945f00d1}</Project>
+ <Name>LowLevel</Name>
+ </ProjectReference>
+ </ItemGroup>
+ <PropertyGroup>
+ <UseCodebase>true</UseCodebase>
+ </PropertyGroup>
+ <Import Project="$(MSBuildBinPath)\Microsoft.CSharp.targets" />
+ <Import Project="$(VSToolsPath)\VSSDK\Microsoft.VsSDK.targets" Condition="'$(VSToolsPath)' != ''" />
+ <!-- To modify your build process, add your task inside one of the targets below and uncomment it.
+ Other similar extension points exist, see Microsoft.Common.targets.
+ <Target Name="BeforeBuild">
+ </Target>
+ <Target Name="AfterBuild">
+ </Target>
+ -->
+</Project> \ No newline at end of file
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/ChromeDebug.vsct b/chromium/tools/win/ChromeDebug/ChromeDebug/ChromeDebug.vsct
new file mode 100644
index 00000000000..bfc92463701
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/ChromeDebug.vsct
@@ -0,0 +1,125 @@
+<?xml version="1.0" encoding="utf-8"?>
+<CommandTable xmlns="http://schemas.microsoft.com/VisualStudio/2005-10-18/CommandTable"
+ xmlns:xs="http://www.w3.org/2001/XMLSchema">
+
+ <!-- This is the file that defines the actual layout and type of the commands.
+ It is divided in different sections (e.g. command definition, command
+ placement, ...), with each defining a specific set of properties.
+ See the comment before each section for more details about how to
+ use it. -->
+
+ <!-- The VSCT compiler (the tool that translates this file into the binary
+ format that VisualStudio will consume) has the ability to run a preprocessor
+ on the vsct file; this preprocessor is (usually) the C++ preprocessor, so
+ it is possible to define includes and macros with the same syntax used
+ in C++ files. Using this ability of the compiler here, we include some files
+ defining some of the constants that we will use inside the file. -->
+
+ <!--This is the file that defines the IDs for all the commands exposed by VisualStudio. -->
+ <Extern href="stdidcmd.h"/>
+
+ <!--This header contains the command ids for the menus provided by the shell. -->
+ <Extern href="vsshlids.h"/>
+
+ <!--These headers contain the menu guids for attaching commands to the debug menu. -->
+ <Extern href="VSDbgCmd.h"/>
+ <Extern href="VSDebugGuids.h"/>
+
+
+
+
+ <!--The Commands section is where we the commands, menus and menu groups are defined.
+ This section uses a Guid to identify the package that provides the command defined inside it.
+ -->
+ <Commands package="guidChromeDebugPkg">
+ <!-- Inside this section we have different sub-sections: one for the menus, another
+ for the menu groups, one for the buttons (the actual commands), one for the combos
+ and the last one for the bitmaps used. Each element is identified by a command id that
+ is a unique pair of guid and numeric identifier; the guid part of the identifier is usually
+ called "command set" and is used to group different command inside a logically related
+ group; your package should define its own command set in order to avoid collisions
+ with command ids defined by other packages. -->
+
+
+ <!-- In this section you can define new menu groups. A menu group is a container for
+ other menus or buttons (commands); from a visual point of view you can see the
+ group as the part of a menu contained between two lines. The parent of a group
+ must be a menu. -->
+ <Groups>
+
+ <Group guid="guidChromeDebugCmdSet" id="MyMenuGroup" priority="0x0600">
+ <Parent guid="guidVSDebugGroup" id="IDM_DEBUG_MENU"/>
+ </Group>
+
+
+
+ </Groups>
+
+ <!--Buttons section. -->
+ <!--This section defines the elements the user can interact with, like a menu command or a
+ button or combo box in a toolbar. -->
+ <Buttons>
+ <!--To define a menu group you have to specify its ID, the parent menu and its display
+ priority. The command is visible and enabled by default. If you need to change the
+ visibility, status, etc, you can use the CommandFlag node. You can add more than one
+ CommandFlag node e.g.:
+ <CommandFlag>DefaultInvisible</CommandFlag>
+ <CommandFlag>DynamicVisibility</CommandFlag>
+ If you do not want an image next to your command, remove the Icon node /> -->
+
+ <Button guid="guidChromeDebugCmdSet" id="cmdidAttachToProcess" priority="0x0100"
+ type="Button">
+ <Parent guid="guidChromeDebugCmdSet" id="MyMenuGroup" />
+ <Icon guid="guidImages" id="bmpPic1" />
+ <Strings>
+ <ButtonText>Attach to Chrome</ButtonText>
+ </Strings>
+ </Button>
+
+
+
+ </Buttons>
+
+ <!--The bitmaps section is used to define the bitmaps that are used for the commands.-->
+ <Bitmaps>
+ <!-- The bitmap id is defined in a way that is a little bit different from the others:
+ the declaration starts with a guid for the bitmap strip, then there is the resource id
+ of the bitmap strip containing the bitmaps and then there are the numeric ids of the
+ elements used inside a button definition. An important aspect of this declaration is
+ that the element id must be the actual index (1-based) of the bitmap inside the bitmap
+ strip. -->
+ <Bitmap guid="guidImages" href="Resources\Images.png"
+ usedList="bmpPic1, bmpPic2, bmpPicSearch, bmpPicX, bmpPicArrows"/>
+
+ </Bitmaps>
+
+ </Commands>
+
+
+
+
+
+ <Symbols>
+ <!-- This is the package guid. -->
+ <GuidSymbol name="guidChromeDebugPkg" value="{7de8bbab-82c7-4871-b82c-4d5d44a3979d}" />
+
+ <!-- This is the guid used to group the menu commands together -->
+ <GuidSymbol name="guidChromeDebugCmdSet" value="{6608d840-ce6c-45ab-b856-eb0a0b471ff1}">
+
+ <IDSymbol name="MyMenuGroup" value="0x1020" />
+ <IDSymbol name="cmdidAttachToProcess" value="0x0100" />
+ </GuidSymbol>
+
+
+
+ <GuidSymbol name="guidImages" value="{7142ff8d-aa4e-45a5-a090-2e2ed8c5672b}" >
+ <IDSymbol name="bmpPic1" value="1" />
+ <IDSymbol name="bmpPic2" value="2" />
+ <IDSymbol name="bmpPicSearch" value="3" />
+ <IDSymbol name="bmpPicX" value="4" />
+ <IDSymbol name="bmpPicArrows" value="5" />
+ <IDSymbol name="bmpPicStrikethrough" value="6" />
+ </GuidSymbol>
+ </Symbols>
+
+</CommandTable>
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/ChromeDebugPackage.cs b/chromium/tools/win/ChromeDebug/ChromeDebug/ChromeDebugPackage.cs
new file mode 100644
index 00000000000..2a697b64f55
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/ChromeDebugPackage.cs
@@ -0,0 +1,107 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+using System;
+using System.Diagnostics;
+using System.Globalization;
+using System.Runtime.InteropServices;
+using System.ComponentModel.Design;
+using Microsoft.Win32;
+using Microsoft.VisualStudio;
+using Microsoft.VisualStudio.Shell.Interop;
+using Microsoft.VisualStudio.OLE.Interop;
+using Microsoft.VisualStudio.Shell;
+using System.Windows.Forms;
+
+namespace ChromeDebug {
+ /// <summary>
+ /// This is the class that implements the package exposed by this assembly.
+ ///
+ /// The minimum requirement for a class to be considered a valid package for Visual Studio
+ /// is to implement the IVsPackage interface and register itself with the shell.
+ /// This package uses the helper classes defined inside the Managed Package Framework (MPF)
+ /// to do it: it derives from the Package class that provides the implementation of the
+ /// IVsPackage interface and uses the registration attributes defined in the framework to
+ /// register itself and its components with the shell.
+ /// </summary>
+ // This attribute tells the PkgDef creation utility (CreatePkgDef.exe) that this class is
+ // a package.
+ [PackageRegistration(UseManagedResourcesOnly = true)]
+ // This attribute is used to register the information needed to show this package
+ // in the Help/About dialog of Visual Studio.
+ [InstalledProductRegistration("#110", "#112", "1.0", IconResourceID = 400)]
+ // This attribute is needed to let the shell know that this package exposes some menus.
+ [ProvideMenuResource("Menus.ctmenu", 1)]
+ [Guid(GuidList.guidChromeDebugPkgString)]
+ public sealed class ChromeDebugPackage : Package {
+ /// <summary>
+ /// Default constructor of the package.
+ /// Inside this method you can place any initialization code that does not require
+ /// any Visual Studio service because at this point the package object is created but
+ /// not sited yet inside Visual Studio environment. The place to do all the other
+ /// initialization is the Initialize method.
+ /// </summary>
+ public ChromeDebugPackage() {
+ Debug.WriteLine(string.Format(CultureInfo.CurrentCulture, "Entering constructor for: {0}",
+ this.ToString()));
+ }
+
+
+
+ /////////////////////////////////////////////////////////////////////////////
+ // Overridden Package Implementation
+ #region Package Members
+
+ /// <summary>
+ /// Initialization of the package; this method is called right after the package is sited, so this is the place
+ /// where you can put all the initialization code that rely on services provided by VisualStudio.
+ /// </summary>
+ protected override void Initialize() {
+ Debug.WriteLine(string.Format(CultureInfo.CurrentCulture, "Entering Initialize() of: {0}", this.ToString()));
+ base.Initialize();
+
+ // Add our command handlers for menu (commands must exist in the .vsct file)
+ OleMenuCommandService mcs = GetService(typeof(IMenuCommandService)) as OleMenuCommandService;
+ if (null != mcs) {
+ // Create the command for the menu item.
+ CommandID menuCommandID = new CommandID(GuidList.guidChromeDebugCmdSet, (int)PkgCmdIDList.cmdidAttachToProcess);
+ MenuCommand menuItem = new MenuCommand(MenuItemCallback, menuCommandID);
+ mcs.AddCommand(menuItem);
+ }
+ }
+ #endregion
+
+ /// <summary>
+ /// This function is the callback used to execute a command when the a menu item is clicked.
+ /// See the Initialize method to see how the menu item is associated to this function using
+ /// the OleMenuCommandService service and the MenuCommand class.
+ /// </summary>
+ private void MenuItemCallback(object sender, EventArgs e) {
+ // Show a Message Box to prove we were here
+ EnvDTE.DTE dte = (EnvDTE.DTE)GetService(typeof(EnvDTE.DTE));
+
+ IVsUIShell uiShell = (IVsUIShell)GetService(typeof(SVsUIShell));
+ Guid clsid = Guid.Empty;
+ IntPtr parentHwnd = IntPtr.Zero;
+ uiShell.GetDialogOwnerHwnd(out parentHwnd);
+
+ NativeWindow parentShim = new NativeWindow();
+ parentShim.AssignHandle(parentHwnd);
+ AttachDialog dialog = new AttachDialog();
+ DialogResult result = dialog.ShowDialog(parentShim);
+ if (result == DialogResult.OK) {
+ foreach (int selected_id in dialog.SelectedItems) {
+ foreach (EnvDTE90a.Process4 p in dte.Debugger.LocalProcesses) {
+ System.Diagnostics.Debug.WriteLine("Found process {0}", p.ProcessID);
+ if (p.ProcessID != selected_id)
+ continue;
+ p.Attach();
+ System.Diagnostics.Debug.WriteLine("Attaching to process successful.");
+ break;
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/GlobalSuppressions.cs b/chromium/tools/win/ChromeDebug/ChromeDebug/GlobalSuppressions.cs
new file mode 100644
index 00000000000..f9678628ccc
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/GlobalSuppressions.cs
@@ -0,0 +1,16 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is used by Code Analysis to maintain SuppressMessage
+// attributes that are applied to this project. Project-level
+// suppressions either have no target or are given a specific target
+// and scoped to a namespace, type, member, etc.
+//
+// To add a suppression to this file, right-click the message in the
+// Error List, point to "Suppress Message(s)", and click "In Project
+// Suppression File". You do not need to add suppressions to this
+// file manually.
+
+[assembly: System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Design",
+ "CA1017:MarkAssembliesWithComVisible")]
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/Guids.cs b/chromium/tools/win/ChromeDebug/ChromeDebug/Guids.cs
new file mode 100644
index 00000000000..7a7a660dcac
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/Guids.cs
@@ -0,0 +1,16 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Guids.cs
+// MUST match guids.h
+using System;
+
+namespace ChromeDebug {
+ static class GuidList {
+ public const string guidChromeDebugPkgString = "7de8bbab-82c7-4871-b82c-4d5d44a3979d";
+ public const string guidChromeDebugCmdSetString = "6608d840-ce6c-45ab-b856-eb0a0b471ff1";
+
+ public static readonly Guid guidChromeDebugCmdSet = new Guid(guidChromeDebugCmdSetString);
+ };
+} \ No newline at end of file
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/Key.snk b/chromium/tools/win/ChromeDebug/ChromeDebug/Key.snk
new file mode 100644
index 00000000000..332ff224dcc
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/Key.snk
Binary files differ
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/LICENSE b/chromium/tools/win/ChromeDebug/ChromeDebug/LICENSE
new file mode 100644
index 00000000000..8fb3cc26f48
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/LICENSE
@@ -0,0 +1,27 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/PkgCmdID.cs b/chromium/tools/win/ChromeDebug/ChromeDebug/PkgCmdID.cs
new file mode 100644
index 00000000000..816b804a646
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/PkgCmdID.cs
@@ -0,0 +1,13 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// PkgCmdID.cs
+// MUST match PkgCmdID.h
+using System;
+
+namespace ChromeDebug {
+ static class PkgCmdIDList {
+ public const uint cmdidAttachToProcess = 0x100;
+ };
+} \ No newline at end of file
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/ProcessCategory.cs b/chromium/tools/win/ChromeDebug/ChromeDebug/ProcessCategory.cs
new file mode 100644
index 00000000000..ffcffddfa7f
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/ProcessCategory.cs
@@ -0,0 +1,37 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace ChromeDebug {
+ internal enum ProcessCategory {
+ Browser,
+ Renderer,
+ Gpu,
+ Plugin,
+ DelegateExecute,
+ MetroViewer,
+ Service,
+ Other
+ }
+
+ // Defines an extension method for the ProcessCategory enum which converts the enum value into
+ // the group title.
+ internal static class ProcessCategoryExtensions {
+ public static string ToGroupTitle(this ProcessCategory category) {
+ switch (category) {
+ case ProcessCategory.DelegateExecute:
+ return "Delegate Execute";
+ case ProcessCategory.MetroViewer:
+ return "Metro Viewer";
+ default:
+ return category.ToString();
+ }
+ }
+ }
+}
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/ProcessDetail.cs b/chromium/tools/win/ChromeDebug/ChromeDebug/ProcessDetail.cs
new file mode 100644
index 00000000000..973ea0cc26f
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/ProcessDetail.cs
@@ -0,0 +1,286 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+using Microsoft.Win32.SafeHandles;
+using System;
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.IO;
+using System.Linq;
+using System.Text;
+using System.Threading.Tasks;
+
+using ChromeDebug.LowLevel;
+using System.Runtime.InteropServices;
+using System.Drawing;
+
+namespace ChromeDebug {
+ internal class ProcessDetail : IDisposable {
+ public ProcessDetail(int pid) {
+ // Initialize everything to null in case something fails.
+ this.processId = pid;
+ this.processHandleFlags = LowLevelTypes.ProcessAccessFlags.NONE;
+ this.cachedProcessBasicInfo = null;
+ this.machineTypeIsLoaded = false;
+ this.machineType = LowLevelTypes.MachineType.UNKNOWN;
+ this.cachedPeb = null;
+ this.cachedProcessParams = null;
+ this.cachedCommandLine = null;
+ this.processHandle = IntPtr.Zero;
+
+ OpenAndCacheProcessHandle();
+ }
+
+ // Returns the machine type (x86, x64, etc) of this process. Uses lazy evaluation and caches
+ // the result.
+ public LowLevelTypes.MachineType MachineType {
+ get {
+ if (machineTypeIsLoaded)
+ return machineType;
+ if (!CanQueryProcessInformation)
+ return LowLevelTypes.MachineType.UNKNOWN;
+
+ CacheMachineType();
+ return machineType;
+ }
+ }
+
+ public string NativeProcessImagePath {
+ get {
+ if (nativeProcessImagePath == null) {
+ nativeProcessImagePath = QueryProcessImageName(
+ LowLevelTypes.ProcessQueryImageNameMode.NATIVE_SYSTEM_FORMAT);
+ }
+ return nativeProcessImagePath;
+ }
+ }
+
+ public string Win32ProcessImagePath {
+ get {
+ if (win32ProcessImagePath == null) {
+ win32ProcessImagePath = QueryProcessImageName(
+ LowLevelTypes.ProcessQueryImageNameMode.WIN32_FORMAT);
+ }
+ return win32ProcessImagePath;
+ }
+ }
+
+ public Icon SmallIcon {
+ get {
+ LowLevel.LowLevelTypes.SHFILEINFO info = new LowLevelTypes.SHFILEINFO(true);
+ LowLevel.LowLevelTypes.SHGFI flags = LowLevel.LowLevelTypes.SHGFI.Icon
+ | LowLevelTypes.SHGFI.SmallIcon
+ | LowLevelTypes.SHGFI.OpenIcon
+ | LowLevelTypes.SHGFI.UseFileAttributes;
+ int cbFileInfo = Marshal.SizeOf(info);
+ LowLevel.NativeMethods.SHGetFileInfo(Win32ProcessImagePath,
+ 256,
+ ref info,
+ (uint)cbFileInfo,
+ (uint)flags);
+ return Icon.FromHandle(info.hIcon);
+ }
+ }
+
+ // Returns the command line that this process was launched with. Uses lazy evaluation and
+ // caches the result. Reads the command line from the PEB of the running process.
+ public string CommandLine {
+ get {
+ if (!CanReadPeb)
+ throw new InvalidOperationException();
+ CacheProcessInformation();
+ CachePeb();
+ CacheProcessParams();
+ CacheCommandLine();
+ return cachedCommandLine;
+ }
+ }
+
+ // Determines if we have permission to read the process's PEB.
+ public bool CanReadPeb {
+ get {
+ LowLevelTypes.ProcessAccessFlags required_flags =
+ LowLevelTypes.ProcessAccessFlags.VM_READ
+ | LowLevelTypes.ProcessAccessFlags.QUERY_INFORMATION;
+
+ // In order to read the PEB, we must have *both* of these flags.
+ if ((processHandleFlags & required_flags) != required_flags)
+ return false;
+
+ // If we're on a 64-bit OS, in a 32-bit process, and the target process is not 32-bit,
+ // we can't read its PEB.
+ if (Environment.Is64BitOperatingSystem && !Environment.Is64BitProcess
+ && (MachineType != LowLevelTypes.MachineType.X86))
+ return false;
+
+ return true;
+ }
+ }
+
+ // If we can't read the process's PEB, we may still be able to get other kinds of information
+ // from the process. This flag determines if we can get lesser information.
+ private bool CanQueryProcessInformation {
+ get {
+ LowLevelTypes.ProcessAccessFlags required_flags =
+ LowLevelTypes.ProcessAccessFlags.QUERY_LIMITED_INFORMATION
+ | LowLevelTypes.ProcessAccessFlags.QUERY_INFORMATION;
+
+ // In order to query the process, we need *either* of these flags.
+ return (processHandleFlags & required_flags) != LowLevelTypes.ProcessAccessFlags.NONE;
+ }
+ }
+
+ private string QueryProcessImageName(LowLevelTypes.ProcessQueryImageNameMode mode) {
+ StringBuilder moduleBuffer = new StringBuilder(1024);
+ int size = moduleBuffer.Capacity;
+ NativeMethods.QueryFullProcessImageName(
+ processHandle,
+ mode,
+ moduleBuffer,
+ ref size);
+ if (mode == LowLevelTypes.ProcessQueryImageNameMode.NATIVE_SYSTEM_FORMAT)
+ moduleBuffer.Insert(0, "\\\\?\\GLOBALROOT");
+ return moduleBuffer.ToString();
+ }
+
+ // Loads the top-level structure of the process's information block and caches it.
+ private void CacheProcessInformation() {
+ System.Diagnostics.Debug.Assert(CanReadPeb);
+
+ // Fetch the process info and set the fields.
+ LowLevelTypes.PROCESS_BASIC_INFORMATION temp = new LowLevelTypes.PROCESS_BASIC_INFORMATION();
+ int size;
+ LowLevelTypes.NTSTATUS status = NativeMethods.NtQueryInformationProcess(
+ processHandle,
+ LowLevelTypes.PROCESSINFOCLASS.PROCESS_BASIC_INFORMATION,
+ ref temp,
+ Utility.UnmanagedStructSize<LowLevelTypes.PROCESS_BASIC_INFORMATION>(),
+ out size);
+
+ if (status != LowLevelTypes.NTSTATUS.SUCCESS) {
+ throw new Win32Exception();
+ }
+
+ cachedProcessBasicInfo = temp;
+ }
+
+ // Follows a pointer from the PROCESS_BASIC_INFORMATION structure in the target process's
+ // address space to read the PEB.
+ private void CachePeb() {
+ System.Diagnostics.Debug.Assert(CanReadPeb);
+
+ if (cachedPeb == null) {
+ cachedPeb = Utility.ReadUnmanagedStructFromProcess<LowLevelTypes.PEB>(
+ processHandle,
+ cachedProcessBasicInfo.Value.PebBaseAddress);
+ }
+ }
+
+ // Follows a pointer from the PEB structure in the target process's address space to read the
+ // RTL_USER_PROCESS_PARAMETERS structure.
+ private void CacheProcessParams() {
+ System.Diagnostics.Debug.Assert(CanReadPeb);
+
+ if (cachedProcessParams == null) {
+ cachedProcessParams =
+ Utility.ReadUnmanagedStructFromProcess<LowLevelTypes.RTL_USER_PROCESS_PARAMETERS>(
+ processHandle, cachedPeb.Value.ProcessParameters);
+ }
+ }
+
+ private void CacheCommandLine() {
+ System.Diagnostics.Debug.Assert(CanReadPeb);
+
+ if (cachedCommandLine == null) {
+ cachedCommandLine = Utility.ReadStringUniFromProcess(
+ processHandle,
+ cachedProcessParams.Value.CommandLine.Buffer,
+ cachedProcessParams.Value.CommandLine.Length / 2);
+ }
+ }
+
+ private void CacheMachineType() {
+ System.Diagnostics.Debug.Assert(CanQueryProcessInformation);
+
+ // If our extension is running in a 32-bit process (which it is), then attempts to access
+ // files in C:\windows\system (and a few other files) will redirect to C:\Windows\SysWOW64
+ // and we will mistakenly think that the image file is a 32-bit image. The way around this
+ // is to use a native system format path, of the form:
+ // \\?\GLOBALROOT\Device\HarddiskVolume0\Windows\System\foo.dat
+ // NativeProcessImagePath gives us the full process image path in the desired format.
+ string path = NativeProcessImagePath;
+
+ // Open the PE File as a binary file, and parse just enough information to determine the
+ // machine type.
+ //http://www.microsoft.com/whdc/system/platform/firmware/PECOFF.mspx
+ using (SafeFileHandle safeHandle = NativeMethods.CreateFile(
+ path,
+ LowLevelTypes.FileAccessFlags.GENERIC_READ,
+ LowLevelTypes.FileShareFlags.SHARE_READ,
+ IntPtr.Zero,
+ LowLevelTypes.FileCreationDisposition.OPEN_EXISTING,
+ LowLevelTypes.FileFlagsAndAttributes.NORMAL,
+ IntPtr.Zero)) {
+ FileStream fs = new FileStream(safeHandle, FileAccess.Read);
+ using (BinaryReader br = new BinaryReader(fs)) {
+ fs.Seek(0x3c, SeekOrigin.Begin);
+ Int32 peOffset = br.ReadInt32();
+ fs.Seek(peOffset, SeekOrigin.Begin);
+ UInt32 peHead = br.ReadUInt32();
+ if (peHead != 0x00004550) // "PE\0\0", little-endian
+ throw new Exception("Can't find PE header");
+ machineType = (LowLevelTypes.MachineType)br.ReadUInt16();
+ machineTypeIsLoaded = true;
+ }
+ }
+ }
+
+ private void OpenAndCacheProcessHandle() {
+ // Try to open a handle to the process with the highest level of privilege, but if we can't
+ // do that then fallback to requesting access with a lower privilege level.
+ processHandleFlags = LowLevelTypes.ProcessAccessFlags.QUERY_INFORMATION
+ | LowLevelTypes.ProcessAccessFlags.VM_READ;
+ processHandle = NativeMethods.OpenProcess(processHandleFlags, false, processId);
+ if (processHandle == IntPtr.Zero) {
+ processHandleFlags = LowLevelTypes.ProcessAccessFlags.QUERY_LIMITED_INFORMATION;
+ processHandle = NativeMethods.OpenProcess(processHandleFlags, false, processId);
+ if (processHandle == IntPtr.Zero) {
+ processHandleFlags = LowLevelTypes.ProcessAccessFlags.NONE;
+ throw new Win32Exception();
+ }
+ }
+ }
+
+ // An open handle to the process, along with the set of access flags that the handle was
+ // open with.
+ private int processId;
+ private IntPtr processHandle;
+ private LowLevelTypes.ProcessAccessFlags processHandleFlags;
+ private string nativeProcessImagePath;
+ private string win32ProcessImagePath;
+
+ // The machine type is read by parsing the PE image file of the running process, so we cache
+ // its value since the operation expensive.
+ private bool machineTypeIsLoaded;
+ private LowLevelTypes.MachineType machineType;
+
+ // The following fields exist ultimately so that we can access the command line. However,
+ // each field must be read separately through a pointer into another process's address
+ // space so the access is expensive, hence we cache the values.
+ private Nullable<LowLevelTypes.PROCESS_BASIC_INFORMATION> cachedProcessBasicInfo;
+ private Nullable<LowLevelTypes.PEB> cachedPeb;
+ private Nullable<LowLevelTypes.RTL_USER_PROCESS_PARAMETERS> cachedProcessParams;
+ private string cachedCommandLine;
+
+ ~ProcessDetail() {
+ Dispose();
+ }
+
+ public void Dispose() {
+ if (processHandle != IntPtr.Zero)
+ NativeMethods.CloseHandle(processHandle);
+ processHandle = IntPtr.Zero;
+ }
+ }
+}
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/Properties/AssemblyInfo.cs b/chromium/tools/win/ChromeDebug/ChromeDebug/Properties/AssemblyInfo.cs
new file mode 100644
index 00000000000..29c82030494
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/Properties/AssemblyInfo.cs
@@ -0,0 +1,40 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+using System;
+using System.Reflection;
+using System.Resources;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("ChromeDebug")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("The Chromium Authors")]
+[assembly: AssemblyProduct("ChromeDebug")]
+[assembly: AssemblyCopyright("")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+[assembly: ComVisible(false)]
+[assembly: CLSCompliant(false)]
+[assembly: NeutralResourcesLanguage("en-US")]
+
+// Version information for an assembly consists of the following four values:
+//
+// Major Version
+// Minor Version
+// Build Number
+// Revision
+//
+// You can specify all the values or you can default the Revision and Build Numbers
+// by using the '*' as shown below:
+
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
+
+
+
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/Resources.Designer.cs b/chromium/tools/win/ChromeDebug/ChromeDebug/Resources.Designer.cs
new file mode 100644
index 00000000000..529a0a74a5b
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/Resources.Designer.cs
@@ -0,0 +1,63 @@
+//------------------------------------------------------------------------------
+// <auto-generated>
+// This code was generated by a tool.
+// Runtime Version:4.0.30319.33439
+//
+// Changes to this file may cause incorrect behavior and will be lost if
+// the code is regenerated.
+// </auto-generated>
+//------------------------------------------------------------------------------
+
+namespace ChromeDebug {
+ using System;
+
+
+ /// <summary>
+ /// A strongly-typed resource class, for looking up localized strings, etc.
+ /// </summary>
+ // This class was auto-generated by the StronglyTypedResourceBuilder
+ // class via a tool like ResGen or Visual Studio.
+ // To add or remove a member, edit your .ResX file then rerun ResGen
+ // with the /str option, or rebuild your VS project.
+ [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
+ [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
+ [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
+ internal class Resources {
+
+ private static global::System.Resources.ResourceManager resourceMan;
+
+ private static global::System.Globalization.CultureInfo resourceCulture;
+
+ [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
+ internal Resources() {
+ }
+
+ /// <summary>
+ /// Returns the cached ResourceManager instance used by this class.
+ /// </summary>
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Resources.ResourceManager ResourceManager {
+ get {
+ if (object.ReferenceEquals(resourceMan, null)) {
+ global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("ChromeDebug.Resources", typeof(Resources).Assembly);
+ resourceMan = temp;
+ }
+ return resourceMan;
+ }
+ }
+
+ /// <summary>
+ /// Overrides the current thread's CurrentUICulture property for all
+ /// resource lookups using this strongly typed resource class.
+ /// </summary>
+ [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
+ internal static global::System.Globalization.CultureInfo Culture {
+ get {
+ return resourceCulture;
+ }
+ set {
+ resourceCulture = value;
+ }
+ }
+ }
+}
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/Resources.resx b/chromium/tools/win/ChromeDebug/ChromeDebug/Resources.resx
new file mode 100644
index 00000000000..9fa88263bb4
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/Resources.resx
@@ -0,0 +1,140 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ VS SDK Notes: This resx file contains the resources that will be consumed directly by your
+ package. For example, if you chose to create a tool window, there is a resource with ID
+ 'CanNotCreateWindow'. This is used in VsPkg.cs to determine the string to show the user if
+ there is an error when attempting to create the tool window.
+
+ Resources that are accessed directly from your package *by Visual Studio* are stored in the
+ VSPackage.resx file.
+-->
+<root>
+ <!--
+ Microsoft ResX Schema
+
+ Version 2.0
+
+ The primary goals of this format is to allow a simple XML format
+ that is mostly human readable. The generation and parsing of the
+ various data types are done through the TypeConverter classes
+ associated with the data types.
+
+ Example:
+
+ ... ado.net/XML headers & schema ...
+ <resheader name="resmimetype">text/microsoft-resx</resheader>
+ <resheader name="version">2.0</resheader>
+ <resheader name="reader">
+ System.Resources.ResXResourceReader, System.Windows.Forms, ...
+ </resheader>
+ <resheader name="writer">
+ System.Resources.ResXResourceWriter, System.Windows.Forms, ...
+ </resheader>
+ <data name="Name1">
+ <value>this is my long string</value>
+ <comment>this is a comment</comment>
+ </data>
+ <data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
+ <data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
+ <value>[base64 mime encoded serialized .NET Framework object]</value>
+ </data>
+ <data name="Icon1" type="System.Drawing.Icon, System.Drawing"
+ mimetype="application/x-microsoft.net.object.bytearray.base64">
+ <value>
+ [base64 mime encoded string representing a byte array form of the .NET Framework object]
+ </value>
+ <comment>This is a comment</comment>
+ </data>
+
+ There are any number of "resheader" rows that contain simple
+ name/value pairs.
+
+ Each data row contains a name, and value. The row also contains a
+ type or mimetype. Type corresponds to a .NET class that support
+ text/value conversion through the TypeConverter architecture.
+ Classes that don't support this are serialized and stored with the
+ mimetype set.
+
+ The mimetype is used for serialized objects, and tells the
+ ResXResourceReader how to depersist the object. This is currently not
+ extensible. For a given mimetype the value must be set accordingly:
+
+ Note - application/x-microsoft.net.object.binary.base64 is the format
+ that the ResXResourceWriter will generate, however the reader can
+ read any of the formats listed below.
+
+ mimetype: application/x-microsoft.net.object.binary.base64
+ value : The object must be serialized with
+ : System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.soap.base64
+ value : The object must be serialized with
+ : System.Runtime.Serialization.Formatters.Soap.SoapFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.bytearray.base64
+ value : The object must be serialized into a byte array
+ : using a System.ComponentModel.TypeConverter
+ : and then encoded with base64 encoding.
+ -->
+ <xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema"
+ xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
+ <xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
+ <xsd:element name="root" msdata:IsDataSet="true">
+ <xsd:complexType>
+ <xsd:choice maxOccurs="unbounded">
+ <xsd:element name="metadata">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" />
+ </xsd:sequence>
+ <xsd:attribute name="name" use="required" type="xsd:string" />
+ <xsd:attribute name="type" type="xsd:string" />
+ <xsd:attribute name="mimetype" type="xsd:string" />
+ <xsd:attribute ref="xml:space" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="assembly">
+ <xsd:complexType>
+ <xsd:attribute name="alias" type="xsd:string" />
+ <xsd:attribute name="name" type="xsd:string" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="data">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ <xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
+ <xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
+ <xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
+ <xsd:attribute ref="xml:space" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="resheader">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" />
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:choice>
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:schema>
+ <resheader name="resmimetype">
+ <value>text/microsoft-resx</value>
+ </resheader>
+ <resheader name="version">
+ <value>2.0</value>
+ </resheader>
+ <resheader name="reader">
+ <value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+ <resheader name="writer">
+ <value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+</root> \ No newline at end of file
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/Resources/Images.png b/chromium/tools/win/ChromeDebug/ChromeDebug/Resources/Images.png
new file mode 100644
index 00000000000..51fe0d1577e
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/Resources/Images.png
Binary files differ
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/Resources/Package.ico b/chromium/tools/win/ChromeDebug/ChromeDebug/Resources/Package.ico
new file mode 100644
index 00000000000..449296f495a
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/Resources/Package.ico
Binary files differ
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/Utility.cs b/chromium/tools/win/ChromeDebug/ChromeDebug/Utility.cs
new file mode 100644
index 00000000000..bdba408863e
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/Utility.cs
@@ -0,0 +1,85 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+using System;
+using System.Collections.Generic;
+using System.ComponentModel;
+using System.Linq;
+using System.Runtime.InteropServices;
+using System.Text;
+using System.Threading.Tasks;
+
+using ChromeDebug.LowLevel;
+
+namespace ChromeDebug {
+ internal static class Utility {
+ public static string[] SplitArgs(string unsplitArgumentLine) {
+ if (unsplitArgumentLine == null)
+ return new string[0];
+
+ int numberOfArgs;
+ IntPtr ptrToSplitArgs;
+ string[] splitArgs;
+
+ ptrToSplitArgs = NativeMethods.CommandLineToArgvW(unsplitArgumentLine, out numberOfArgs);
+
+ // CommandLineToArgvW returns NULL upon failure.
+ if (ptrToSplitArgs == IntPtr.Zero)
+ throw new ArgumentException("Unable to split argument.", new Win32Exception());
+
+ // Make sure the memory ptrToSplitArgs to is freed, even upon failure.
+ try {
+ splitArgs = new string[numberOfArgs];
+
+ // ptrToSplitArgs is an array of pointers to null terminated Unicode strings.
+ // Copy each of these strings into our split argument array.
+ for (int i = 0; i < numberOfArgs; i++)
+ splitArgs[i] = Marshal.PtrToStringUni(
+ Marshal.ReadIntPtr(ptrToSplitArgs, i * IntPtr.Size));
+
+ return splitArgs;
+ }
+ finally {
+ // Free memory obtained by CommandLineToArgW.
+ NativeMethods.LocalFree(ptrToSplitArgs);
+ }
+ }
+
+ public static T ReadUnmanagedStructFromProcess<T>(IntPtr processHandle,
+ IntPtr addressInProcess) {
+ int bytesRead;
+ int bytesToRead = Marshal.SizeOf(typeof(T));
+ IntPtr buffer = Marshal.AllocHGlobal(bytesToRead);
+ if (!NativeMethods.ReadProcessMemory(processHandle, addressInProcess, buffer, bytesToRead,
+ out bytesRead))
+ throw new Win32Exception();
+ T result = (T)Marshal.PtrToStructure(buffer, typeof(T));
+ Marshal.FreeHGlobal(buffer);
+ return result;
+ }
+
+ public static string ReadStringUniFromProcess(IntPtr processHandle,
+ IntPtr addressInProcess,
+ int NumChars) {
+ int bytesRead;
+ IntPtr outBuffer = Marshal.AllocHGlobal(NumChars * 2);
+
+ bool bresult = NativeMethods.ReadProcessMemory(processHandle,
+ addressInProcess,
+ outBuffer,
+ NumChars * 2,
+ out bytesRead);
+ if (!bresult)
+ throw new Win32Exception();
+
+ string result = Marshal.PtrToStringUni(outBuffer, bytesRead / 2);
+ Marshal.FreeHGlobal(outBuffer);
+ return result;
+ }
+
+ public static int UnmanagedStructSize<T>() {
+ return Marshal.SizeOf(typeof(T));
+ }
+ }
+}
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/VSPackage.resx b/chromium/tools/win/ChromeDebug/ChromeDebug/VSPackage.resx
new file mode 100644
index 00000000000..36cd9ee57be
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/VSPackage.resx
@@ -0,0 +1,151 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ VS SDK Notes: This resx file contains the resources that will be consumed from your package by
+ Visual Studio. For example, Visual Studio will attempt to load resource '400' from this
+ resource stream when it needs to load your package's icon. Because Visual Studio will always
+ look in the VSPackage.resources stream first for resources it needs, you should put additional
+ resources that Visual Studio will load directly into this resx file.
+
+ Resources that you would like to access directly from your package in a strong-typed fashion
+ should be stored in Resources.resx or another resx file.
+-->
+<root>
+ <!--
+ Microsoft ResX Schema
+
+ Version 2.0
+
+ The primary goals of this format is to allow a simple XML format
+ that is mostly human readable. The generation and parsing of the
+ various data types are done through the TypeConverter classes
+ associated with the data types.
+
+ Example:
+
+ ... ado.net/XML headers & schema ...
+ <resheader name="resmimetype">text/microsoft-resx</resheader>
+ <resheader name="version">2.0</resheader>
+ <resheader name="reader">
+ System.Resources.ResXResourceReader, System.Windows.Forms, ...
+ </resheader>
+ <resheader name="writer">
+ System.Resources.ResXResourceWriter, System.Windows.Forms, ...
+ </resheader>
+ <data name="Name1">
+ <value>this is my long string</value>
+ <comment>this is a comment</comment>
+ </data>
+ <data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
+ <data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
+ <value>[base64 mime encoded serialized .NET Framework object]</value>
+ </data>
+ <data name="Icon1" type="System.Drawing.Icon, System.Drawing"
+ mimetype="application/x-microsoft.net.object.bytearray.base64">
+ <value>
+ [base64 mime encoded string representing a byte array form of the .NET Framework object]
+ </value>
+ <comment>This is a comment</comment>
+ </data>
+
+ There are any number of "resheader" rows that contain simple
+ name/value pairs.
+
+ Each data row contains a name, and value. The row also contains a
+ type or mimetype. Type corresponds to a .NET class that support
+ text/value conversion through the TypeConverter architecture.
+ Classes that don't support this are serialized and stored with the
+ mimetype set.
+
+ The mimetype is used for serialized objects, and tells the
+ ResXResourceReader how to depersist the object. This is currently not
+ extensible. For a given mimetype the value must be set accordingly:
+
+ Note - application/x-microsoft.net.object.binary.base64 is the format
+ that the ResXResourceWriter will generate, however the reader can
+ read any of the formats listed below.
+
+ mimetype: application/x-microsoft.net.object.binary.base64
+ value : The object must be serialized with
+ : System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.soap.base64
+ value : The object must be serialized with
+ : System.Runtime.Serialization.Formatters.Soap.SoapFormatter
+ : and then encoded with base64 encoding.
+
+ mimetype: application/x-microsoft.net.object.bytearray.base64
+ value : The object must be serialized into a byte array
+ : using a System.ComponentModel.TypeConverter
+ : and then encoded with base64 encoding.
+ -->
+ <xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema"
+ xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
+ <xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
+ <xsd:element name="root" msdata:IsDataSet="true">
+ <xsd:complexType>
+ <xsd:choice maxOccurs="unbounded">
+ <xsd:element name="metadata">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" />
+ </xsd:sequence>
+ <xsd:attribute name="name" use="required" type="xsd:string" />
+ <xsd:attribute name="type" type="xsd:string" />
+ <xsd:attribute name="mimetype" type="xsd:string" />
+ <xsd:attribute ref="xml:space" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="assembly">
+ <xsd:complexType>
+ <xsd:attribute name="alias" type="xsd:string" />
+ <xsd:attribute name="name" type="xsd:string" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="data">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ <xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
+ <xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
+ <xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
+ <xsd:attribute ref="xml:space" />
+ </xsd:complexType>
+ </xsd:element>
+ <xsd:element name="resheader">
+ <xsd:complexType>
+ <xsd:sequence>
+ <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
+ </xsd:sequence>
+ <xsd:attribute name="name" type="xsd:string" use="required" />
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:choice>
+ </xsd:complexType>
+ </xsd:element>
+ </xsd:schema>
+ <resheader name="resmimetype">
+ <value>text/microsoft-resx</value>
+ </resheader>
+ <resheader name="version">
+ <value>2.0</value>
+ </resheader>
+ <resheader name="reader">
+ <value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+ <resheader name="writer">
+ <value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
+ </resheader>
+ <assembly alias="System.Windows.Forms" name="System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" />
+ <data name="110" xml:space="preserve">
+ <value>ChromeDebug</value>
+ </data>
+ <data name="112" xml:space="preserve">
+ <value>Advanced Debugging Features</value>
+ </data>
+ <data name="400" type="System.Resources.ResXFileRef, System.Windows.Forms">
+ <value>Resources\Package.ico;System.Drawing.Icon, System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a</value>
+ </data>
+</root> \ No newline at end of file
diff --git a/chromium/tools/win/ChromeDebug/ChromeDebug/source.extension.vsixmanifest b/chromium/tools/win/ChromeDebug/ChromeDebug/source.extension.vsixmanifest
new file mode 100644
index 00000000000..c0814aa3963
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/ChromeDebug/source.extension.vsixmanifest
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="utf-8"?>
+<PackageManifest Version="2.0.0" xmlns="http://schemas.microsoft.com/developer/vsx-schema/2011"
+ xmlns:d="http://schemas.microsoft.com/developer/vsx-schema-design/2011">
+ <Metadata>
+ <Identity Id="7de8bbab-82c7-4871-b82c-4d5d44a3979d" Version="1.0" Language="en-US"
+ Publisher="The Chromium Authors" />
+ <DisplayName>ChromeDebug</DisplayName>
+ <Description xml:space="preserve">Debugging Features for Chromium</Description>
+ <License>LICENSE</License>
+ </Metadata>
+ <Installation InstalledByMsi="false">
+ <InstallationTarget Id="Microsoft.VisualStudio.Pro" Version="[11.0,12.0]" />
+ </Installation>
+ <Dependencies>
+ <Dependency Id="Microsoft.Framework.NDP" DisplayName="Microsoft .NET Framework"
+ d:Source="Manual" Version="4.5" />
+ <Dependency Id="Microsoft.VisualStudio.MPF.11.0" DisplayName="Visual Studio MPF 11.0"
+ d:Source="Installed" Version="11.0" />
+ </Dependencies>
+ <Assets>
+ <Asset Type="Microsoft.VisualStudio.VsPackage" d:Source="Project"
+ d:ProjectName="%CurrentProject%" Path="|%CurrentProject%;PkgdefProjectOutputGroup|" />
+ </Assets>
+</PackageManifest>
diff --git a/chromium/tools/win/ChromeDebug/LowLevel/Key.snk b/chromium/tools/win/ChromeDebug/LowLevel/Key.snk
new file mode 100644
index 00000000000..332ff224dcc
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/LowLevel/Key.snk
Binary files differ
diff --git a/chromium/tools/win/ChromeDebug/LowLevel/LowLevel.csproj b/chromium/tools/win/ChromeDebug/LowLevel/LowLevel.csproj
new file mode 100644
index 00000000000..4649f33c681
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/LowLevel/LowLevel.csproj
@@ -0,0 +1,64 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" DefaultTargets="Build" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <Import Project="$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props" Condition="Exists('$(MSBuildExtensionsPath)\$(MSBuildToolsVersion)\Microsoft.Common.props')" />
+ <PropertyGroup>
+ <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
+ <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform>
+ <ProjectGuid>{998C0725-F123-4ED3-9D44-12C1945F00D1}</ProjectGuid>
+ <OutputType>Library</OutputType>
+ <AppDesignerFolder>Properties</AppDesignerFolder>
+ <RootNamespace>LowLevel</RootNamespace>
+ <AssemblyName>LowLevel</AssemblyName>
+ <TargetFrameworkVersion>v4.5</TargetFrameworkVersion>
+ <FileAlignment>512</FileAlignment>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
+ <DebugSymbols>true</DebugSymbols>
+ <DebugType>full</DebugType>
+ <Optimize>false</Optimize>
+ <OutputPath>bin\Debug\</OutputPath>
+ <DefineConstants>DEBUG;TRACE</DefineConstants>
+ <ErrorReport>prompt</ErrorReport>
+ <WarningLevel>4</WarningLevel>
+ </PropertyGroup>
+ <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' ">
+ <DebugType>pdbonly</DebugType>
+ <Optimize>true</Optimize>
+ <OutputPath>bin\Release\</OutputPath>
+ <DefineConstants>TRACE</DefineConstants>
+ <ErrorReport>prompt</ErrorReport>
+ <WarningLevel>4</WarningLevel>
+ </PropertyGroup>
+ <PropertyGroup>
+ <SignAssembly>true</SignAssembly>
+ </PropertyGroup>
+ <PropertyGroup>
+ <AssemblyOriginatorKeyFile>
+ </AssemblyOriginatorKeyFile>
+ </PropertyGroup>
+ <ItemGroup>
+ <Reference Include="System" />
+ <Reference Include="System.Core" />
+ <Reference Include="System.Xml.Linq" />
+ <Reference Include="System.Data.DataSetExtensions" />
+ <Reference Include="Microsoft.CSharp" />
+ <Reference Include="System.Data" />
+ <Reference Include="System.Xml" />
+ </ItemGroup>
+ <ItemGroup>
+ <Compile Include="NativeMethods.cs" />
+ <Compile Include="Properties\AssemblyInfo.cs" />
+ <Compile Include="Types.cs" />
+ </ItemGroup>
+ <ItemGroup>
+ <None Include="Key.snk" />
+ </ItemGroup>
+ <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />
+ <!-- To modify your build process, add your task inside one of the targets below and uncomment it.
+ Other similar extension points exist, see Microsoft.Common.targets.
+ <Target Name="BeforeBuild">
+ </Target>
+ <Target Name="AfterBuild">
+ </Target>
+ -->
+</Project> \ No newline at end of file
diff --git a/chromium/tools/win/ChromeDebug/LowLevel/NativeMethods.cs b/chromium/tools/win/ChromeDebug/LowLevel/NativeMethods.cs
new file mode 100644
index 00000000000..170cd91ddeb
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/LowLevel/NativeMethods.cs
@@ -0,0 +1,72 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+using Microsoft.Win32.SafeHandles;
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.InteropServices;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace ChromeDebug.LowLevel {
+ public static class NativeMethods {
+ [DllImport("kernel32.dll", SetLastError = true)]
+ [return: MarshalAs(UnmanagedType.Bool)]
+ public static extern bool ReadProcessMemory(IntPtr hProcess,
+ IntPtr lpBaseAddress,
+ IntPtr lpBuffer,
+ int dwSize,
+ out int lpNumberOfBytesRead);
+
+ [DllImport("ntdll.dll", SetLastError = true)]
+ public static extern LowLevelTypes.NTSTATUS NtQueryInformationProcess(
+ IntPtr hProcess,
+ LowLevelTypes.PROCESSINFOCLASS pic,
+ ref LowLevelTypes.PROCESS_BASIC_INFORMATION pbi,
+ int cb,
+ out int pSize);
+
+ [DllImport("shell32.dll", SetLastError = true)]
+ public static extern IntPtr CommandLineToArgvW(
+ [MarshalAs(UnmanagedType.LPWStr)] string lpCmdLine,
+ out int pNumArgs);
+
+ [DllImport("kernel32.dll", SetLastError = true)]
+ public static extern IntPtr LocalFree(IntPtr hMem);
+
+ [DllImport("kernel32.dll", SetLastError = true)]
+ public static extern IntPtr OpenProcess(
+ LowLevelTypes.ProcessAccessFlags dwDesiredAccess,
+ [MarshalAs(UnmanagedType.Bool)] bool bInheritHandle,
+ int dwProcessId);
+
+ [DllImport("kernel32.dll", SetLastError = true, CallingConvention = CallingConvention.StdCall,
+ CharSet = CharSet.Unicode)]
+ public static extern uint QueryFullProcessImageName(
+ IntPtr hProcess,
+ [MarshalAs(UnmanagedType.U4)] LowLevelTypes.ProcessQueryImageNameMode flags,
+ [Out] StringBuilder lpImageName, ref int size);
+
+ [DllImport("kernel32.dll", SetLastError = true)]
+ [return: MarshalAs(UnmanagedType.Bool)]
+ public static extern bool CloseHandle(IntPtr hObject);
+
+ [DllImport("kernel32.dll", SetLastError = true, CharSet = CharSet.Unicode)]
+ public static extern SafeFileHandle CreateFile(string lpFileName,
+ LowLevelTypes.FileAccessFlags dwDesiredAccess,
+ LowLevelTypes.FileShareFlags dwShareMode,
+ IntPtr lpSecurityAttributes,
+ LowLevelTypes.FileCreationDisposition dwDisp,
+ LowLevelTypes.FileFlagsAndAttributes dwFlags,
+ IntPtr hTemplateFile);
+
+ [DllImport("shell32.dll", CharSet = CharSet.Unicode)]
+ public static extern IntPtr SHGetFileInfo(string pszPath,
+ uint dwFileAttributes,
+ ref LowLevelTypes.SHFILEINFO psfi,
+ uint cbFileInfo,
+ uint uFlags);
+ }
+}
diff --git a/chromium/tools/win/ChromeDebug/LowLevel/Properties/AssemblyInfo.cs b/chromium/tools/win/ChromeDebug/LowLevel/Properties/AssemblyInfo.cs
new file mode 100644
index 00000000000..7a77cff6bf4
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/LowLevel/Properties/AssemblyInfo.cs
@@ -0,0 +1,40 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+using System.Reflection;
+using System.Runtime.CompilerServices;
+using System.Runtime.InteropServices;
+
+// General Information about an assembly is controlled through the following
+// set of attributes. Change these attribute values to modify the information
+// associated with an assembly.
+[assembly: AssemblyTitle("LowLevel")]
+[assembly: AssemblyDescription("")]
+[assembly: AssemblyConfiguration("")]
+[assembly: AssemblyCompany("")]
+[assembly: AssemblyProduct("LowLevel")]
+[assembly: AssemblyCopyright("Copyright © 2013")]
+[assembly: AssemblyTrademark("")]
+[assembly: AssemblyCulture("")]
+
+// Setting ComVisible to false makes the types in this assembly not visible
+// to COM components. If you need to access a type in this assembly from
+// COM, set the ComVisible attribute to true on that type.
+[assembly: ComVisible(false)]
+
+// The following GUID is for the ID of the typelib if this project is exposed to COM
+[assembly: Guid("5bfd12c9-dfa1-4994-b31d-755f3e064640")]
+
+// Version information for an assembly consists of the following four values:
+//
+// Major Version
+// Minor Version
+// Build Number
+// Revision
+//
+// You can specify all the values or you can default the Build and Revision Numbers
+// by using the '*' as shown below:
+// [assembly: AssemblyVersion("1.0.*")]
+[assembly: AssemblyVersion("1.0.0.0")]
+[assembly: AssemblyFileVersion("1.0.0.0")]
diff --git a/chromium/tools/win/ChromeDebug/LowLevel/Types.cs b/chromium/tools/win/ChromeDebug/LowLevel/Types.cs
new file mode 100644
index 00000000000..483acf76975
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/LowLevel/Types.cs
@@ -0,0 +1,219 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Runtime.InteropServices;
+using System.Text;
+using System.Threading.Tasks;
+
+namespace ChromeDebug.LowLevel {
+ // Defines structures, enumerations, and types used by Win32 API calls. In some cases, the API
+ // calls support (and even document) many more values than what are listed here. Should
+ // additional values be required, they can be added to the respective types.
+ public static class LowLevelTypes {
+
+ #region Constants and Enums
+ // Represents the image format of a DLL or executable.
+ public enum ImageFormat {
+ NATIVE,
+ MANAGED,
+ UNKNOWN
+ }
+
+ // Flags used for opening a file handle (e.g. in a call to CreateFile), that determine the
+ // requested permission level.
+ [Flags]
+ public enum FileAccessFlags : uint {
+ GENERIC_WRITE = 0x40000000,
+ GENERIC_READ = 0x80000000
+ }
+
+ // Value used for CreateFile to determine how to behave in the presence (or absence) of a
+ // file with the requested name. Used only for CreateFile.
+ public enum FileCreationDisposition : uint {
+ CREATE_NEW = 1,
+ CREATE_ALWAYS = 2,
+ OPEN_EXISTING = 3,
+ OPEN_ALWAYS = 4,
+ TRUNCATE_EXISTING = 5
+ }
+
+ // Flags that determine what level of sharing this application requests on the target file.
+ // Used only for CreateFile.
+ [Flags]
+ public enum FileShareFlags : uint {
+ EXCLUSIVE_ACCESS = 0x0,
+ SHARE_READ = 0x1,
+ SHARE_WRITE = 0x2,
+ SHARE_DELETE = 0x4
+ }
+
+ // Flags that control caching and other behavior of the underlying file object. Used only for
+ // CreateFile.
+ [Flags]
+ public enum FileFlagsAndAttributes : uint {
+ NORMAL = 0x80,
+ OPEN_REPARSE_POINT = 0x200000,
+ SEQUENTIAL_SCAN = 0x8000000,
+ RANDOM_ACCESS = 0x10000000,
+ NO_BUFFERING = 0x20000000,
+ OVERLAPPED = 0x40000000
+ }
+
+ // The target architecture of a given executable image. The various values correspond to the
+ // magic numbers defined by the PE Executable Image File Format.
+ // http://www.microsoft.com/whdc/system/platform/firmware/PECOFF.mspx
+ public enum MachineType : ushort {
+ UNKNOWN = 0x0,
+ X64 = 0x8664,
+ X86 = 0x14c,
+ IA64 = 0x200
+ }
+
+ // A flag indicating the format of the path string that Windows returns from a call to
+ // QueryFullProcessImageName().
+ public enum ProcessQueryImageNameMode : uint {
+ WIN32_FORMAT = 0,
+ NATIVE_SYSTEM_FORMAT = 1
+ }
+
+ // Flags indicating the level of permission requested when opening a handle to an external
+ // process. Used by OpenProcess().
+ [Flags]
+ public enum ProcessAccessFlags : uint {
+ NONE = 0x0,
+ ALL = 0x001F0FFF,
+ VM_OPERATION = 0x00000008,
+ VM_READ = 0x00000010,
+ QUERY_INFORMATION = 0x00000400,
+ QUERY_LIMITED_INFORMATION = 0x00001000
+ }
+
+ // Defines return value codes used by various Win32 System APIs.
+ public enum NTSTATUS : int {
+ SUCCESS = 0,
+ }
+
+ // Determines the amount of information requested (and hence the type of structure returned)
+ // by a call to NtQueryInformationProcess.
+ public enum PROCESSINFOCLASS : int {
+ PROCESS_BASIC_INFORMATION = 0
+ };
+
+ [Flags]
+ public enum SHGFI : uint {
+ Icon = 0x000000100,
+ DisplayName = 0x000000200,
+ TypeName = 0x000000400,
+ Attributes = 0x000000800,
+ IconLocation = 0x000001000,
+ ExeType = 0x000002000,
+ SysIconIndex = 0x000004000,
+ LinkOverlay = 0x000008000,
+ Selected = 0x000010000,
+ Attr_Specified = 0x000020000,
+ LargeIcon = 0x000000000,
+ SmallIcon = 0x000000001,
+ OpenIcon = 0x000000002,
+ ShellIconSize = 0x000000004,
+ PIDL = 0x000000008,
+ UseFileAttributes = 0x000000010,
+ AddOverlays = 0x000000020,
+ OverlayIndex = 0x000000040,
+ }
+ #endregion
+
+ #region Structures
+ // In general, for all structures below which contains a pointer (represented here by IntPtr),
+ // the pointers refer to memory in the address space of the process from which the original
+ // structure was read. While this seems obvious, it means we cannot provide an elegant
+ // interface to the various fields in the structure due to the de-reference requiring a
+ // handle to the target process. Instead, that functionality needs to be provided at a
+ // higher level.
+ //
+ // Additionally, since we usually explicitly define the fields that we're interested in along
+ // with their respective offsets, we frequently specify the exact size of the native structure.
+
+ // Win32 UNICODE_STRING structure.
+ [StructLayout(LayoutKind.Sequential)]
+ public struct UNICODE_STRING {
+ // The length in bytes of the string pointed to by buffer, not including the null-terminator.
+ private ushort length;
+ // The total allocated size in memory pointed to by buffer.
+ private ushort maximumLength;
+ // A pointer to the buffer containing the string data.
+ private IntPtr buffer;
+
+ public ushort Length { get { return length; } }
+ public ushort MaximumLength { get { return maximumLength; } }
+ public IntPtr Buffer { get { return buffer; } }
+ }
+
+ // Win32 RTL_USER_PROCESS_PARAMETERS structure.
+ [StructLayout(LayoutKind.Explicit, Size = 72)]
+ public struct RTL_USER_PROCESS_PARAMETERS {
+ [FieldOffset(56)]
+ private UNICODE_STRING imagePathName;
+ [FieldOffset(64)]
+ private UNICODE_STRING commandLine;
+
+ public UNICODE_STRING ImagePathName { get { return imagePathName; } }
+ public UNICODE_STRING CommandLine { get { return commandLine; } }
+ };
+
+ // Win32 PEB structure. Represents the process environment block of a process.
+ [StructLayout(LayoutKind.Explicit, Size = 472)]
+ public struct PEB {
+ [FieldOffset(2), MarshalAs(UnmanagedType.U1)]
+ private bool isBeingDebugged;
+ [FieldOffset(12)]
+ private IntPtr ldr;
+ [FieldOffset(16)]
+ private IntPtr processParameters;
+ [FieldOffset(468)]
+ private uint sessionId;
+
+ public bool IsBeingDebugged { get { return isBeingDebugged; } }
+ public IntPtr Ldr { get { return ldr; } }
+ public IntPtr ProcessParameters { get { return processParameters; } }
+ public uint SessionId { get { return sessionId; } }
+ };
+
+ // Win32 PROCESS_BASIC_INFORMATION. Contains a pointer to the PEB, and various other
+ // information about a process.
+ [StructLayout(LayoutKind.Explicit, Size = 24)]
+ public struct PROCESS_BASIC_INFORMATION {
+ [FieldOffset(4)]
+ private IntPtr pebBaseAddress;
+ [FieldOffset(16)]
+ private UIntPtr uniqueProcessId;
+
+ public IntPtr PebBaseAddress { get { return pebBaseAddress; } }
+ public UIntPtr UniqueProcessId { get { return uniqueProcessId; } }
+ }
+
+ [StructLayout(LayoutKind.Sequential, CharSet = CharSet.Unicode)]
+ public struct SHFILEINFO {
+ // C# doesn't support overriding the default constructor of value types, so we need to use
+ // a dummy constructor.
+ public SHFILEINFO(bool dummy) {
+ hIcon = IntPtr.Zero;
+ iIcon = 0;
+ dwAttributes = 0;
+ szDisplayName = "";
+ szTypeName = "";
+ }
+ public IntPtr hIcon;
+ public int iIcon;
+ public uint dwAttributes;
+ [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 260)]
+ public string szDisplayName;
+ [MarshalAs(UnmanagedType.ByValTStr, SizeConst = 80)]
+ public string szTypeName;
+ };
+ #endregion
+ }
+}
diff --git a/chromium/tools/win/ChromeDebug/README.txt b/chromium/tools/win/ChromeDebug/README.txt
new file mode 100644
index 00000000000..69a6b068a40
--- /dev/null
+++ b/chromium/tools/win/ChromeDebug/README.txt
@@ -0,0 +1,7 @@
+Usage Instructions:
+
+0) Install the version of Visual Studio SDK which matches
+ the Visual Studio version with which you will compile.
+1) Compile with VS2012 or VS2013.
+2) Double click the .vsix in the output directory.
+3) Choose the version of VS to install to. \ No newline at end of file
diff --git a/chromium/tools/win/DebugVisualizers/chrome.natvis b/chromium/tools/win/DebugVisualizers/chrome.natvis
new file mode 100644
index 00000000000..7a7c45e1ac1
--- /dev/null
+++ b/chromium/tools/win/DebugVisualizers/chrome.natvis
@@ -0,0 +1,266 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<AutoVisualizer
+ xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
+ <Type Name="gfx::Point">
+ <AlternativeType Name="gfx::PointF"/>
+ <DisplayString>({x_}, {y_})</DisplayString>
+ <Expand>
+ <Item Name="X">x_</Item>
+ <Item Name="Y">y_</Item>
+ </Expand>
+ </Type>
+ <Type Name="gfx::Size">
+ <AlternativeType Name="gfx::SizeF"/>
+ <DisplayString>({width_}, {height_})</DisplayString>
+ <Expand>
+ <Item Name="Width">width_</Item>
+ <Item Name="Height">height_</Item>
+ </Expand>
+ </Type>
+ <Type Name="gfx::Rect">
+ <AlternativeType Name="gfx::RectF"/>
+ <DisplayString>({origin_.x_}, {origin_.y_}) x ({size_.width_}, {size_.height_})</DisplayString>
+ <Expand>
+ <Item Name="Left">origin_.x_</Item>
+ <Item Name="Top">origin_.y_</Item>
+ <Item Name="Width">size_.width_</Item>
+ <Item Name="Height">size_.height_</Item>
+ </Expand>
+ </Type>
+ <Type Name="aura::Window::Value">
+ <DisplayString>{name,s}: {value}</DisplayString>
+ <Expand/>
+ </Type>
+ <Type Name="aura::Window">
+ <DisplayString>{name_,s}</DisplayString>
+ <Expand>
+ <Item Name="Name">name_</Item>
+ <Item Name="Id">id_</Item>
+ <Item Name="Parent">parent_</Item>
+ <Item Name="Children">children_</Item>
+ <Item Name="Bounds">bounds_</Item>
+ <Item Name="Type">type_</Item>
+ <Item Name="Visible">visible_</Item>
+ <Item Name="Transparent">transparent_</Item>
+ <!--<Synthetic Name="Property Map">
+ <DisplayString>Size = {prop_map_._Mysize}</DisplayString>
+ <Expand>
+ <TreeItems>
+ <Size>prop_map_._Mysize</Size>
+ <HeadPointer>prop_map_._Myhead-&gt;_Parent</HeadPointer>
+ <LeftPointer>_Left</LeftPointer>
+ <RightPointer>_Right</RightPointer>
+ <ValueNode Condition="_Isnil == 0">_Myval.second</ValueNode>
+ </TreeItems>
+ </Expand>
+ </Synthetic>-->
+ <Item Name="Layer">layer_</Item>
+ </Expand>
+ </Type>
+ <Type Name="scoped_ptr&lt;*,*&gt;">
+ <DisplayString Condition="impl_.data_.ptr == 0">null</DisplayString>
+ <DisplayString>{impl_.data_.ptr}</DisplayString>
+ <Expand>
+ <ExpandedItem>impl_.data_.ptr</ExpandedItem>
+ </Expand>
+ </Type>
+ <Type Name="scoped_refptr&lt;*&gt;">
+ <DisplayString Condition="ptr_ == 0">null</DisplayString>
+ <DisplayString>[{((base::subtle::RefCountedBase*)ptr_)-&gt;ref_count_}] {(void*)ptr_} {*ptr_}</DisplayString>
+ <Expand>
+ <Item Name="Ptr">ptr_</Item>
+ <Item Name="RefCount">((base::subtle::RefCountedBase*)ptr_)-&gt;ref_count_</Item>
+ </Expand>
+ </Type>
+ <Type Name="base::RefCounted&lt;*&gt;">
+ <DisplayString>RefCount: {ref_count_}</DisplayString>
+ <Expand>
+ <Item Name="RefCount">ref_count_</Item>
+ </Expand>
+ </Type>
+ <Type Name="IPC::Message::Header">
+ <DisplayString>{{Routing: {routing}, Type: {type}}}</DisplayString>
+ <Expand>
+ <Item Name="RoutingId">routing</Item>
+ <Item Name="Type">type</Item>
+ <Synthetic Name="Priority"
+ Condition="(flags &amp; IPC::Message::PRIORITY_MASK) ==
+ IPC::Message::PRIORITY_LOW">
+ <DisplayString>Low</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Priority"
+ Condition="(flags &amp; IPC::Message::PRIORITY_MASK) ==
+ IPC::Message::PRIORITY_NORMAL">
+ <DisplayString>Normal</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Priority"
+ Condition="(flags &amp; IPC::Message::PRIORITY_MASK) ==
+ IPC::Message::PRIORITY_HIGH">
+ <DisplayString>High</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Sync"
+ Condition="(flags &amp; IPC::Message::SYNC_BIT) != 0">
+ <DisplayString>true</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Sync"
+ Condition="(flags &amp; IPC::Message::SYNC_BIT) == 0">
+ <DisplayString>false</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Reply"
+ Condition="(flags &amp; IPC::Message::REPLY_BIT) != 0">
+ <DisplayString>true</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Reply"
+ Condition="(flags &amp; IPC::Message::REPLY_BIT) == 0">
+ <DisplayString>false</DisplayString>
+ </Synthetic>
+ <Synthetic Name="ReplyError"
+ Condition="(flags &amp; IPC::Message::REPLY_ERROR_BIT) != 0">
+ <DisplayString>true</DisplayString>
+ </Synthetic>
+ <Synthetic Name="ReplyError"
+ Condition="(flags &amp; IPC::Message::REPLY_ERROR_BIT) == 0">
+ <DisplayString>false</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Unblock"
+ Condition="(flags &amp; IPC::Message::UNBLOCK_BIT) != 0">
+ <DisplayString>true</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Unblock"
+ Condition="(flags &amp; IPC::Message::UNBLOCK_BIT) == 0">
+ <DisplayString>false</DisplayString>
+ </Synthetic>
+ <Synthetic Name="PumpingMessages"
+ Condition="(flags &amp; IPC::Message::PUMPING_MSGS_BIT) != 0">
+ <DisplayString>true</DisplayString>
+ </Synthetic>
+ <Synthetic Name="PumpingMessages"
+ Condition="(flags &amp; IPC::Message::PUMPING_MSGS_BIT) == 0">
+ <DisplayString>false</DisplayString>
+ </Synthetic>
+ <Synthetic Name="HasSentTime"
+ Condition="(flags &amp; IPC::Message::HAS_SENT_TIME_BIT) != 0">
+ <DisplayString>true</DisplayString>
+ </Synthetic>
+ <Synthetic Name="HasSentTime"
+ Condition="(flags &amp; IPC::Message::HAS_SENT_TIME_BIT) == 0">
+ <DisplayString>false</DisplayString>
+ </Synthetic>
+ </Expand>
+ </Type>
+ <Type Name="IPC::Message">
+ <DisplayString>{{size = {header_size_+capacity_after_header_}}}</DisplayString>
+ <Expand>
+ <ExpandedItem>*((IPC::Message::Header*)header_),nd</ExpandedItem>
+ <Item Name="Payload">(void*)((char*)header_ + header_size_)</Item>
+ </Expand>
+ </Type>
+ <Type Name="base::TimeDelta">
+ <DisplayString>{delta_}</DisplayString>
+ <Expand>
+ <Synthetic Name="Days">
+ <DisplayString>{(int)(delta_ / {,,base.dll}base::Time::kMicrosecondsPerDay)}</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Hours">
+ <DisplayString>{(int)(delta_ / {,,base.dll}base::Time::kMicrosecondsPerHour)}</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Minutes">
+ <DisplayString>{(int)(delta_ / {,,base.dll}base::Time::kMicrosecondsPerMinute)}</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Seconds">
+ <DisplayString>{(int)(delta_ / {,,base.dll}base::Time::kMicrosecondsPerSecond)}</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Milliseconds">
+ <DisplayString>{(int)(delta_ / {,,base.dll}base::Time::kMicrosecondsPerMillisecond)}</DisplayString>
+ </Synthetic>
+ <Item Name="Microseconds">delta_</Item>
+ </Expand>
+ </Type>
+ <Type Name="GURL">
+ <DisplayString>{spec_}</DisplayString>
+ <Expand>
+ <Item Name="Spec">spec_</Item>
+ <Item Name="IsValid">is_valid_</Item>
+ <Synthetic Name="Scheme">
+ <DisplayString
+ Condition="parsed_.scheme.len==-1">undefined</DisplayString>
+ <DisplayString>spec_._Myres[parsed_.scheme.begin][</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Username">
+ <DisplayString
+ Condition="parsed_.username.len==-1">undefined</DisplayString>
+ <DisplayString>spec_._Myres[parsed_.username.begin][</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Password">
+ <DisplayString
+ Condition="parsed_.password.len==-1">undefined</DisplayString>
+ <DisplayString>spec_._Myres[parsed_.password.begin][</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Host">
+ <DisplayString
+ Condition="parsed_.host.len==-1">undefined</DisplayString>
+ <DisplayString>spec_._Myres[parsed_.host.begin][</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Port">
+ <DisplayString
+ Condition="parsed_.port.len==-1">undefined</DisplayString>
+ <DisplayString>spec_._Myres[parsed_.port.begin][</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Path">
+ <DisplayString
+ Condition="parsed_.path.len==-1">undefined</DisplayString>
+ <DisplayString>spec_._Myres[parsed_.path.begin][</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Query">
+ <DisplayString
+ Condition="parsed_.query.len==-1">undefined</DisplayString>
+ <DisplayString>spec_._Myres[parsed_.query.begin][</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Ref">
+ <DisplayString Condition="parsed_.ref.len==-1">undefined</DisplayString>
+ <DisplayString>spec_._Myres[parsed_.ref.begin][</DisplayString>
+ </Synthetic>
+ </Expand>
+ </Type>
+ <Type Name="base::Value">
+ <DisplayString>{type_}</DisplayString>
+ <Expand>
+ <Item Name="Type">type_</Item>
+ </Expand>
+ </Type>
+ <Type Name="base::FundamentalValue">
+ <DisplayString>Fundamental</DisplayString>
+ <Expand>
+ <ExpandedItem>(base::Value*)this,nd</ExpandedItem>
+ <Item Name="Int">integer_value_</Item>
+ <Item Name="Bool">boolean_value_</Item>
+ <Item Name="Double">double_value_</Item>
+ </Expand>
+ </Type>
+ <Type Name="base::StringValue">
+ <DisplayString>String ({value_})</DisplayString>
+ <Expand>
+ <ExpandedItem>(base::Value*)this,nd</ExpandedItem>
+ <Item Name="Value">value_</Item>
+ </Expand>
+ </Type>
+ <Type Name="base::BinaryValue">
+ <DisplayString>Binary ({size_} byte(s))</DisplayString>
+ <Expand>
+ <ExpandedItem>(base::Value*)this,nd</ExpandedItem>
+ <Item Name="Data">buffer_</Item>
+ </Expand>
+ </Type>
+ <Type Name="base::DictionaryValue">
+ <DisplayString>Dictionary ({dictionary_._Mysize} entries)</DisplayString>
+ <Expand>
+ <ExpandedItem>dictionary_</ExpandedItem>
+ </Expand>
+ </Type>
+ <Type Name="base::ListValue">
+ <DisplayString>List ({list_._Mysize} entries)</DisplayString>
+ <Expand>
+ <ExpandedItem>list_</ExpandedItem>
+ </Expand>
+ </Type>
+</AutoVisualizer> \ No newline at end of file
diff --git a/chromium/tools/win/DebugVisualizers/skia.natvis b/chromium/tools/win/DebugVisualizers/skia.natvis
new file mode 100644
index 00000000000..40502fdd659
--- /dev/null
+++ b/chromium/tools/win/DebugVisualizers/skia.natvis
@@ -0,0 +1,38 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<AutoVisualizer
+ xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
+ <Type Name="SkPoint">
+ <AlternativeType Name="SkIPoint"/>
+ <DisplayString>({fX}, {fY})</DisplayString>
+ <Expand>
+ <Item Name="X">fX</Item>
+ <Item Name="Y">fY</Item>
+ </Expand>
+ </Type>
+ <Type Name="SkSize">
+ <DisplayString>({fWidth}, {fHeight})</DisplayString>
+ <Expand>
+ <Item Name="Width">fWidth</Item>
+ <Item Name="Height">fHeight</Item>
+ </Expand>
+ </Type>
+ <Type Name="SkRect">
+ <AlternativeType Name="SkIRect"/>
+ <DisplayString>({fLeft}, {fTop}) x ({fRight - fLeft}, {fBottom - fTop})</DisplayString>
+ <Expand>
+ <Item Name="Left">fLeft</Item>
+ <Item Name="Top">fTop</Item>
+ <Item Name="Right">fRight</Item>
+ <Item Name="Bottom">fBottom</Item>
+ <Synthetic Name="Width">
+ <DisplayString>{fRight - fLeft}</DisplayString>
+ </Synthetic>
+ <Synthetic Name="Height">
+ <DisplayString>{fBottom - fTop}</DisplayString>
+ </Synthetic>
+ </Expand>
+ </Type>
+ <Type Name="LogFontTypeface">
+ <DisplayString>{fLogFont.lfFaceName,su}</DisplayString>
+ </Type>
+</AutoVisualizer> \ No newline at end of file
diff --git a/chromium/tools/win/DebugVisualizers/webkit.natvis b/chromium/tools/win/DebugVisualizers/webkit.natvis
new file mode 100644
index 00000000000..8451850d003
--- /dev/null
+++ b/chromium/tools/win/DebugVisualizers/webkit.natvis
@@ -0,0 +1,211 @@
+<?xml version="1.0" encoding="utf-8" ?>
+<AutoVisualizer
+ xmlns="http://schemas.microsoft.com/vstudio/debugger/natvis/2010">
+ <Type Name="blink::Member&lt;*&gt;">
+ <DisplayString Condition="m_raw == 0">null</DisplayString>
+ <DisplayString>{*m_raw}</DisplayString>
+ <Expand>
+ <Item Name="m_raw">m_raw</Item>
+ </Expand>
+ </Type>
+ <Type Name="WTF::String">
+ <DisplayString Condition="m_impl.m_ptr == 0">(null)</DisplayString>
+ <DisplayString IncludeView="bare">{*m_impl.m_ptr,view(bare)}</DisplayString>
+ <DisplayString>{*m_impl.m_ptr}</DisplayString>
+ <Expand>
+ <Item Name="Impl">m_impl.m_ptr</Item>
+ </Expand>
+ </Type>
+ <Type Name="WTF::StringImpl">
+ <DisplayString IncludeView="bare"
+ Condition="m_is8Bit">{(this+1),[m_length]sb}</DisplayString>
+ <DisplayString
+ Condition="m_is8Bit">[{m_length}] {(this+1),[m_length]s}</DisplayString>
+ <DisplayString IncludeView="bare">{(this+1),[m_length]sub}</DisplayString>
+ <DisplayString>[{m_length}] {(this+1),[m_length]su}</DisplayString>
+ <Expand>
+ <Item Name="Length">m_length</Item>
+ <Item Name="Hash">m_hash</Item>
+ <Item Name="AsciiText" Condition="m_is8Bit">(this+1),[m_length]s</Item>
+ <Item Name="UnicodeText" Condition="!m_is8Bit">(this+1),[m_length]su</Item>
+ </Expand>
+ </Type>
+ <Type Name="WTF::AtomicString">
+ <DisplayString IncludeView="bare">{m_string,view(bare)}</DisplayString>
+ <DisplayString>{m_string}</DisplayString>
+ </Type>
+ <Type Name="WTF::Vector&lt;*&gt;">
+ <DisplayString Condition="m_size==0">(empty)</DisplayString>
+ <DisplayString Condition="m_size==1">[{m_size}] {m_buffer,1}</DisplayString>
+ <DisplayString Condition="m_size==2">[{m_size}] {m_buffer,2}</DisplayString>
+ <DisplayString Condition="m_size==3">[{m_size}] {m_buffer,3}</DisplayString>
+ <DisplayString Condition="m_size==4">[{m_size}] {m_buffer,4}</DisplayString>
+ <DisplayString
+ Condition="m_size>=5">[{m_size}] {m_buffer,4}...</DisplayString>
+ <Expand>
+ <Item Name="Buffer">m_buffer</Item>
+ <Item Name="Size">m_size</Item>
+ <Item Name="Capacity">m_capacity</Item>
+ <ArrayItems Condition="m_size>0">
+ <Size>m_size</Size>
+ <ValuePointer>m_buffer</ValuePointer>
+ </ArrayItems>
+ </Expand>
+ </Type>
+ <Type Name="WTF::RefPtr&lt;*&gt;">
+ <AlternativeType Name="WTF::PassRefPtr&lt;*&gt;"/>
+ <DisplayString Condition="m_ptr == 0">null</DisplayString>
+ <DisplayString>{*m_ptr}</DisplayString>
+ <Expand>
+ <Item Name="Ptr">m_ptr</Item>
+ </Expand>
+ </Type>
+ <Type Name="blink::LayoutUnit">
+ <DisplayString>{(float)m_value / kFixedPointDenominator}</DisplayString>
+ <Expand>
+ <Item Name="FloatVal">(float)m_value / kFixedPointDenominator</Item>
+ <Item Name="RawVal">m_value</Item>
+ </Expand>
+ </Type>
+ <Type Name="blink::LayoutSize">
+ <AlternativeType Name="blink::IntSize"/>
+ <AlternativeType Name="blink::FloatSize"/>
+ <DisplayString>({m_width}, {m_height})</DisplayString>
+ <Expand>
+ <Item Name="Width">m_width</Item>
+ <Item Name="Height">m_height</Item>
+ </Expand>
+ </Type>
+ <Type Name="blink::LayoutPoint">
+ <AlternativeType Name="blink::IntPoint"/>
+ <AlternativeType Name="blink::FloatPoint"/>
+ <DisplayString>({m_x}, {m_y})</DisplayString>
+ <Expand>
+ <Item Name="X">m_x</Item>
+ <Item Name="Y">m_y</Item>
+ </Expand>
+ </Type>
+ <Type Name="blink::LayoutRect">
+ <AlternativeType Name="blink::IntRect"/>
+ <AlternativeType Name="blink::FloatRect"/>
+ <DisplayString>({m_location.m_x}, {m_location.m_y}) x ({m_size.m_width}, {m_size.m_height})</DisplayString>
+ <Expand>
+ <Item Name="Location">m_location</Item>
+ <Item Name="Size">m_size</Item>
+ </Expand>
+ </Type>
+ <Type Name="blink::Length">
+ <DisplayString Condition="m_isFloat">{(blink::LengthType)m_type} {m_floatValue}</DisplayString>
+ <DisplayString>{(blink::LengthType)m_type} {m_intValue}</DisplayString>
+ </Type>
+ <Type Name="blink::WebRect">
+ <AlternativeType Name="blink::WebFloatRect"/>
+ <DisplayString>({x}, {y}) x ({width}, {height})</DisplayString>
+ <Expand>
+ <Item Name="x">x</Item>
+ <Item Name="y">y</Item>
+ <Item Name="width">width</Item>
+ <Item Name="height">height</Item>
+ <Synthetic Name="right">
+ <DisplayString>{x + width}</DisplayString>
+ </Synthetic>
+ <Synthetic Name="bottom">
+ <DisplayString>{y + height}</DisplayString>
+ </Synthetic>
+ </Expand>
+ </Type>
+ <Type Name="blink::WebPoint">
+ <AlternativeType Name="blink::WebFloatPoint"/>
+ <DisplayString>({x}, {y})</DisplayString>
+ <Expand>
+ <Item Name="x">x</Item>
+ <Item Name="y">y</Item>
+ </Expand>
+ </Type>
+ <!-- Component build version -->
+ <Type Name="blink::WebString">
+ <DisplayString>{(blink_platform.dll!WTF::StringImpl*)(m_private.m_storage)}</DisplayString>
+ </Type>
+ <!-- Non-component build version -->
+ <Type Name="blink::WebString">
+ <DisplayString>{(WTF::StringImpl*)(m_private.m_storage)}</DisplayString>
+ </Type>
+ <!-- DOM -->
+ <Type Name="blink::QualifiedName">
+ <DisplayString Condition="m_impl.m_ptr == 0">(null)</DisplayString>
+ <DisplayString>{*m_impl.m_ptr}</DisplayString>
+ </Type>
+ <Type Name="blink::QualifiedName::QualifiedNameImpl">
+ <DisplayString>{m_localName,view(bare)}</DisplayString>
+ </Type>
+ <Type Name="blink::CharacterData">
+ <DisplayString>{m_data,view(bare)}</DisplayString>
+ </Type>
+ <Type Name="blink::ContainerNode">
+ <Expand>
+ <LinkedListItems>
+ <HeadPointer>m_firstChild.m_raw</HeadPointer>
+ <NextPointer>m_next.m_raw</NextPointer>
+ <ValueNode>this</ValueNode>
+ </LinkedListItems>
+ </Expand>
+ </Type>
+ <Type Name="blink::Element">
+ <DisplayString Condition="m_firstChild.m_raw != 0">&lt;{m_tagName}>{m_firstChild}</DisplayString>
+ <DisplayString>&lt;{m_tagName}></DisplayString>
+ </Type>
+ <!-- Layout: LayoutObject -->
+ <Type Name="blink::LayoutObject">
+ <DisplayString Condition="m_bitfields.m_isAnonymous">Anonymous</DisplayString>
+ <DisplayString>{m_node}</DisplayString>
+ </Type>
+ <Type Name="blink::LayoutObjectChildList">
+ <Expand>
+ <LinkedListItems>
+ <HeadPointer>m_firstChild</HeadPointer>
+ <NextPointer>m_next</NextPointer>
+ <ValueNode>this</ValueNode>
+ </LinkedListItems>
+ </Expand>
+ </Type>
+ <!-- Layout: InlineBox -->
+ <Type Name="blink::InlineBox">
+ <DisplayString>{m_layoutObject}</DisplayString>
+ </Type>
+ <Type Name="blink::InlineFlowBox">
+ <Expand>
+ <LinkedListItems>
+ <HeadPointer>m_firstChild</HeadPointer>
+ <NextPointer>m_next</NextPointer>
+ <ValueNode>this</ValueNode>
+ </LinkedListItems>
+ </Expand>
+ </Type>
+ <Type Name="blink::LineBoxList">
+ <Expand>
+ <LinkedListItems>
+ <HeadPointer>m_firstLineBox</HeadPointer>
+ <NextPointer>m_nextLineBox</NextPointer>
+ <ValueNode>this</ValueNode>
+ </LinkedListItems>
+ </Expand>
+ </Type>
+ <Type Name="blink::LineLayoutItem">
+ <DisplayString>{m_layoutObject}</DisplayString>
+ </Type>
+ <!-- Layout: TextRun -->
+ <Type Name="blink::TextRun">
+ <DisplayString Condition="m_is8Bit">{m_data.characters8,[m_len]s}</DisplayString>
+ <DisplayString>{(m_data.characters16),[m_len]su}</DisplayString>
+ </Type>
+ <Type Name="blink::BidiRun">
+ <DisplayString>{*m_object} {m_start}-{m_stop}</DisplayString>
+ </Type>
+ <!-- Fonts -->
+ <Type Name="blink::SimpleFontData">
+ <DisplayString>{m_platformData}</DisplayString>
+ </Type>
+ <Type Name="blink::FontPlatformData">
+ <DisplayString>{*m_typeface.m_ptr}, {m_textSize}px</DisplayString>
+ </Type>
+</AutoVisualizer> \ No newline at end of file
diff --git a/chromium/tools/win/OWNERS b/chromium/tools/win/OWNERS
new file mode 100644
index 00000000000..7afbac6b039
--- /dev/null
+++ b/chromium/tools/win/OWNERS
@@ -0,0 +1,2 @@
+brucedawson@chromium.org
+scottmg@chromium.org
diff --git a/chromium/tools/win/RetrieveSymbols/ReadMe.txt b/chromium/tools/win/RetrieveSymbols/ReadMe.txt
new file mode 100644
index 00000000000..fb9c5106fe7
--- /dev/null
+++ b/chromium/tools/win/RetrieveSymbols/ReadMe.txt
@@ -0,0 +1,37 @@
+This tool is designed to download PE files and symbols from symbol servers.
+Normally this is done automatically by tools such as windbg but it can be
+helpful to be able to download these files on demand.
+
+Sample usage:
+
+> rem Add the VS tools to the path, for access to dumpbin
+> "%vs120comntools%vsvars32.bat"
+
+D:\src\chromium\src\tools\win>dumpbin /headers "c:\Program Files (x86)\Google\Chrome\Application\chrome.exe" | findstr "RSDS date image"
+ 54E3AECF time date stamp Tue Feb 17 13:12:47 2015
+ 400000 image base (00400000 to 004D2FFF)
+ 0.00 image version
+ D3000 size of image
+ 54E3AECF cv 5D 0008DF80 8D380 Format: RSDS, {283A66AE-3EF3-4383-8798-F6617112B1F6}, 1, C:\b\build\slave\win\build\src\out\Release\initialexe\chrome.exe.pdb
+
+> RetrieveSymbols {283A66AE-3EF3-4383-8798-F6617112B1F6}, 1 chrome.exe.pdb
+Parsing symbol data for a PDB file.
+Looking for 283A66AE3EF343838798F6617112B1F6 1 chrome.exe.pdb.
+Found symbol file - placed it in d:\src\symbols\chrome.exe.pdb\283A66AE3EF343838798F6617112B1F61\chrome.exe.pdb.
+
+> RetrieveSymbols 54E3AECF D3000 chrome.exe
+Parsing symbol data for a PE (.dll or .exe) file.
+Looking for chrome.exe 54e3aecf d3000.
+Found symbol file - placed it in d:\src\symbols\chrome.exe\54E3AECFd3000\chrome.exe.
+
+The first invocation of RetrieveSymbols uses the GUID, age, and PDB name from
+the RSDS line of the dumpbin output -- the extraneous '{', '}', ',' and '-'
+characters are stripped out.
+
+The second invocation of RetrieveSymbols uses the time date stamp from the
+first line of the dumpbin output, the "size of image" data, and the
+executable name.
+
+This information can also be obtained from breakpad reports, from windbg
+by using "lmv m chrome_elf" and "!lmi chrome_elf.dll", from ETW traces,
+and from other sources.
diff --git a/chromium/tools/win/RetrieveSymbols/RetrieveSymbols.cpp b/chromium/tools/win/RetrieveSymbols/RetrieveSymbols.cpp
new file mode 100644
index 00000000000..f183673f287
--- /dev/null
+++ b/chromium/tools/win/RetrieveSymbols/RetrieveSymbols.cpp
@@ -0,0 +1,162 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Symbol downloading demonstration code.
+// For more information see ReadMe.txt and this blog post:
+// https://randomascii.wordpress.com/2013/03/09/symbols-the-microsoft-way/
+
+#include <stdio.h>
+#include <Windows.h>
+#include <DbgHelp.h>
+#include <string>
+
+// Link with the dbghelp import library
+#pragma comment(lib, "dbghelp.lib")
+
+// Uncomment this line to test with known-good parameters.
+//#define TESTING
+
+int main(int argc, char* argv[])
+{
+ // Tell dbghelp to print diagnostics to the debugger output.
+ SymSetOptions(SYMOPT_DEBUG);
+
+ // Initialize dbghelp
+ const HANDLE fakeProcess = (HANDLE)1;
+ BOOL result = SymInitialize(fakeProcess, NULL, FALSE);
+
+#ifdef TESTING
+ // Set a search path and cache directory. If this isn't set
+ // then _NT_SYMBOL_PATH will be used instead.
+ // Force setting it here to make sure that the test succeeds.
+ SymSetSearchPath(fakeProcess,
+ "SRV*c:\\symbolstest*http://msdl.microsoft.com/download/symbols");
+
+ // Valid PDB data to test the code.
+ std::string gTextArg = "072FF0EB54D24DFAAE9D13885486EE09";
+ const char* ageText = "2";
+ const char* fileName = "kernel32.pdb";
+
+ // Valid PE data to test the code
+ fileName = "crypt32.dll";
+ const char* dateStampText = "4802A0D7";
+ const char* sizeText = "95000";
+ //fileName = "chrome_child.dll";
+ //const char* dateStampText = "5420D824";
+ //const char* sizeText = "20a6000";
+#else
+ if (argc < 4)
+ {
+ printf("Error: insufficient arguments.\n");
+ printf("Usage: %s guid age pdbname\n", argv[0]);
+ printf("Usage: %s dateStamp size pename\n", argv[0]);
+ printf("Example: %s 6720c31f4ac24f3ab0243e0641a4412f 1 "
+ "chrome_child.dll.pdb\n", argv[0]);
+ printf("Example: %s 4802A0D7 95000 crypt32.dll\n", argv[0]);
+ return 0;
+ }
+
+ std::string gTextArg = argv[1];
+ const char* dateStampText = argv[1];
+ const char* ageText = argv[2];
+ const char* sizeText = argv[2];
+ const char* fileName = argv[3];
+#endif
+
+ // Parse the GUID and age from the text
+ GUID g = {};
+ DWORD age = 0;
+ DWORD dateStamp = 0;
+ DWORD size = 0;
+
+ // Settings for SymFindFileInPath
+ void* id = nullptr;
+ DWORD flags = 0;
+ DWORD two = 0;
+
+ const char* ext = strrchr(fileName, '.');
+ if (!ext)
+ {
+ printf("No extension found on %s. Fatal error.\n", fileName);
+ return 0;
+ }
+
+ if (_stricmp(ext, ".pdb") == 0)
+ {
+ std::string gText;
+ // Scan the GUID argument and remove all non-hex characters. This allows
+ // passing GUIDs with '-', '{', and '}' characters.
+ for (auto c : gTextArg)
+ {
+ if (isxdigit(c))
+ {
+ gText.push_back(c);
+ }
+ }
+ printf("Parsing symbol data for a PDB file.\n");
+ if (gText.size() != 32)
+ {
+ printf("Error: GUIDs must be exactly 32 characters"
+ " (%s was stripped to %s).\n", gTextArg.c_str(), gText.c_str());
+ return 10;
+ }
+
+ int count = sscanf_s(gText.substr(0, 8).c_str(), "%x", &g.Data1);
+ DWORD temp;
+ count += sscanf_s(gText.substr(8, 4).c_str(), "%x", &temp);
+ g.Data2 = (unsigned short)temp;
+ count += sscanf_s(gText.substr(12, 4).c_str(), "%x", &temp);
+ g.Data3 = (unsigned short)temp;
+ for (auto i = 0; i < ARRAYSIZE(g.Data4); ++i)
+ {
+ count += sscanf_s(gText.substr(16 + i * 2, 2).c_str(), "%x", &temp);
+ g.Data4[i] = (unsigned char)temp;
+ }
+ count += sscanf_s(ageText, "%x", &age);
+
+ if (count != 12)
+ {
+ printf("Error: couldn't parse the GUID/age string. Sorry.\n");
+ return 10;
+ }
+ flags = SSRVOPT_GUIDPTR;
+ id = &g;
+ two = age;
+ printf("Looking for %s %s %s.\n", gText.c_str(), ageText, fileName);
+ }
+ else
+ {
+ printf("Parsing symbol data for a PE (.dll or .exe) file.\n");
+ if (strlen(dateStampText) != 8)
+ printf("Warning!!! The datestamp (%s) is not eight characters long. "
+ "This is usually wrong.\n", dateStampText);
+ int count = sscanf_s(dateStampText, "%x", &dateStamp);
+ count += sscanf_s(sizeText, "%x", &size);
+ flags = SSRVOPT_DWORDPTR;
+ id = &dateStamp;
+ two = size;
+ printf("Looking for %s %x %x.\n", fileName, dateStamp, two);
+ }
+
+ char filePath[MAX_PATH] = {};
+ DWORD three = 0;
+
+ if (SymFindFileInPath(fakeProcess, NULL, fileName, id, two, three,
+ flags, filePath, NULL, NULL))
+ {
+ printf("Found symbol file - placed it in %s.\n", filePath);
+ }
+ else
+ {
+ printf("Error: symbols not found - error %u. Are dbghelp.dll and "
+ "symsrv.dll in the same directory as this executable?\n",
+ GetLastError());
+ printf("Note that symbol server lookups sometimes fail randomly. "
+ "Try again?\n");
+ }
+
+ SymCleanup(fakeProcess);
+
+ return 0;
+}
diff --git a/chromium/tools/win/RetrieveSymbols/RetrieveSymbols.sln b/chromium/tools/win/RetrieveSymbols/RetrieveSymbols.sln
new file mode 100644
index 00000000000..957c0faa7e7
--- /dev/null
+++ b/chromium/tools/win/RetrieveSymbols/RetrieveSymbols.sln
@@ -0,0 +1,22 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio 2013
+VisualStudioVersion = 12.0.30723.0
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "RetrieveSymbols", "RetrieveSymbols.vcxproj", "{A7E25ADF-5FEA-4506-BF39-B88EC8CBACA5}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Win32 = Debug|Win32
+ Release|Win32 = Release|Win32
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {A7E25ADF-5FEA-4506-BF39-B88EC8CBACA5}.Debug|Win32.ActiveCfg = Debug|Win32
+ {A7E25ADF-5FEA-4506-BF39-B88EC8CBACA5}.Debug|Win32.Build.0 = Debug|Win32
+ {A7E25ADF-5FEA-4506-BF39-B88EC8CBACA5}.Release|Win32.ActiveCfg = Release|Win32
+ {A7E25ADF-5FEA-4506-BF39-B88EC8CBACA5}.Release|Win32.Build.0 = Release|Win32
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+EndGlobal
diff --git a/chromium/tools/win/RetrieveSymbols/RetrieveSymbols.vcxproj b/chromium/tools/win/RetrieveSymbols/RetrieveSymbols.vcxproj
new file mode 100644
index 00000000000..100d2ed6748
--- /dev/null
+++ b/chromium/tools/win/RetrieveSymbols/RetrieveSymbols.vcxproj
@@ -0,0 +1,88 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" ToolsVersion="12.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup Label="ProjectConfigurations">
+ <ProjectConfiguration Include="Debug|Win32">
+ <Configuration>Debug</Configuration>
+ <Platform>Win32</Platform>
+ </ProjectConfiguration>
+ <ProjectConfiguration Include="Release|Win32">
+ <Configuration>Release</Configuration>
+ <Platform>Win32</Platform>
+ </ProjectConfiguration>
+ </ItemGroup>
+ <PropertyGroup Label="Globals">
+ <ProjectGuid>{A7E25ADF-5FEA-4506-BF39-B88EC8CBACA5}</ProjectGuid>
+ <Keyword>Win32Proj</Keyword>
+ <RootNamespace>RetrieveSymbols</RootNamespace>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
+ <ConfigurationType>Application</ConfigurationType>
+ <UseDebugLibraries>true</UseDebugLibraries>
+ <PlatformToolset>v120</PlatformToolset>
+ <CharacterSet>Unicode</CharacterSet>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
+ <ConfigurationType>Application</ConfigurationType>
+ <UseDebugLibraries>false</UseDebugLibraries>
+ <PlatformToolset>v120</PlatformToolset>
+ <WholeProgramOptimization>true</WholeProgramOptimization>
+ <CharacterSet>Unicode</CharacterSet>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+ <ImportGroup Label="ExtensionSettings">
+ </ImportGroup>
+ <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+ </ImportGroup>
+ <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+ </ImportGroup>
+ <PropertyGroup Label="UserMacros" />
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ <LinkIncremental>true</LinkIncremental>
+ </PropertyGroup>
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ <LinkIncremental>false</LinkIncremental>
+ </PropertyGroup>
+ <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+ <ClCompile>
+ <PrecompiledHeader>NotUsing</PrecompiledHeader>
+ <WarningLevel>Level3</WarningLevel>
+ <Optimization>Disabled</Optimization>
+ <PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+ <SDLCheck>true</SDLCheck>
+ </ClCompile>
+ <Link>
+ <SubSystem>Console</SubSystem>
+ <GenerateDebugInformation>true</GenerateDebugInformation>
+ </Link>
+ </ItemDefinitionGroup>
+ <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+ <ClCompile>
+ <WarningLevel>Level3</WarningLevel>
+ <PrecompiledHeader>NotUsing</PrecompiledHeader>
+ <Optimization>MaxSpeed</Optimization>
+ <FunctionLevelLinking>true</FunctionLevelLinking>
+ <IntrinsicFunctions>true</IntrinsicFunctions>
+ <PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;_LIB;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+ <SDLCheck>true</SDLCheck>
+ <RuntimeLibrary>MultiThreaded</RuntimeLibrary>
+ </ClCompile>
+ <Link>
+ <SubSystem>Console</SubSystem>
+ <GenerateDebugInformation>true</GenerateDebugInformation>
+ <EnableCOMDATFolding>true</EnableCOMDATFolding>
+ <OptimizeReferences>true</OptimizeReferences>
+ </Link>
+ </ItemDefinitionGroup>
+ <ItemGroup>
+ <Text Include="ReadMe.txt" />
+ </ItemGroup>
+ <ItemGroup>
+ <ClCompile Include="RetrieveSymbols.cpp" />
+ </ItemGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+ <ImportGroup Label="ExtensionTargets">
+ </ImportGroup>
+</Project> \ No newline at end of file
diff --git a/chromium/tools/win/RetrieveSymbols/RetrieveSymbols.vcxproj.filters b/chromium/tools/win/RetrieveSymbols/RetrieveSymbols.vcxproj.filters
new file mode 100644
index 00000000000..fe67e547b80
--- /dev/null
+++ b/chromium/tools/win/RetrieveSymbols/RetrieveSymbols.vcxproj.filters
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup>
+ <Text Include="ReadMe.txt" />
+ </ItemGroup>
+ <ItemGroup>
+ <ClCompile Include="RetrieveSymbols.cpp" />
+ </ItemGroup>
+</Project> \ No newline at end of file
diff --git a/chromium/tools/win/copy-installer.bat b/chromium/tools/win/copy-installer.bat
new file mode 100755
index 00000000000..1bb0f2934d2
--- /dev/null
+++ b/chromium/tools/win/copy-installer.bat
@@ -0,0 +1,124 @@
+ECHO OFF
+
+REM Copyright (c) 2012 The Chromium Authors. All rights reserved.
+REM Use of this source code is governed by a BSD-style license that can be
+REM found in the LICENSE file.
+
+REM Copies an installer and symbols from a build directory on a network share
+REM into the directory \[out|build]\[Debug|Release] on the current drive.
+REM
+REM Usage:
+REM \\build.share\<path_to_checkout>\src\tools\win\copy-installer.bat
+REM
+REM By default, the script will copy the Debug build in the tree, falling back
+REM to the Release build if one is not found. Similarly, the ninja output
+REM directory is preferred over the devenv output directory. The x86 build is
+REM preferred over the x64 build. Specify any of "out|build", "Debug|Release"
+REM (case matters), or "x64" on the command line in any order to influence
+REM selection. The defaults for location and build type can also be overridden
+REM in a given build tree by creating a "copy-installer.cfg" file alongside the
+REM .gclient file that sets any of OUTPUT, BUILDTYPE, or ARCH variables.
+REM
+REM Install Robocopy for superior performance on Windows XP if desired (it is
+REM present by default on Vista+).
+
+SETLOCAL
+
+REM Get the path to the build tree's src directory.
+CALL :_canonicalize "%~dp0..\.."
+SET FROM=%RET%
+
+REM Read local configuration (set OUTPUT and BUILDTYPE there).
+IF EXIST "%FROM%\..\copy-installer.cfg" CALL "%FROM%\..\copy-installer.cfg"
+
+REM Read any of OUTPUT, BUILDTYPE, or ARCH from command line.
+FOR %%a IN (%1 %2) do (
+IF "%%a"=="out" SET OUTPUT=out
+IF "%%a"=="build" SET OUTPUT=build
+IF "%%a"=="Debug" SET BUILDTYPE=Debug
+IF "%%a"=="Release" SET BUILDTYPE=Release
+IF "%%a"=="x64" SET ARCH=_x64
+)
+
+CALL :_find_build
+IF "%OUTPUT%%BUILDTYPE%%ARCH%"=="" (
+ECHO No build found to copy.
+EXIT 1
+)
+
+SET FROM=%FROM%\%OUTPUT%\%BUILDTYPE%%ARCH%
+SET TO=\%OUTPUT%\%BUILDTYPE%%ARCH%
+
+SET TOCOPY=mini_installer.exe *.dll.pdb chrome.exe.pdb mini_installer.exe.pdb^
+ setup.exe.pdb
+
+CALL :_copyfiles
+
+REM incremental_chrome_dll=1 puts chrome_dll.pdb into the "initial" dir.
+IF EXIST "%FROM%\initial" (
+SET FROM=%FROM%\initial
+SET TOCOPY=*.pdb
+CALL :_copyfiles
+)
+
+ECHO Ready to run/debug %TO%\mini_installer.exe.
+GOTO :EOF
+
+REM All labels henceforth are subroutines intended to be invoked by CALL.
+
+REM Canonicalize the first argument, returning it in RET.
+:_canonicalize
+SET RET=%~f1
+GOTO :EOF
+
+REM Search for a mini_installer.exe in the candidate build outputs.
+:_find_build
+IF "%OUTPUT%"=="" (
+SET OUTPUTS=out build
+) ELSE (
+SET OUTPUTS=%OUTPUT%
+SET OUTPUT=
+)
+
+IF "%BUILDTYPE%"=="" (
+SET BUILDTYPES=Debug Release
+) ELSE (
+SET BUILDTYPES=%BUILDTYPE%
+SET BUILDTYPE=
+)
+
+FOR %%o IN (%OUTPUTS%) DO (
+FOR %%f IN (%BUILDTYPES%) DO (
+IF EXIST "%FROM%\%%o\%%f\mini_installer.exe" (
+SET OUTPUT=%%o
+SET BUILDTYPE=%%f
+GOTO :EOF
+)
+IF EXIST "%FROM%\%%o\%%f_x64\mini_installer.exe" (
+SET OUTPUT=%%o
+SET BUILDTYPE=%%f
+SET ARCH=_x64
+GOTO :EOF
+)
+)
+)
+GOTO :EOF
+
+REM Branch to handle copying via robocopy (fast) or xcopy (slow).
+:_copyfiles
+robocopy /? 1> nul 2> nul
+IF NOT "%ERRORLEVEL%"=="9009" (
+robocopy "%FROM%" "%TO%" %TOCOPY% /MT /XX
+) ELSE (
+IF NOT EXIST "%TO%" mkdir "%TO%"
+call :_xcopy_hack %TOCOPY%
+)
+GOTO :EOF
+
+REM We can't use a for..in..do loop since we have wildcards, so we make a call
+REM to this with the files to copy.
+:_xcopy_hack
+SHIFT
+IF "%0"=="" GOTO :EOF
+xcopy "%FROM%\%0" "%TO%" /d /y
+GOTO _xcopy_hack
diff --git a/chromium/tools/win/link_limiter/build_link_limiter.py b/chromium/tools/win/link_limiter/build_link_limiter.py
new file mode 100755
index 00000000000..464d30c57b2
--- /dev/null
+++ b/chromium/tools/win/link_limiter/build_link_limiter.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+BUILD_DIR = 'build'
+
+
+def run_with_vsvars(cmd, tmpdir=None):
+ fd, filename = tempfile.mkstemp('.bat', text=True)
+ with os.fdopen(fd, 'w') as f:
+ print >> f, '@echo off'
+ print >> f, r'call "%VS100COMNTOOLS%\vsvars32.bat"'
+ if tmpdir:
+ print >> f, r'cd %s' % tmpdir
+ print >> f, cmd
+ try:
+ p = subprocess.Popen([filename], shell=True, stdout=subprocess.PIPE,
+ universal_newlines=True)
+ out, _ = p.communicate()
+ return p.returncode, out
+ finally:
+ os.unlink(filename)
+
+
+def get_vc_dir():
+ _, out = run_with_vsvars('echo VCINSTALLDIR=%VCINSTALLDIR%')
+ for line in out.splitlines(): # pylint: disable-msg=E1103
+ if line.startswith('VCINSTALLDIR='):
+ return line[len('VCINSTALLDIR='):]
+ return None
+
+
+def build(infile):
+ if not os.path.exists(BUILD_DIR):
+ os.makedirs(BUILD_DIR)
+ outfile = 'limiter.exe'
+ outpath = os.path.join(BUILD_DIR, outfile)
+ cpptime = os.path.getmtime(infile)
+ if not os.path.exists(outpath) or cpptime > os.path.getmtime(outpath):
+ print 'Building %s...' % outfile
+ rc, out = run_with_vsvars(
+ 'cl /nologo /Ox /Zi /W4 /WX /D_UNICODE /DUNICODE'
+ ' /D_CRT_SECURE_NO_WARNINGS /EHsc %s /link /out:%s'
+ % (os.path.join('..', infile), outfile), BUILD_DIR)
+ if rc:
+ print out
+ print 'Failed to build %s' % outfile
+ sys.exit(1)
+ else:
+ print '%s already built' % outfile
+ return outpath
+
+
+def main():
+ # Switch to our own dir.
+ os.chdir(os.path.dirname(os.path.abspath(__file__)))
+
+ if sys.argv[-1] == 'clean':
+ if os.path.exists(BUILD_DIR):
+ shutil.rmtree(BUILD_DIR)
+ for exe in glob.glob('*.exe'):
+ os.unlink(exe)
+ return 0
+
+ vcdir = os.environ.get('VCINSTALLDIR')
+ if not vcdir:
+ vcdir = get_vc_dir()
+ if not vcdir:
+ print 'Could not get VCINSTALLDIR. Run vsvars32.bat?'
+ return 1
+ os.environ['PATH'] += (';' + os.path.join(vcdir, 'bin') +
+ ';' + os.path.join(vcdir, r'..\Common7\IDE'))
+
+ # Verify that we can find link.exe.
+ link = os.path.join(vcdir, 'bin', 'link.exe')
+ if not os.path.exists(link):
+ print 'link.exe not found at %s' % link
+ return 1
+
+ exe_name = build('limiter.cc')
+ for shim_exe in ('lib.exe', 'link.exe'):
+ newpath = '%s__LIMITER.exe' % shim_exe
+ shutil.copyfile(exe_name, newpath)
+ print '%s shim built. Use with msbuild like: "/p:LinkToolExe=%s"' \
+ % (shim_exe, os.path.abspath(newpath))
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/chromium/tools/win/link_limiter/limiter.cc b/chromium/tools/win/link_limiter/limiter.cc
new file mode 100644
index 00000000000..cbb1f2a8593
--- /dev/null
+++ b/chromium/tools/win/link_limiter/limiter.cc
@@ -0,0 +1,337 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#define NOMINMAX
+#include <windows.h>
+
+#include <algorithm>
+#include <iterator>
+#include <sstream>
+#include <string>
+#include <vector>
+
+typedef std::basic_string<TCHAR> tstring;
+
+namespace {
+ const bool g_is_debug = (_wgetenv(L"LIMITER_DEBUG") != NULL);
+}
+
+// Don't use stderr for errors because VS has large buffers on them, leading
+// to confusing error output.
+static void Error(const wchar_t* msg, ...) {
+ tstring new_msg = tstring(L"limiter fatal error: ") + msg + L"\n";
+ va_list args;
+ va_start(args, msg);
+ vwprintf(new_msg.c_str(), args);
+ va_end(args);
+}
+
+static void Warn(const wchar_t* msg, ...) {
+ if (!g_is_debug)
+ return;
+ tstring new_msg = tstring(L"limiter warning: ") + msg + L"\n";
+ va_list args;
+ va_start(args, msg);
+ vwprintf(new_msg.c_str(), args);
+ va_end(args);
+}
+
+static tstring ErrorMessageToString(DWORD err) {
+ TCHAR* msg_buf = NULL;
+ DWORD rc = FormatMessage(
+ FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM,
+ NULL, // lpSource
+ err,
+ MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
+ reinterpret_cast<LPTSTR>(&msg_buf),
+ 0, // nSize
+ NULL); // Arguments
+ if (!rc)
+ return L"unknown error";
+ tstring ret(msg_buf);
+ LocalFree(msg_buf);
+ return ret;
+}
+
+static DWORD RunExe(const tstring& exe_name) {
+ STARTUPINFO startup_info = { sizeof(STARTUPINFO) };
+ PROCESS_INFORMATION process_info;
+ DWORD exit_code;
+
+ GetStartupInfo(&startup_info);
+ tstring cmdline = tstring(GetCommandLine());
+
+ size_t first_space = cmdline.find(' ');
+ if (first_space == -1) {
+ // I'm not sure why this would ever happen, but just in case...
+ cmdline = exe_name;
+ } else {
+ cmdline = exe_name + cmdline.substr(first_space);
+ }
+
+ if (!CreateProcess(NULL, // lpApplicationName
+ &cmdline[0],
+ NULL, // lpProcessAttributes
+ NULL, // lpThreadAttributes
+ TRUE, // bInheritHandles
+ 0, // dwCreationFlags,
+ NULL, // lpEnvironment,
+ NULL, // lpCurrentDirectory,
+ &startup_info,
+ &process_info)) {
+ Error(L"Error in CreateProcess[%s]: %s",
+ cmdline.c_str(), ErrorMessageToString(GetLastError()).c_str());
+ return MAXDWORD;
+ }
+ CloseHandle(process_info.hThread);
+ WaitForSingleObject(process_info.hProcess, INFINITE);
+ GetExitCodeProcess(process_info.hProcess, &exit_code);
+ CloseHandle(process_info.hProcess);
+ return exit_code;
+}
+
+// Returns 0 if there was an error
+static int CpuConcurrencyMetric(const tstring& envvar_name) {
+ int max_concurrent = 0;
+ std::vector<char> buffer(1);
+ BOOL ok = false;
+ DWORD last_error = 0;
+ do {
+ DWORD bufsize = buffer.size();
+ ok = GetLogicalProcessorInformation(
+ reinterpret_cast<PSYSTEM_LOGICAL_PROCESSOR_INFORMATION>(&buffer[0]),
+ &bufsize);
+ last_error = GetLastError();
+ if (!ok && last_error == ERROR_INSUFFICIENT_BUFFER &&
+ bufsize > buffer.size()) {
+ buffer.resize(bufsize);
+ }
+ } while (!ok && last_error == ERROR_INSUFFICIENT_BUFFER);
+
+ if (!ok) {
+ Warn(L"Error while getting number of cores. Try setting the "
+ L" environment variable '%s' to (num_cores - 1): %s",
+ envvar_name.c_str(), ErrorMessageToString(last_error).c_str());
+ return 0;
+ }
+
+ PSYSTEM_LOGICAL_PROCESSOR_INFORMATION pproc_info =
+ reinterpret_cast<PSYSTEM_LOGICAL_PROCESSOR_INFORMATION>(&buffer[0]);
+ int num_entries = buffer.size() /
+ sizeof(SYSTEM_LOGICAL_PROCESSOR_INFORMATION);
+
+ for (int i = 0; i < num_entries; ++i) {
+ SYSTEM_LOGICAL_PROCESSOR_INFORMATION& info = pproc_info[i];
+ if (info.Relationship == RelationProcessorCore) {
+ ++max_concurrent;
+ }
+ }
+
+ // Leave one core for other tasks
+ return max_concurrent - 1;
+}
+
+// TODO(defaults): Create a better heuristic than # of CPUs. It seems likely
+// that the right value will, in fact, be based on the memory capacity of the
+// machine, not on the number of CPUs.
+enum ConcurrencyMetricEnum {
+ CONCURRENCY_METRIC_ONE,
+ CONCURRENCY_METRIC_CPU,
+ CONCURRENCY_METRIC_DEFAULT = CONCURRENCY_METRIC_CPU
+};
+
+static int GetMaxConcurrency(const tstring& base_pipename,
+ ConcurrencyMetricEnum metric) {
+ static int max_concurrent = -1;
+
+ if (max_concurrent == -1) {
+ tstring envvar_name = base_pipename + L"_MAXCONCURRENCY";
+
+ const LPTSTR max_concurrent_str = _wgetenv(envvar_name.c_str());
+ max_concurrent = max_concurrent_str ? _wtoi(max_concurrent_str) : 0;
+
+ if (max_concurrent == 0) {
+ switch (metric) {
+ case CONCURRENCY_METRIC_CPU:
+ max_concurrent = CpuConcurrencyMetric(envvar_name);
+ if (max_concurrent)
+ break;
+ // else fall through
+ case CONCURRENCY_METRIC_ONE:
+ max_concurrent = 1;
+ break;
+ }
+ }
+
+ max_concurrent = std::min(std::max(max_concurrent, 1),
+ PIPE_UNLIMITED_INSTANCES);
+ }
+
+ return max_concurrent;
+}
+
+static HANDLE WaitForPipe(const tstring& pipename,
+ HANDLE event,
+ int max_concurrency) {
+ // We're using a named pipe instead of a semaphore so the Kernel can clean up
+ // after us if we crash while holding onto the pipe (A real semaphore will
+ // not release on process termination).
+ HANDLE pipe = INVALID_HANDLE_VALUE;
+ for (;;) {
+ pipe = CreateNamedPipe(
+ pipename.c_str(),
+ PIPE_ACCESS_DUPLEX,
+ PIPE_TYPE_BYTE,
+ max_concurrency,
+ 1, // nOutBufferSize
+ 1, // nInBufferSize
+ 0, // nDefaultTimeOut
+ NULL); // Default security attributes (noinherit)
+ if (pipe != INVALID_HANDLE_VALUE)
+ break;
+
+ DWORD error = GetLastError();
+ if (error == ERROR_PIPE_BUSY) {
+ if (event) {
+ WaitForSingleObject(event, 60 * 1000 /* ms */);
+ } else {
+ // TODO(iannucci): Maybe we should error out here instead of falling
+ // back to a sleep-poll
+ Sleep(5 * 1000 /* ms */);
+ }
+ } else {
+ Warn(L"Got error %d while waiting for pipe: %s", error,
+ ErrorMessageToString(error).c_str());
+ return INVALID_HANDLE_VALUE;
+ }
+ }
+
+ return pipe;
+}
+
+static int WaitAndRun(const tstring& shimmed_exe,
+ const tstring& base_pipename) {
+ ULONGLONG start_time = 0, end_time = 0;
+ tstring pipename = L"\\\\.\\pipe\\" + base_pipename;
+ tstring event_name = L"Local\\EVENT_" + base_pipename;
+
+ // This event lets us do better than strict polling, but we don't rely on it
+ // (in case a process crashes before signalling the event).
+ HANDLE event = CreateEvent(
+ NULL, // Default security attributes
+ FALSE, // Manual reset
+ FALSE, // Initial state
+ event_name.c_str());
+
+ if (g_is_debug)
+ start_time = GetTickCount64();
+
+ HANDLE pipe =
+ WaitForPipe(pipename, event,
+ GetMaxConcurrency(base_pipename, CONCURRENCY_METRIC_DEFAULT));
+
+ if (g_is_debug) {
+ end_time = GetTickCount64();
+ wprintf(L" took %.2fs to acquire semaphore.\n",
+ (end_time - start_time) / 1000.0);
+ }
+
+ DWORD ret = RunExe(shimmed_exe);
+
+ if (pipe != INVALID_HANDLE_VALUE)
+ CloseHandle(pipe);
+ if (event != NULL)
+ SetEvent(event);
+
+ return ret;
+}
+
+void Usage(const tstring& msg) {
+ tstring usage(msg);
+ usage += L"\n"
      L"Usage: SHIMMED_NAME__SEMAPHORE_NAME\n"
+ L"\n"
+ L" SHIMMED_NAME - ex. 'link.exe' or 'lib.exe'\n"
+ L" - can be exe, bat, or com\n"
+ L" - must exist in PATH\n"
+ L"\n"
+ L" SEMAPHORE_NAME - ex. 'SOME_NAME' or 'GROOVY_SEMAPHORE'\n"
+ L"\n"
+ L" Example:\n"
+ L" link.exe__LINK_LIMITER.exe\n"
+ L" lib.exe__LINK_LIMITER.exe\n"
+ L" * Both will limit on the same semaphore\n"
+ L"\n"
+ L" link.exe__LINK_LIMITER.exe\n"
+ L" lib.exe__LIB_LIMITER.exe\n"
+ L" * Both will limit on independent semaphores\n"
+ L"\n"
+ L" This program is meant to be run after renaming it into the\n"
+ L" above format. Once you have done so, executing it will block\n"
+ L" on the availability of the semaphore SEMAPHORE_NAME. Once\n"
+ L" the semaphore is obtained, it will execute SHIMMED_NAME, \n"
+ L" passing through all arguments as-is.\n"
+ L"\n"
+ L" The maximum concurrency can be manually set by setting the\n"
+ L" environment variable <SEMAPHORE_NAME>_MAXCONCURRENCY to an\n"
+ L" integer value (1, 254).\n"
+ L" * This value must be set the same for ALL invocations.\n"
+ L" * If the value is not set, it defaults to (num_cores-1).\n"
+ L"\n"
+ L" The semaphore is automatically released when the program\n"
+ L" completes normally, OR if the program crashes (or even if\n"
+ L" limiter itself crashes).\n";
+ Error(usage.c_str());
+ exit(-1);
+}
+
+// Input command line is assumed to be of the form:
+//
+// thing.exe__PIPE_NAME.exe ...
+//
+// Specifically, wait for a semaphore (whose concurrency is specified by
+// LIMITER_MAXCONCURRENT), and then pass through everything once we have
+// acquired the semaphore.
+//
+// argv[0] is parsed for:
+// * exe_to_shim_including_extension.exe
+// * This could also be a bat or com. Anything that CreateProcess will
+// accept.
+// * "__"
+// * We search for this separator from the end of argv[0], so the exe name
+// could contain a double underscore if necessary.
+// * PIPE_NAME
+// * Can only contain single underscores, not a double underscore.
+// * i.e. HELLO_WORLD_PIPE will work, but HELLO__WORLD_PIPE will not.
+// * This would allow the shimmed exe to contain arbitrary numbers of
+// underscores. We control the pipe name, but not necessarily the thing
+// we're shimming.
+//
+int wmain(int, wchar_t** argv) {
+ tstring shimmed_plus_pipename = argv[0];
+ size_t last_slash = shimmed_plus_pipename.find_last_of(L"/\\");
+ if (last_slash != tstring::npos) {
+ shimmed_plus_pipename = shimmed_plus_pipename.substr(last_slash + 1);
+ }
+
+ size_t separator = shimmed_plus_pipename.rfind(L"__");
+ if (separator == tstring::npos) {
+ Usage(L"Cannot parse argv[0]. No '__' found. "
+ L"Should be like '[...(\\|/)]link.exe__PIPE_NAME.exe'");
+ }
+ tstring shimmed_exe = shimmed_plus_pipename.substr(0, separator);
+ tstring base_pipename = shimmed_plus_pipename.substr(separator + 2);
+
+ size_t dot = base_pipename.find(L'.');
+ if (dot == tstring::npos) {
+ Usage(L"Expected an executable extension in argv[0]. No '.' found.");
+ }
+ base_pipename = base_pipename.substr(0, dot);
+
+ return WaitAndRun(shimmed_exe, base_pipename);
+}
+
diff --git a/chromium/tools/win/new_analyze_warnings/README b/chromium/tools/win/new_analyze_warnings/README
new file mode 100644
index 00000000000..ac85fcd60c8
--- /dev/null
+++ b/chromium/tools/win/new_analyze_warnings/README
@@ -0,0 +1,20 @@
+Use these scripts to retrieve and summarize the latest /analyze warnings
+from buildbot. Run retrieve_latest_warnings.bat.
+
+For best results you should have a Chromium repo that can be synced to the
+state of the tree that produced the latest set of warnings. If you set
+ANALYZE_REPO to point at such a repo then after the warnings are retrieved,
+summarized, and diffed the repo will be synced appropriately. Once this
+process has completed you can investigate the individual warnings. Typical
+usage is like this:
+
+> set ANALYZE_REPO=d:\src\analyze_chromium
+> retrieve_latest_warnings.bat
+
+For each version that is retrieved three files will be created. The _full.txt
+file is the raw results and should usually not be looked at. The _summary.txt
+file is a sorted, grouped, and uniqified copy of the results. The _new.txt
+file is only created if previous results were found and it contains the diffs.
+- analyze0067_full.txt
+- analyze0067_summary.txt
+- analyze0067_new.txt
diff --git a/chromium/tools/win/new_analyze_warnings/retrieve_latest_warnings.bat b/chromium/tools/win/new_analyze_warnings/retrieve_latest_warnings.bat
new file mode 100755
index 00000000000..dfa593808e0
--- /dev/null
+++ b/chromium/tools/win/new_analyze_warnings/retrieve_latest_warnings.bat
@@ -0,0 +1,71 @@
+@rem Copyright (c) 2012 The Chromium Authors. All rights reserved.
+@rem Use of this source code is governed by a BSD-style license that can be
+@rem found in the LICENSE file.
+@setlocal
+
+@if "%ANALYZE_REPO%" == "" goto skipCD
+@rem If ANALYZE_REPO is set then the results are put in that directory
+cd /d "%ANALYZE_REPO%"
+:skipCD
+
+@rem Delete previous data
+del set_analyze_revision.bat
+@rem Retrieve the latest warnings
+python %~dp0retrieve_warnings.py -0
+@if not exist set_analyze_revision.bat goto failure
+
+@rem Set ANALYZE_REVISION and ANALYZE_BUILD_NUMBER
+call set_analyze_revision.bat
+
+@set fullWarnings=analyze%ANALYZE_BUILD_NUMBER%_full.txt
+@set summaryWarnings=analyze%ANALYZE_BUILD_NUMBER%_summary.txt
+
+python %~dp0warnings_by_type.py %fullWarnings%
+
+@set oldSummary=analyze%ANALYZE_PREV_BUILD_NUMBER%_summary.txt
+@set oldFull=analyze%ANALYZE_PREV_BUILD_NUMBER%_full.txt
+@if exist %oldSummary% goto doDiff
+@if exist %oldFull% goto makeOldSummary
+@echo No previous results. To get some earlier results for comparison
+@echo use: %~dp0retrieve_warnings.py %ANALYZE_PREV_BUILD_NUMBER%
+@goto skipDiff
+:makeOldSummary
+python %~dp0warnings_by_type.py %oldFull%
+:doDiff
+@set newWarnings=analyze%ANALYZE_BUILD_NUMBER%_new.txt
+python %~dp0warning_diff.py %oldSummary% %summaryWarnings% >%newWarnings%
+start %newWarnings%
+:skipDiff
+
+@if "%ANALYZE_REPO%" == "" goto notSet
+if not exist "%ANALYZE_REPO%\src" goto notSet
+
+cd src
+
+@echo Retrieving source for the latest build results.
+
+@rem Pull the latest code, go to the same revision that the builder used, and
+@rem then do a gclient sync. echo has to be enabled after each call to git
+@rem because it (erroneously) disables it.
+call git fetch
+@echo on
+call git checkout %ANALYZE_REVISION%
+@echo on
+@rem Display where we are to make sure the git pull worked. Redirect to 'tee'
+@rem to avoid invoking the pager.
+call git log -1 | tee
+@echo on
+call gclient sync --jobs=16
+@echo on
+
+@exit /b
+
+:notSet
+@echo If ANALYZE_REPO is set to point at a repo -- parent of src directory --
+@echo then that repo will be synced to match the build machine.
+@echo See set_analyze_revision.bat for parameters.
+@exit /b
+
+:failure
+@echo Failed to retrieve results from the latest build
+@exit /b
diff --git a/chromium/tools/win/new_analyze_warnings/retrieve_warnings.py b/chromium/tools/win/new_analyze_warnings/retrieve_warnings.py
new file mode 100644
index 00000000000..33c5d407797
--- /dev/null
+++ b/chromium/tools/win/new_analyze_warnings/retrieve_warnings.py
@@ -0,0 +1,100 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+This retrieves the latest warnings from the Chrome /analyze build machine, and
+does a diff.
+This script is intended to be run from retrieve_latest_warnings.bat which
+fills out the functionality.
+"""
+
+import urllib
+import sys
+import glob
+import os
+
+if len(sys.argv) < 2:
+ print "Missing build number."
+ sys.exit(10)
+
+buildNumber = int(sys.argv[1])
+
+baseURL = "http://build.chromium.org/p/chromium.fyi/builders/" + \
+ "Chromium%20Windows%20Analyze/"
+
+print "Finding recent builds on %s" % baseURL
+baseData = urllib.urlopen(baseURL).read()
+recentOff = baseData.find("Recent Builds:")
+buildPattern = 'success</td> <td><a href="' + \
+ '../../builders/Chromium%20Windows%20Analyze/builds/'
+# For some reason I couldn't get regular expressions to work on this data.
+latestBuildOff = baseData.find(buildPattern, recentOff) + len(buildPattern)
+if latestBuildOff < len(buildPattern):
+ print "Couldn't find successful build."
+ sys.exit(10)
+latestEndOff = baseData.find('"', latestBuildOff)
+latestBuildStr = baseData[latestBuildOff:latestEndOff]
+maxBuildNumber = int(latestBuildStr)
+if buildNumber > maxBuildNumber:
+ print "Requested build number (%d) is too high. Maximum is %d." % \
+ (buildNumber, maxBuildNumber)
+ sys.exit(10)
+# Treat negative numbers specially
+if sys.argv[1][0] == '-':
+ buildNumber = maxBuildNumber + buildNumber
+ if buildNumber < 0:
+ buildNumber = 0
+ print "Retrieving build number %d of %d" % (buildNumber, maxBuildNumber)
+
+# Find the last summary results in the current directory
+results = glob.glob("analyze*_summary.txt")
+results.sort()
+previous = "%04d" % (buildNumber - 1)
+if results:
+ possiblePrevious = results[-1][7:11]
+ if int(possiblePrevious) == buildNumber:
+ if len(results) > 1:
+ previous = results[-2][7:11]
+ else:
+ previous = possiblePrevious
+
+dataURL = baseURL + "builds/" + str(buildNumber) + "/steps/compile/logs/stdio"
+revisionURL = baseURL + "builds/" + str(buildNumber)
+
+# Retrieve the revision
+revisionData = urllib.urlopen(revisionURL).read()
+key = "Got Revision</td><td>"
+Off = revisionData.find(key) + len(key)
+if Off > len(key):
+ revision = revisionData[Off: Off + 40]
+ print "Revision is '%s'" % revision
+ print "Environment variables can be set with set_analyze_revision.bat"
+ payload = "set ANALYZE_REVISION=%s\r\n" % revision
+ payload += "set ANALYZE_BUILD_NUMBER=%04d\r\n" % buildNumber
+ payload += "set ANALYZE_PREV_BUILD_NUMBER=%s\r\n" % previous
+ open("set_analyze_revision.bat", "wt").write(payload)
+
+ # Retrieve the raw warning data
+ print "Retrieving raw build results. Please wait."
+ data = urllib.urlopen(dataURL).read()
+ if data.count("status: SUCCESS") == 0:
+ print "Build failed or is incomplete."
+ else:
+ # Fix up "'" and '"'
+ data = data.replace("&#39;", "'").replace("&#34;", '"')
+ # Fix up '<' and '>'
+ data = data.replace("&lt;", "<").replace("&gt;", ">")
+ # Fix up '&'
+ data = data.replace("&amp;", "&")
+ # Fix up random spans
+ data = data.replace('</span><span class="stdout">', '')
+ # Fix up the source paths to match my local /analyze repo
+ if "ANALYZE_REPO" in os.environ:
+ sourcePath = r"e:\b\build\slave\chromium_windows_analyze\build\src"
+ destPath = os.path.join(os.environ["ANALYZE_REPO"], "src")
+ data = data.replace(sourcePath, destPath)
+ outputName = "analyze%04d_full.txt" % buildNumber
+ open(outputName, "w").write(data)
+ print "Done. Data is in %s" % outputName
+else:
+ print "No revision information found!"
diff --git a/chromium/tools/win/new_analyze_warnings/warning_diff.py b/chromium/tools/win/new_analyze_warnings/warning_diff.py
new file mode 100644
index 00000000000..1eeb9234a6f
--- /dev/null
+++ b/chromium/tools/win/new_analyze_warnings/warning_diff.py
@@ -0,0 +1,165 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+This script takes two warning summary files and reports on the new warnings
+and the fixed warnings. The warning summaries are created by
+warnings_by_type.py.
+
+A warning is identified by the source file and the warning text, without the
+Lines component or any line 'xxx' references within the warning.
+All warnings with the same signature are grouped together (duplicates are
+assumed to have been removed already).
+
+If a file contains multiple warnings with the same signature then a report
+will be generated for each warning when the warning count changes.
+"""
+
+import sys
+import re
+
+# Some sample warnings:
+# sctp_bsd_addr.c(182) : warning C28125: The function
+# 'InitializeCriticalSection' must be called from within a try/except block:
+# The requirement might be conditional.
+# exception_handler.cc(813) : warning C6387: 'child_thread_handle' could be '0':
+# this does not adhere to the specification for the function 'CloseHandle'. :
+# Lines: 773, 774, 775, 776, 777, 784, 802, 804, 809, 813
+# unistr.cpp(1823) : warning C28193: 'temporary value' holds a value that must
+# be examined.: Lines: 1823, 1824
+# Note "line '1428'" in this warning, and 'count went from 3 to 2':
+# scheduler.cc(1452) : warning C6246: Local declaration of 'node' hides
+# declaration of the same name in outer scope. For additional information, see
+# previous declaration at line '1428' of 'scheduler.cc'.: count went from 3 to 2
+# Note "line 454" in this warning:
+# gurl.cc(449) : warning C28112: A variable (empty_gurl) which is accessed via
+# an Interlocked function must always be accessed via an Interlocked function.
+# See line 454: It is not always safe to access a variable which is accessed
+# via the Interlocked* family of functions in any other way.
+
+
+warningsToIgnore = [
+ # We assume that memory allocations never fail
+ "C6255", # _alloca indicates failure by raising a stack overflow exception.
+ "C6308", # 'realloc' might return null pointer, leaking memory
+ "C6387", # Param could be '0': this does not adhere to the specification for
+ # the function
+ # I have yet to see errors caused by passing 'char' to isspace and friends
+ "C6330", # 'char' passed as _Param_(1) when 'unsigned char' is required
+ # This warning needs to be in clang to make it effective
+ "C6262", # Function uses too much stack
+ # Triggers on isnan, isinf, and template metaprogramming.
+ "C6334", # sizeof operator applied to an expression with an operator might
+ # yield unexpected results:
+ ]
+
+warningRe = re.compile(r"(.*)\(\d+\) : warning (C\d{4,5}): (.*)")
+warningRefLine = re.compile(r"(.*line ')\d+('.*)")
+warningRefLine2 = re.compile(r"(.*line )\d+(:.*)")
+
+def RemoveExtraneous(line):
+ """
+ Remove extraneous data such as the optional 'Lines:' block at the end of some
+ warnings, and line ending characters.
+ This ensures better matching and makes for less cluttered results.
+ """
+ linesOffset = line.find(": Lines:")
+ if linesOffset >= 0:
+ line = line[:linesOffset]
+ return line.strip()
+
+
+
+def SummarizeWarnings(filename):
+ """
+ This function reads the file and looks for warning messages. It creates a
+ dictionary with the keys being the filename, warning number, and warning text,
+ and returns this.
+ The warning summary at the end is ignored because it doesn't match the regex
+ due to the 'C' being stripped from the warnings, for just this purpose.
+ """
+ warnings = {}
+ for line in open(filename).readlines():
+ line = line.replace(r"\chromium\src", r"\analyze_chromium\src")
+ line = line.replace(r"\chromium2\src", r"\analyze_chromium\src")
+ line = RemoveExtraneous(line)
+ match = warningRe.match(line)
+ if match:
+ file, warningNumber, description = match.groups()
+ ignore = False
+ if warningNumber in warningsToIgnore:
+ ignore = True
+ glesTest = "gles2_implementation_unittest"
+ if warningNumber == "C6001" and line.count(glesTest) > 0:
+ ignore = True # Many spurious warnings of this form
+ if not ignore:
+ # See if the description contains line numbers, so that we can
+ # remove them.
+ matchLine = warningRefLine.match(description)
+ if not matchLine:
+ matchLine = warningRefLine2.match(description)
+ if matchLine:
+ # Replace referenced line numbers with #undef so that they don't cause
+ # mismatches.
+ description = "#undef".join(matchLine.groups())
+ # Look for "the readable size is " and "the writable size is " because
+ # these are often followed by sizes that vary in uninteresting ways,
+ # especially between 32-bit and 64-bit builds.
+ readableText = "the readable size is "
+ writableText = "the writable size is "
+ if description.find(readableText) >= 0:
+ description = description[:description.find(readableText)]
+ if description.find(writableText) >= 0:
+ description = description[:description.find(writableText)]
+
+ key = (file, warningNumber, description)
+ if not key in warnings:
+ warnings[key] = []
+ warnings[key].append(line.strip())
+ return warnings
+
+
+
+def PrintAdditions(oldResults, newResults, message, invert):
+ results = []
+ for key in newResults.keys():
+ if oldResults.has_key(key):
+ # Check to see if the warning count has changed
+ old = oldResults[key]
+ new = newResults[key]
+ if len(new) > len(old):
+ # If the warning count has increased then we don't know which ones are
+ # new. Sigh... Report the new ones, up to some maximum:
+ for warning in newResults[key]:
+ if invert:
+ results.append(warning + ": count went from %d to %d" % \
+ (len(newResults[key]), len(oldResults[key])))
+ else:
+ results.append(warning + ": count went from %d to %d" % \
+ (len(oldResults[key]), len(newResults[key])))
+ else:
+ # Totally new (or fixed) warning.
+ results += newResults[key]
+ # This sort is not perfect because it is alphabetic and it needs to switch to
+ # numeric when it encounters digits. Later.
+ results.sort()
+ print "%s (%d total)" % (message, len(results))
+ for line in results:
+ print line
+
+
+
+if len(sys.argv) < 3:
+ print "Usage: %s oldsummary.txt newsummary.txt" % sys.argv[0]
+ print "Prints the changes in warnings between two /analyze runs."
+ sys.exit(0)
+
+oldFilename = sys.argv[1]
+newFilename = sys.argv[2]
+oldResults = SummarizeWarnings(oldFilename)
+newResults = SummarizeWarnings(newFilename)
+
+PrintAdditions(oldResults, newResults, "New warnings", False)
+print
+print
+PrintAdditions(newResults, oldResults, "Fixed warnings", True)
diff --git a/chromium/tools/win/new_analyze_warnings/warnings_by_type.py b/chromium/tools/win/new_analyze_warnings/warnings_by_type.py
new file mode 100644
index 00000000000..b7974b0d79b
--- /dev/null
+++ b/chromium/tools/win/new_analyze_warnings/warnings_by_type.py
@@ -0,0 +1,160 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""
+Run this script to summarize VC++ warnings and errors. This is normally used
+to summarize the results of Chrome's /analyze runs. Just pass the name of
+a file containing build output -- typically with a _full.txt ending -- and
+a *_summary.txt file will be created. The warnings are grouped
+by warning number, sorted by source file/line, and uniquified.
+In addition a summary is created at the end that records how many unique
+warnings of each type there are, and which warning numbers were the noisiest.
+
+If you pass -codesnippets as the final argument then a few lines of code will
+be extracted for each warning. This is useful for creating summaries of fixed
+warnings.
+"""
+
+import re
+import sys
+import os
+from collections import defaultdict
+
+grabCodeSnippets = False
+
+if len(sys.argv) < 2:
+ print "Missing input filename."
+ sys.exit(10)
+inputName = sys.argv[1]
+outputName = inputName.replace("_full", "_summary")
+if inputName == outputName:
+ outputName += "_summary.txt"
+
+if len(sys.argv) > 2:
+ if sys.argv[2] == "-codesnippets":
+ grabCodeSnippets = True
+  snippetExtent = 3 # How many lines of context to grab, +/-
+ else:
+ print "Unsupported command line option."
+ sys.exit(10)
+
+# Code snippets, typically used for filing bugs or demonstrating issues,
+# don't need detailed line information, so strip it out.
+stripLines = grabCodeSnippets
+
+# Typical warning and error patterns:
+# wspiapi.h(933) : warning C6102: Using 'SystemDir'
+# exception_handler.cc(813) : warning C6387: 'child_thread_handle' could be '0':
+# unistr.cpp(1823) : warning C28193: 'temporary value' holds a value that must
+# be examined.: Lines: 1823, 1824
+# LINK : warning LNK4014: cannot find member object nativec\\malloc.obj
+# hash_set(17): error C2338: <hash_set> is deprecated and will be REMOVED
+# Regex to extract warning/error number for processing warnings.
+# Note that in VS 2015 there is no space before the colon before 'warning'
+warningRe = re.compile(r".*: (warning|error) (C\d{4,5}|LNK\d{4,5}):")
+# Regex to extract file/line/"warning" and line number. This is used when
+# grabbing a snippet of nearby code.
+codeRe = re.compile(r"(.*)\((\d*)\) : .*")
+
+failedBuild = False
+
+# For each warning ID we will have a dictionary of uniquified warning lines
+warnByID = defaultdict(dict)
+# We will also count how many times warnings show up in the raw output, to make
+# it easier to address the really noisy ones.
+warnCountByID = defaultdict(int)
+
+output = open(outputName, "wt")
+# Scan the input building up a database of warnings, discarding duplicates.
+for line in open(inputName).readlines():
+ # Detect and warn on failed builds since their results will be incomplete.
+ if line.count("subcommand failed") > 0:
+ failedBuild = True
+ # Ignore lines without warnings
+ if line.count(": warning ") == 0 and line.count(": error ") == 0:
+ continue
+ # Ignore "Command line warning D9025 : overriding '/WX' with '/WX-'" and
+ # warnings from depot_tools header files
+ if line.count("D9025") > 0 or line.count("depot_tools") > 0:
+ continue
+ # Ignore warnings from unit tests -- some are intentional
+ if line.count("_unittest.cc") > 0:
+ continue
+ match = warningRe.match(line)
+ if match:
+ warningID = match.groups()[1]
+ else:
+ warningID = " " # A few warnings lack a warning ID
+ warnCountByID[warningID] += 1
+ # Insert warnings (warning ID and text) into a dictionary to automatically
+ # purge duplicates.
+ if stripLines:
+ linesIndex = line.find(": Lines:")
+ if linesIndex >= 0:
+ line = line[:linesIndex]
+ warnByID[warningID][line.strip()] = True
+
+if failedBuild:
+ print >>output, "Build did not entirely succeed!"
+
+warnIDsByCount = warnByID.keys()
+# Sort by (post uniquification) warning frequency, least frequent first
+# Sort first by ID, and then by ID frequency. Sort is stable so this gives us
+# consistent results within warning IDs with the same frequency
+warnIDsByCount.sort(lambda x, y: cmp(x, y))
+warnIDsByCount.sort(lambda x, y: cmp(len(warnByID[x]), len(warnByID[y])))
+
+# Print all the warnings, grouped by warning ID and sorted by frequency,
+# then filename
+totalWarnings = 0
+print >>output, "All warnings by type, sorted by count:"
+for warnID in warnIDsByCount:
+ warningLines = warnByID[warnID].keys()
+ totalWarnings += len(warningLines)
+ warningLines.sort() # Sort by file name
+ for warningText in warningLines:
+ print >>output, warningText
+ if grabCodeSnippets:
+ codeMatch = codeRe.match(warningText)
+ if codeMatch:
+ try:
+ file, line = codeMatch.groups()
+ line = int(line)
+ lines = open(file).readlines()
+ lines = lines[line-snippetExtent-1:line+snippetExtent]
+ for line in lines:
+ print >>output, line,
+ except:
+ pass
+ print >>output # Blank separator line between warning types
+
+print >>output, "Warning counts by type, sorted by count:"
+for warnID in warnIDsByCount:
+ warningLines = warnByID[warnID].keys()
+ # Get a sample of this type of warning
+ warningExemplar = warningLines[0]
+ # Clean up the warning exemplar
+ linesIndex = warningExemplar.find(": Lines:")
+ if linesIndex >= 0:
+ warningExemplar = warningExemplar[:linesIndex]
+ warnIDIndex = warningExemplar.find(warnID)
+ if warnIDIndex >= 0:
+ warningExemplar = warningExemplar[warnIDIndex + len(warnID) + 2:]
+ # Print the warning count and warning number -- omitting the leading 'C' or
+ # 'LNK' so that searching on C6001 won't find the summary.
+ count = len(warnByID[warnID])
+ while warnID[0].isalpha():
+ warnID = warnID[1:]
+ print >>output, "%4d: %5s, eg. %s" % (count, warnID, warningExemplar)
+
+print >>output
+print >>output, "%d warnings of %d types" % (totalWarnings, len(warnIDsByCount))
+
+print >>output
+print >>output, "Noisy warning counts in raw output:"
+totalRawCount = 0
+for warnID in warnCountByID.keys():
+ if warnCountByID[warnID] > 500:
+ print >>output, "%5s: %6d" % (warnID[1:], warnCountByID[warnID])
+ totalRawCount += warnCountByID[warnID]
+print >>output, "Total: %6d" % totalRawCount
diff --git a/chromium/tools/win/sizeviewer/README.chromium b/chromium/tools/win/sizeviewer/README.chromium
new file mode 100644
index 00000000000..d891769b0d3
--- /dev/null
+++ b/chromium/tools/win/sizeviewer/README.chromium
@@ -0,0 +1,7 @@
+code_tally.exe binary from
+http://syzygy-archive.commondatastorage.googleapis.com/index.html?path=builds/official/1626/
+
+favicon.png is chart_pie.png unmodified from
+http://www.famfamfam.com/lab/icons/silk/
+
+codemirror.js and clike.js are 4.8.0 unmodified from http://codemirror.net/.
diff --git a/chromium/tools/win/sizeviewer/clike.js b/chromium/tools/win/sizeviewer/clike.js
new file mode 100644
index 00000000000..710953b2298
--- /dev/null
+++ b/chromium/tools/win/sizeviewer/clike.js
@@ -0,0 +1,489 @@
+// CodeMirror, copyright (c) by Marijn Haverbeke and others
+// Distributed under an MIT license: http://codemirror.net/LICENSE
+
+(function(mod) {
+ if (typeof exports == "object" && typeof module == "object") // CommonJS
+ mod(require("../../lib/codemirror"));
+ else if (typeof define == "function" && define.amd) // AMD
+ define(["../../lib/codemirror"], mod);
+ else // Plain browser env
+ mod(CodeMirror);
+})(function(CodeMirror) {
+"use strict";
+
+CodeMirror.defineMode("clike", function(config, parserConfig) {
+ var indentUnit = config.indentUnit,
+ statementIndentUnit = parserConfig.statementIndentUnit || indentUnit,
+ dontAlignCalls = parserConfig.dontAlignCalls,
+ keywords = parserConfig.keywords || {},
+ builtin = parserConfig.builtin || {},
+ blockKeywords = parserConfig.blockKeywords || {},
+ atoms = parserConfig.atoms || {},
+ hooks = parserConfig.hooks || {},
+ multiLineStrings = parserConfig.multiLineStrings,
+ indentStatements = parserConfig.indentStatements !== false;
+ var isOperatorChar = /[+\-*&%=<>!?|\/]/;
+
+ var curPunc;
+
+ function tokenBase(stream, state) {
+ var ch = stream.next();
+ if (hooks[ch]) {
+ var result = hooks[ch](stream, state);
+ if (result !== false) return result;
+ }
+ if (ch == '"' || ch == "'") {
+ state.tokenize = tokenString(ch);
+ return state.tokenize(stream, state);
+ }
+ if (/[\[\]{}\(\),;\:\.]/.test(ch)) {
+ curPunc = ch;
+ return null;
+ }
+ if (/\d/.test(ch)) {
+ stream.eatWhile(/[\w\.]/);
+ return "number";
+ }
+ if (ch == "/") {
+ if (stream.eat("*")) {
+ state.tokenize = tokenComment;
+ return tokenComment(stream, state);
+ }
+ if (stream.eat("/")) {
+ stream.skipToEnd();
+ return "comment";
+ }
+ }
+ if (isOperatorChar.test(ch)) {
+ stream.eatWhile(isOperatorChar);
+ return "operator";
+ }
+ stream.eatWhile(/[\w\$_\xa1-\uffff]/);
+ var cur = stream.current();
+ if (keywords.propertyIsEnumerable(cur)) {
+ if (blockKeywords.propertyIsEnumerable(cur)) curPunc = "newstatement";
+ return "keyword";
+ }
+ if (builtin.propertyIsEnumerable(cur)) {
+ if (blockKeywords.propertyIsEnumerable(cur)) curPunc = "newstatement";
+ return "builtin";
+ }
+ if (atoms.propertyIsEnumerable(cur)) return "atom";
+ return "variable";
+ }
+
+ function tokenString(quote) {
+ return function(stream, state) {
+ var escaped = false, next, end = false;
+ while ((next = stream.next()) != null) {
+ if (next == quote && !escaped) {end = true; break;}
+ escaped = !escaped && next == "\\";
+ }
+ if (end || !(escaped || multiLineStrings))
+ state.tokenize = null;
+ return "string";
+ };
+ }
+
+ function tokenComment(stream, state) {
+ var maybeEnd = false, ch;
+ while (ch = stream.next()) {
+ if (ch == "/" && maybeEnd) {
+ state.tokenize = null;
+ break;
+ }
+ maybeEnd = (ch == "*");
+ }
+ return "comment";
+ }
+
+ function Context(indented, column, type, align, prev) {
+ this.indented = indented;
+ this.column = column;
+ this.type = type;
+ this.align = align;
+ this.prev = prev;
+ }
+ function pushContext(state, col, type) {
+ var indent = state.indented;
+ if (state.context && state.context.type == "statement")
+ indent = state.context.indented;
+ return state.context = new Context(indent, col, type, null, state.context);
+ }
+ function popContext(state) {
+ var t = state.context.type;
+ if (t == ")" || t == "]" || t == "}")
+ state.indented = state.context.indented;
+ return state.context = state.context.prev;
+ }
+
+ // Interface
+
+ return {
+ startState: function(basecolumn) {
+ return {
+ tokenize: null,
+ context: new Context((basecolumn || 0) - indentUnit, 0, "top", false),
+ indented: 0,
+ startOfLine: true
+ };
+ },
+
+ token: function(stream, state) {
+ var ctx = state.context;
+ if (stream.sol()) {
+ if (ctx.align == null) ctx.align = false;
+ state.indented = stream.indentation();
+ state.startOfLine = true;
+ }
+ if (stream.eatSpace()) return null;
+ curPunc = null;
+ var style = (state.tokenize || tokenBase)(stream, state);
+ if (style == "comment" || style == "meta") return style;
+ if (ctx.align == null) ctx.align = true;
+
+ if ((curPunc == ";" || curPunc == ":" || curPunc == ",") && ctx.type == "statement") popContext(state);
+ else if (curPunc == "{") pushContext(state, stream.column(), "}");
+ else if (curPunc == "[") pushContext(state, stream.column(), "]");
+ else if (curPunc == "(") pushContext(state, stream.column(), ")");
+ else if (curPunc == "}") {
+ while (ctx.type == "statement") ctx = popContext(state);
+ if (ctx.type == "}") ctx = popContext(state);
+ while (ctx.type == "statement") ctx = popContext(state);
+ }
+ else if (curPunc == ctx.type) popContext(state);
+ else if (indentStatements &&
+ (((ctx.type == "}" || ctx.type == "top") && curPunc != ';') ||
+ (ctx.type == "statement" && curPunc == "newstatement")))
+ pushContext(state, stream.column(), "statement");
+ state.startOfLine = false;
+ return style;
+ },
+
+ indent: function(state, textAfter) {
+ if (state.tokenize != tokenBase && state.tokenize != null) return CodeMirror.Pass;
+ var ctx = state.context, firstChar = textAfter && textAfter.charAt(0);
+ if (ctx.type == "statement" && firstChar == "}") ctx = ctx.prev;
+ var closing = firstChar == ctx.type;
+ if (ctx.type == "statement") return ctx.indented + (firstChar == "{" ? 0 : statementIndentUnit);
+ else if (ctx.align && (!dontAlignCalls || ctx.type != ")")) return ctx.column + (closing ? 0 : 1);
+ else if (ctx.type == ")" && !closing) return ctx.indented + statementIndentUnit;
+ else return ctx.indented + (closing ? 0 : indentUnit);
+ },
+
+ electricChars: "{}",
+ blockCommentStart: "/*",
+ blockCommentEnd: "*/",
+ lineComment: "//",
+ fold: "brace"
+ };
+});
+
+ function words(str) {
+ var obj = {}, words = str.split(" ");
+ for (var i = 0; i < words.length; ++i) obj[words[i]] = true;
+ return obj;
+ }
+ var cKeywords = "auto if break int case long char register continue return default short do sizeof " +
+ "double static else struct entry switch extern typedef float union for unsigned " +
+ "goto while enum void const signed volatile";
+
+ function cppHook(stream, state) {
+ if (!state.startOfLine) return false;
+ for (;;) {
+ if (stream.skipTo("\\")) {
+ stream.next();
+ if (stream.eol()) {
+ state.tokenize = cppHook;
+ break;
+ }
+ } else {
+ stream.skipToEnd();
+ state.tokenize = null;
+ break;
+ }
+ }
+ return "meta";
+ }
+
+ function cpp11StringHook(stream, state) {
+ stream.backUp(1);
+ // Raw strings.
+ if (stream.match(/(R|u8R|uR|UR|LR)/)) {
+ var match = stream.match(/"([^\s\\()]{0,16})\(/);
+ if (!match) {
+ return false;
+ }
+ state.cpp11RawStringDelim = match[1];
+ state.tokenize = tokenRawString;
+ return tokenRawString(stream, state);
+ }
+ // Unicode strings/chars.
+ if (stream.match(/(u8|u|U|L)/)) {
+ if (stream.match(/["']/, /* eat */ false)) {
+ return "string";
+ }
+ return false;
+ }
+ // Ignore this hook.
+ stream.next();
+ return false;
+ }
+
+ // C#-style strings where "" escapes a quote.
+ function tokenAtString(stream, state) {
+ var next;
+ while ((next = stream.next()) != null) {
+ if (next == '"' && !stream.eat('"')) {
+ state.tokenize = null;
+ break;
+ }
+ }
+ return "string";
+ }
+
+ // C++11 raw string literal is <prefix>"<delim>( anything )<delim>", where
+ // <delim> can be a string up to 16 characters long.
+ function tokenRawString(stream, state) {
+ // Escape characters that have special regex meanings.
+ var delim = state.cpp11RawStringDelim.replace(/[^\w\s]/g, '\\$&');
+ var match = stream.match(new RegExp(".*?\\)" + delim + '"'));
+ if (match)
+ state.tokenize = null;
+ else
+ stream.skipToEnd();
+ return "string";
+ }
+
+ function def(mimes, mode) {
+ if (typeof mimes == "string") mimes = [mimes];
+ var words = [];
+ function add(obj) {
+ if (obj) for (var prop in obj) if (obj.hasOwnProperty(prop))
+ words.push(prop);
+ }
+ add(mode.keywords);
+ add(mode.builtin);
+ add(mode.atoms);
+ if (words.length) {
+ mode.helperType = mimes[0];
+ CodeMirror.registerHelper("hintWords", mimes[0], words);
+ }
+
+ for (var i = 0; i < mimes.length; ++i)
+ CodeMirror.defineMIME(mimes[i], mode);
+ }
+
+ def(["text/x-csrc", "text/x-c", "text/x-chdr"], {
+ name: "clike",
+ keywords: words(cKeywords),
+ blockKeywords: words("case do else for if switch while struct"),
+ atoms: words("null"),
+ hooks: {"#": cppHook},
+ modeProps: {fold: ["brace", "include"]}
+ });
+
+ def(["text/x-c++src", "text/x-c++hdr"], {
+ name: "clike",
+ keywords: words(cKeywords + " asm dynamic_cast namespace reinterpret_cast try bool explicit new " +
+ "static_cast typeid catch operator template typename class friend private " +
+ "this using const_cast inline public throw virtual delete mutable protected " +
+ "wchar_t alignas alignof constexpr decltype nullptr noexcept thread_local final " +
+ "static_assert override"),
+ blockKeywords: words("catch class do else finally for if struct switch try while"),
+ atoms: words("true false null"),
+ hooks: {
+ "#": cppHook,
+ "u": cpp11StringHook,
+ "U": cpp11StringHook,
+ "L": cpp11StringHook,
+ "R": cpp11StringHook
+ },
+ modeProps: {fold: ["brace", "include"]}
+ });
+
+ def("text/x-java", {
+ name: "clike",
+ keywords: words("abstract assert boolean break byte case catch char class const continue default " +
+ "do double else enum extends final finally float for goto if implements import " +
+ "instanceof int interface long native new package private protected public " +
+ "return short static strictfp super switch synchronized this throw throws transient " +
+ "try void volatile while"),
+ blockKeywords: words("catch class do else finally for if switch try while"),
+ atoms: words("true false null"),
+ hooks: {
+ "@": function(stream) {
+ stream.eatWhile(/[\w\$_]/);
+ return "meta";
+ }
+ },
+ modeProps: {fold: ["brace", "import"]}
+ });
+
+ def("text/x-csharp", {
+ name: "clike",
+ keywords: words("abstract as base break case catch checked class const continue" +
+ " default delegate do else enum event explicit extern finally fixed for" +
+ " foreach goto if implicit in interface internal is lock namespace new" +
+ " operator out override params private protected public readonly ref return sealed" +
+ " sizeof stackalloc static struct switch this throw try typeof unchecked" +
+ " unsafe using virtual void volatile while add alias ascending descending dynamic from get" +
+ " global group into join let orderby partial remove select set value var yield"),
+ blockKeywords: words("catch class do else finally for foreach if struct switch try while"),
+ builtin: words("Boolean Byte Char DateTime DateTimeOffset Decimal Double" +
+ " Guid Int16 Int32 Int64 Object SByte Single String TimeSpan UInt16 UInt32" +
+ " UInt64 bool byte char decimal double short int long object" +
+ " sbyte float string ushort uint ulong"),
+ atoms: words("true false null"),
+ hooks: {
+ "@": function(stream, state) {
+ if (stream.eat('"')) {
+ state.tokenize = tokenAtString;
+ return tokenAtString(stream, state);
+ }
+ stream.eatWhile(/[\w\$_]/);
+ return "meta";
+ }
+ }
+ });
+
+ function tokenTripleString(stream, state) {
+ var escaped = false;
+ while (!stream.eol()) {
+ if (!escaped && stream.match('"""')) {
+ state.tokenize = null;
+ break;
+ }
+ escaped = stream.next() != "\\" && !escaped;
+ }
+ return "string";
+ }
+
+ def("text/x-scala", {
+ name: "clike",
+ keywords: words(
+
+ /* scala */
+ "abstract case catch class def do else extends false final finally for forSome if " +
+ "implicit import lazy match new null object override package private protected return " +
+ "sealed super this throw trait try trye type val var while with yield _ : = => <- <: " +
+ "<% >: # @ " +
+
+ /* package scala */
+ "assert assume require print println printf readLine readBoolean readByte readShort " +
+ "readChar readInt readLong readFloat readDouble " +
+
+ "AnyVal App Application Array BufferedIterator BigDecimal BigInt Char Console Either " +
+ "Enumeration Equiv Error Exception Fractional Function IndexedSeq Integral Iterable " +
+ "Iterator List Map Numeric Nil NotNull Option Ordered Ordering PartialFunction PartialOrdering " +
+ "Product Proxy Range Responder Seq Serializable Set Specializable Stream StringBuilder " +
+ "StringContext Symbol Throwable Traversable TraversableOnce Tuple Unit Vector :: #:: " +
+
+ /* package java.lang */
+ "Boolean Byte Character CharSequence Class ClassLoader Cloneable Comparable " +
+ "Compiler Double Exception Float Integer Long Math Number Object Package Pair Process " +
+ "Runtime Runnable SecurityManager Short StackTraceElement StrictMath String " +
+ "StringBuffer System Thread ThreadGroup ThreadLocal Throwable Triple Void"
+ ),
+ multiLineStrings: true,
+ blockKeywords: words("catch class do else finally for forSome if match switch try while"),
+ atoms: words("true false null"),
+ indentStatements: false,
+ hooks: {
+ "@": function(stream) {
+ stream.eatWhile(/[\w\$_]/);
+ return "meta";
+ },
+ '"': function(stream, state) {
+ if (!stream.match('""')) return false;
+ state.tokenize = tokenTripleString;
+ return state.tokenize(stream, state);
+ }
+ }
+ });
+
+ def(["x-shader/x-vertex", "x-shader/x-fragment"], {
+ name: "clike",
+ keywords: words("float int bool void " +
+ "vec2 vec3 vec4 ivec2 ivec3 ivec4 bvec2 bvec3 bvec4 " +
+ "mat2 mat3 mat4 " +
+ "sampler1D sampler2D sampler3D samplerCube " +
+                    "sampler1DShadow sampler2DShadow " +
+ "const attribute uniform varying " +
+ "break continue discard return " +
+ "for while do if else struct " +
+ "in out inout"),
+ blockKeywords: words("for while do if else struct"),
+ builtin: words("radians degrees sin cos tan asin acos atan " +
+ "pow exp log exp2 sqrt inversesqrt " +
+                   "abs sign floor ceil fract mod min max clamp mix step smoothstep " +
+ "length distance dot cross normalize ftransform faceforward " +
+ "reflect refract matrixCompMult " +
+ "lessThan lessThanEqual greaterThan greaterThanEqual " +
+ "equal notEqual any all not " +
+ "texture1D texture1DProj texture1DLod texture1DProjLod " +
+ "texture2D texture2DProj texture2DLod texture2DProjLod " +
+ "texture3D texture3DProj texture3DLod texture3DProjLod " +
+ "textureCube textureCubeLod " +
+ "shadow1D shadow2D shadow1DProj shadow2DProj " +
+ "shadow1DLod shadow2DLod shadow1DProjLod shadow2DProjLod " +
+ "dFdx dFdy fwidth " +
+ "noise1 noise2 noise3 noise4"),
+ atoms: words("true false " +
+ "gl_FragColor gl_SecondaryColor gl_Normal gl_Vertex " +
+ "gl_MultiTexCoord0 gl_MultiTexCoord1 gl_MultiTexCoord2 gl_MultiTexCoord3 " +
+ "gl_MultiTexCoord4 gl_MultiTexCoord5 gl_MultiTexCoord6 gl_MultiTexCoord7 " +
+ "gl_FogCoord " +
+ "gl_Position gl_PointSize gl_ClipVertex " +
+ "gl_FrontColor gl_BackColor gl_FrontSecondaryColor gl_BackSecondaryColor " +
+ "gl_TexCoord gl_FogFragCoord " +
+ "gl_FragCoord gl_FrontFacing " +
+ "gl_FragColor gl_FragData gl_FragDepth " +
+ "gl_ModelViewMatrix gl_ProjectionMatrix gl_ModelViewProjectionMatrix " +
+ "gl_TextureMatrix gl_NormalMatrix gl_ModelViewMatrixInverse " +
+ "gl_ProjectionMatrixInverse gl_ModelViewProjectionMatrixInverse " +
+                 "gl_TextureMatrixTranspose gl_ModelViewMatrixInverseTranspose " +
+ "gl_ProjectionMatrixInverseTranspose " +
+ "gl_ModelViewProjectionMatrixInverseTranspose " +
+ "gl_TextureMatrixInverseTranspose " +
+ "gl_NormalScale gl_DepthRange gl_ClipPlane " +
+ "gl_Point gl_FrontMaterial gl_BackMaterial gl_LightSource gl_LightModel " +
+ "gl_FrontLightModelProduct gl_BackLightModelProduct " +
+ "gl_TextureColor gl_EyePlaneS gl_EyePlaneT gl_EyePlaneR gl_EyePlaneQ " +
+ "gl_FogParameters " +
+ "gl_MaxLights gl_MaxClipPlanes gl_MaxTextureUnits gl_MaxTextureCoords " +
+ "gl_MaxVertexAttribs gl_MaxVertexUniformComponents gl_MaxVaryingFloats " +
+ "gl_MaxVertexTextureImageUnits gl_MaxTextureImageUnits " +
+ "gl_MaxFragmentUniformComponents gl_MaxCombineTextureImageUnits " +
+ "gl_MaxDrawBuffers"),
+ hooks: {"#": cppHook},
+ modeProps: {fold: ["brace", "include"]}
+ });
+
+ def("text/x-nesc", {
+ name: "clike",
+ keywords: words(cKeywords + "as atomic async call command component components configuration event generic " +
+ "implementation includes interface module new norace nx_struct nx_union post provides " +
+ "signal task uses abstract extends"),
+ blockKeywords: words("case do else for if switch while struct"),
+ atoms: words("null"),
+ hooks: {"#": cppHook},
+ modeProps: {fold: ["brace", "include"]}
+ });
+
+ def("text/x-objectivec", {
+ name: "clike",
+    keywords: words(cKeywords + "inline restrict _Bool _Complex _Imaginary BOOL Class bycopy byref id IMP in " +
+ "inout nil oneway out Protocol SEL self super atomic nonatomic retain copy readwrite readonly"),
+    atoms: words("YES NO NULL Nil nil ON OFF"),
+ hooks: {
+ "@": function(stream) {
+ stream.eatWhile(/[\w\$]/);
+ return "keyword";
+ },
+ "#": cppHook
+ },
+ modeProps: {fold: "brace"}
+ });
+
+});
diff --git a/chromium/tools/win/sizeviewer/codemirror.js b/chromium/tools/win/sizeviewer/codemirror.js
new file mode 100644
index 00000000000..e6011f7e66c
--- /dev/null
+++ b/chromium/tools/win/sizeviewer/codemirror.js
@@ -0,0 +1,7922 @@
+// CodeMirror, copyright (c) by Marijn Haverbeke and others
+// Distributed under an MIT license: http://codemirror.net/LICENSE
+
+// This is CodeMirror (http://codemirror.net), a code editor
+// implemented in JavaScript on top of the browser's DOM.
+//
+// You can find some technical background for some of the code below
+// at http://marijnhaverbeke.nl/blog/#cm-internals .
+
+(function(mod) {
+ if (typeof exports == "object" && typeof module == "object") // CommonJS
+ module.exports = mod();
+ else if (typeof define == "function" && define.amd) // AMD
+ return define([], mod);
+ else // Plain browser env
+ this.CodeMirror = mod();
+})(function() {
+ "use strict";
+
+ // BROWSER SNIFFING
+
+ // Kludges for bugs and behavior differences that can't be feature
+ // detected are enabled based on userAgent etc sniffing.
+
+ var gecko = /gecko\/\d/i.test(navigator.userAgent);
+ // ie_uptoN means Internet Explorer version N or lower
+ var ie_upto10 = /MSIE \d/.test(navigator.userAgent);
+ var ie_11up = /Trident\/(?:[7-9]|\d{2,})\..*rv:(\d+)/.exec(navigator.userAgent);
+ var ie = ie_upto10 || ie_11up;
+ var ie_version = ie && (ie_upto10 ? document.documentMode || 6 : ie_11up[1]);
+ var webkit = /WebKit\//.test(navigator.userAgent);
+ var qtwebkit = webkit && /Qt\/\d+\.\d+/.test(navigator.userAgent);
+ var chrome = /Chrome\//.test(navigator.userAgent);
+ var presto = /Opera\//.test(navigator.userAgent);
+ var safari = /Apple Computer/.test(navigator.vendor);
+ var khtml = /KHTML\//.test(navigator.userAgent);
+ var mac_geMountainLion = /Mac OS X 1\d\D([8-9]|\d\d)\D/.test(navigator.userAgent);
+ var phantom = /PhantomJS/.test(navigator.userAgent);
+
+ var ios = /AppleWebKit/.test(navigator.userAgent) && /Mobile\/\w+/.test(navigator.userAgent);
+ // This is woefully incomplete. Suggestions for alternative methods welcome.
+ var mobile = ios || /Android|webOS|BlackBerry|Opera Mini|Opera Mobi|IEMobile/i.test(navigator.userAgent);
+ var mac = ios || /Mac/.test(navigator.platform);
+ var windows = /win/i.test(navigator.platform);
+
+ var presto_version = presto && navigator.userAgent.match(/Version\/(\d*\.\d*)/);
+ if (presto_version) presto_version = Number(presto_version[1]);
+ if (presto_version && presto_version >= 15) { presto = false; webkit = true; }
+ // Some browsers use the wrong event properties to signal cmd/ctrl on OS X
+ var flipCtrlCmd = mac && (qtwebkit || presto && (presto_version == null || presto_version < 12.11));
+ var captureRightClick = gecko || (ie && ie_version >= 9);
+
+ // Optimize some code when these features are not used.
+ var sawReadOnlySpans = false, sawCollapsedSpans = false;
+
+ // EDITOR CONSTRUCTOR
+
+ // A CodeMirror instance represents an editor. This is the object
+ // that user code is usually dealing with.
+
+ function CodeMirror(place, options) {
+ if (!(this instanceof CodeMirror)) return new CodeMirror(place, options);
+
+ this.options = options = options ? copyObj(options) : {};
+ // Determine effective options based on given values and defaults.
+ copyObj(defaults, options, false);
+ setGuttersForLineNumbers(options);
+
+ var doc = options.value;
+ if (typeof doc == "string") doc = new Doc(doc, options.mode);
+ this.doc = doc;
+
+ var display = this.display = new Display(place, doc);
+ display.wrapper.CodeMirror = this;
+ updateGutters(this);
+ themeChanged(this);
+ if (options.lineWrapping)
+ this.display.wrapper.className += " CodeMirror-wrap";
+ if (options.autofocus && !mobile) focusInput(this);
+
+ this.state = {
+ keyMaps: [], // stores maps added by addKeyMap
+ overlays: [], // highlighting overlays, as added by addOverlay
+ modeGen: 0, // bumped when mode/overlay changes, used to invalidate highlighting info
+ overwrite: false, focused: false,
+ suppressEdits: false, // used to disable editing during key handlers when in readOnly mode
+ pasteIncoming: false, cutIncoming: false, // help recognize paste/cut edits in readInput
+ draggingText: false,
+ highlight: new Delayed(), // stores highlight worker timeout
+ keySeq: null // Unfinished key sequence
+ };
+
+ // Override magic textarea content restore that IE sometimes does
+ // on our hidden textarea on reload
+ if (ie && ie_version < 11) setTimeout(bind(resetInput, this, true), 20);
+
+ registerEventHandlers(this);
+ ensureGlobalHandlers();
+
+ startOperation(this);
+ this.curOp.forceUpdate = true;
+ attachDoc(this, doc);
+
+ if ((options.autofocus && !mobile) || activeElt() == display.input)
+ setTimeout(bind(onFocus, this), 20);
+ else
+ onBlur(this);
+
+ for (var opt in optionHandlers) if (optionHandlers.hasOwnProperty(opt))
+ optionHandlers[opt](this, options[opt], Init);
+ maybeUpdateLineNumberWidth(this);
+ for (var i = 0; i < initHooks.length; ++i) initHooks[i](this);
+ endOperation(this);
+ }
+
+ // DISPLAY CONSTRUCTOR
+
+ // The display handles the DOM integration, both for input reading
+ // and content drawing. It holds references to DOM nodes and
+ // display-related state.
+
+ function Display(place, doc) {
+ var d = this;
+
+ // The semihidden textarea that is focused when the editor is
+ // focused, and receives input.
+ var input = d.input = elt("textarea", null, null, "position: absolute; padding: 0; width: 1px; height: 1em; outline: none");
+ // The textarea is kept positioned near the cursor to prevent the
+ // fact that it'll be scrolled into view on input from scrolling
+ // our fake cursor out of view. On webkit, when wrap=off, paste is
+ // very slow. So make the area wide instead.
+ if (webkit) input.style.width = "1000px";
+ else input.setAttribute("wrap", "off");
+ // If border: 0; -- iOS fails to open keyboard (issue #1287)
+ if (ios) input.style.border = "1px solid black";
+ input.setAttribute("autocorrect", "off"); input.setAttribute("autocapitalize", "off"); input.setAttribute("spellcheck", "false");
+
+ // Wraps and hides input textarea
+ d.inputDiv = elt("div", [input], null, "overflow: hidden; position: relative; width: 3px; height: 0px;");
+ // The fake scrollbar elements.
+ d.scrollbarH = elt("div", [elt("div", null, null, "height: 100%; min-height: 1px")], "CodeMirror-hscrollbar");
+ d.scrollbarV = elt("div", [elt("div", null, null, "min-width: 1px")], "CodeMirror-vscrollbar");
+ // Covers bottom-right square when both scrollbars are present.
+ d.scrollbarFiller = elt("div", null, "CodeMirror-scrollbar-filler");
+ // Covers bottom of gutter when coverGutterNextToScrollbar is on
+ // and h scrollbar is present.
+ d.gutterFiller = elt("div", null, "CodeMirror-gutter-filler");
+ // Will contain the actual code, positioned to cover the viewport.
+ d.lineDiv = elt("div", null, "CodeMirror-code");
+ // Elements are added to these to represent selection and cursors.
+ d.selectionDiv = elt("div", null, null, "position: relative; z-index: 1");
+ d.cursorDiv = elt("div", null, "CodeMirror-cursors");
+ // A visibility: hidden element used to find the size of things.
+ d.measure = elt("div", null, "CodeMirror-measure");
+ // When lines outside of the viewport are measured, they are drawn in this.
+ d.lineMeasure = elt("div", null, "CodeMirror-measure");
+ // Wraps everything that needs to exist inside the vertically-padded coordinate system
+ d.lineSpace = elt("div", [d.measure, d.lineMeasure, d.selectionDiv, d.cursorDiv, d.lineDiv],
+ null, "position: relative; outline: none");
+ // Moved around its parent to cover visible view.
+ d.mover = elt("div", [elt("div", [d.lineSpace], "CodeMirror-lines")], null, "position: relative");
+ // Set to the height of the document, allowing scrolling.
+ d.sizer = elt("div", [d.mover], "CodeMirror-sizer");
+ // Behavior of elts with overflow: auto and padding is
+ // inconsistent across browsers. This is used to ensure the
+ // scrollable area is big enough.
+ d.heightForcer = elt("div", null, null, "position: absolute; height: " + scrollerCutOff + "px; width: 1px;");
+ // Will contain the gutters, if any.
+ d.gutters = elt("div", null, "CodeMirror-gutters");
+ d.lineGutter = null;
+ // Actual scrollable element.
+ d.scroller = elt("div", [d.sizer, d.heightForcer, d.gutters], "CodeMirror-scroll");
+ d.scroller.setAttribute("tabIndex", "-1");
+ // The element in which the editor lives.
+ d.wrapper = elt("div", [d.inputDiv, d.scrollbarH, d.scrollbarV,
+ d.scrollbarFiller, d.gutterFiller, d.scroller], "CodeMirror");
+
+ // Work around IE7 z-index bug (not perfect, hence IE7 not really being supported)
+ if (ie && ie_version < 8) { d.gutters.style.zIndex = -1; d.scroller.style.paddingRight = 0; }
+ // Needed to hide big blue blinking cursor on Mobile Safari
+ if (ios) input.style.width = "0px";
+ if (!webkit) d.scroller.draggable = true;
+ // Needed to handle Tab key in KHTML
+ if (khtml) { d.inputDiv.style.height = "1px"; d.inputDiv.style.position = "absolute"; }
+ // Need to set a minimum width to see the scrollbar on IE7 (but must not set it on IE8).
+ if (ie && ie_version < 8) d.scrollbarH.style.minHeight = d.scrollbarV.style.minWidth = "18px";
+
+ if (place) {
+ if (place.appendChild) place.appendChild(d.wrapper);
+ else place(d.wrapper);
+ }
+
+ // Current rendered range (may be bigger than the view window).
+ d.viewFrom = d.viewTo = doc.first;
+ // Information about the rendered lines.
+ d.view = [];
+ // Holds info about a single rendered line when it was rendered
+ // for measurement, while not in view.
+ d.externalMeasured = null;
+ // Empty space (in pixels) above the view
+ d.viewOffset = 0;
+ d.lastWrapHeight = d.lastWrapWidth = 0;
+ d.updateLineNumbers = null;
+
+ // Used to only resize the line number gutter when necessary (when
+ // the amount of lines crosses a boundary that makes its width change)
+ d.lineNumWidth = d.lineNumInnerWidth = d.lineNumChars = null;
+ // See readInput and resetInput
+ d.prevInput = "";
+ // Set to true when a non-horizontal-scrolling line widget is
+ // added. As an optimization, line widget aligning is skipped when
+ // this is false.
+ d.alignWidgets = false;
+ // Flag that indicates whether we expect input to appear real soon
+ // now (after some event like 'keypress' or 'input') and are
+ // polling intensively.
+ d.pollingFast = false;
+ // Self-resetting timeout for the poller
+ d.poll = new Delayed();
+
+ d.cachedCharWidth = d.cachedTextHeight = d.cachedPaddingH = null;
+
+ // Tracks when resetInput has punted to just putting a short
+ // string into the textarea instead of the full selection.
+ d.inaccurateSelection = false;
+
+ // Tracks the maximum line length so that the horizontal scrollbar
+ // can be kept static when scrolling.
+ d.maxLine = null;
+ d.maxLineLength = 0;
+ d.maxLineChanged = false;
+
+ // Used for measuring wheel scrolling granularity
+ d.wheelDX = d.wheelDY = d.wheelStartX = d.wheelStartY = null;
+
+ // True when shift is held down.
+ d.shift = false;
+
+ // Used to track whether anything happened since the context menu
+ // was opened.
+ d.selForContextMenu = null;
+ }
+
+ // STATE UPDATES
+
+ // Used to get the editor into a consistent state again when options change.
+
+ function loadMode(cm) {
+ cm.doc.mode = CodeMirror.getMode(cm.options, cm.doc.modeOption);
+ resetModeState(cm);
+ }
+
+ function resetModeState(cm) {
+ cm.doc.iter(function(line) {
+ if (line.stateAfter) line.stateAfter = null;
+ if (line.styles) line.styles = null;
+ });
+ cm.doc.frontier = cm.doc.first;
+ startWorker(cm, 100);
+ cm.state.modeGen++;
+ if (cm.curOp) regChange(cm);
+ }
+
+ function wrappingChanged(cm) {
+ if (cm.options.lineWrapping) {
+ addClass(cm.display.wrapper, "CodeMirror-wrap");
+ cm.display.sizer.style.minWidth = "";
+ } else {
+ rmClass(cm.display.wrapper, "CodeMirror-wrap");
+ findMaxLine(cm);
+ }
+ estimateLineHeights(cm);
+ regChange(cm);
+ clearCaches(cm);
+ setTimeout(function(){updateScrollbars(cm);}, 100);
+ }
+
+ // Returns a function that estimates the height of a line, to use as
+ // first approximation until the line becomes visible (and is thus
+ // properly measurable).
+ function estimateHeight(cm) {
+ var th = textHeight(cm.display), wrapping = cm.options.lineWrapping;
+ var perLine = wrapping && Math.max(5, cm.display.scroller.clientWidth / charWidth(cm.display) - 3);
+ return function(line) {
+ if (lineIsHidden(cm.doc, line)) return 0;
+
+ var widgetsHeight = 0;
+ if (line.widgets) for (var i = 0; i < line.widgets.length; i++) {
+ if (line.widgets[i].height) widgetsHeight += line.widgets[i].height;
+ }
+
+ if (wrapping)
+ return widgetsHeight + (Math.ceil(line.text.length / perLine) || 1) * th;
+ else
+ return widgetsHeight + th;
+ };
+ }
+
+ function estimateLineHeights(cm) {
+ var doc = cm.doc, est = estimateHeight(cm);
+ doc.iter(function(line) {
+ var estHeight = est(line);
+ if (estHeight != line.height) updateLineHeight(line, estHeight);
+ });
+ }
+
+ function themeChanged(cm) {
+ cm.display.wrapper.className = cm.display.wrapper.className.replace(/\s*cm-s-\S+/g, "") +
+ cm.options.theme.replace(/(^|\s)\s*/g, " cm-s-");
+ clearCaches(cm);
+ }
+
+ function guttersChanged(cm) {
+ updateGutters(cm);
+ regChange(cm);
+ setTimeout(function(){alignHorizontally(cm);}, 20);
+ }
+
+ // Rebuild the gutter elements, ensure the margin to the left of the
+ // code matches their width.
+ function updateGutters(cm) {
+ var gutters = cm.display.gutters, specs = cm.options.gutters;
+ removeChildren(gutters);
+ for (var i = 0; i < specs.length; ++i) {
+ var gutterClass = specs[i];
+ var gElt = gutters.appendChild(elt("div", null, "CodeMirror-gutter " + gutterClass));
+ if (gutterClass == "CodeMirror-linenumbers") {
+ cm.display.lineGutter = gElt;
+ gElt.style.width = (cm.display.lineNumWidth || 1) + "px";
+ }
+ }
+ gutters.style.display = i ? "" : "none";
+ updateGutterSpace(cm);
+ }
+
+ function updateGutterSpace(cm) {
+ var width = cm.display.gutters.offsetWidth;
+ cm.display.sizer.style.marginLeft = width + "px";
+ cm.display.scrollbarH.style.left = cm.options.fixedGutter ? width + "px" : 0;
+ }
+
+ // Compute the character length of a line, taking into account
+ // collapsed ranges (see markText) that might hide parts, and join
+ // other lines onto it.
+ function lineLength(line) {
+ if (line.height == 0) return 0;
+ var len = line.text.length, merged, cur = line;
+ while (merged = collapsedSpanAtStart(cur)) {
+ var found = merged.find(0, true);
+ cur = found.from.line;
+ len += found.from.ch - found.to.ch;
+ }
+ cur = line;
+ while (merged = collapsedSpanAtEnd(cur)) {
+ var found = merged.find(0, true);
+ len -= cur.text.length - found.from.ch;
+ cur = found.to.line;
+ len += cur.text.length - found.to.ch;
+ }
+ return len;
+ }
+
+ // Find the longest line in the document.
+ function findMaxLine(cm) {
+ var d = cm.display, doc = cm.doc;
+ d.maxLine = getLine(doc, doc.first);
+ d.maxLineLength = lineLength(d.maxLine);
+ d.maxLineChanged = true;
+ doc.iter(function(line) {
+ var len = lineLength(line);
+ if (len > d.maxLineLength) {
+ d.maxLineLength = len;
+ d.maxLine = line;
+ }
+ });
+ }
+
+ // Make sure the gutters options contains the element
+ // "CodeMirror-linenumbers" when the lineNumbers option is true.
+ function setGuttersForLineNumbers(options) {
+ var found = indexOf(options.gutters, "CodeMirror-linenumbers");
+ if (found == -1 && options.lineNumbers) {
+ options.gutters = options.gutters.concat(["CodeMirror-linenumbers"]);
+ } else if (found > -1 && !options.lineNumbers) {
+ options.gutters = options.gutters.slice(0);
+ options.gutters.splice(found, 1);
+ }
+ }
+
+ // SCROLLBARS
+
+ function hScrollbarTakesSpace(cm) {
+ return cm.display.scroller.clientHeight - cm.display.wrapper.clientHeight < scrollerCutOff - 3;
+ }
+
+ // Prepare DOM reads needed to update the scrollbars. Done in one
+ // shot to minimize update/measure roundtrips.
+ function measureForScrollbars(cm) {
+ var scroll = cm.display.scroller;
+ return {
+ clientHeight: scroll.clientHeight,
+ barHeight: cm.display.scrollbarV.clientHeight,
+ scrollWidth: scroll.scrollWidth, clientWidth: scroll.clientWidth,
+ hScrollbarTakesSpace: hScrollbarTakesSpace(cm),
+ barWidth: cm.display.scrollbarH.clientWidth,
+ docHeight: Math.round(cm.doc.height + paddingVert(cm.display))
+ };
+ }
+
+ // Re-synchronize the fake scrollbars with the actual size of the
+ // content.
+ function updateScrollbars(cm, measure) {
+ if (!measure) measure = measureForScrollbars(cm);
+ var d = cm.display, sWidth = scrollbarWidth(d.measure);
+ var scrollHeight = measure.docHeight + scrollerCutOff;
+ var needsH = measure.scrollWidth > measure.clientWidth;
+ if (needsH && measure.scrollWidth <= measure.clientWidth + 1 &&
+ sWidth > 0 && !measure.hScrollbarTakesSpace)
+ needsH = false; // (Issue #2562)
+ var needsV = scrollHeight > measure.clientHeight;
+
+ if (needsV) {
+ d.scrollbarV.style.display = "block";
+ d.scrollbarV.style.bottom = needsH ? sWidth + "px" : "0";
+ // A bug in IE8 can cause this value to be negative, so guard it.
+ d.scrollbarV.firstChild.style.height =
+ Math.max(0, scrollHeight - measure.clientHeight + (measure.barHeight || d.scrollbarV.clientHeight)) + "px";
+ } else {
+ d.scrollbarV.style.display = "";
+ d.scrollbarV.firstChild.style.height = "0";
+ }
+ if (needsH) {
+ d.scrollbarH.style.display = "block";
+ d.scrollbarH.style.right = needsV ? sWidth + "px" : "0";
+ d.scrollbarH.firstChild.style.width =
+ (measure.scrollWidth - measure.clientWidth + (measure.barWidth || d.scrollbarH.clientWidth)) + "px";
+ } else {
+ d.scrollbarH.style.display = "";
+ d.scrollbarH.firstChild.style.width = "0";
+ }
+ if (needsH && needsV) {
+ d.scrollbarFiller.style.display = "block";
+ d.scrollbarFiller.style.height = d.scrollbarFiller.style.width = sWidth + "px";
+ } else d.scrollbarFiller.style.display = "";
+ if (needsH && cm.options.coverGutterNextToScrollbar && cm.options.fixedGutter) {
+ d.gutterFiller.style.display = "block";
+ d.gutterFiller.style.height = sWidth + "px";
+ d.gutterFiller.style.width = d.gutters.offsetWidth + "px";
+ } else d.gutterFiller.style.display = "";
+
+ if (!cm.state.checkedOverlayScrollbar && measure.clientHeight > 0) {
+ if (sWidth === 0) {
+ var w = mac && !mac_geMountainLion ? "12px" : "18px";
+ d.scrollbarV.style.minWidth = d.scrollbarH.style.minHeight = w;
+ var barMouseDown = function(e) {
+ if (e_target(e) != d.scrollbarV && e_target(e) != d.scrollbarH)
+ operation(cm, onMouseDown)(e);
+ };
+ on(d.scrollbarV, "mousedown", barMouseDown);
+ on(d.scrollbarH, "mousedown", barMouseDown);
+ }
+ cm.state.checkedOverlayScrollbar = true;
+ }
+ }
+
+ // Compute the lines that are visible in a given viewport (defaults
+ // the the current scroll position). viewport may contain top,
+ // height, and ensure (see op.scrollToPos) properties.
+ function visibleLines(display, doc, viewport) {
+ var top = viewport && viewport.top != null ? Math.max(0, viewport.top) : display.scroller.scrollTop;
+ top = Math.floor(top - paddingTop(display));
+ var bottom = viewport && viewport.bottom != null ? viewport.bottom : top + display.wrapper.clientHeight;
+
+ var from = lineAtHeight(doc, top), to = lineAtHeight(doc, bottom);
+ // Ensure is a {from: {line, ch}, to: {line, ch}} object, and
+ // forces those lines into the viewport (if possible).
+ if (viewport && viewport.ensure) {
+ var ensureFrom = viewport.ensure.from.line, ensureTo = viewport.ensure.to.line;
+ if (ensureFrom < from)
+ return {from: ensureFrom,
+ to: lineAtHeight(doc, heightAtLine(getLine(doc, ensureFrom)) + display.wrapper.clientHeight)};
+ if (Math.min(ensureTo, doc.lastLine()) >= to)
+ return {from: lineAtHeight(doc, heightAtLine(getLine(doc, ensureTo)) - display.wrapper.clientHeight),
+ to: ensureTo};
+ }
+ return {from: from, to: Math.max(to, from + 1)};
+ }
+
+ // LINE NUMBERS
+
+ // Re-align line numbers and gutter marks to compensate for
+ // horizontal scrolling.
+ function alignHorizontally(cm) {
+ var display = cm.display, view = display.view;
+ if (!display.alignWidgets && (!display.gutters.firstChild || !cm.options.fixedGutter)) return;
+ var comp = compensateForHScroll(display) - display.scroller.scrollLeft + cm.doc.scrollLeft;
+ var gutterW = display.gutters.offsetWidth, left = comp + "px";
+ for (var i = 0; i < view.length; i++) if (!view[i].hidden) {
+ if (cm.options.fixedGutter && view[i].gutter)
+ view[i].gutter.style.left = left;
+ var align = view[i].alignable;
+ if (align) for (var j = 0; j < align.length; j++)
+ align[j].style.left = left;
+ }
+ if (cm.options.fixedGutter)
+ display.gutters.style.left = (comp + gutterW) + "px";
+ }
+
+ // Used to ensure that the line number gutter is still the right
+ // size for the current document size. Returns true when an update
+ // is needed.
+ function maybeUpdateLineNumberWidth(cm) {
+ if (!cm.options.lineNumbers) return false;
+ var doc = cm.doc, last = lineNumberFor(cm.options, doc.first + doc.size - 1), display = cm.display;
+ if (last.length != display.lineNumChars) {
+ var test = display.measure.appendChild(elt("div", [elt("div", last)],
+ "CodeMirror-linenumber CodeMirror-gutter-elt"));
+ var innerW = test.firstChild.offsetWidth, padding = test.offsetWidth - innerW;
+ display.lineGutter.style.width = "";
+ display.lineNumInnerWidth = Math.max(innerW, display.lineGutter.offsetWidth - padding);
+ display.lineNumWidth = display.lineNumInnerWidth + padding;
+ display.lineNumChars = display.lineNumInnerWidth ? last.length : -1;
+ display.lineGutter.style.width = display.lineNumWidth + "px";
+ updateGutterSpace(cm);
+ return true;
+ }
+ return false;
+ }
+
+ function lineNumberFor(options, i) {
+ return String(options.lineNumberFormatter(i + options.firstLineNumber));
+ }
+
+ // Computes display.scroller.scrollLeft + display.gutters.offsetWidth,
+ // but using getBoundingClientRect to get a sub-pixel-accurate
+ // result.
+ function compensateForHScroll(display) {
+ return display.scroller.getBoundingClientRect().left - display.sizer.getBoundingClientRect().left;
+ }
+
+ // DISPLAY DRAWING
+
+ function DisplayUpdate(cm, viewport, force) {
+ var display = cm.display;
+
+ this.viewport = viewport;
+ // Store some values that we'll need later (but don't want to force a relayout for)
+ this.visible = visibleLines(display, cm.doc, viewport);
+ this.editorIsHidden = !display.wrapper.offsetWidth;
+ this.wrapperHeight = display.wrapper.clientHeight;
+ this.wrapperWidth = display.wrapper.clientWidth;
+ this.oldViewFrom = display.viewFrom; this.oldViewTo = display.viewTo;
+ this.oldScrollerWidth = display.scroller.clientWidth;
+ this.force = force;
+ this.dims = getDimensions(cm);
+ }
+
+ // Does the actual updating of the line display. Bails out
+ // (returning false) when there is nothing to be done and forced is
+ // false.
+ function updateDisplayIfNeeded(cm, update) {
+ var display = cm.display, doc = cm.doc;
+ if (update.editorIsHidden) {
+ resetView(cm);
+ return false;
+ }
+
+ // Bail out if the visible area is already rendered and nothing changed.
+ if (!update.force &&
+ update.visible.from >= display.viewFrom && update.visible.to <= display.viewTo &&
+ (display.updateLineNumbers == null || display.updateLineNumbers >= display.viewTo) &&
+ countDirtyView(cm) == 0)
+ return false;
+
+ if (maybeUpdateLineNumberWidth(cm)) {
+ resetView(cm);
+ update.dims = getDimensions(cm);
+ }
+
+ // Compute a suitable new viewport (from & to)
+ var end = doc.first + doc.size;
+ var from = Math.max(update.visible.from - cm.options.viewportMargin, doc.first);
+ var to = Math.min(end, update.visible.to + cm.options.viewportMargin);
+ if (display.viewFrom < from && from - display.viewFrom < 20) from = Math.max(doc.first, display.viewFrom);
+ if (display.viewTo > to && display.viewTo - to < 20) to = Math.min(end, display.viewTo);
+ if (sawCollapsedSpans) {
+ from = visualLineNo(cm.doc, from);
+ to = visualLineEndNo(cm.doc, to);
+ }
+
+ var different = from != display.viewFrom || to != display.viewTo ||
+ display.lastWrapHeight != update.wrapperHeight || display.lastWrapWidth != update.wrapperWidth;
+ adjustView(cm, from, to);
+
+ display.viewOffset = heightAtLine(getLine(cm.doc, display.viewFrom));
+ // Position the mover div to align with the current scroll position
+ cm.display.mover.style.top = display.viewOffset + "px";
+
+ var toUpdate = countDirtyView(cm);
+ if (!different && toUpdate == 0 && !update.force &&
+ (display.updateLineNumbers == null || display.updateLineNumbers >= display.viewTo))
+ return false;
+
+ // For big changes, we hide the enclosing element during the
+ // update, since that speeds up the operations on most browsers.
+ var focused = activeElt();
+ if (toUpdate > 4) display.lineDiv.style.display = "none";
+ patchDisplay(cm, display.updateLineNumbers, update.dims);
+ if (toUpdate > 4) display.lineDiv.style.display = "";
+ // There might have been a widget with a focused element that got
+ // hidden or updated, if so re-focus it.
+ if (focused && activeElt() != focused && focused.offsetHeight) focused.focus();
+
+ // Prevent selection and cursors from interfering with the scroll
+ // width.
+ removeChildren(display.cursorDiv);
+ removeChildren(display.selectionDiv);
+
+ if (different) {
+ display.lastWrapHeight = update.wrapperHeight;
+ display.lastWrapWidth = update.wrapperWidth;
+ startWorker(cm, 400);
+ }
+
+ display.updateLineNumbers = null;
+
+ return true;
+ }
+
+ function postUpdateDisplay(cm, update) {
+ var force = update.force, viewport = update.viewport;
+ for (var first = true;; first = false) {
+ if (first && cm.options.lineWrapping && update.oldScrollerWidth != cm.display.scroller.clientWidth) {
+ force = true;
+ } else {
+ force = false;
+ // Clip forced viewport to actual scrollable area.
+ if (viewport && viewport.top != null)
+ viewport = {top: Math.min(cm.doc.height + paddingVert(cm.display) - scrollerCutOff -
+ cm.display.scroller.clientHeight, viewport.top)};
+ // Updated line heights might result in the drawn area not
+ // actually covering the viewport. Keep looping until it does.
+ update.visible = visibleLines(cm.display, cm.doc, viewport);
+ if (update.visible.from >= cm.display.viewFrom && update.visible.to <= cm.display.viewTo)
+ break;
+ }
+ if (!updateDisplayIfNeeded(cm, update)) break;
+ updateHeightsInViewport(cm);
+ var barMeasure = measureForScrollbars(cm);
+ updateSelection(cm);
+ setDocumentHeight(cm, barMeasure);
+ updateScrollbars(cm, barMeasure);
+ }
+
+ signalLater(cm, "update", cm);
+ if (cm.display.viewFrom != update.oldViewFrom || cm.display.viewTo != update.oldViewTo)
+ signalLater(cm, "viewportChange", cm, cm.display.viewFrom, cm.display.viewTo);
+ }
+
// Run a full, synchronous display update for the given viewport:
// redraw if needed, then re-measure heights, selection and scrollbars.
function updateDisplaySimple(cm, viewport) {
  var update = new DisplayUpdate(cm, viewport);
  if (updateDisplayIfNeeded(cm, update)) {
    updateHeightsInViewport(cm);
    postUpdateDisplay(cm, update);
    var barMeasure = measureForScrollbars(cm);
    updateSelection(cm);
    setDocumentHeight(cm, barMeasure);
    updateScrollbars(cm, barMeasure);
  }
}
+
// Push the measured document height into the DOM: the sizer and
// height-forcer enforce the scrollable height, and the gutter is
// stretched to at least the visible client height.
function setDocumentHeight(cm, measure) {
  cm.display.sizer.style.minHeight = cm.display.heightForcer.style.top = measure.docHeight + "px";
  cm.display.gutters.style.height = Math.max(measure.docHeight, measure.clientHeight - scrollerCutOff) + "px";
}
+
function checkForWebkitWidthBug(cm, measure) {
  // Work around Webkit bug where it sometimes reserves space for a
  // non-existing phantom scrollbar in the scroller (Issue #2420).
  // When sizer + gutters come out narrower than the scroller, reset
  // the forced heights so the phantom space collapses.
  if (cm.display.sizer.offsetWidth + cm.display.gutters.offsetWidth < cm.display.scroller.clientWidth - 1) {
    cm.display.sizer.style.minHeight = cm.display.heightForcer.style.top = "0px";
    cm.display.gutters.style.height = measure.docHeight + "px";
  }
}
+
// Read the actual heights of the rendered lines, and update their
// stored heights to match.
function updateHeightsInViewport(cm) {
  var display = cm.display;
  var prevBottom = display.lineDiv.offsetTop;
  for (var i = 0; i < display.view.length; i++) {
    var cur = display.view[i], height;
    if (cur.hidden) continue;
    if (ie && ie_version < 8) {
      // Old IE: derive the height from successive offsetTop /
      // offsetHeight readings instead of getBoundingClientRect.
      var bot = cur.node.offsetTop + cur.node.offsetHeight;
      height = bot - prevBottom;
      prevBottom = bot;
    } else {
      var box = cur.node.getBoundingClientRect();
      height = box.bottom - box.top;
    }
    var diff = cur.line.height - height;
    // A near-zero measurement means the node isn't laid out yet;
    // fall back to the default text height.
    if (height < 2) height = textHeight(display);
    if (diff > .001 || diff < -.001) {
      updateLineHeight(cur.line, height);
      updateWidgetHeight(cur.line);
      // Also refresh widget heights on lines merged into this view.
      if (cur.rest) for (var j = 0; j < cur.rest.length; j++)
        updateWidgetHeight(cur.rest[j]);
    }
  }
}
+
// Read and store the height of line widgets associated with the
// given line.
function updateWidgetHeight(line) {
  var widgets = line.widgets;
  if (!widgets) return;
  for (var i = 0; i < widgets.length; ++i)
    widgets[i].height = widgets[i].node.offsetHeight;
}
+
// Do a bulk-read of the DOM positions and sizes needed to draw the
// view, so that we don't interleave reading and writing to the DOM.
// Returns per-gutter left offsets and widths plus wrapper metrics.
function getDimensions(cm) {
  var d = cm.display, left = {}, width = {};
  var gutterLeft = d.gutters.clientLeft;
  // One gutter DOM node per configured gutter, in the same order as
  // cm.options.gutters.
  for (var n = d.gutters.firstChild, i = 0; n; n = n.nextSibling, ++i) {
    left[cm.options.gutters[i]] = n.offsetLeft + n.clientLeft + gutterLeft;
    width[cm.options.gutters[i]] = n.clientWidth;
  }
  return {fixedPos: compensateForHScroll(d),
          gutterTotalWidth: d.gutters.offsetWidth,
          gutterLeft: left,
          gutterWidth: width,
          wrapperWidth: d.wrapper.clientWidth};
}
+
// Sync the actual display DOM structure with display.view, removing
// nodes for lines that are no longer in view, and creating the ones
// that are not there yet, and updating the ones that are out of
// date.
function patchDisplay(cm, updateNumbersFrom, dims) {
  var display = cm.display, lineNumbers = cm.options.lineNumbers;
  var container = display.lineDiv, cur = container.firstChild;

  // Remove a node, returning its next sibling so iteration continues.
  function rm(node) {
    var next = node.nextSibling;
    // Works around a throw-scroll bug in OS X Webkit
    if (webkit && mac && cm.display.currentWheelTarget == node)
      node.style.display = "none";
    else
      node.parentNode.removeChild(node);
    return next;
  }

  var view = display.view, lineN = display.viewFrom;
  // Loop over the elements in the view, syncing cur (the DOM nodes
  // in display.lineDiv) with the view as we go.
  for (var i = 0; i < view.length; i++) {
    var lineView = view[i];
    if (lineView.hidden) {
    } else if (!lineView.node) { // Not drawn yet
      var node = buildLineElement(cm, lineView, lineN, dims);
      container.insertBefore(node, cur);
    } else { // Already drawn
      // Drop any stale DOM nodes that precede this line's node.
      while (cur != lineView.node) cur = rm(cur);
      var updateNumber = lineNumbers && updateNumbersFrom != null &&
        updateNumbersFrom <= lineN && lineView.lineNumber;
      if (lineView.changes) {
        // A pending "gutter" change redraws the number anyway.
        if (indexOf(lineView.changes, "gutter") > -1) updateNumber = false;
        updateLineForChanges(cm, lineView, lineN, dims);
      }
      if (updateNumber) {
        removeChildren(lineView.lineNumber);
        lineView.lineNumber.appendChild(document.createTextNode(lineNumberFor(cm.options, lineN)));
      }
      cur = lineView.node.nextSibling;
    }
    lineN += lineView.size;
  }
  // Anything left over after the view is exhausted is out of view.
  while (cur) cur = rm(cur);
}
+
// When an aspect of a line changes, a string is added to
// lineView.changes ("text", "gutter", "class" or "widget"). This
// redraws the relevant part of the line's DOM structure for each
// recorded change, then clears the change list.
function updateLineForChanges(cm, lineView, lineN, dims) {
  var changes = lineView.changes;
  for (var idx = 0; idx < changes.length; idx++) {
    switch (changes[idx]) {
      case "text": updateLineText(cm, lineView); break;
      case "gutter": updateLineGutter(cm, lineView, lineN, dims); break;
      case "class": updateLineClasses(lineView); break;
      case "widget": updateLineWidgets(lineView, dims); break;
    }
  }
  lineView.changes = null;
}
+
// Lines with gutter elements, widgets or a background class need to
// be wrapped, and have the extra elements added to the wrapper div.
// Returns the wrapper node (creating it on first call).
function ensureLineWrapped(lineView) {
  // node == text means the line has not been wrapped yet.
  if (lineView.node == lineView.text) {
    lineView.node = elt("div", null, null, "position: relative");
    if (lineView.text.parentNode)
      lineView.text.parentNode.replaceChild(lineView.node, lineView.text);
    lineView.node.appendChild(lineView.text);
    // Old IE needs an explicit z-index for the wrapper to stack right.
    if (ie && ie_version < 8) lineView.node.style.zIndex = 2;
  }
  return lineView.node;
}
+
// Create, update or remove the element showing the line's background
// class (the view's own bgClass combined with the line's bgClass).
function updateLineBackground(lineView) {
  var cls = lineView.bgClass ? lineView.bgClass + " " + (lineView.line.bgClass || "") : lineView.line.bgClass;
  if (cls) cls += " CodeMirror-linebackground";
  if (lineView.background) {
    // Update in place, or remove when no class remains.
    if (cls) lineView.background.className = cls;
    else { lineView.background.parentNode.removeChild(lineView.background); lineView.background = null; }
  } else if (cls) {
    var wrap = ensureLineWrapped(lineView);
    lineView.background = wrap.insertBefore(elt("div", null, cls), wrap.firstChild);
  }
}
+
// Wrapper around buildLineContent which will reuse the structure
// in display.externalMeasured when possible.
function getLineContent(cm, lineView) {
  var external = cm.display.externalMeasured;
  if (!external || external.line != lineView.line)
    return buildLineContent(cm, lineView);
  // The externally measured node belongs to this line: consume it.
  cm.display.externalMeasured = null;
  lineView.measure = external.measure;
  return external.built;
}
+
// Redraw the line's text. Interacts with the background and text
// classes because the mode may output tokens that influence these
// classes.
function updateLineText(cm, lineView) {
  var cls = lineView.text.className;
  var built = getLineContent(cm, lineView);
  // If the text node doubled as the wrapper, the new pre takes over.
  if (lineView.text == lineView.node) lineView.node = built.pre;
  lineView.text.parentNode.replaceChild(built.pre, lineView.text);
  lineView.text = built.pre;
  if (built.bgClass != lineView.bgClass || built.textClass != lineView.textClass) {
    lineView.bgClass = built.bgClass;
    lineView.textClass = built.textClass;
    updateLineClasses(lineView);
  } else if (cls) {
    // Classes unchanged: restore the className the old node carried.
    lineView.text.className = cls;
  }
}
+
// Re-apply the line's wrap, background and text classes to the DOM.
function updateLineClasses(lineView) {
  updateLineBackground(lineView);
  if (lineView.line.wrapClass)
    ensureLineWrapped(lineView).className = lineView.line.wrapClass;
  else if (lineView.node != lineView.text)
    // Wrapped but no wrap class: clear any stale class.
    lineView.node.className = "";
  var textClass = lineView.textClass ? lineView.textClass + " " + (lineView.line.textClass || "") : lineView.line.textClass;
  lineView.text.className = textClass || "";
}
+
// Rebuild the gutter element (line number plus gutter markers) for a
// line, replacing any previous one.
function updateLineGutter(cm, lineView, lineN, dims) {
  if (lineView.gutter) {
    lineView.node.removeChild(lineView.gutter);
    lineView.gutter = null;
  }
  var markers = lineView.line.gutterMarkers;
  if (cm.options.lineNumbers || markers) {
    var wrap = ensureLineWrapped(lineView);
    // With a fixed gutter the wrapper sits at the current horizontal
    // scroll compensation; otherwise it hangs left of the text.
    var gutterWrap = lineView.gutter =
      wrap.insertBefore(elt("div", null, "CodeMirror-gutter-wrapper", "left: " +
                            (cm.options.fixedGutter ? dims.fixedPos : -dims.gutterTotalWidth) +
                            "px; width: " + dims.gutterTotalWidth + "px"),
                        lineView.text);
    if (lineView.line.gutterClass)
      gutterWrap.className += " " + lineView.line.gutterClass;
    // A marker in the line-number gutter suppresses the number itself.
    if (cm.options.lineNumbers && (!markers || !markers["CodeMirror-linenumbers"]))
      lineView.lineNumber = gutterWrap.appendChild(
        elt("div", lineNumberFor(cm.options, lineN),
            "CodeMirror-linenumber CodeMirror-gutter-elt",
            "left: " + dims.gutterLeft["CodeMirror-linenumbers"] + "px; width: "
            + cm.display.lineNumInnerWidth + "px"));
    if (markers) for (var k = 0; k < cm.options.gutters.length; ++k) {
      var id = cm.options.gutters[k], found = markers.hasOwnProperty(id) && markers[id];
      if (found)
        gutterWrap.appendChild(elt("div", [found], "CodeMirror-gutter-elt", "left: " +
                                   dims.gutterLeft[id] + "px; width: " + dims.gutterWidth[id] + "px"));
    }
  }
}
+
// Redraw a line's widgets: remove every existing widget node from the
// line's wrapper, then re-insert them from the widget list.
function updateLineWidgets(lineView, dims) {
  if (lineView.alignable) lineView.alignable = null;
  // Fix: 'next' was previously declared twice (once in the for-header
  // and again with 'var' in the body), redundantly shadowing itself;
  // declare it once in the header and only assign in the body.
  for (var node = lineView.node.firstChild, next; node; node = next) {
    next = node.nextSibling;
    if (node.className == "CodeMirror-linewidget")
      lineView.node.removeChild(node);
  }
  insertLineWidgets(lineView, dims);
}
+
// Build a line's DOM representation from scratch. Returns the root
// node for the line (the <pre> itself, or a wrapper div once classes,
// gutter or widgets force wrapping).
function buildLineElement(cm, lineView, lineN, dims) {
  var built = getLineContent(cm, lineView);
  lineView.text = lineView.node = built.pre;
  if (built.bgClass) lineView.bgClass = built.bgClass;
  if (built.textClass) lineView.textClass = built.textClass;

  updateLineClasses(lineView);
  updateLineGutter(cm, lineView, lineN, dims);
  insertLineWidgets(lineView, dims);
  return lineView.node;
}
+
// A lineView may contain multiple logical lines (when merged by
// collapsed spans). The widgets for all of them need to be drawn:
// the head line first (which may place widgets above), then the rest.
function insertLineWidgets(lineView, dims) {
  insertLineWidgetsFor(lineView.line, lineView, dims, true);
  var rest = lineView.rest;
  if (rest) for (var n = 0; n < rest.length; n++)
    insertLineWidgetsFor(rest[n], lineView, dims, false);
}
+
// Insert the widget nodes for one logical line into the line view's
// wrapper; allowAbove is only honored for the head line of the view.
function insertLineWidgetsFor(line, lineView, dims, allowAbove) {
  if (!line.widgets) return;
  var wrap = ensureLineWrapped(lineView);
  for (var i = 0, ws = line.widgets; i < ws.length; ++i) {
    var widget = ws[i], node = elt("div", [widget.node], "CodeMirror-linewidget");
    // Widgets swallow mouse events unless they opted in to them.
    if (!widget.handleMouseEvents) node.ignoreEvents = true;
    positionLineWidget(widget, node, lineView, dims);
    if (allowAbove && widget.above)
      wrap.insertBefore(node, lineView.gutter || lineView.text);
    else
      wrap.appendChild(node);
    signalLater(widget, "redraw");
  }
}
+
// Set the horizontal position, width and stacking of a widget node
// according to its noHScroll / coverGutter options.
function positionLineWidget(widget, node, lineView, dims) {
  if (widget.noHScroll) {
    // Register the node so it can be re-aligned on horizontal scroll.
    (lineView.alignable || (lineView.alignable = [])).push(node);
    var width = dims.wrapperWidth;
    node.style.left = dims.fixedPos + "px";
    if (!widget.coverGutter) {
      width -= dims.gutterTotalWidth;
      node.style.paddingLeft = dims.gutterTotalWidth + "px";
    }
    node.style.width = width + "px";
  }
  if (widget.coverGutter) {
    node.style.zIndex = 5;
    node.style.position = "relative";
    if (!widget.noHScroll) node.style.marginLeft = -dims.gutterTotalWidth + "px";
  }
}
+
// POSITION OBJECT

// A Pos instance represents a position within the text: a line
// number plus a character offset within that line. Callable with or
// without `new`.
var Pos = CodeMirror.Pos = function(line, ch) {
  if (!(this instanceof Pos)) return new Pos(line, ch);
  this.line = line; this.ch = ch;
};

// Compare two positions, return 0 if they are the same, a negative
// number when a is less, and a positive number otherwise.
var cmp = CodeMirror.cmpPos = function(a, b) { return a.line - b.line || a.ch - b.ch; };

// Shallow copy of a position.
function copyPos(x) {return Pos(x.line, x.ch);}
// Later / earlier of two positions in document order.
function maxPos(a, b) { return cmp(a, b) < 0 ? b : a; }
function minPos(a, b) { return cmp(a, b) < 0 ? a : b; }
+
// SELECTION / CURSOR

// Selection objects are immutable. A new one is created every time
// the selection changes. A selection is one or more non-overlapping
// (and non-touching) ranges, sorted, and an integer that indicates
// which one is the primary selection (the one that's scrolled into
// view, that getCursor returns, etc).
function Selection(ranges, primIndex) {
  this.ranges = ranges;
  this.primIndex = primIndex;
}

Selection.prototype = {
  // The primary (most important) range.
  primary: function() { return this.ranges[this.primIndex]; },
  // Structural equality: same primary index and identical endpoints
  // for every range, in order.
  equals: function(other) {
    if (other == this) return true;
    if (other.primIndex != this.primIndex || other.ranges.length != this.ranges.length) return false;
    for (var n = 0; n < this.ranges.length; n++) {
      var mine = this.ranges[n], theirs = other.ranges[n];
      if (cmp(mine.anchor, theirs.anchor) != 0 || cmp(mine.head, theirs.head) != 0) return false;
    }
    return true;
  },
  // Copy with fresh Range and Pos objects.
  deepCopy: function() {
    var copies = [];
    for (var n = 0; n < this.ranges.length; n++)
      copies[n] = new Range(copyPos(this.ranges[n].anchor), copyPos(this.ranges[n].head));
    return new Selection(copies, this.primIndex);
  },
  // True when at least one range is non-empty.
  somethingSelected: function() {
    for (var n = 0; n < this.ranges.length; n++)
      if (!this.ranges[n].empty()) return true;
    return false;
  },
  // Index of the range containing [pos, end] (end defaults to pos),
  // or -1 when no range contains it.
  contains: function(pos, end) {
    if (!end) end = pos;
    for (var n = 0; n < this.ranges.length; n++) {
      var r = this.ranges[n];
      if (cmp(end, r.from()) >= 0 && cmp(pos, r.to()) <= 0) return n;
    }
    return -1;
  }
};
+
// A single selection range: an anchor (the fixed end) and a head
// (the moving end, where the cursor is drawn).
function Range(anchor, head) {
  this.anchor = anchor; this.head = head;
}

Range.prototype = {
  // Lower of the two endpoints in document order.
  from: function() { return minPos(this.anchor, this.head); },
  // Higher of the two endpoints in document order.
  to: function() { return maxPos(this.anchor, this.head); },
  // True when anchor and head coincide (a cursor, not a selection).
  empty: function() {
    var a = this.anchor, h = this.head;
    return h.line == a.line && h.ch == a.ch;
  }
};
+
// Take an unsorted, potentially overlapping set of ranges, and
// build a selection out of it. 'Consumes' ranges array (modifying
// it).
function normalizeSelection(ranges, primIndex) {
  var prim = ranges[primIndex];
  ranges.sort(function(a, b) { return cmp(a.from(), b.from()); });
  // Track where the primary range ended up after sorting.
  primIndex = indexOf(ranges, prim);
  for (var i = 1; i < ranges.length; i++) {
    var cur = ranges[i], prev = ranges[i - 1];
    if (cmp(prev.to(), cur.from()) >= 0) {
      // Overlapping or touching: merge the pair into one range,
      // preserving an inverted (head-before-anchor) orientation.
      var from = minPos(prev.from(), cur.from()), to = maxPos(prev.to(), cur.to());
      var inv = prev.empty() ? cur.from() == cur.head : prev.from() == prev.head;
      if (i <= primIndex) --primIndex;
      // Replace both ranges with the merged one and re-examine slot i.
      ranges.splice(--i, 2, new Range(inv ? to : from, inv ? from : to));
    }
  }
  return new Selection(ranges, primIndex);
}

// Build a single-range selection; head defaults to anchor (a cursor).
function simpleSelection(anchor, head) {
  return new Selection([new Range(anchor, head || anchor)], 0);
}
+
// Most of the external API clips given positions to make sure they
// actually exist within the document.
function clipLine(doc, n) {return Math.max(doc.first, Math.min(n, doc.first + doc.size - 1));}
function clipPos(doc, pos) {
  if (pos.line < doc.first) return Pos(doc.first, 0);
  var lastLine = doc.first + doc.size - 1;
  if (pos.line > lastLine) return Pos(lastLine, getLine(doc, lastLine).text.length);
  return clipToLen(pos, getLine(doc, pos.line).text.length);
}
// Clamp pos.ch into [0, linelen]; a null ch also maps to line end.
function clipToLen(pos, linelen) {
  var ch = pos.ch;
  if (ch == null || ch > linelen) return Pos(pos.line, linelen);
  if (ch < 0) return Pos(pos.line, 0);
  return pos;
}
function isLine(doc, l) {return l >= doc.first && l < doc.first + doc.size;}
function clipPosArray(doc, array) {
  var clipped = [];
  for (var i = 0; i < array.length; i++) clipped[i] = clipPos(doc, array[i]);
  return clipped;
}
+
// SELECTION UPDATES

// The 'scroll' parameter given to many of these indicated whether
// the new cursor position should be scrolled into view after
// modifying the selection.

// If shift is held or the extend flag is set, extends a range to
// include a given position (and optionally a second position).
// Otherwise, simply returns the range between the given positions.
// Used for cursor motion and such.
function extendRange(doc, range, head, other) {
  if (doc.cm && doc.cm.display.shift || doc.extend) {
    var anchor = range.anchor;
    if (other) {
      // Keep head/other on the same side of the anchor: when they
      // straddle it, move the anchor so it stays between them.
      var posBefore = cmp(head, anchor) < 0;
      if (posBefore != (cmp(other, anchor) < 0)) {
        anchor = head;
        head = other;
      } else if (posBefore != (cmp(head, other) < 0)) {
        head = other;
      }
    }
    return new Range(anchor, head);
  } else {
    // Not extending: a plain range from other (or head) to head.
    return new Range(other || head, head);
  }
}
+
// Extend the primary selection range, discarding the others.
function extendSelection(doc, head, other, options) {
  var range = extendRange(doc, doc.sel.primary(), head, other);
  setSelection(doc, new Selection([range], 0), options);
}

// Extend all selection ranges at once; heads holds one position per
// existing range.
function extendSelections(doc, heads, options) {
  var extended = [];
  for (var n = 0; n < doc.sel.ranges.length; n++)
    extended[n] = extendRange(doc, doc.sel.ranges[n], heads[n], null);
  var newSel = normalizeSelection(extended, doc.sel.primIndex);
  setSelection(doc, newSel, options);
}

// Replace the i'th range of the selection, keeping the rest.
function replaceOneSelection(doc, i, range, options) {
  var ranges = doc.sel.ranges.slice(0);
  ranges[i] = range;
  setSelection(doc, normalizeSelection(ranges, doc.sel.primIndex), options);
}

// Reset the selection to a single anchor/head range.
function setSimpleSelection(doc, anchor, head, options) {
  setSelection(doc, simpleSelection(anchor, head), options);
}
+
// Give beforeSelectionChange handlers a change to influence a
// selection update.
function filterSelectionChange(doc, sel) {
  // The event object handlers receive; update() lets a handler
  // substitute its own (clipped) set of ranges.
  var obj = {
    ranges: sel.ranges,
    update: function(ranges) {
      this.ranges = [];
      for (var i = 0; i < ranges.length; i++)
        this.ranges[i] = new Range(clipPos(doc, ranges[i].anchor),
                                   clipPos(doc, ranges[i].head));
    }
  };
  signal(doc, "beforeSelectionChange", doc, obj);
  if (doc.cm) signal(doc.cm, "beforeSelectionChange", doc.cm, obj);
  // When a handler replaced the ranges, the last one becomes primary.
  if (obj.ranges != sel.ranges) return normalizeSelection(obj.ranges, obj.ranges.length - 1);
  else return sel;
}
+
// Set a new selection, replacing the previous selection entry in the
// undo history (when the last entry is a selection) instead of
// pushing a new one.
function setSelectionReplaceHistory(doc, sel, options) {
  var done = doc.history.done, last = lst(done);
  if (last && last.ranges) {
    done[done.length - 1] = sel;
    setSelectionNoUndo(doc, sel, options);
  } else {
    setSelection(doc, sel, options);
  }
}
+
// Set a new selection, recording the old one in the undo history.
function setSelection(doc, sel, options) {
  setSelectionNoUndo(doc, sel, options);
  addSelectionToHistory(doc, doc.sel, doc.cm ? doc.cm.curOp.id : NaN, options);
}
+
// Apply a new selection without touching the undo history: run the
// beforeSelectionChange filter, move endpoints out of atomic ranges,
// then scroll the cursor into view unless options.scroll is false.
function setSelectionNoUndo(doc, sel, options) {
  if (hasHandler(doc, "beforeSelectionChange") || doc.cm && hasHandler(doc.cm, "beforeSelectionChange"))
    sel = filterSelectionChange(doc, sel);

  // Bias atomic-range skipping in the direction the head moved.
  var bias = options && options.bias ||
    (cmp(sel.primary().head, doc.sel.primary().head) < 0 ? -1 : 1);
  setSelectionInner(doc, skipAtomicInSelection(doc, sel, bias, true));

  if (!(options && options.scroll === false) && doc.cm)
    ensureCursorVisible(doc.cm);
}
+
// Commit a selection to the document and fire the activity signals.
// No-op when the new selection equals the current one.
function setSelectionInner(doc, sel) {
  if (sel.equals(doc.sel)) return;

  doc.sel = sel;

  if (doc.cm) {
    doc.cm.curOp.updateInput = doc.cm.curOp.selectionChanged = true;
    signalCursorActivity(doc.cm);
  }
  signalLater(doc, "cursorActivity", doc);
}
+
// Verify that the selection does not partially select any atomic
// marked ranges.
// NOTE(review): the trailing sel_dontScroll argument is ignored —
// setSelectionInner takes only (doc, sel); confirm whether this was
// meant to call setSelectionNoUndo instead.
function reCheckSelection(doc) {
  setSelectionInner(doc, skipAtomicInSelection(doc, doc.sel, null, false), sel_dontScroll);
}
+
// Return a selection that does not partially select any atomic
// ranges.
function skipAtomicInSelection(doc, sel, bias, mayClear) {
  // out stays undefined until a range actually moves, so the original
  // selection object is reused when nothing changes.
  var out;
  for (var i = 0; i < sel.ranges.length; i++) {
    var range = sel.ranges[i];
    var newAnchor = skipAtomic(doc, range.anchor, bias, mayClear);
    var newHead = skipAtomic(doc, range.head, bias, mayClear);
    if (out || newAnchor != range.anchor || newHead != range.head) {
      if (!out) out = sel.ranges.slice(0, i);
      out[i] = new Range(newAnchor, newHead);
    }
  }
  return out ? normalizeSelection(out, sel.primIndex) : sel;
}
+
// Ensure a given position is not inside an atomic range: walk in
// direction `bias` (default forward) out of any atomic marked span,
// flipping direction at document edges; as a last resort disable
// editing and return the document start.
function skipAtomic(doc, pos, bias, mayClear) {
  var flipped = false, curPos = pos;
  var dir = bias || 1;
  doc.cantEdit = false;
  search: for (;;) {
    var line = getLine(doc, curPos.line);
    if (line.markedSpans) {
      for (var i = 0; i < line.markedSpans.length; ++i) {
        var sp = line.markedSpans[i], m = sp.marker;
        // Is curPos inside this span, honoring the marker's
        // inclusiveLeft/inclusiveRight flags?
        if ((sp.from == null || (m.inclusiveLeft ? sp.from <= curPos.ch : sp.from < curPos.ch)) &&
            (sp.to == null || (m.inclusiveRight ? sp.to >= curPos.ch : sp.to > curPos.ch))) {
          if (mayClear) {
            signal(m, "beforeCursorEnter");
            if (m.explicitlyCleared) {
              // A handler cleared the marker; re-scan this line.
              if (!line.markedSpans) break;
              else {--i; continue;}
            }
          }
          if (!m.atomic) continue;
          var newPos = m.find(dir < 0 ? -1 : 1);
          if (cmp(newPos, curPos) == 0) {
            // The marker edge coincides with curPos; nudge one more
            // character, wrapping across line boundaries when needed.
            newPos.ch += dir;
            if (newPos.ch < 0) {
              if (newPos.line > doc.first) newPos = clipPos(doc, Pos(newPos.line - 1));
              else newPos = null;
            } else if (newPos.ch > line.text.length) {
              if (newPos.line < doc.first + doc.size - 1) newPos = Pos(newPos.line + 1, 0);
              else newPos = null;
            }
            if (!newPos) {
              if (flipped) {
                // Driven in a corner -- no valid cursor position found at all
                // -- try again *with* clearing, if we didn't already
                if (!mayClear) return skipAtomic(doc, pos, bias, true);
                // Otherwise, turn off editing until further notice, and return the start of the doc
                doc.cantEdit = true;
                return Pos(doc.first, 0);
              }
              flipped = true; newPos = pos; dir = -dir;
            }
          }
          curPos = newPos;
          continue search;
        }
      }
    }
    return curPos;
  }
}
+
// SELECTION DRAWING

// Redraw the selection and/or cursor. Builds (but does not insert)
// document fragments holding the cursor and selection elements, plus
// the coordinates to move the hidden textarea to; showSelection
// applies the result to the DOM.
function drawSelection(cm) {
  var display = cm.display, doc = cm.doc, result = {};
  var curFragment = result.cursors = document.createDocumentFragment();
  var selFragment = result.selection = document.createDocumentFragment();

  for (var i = 0; i < doc.sel.ranges.length; i++) {
    var range = doc.sel.ranges[i];
    var collapsed = range.empty();
    if (collapsed || cm.options.showCursorWhenSelecting)
      drawSelectionCursor(cm, range, curFragment);
    if (!collapsed)
      drawSelectionRange(cm, range, selFragment);
  }

  // Move the hidden textarea near the cursor to prevent scrolling artifacts
  if (cm.options.moveInputWithCursor) {
    var headPos = cursorCoords(cm, doc.sel.primary().head, "div");
    var wrapOff = display.wrapper.getBoundingClientRect(), lineOff = display.lineDiv.getBoundingClientRect();
    result.teTop = Math.max(0, Math.min(display.wrapper.clientHeight - 10,
                                        headPos.top + lineOff.top - wrapOff.top));
    result.teLeft = Math.max(0, Math.min(display.wrapper.clientWidth - 10,
                                         headPos.left + lineOff.left - wrapOff.left));
  }

  return result;
}
+
// Apply a drawSelection result to the DOM: swap in the cursor and
// selection fragments and reposition the hidden textarea.
function showSelection(cm, drawn) {
  removeChildrenAndAdd(cm.display.cursorDiv, drawn.cursors);
  removeChildrenAndAdd(cm.display.selectionDiv, drawn.selection);
  if (drawn.teTop != null) {
    cm.display.inputDiv.style.top = drawn.teTop + "px";
    cm.display.inputDiv.style.left = drawn.teLeft + "px";
  }
}

// Redraw and show the current selection in one step.
function updateSelection(cm) {
  showSelection(cm, drawSelection(cm));
}
+
// Draws a cursor for the given range into the output fragment,
// positioned at the range's head.
function drawSelectionCursor(cm, range, output) {
  var pos = cursorCoords(cm, range.head, "div", null, null, !cm.options.singleCursorHeightPerLine);

  var cursor = output.appendChild(elt("div", "\u00a0", "CodeMirror-cursor"));
  cursor.style.left = pos.left + "px";
  cursor.style.top = pos.top + "px";
  cursor.style.height = Math.max(0, pos.bottom - pos.top) * cm.options.cursorHeight + "px";

  if (pos.other) {
    // Secondary cursor, shown when on a 'jump' in bi-directional text
    var otherCursor = output.appendChild(elt("div", "\u00a0", "CodeMirror-cursor CodeMirror-secondarycursor"));
    otherCursor.style.display = "";
    otherCursor.style.left = pos.other.left + "px";
    otherCursor.style.top = pos.other.top + "px";
    otherCursor.style.height = (pos.other.bottom - pos.other.top) * .85 + "px";
  }
}
+
// Draws the given range as a highlighted selection
function drawSelectionRange(cm, range, output) {
  var display = cm.display, doc = cm.doc;
  var fragment = document.createDocumentFragment();
  var padding = paddingH(cm.display), leftSide = padding.left, rightSide = display.lineSpace.offsetWidth - padding.right;

  // Append one absolutely-positioned selection rectangle; a null
  // width extends the rectangle to the right edge.
  function add(left, top, width, bottom) {
    if (top < 0) top = 0;
    top = Math.round(top);
    bottom = Math.round(bottom);
    fragment.appendChild(elt("div", null, "CodeMirror-selected", "position: absolute; left: " + left +
                             "px; top: " + top + "px; width: " + (width == null ? rightSide - left : width) +
                             "px; height: " + (bottom - top) + "px"));
  }

  // Draw the selected stretch of a single line, honoring bidi order;
  // returns the visual start and end coordinates actually drawn.
  function drawForLine(line, fromArg, toArg) {
    var lineObj = getLine(doc, line);
    var lineLen = lineObj.text.length;
    var start, end;
    function coords(ch, bias) {
      return charCoords(cm, Pos(line, ch), "div", lineObj, bias);
    }

    iterateBidiSections(getOrder(lineObj), fromArg || 0, toArg == null ? lineLen : toArg, function(from, to, dir) {
      var leftPos = coords(from, "left"), rightPos, left, right;
      if (from == to) {
        rightPos = leftPos;
        left = right = leftPos.left;
      } else {
        rightPos = coords(to - 1, "right");
        // In right-to-left sections the visual left/right are swapped.
        if (dir == "rtl") { var tmp = leftPos; leftPos = rightPos; rightPos = tmp; }
        left = leftPos.left;
        right = rightPos.right;
      }
      if (fromArg == null && from == 0) left = leftSide;
      if (rightPos.top - leftPos.top > 3) { // Different lines, draw top part
        add(left, leftPos.top, null, leftPos.bottom);
        left = leftSide;
        if (leftPos.bottom < rightPos.top) add(left, leftPos.bottom, null, rightPos.top);
      }
      if (toArg == null && to == lineLen) right = rightSide;
      if (!start || leftPos.top < start.top || leftPos.top == start.top && leftPos.left < start.left)
        start = leftPos;
      if (!end || rightPos.bottom > end.bottom || rightPos.bottom == end.bottom && rightPos.right > end.right)
        end = rightPos;
      if (left < leftSide + 1) left = leftSide;
      add(left, rightPos.top, right - left, rightPos.bottom);
    });
    return {start: start, end: end};
  }

  var sFrom = range.from(), sTo = range.to();
  if (sFrom.line == sTo.line) {
    drawForLine(sFrom.line, sFrom.ch, sTo.ch);
  } else {
    // Multi-line selection: draw the first and last lines, then
    // connect or fill the space between them.
    var fromLine = getLine(doc, sFrom.line), toLine = getLine(doc, sTo.line);
    var singleVLine = visualLine(fromLine) == visualLine(toLine);
    var leftEnd = drawForLine(sFrom.line, sFrom.ch, singleVLine ? fromLine.text.length + 1 : null).end;
    var rightStart = drawForLine(sTo.line, singleVLine ? 0 : null, sTo.ch).start;
    if (singleVLine) {
      if (leftEnd.top < rightStart.top - 2) {
        add(leftEnd.right, leftEnd.top, null, leftEnd.bottom);
        add(leftSide, rightStart.top, rightStart.left, rightStart.bottom);
      } else {
        add(leftEnd.right, leftEnd.top, rightStart.left - leftEnd.right, leftEnd.bottom);
      }
    }
    if (leftEnd.bottom < rightStart.top)
      add(leftSide, leftEnd.bottom, null, rightStart.top);
  }

  output.appendChild(fragment);
}
+
// Cursor-blinking

// Restart the cursor blink cycle (e.g. after a selection change),
// making the cursor immediately visible again.
function restartBlink(cm) {
  if (!cm.state.focused) return;
  var display = cm.display;
  clearInterval(display.blinker);
  var on = true;
  display.cursorDiv.style.visibility = "";
  if (cm.options.cursorBlinkRate > 0)
    display.blinker = setInterval(function() {
      display.cursorDiv.style.visibility = (on = !on) ? "" : "hidden";
    }, cm.options.cursorBlinkRate);
  else if (cm.options.cursorBlinkRate < 0)
    // Negative blink rate: keep the cursor hidden entirely.
    display.cursorDiv.style.visibility = "hidden";
}
+
// HIGHLIGHT WORKER

// Schedule the background highlighter to run after `time` ms, but
// only when the mode keeps state and unhighlighted lines can still
// reach the viewport.
function startWorker(cm, time) {
  if (cm.doc.mode.startState && cm.doc.frontier < cm.display.viewTo)
    cm.state.highlight.set(time, bind(highlightWorker, cm));
}
+
// One time-slice of the background highlighter: advance doc.frontier,
// re-tokenizing lines until the work budget is exhausted, then
// reschedule itself and redraw the lines whose styling changed.
function highlightWorker(cm) {
  var doc = cm.doc;
  if (doc.frontier < doc.first) doc.frontier = doc.first;
  if (doc.frontier >= cm.display.viewTo) return;
  var end = +new Date + cm.options.workTime;
  var state = copyState(doc.mode, getStateBefore(cm, doc.frontier));
  var changedLines = [];

  doc.iter(doc.frontier, Math.min(doc.first + doc.size, cm.display.viewTo + 500), function(line) {
    if (doc.frontier >= cm.display.viewFrom) { // Visible
      var oldStyles = line.styles;
      var highlighted = highlightLine(cm, line, state, true);
      line.styles = highlighted.styles;
      var oldCls = line.styleClasses, newCls = highlighted.classes;
      if (newCls) line.styleClasses = newCls;
      else if (oldCls) line.styleClasses = null;
      // Register the line for redraw only when styling changed.
      var ischange = !oldStyles || oldStyles.length != line.styles.length ||
        oldCls != newCls && (!oldCls || !newCls || oldCls.bgClass != newCls.bgClass || oldCls.textClass != newCls.textClass);
      for (var i = 0; !ischange && i < oldStyles.length; ++i) ischange = oldStyles[i] != line.styles[i];
      if (ischange) changedLines.push(doc.frontier);
      line.stateAfter = copyState(doc.mode, state);
    } else {
      // Off-screen: just advance the state, caching it sparsely.
      processLine(cm, line.text, state);
      line.stateAfter = doc.frontier % 5 == 0 ? copyState(doc.mode, state) : null;
    }
    ++doc.frontier;
    // Out of time: reschedule and abort the iteration.
    if (+new Date > end) {
      startWorker(cm, cm.options.workDelay);
      return true;
    }
  });
  if (changedLines.length) runInOp(cm, function() {
    for (var i = 0; i < changedLines.length; i++)
      regLineChange(cm, changedLines[i], "text");
  });
}
+
// Finds the line to start with when starting a parse. Tries to
// find a line with a stateAfter, so that it can start with a
// valid state. If that fails, it returns the line with the
// smallest indentation, which tends to need the least context to
// parse correctly.
function findStartLine(cm, n, precise) {
  var minindent, minline, doc = cm.doc;
  // In precise mode search all the way back; otherwise bound the
  // lookback (further for modes with an inner mode).
  var lim = precise ? -1 : n - (cm.doc.mode.innerMode ? 1000 : 100);
  for (var search = n; search > lim; --search) {
    if (search <= doc.first) return doc.first;
    var line = getLine(doc, search - 1);
    // A cached state is only trusted in precise mode when it lies at
    // or before the highlight frontier.
    if (line.stateAfter && (!precise || search <= doc.frontier)) return search;
    var indented = countColumn(line.text, null, cm.options.tabSize);
    if (minline == null || minindent > indented) {
      minline = search - 1;
      minindent = indented;
    }
  }
  return minline;
}
+
// Compute (or fetch a cached) mode state valid just before line n,
// running the mode over the intervening lines from the best start
// point. Returns true for stateless modes.
function getStateBefore(cm, n, precise) {
  var doc = cm.doc, display = cm.display;
  if (!doc.mode.startState) return true;
  var pos = findStartLine(cm, n, precise), state = pos > doc.first && getLine(doc, pos-1).stateAfter;
  if (!state) state = startState(doc.mode);
  else state = copyState(doc.mode, state);
  doc.iter(pos, n, function(line) {
    processLine(cm, line.text, state);
    // Cache state on the last line, every 5th line, and all visible lines.
    var save = pos == n - 1 || pos % 5 == 0 || pos >= display.viewFrom && pos < display.viewTo;
    line.stateAfter = save ? copyState(doc.mode, state) : null;
    ++pos;
  });
  if (precise) doc.frontier = pos;
  return state;
}
+
// POSITION MEASUREMENT

// Height of the area above the text inside the mover.
function paddingTop(display) {return display.lineSpace.offsetTop;}
// Combined vertical padding around the line space.
function paddingVert(display) {return display.mover.offsetHeight - display.lineSpace.offsetHeight;}
// Horizontal padding of the text area, measured via a probe <pre>
// and cached on the display.
function paddingH(display) {
  if (display.cachedPaddingH) return display.cachedPaddingH;
  var e = removeChildrenAndAdd(display.measure, elt("pre", "x"));
  var style = window.getComputedStyle ? window.getComputedStyle(e) : e.currentStyle;
  var data = {left: parseInt(style.paddingLeft), right: parseInt(style.paddingRight)};
  // Only cache when the measurement produced real numbers (the probe
  // may not be styled/attached yet).
  if (!isNaN(data.left) && !isNaN(data.right)) display.cachedPaddingH = data;
  return data;
}
+
// Ensure the lineView.wrapping.heights array is populated. This is
// an array of bottom offsets for the lines that make up a drawn
// line. When lineWrapping is on, there might be more than one
// height.
function ensureLineHeights(cm, lineView, rect) {
  var wrapping = cm.options.lineWrapping;
  var curWidth = wrapping && cm.display.scroller.clientWidth;
  // Recompute when absent, or when wrapping and the width changed
  // (wrapped rows move around as the available width changes).
  if (!lineView.measure.heights || wrapping && lineView.measure.width != curWidth) {
    var heights = lineView.measure.heights = [];
    if (wrapping) {
      lineView.measure.width = curWidth;
      var rects = lineView.text.firstChild.getClientRects();
      for (var i = 0; i < rects.length - 1; i++) {
        var cur = rects[i], next = rects[i + 1];
        // A jump in bottom offsets marks a wrapped-row boundary.
        if (Math.abs(cur.bottom - next.bottom) > 2)
          heights.push((cur.bottom + next.top) / 2 - rect.top);
      }
    }
    heights.push(rect.bottom - rect.top);
  }
}
+
// Find a line map (mapping character offsets to text nodes) and a
// measurement cache for the given line number. (A line view might
// contain multiple lines when collapsed ranges are present.)
function mapFromLineView(lineView, line, lineN) {
  if (lineView.line == line)
    return {map: lineView.measure.map, cache: lineView.measure.cache};
  // One of the lines merged into this view.
  for (var i = 0; i < lineView.rest.length; i++)
    if (lineView.rest[i] == line)
      return {map: lineView.measure.maps[i], cache: lineView.measure.caches[i]};
  // Fall back to the first merged line past lineN, flagged 'before'.
  for (var i = 0; i < lineView.rest.length; i++)
    if (lineNo(lineView.rest[i]) > lineN)
      return {map: lineView.measure.maps[i], cache: lineView.measure.caches[i], before: true};
}
+
+ // Render a line into the hidden node display.externalMeasured. Used
+ // when measurement is needed for a line that's not in the viewport.
+ function updateExternalMeasurement(cm, line) {
+ line = visualLine(line);
+ var lineN = lineNo(line);
+ var view = cm.display.externalMeasured = new LineView(cm.doc, line, lineN);
+ view.lineN = lineN;
+ var built = view.built = buildLineContent(cm, view);
+ view.text = built.pre;
+ removeChildrenAndAdd(cm.display.lineMeasure, built.pre);
+ return view;
+ }
+
+ // Get a {top, bottom, left, right} box (in line-local coordinates)
+ // for a given character.
+ function measureChar(cm, line, ch, bias) {
+ return measureCharPrepared(cm, prepareMeasureForLine(cm, line), ch, bias);
+ }
+
+ // Find a line view that corresponds to the given line number.
+ function findViewForLine(cm, lineN) {
+ if (lineN >= cm.display.viewFrom && lineN < cm.display.viewTo)
+ return cm.display.view[findViewIndex(cm, lineN)];
+ var ext = cm.display.externalMeasured;
+ if (ext && lineN >= ext.lineN && lineN < ext.lineN + ext.size)
+ return ext;
+ }
+
+ // Measurement can be split in two steps, the set-up work that
+ // applies to the whole line, and the measurement of the actual
+ // character. Functions like coordsChar, that need to do a lot of
+ // measurements in a row, can thus ensure that the set-up work is
+ // only done once.
+ function prepareMeasureForLine(cm, line) {
+ var lineN = lineNo(line);
+ var view = findViewForLine(cm, lineN);
+ if (view && !view.text)
+ view = null;
+ else if (view && view.changes)
+ updateLineForChanges(cm, view, lineN, getDimensions(cm));
+ if (!view)
+ view = updateExternalMeasurement(cm, line);
+
+ var info = mapFromLineView(view, line, lineN);
+ return {
+ line: line, view: view, rect: null,
+ map: info.map, cache: info.cache, before: info.before,
+ hasHeights: false
+ };
+ }
+
+ // Given a prepared measurement object, measures the position of an
+ // actual character (or fetches it from the cache).
+ function measureCharPrepared(cm, prepared, ch, bias, varHeight) {
+ if (prepared.before) ch = -1;
+ var key = ch + (bias || ""), found;
+ if (prepared.cache.hasOwnProperty(key)) {
+ found = prepared.cache[key];
+ } else {
+ if (!prepared.rect)
+ prepared.rect = prepared.view.text.getBoundingClientRect();
+ if (!prepared.hasHeights) {
+ ensureLineHeights(cm, prepared.view, prepared.rect);
+ prepared.hasHeights = true;
+ }
+ found = measureCharInner(cm, prepared, ch, bias);
+ if (!found.bogus) prepared.cache[key] = found;
+ }
+ return {left: found.left, right: found.right,
+ top: varHeight ? found.rtop : found.top,
+ bottom: varHeight ? found.rbottom : found.bottom};
+ }
+
+ var nullRect = {left: 0, right: 0, top: 0, bottom: 0};
+
+ function measureCharInner(cm, prepared, ch, bias) {
+ var map = prepared.map;
+
+ var node, start, end, collapse;
+ // First, search the line map for the text node corresponding to,
+ // or closest to, the target character.
+ for (var i = 0; i < map.length; i += 3) {
+ var mStart = map[i], mEnd = map[i + 1];
+ if (ch < mStart) {
+ start = 0; end = 1;
+ collapse = "left";
+ } else if (ch < mEnd) {
+ start = ch - mStart;
+ end = start + 1;
+ } else if (i == map.length - 3 || ch == mEnd && map[i + 3] > ch) {
+ end = mEnd - mStart;
+ start = end - 1;
+ if (ch >= mEnd) collapse = "right";
+ }
+ if (start != null) {
+ node = map[i + 2];
+ if (mStart == mEnd && bias == (node.insertLeft ? "left" : "right"))
+ collapse = bias;
+ if (bias == "left" && start == 0)
+ while (i && map[i - 2] == map[i - 3] && map[i - 1].insertLeft) {
+ node = map[(i -= 3) + 2];
+ collapse = "left";
+ }
+ if (bias == "right" && start == mEnd - mStart)
+ while (i < map.length - 3 && map[i + 3] == map[i + 4] && !map[i + 5].insertLeft) {
+ node = map[(i += 3) + 2];
+ collapse = "right";
+ }
+ break;
+ }
+ }
+
+ var rect;
+ if (node.nodeType == 3) { // If it is a text node, use a range to retrieve the coordinates.
+ for (var i = 0; i < 4; i++) { // Retry a maximum of 4 times when nonsense rectangles are returned
+ while (start && isExtendingChar(prepared.line.text.charAt(mStart + start))) --start;
+ while (mStart + end < mEnd && isExtendingChar(prepared.line.text.charAt(mStart + end))) ++end;
+ if (ie && ie_version < 9 && start == 0 && end == mEnd - mStart) {
+ rect = node.parentNode.getBoundingClientRect();
+ } else if (ie && cm.options.lineWrapping) {
+ var rects = range(node, start, end).getClientRects();
+ if (rects.length)
+ rect = rects[bias == "right" ? rects.length - 1 : 0];
+ else
+ rect = nullRect;
+ } else {
+ rect = range(node, start, end).getBoundingClientRect() || nullRect;
+ }
+ if (rect.left || rect.right || start == 0) break;
+ end = start;
+ start = start - 1;
+ collapse = "right";
+ }
+ if (ie && ie_version < 11) rect = maybeUpdateRectForZooming(cm.display.measure, rect);
+ } else { // If it is a widget, simply get the box for the whole widget.
+ if (start > 0) collapse = bias = "right";
+ var rects;
+ if (cm.options.lineWrapping && (rects = node.getClientRects()).length > 1)
+ rect = rects[bias == "right" ? rects.length - 1 : 0];
+ else
+ rect = node.getBoundingClientRect();
+ }
+ if (ie && ie_version < 9 && !start && (!rect || !rect.left && !rect.right)) {
+ var rSpan = node.parentNode.getClientRects()[0];
+ if (rSpan)
+ rect = {left: rSpan.left, right: rSpan.left + charWidth(cm.display), top: rSpan.top, bottom: rSpan.bottom};
+ else
+ rect = nullRect;
+ }
+
+ var rtop = rect.top - prepared.rect.top, rbot = rect.bottom - prepared.rect.top;
+ var mid = (rtop + rbot) / 2;
+ var heights = prepared.view.measure.heights;
+ for (var i = 0; i < heights.length - 1; i++)
+ if (mid < heights[i]) break;
+ var top = i ? heights[i - 1] : 0, bot = heights[i];
+ var result = {left: (collapse == "right" ? rect.right : rect.left) - prepared.rect.left,
+ right: (collapse == "left" ? rect.left : rect.right) - prepared.rect.left,
+ top: top, bottom: bot};
+ if (!rect.left && !rect.right) result.bogus = true;
+ if (!cm.options.singleCursorHeightPerLine) { result.rtop = rtop; result.rbottom = rbot; }
+
+ return result;
+ }
+
+ // Work around problem with bounding client rects on ranges being
+ // returned incorrectly when zoomed on IE10 and below.
+ function maybeUpdateRectForZooming(measure, rect) {
+ if (!window.screen || screen.logicalXDPI == null ||
+ screen.logicalXDPI == screen.deviceXDPI || !hasBadZoomedRects(measure))
+ return rect;
+ var scaleX = screen.logicalXDPI / screen.deviceXDPI;
+ var scaleY = screen.logicalYDPI / screen.deviceYDPI;
+ return {left: rect.left * scaleX, right: rect.right * scaleX,
+ top: rect.top * scaleY, bottom: rect.bottom * scaleY};
+ }
+
+ function clearLineMeasurementCacheFor(lineView) {
+ if (lineView.measure) {
+ lineView.measure.cache = {};
+ lineView.measure.heights = null;
+ if (lineView.rest) for (var i = 0; i < lineView.rest.length; i++)
+ lineView.measure.caches[i] = {};
+ }
+ }
+
+ function clearLineMeasurementCache(cm) {
+ cm.display.externalMeasure = null;
+ removeChildren(cm.display.lineMeasure);
+ for (var i = 0; i < cm.display.view.length; i++)
+ clearLineMeasurementCacheFor(cm.display.view[i]);
+ }
+
+ function clearCaches(cm) {
+ clearLineMeasurementCache(cm);
+ cm.display.cachedCharWidth = cm.display.cachedTextHeight = cm.display.cachedPaddingH = null;
+ if (!cm.options.lineWrapping) cm.display.maxLineChanged = true;
+ cm.display.lineNumChars = null;
+ }
+
+ function pageScrollX() { return window.pageXOffset || (document.documentElement || document.body).scrollLeft; }
+ function pageScrollY() { return window.pageYOffset || (document.documentElement || document.body).scrollTop; }
+
+ // Converts a {top, bottom, left, right} box from line-local
+ // coordinates into another coordinate system. Context may be one of
+ // "line", "div" (display.lineDiv), "local"/null (editor), or "page".
+ function intoCoordSystem(cm, lineObj, rect, context) {
+ if (lineObj.widgets) for (var i = 0; i < lineObj.widgets.length; ++i) if (lineObj.widgets[i].above) {
+ var size = widgetHeight(lineObj.widgets[i]);
+ rect.top += size; rect.bottom += size;
+ }
+ if (context == "line") return rect;
+ if (!context) context = "local";
+ var yOff = heightAtLine(lineObj);
+ if (context == "local") yOff += paddingTop(cm.display);
+ else yOff -= cm.display.viewOffset;
+ if (context == "page" || context == "window") {
+ var lOff = cm.display.lineSpace.getBoundingClientRect();
+ yOff += lOff.top + (context == "window" ? 0 : pageScrollY());
+ var xOff = lOff.left + (context == "window" ? 0 : pageScrollX());
+ rect.left += xOff; rect.right += xOff;
+ }
+ rect.top += yOff; rect.bottom += yOff;
+ return rect;
+ }
+
+ // Coverts a box from "div" coords to another coordinate system.
+ // Context may be "window", "page", "div", or "local"/null.
+ function fromCoordSystem(cm, coords, context) {
+ if (context == "div") return coords;
+ var left = coords.left, top = coords.top;
+ // First move into "page" coordinate system
+ if (context == "page") {
+ left -= pageScrollX();
+ top -= pageScrollY();
+ } else if (context == "local" || !context) {
+ var localBox = cm.display.sizer.getBoundingClientRect();
+ left += localBox.left;
+ top += localBox.top;
+ }
+
+ var lineSpaceBox = cm.display.lineSpace.getBoundingClientRect();
+ return {left: left - lineSpaceBox.left, top: top - lineSpaceBox.top};
+ }
+
+ function charCoords(cm, pos, context, lineObj, bias) {
+ if (!lineObj) lineObj = getLine(cm.doc, pos.line);
+ return intoCoordSystem(cm, lineObj, measureChar(cm, lineObj, pos.ch, bias), context);
+ }
+
+ // Returns a box for a given cursor position, which may have an
+ // 'other' property containing the position of the secondary cursor
+ // on a bidi boundary.
+ function cursorCoords(cm, pos, context, lineObj, preparedMeasure, varHeight) {
+ lineObj = lineObj || getLine(cm.doc, pos.line);
+ if (!preparedMeasure) preparedMeasure = prepareMeasureForLine(cm, lineObj);
+ function get(ch, right) {
+ var m = measureCharPrepared(cm, preparedMeasure, ch, right ? "right" : "left", varHeight);
+ if (right) m.left = m.right; else m.right = m.left;
+ return intoCoordSystem(cm, lineObj, m, context);
+ }
+ function getBidi(ch, partPos) {
+ var part = order[partPos], right = part.level % 2;
+ if (ch == bidiLeft(part) && partPos && part.level < order[partPos - 1].level) {
+ part = order[--partPos];
+ ch = bidiRight(part) - (part.level % 2 ? 0 : 1);
+ right = true;
+ } else if (ch == bidiRight(part) && partPos < order.length - 1 && part.level < order[partPos + 1].level) {
+ part = order[++partPos];
+ ch = bidiLeft(part) - part.level % 2;
+ right = false;
+ }
+ if (right && ch == part.to && ch > part.from) return get(ch - 1);
+ return get(ch, right);
+ }
+ var order = getOrder(lineObj), ch = pos.ch;
+ if (!order) return get(ch);
+ var partPos = getBidiPartAt(order, ch);
+ var val = getBidi(ch, partPos);
+ if (bidiOther != null) val.other = getBidi(ch, bidiOther);
+ return val;
+ }
+
+ // Used to cheaply estimate the coordinates for a position. Used for
+ // intermediate scroll updates.
+ function estimateCoords(cm, pos) {
+ var left = 0, pos = clipPos(cm.doc, pos);
+ if (!cm.options.lineWrapping) left = charWidth(cm.display) * pos.ch;
+ var lineObj = getLine(cm.doc, pos.line);
+ var top = heightAtLine(lineObj) + paddingTop(cm.display);
+ return {left: left, right: left, top: top, bottom: top + lineObj.height};
+ }
+
+ // Positions returned by coordsChar contain some extra information.
+ // xRel is the relative x position of the input coordinates compared
+ // to the found position (so xRel > 0 means the coordinates are to
+ // the right of the character position, for example). When outside
+ // is true, that means the coordinates lie outside the line's
+ // vertical range.
+ function PosWithInfo(line, ch, outside, xRel) {
+ var pos = Pos(line, ch);
+ pos.xRel = xRel;
+ if (outside) pos.outside = true;
+ return pos;
+ }
+
+ // Compute the character position closest to the given coordinates.
+ // Input must be lineSpace-local ("div" coordinate system).
+ function coordsChar(cm, x, y) {
+ var doc = cm.doc;
+ y += cm.display.viewOffset;
+ if (y < 0) return PosWithInfo(doc.first, 0, true, -1);
+ var lineN = lineAtHeight(doc, y), last = doc.first + doc.size - 1;
+ if (lineN > last)
+ return PosWithInfo(doc.first + doc.size - 1, getLine(doc, last).text.length, true, 1);
+ if (x < 0) x = 0;
+
+ var lineObj = getLine(doc, lineN);
+ for (;;) {
+ var found = coordsCharInner(cm, lineObj, lineN, x, y);
+ var merged = collapsedSpanAtEnd(lineObj);
+ var mergedPos = merged && merged.find(0, true);
+ if (merged && (found.ch > mergedPos.from.ch || found.ch == mergedPos.from.ch && found.xRel > 0))
+ lineN = lineNo(lineObj = mergedPos.to.line);
+ else
+ return found;
+ }
+ }
+
+ function coordsCharInner(cm, lineObj, lineNo, x, y) {
+ var innerOff = y - heightAtLine(lineObj);
+ var wrongLine = false, adjust = 2 * cm.display.wrapper.clientWidth;
+ var preparedMeasure = prepareMeasureForLine(cm, lineObj);
+
+ function getX(ch) {
+ var sp = cursorCoords(cm, Pos(lineNo, ch), "line", lineObj, preparedMeasure);
+ wrongLine = true;
+ if (innerOff > sp.bottom) return sp.left - adjust;
+ else if (innerOff < sp.top) return sp.left + adjust;
+ else wrongLine = false;
+ return sp.left;
+ }
+
+ var bidi = getOrder(lineObj), dist = lineObj.text.length;
+ var from = lineLeft(lineObj), to = lineRight(lineObj);
+ var fromX = getX(from), fromOutside = wrongLine, toX = getX(to), toOutside = wrongLine;
+
+ if (x > toX) return PosWithInfo(lineNo, to, toOutside, 1);
+ // Do a binary search between these bounds.
+ for (;;) {
+ if (bidi ? to == from || to == moveVisually(lineObj, from, 1) : to - from <= 1) {
+ var ch = x < fromX || x - fromX <= toX - x ? from : to;
+ var xDiff = x - (ch == from ? fromX : toX);
+ while (isExtendingChar(lineObj.text.charAt(ch))) ++ch;
+ var pos = PosWithInfo(lineNo, ch, ch == from ? fromOutside : toOutside,
+ xDiff < -1 ? -1 : xDiff > 1 ? 1 : 0);
+ return pos;
+ }
+ var step = Math.ceil(dist / 2), middle = from + step;
+ if (bidi) {
+ middle = from;
+ for (var i = 0; i < step; ++i) middle = moveVisually(lineObj, middle, 1);
+ }
+ var middleX = getX(middle);
+ if (middleX > x) {to = middle; toX = middleX; if (toOutside = wrongLine) toX += 1000; dist = step;}
+ else {from = middle; fromX = middleX; fromOutside = wrongLine; dist -= step;}
+ }
+ }
+
+ var measureText;
+ // Compute the default text height.
+ function textHeight(display) {
+ if (display.cachedTextHeight != null) return display.cachedTextHeight;
+ if (measureText == null) {
+ measureText = elt("pre");
+ // Measure a bunch of lines, for browsers that compute
+ // fractional heights.
+ for (var i = 0; i < 49; ++i) {
+ measureText.appendChild(document.createTextNode("x"));
+ measureText.appendChild(elt("br"));
+ }
+ measureText.appendChild(document.createTextNode("x"));
+ }
+ removeChildrenAndAdd(display.measure, measureText);
+ var height = measureText.offsetHeight / 50;
+ if (height > 3) display.cachedTextHeight = height;
+ removeChildren(display.measure);
+ return height || 1;
+ }
+
+ // Compute the default character width.
+ function charWidth(display) {
+ if (display.cachedCharWidth != null) return display.cachedCharWidth;
+ var anchor = elt("span", "xxxxxxxxxx");
+ var pre = elt("pre", [anchor]);
+ removeChildrenAndAdd(display.measure, pre);
+ var rect = anchor.getBoundingClientRect(), width = (rect.right - rect.left) / 10;
+ if (width > 2) display.cachedCharWidth = width;
+ return width || 10;
+ }
+
+ // OPERATIONS
+
+ // Operations are used to wrap a series of changes to the editor
+ // state in such a way that each change won't have to update the
+ // cursor and display (which would be awkward, slow, and
+ // error-prone). Instead, display updates are batched and then all
+ // combined and executed at once.
+
+ var operationGroup = null;
+
+ var nextOpId = 0;
+ // Start a new operation.
+ function startOperation(cm) {
+ cm.curOp = {
+ cm: cm,
+ viewChanged: false, // Flag that indicates that lines might need to be redrawn
+ startHeight: cm.doc.height, // Used to detect need to update scrollbar
+ forceUpdate: false, // Used to force a redraw
+ updateInput: null, // Whether to reset the input textarea
+ typing: false, // Whether this reset should be careful to leave existing text (for compositing)
+ changeObjs: null, // Accumulated changes, for firing change events
+ cursorActivityHandlers: null, // Set of handlers to fire cursorActivity on
+ cursorActivityCalled: 0, // Tracks which cursorActivity handlers have been called already
+ selectionChanged: false, // Whether the selection needs to be redrawn
+ updateMaxLine: false, // Set when the widest line needs to be determined anew
+ scrollLeft: null, scrollTop: null, // Intermediate scroll position, not pushed to DOM yet
+ scrollToPos: null, // Used to scroll to a specific position
+ id: ++nextOpId // Unique ID
+ };
+ if (operationGroup) {
+ operationGroup.ops.push(cm.curOp);
+ } else {
+ cm.curOp.ownsGroup = operationGroup = {
+ ops: [cm.curOp],
+ delayedCallbacks: []
+ };
+ }
+ }
+
+ function fireCallbacksForOps(group) {
+ // Calls delayed callbacks and cursorActivity handlers until no
+ // new ones appear
+ var callbacks = group.delayedCallbacks, i = 0;
+ do {
+ for (; i < callbacks.length; i++)
+ callbacks[i]();
+ for (var j = 0; j < group.ops.length; j++) {
+ var op = group.ops[j];
+ if (op.cursorActivityHandlers)
+ while (op.cursorActivityCalled < op.cursorActivityHandlers.length)
+ op.cursorActivityHandlers[op.cursorActivityCalled++](op.cm);
+ }
+ } while (i < callbacks.length);
+ }
+
+ // Finish an operation, updating the display and signalling delayed events
+ function endOperation(cm) {
+ var op = cm.curOp, group = op.ownsGroup;
+ if (!group) return;
+
+ try { fireCallbacksForOps(group); }
+ finally {
+ operationGroup = null;
+ for (var i = 0; i < group.ops.length; i++)
+ group.ops[i].cm.curOp = null;
+ endOperations(group);
+ }
+ }
+
+ // The DOM updates done when an operation finishes are batched so
+ // that the minimum number of relayouts are required.
+ function endOperations(group) {
+ var ops = group.ops;
+ for (var i = 0; i < ops.length; i++) // Read DOM
+ endOperation_R1(ops[i]);
+ for (var i = 0; i < ops.length; i++) // Write DOM (maybe)
+ endOperation_W1(ops[i]);
+ for (var i = 0; i < ops.length; i++) // Read DOM
+ endOperation_R2(ops[i]);
+ for (var i = 0; i < ops.length; i++) // Write DOM (maybe)
+ endOperation_W2(ops[i]);
+ for (var i = 0; i < ops.length; i++) // Read DOM
+ endOperation_finish(ops[i]);
+ }
+
+ function endOperation_R1(op) {
+ var cm = op.cm, display = cm.display;
+ if (op.updateMaxLine) findMaxLine(cm);
+
+ op.mustUpdate = op.viewChanged || op.forceUpdate || op.scrollTop != null ||
+ op.scrollToPos && (op.scrollToPos.from.line < display.viewFrom ||
+ op.scrollToPos.to.line >= display.viewTo) ||
+ display.maxLineChanged && cm.options.lineWrapping;
+ op.update = op.mustUpdate &&
+ new DisplayUpdate(cm, op.mustUpdate && {top: op.scrollTop, ensure: op.scrollToPos}, op.forceUpdate);
+ }
+
+ function endOperation_W1(op) {
+ op.updatedDisplay = op.mustUpdate && updateDisplayIfNeeded(op.cm, op.update);
+ }
+
+ function endOperation_R2(op) {
+ var cm = op.cm, display = cm.display;
+ if (op.updatedDisplay) updateHeightsInViewport(cm);
+
+ op.barMeasure = measureForScrollbars(cm);
+
+ // If the max line changed since it was last measured, measure it,
+ // and ensure the document's width matches it.
+ // updateDisplay_W2 will use these properties to do the actual resizing
+ if (display.maxLineChanged && !cm.options.lineWrapping) {
+ op.adjustWidthTo = measureChar(cm, display.maxLine, display.maxLine.text.length).left + 3;
+ op.maxScrollLeft = Math.max(0, display.sizer.offsetLeft + op.adjustWidthTo +
+ scrollerCutOff - display.scroller.clientWidth);
+ }
+
+ if (op.updatedDisplay || op.selectionChanged)
+ op.newSelectionNodes = drawSelection(cm);
+ }
+
+ function endOperation_W2(op) {
+ var cm = op.cm;
+
+ if (op.adjustWidthTo != null) {
+ cm.display.sizer.style.minWidth = op.adjustWidthTo + "px";
+ if (op.maxScrollLeft < cm.doc.scrollLeft)
+ setScrollLeft(cm, Math.min(cm.display.scroller.scrollLeft, op.maxScrollLeft), true);
+ cm.display.maxLineChanged = false;
+ }
+
+ if (op.newSelectionNodes)
+ showSelection(cm, op.newSelectionNodes);
+ if (op.updatedDisplay)
+ setDocumentHeight(cm, op.barMeasure);
+ if (op.updatedDisplay || op.startHeight != cm.doc.height)
+ updateScrollbars(cm, op.barMeasure);
+
+ if (op.selectionChanged) restartBlink(cm);
+
+ if (cm.state.focused && op.updateInput)
+ resetInput(cm, op.typing);
+ }
+
+ function endOperation_finish(op) {
+ var cm = op.cm, display = cm.display, doc = cm.doc;
+
+ if (op.adjustWidthTo != null && Math.abs(op.barMeasure.scrollWidth - cm.display.scroller.scrollWidth) > 1)
+ updateScrollbars(cm);
+
+ if (op.updatedDisplay) postUpdateDisplay(cm, op.update);
+
+ // Abort mouse wheel delta measurement, when scrolling explicitly
+ if (display.wheelStartX != null && (op.scrollTop != null || op.scrollLeft != null || op.scrollToPos))
+ display.wheelStartX = display.wheelStartY = null;
+
+ // Propagate the scroll position to the actual DOM scroller
+ if (op.scrollTop != null && (display.scroller.scrollTop != op.scrollTop || op.forceScroll)) {
+ var top = Math.max(0, Math.min(display.scroller.scrollHeight - display.scroller.clientHeight, op.scrollTop));
+ display.scroller.scrollTop = display.scrollbarV.scrollTop = doc.scrollTop = top;
+ }
+ if (op.scrollLeft != null && (display.scroller.scrollLeft != op.scrollLeft || op.forceScroll)) {
+ var left = Math.max(0, Math.min(display.scroller.scrollWidth - display.scroller.clientWidth, op.scrollLeft));
+ display.scroller.scrollLeft = display.scrollbarH.scrollLeft = doc.scrollLeft = left;
+ alignHorizontally(cm);
+ }
+ // If we need to scroll a specific position into view, do so.
+ if (op.scrollToPos) {
+ var coords = scrollPosIntoView(cm, clipPos(doc, op.scrollToPos.from),
+ clipPos(doc, op.scrollToPos.to), op.scrollToPos.margin);
+ if (op.scrollToPos.isCursor && cm.state.focused) maybeScrollWindow(cm, coords);
+ }
+
+ // Fire events for markers that are hidden/unidden by editing or
+ // undoing
+ var hidden = op.maybeHiddenMarkers, unhidden = op.maybeUnhiddenMarkers;
+ if (hidden) for (var i = 0; i < hidden.length; ++i)
+ if (!hidden[i].lines.length) signal(hidden[i], "hide");
+ if (unhidden) for (var i = 0; i < unhidden.length; ++i)
+ if (unhidden[i].lines.length) signal(unhidden[i], "unhide");
+
+ if (display.wrapper.offsetHeight)
+ doc.scrollTop = cm.display.scroller.scrollTop;
+
+ // Apply workaround for two webkit bugs
+ if (op.updatedDisplay && webkit) {
+ if (cm.options.lineWrapping)
+ checkForWebkitWidthBug(cm, op.barMeasure); // (Issue #2420)
+ if (op.barMeasure.scrollWidth > op.barMeasure.clientWidth &&
+ op.barMeasure.scrollWidth < op.barMeasure.clientWidth + 1 &&
+ !hScrollbarTakesSpace(cm))
+ updateScrollbars(cm); // (Issue #2562)
+ }
+
+ // Fire change events, and delayed event handlers
+ if (op.changeObjs)
+ signal(cm, "changes", cm, op.changeObjs);
+ }
+
+ // Run the given function in an operation
+ function runInOp(cm, f) {
+ if (cm.curOp) return f();
+ startOperation(cm);
+ try { return f(); }
+ finally { endOperation(cm); }
+ }
+ // Wraps a function in an operation. Returns the wrapped function.
+ function operation(cm, f) {
+ return function() {
+ if (cm.curOp) return f.apply(cm, arguments);
+ startOperation(cm);
+ try { return f.apply(cm, arguments); }
+ finally { endOperation(cm); }
+ };
+ }
+ // Used to add methods to editor and doc instances, wrapping them in
+ // operations.
+ function methodOp(f) {
+ return function() {
+ if (this.curOp) return f.apply(this, arguments);
+ startOperation(this);
+ try { return f.apply(this, arguments); }
+ finally { endOperation(this); }
+ };
+ }
+ function docMethodOp(f) {
+ return function() {
+ var cm = this.cm;
+ if (!cm || cm.curOp) return f.apply(this, arguments);
+ startOperation(cm);
+ try { return f.apply(this, arguments); }
+ finally { endOperation(cm); }
+ };
+ }
+
+ // VIEW TRACKING
+
+ // These objects are used to represent the visible (currently drawn)
+ // part of the document. A LineView may correspond to multiple
+ // logical lines, if those are connected by collapsed ranges.
+ function LineView(doc, line, lineN) {
+ // The starting line
+ this.line = line;
+ // Continuing lines, if any
+ this.rest = visualLineContinued(line);
+ // Number of logical lines in this visual line
+ this.size = this.rest ? lineNo(lst(this.rest)) - lineN + 1 : 1;
+ this.node = this.text = null;
+ this.hidden = lineIsHidden(doc, line);
+ }
+
+ // Create a range of LineView objects for the given lines.
+ function buildViewArray(cm, from, to) {
+ var array = [], nextPos;
+ for (var pos = from; pos < to; pos = nextPos) {
+ var view = new LineView(cm.doc, getLine(cm.doc, pos), pos);
+ nextPos = pos + view.size;
+ array.push(view);
+ }
+ return array;
+ }
+
+ // Updates the display.view data structure for a given change to the
+ // document. From and to are in pre-change coordinates. Lendiff is
+ // the amount of lines added or subtracted by the change. This is
+ // used for changes that span multiple lines, or change the way
+ // lines are divided into visual lines. regLineChange (below)
+ // registers single-line changes.
+ function regChange(cm, from, to, lendiff) {
+ if (from == null) from = cm.doc.first;
+ if (to == null) to = cm.doc.first + cm.doc.size;
+ if (!lendiff) lendiff = 0;
+
+ var display = cm.display;
+ if (lendiff && to < display.viewTo &&
+ (display.updateLineNumbers == null || display.updateLineNumbers > from))
+ display.updateLineNumbers = from;
+
+ cm.curOp.viewChanged = true;
+
+ if (from >= display.viewTo) { // Change after
+ if (sawCollapsedSpans && visualLineNo(cm.doc, from) < display.viewTo)
+ resetView(cm);
+ } else if (to <= display.viewFrom) { // Change before
+ if (sawCollapsedSpans && visualLineEndNo(cm.doc, to + lendiff) > display.viewFrom) {
+ resetView(cm);
+ } else {
+ display.viewFrom += lendiff;
+ display.viewTo += lendiff;
+ }
+ } else if (from <= display.viewFrom && to >= display.viewTo) { // Full overlap
+ resetView(cm);
+ } else if (from <= display.viewFrom) { // Top overlap
+ var cut = viewCuttingPoint(cm, to, to + lendiff, 1);
+ if (cut) {
+ display.view = display.view.slice(cut.index);
+ display.viewFrom = cut.lineN;
+ display.viewTo += lendiff;
+ } else {
+ resetView(cm);
+ }
+ } else if (to >= display.viewTo) { // Bottom overlap
+ var cut = viewCuttingPoint(cm, from, from, -1);
+ if (cut) {
+ display.view = display.view.slice(0, cut.index);
+ display.viewTo = cut.lineN;
+ } else {
+ resetView(cm);
+ }
+ } else { // Gap in the middle
+ var cutTop = viewCuttingPoint(cm, from, from, -1);
+ var cutBot = viewCuttingPoint(cm, to, to + lendiff, 1);
+ if (cutTop && cutBot) {
+ display.view = display.view.slice(0, cutTop.index)
+ .concat(buildViewArray(cm, cutTop.lineN, cutBot.lineN))
+ .concat(display.view.slice(cutBot.index));
+ display.viewTo += lendiff;
+ } else {
+ resetView(cm);
+ }
+ }
+
+ var ext = display.externalMeasured;
+ if (ext) {
+ if (to < ext.lineN)
+ ext.lineN += lendiff;
+ else if (from < ext.lineN + ext.size)
+ display.externalMeasured = null;
+ }
+ }
+
+ // Register a change to a single line. Type must be one of "text",
+ // "gutter", "class", "widget"
+ function regLineChange(cm, line, type) {
+ cm.curOp.viewChanged = true;
+ var display = cm.display, ext = cm.display.externalMeasured;
+ if (ext && line >= ext.lineN && line < ext.lineN + ext.size)
+ display.externalMeasured = null;
+
+ if (line < display.viewFrom || line >= display.viewTo) return;
+ var lineView = display.view[findViewIndex(cm, line)];
+ if (lineView.node == null) return;
+ var arr = lineView.changes || (lineView.changes = []);
+ if (indexOf(arr, type) == -1) arr.push(type);
+ }
+
+ // Clear the view.
+ function resetView(cm) {
+ cm.display.viewFrom = cm.display.viewTo = cm.doc.first;
+ cm.display.view = [];
+ cm.display.viewOffset = 0;
+ }
+
+ // Find the view element corresponding to a given line. Return null
+ // when the line isn't visible.
+ function findViewIndex(cm, n) {
+ if (n >= cm.display.viewTo) return null;
+ n -= cm.display.viewFrom;
+ if (n < 0) return null;
+ var view = cm.display.view;
+ for (var i = 0; i < view.length; i++) {
+ n -= view[i].size;
+ if (n < 0) return i;
+ }
+ }
+
+ function viewCuttingPoint(cm, oldN, newN, dir) {
+ var index = findViewIndex(cm, oldN), diff, view = cm.display.view;
+ if (!sawCollapsedSpans || newN == cm.doc.first + cm.doc.size)
+ return {index: index, lineN: newN};
+ for (var i = 0, n = cm.display.viewFrom; i < index; i++)
+ n += view[i].size;
+ if (n != oldN) {
+ if (dir > 0) {
+ if (index == view.length - 1) return null;
+ diff = (n + view[index].size) - oldN;
+ index++;
+ } else {
+ diff = n - oldN;
+ }
+ oldN += diff; newN += diff;
+ }
+ while (visualLineNo(cm.doc, newN) != newN) {
+ if (index == (dir < 0 ? 0 : view.length - 1)) return null;
+ newN += dir * view[index - (dir < 0 ? 1 : 0)].size;
+ index += dir;
+ }
+ return {index: index, lineN: newN};
+ }
+
+ // Force the view to cover a given range, adding empty view element
+ // or clipping off existing ones as needed.
+ function adjustView(cm, from, to) {
+ var display = cm.display, view = display.view;
+ if (view.length == 0 || from >= display.viewTo || to <= display.viewFrom) {
+ display.view = buildViewArray(cm, from, to);
+ display.viewFrom = from;
+ } else {
+ if (display.viewFrom > from)
+ display.view = buildViewArray(cm, from, display.viewFrom).concat(display.view);
+ else if (display.viewFrom < from)
+ display.view = display.view.slice(findViewIndex(cm, from));
+ display.viewFrom = from;
+ if (display.viewTo < to)
+ display.view = display.view.concat(buildViewArray(cm, display.viewTo, to));
+ else if (display.viewTo > to)
+ display.view = display.view.slice(0, findViewIndex(cm, to));
+ }
+ display.viewTo = to;
+ }
+
+ // Count the number of lines in the view whose DOM representation is
+ // out of date (or nonexistent).
+ function countDirtyView(cm) {
+ var view = cm.display.view, dirty = 0;
+ for (var i = 0; i < view.length; i++) {
+ var lineView = view[i];
+ if (!lineView.hidden && (!lineView.node || lineView.changes)) ++dirty;
+ }
+ return dirty;
+ }
+
+ // INPUT HANDLING
+
+ // Poll for input changes, using the normal rate of polling. This
+ // runs as long as the editor is focused.
+ function slowPoll(cm) {
+ if (cm.display.pollingFast) return;
+ cm.display.poll.set(cm.options.pollInterval, function() {
+ readInput(cm);
+ if (cm.state.focused) slowPoll(cm);
+ });
+ }
+
+ // When an event has just come in that is likely to add or change
+ // something in the input textarea, we poll faster, to ensure that
+ // the change appears on the screen quickly.
+ function fastPoll(cm) {
+ var missed = false;
+ cm.display.pollingFast = true;
+ function p() {
+ var changed = readInput(cm);
+ if (!changed && !missed) {missed = true; cm.display.poll.set(60, p);}
+ else {cm.display.pollingFast = false; slowPoll(cm);}
+ }
+ cm.display.poll.set(20, p);
+ }
+
+ // This will be set to an array of strings when copying, so that,
+ // when pasting, we know what kind of selections the copied text
+ // was made out of.
+ var lastCopied = null;
+
+  // Read input from the textarea, and update the document to match.
+  // When something is selected, it is present in the textarea, and
+  // selected (unless it is huge, in which case a placeholder is
+  // used). When nothing is selected, the cursor sits after previously
+  // seen text (can be empty), which is stored in prevInput (we must
+  // not reset the textarea when typing, because that breaks IME).
+  // Returns true when a change was applied to the document.
+  function readInput(cm) {
+    var input = cm.display.input, prevInput = cm.display.prevInput, doc = cm.doc;
+    // Since this is called a *lot*, try to bail out as cheaply as
+    // possible when it is clear that nothing happened. hasSelection
+    // will be the case when there is a lot of text in the textarea,
+    // in which case reading its value would be expensive.
+    if (!cm.state.focused || (hasSelection(input) && !prevInput) || isReadOnly(cm) || cm.options.disableInput || cm.state.keySeq)
+      return false;
+    // See paste handler for more on the fakedLastChar kludge
+    if (cm.state.pasteIncoming && cm.state.fakedLastChar) {
+      input.value = input.value.substring(0, input.value.length - 1);
+      cm.state.fakedLastChar = false;
+    }
+    var text = input.value;
+    // If nothing changed, bail.
+    if (text == prevInput && !cm.somethingSelected()) return false;
+    // Work around nonsensical selection resetting in IE9/10, and
+    // inexplicable appearance of private area unicode characters on
+    // some key combos in Mac (#2689).
+    if (ie && ie_version >= 9 && cm.display.inputHasSelection === text ||
+        mac && /[\uf700-\uf7ff]/.test(text)) {
+      resetInput(cm);
+      return false;
+    }
+
+    // Wrap the rest in a single operation unless one is already active.
+    var withOp = !cm.curOp;
+    if (withOp) startOperation(cm);
+    cm.display.shift = false;
+
+    // A leading zero-width space while the selection still matches the
+    // one saved for the context menu is presumably filler inserted by
+    // the context-menu hack, not user input — count it as seen text.
+    if (text.charCodeAt(0) == 0x200b && doc.sel == cm.display.selForContextMenu && !prevInput)
+      prevInput = "\u200b";
+    // Find the part of the input that is actually new
+    var same = 0, l = Math.min(prevInput.length, text.length);
+    while (same < l && prevInput.charCodeAt(same) == text.charCodeAt(same)) ++same;
+    var inserted = text.slice(same), textLines = splitLines(inserted);
+
+    // When pasting N lines into N selections, insert one line per selection
+    var multiPaste = null;
+    if (cm.state.pasteIncoming && doc.sel.ranges.length > 1) {
+      if (lastCopied && lastCopied.join("\n") == inserted)
+        multiPaste = doc.sel.ranges.length % lastCopied.length == 0 && map(lastCopied, splitLines);
+      else if (textLines.length == doc.sel.ranges.length)
+        multiPaste = map(textLines, function(l) { return [l]; });
+    }
+
+    // Normal behavior is to insert the new text into every selection
+    for (var i = doc.sel.ranges.length - 1; i >= 0; i--) {
+      var range = doc.sel.ranges[i];
+      var from = range.from(), to = range.to();
+      // Handle deletion
+      if (same < prevInput.length)
+        from = Pos(from.line, from.ch - (prevInput.length - same));
+      // Handle overwrite
+      else if (cm.state.overwrite && range.empty() && !cm.state.pasteIncoming)
+        to = Pos(to.line, Math.min(getLine(doc, to.line).text.length, to.ch + lst(textLines).length));
+      var updateInput = cm.curOp.updateInput;
+      var changeEvent = {from: from, to: to, text: multiPaste ? multiPaste[i % multiPaste.length] : textLines,
+                         origin: cm.state.pasteIncoming ? "paste" : cm.state.cutIncoming ? "cut" : "+input"};
+      makeChange(cm.doc, changeEvent);
+      signalLater(cm, "inputRead", cm, changeEvent);
+      // When an 'electric' character is inserted, immediately trigger a reindent
+      if (inserted && !cm.state.pasteIncoming && cm.options.electricChars &&
+          cm.options.smartIndent && range.head.ch < 100 &&
+          (!i || doc.sel.ranges[i - 1].head.line != range.head.line)) {
+        var mode = cm.getModeAt(range.head);
+        var end = changeEnd(changeEvent);
+        if (mode.electricChars) {
+          for (var j = 0; j < mode.electricChars.length; j++)
+            if (inserted.indexOf(mode.electricChars.charAt(j)) > -1) {
+              indentLine(cm, end.line, "smart");
+              break;
+            }
+        } else if (mode.electricInput) {
+          if (mode.electricInput.test(getLine(doc, end.line).text.slice(0, end.ch)))
+            indentLine(cm, end.line, "smart");
+        }
+      }
+    }
+    ensureCursorVisible(cm);
+    // updateInput was captured inside the loop; var hoisting makes it
+    // visible here, restoring the value from the last change applied.
+    cm.curOp.updateInput = updateInput;
+    cm.curOp.typing = true;
+
+    // Don't leave long text in the textarea, since it makes further polling slow
+    if (text.length > 1000 || text.indexOf("\n") > -1) input.value = cm.display.prevInput = "";
+    else cm.display.prevInput = text;
+    if (withOp) endOperation(cm);
+    cm.state.pasteIncoming = cm.state.cutIncoming = false;
+    return true;
+  }
+
+  // Reset the input to correspond to the selection (or to be empty,
+  // when not typing and nothing is selected). When `typing` is true
+  // and nothing is selected, the textarea is left alone so in-flight
+  // input is not clobbered.
+  function resetInput(cm, typing) {
+    var minimal, selected, doc = cm.doc;
+    if (cm.somethingSelected()) {
+      cm.display.prevInput = "";
+      var range = doc.sel.primary();
+      // Very large selections get a "-" placeholder instead of the
+      // real text, to avoid expensive textarea updates.
+      minimal = hasCopyEvent &&
+        (range.to().line - range.from().line > 100 || (selected = cm.getSelection()).length > 1000);
+      var content = minimal ? "-" : selected || cm.getSelection();
+      cm.display.input.value = content;
+      if (cm.state.focused) selectInput(cm.display.input);
+      if (ie && ie_version >= 9) cm.display.inputHasSelection = content;
+    } else if (!typing) {
+      cm.display.prevInput = cm.display.input.value = "";
+      if (ie && ie_version >= 9) cm.display.inputHasSelection = null;
+    }
+    cm.display.inaccurateSelection = minimal;
+  }
+
+ function focusInput(cm) {
+ if (cm.options.readOnly != "nocursor" && (!mobile || activeElt() != cm.display.input))
+ cm.display.input.focus();
+ }
+
+  // Focus the input and run the focus handler if the editor does not
+  // already consider itself focused.
+  function ensureFocus(cm) {
+    if (!cm.state.focused) { focusInput(cm); onFocus(cm); }
+  }
+
+ function isReadOnly(cm) {
+ return cm.options.readOnly || cm.doc.cantEdit;
+ }
+
+ // EVENT HANDLERS
+
+  // Attach the necessary event handlers when initializing the editor.
+  // Wires mouse, scroll, wheel, keyboard, focus, drag/drop, and
+  // clipboard events on the various display elements.
+  function registerEventHandlers(cm) {
+    var d = cm.display;
+    on(d.scroller, "mousedown", operation(cm, onMouseDown));
+    // Older IE's will not fire a second mousedown for a double click
+    if (ie && ie_version < 11)
+      on(d.scroller, "dblclick", operation(cm, function(e) {
+        if (signalDOMEvent(cm, e)) return;
+        var pos = posFromMouse(cm, e);
+        if (!pos || clickInGutter(cm, e) || eventInWidget(cm.display, e)) return;
+        e_preventDefault(e);
+        var word = cm.findWordAt(pos);
+        extendSelection(cm.doc, word.anchor, word.head);
+      }));
+    else
+      on(d.scroller, "dblclick", function(e) { signalDOMEvent(cm, e) || e_preventDefault(e); });
+    // Prevent normal selection in the editor (we handle our own)
+    on(d.lineSpace, "selectstart", function(e) {
+      if (!eventInWidget(d, e)) e_preventDefault(e);
+    });
+    // Some browsers fire contextmenu *after* opening the menu, at
+    // which point we can't mess with it anymore. Context menu is
+    // handled in onMouseDown for these browsers.
+    if (!captureRightClick) on(d.scroller, "contextmenu", function(e) {onContextMenu(cm, e);});
+
+    // Sync scrolling between fake scrollbars and real scrollable
+    // area, ensure viewport is updated when scrolling.
+    on(d.scroller, "scroll", function() {
+      if (d.scroller.clientHeight) {
+        setScrollTop(cm, d.scroller.scrollTop);
+        setScrollLeft(cm, d.scroller.scrollLeft, true);
+        signal(cm, "scroll", cm);
+      }
+    });
+    on(d.scrollbarV, "scroll", function() {
+      if (d.scroller.clientHeight) setScrollTop(cm, d.scrollbarV.scrollTop);
+    });
+    on(d.scrollbarH, "scroll", function() {
+      if (d.scroller.clientHeight) setScrollLeft(cm, d.scrollbarH.scrollLeft);
+    });
+
+    // Listen to wheel events in order to try and update the viewport on time.
+    on(d.scroller, "mousewheel", function(e){onScrollWheel(cm, e);});
+    on(d.scroller, "DOMMouseScroll", function(e){onScrollWheel(cm, e);});
+
+    // Prevent clicks in the scrollbars from killing focus
+    function reFocus() { if (cm.state.focused) setTimeout(bind(focusInput, cm), 0); }
+    on(d.scrollbarH, "mousedown", reFocus);
+    on(d.scrollbarV, "mousedown", reFocus);
+    // Prevent wrapper from ever scrolling
+    on(d.wrapper, "scroll", function() { d.wrapper.scrollTop = d.wrapper.scrollLeft = 0; });
+
+    on(d.input, "keyup", function(e) { onKeyUp.call(cm, e); });
+    on(d.input, "input", function() {
+      if (ie && ie_version >= 9 && cm.display.inputHasSelection) cm.display.inputHasSelection = null;
+      fastPoll(cm);
+    });
+    on(d.input, "keydown", operation(cm, onKeyDown));
+    on(d.input, "keypress", operation(cm, onKeyPress));
+    on(d.input, "focus", bind(onFocus, cm));
+    on(d.input, "blur", bind(onBlur, cm));
+
+    // Stop drag-related events unless a user handler took them over.
+    function drag_(e) {
+      if (!signalDOMEvent(cm, e)) e_stop(e);
+    }
+    if (cm.options.dragDrop) {
+      on(d.scroller, "dragstart", function(e){onDragStart(cm, e);});
+      on(d.scroller, "dragenter", drag_);
+      on(d.scroller, "dragover", drag_);
+      on(d.scroller, "drop", operation(cm, onDrop));
+    }
+    on(d.scroller, "paste", function(e) {
+      if (eventInWidget(d, e)) return;
+      cm.state.pasteIncoming = true;
+      focusInput(cm);
+      fastPoll(cm);
+    });
+    on(d.input, "paste", function() {
+      // Workaround for webkit bug https://bugs.webkit.org/show_bug.cgi?id=90206
+      // Add a char to the end of textarea before paste occur so that
+      // selection doesn't span to the end of textarea.
+      if (webkit && !cm.state.fakedLastChar && !(new Date - cm.state.lastMiddleDown < 200)) {
+        var start = d.input.selectionStart, end = d.input.selectionEnd;
+        d.input.value += "$";
+        // The selection end needs to be set before the start, otherwise there
+        // can be an intermediate non-empty selection between the two, which
+        // can override the middle-click paste buffer on linux and cause the
+        // wrong thing to get pasted.
+        d.input.selectionEnd = end;
+        d.input.selectionStart = start;
+        cm.state.fakedLastChar = true;
+      }
+      cm.state.pasteIncoming = true;
+      fastPoll(cm);
+    });
+
+    // Fill the textarea (and lastCopied) with the text about to be
+    // copied or cut. With no selection, whole lines under the cursors
+    // are used instead.
+    function prepareCopyCut(e) {
+      if (cm.somethingSelected()) {
+        lastCopied = cm.getSelections();
+        if (d.inaccurateSelection) {
+          d.prevInput = "";
+          d.inaccurateSelection = false;
+          d.input.value = lastCopied.join("\n");
+          selectInput(d.input);
+        }
+      } else {
+        var text = [], ranges = [];
+        for (var i = 0; i < cm.doc.sel.ranges.length; i++) {
+          var line = cm.doc.sel.ranges[i].head.line;
+          var lineRange = {anchor: Pos(line, 0), head: Pos(line + 1, 0)};
+          ranges.push(lineRange);
+          text.push(cm.getRange(lineRange.anchor, lineRange.head));
+        }
+        if (e.type == "cut") {
+          cm.setSelections(ranges, null, sel_dontScroll);
+        } else {
+          d.prevInput = "";
+          d.input.value = text.join("\n");
+          selectInput(d.input);
+        }
+        lastCopied = text;
+      }
+      if (e.type == "cut") cm.state.cutIncoming = true;
+    }
+    on(d.input, "cut", prepareCopyCut);
+    on(d.input, "copy", prepareCopyCut);
+
+    // Needed to handle Tab key in KHTML
+    if (khtml) on(d.sizer, "mouseup", function() {
+      if (activeElt() == d.input) d.input.blur();
+      focusInput(cm);
+    });
+  }
+
+ // Called when the window resizes
+ function onResize(cm) {
+ var d = cm.display;
+ if (d.lastWrapHeight == d.wrapper.clientHeight && d.lastWrapWidth == d.wrapper.clientWidth)
+ return;
+ // Might be a text scaling operation, clear size caches.
+ d.cachedCharWidth = d.cachedTextHeight = d.cachedPaddingH = null;
+ cm.setSize();
+ }
+
+ // MOUSE EVENTS
+
+  // Return true when the given mouse event happened in a widget
+  // (undefined otherwise). Walks up from the event target; a missing
+  // node, a node that opted out via ignoreEvents, or a direct child
+  // of the sizer other than the mover counts as a widget hit.
+  function eventInWidget(display, e) {
+    for (var n = e_target(e); n != display.wrapper; n = n.parentNode) {
+      if (!n || n.ignoreEvents || n.parentNode == display.sizer && n != display.mover) return true;
+    }
+  }
+
+  // Given a mouse event, find the corresponding position. If liberal
+  // is false, it checks whether a gutter or scrollbar was clicked,
+  // and returns null if it was. forRect is used by rectangular
+  // selections, and tries to estimate a character position even for
+  // coordinates beyond the right of the text. Returns a position
+  // object, or null.
+  function posFromMouse(cm, e, liberal, forRect) {
+    var display = cm.display;
+    if (!liberal) {
+      var target = e_target(e);
+      if (target == display.scrollbarH || target == display.scrollbarV ||
+          target == display.scrollbarFiller || target == display.gutterFiller) return null;
+    }
+    var x, y, space = display.lineSpace.getBoundingClientRect();
+    // Fails unpredictably on IE[67] when mouse is dragged around quickly.
+    try { x = e.clientX - space.left; y = e.clientY - space.top; }
+    catch (e) { return null; }
+    var coords = coordsChar(cm, x, y), line;
+    // Past end of line: extrapolate a column from the pixel offset,
+    // compensating for tab expansion.
+    if (forRect && coords.xRel == 1 && (line = getLine(cm.doc, coords.line).text).length == coords.ch) {
+      var colDiff = countColumn(line, line.length, cm.options.tabSize) - line.length;
+      coords = Pos(coords.line, Math.max(0, Math.round((x - paddingH(cm.display).left) / charWidth(cm.display)) - colDiff));
+    }
+    return coords;
+  }
+
+  // A mouse down can be a single click, double click, triple click,
+  // start of selection drag, start of text drag, new cursor
+  // (ctrl-click), rectangle drag (alt-drag), or xwin
+  // middle-click-paste. Or it might be a click on something we should
+  // not interfere with, such as a scrollbar or widget.
+  function onMouseDown(e) {
+    if (signalDOMEvent(this, e)) return;
+    var cm = this, display = cm.display;
+    display.shift = e.shiftKey;
+
+    if (eventInWidget(display, e)) {
+      if (!webkit) {
+        // Briefly turn off draggability, to allow widgets to do
+        // normal dragging things.
+        display.scroller.draggable = false;
+        setTimeout(function(){display.scroller.draggable = true;}, 100);
+      }
+      return;
+    }
+    if (clickInGutter(cm, e)) return;
+    var start = posFromMouse(cm, e);
+    window.focus();
+
+    switch (e_button(e)) {
+    case 1:
+      // Left button: selection/drag handling, or just eat the event
+      // when it landed on the scroller without a position.
+      if (start)
+        leftButtonDown(cm, e, start);
+      else if (e_target(e) == display.scroller)
+        e_preventDefault(e);
+      break;
+    case 2:
+      // Middle button: record the time (for the webkit paste kludge)
+      // and place the cursor.
+      if (webkit) cm.state.lastMiddleDown = +new Date;
+      if (start) extendSelection(cm.doc, start);
+      setTimeout(bind(focusInput, cm), 20);
+      e_preventDefault(e);
+      break;
+    case 3:
+      if (captureRightClick) onContextMenu(cm, e);
+      break;
+    }
+  }
+
+  // Time/position of the previous (double) click, used to detect
+  // double and triple clicks within a 400ms window.
+  var lastClick, lastDoubleClick;
+  function leftButtonDown(cm, e, start) {
+    setTimeout(bind(ensureFocus, cm), 0);
+
+    // Classify the click as single, double, or triple based on how
+    // recently (and where) the previous click happened.
+    var now = +new Date, type;
+    if (lastDoubleClick && lastDoubleClick.time > now - 400 && cmp(lastDoubleClick.pos, start) == 0) {
+      type = "triple";
+    } else if (lastClick && lastClick.time > now - 400 && cmp(lastClick.pos, start) == 0) {
+      type = "double";
+      lastDoubleClick = {time: now, pos: start};
+    } else {
+      type = "single";
+      lastClick = {time: now, pos: start};
+    }
+
+    // A single click inside an existing selection starts a text drag
+    // (when drag-and-drop is available); anything else adjusts the
+    // selection.
+    var sel = cm.doc.sel, modifier = mac ? e.metaKey : e.ctrlKey;
+    if (cm.options.dragDrop && dragAndDrop && !isReadOnly(cm) &&
+        type == "single" && sel.contains(start) > -1 && sel.somethingSelected())
+      leftButtonStartDrag(cm, e, start, modifier);
+    else
+      leftButtonSelect(cm, e, start, type, modifier);
+  }
+
+  // Start a text drag. When it ends, see if any dragging actually
+  // happen, and treat as a click if it didn't.
+  function leftButtonStartDrag(cm, e, start, modifier) {
+    var display = cm.display;
+    var dragEnd = operation(cm, function(e2) {
+      if (webkit) display.scroller.draggable = false;
+      cm.state.draggingText = false;
+      off(document, "mouseup", dragEnd);
+      off(display.scroller, "drop", dragEnd);
+      // Moved less than 10px overall: treat the gesture as a plain
+      // click that places the cursor (unless a modifier was held).
+      if (Math.abs(e.clientX - e2.clientX) + Math.abs(e.clientY - e2.clientY) < 10) {
+        e_preventDefault(e2);
+        if (!modifier)
+          extendSelection(cm.doc, start);
+        focusInput(cm);
+        // Work around unexplainable focus problem in IE9 (#2127)
+        if (ie && ie_version == 9)
+          setTimeout(function() {document.body.focus(); focusInput(cm);}, 20);
+      }
+    });
+    // Let the drag handler handle this.
+    if (webkit) display.scroller.draggable = true;
+    // draggingText doubles as the drag-end callback (see onDrop).
+    cm.state.draggingText = dragEnd;
+    // IE's approach to draggable
+    if (display.scroller.dragDrop) display.scroller.dragDrop();
+    on(document, "mouseup", dragEnd);
+    on(display.scroller, "drop", dragEnd);
+  }
+
+  // Normal selection, as opposed to text dragging. `addNew` holds the
+  // platform modifier (cmd/ctrl), which adds a new selection range
+  // rather than replacing the existing ones.
+  function leftButtonSelect(cm, e, start, type, addNew) {
+    var display = cm.display, doc = cm.doc;
+    e_preventDefault(e);
+
+    var ourRange, ourIndex, startSel = doc.sel;
+    if (addNew && !e.shiftKey) {
+      ourIndex = doc.sel.contains(start);
+      if (ourIndex > -1)
+        ourRange = doc.sel.ranges[ourIndex];
+      else
+        ourRange = new Range(start, start);
+    } else {
+      ourRange = doc.sel.primary();
+    }
+
+    // Alt-drag starts a rectangular selection; double/triple clicks
+    // select by word/line.
+    if (e.altKey) {
+      type = "rect";
+      if (!addNew) ourRange = new Range(start, start);
+      start = posFromMouse(cm, e, true, true);
+      ourIndex = -1;
+    } else if (type == "double") {
+      var word = cm.findWordAt(start);
+      if (cm.display.shift || doc.extend)
+        ourRange = extendRange(doc, ourRange, word.anchor, word.head);
+      else
+        ourRange = word;
+    } else if (type == "triple") {
+      var line = new Range(Pos(start.line, 0), clipPos(doc, Pos(start.line + 1, 0)));
+      if (cm.display.shift || doc.extend)
+        ourRange = extendRange(doc, ourRange, line.anchor, line.head);
+      else
+        ourRange = line;
+    } else {
+      ourRange = extendRange(doc, ourRange, start);
+    }
+
+    // Commit the initial selection: replace everything, update one
+    // existing range, or append a brand-new range.
+    if (!addNew) {
+      ourIndex = 0;
+      setSelection(doc, new Selection([ourRange], 0), sel_mouse);
+      startSel = doc.sel;
+    } else if (ourIndex > -1) {
+      replaceOneSelection(doc, ourIndex, ourRange, sel_mouse);
+    } else {
+      ourIndex = doc.sel.ranges.length;
+      setSelection(doc, normalizeSelection(doc.sel.ranges.concat([ourRange]), ourIndex),
+                   {scroll: false, origin: "*mouse"});
+    }
+
+    var lastPos = start;
+    // Grow/shrink the active range (or rectangle) to the given
+    // position as the mouse moves.
+    function extendTo(pos) {
+      if (cmp(lastPos, pos) == 0) return;
+      lastPos = pos;
+
+      if (type == "rect") {
+        var ranges = [], tabSize = cm.options.tabSize;
+        var startCol = countColumn(getLine(doc, start.line).text, start.ch, tabSize);
+        var posCol = countColumn(getLine(doc, pos.line).text, pos.ch, tabSize);
+        var left = Math.min(startCol, posCol), right = Math.max(startCol, posCol);
+        for (var line = Math.min(start.line, pos.line), end = Math.min(cm.lastLine(), Math.max(start.line, pos.line));
+             line <= end; line++) {
+          var text = getLine(doc, line).text, leftPos = findColumn(text, left, tabSize);
+          if (left == right)
+            ranges.push(new Range(Pos(line, leftPos), Pos(line, leftPos)));
+          else if (text.length > leftPos)
+            ranges.push(new Range(Pos(line, leftPos), Pos(line, findColumn(text, right, tabSize))));
+        }
+        if (!ranges.length) ranges.push(new Range(start, start));
+        setSelection(doc, normalizeSelection(startSel.ranges.slice(0, ourIndex).concat(ranges), ourIndex),
+                     {origin: "*mouse", scroll: false});
+        cm.scrollIntoView(pos);
+      } else {
+        var oldRange = ourRange;
+        var anchor = oldRange.anchor, head = pos;
+        // For word/line selection, snap the head to the word/line
+        // boundary on the side being extended. (`range` relies on
+        // var hoisting across the two branches.)
+        if (type != "single") {
+          if (type == "double")
+            var range = cm.findWordAt(pos);
+          else
+            var range = new Range(Pos(pos.line, 0), clipPos(doc, Pos(pos.line + 1, 0)));
+          if (cmp(range.anchor, anchor) > 0) {
+            head = range.head;
+            anchor = minPos(oldRange.from(), range.anchor);
+          } else {
+            head = range.anchor;
+            anchor = maxPos(oldRange.to(), range.head);
+          }
+        }
+        var ranges = startSel.ranges.slice(0);
+        ranges[ourIndex] = new Range(clipPos(doc, anchor), head);
+        setSelection(doc, normalizeSelection(ranges, ourIndex), sel_mouse);
+      }
+    }
+
+    var editorSize = display.wrapper.getBoundingClientRect();
+    // Used to ensure timeout re-tries don't fire when another extend
+    // happened in the meantime (clearTimeout isn't reliable -- at
+    // least on Chrome, the timeouts still happen even when cleared,
+    // if the clear happens after their scheduled firing time).
+    var counter = 0;
+
+    function extend(e) {
+      var curCount = ++counter;
+      var cur = posFromMouse(cm, e, true, type == "rect");
+      if (!cur) return;
+      if (cmp(cur, lastPos) != 0) {
+        ensureFocus(cm);
+        extendTo(cur);
+        // Keep extending while the pointer is outside the visible
+        // lines, re-trying as the view scrolls.
+        var visible = visibleLines(display, doc);
+        if (cur.line >= visible.to || cur.line < visible.from)
+          setTimeout(operation(cm, function(){if (counter == curCount) extend(e);}), 150);
+      } else {
+        // Pointer above/below the editor: auto-scroll and re-try.
+        var outside = e.clientY < editorSize.top ? -20 : e.clientY > editorSize.bottom ? 20 : 0;
+        if (outside) setTimeout(operation(cm, function() {
+          if (counter != curCount) return;
+          display.scroller.scrollTop += outside;
+          extend(e);
+        }), 50);
+      }
+    }
+
+    function done(e) {
+      counter = Infinity;
+      e_preventDefault(e);
+      focusInput(cm);
+      off(document, "mousemove", move);
+      off(document, "mouseup", up);
+      doc.history.lastSelOrigin = null;
+    }
+
+    var move = operation(cm, function(e) {
+      if (!e_button(e)) done(e);
+      else extend(e);
+    });
+    var up = operation(cm, done);
+    on(document, "mousemove", move);
+    on(document, "mouseup", up);
+  }
+
+  // Determines whether an event happened in the gutter, and fires the
+  // handlers for the corresponding event. Returns whether the event's
+  // default was prevented, or false when outside the gutter.
+  function gutterEvent(cm, e, type, prevent, signalfn) {
+    // Reading client coordinates can throw — presumably an old-IE
+    // quirk; bail out if so.
+    try { var mX = e.clientX, mY = e.clientY; }
+    catch(e) { return false; }
+    if (mX >= Math.floor(cm.display.gutters.getBoundingClientRect().right)) return false;
+    if (prevent) e_preventDefault(e);
+
+    var display = cm.display;
+    var lineBox = display.lineDiv.getBoundingClientRect();
+
+    if (mY > lineBox.bottom || !hasHandler(cm, type)) return e_defaultPrevented(e);
+    mY -= lineBox.top - display.viewOffset;
+
+    // Find which gutter column the click landed in and signal it.
+    for (var i = 0; i < cm.options.gutters.length; ++i) {
+      var g = display.gutters.childNodes[i];
+      if (g && g.getBoundingClientRect().right >= mX) {
+        var line = lineAtHeight(cm.doc, mY);
+        var gutter = cm.options.gutters[i];
+        signalfn(cm, type, cm, line, gutter, e);
+        return e_defaultPrevented(e);
+      }
+    }
+  }
+
+  // Fire "gutterClick" handlers for a mouse event, preventing the
+  // default action. Returns truthy when the event hit the gutter.
+  function clickInGutter(cm, e) {
+    return gutterEvent(cm, e, "gutterClick", true, signalLater);
+  }
+
+  // Kludge to work around strange IE behavior where it'll sometimes
+  // re-fire a series of drag-related events right after the drop (#1551).
+  // Holds the timestamp of the most recent drop (set in onDrop,
+  // checked in onDragStart).
+  var lastDrop = 0;
+
+  // Handle a drop on the editor: either insert dropped files' text
+  // contents, or move/copy dropped text to the drop position.
+  function onDrop(e) {
+    var cm = this;
+    if (signalDOMEvent(cm, e) || eventInWidget(cm.display, e))
+      return;
+    e_preventDefault(e);
+    if (ie) lastDrop = +new Date;
+    var pos = posFromMouse(cm, e, true), files = e.dataTransfer.files;
+    if (!pos || isReadOnly(cm)) return;
+    // Might be a file drop, in which case we simply extract the text
+    // and insert it.
+    if (files && files.length && window.FileReader && window.File) {
+      var n = files.length, text = Array(n), read = 0;
+      var loadFile = function(file, i) {
+        var reader = new FileReader;
+        reader.onload = operation(cm, function() {
+          text[i] = reader.result;
+          // Only apply once every file has finished loading.
+          if (++read == n) {
+            pos = clipPos(cm.doc, pos);
+            var change = {from: pos, to: pos, text: splitLines(text.join("\n")), origin: "paste"};
+            makeChange(cm.doc, change);
+            setSelectionReplaceHistory(cm.doc, simpleSelection(pos, changeEnd(change)));
+          }
+        });
+        reader.readAsText(file);
+      };
+      for (var i = 0; i < n; ++i) loadFile(files[i], i);
+    } else { // Normal drop
+      // Don't do a replace if the drop happened inside of the selected text.
+      if (cm.state.draggingText && cm.doc.sel.contains(pos) > -1) {
+        cm.state.draggingText(e);
+        // Ensure the editor is re-focused
+        setTimeout(bind(focusInput, cm), 20);
+        return;
+      }
+      try {
+        var text = e.dataTransfer.getData("Text");
+        if (text) {
+          // When dragging our own text without the copy modifier,
+          // delete the original ranges after placing the cursor.
+          // (`selected` relies on var hoisting past the if.)
+          if (cm.state.draggingText && !(mac ? e.metaKey : e.ctrlKey))
+            var selected = cm.listSelections();
+          setSelectionNoUndo(cm.doc, simpleSelection(pos, pos));
+          if (selected) for (var i = 0; i < selected.length; ++i)
+            replaceRange(cm.doc, "", selected[i].anchor, selected[i].head, "drag");
+          cm.replaceSelection(text, "around", "paste");
+          focusInput(cm);
+        }
+      }
+      catch(e){}
+    }
+  }
+
+  // Put the current selection on the drag's data transfer, and attach
+  // a dummy drag image so browsers don't render their default one.
+  function onDragStart(cm, e) {
+    // IE re-fires drag events right after a drop (see lastDrop); also
+    // suppress drag starts that aren't part of our own text drag.
+    if (ie && (!cm.state.draggingText || +new Date - lastDrop < 100)) { e_stop(e); return; }
+    if (signalDOMEvent(cm, e) || eventInWidget(cm.display, e)) return;
+
+    e.dataTransfer.setData("Text", cm.getSelection());
+
+    // Use dummy image instead of default browsers image.
+    // Recent Safari (~6.0.2) have a tendency to segfault when this happens, so we don't do it there.
+    if (e.dataTransfer.setDragImage && !safari) {
+      var img = elt("img", null, null, "position: fixed; left: 0; top: 0;");
+      img.src = "data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==";
+      if (presto) {
+        img.width = img.height = 1;
+        cm.display.wrapper.appendChild(img);
+        // Force a relayout, or Opera won't use our image for some obscure reason
+        img._top = img.offsetTop;
+      }
+      e.dataTransfer.setDragImage(img, 0, 0);
+      if (presto) img.parentNode.removeChild(img);
+    }
+  }
+
+ // SCROLL EVENTS
+
+  // Sync the scrollable area and scrollbars, ensure the viewport
+  // covers the visible area. Changes smaller than 2px are ignored.
+  function setScrollTop(cm, val) {
+    if (Math.abs(cm.doc.scrollTop - val) < 2) return;
+    cm.doc.scrollTop = val;
+    // On gecko the display update runs after the DOM scroll positions
+    // are synced; elsewhere it runs (with the target top) before.
+    if (!gecko) updateDisplaySimple(cm, {top: val});
+    if (cm.display.scroller.scrollTop != val) cm.display.scroller.scrollTop = val;
+    if (cm.display.scrollbarV.scrollTop != val) cm.display.scrollbarV.scrollTop = val;
+    if (gecko) updateDisplaySimple(cm);
+    startWorker(cm, 100);
+  }
+  // Sync scroller and scrollbar, ensure the gutter elements are
+  // aligned. `isScroller` marks updates that originate from the
+  // scroller itself (compared exactly, not with the 2px tolerance).
+  function setScrollLeft(cm, val, isScroller) {
+    if (isScroller ? val == cm.doc.scrollLeft : Math.abs(cm.doc.scrollLeft - val) < 2) return;
+    // Clamp to the scrollable width.
+    val = Math.min(val, cm.display.scroller.scrollWidth - cm.display.scroller.clientWidth);
+    cm.doc.scrollLeft = val;
+    alignHorizontally(cm);
+    if (cm.display.scroller.scrollLeft != val) cm.display.scroller.scrollLeft = val;
+    if (cm.display.scrollbarH.scrollLeft != val) cm.display.scrollbarH.scrollLeft = val;
+  }
+
+  // Since the delta values reported on mouse wheel events are
+  // unstandardized between browsers and even browser versions, and
+  // generally horribly unpredictable, this code starts by measuring
+  // the scroll effect that the first few mouse wheel events have,
+  // and, from that, detects the way it can convert deltas to pixel
+  // offsets afterwards.
+  //
+  // The reason we want to know the amount a wheel event will scroll
+  // is that it gives us a chance to update the display before the
+  // actual scrolling happens, reducing flickering.
+
+  // Number of samples taken so far, and the current pixels-per-delta
+  // estimate (refined at runtime by onScrollWheel's sampling).
+  var wheelSamples = 0, wheelPixelsPerUnit = null;
+  // Fill in a browser-detected starting value on browsers where we
+  // know one. These don't have to be accurate -- the result of them
+  // being wrong would just be a slight flicker on the first wheel
+  // scroll (if it is large enough).
+  if (ie) wheelPixelsPerUnit = -.53;
+  else if (gecko) wheelPixelsPerUnit = 15;
+  else if (chrome) wheelPixelsPerUnit = -.7;
+  else if (safari) wheelPixelsPerUnit = -1/3;
+
+  // React to a wheel event: pre-extend the viewport in the scroll
+  // direction when possible, and keep refining the measured
+  // pixels-per-wheel-unit ratio.
+  function onScrollWheel(cm, e) {
+    // Normalize the legacy wheelDelta / detail properties across
+    // browsers.
+    var dx = e.wheelDeltaX, dy = e.wheelDeltaY;
+    if (dx == null && e.detail && e.axis == e.HORIZONTAL_AXIS) dx = e.detail;
+    if (dy == null && e.detail && e.axis == e.VERTICAL_AXIS) dy = e.detail;
+    else if (dy == null) dy = e.wheelDelta;
+
+    var display = cm.display, scroll = display.scroller;
+    // Quit if there's nothing to scroll here
+    if (!(dx && scroll.scrollWidth > scroll.clientWidth ||
+          dy && scroll.scrollHeight > scroll.clientHeight)) return;
+
+    // Webkit browsers on OS X abort momentum scrolls when the target
+    // of the scroll event is removed from the scrollable element.
+    // This hack (see related code in patchDisplay) makes sure the
+    // element is kept around.
+    if (dy && mac && webkit) {
+      outer: for (var cur = e.target, view = display.view; cur != scroll; cur = cur.parentNode) {
+        for (var i = 0; i < view.length; i++) {
+          if (view[i].node == cur) {
+            cm.display.currentWheelTarget = cur;
+            break outer;
+          }
+        }
+      }
+    }
+
+    // On some browsers, horizontal scrolling will cause redraws to
+    // happen before the gutter has been realigned, causing it to
+    // wriggle around in a most unseemly way. When we have an
+    // estimated pixels/delta value, we just handle horizontal
+    // scrolling entirely here. It'll be slightly off from native, but
+    // better than glitching out.
+    if (dx && !gecko && !presto && wheelPixelsPerUnit != null) {
+      if (dy)
+        setScrollTop(cm, Math.max(0, Math.min(scroll.scrollTop + dy * wheelPixelsPerUnit, scroll.scrollHeight - scroll.clientHeight)));
+      setScrollLeft(cm, Math.max(0, Math.min(scroll.scrollLeft + dx * wheelPixelsPerUnit, scroll.scrollWidth - scroll.clientWidth)));
+      e_preventDefault(e);
+      display.wheelStartX = null; // Abort measurement, if in progress
+      return;
+    }
+
+    // 'Project' the visible viewport to cover the area that is being
+    // scrolled into view (if we know enough to estimate it).
+    if (dy && wheelPixelsPerUnit != null) {
+      var pixels = dy * wheelPixelsPerUnit;
+      var top = cm.doc.scrollTop, bot = top + display.wrapper.clientHeight;
+      if (pixels < 0) top = Math.max(0, top + pixels - 50);
+      else bot = Math.min(cm.doc.height, bot + pixels + 50);
+      updateDisplaySimple(cm, {top: top, bottom: bot});
+    }
+
+    // Sampling: record the scroll position now, measure how far it
+    // moved 200ms later, and fold the observed pixels-per-delta ratio
+    // into the running estimate. Stop after 20 samples.
+    if (wheelSamples < 20) {
+      if (display.wheelStartX == null) {
+        display.wheelStartX = scroll.scrollLeft; display.wheelStartY = scroll.scrollTop;
+        display.wheelDX = dx; display.wheelDY = dy;
+        setTimeout(function() {
+          if (display.wheelStartX == null) return;
+          var movedX = scroll.scrollLeft - display.wheelStartX;
+          var movedY = scroll.scrollTop - display.wheelStartY;
+          var sample = (movedY && display.wheelDY && movedY / display.wheelDY) ||
+            (movedX && display.wheelDX && movedX / display.wheelDX);
+          display.wheelStartX = display.wheelStartY = null;
+          if (!sample) return;
+          wheelPixelsPerUnit = (wheelPixelsPerUnit * wheelSamples + sample) / (wheelSamples + 1);
+          ++wheelSamples;
+        }, 200);
+      } else {
+        // A measurement is already pending: accumulate deltas into it.
+        display.wheelDX += dx; display.wheelDY += dy;
+      }
+    }
+  }
+
+ // KEY EVENTS
+
+  // Run a handler that was bound to a key. `bound` is either a
+  // command name or a function. Returns whether the binding actually
+  // handled the key (i.e. did not return Pass).
+  function doHandleBinding(cm, bound, dropShift) {
+    if (typeof bound == "string") {
+      bound = commands[bound];
+      if (!bound) return false;
+    }
+    // Ensure previous input has been read, so that the handler sees a
+    // consistent view of the document
+    if (cm.display.pollingFast && readInput(cm)) cm.display.pollingFast = false;
+    var prevShift = cm.display.shift, done = false;
+    try {
+      // Read-only editors run the handler with edits suppressed, so
+      // motion commands still work.
+      if (isReadOnly(cm)) cm.state.suppressEdits = true;
+      if (dropShift) cm.display.shift = false;
+      done = bound(cm) != Pass;
+    } finally {
+      // Always restore shift/suppress state, even if the handler threw.
+      cm.display.shift = prevShift;
+      cm.state.suppressEdits = false;
+    }
+    return done;
+  }
+
+  // Look up a key name in the editor's keymaps, in priority order:
+  // added keymaps first, then extraKeys, then the base keymap.
+  function lookupKeyForEditor(cm, name, handle) {
+    for (var i = 0; i < cm.state.keyMaps.length; i++) {
+      var result = lookupKey(name, cm.state.keyMaps[i], handle);
+      if (result) return result;
+    }
+    return (cm.options.extraKeys && lookupKey(name, cm.options.extraKeys, handle))
+      || lookupKey(name, cm.options.keyMap, handle);
+  }
+
+  // Timer used to cancel a pending multi-stroke key sequence.
+  var stopSeq = new Delayed;
+  // Dispatch a named key to the keymaps. Returns whether the key was
+  // consumed (handled, part of a multi-stroke sequence, or swallowed
+  // as the tail of an aborted sequence).
+  function dispatchKey(cm, name, e, handle) {
+    var seq = cm.state.keySeq;
+    if (seq) {
+      if (isModifierKey(name)) return "handled";
+      // Abandon the sequence if no follow-up key arrives in time.
+      stopSeq.set(50, function() {
+        if (cm.state.keySeq == seq) {
+          cm.state.keySeq = null;
+          resetInput(cm);
+        }
+      });
+      name = seq + " " + name;
+    }
+    var result = lookupKeyForEditor(cm, name, handle);
+
+    if (result == "multi")
+      cm.state.keySeq = name;
+    if (result == "handled")
+      signalLater(cm, "keyHandled", cm, name, e);
+
+    if (result == "handled" || result == "multi") {
+      e_preventDefault(e);
+      restartBlink(cm);
+    }
+
+    // An unbound character key that ended a sequence is still
+    // swallowed, so it doesn't get inserted as text.
+    if (seq && !result && /\'$/.test(name)) {
+      e_preventDefault(e);
+      return true;
+    }
+    return !!result;
+  }
+
+  // Handle a key from the keydown event. Returns whether a binding
+  // consumed the key.
+  function handleKeyBinding(cm, e) {
+    var name = keyName(e, true);
+    if (!name) return false;
+
+    if (e.shiftKey && !cm.state.keySeq) {
+      // First try to resolve full name (including 'Shift-'). Failing
+      // that, see if there is a cursor-motion command (starting with
+      // 'go') bound to the keyname without 'Shift-'.
+      return dispatchKey(cm, "Shift-" + name, e, function(b) {return doHandleBinding(cm, b, true);})
+          || dispatchKey(cm, name, e, function(b) {
+               if (typeof b == "string" ? /^go[A-Z]/.test(b) : b.motion)
+                 return doHandleBinding(cm, b);
+             });
+    } else {
+      return dispatchKey(cm, name, e, function(b) { return doHandleBinding(cm, b); });
+    }
+  }
+
+  // Handle a key from the keypress event: dispatch the typed
+  // character in quoted ('x') key-name form, always dropping shift.
+  function handleCharBinding(cm, e, ch) {
+    return dispatchKey(cm, "'" + ch + "'", e,
+                       function(b) { return doHandleBinding(cm, b, true); });
+  }
+
+  // Key code of the last keydown that a binding handled, used on
+  // presto to suppress the matching keypress.
+  var lastStoppedKey = null;
+  function onKeyDown(e) {
+    var cm = this;
+    ensureFocus(cm);
+    if (signalDOMEvent(cm, e)) return;
+    // IE does strange things with escape.
+    if (ie && ie_version < 11 && e.keyCode == 27) e.returnValue = false;
+    var code = e.keyCode;
+    cm.display.shift = code == 16 || e.shiftKey;
+    var handled = handleKeyBinding(cm, e);
+    if (presto) {
+      lastStoppedKey = handled ? code : null;
+      // Opera has no cut event... we try to at least catch the key combo
+      if (!handled && code == 88 && !hasCopyEvent && (mac ? e.metaKey : e.ctrlKey))
+        cm.replaceSelection("", null, "cut");
+    }
+
+    // Turn mouse into crosshair when Alt is held on Mac.
+    if (code == 18 && !/\bCodeMirror-crosshair\b/.test(cm.display.lineDiv.className))
+      showCrossHair(cm);
+  }
+
+  // Add the crosshair cursor class to the line div, removing it again
+  // as soon as Alt is released (keyup) or the mouse moves over
+  // something without Alt held (mouseover).
+  function showCrossHair(cm) {
+    var lineDiv = cm.display.lineDiv;
+    addClass(lineDiv, "CodeMirror-crosshair");
+
+    function up(e) {
+      if (e.keyCode == 18 || !e.altKey) {
+        rmClass(lineDiv, "CodeMirror-crosshair");
+        off(document, "keyup", up);
+        off(document, "mouseover", up);
+      }
+    }
+    on(document, "keyup", up);
+    on(document, "mouseover", up);
+  }
+
+ // keyup handler ('this' is the editor): clears the sticky shift flag
+ // when the Shift key (keyCode 16) is released.
+ function onKeyUp(e) {
+ if (e.keyCode == 16) this.doc.sel.shift = false;
+ signalDOMEvent(this, e);
+ }
+
+ // keypress handler ('this' is the editor). Ignores modifier chords,
+ // then tries character bindings for the typed character.
+ function onKeyPress(e) {
+ var cm = this;
+ if (signalDOMEvent(cm, e) || e.ctrlKey && !e.altKey || mac && e.metaKey) return;
+ var keyCode = e.keyCode, charCode = e.charCode;
+ // Opera fires keypress for keys already handled on keydown; swallow those.
+ if (presto && keyCode == lastStoppedKey) {lastStoppedKey = null; e_preventDefault(e); return;}
+ if (((presto && (!e.which || e.which < 10)) || khtml) && handleKeyBinding(cm, e)) return;
+ var ch = String.fromCharCode(charCode == null ? keyCode : charCode);
+ if (handleCharBinding(cm, e, ch)) return;
+ if (ie && ie_version >= 9) cm.display.inputHasSelection = null;
+ fastPoll(cm);
+ }
+
+ // FOCUS/BLUR EVENTS
+
+ // Called when the editor gains focus; a no-op in "nocursor" read-only
+ // mode. Fires the "focus" event and starts cursor blinking/polling.
+ function onFocus(cm) {
+ if (cm.options.readOnly == "nocursor") return;
+ if (!cm.state.focused) {
+ signal(cm, "focus", cm);
+ cm.state.focused = true;
+ addClass(cm.display.wrapper, "CodeMirror-focused");
+ // The prevInput test prevents this from firing when a context
+ // menu is closed (since the resetInput would kill the
+ // select-all detection hack)
+ if (!cm.curOp && cm.display.selForContextMenu != cm.doc.sel) {
+ resetInput(cm);
+ if (webkit) setTimeout(bind(resetInput, cm, true), 0); // Issue #1730
+ }
+ }
+ slowPoll(cm);
+ restartBlink(cm);
+ }
+ // Called when the editor loses focus. Stops cursor blinking and, after
+ // a short delay, clears the sticky shift flag if focus didn't return.
+ function onBlur(cm) {
+ if (cm.state.focused) {
+ signal(cm, "blur", cm);
+ cm.state.focused = false;
+ rmClass(cm.display.wrapper, "CodeMirror-focused");
+ }
+ clearInterval(cm.display.blinker);
+ setTimeout(function() {if (!cm.state.focused) cm.display.shift = false;}, 150);
+ }
+
+ // CONTEXT MENU HANDLING
+
+ // To make the context menu work, we need to briefly unhide the
+ // textarea (making it as unobtrusive as possible) to let the
+ // right-click take effect on it.
+ function onContextMenu(cm, e) {
+ if (signalDOMEvent(cm, e, "contextmenu")) return;
+ var display = cm.display;
+ if (eventInWidget(display, e) || contextMenuInGutter(cm, e)) return;
+
+ // scrollPos is captured so rehide() can restore scrolling on old IE.
+ var pos = posFromMouse(cm, e), scrollPos = display.scroller.scrollTop;
+ if (!pos || presto) return; // Opera is difficult.
+
+ // Reset the current text selection only if the click is done outside of the selection
+ // and 'resetSelectionOnContextMenu' option is true.
+ var reset = cm.options.resetSelectionOnContextMenu;
+ if (reset && cm.doc.sel.contains(pos) == -1)
+ operation(cm, setSelection)(cm.doc, simpleSelection(pos), sel_dontScroll);
+
+ // Temporarily position the (nearly invisible) textarea under the click.
+ var oldCSS = display.input.style.cssText;
+ display.inputDiv.style.position = "absolute";
+ display.input.style.cssText = "position: fixed; width: 30px; height: 30px; top: " + (e.clientY - 5) +
+ "px; left: " + (e.clientX - 5) + "px; z-index: 1000; background: " +
+ (ie ? "rgba(255, 255, 255, .05)" : "transparent") +
+ "; outline: none; border-width: 0; outline: none; overflow: hidden; opacity: .05; filter: alpha(opacity=5);";
+ if (webkit) var oldScrollY = window.scrollY; // Work around Chrome issue (#2712)
+ focusInput(cm);
+ if (webkit) window.scrollTo(null, oldScrollY);
+ resetInput(cm);
+ // Adds "Select all" to context menu in FF
+ if (!cm.somethingSelected()) display.input.value = display.prevInput = " ";
+ display.selForContextMenu = cm.doc.sel;
+ clearTimeout(display.detectingSelectAll);
+
+ // Select-all will be greyed out if there's nothing to select, so
+ // this adds a zero-width space so that we can later check whether
+ // it got selected.
+ function prepareSelectAllHack() {
+ if (display.input.selectionStart != null) {
+ var selected = cm.somethingSelected();
+ var extval = display.input.value = "\u200b" + (selected ? display.input.value : "");
+ display.prevInput = selected ? "" : "\u200b";
+ display.input.selectionStart = 1; display.input.selectionEnd = extval.length;
+ // Re-set this, in case some other handler touched the
+ // selection in the meantime.
+ display.selForContextMenu = cm.doc.sel;
+ }
+ }
+ // Restore the textarea's original styling once the menu is gone, and
+ // start polling to detect a "Select all" choice.
+ function rehide() {
+ display.inputDiv.style.position = "relative";
+ display.input.style.cssText = oldCSS;
+ if (ie && ie_version < 9) display.scrollbarV.scrollTop = display.scroller.scrollTop = scrollPos;
+ slowPoll(cm);
+
+ // Try to detect the user choosing select-all
+ if (display.input.selectionStart != null) {
+ if (!ie || (ie && ie_version < 9)) prepareSelectAllHack();
+ var i = 0, poll = function() {
+ if (display.selForContextMenu == cm.doc.sel && display.input.selectionStart == 0)
+ operation(cm, commands.selectAll)(cm);
+ else if (i++ < 10) display.detectingSelectAll = setTimeout(poll, 500);
+ else resetInput(cm);
+ };
+ display.detectingSelectAll = setTimeout(poll, 200);
+ }
+ }
+
+ if (ie && ie_version >= 9) prepareSelectAllHack();
+ if (captureRightClick) {
+ e_stop(e);
+ var mouseup = function() {
+ off(window, "mouseup", mouseup);
+ setTimeout(rehide, 20);
+ };
+ on(window, "mouseup", mouseup);
+ } else {
+ setTimeout(rehide, 50);
+ }
+ }
+
+ // Fire "gutterContextMenu" handlers for a right-click in the gutter;
+ // returns false immediately when no such handlers are registered.
+ function contextMenuInGutter(cm, e) {
+ if (!hasHandler(cm, "gutterContextMenu")) return false;
+ return gutterEvent(cm, e, "gutterContextMenu", false, signal);
+ }
+
+ // UPDATING
+
+ // Compute the position of the end of a change (its 'to' property
+ // refers to the pre-change end).
+ // For single-line insertions the new end keeps the original start
+ // column as an offset; multi-line text ends at the last line's length.
+ var changeEnd = CodeMirror.changeEnd = function(change) {
+ if (!change.text) return change.to;
+ return Pos(change.from.line + change.text.length - 1,
+ lst(change.text).length + (change.text.length == 1 ? change.from.ch : 0));
+ };
+
+ // Adjust a position to refer to the post-change position of the
+ // same text, or the end of the change if the change covers it.
+ // Positions strictly before the change are returned untouched;
+ // positions inside the changed span collapse to the change's new end.
+ function adjustForChange(pos, change) {
+ if (cmp(pos, change.from) < 0) return pos;
+ if (cmp(pos, change.to) <= 0) return changeEnd(change);
+
+ var line = pos.line + change.text.length - (change.to.line - change.from.line) - 1, ch = pos.ch;
+ if (pos.line == change.to.line) ch += changeEnd(change).ch - change.to.ch;
+ return Pos(line, ch);
+ }
+
+ // Map every range of the document's selection through adjustForChange
+ // and renormalize, producing the selection as it will be post-change.
+ function computeSelAfterChange(doc, change) {
+ var out = [];
+ for (var i = 0; i < doc.sel.ranges.length; i++) {
+ var range = doc.sel.ranges[i];
+ out.push(new Range(adjustForChange(range.anchor, change),
+ adjustForChange(range.head, change)));
+ }
+ return normalizeSelection(out, doc.sel.primIndex);
+ }
+
+ // Translate pos, expressed as an offset from 'old', into the same
+ // offset from 'nw' (the new anchor position).
+ function offsetPos(pos, old, nw) {
+ if (pos.line == old.line)
+ return Pos(nw.line, pos.ch - old.ch + nw.ch);
+ else
+ return Pos(nw.line + (pos.line - old.line), pos.ch);
+ }
+
+ // Used by replaceSelections to allow moving the selection to the
+ // start or around the replaced test. Hint may be "start" or "around".
+ // oldPrev/newPrev track the cumulative offset introduced by the
+ // changes already processed, so later ranges land in the right place.
+ function computeReplacedSel(doc, changes, hint) {
+ var out = [];
+ var oldPrev = Pos(doc.first, 0), newPrev = oldPrev;
+ for (var i = 0; i < changes.length; i++) {
+ var change = changes[i];
+ var from = offsetPos(change.from, oldPrev, newPrev);
+ var to = offsetPos(changeEnd(change), oldPrev, newPrev);
+ oldPrev = change.to;
+ newPrev = to;
+ if (hint == "around") {
+ // Preserve the original range's direction (inverted or not).
+ var range = doc.sel.ranges[i], inv = cmp(range.head, range.anchor) < 0;
+ out[i] = new Range(inv ? to : from, inv ? from : to);
+ } else {
+ out[i] = new Range(from, from);
+ }
+ }
+ return new Selection(out, doc.sel.primIndex);
+ }
+
+ // Allow "beforeChange" event handlers to influence a change
+ // Returns null when a handler canceled the change, otherwise the
+ // (possibly handler-updated) change object.
+ function filterChange(doc, change, update) {
+ var obj = {
+ canceled: false,
+ from: change.from,
+ to: change.to,
+ text: change.text,
+ origin: change.origin,
+ cancel: function() { this.canceled = true; }
+ };
+ // Only changes that may still be updated (pre-apply) expose update().
+ if (update) obj.update = function(from, to, text, origin) {
+ if (from) this.from = clipPos(doc, from);
+ if (to) this.to = clipPos(doc, to);
+ if (text) this.text = text;
+ if (origin !== undefined) this.origin = origin;
+ };
+ signal(doc, "beforeChange", doc, obj);
+ if (doc.cm) signal(doc.cm, "beforeChange", doc.cm, obj);
+
+ if (obj.canceled) return null;
+ return {from: obj.from, to: obj.to, text: obj.text, origin: obj.origin};
+ }
+
+ // Apply a change to a document, add it to the document's history,
+ // and propagate it to all linked documents.
+ function makeChange(doc, change, ignoreReadOnly) {
+ if (doc.cm) {
+ // Make sure the change runs inside an operation on the editor.
+ if (!doc.cm.curOp) return operation(doc.cm, makeChange)(doc, change, ignoreReadOnly);
+ if (doc.cm.state.suppressEdits) return;
+ }
+
+ if (hasHandler(doc, "beforeChange") || doc.cm && hasHandler(doc.cm, "beforeChange")) {
+ change = filterChange(doc, change, true);
+ if (!change) return;
+ }
+
+ // Possibly split or suppress the update based on the presence
+ // of read-only spans in its range.
+ var split = sawReadOnlySpans && !ignoreReadOnly && removeReadOnlyRanges(doc, change.from, change.to);
+ if (split) {
+ // Only the first (last-iterated) piece keeps the inserted text.
+ for (var i = split.length - 1; i >= 0; --i)
+ makeChangeInner(doc, {from: split[i].from, to: split[i].to, text: i ? [""] : change.text});
+ } else {
+ makeChangeInner(doc, change);
+ }
+ }
+
+ // Record the change in history, apply it to this document, and mirror
+ // it into every linked document (rebasing unshared histories).
+ function makeChangeInner(doc, change) {
+ // No-op changes (empty insert over an empty range) are dropped early.
+ if (change.text.length == 1 && change.text[0] == "" && cmp(change.from, change.to) == 0) return;
+ var selAfter = computeSelAfterChange(doc, change);
+ addChangeToHistory(doc, change, selAfter, doc.cm ? doc.cm.curOp.id : NaN);
+
+ makeChangeSingleDoc(doc, change, selAfter, stretchSpansOverChange(doc, change));
+ var rebased = [];
+
+ linkedDocs(doc, function(doc, sharedHist) {
+ if (!sharedHist && indexOf(rebased, doc.history) == -1) {
+ rebaseHist(doc.history, change);
+ rebased.push(doc.history);
+ }
+ makeChangeSingleDoc(doc, change, null, stretchSpansOverChange(doc, change));
+ });
+ }
+
+ // Revert a change stored in a document's history.
+ // 'type' is "undo" or "redo"; 'source' is the stack being unwound and
+ // 'dest' receives the inverse events so the operation can be reversed.
+ function makeChangeFromHistory(doc, type, allowSelectionOnly) {
+ if (doc.cm && doc.cm.state.suppressEdits) return;
+
+ var hist = doc.history, event, selAfter = doc.sel;
+ var source = type == "undo" ? hist.done : hist.undone, dest = type == "undo" ? hist.undone : hist.done;
+
+ // Verify that there is a useable event (so that ctrl-z won't
+ // needlessly clear selection events)
+ for (var i = 0; i < source.length; i++) {
+ event = source[i];
+ if (allowSelectionOnly ? event.ranges && !event.equals(doc.sel) : !event.ranges)
+ break;
+ }
+ if (i == source.length) return;
+ hist.lastOrigin = hist.lastSelOrigin = null;
+
+ // Pop selection-only events until a change event is found.
+ for (;;) {
+ event = source.pop();
+ if (event.ranges) {
+ pushSelectionToHistory(event, dest);
+ if (allowSelectionOnly && !event.equals(doc.sel)) {
+ setSelection(doc, event, {clearRedo: false});
+ return;
+ }
+ selAfter = event;
+ }
+ else break;
+ }
+
+ // Build up a reverse change object to add to the opposite history
+ // stack (redo when undoing, and vice versa).
+ var antiChanges = [];
+ pushSelectionToHistory(selAfter, dest);
+ dest.push({changes: antiChanges, generation: hist.generation});
+ hist.generation = event.generation || ++hist.maxGeneration;
+
+ var filter = hasHandler(doc, "beforeChange") || doc.cm && hasHandler(doc.cm, "beforeChange");
+
+ for (var i = event.changes.length - 1; i >= 0; --i) {
+ var change = event.changes[i];
+ change.origin = type;
+ if (filter && !filterChange(doc, change, false)) {
+ // A canceled revert invalidates the remaining stack.
+ source.length = 0;
+ return;
+ }
+
+ antiChanges.push(historyChangeFromChange(doc, change));
+
+ var after = i ? computeSelAfterChange(doc, change) : lst(source);
+ makeChangeSingleDoc(doc, change, after, mergeOldSpans(doc, change));
+ if (!i && doc.cm) doc.cm.scrollIntoView({from: change.from, to: changeEnd(change)});
+ var rebased = [];
+
+ // Propagate to the linked documents
+ linkedDocs(doc, function(doc, sharedHist) {
+ if (!sharedHist && indexOf(rebased, doc.history) == -1) {
+ rebaseHist(doc.history, change);
+ rebased.push(doc.history);
+ }
+ makeChangeSingleDoc(doc, change, null, mergeOldSpans(doc, change));
+ });
+ }
+ }
+
+ // Sub-views need their line numbers shifted when text is added
+ // above or below them in the parent document.
+ // Shifts doc.first and every selection range by 'distance' lines.
+ function shiftDoc(doc, distance) {
+ if (distance == 0) return;
+ doc.first += distance;
+ doc.sel = new Selection(map(doc.sel.ranges, function(range) {
+ return new Range(Pos(range.anchor.line + distance, range.anchor.ch),
+ Pos(range.head.line + distance, range.head.ch));
+ }), doc.sel.primIndex);
+ if (doc.cm) {
+ // Re-register the shifted region and redraw all visible gutters.
+ regChange(doc.cm, doc.first, doc.first - distance, distance);
+ for (var d = doc.cm.display, l = d.viewFrom; l < d.viewTo; l++)
+ regLineChange(doc.cm, l, "gutter");
+ }
+ }
+
+ // More lower-level change function, handling only a single document
+ // (not linked ones).
+ function makeChangeSingleDoc(doc, change, selAfter, spans) {
+ if (doc.cm && !doc.cm.curOp)
+ return operation(doc.cm, makeChangeSingleDoc)(doc, change, selAfter, spans);
+
+ // A change entirely above this (sub-)document only shifts line numbers.
+ if (change.to.line < doc.first) {
+ shiftDoc(doc, change.text.length - 1 - (change.to.line - change.from.line));
+ return;
+ }
+ if (change.from.line > doc.lastLine()) return;
+
+ // Clip the change to the size of this doc
+ if (change.from.line < doc.first) {
+ var shift = change.text.length - 1 - (doc.first - change.from.line);
+ shiftDoc(doc, shift);
+ change = {from: Pos(doc.first, 0), to: Pos(change.to.line + shift, change.to.ch),
+ text: [lst(change.text)], origin: change.origin};
+ }
+ var last = doc.lastLine();
+ if (change.to.line > last) {
+ change = {from: change.from, to: Pos(last, getLine(doc, last).text.length),
+ text: [change.text[0]], origin: change.origin};
+ }
+
+ // Record the removed text so the change can be inverted later.
+ change.removed = getBetween(doc, change.from, change.to);
+
+ if (!selAfter) selAfter = computeSelAfterChange(doc, change);
+ if (doc.cm) makeChangeSingleDocInEditor(doc.cm, change, spans);
+ else updateDoc(doc, change, spans);
+ setSelectionNoUndo(doc, selAfter, sel_dontScroll);
+ }
+
+ // Handle the interaction of a change to a document with the editor
+ // that this document is part of. Also tracks whether the widest line
+ // (display.maxLine) may have been affected by the change.
+ function makeChangeSingleDocInEditor(cm, change, spans) {
+ var doc = cm.doc, display = cm.display, from = change.from, to = change.to;
+
+ var recomputeMaxLength = false, checkWidthStart = from.line;
+ if (!cm.options.lineWrapping) {
+ checkWidthStart = lineNo(visualLine(getLine(doc, from.line)));
+ doc.iter(checkWidthStart, to.line + 1, function(line) {
+ if (line == display.maxLine) {
+ recomputeMaxLength = true;
+ return true;
+ }
+ });
+ }
+
+ if (doc.sel.contains(change.from, change.to) > -1)
+ signalCursorActivity(cm);
+
+ updateDoc(doc, change, spans, estimateHeight(cm));
+
+ if (!cm.options.lineWrapping) {
+ // See if any post-change line becomes the new widest line.
+ doc.iter(checkWidthStart, from.line + change.text.length, function(line) {
+ var len = lineLength(line);
+ if (len > display.maxLineLength) {
+ display.maxLine = line;
+ display.maxLineLength = len;
+ display.maxLineChanged = true;
+ recomputeMaxLength = false;
+ }
+ });
+ if (recomputeMaxLength) cm.curOp.updateMaxLine = true;
+ }
+
+ // Adjust frontier, schedule worker
+ doc.frontier = Math.min(doc.frontier, from.line);
+ startWorker(cm, 400);
+
+ var lendiff = change.text.length - (to.line - from.line) - 1;
+ // Remember that these lines changed, for updating the display
+ if (from.line == to.line && change.text.length == 1 && !isWholeLineUpdate(cm.doc, change))
+ regLineChange(cm, from.line, "text");
+ else
+ regChange(cm, from.line, to.line + 1, lendiff);
+
+ var changesHandler = hasHandler(cm, "changes"), changeHandler = hasHandler(cm, "change");
+ if (changeHandler || changesHandler) {
+ var obj = {
+ from: from, to: to,
+ text: change.text,
+ removed: change.removed,
+ origin: change.origin
+ };
+ if (changeHandler) signalLater(cm, "change", cm, obj);
+ if (changesHandler) (cm.curOp.changeObjs || (cm.curOp.changeObjs = [])).push(obj);
+ }
+ cm.display.selForContextMenu = null;
+ }
+
+ // Replace the text between 'from' and 'to' with 'code' (a string or
+ // an array of lines). Endpoints are swapped if given out of order.
+ function replaceRange(doc, code, from, to, origin) {
+ if (!to) to = from;
+ if (cmp(to, from) < 0) { var tmp = to; to = from; from = tmp; }
+ if (typeof code == "string") code = splitLines(code);
+ makeChange(doc, {from: from, to: to, text: code, origin: origin});
+ }
+
+ // SCROLLING THINGS INTO VIEW
+
+ // If an editor sits on the top or bottom of the window, partially
+ // scrolled out of view, this ensures that the cursor is visible.
+ // A temporary zero-width node is scrolled into view, then removed.
+ function maybeScrollWindow(cm, coords) {
+ if (signalDOMEvent(cm, "scrollCursorIntoView")) return;
+
+ var display = cm.display, box = display.sizer.getBoundingClientRect(), doScroll = null;
+ if (coords.top + box.top < 0) doScroll = true;
+ else if (coords.bottom + box.top > (window.innerHeight || document.documentElement.clientHeight)) doScroll = false;
+ if (doScroll != null && !phantom) {
+ var scrollNode = elt("div", "\u200b", null, "position: absolute; top: " +
+ (coords.top - display.viewOffset - paddingTop(cm.display)) + "px; height: " +
+ (coords.bottom - coords.top + scrollerCutOff) + "px; left: " +
+ coords.left + "px; width: 2px;");
+ cm.display.lineSpace.appendChild(scrollNode);
+ scrollNode.scrollIntoView(doScroll);
+ cm.display.lineSpace.removeChild(scrollNode);
+ }
+ }
+
+ // Scroll a given position into view (immediately), verifying that
+ // it actually became visible (as line heights are accurately
+ // measured, the position of something may 'drift' during drawing).
+ // Retries up to five times, stopping once a pass causes no movement.
+ function scrollPosIntoView(cm, pos, end, margin) {
+ if (margin == null) margin = 0;
+ for (var limit = 0; limit < 5; limit++) {
+ var changed = false, coords = cursorCoords(cm, pos);
+ var endCoords = !end || end == pos ? coords : cursorCoords(cm, end);
+ var scrollPos = calculateScrollPos(cm, Math.min(coords.left, endCoords.left),
+ Math.min(coords.top, endCoords.top) - margin,
+ Math.max(coords.left, endCoords.left),
+ Math.max(coords.bottom, endCoords.bottom) + margin);
+ var startTop = cm.doc.scrollTop, startLeft = cm.doc.scrollLeft;
+ if (scrollPos.scrollTop != null) {
+ setScrollTop(cm, scrollPos.scrollTop);
+ if (Math.abs(cm.doc.scrollTop - startTop) > 1) changed = true;
+ }
+ if (scrollPos.scrollLeft != null) {
+ setScrollLeft(cm, scrollPos.scrollLeft);
+ if (Math.abs(cm.doc.scrollLeft - startLeft) > 1) changed = true;
+ }
+ if (!changed) return coords;
+ }
+ }
+
+ // Scroll a given set of coordinates into view (immediately).
+ // Delegates to calculateScrollPos and applies each axis when needed.
+ function scrollIntoView(cm, x1, y1, x2, y2) {
+ var scrollPos = calculateScrollPos(cm, x1, y1, x2, y2);
+ if (scrollPos.scrollTop != null) setScrollTop(cm, scrollPos.scrollTop);
+ if (scrollPos.scrollLeft != null) setScrollLeft(cm, scrollPos.scrollLeft);
+ }
+
+ // Calculate a new scroll position needed to scroll the given
+ // rectangle into view. Returns an object with scrollTop and
+ // scrollLeft properties. When these are undefined, the
+ // vertical/horizontal position does not need to be adjusted.
+ function calculateScrollPos(cm, x1, y1, x2, y2) {
+ // snapMargin: within one text-line of the document's top/bottom edge,
+ // snap all the way to that edge instead of stopping just short.
+ var display = cm.display, snapMargin = textHeight(cm.display);
+ if (y1 < 0) y1 = 0;
+ var screentop = cm.curOp && cm.curOp.scrollTop != null ? cm.curOp.scrollTop : display.scroller.scrollTop;
+ var screen = display.scroller.clientHeight - scrollerCutOff, result = {};
+ if (y2 - y1 > screen) y2 = y1 + screen;
+ var docBottom = cm.doc.height + paddingVert(display);
+ var atTop = y1 < snapMargin, atBottom = y2 > docBottom - snapMargin;
+ if (y1 < screentop) {
+ result.scrollTop = atTop ? 0 : y1;
+ } else if (y2 > screentop + screen) {
+ var newTop = Math.min(y1, (atBottom ? docBottom : y2) - screen);
+ if (newTop != screentop) result.scrollTop = newTop;
+ }
+
+ var screenleft = cm.curOp && cm.curOp.scrollLeft != null ? cm.curOp.scrollLeft : display.scroller.scrollLeft;
+ var screenw = display.scroller.clientWidth - scrollerCutOff - display.gutters.offsetWidth;
+ var tooWide = x2 - x1 > screenw;
+ if (tooWide) x2 = x1 + screenw;
+ if (x1 < 10)
+ result.scrollLeft = 0;
+ else if (x1 < screenleft)
+ result.scrollLeft = Math.max(0, x1 - (tooWide ? 0 : 10));
+ else if (x2 > screenw + screenleft - 3)
+ result.scrollLeft = x2 + (tooWide ? 0 : 10) - screenw;
+
+ return result;
+ }
+
+ // Store a relative adjustment to the scroll position in the current
+ // operation (to be applied when the operation finishes).
+ // Deltas accumulate on top of any scroll already queued in the op.
+ function addToScrollPos(cm, left, top) {
+ if (left != null || top != null) resolveScrollToPos(cm);
+ if (left != null)
+ cm.curOp.scrollLeft = (cm.curOp.scrollLeft == null ? cm.doc.scrollLeft : cm.curOp.scrollLeft) + left;
+ if (top != null)
+ cm.curOp.scrollTop = (cm.curOp.scrollTop == null ? cm.doc.scrollTop : cm.curOp.scrollTop) + top;
+ }
+
+ // Make sure that at the end of the operation the current cursor is
+ // shown. When not line-wrapping, the target is widened by one column
+ // on each side so the cursor doesn't sit flush against the edge.
+ function ensureCursorVisible(cm) {
+ resolveScrollToPos(cm);
+ var cur = cm.getCursor(), from = cur, to = cur;
+ if (!cm.options.lineWrapping) {
+ from = cur.ch ? Pos(cur.line, cur.ch - 1) : cur;
+ to = Pos(cur.line, cur.ch + 1);
+ }
+ cm.curOp.scrollToPos = {from: from, to: to, margin: cm.options.cursorScrollMargin, isCursor: true};
+ }
+
+ // When an operation has its scrollToPos property set, and another
+ // scroll action is applied before the end of the operation, this
+ // 'simulates' scrolling that position into view in a cheap way, so
+ // that the effect of intermediate scroll commands is not ignored.
+ // Uses estimateCoords, i.e. estimated rather than measured positions.
+ function resolveScrollToPos(cm) {
+ var range = cm.curOp.scrollToPos;
+ if (range) {
+ cm.curOp.scrollToPos = null;
+ var from = estimateCoords(cm, range.from), to = estimateCoords(cm, range.to);
+ var sPos = calculateScrollPos(cm, Math.min(from.left, to.left),
+ Math.min(from.top, to.top) - range.margin,
+ Math.max(from.right, to.right),
+ Math.max(from.bottom, to.bottom) + range.margin);
+ cm.scrollTo(sPos.scrollLeft, sPos.scrollTop);
+ }
+ }
+
+ // API UTILITIES
+
+ // Indent the given line. The how parameter can be "smart",
+ // "add"/null, "subtract", or "prev". When aggressive is false
+ // (typically set to true for forced single-line indents), empty
+ // lines are not indented, and places where the mode returns Pass
+ // are left alone.
+ function indentLine(cm, n, how, aggressive) {
+ var doc = cm.doc, state;
+ if (how == null) how = "add";
+ if (how == "smart") {
+ // Fall back to "prev" when the mode doesn't have an indentation
+ // method.
+ if (!doc.mode.indent) how = "prev";
+ else state = getStateBefore(cm, n);
+ }
+
+ var tabSize = cm.options.tabSize;
+ var line = getLine(doc, n), curSpace = countColumn(line.text, null, tabSize);
+ if (line.stateAfter) line.stateAfter = null;
+ var curSpaceString = line.text.match(/^\s*/)[0], indentation;
+ if (!aggressive && !/\S/.test(line.text)) {
+ // Blank line: "not" matches no branch below, so indentation stays 0.
+ indentation = 0;
+ how = "not";
+ } else if (how == "smart") {
+ indentation = doc.mode.indent(state, line.text.slice(curSpaceString.length), line.text);
+ if (indentation == Pass || indentation > 150) {
+ if (!aggressive) return;
+ how = "prev";
+ }
+ }
+ if (how == "prev") {
+ if (n > doc.first) indentation = countColumn(getLine(doc, n-1).text, null, tabSize);
+ else indentation = 0;
+ } else if (how == "add") {
+ indentation = curSpace + cm.options.indentUnit;
+ } else if (how == "subtract") {
+ indentation = curSpace - cm.options.indentUnit;
+ } else if (typeof how == "number") {
+ indentation = curSpace + how;
+ }
+ indentation = Math.max(0, indentation);
+
+ // Build the indentation string (tabs first when configured, then spaces).
+ var indentString = "", pos = 0;
+ if (cm.options.indentWithTabs)
+ for (var i = Math.floor(indentation / tabSize); i; --i) {pos += tabSize; indentString += "\t";}
+ if (pos < indentation) indentString += spaceStr(indentation - pos);
+
+ if (indentString != curSpaceString) {
+ replaceRange(doc, indentString, Pos(n, 0), Pos(n, curSpaceString.length), "+input");
+ } else {
+ // Ensure that, if the cursor was in the whitespace at the start
+ // of the line, it is moved to the end of that space.
+ for (var i = 0; i < doc.sel.ranges.length; i++) {
+ var range = doc.sel.ranges[i];
+ if (range.head.line == n && range.head.ch < curSpaceString.length) {
+ var pos = Pos(n, curSpaceString.length);
+ replaceOneSelection(doc, i, new Range(pos, pos));
+ break;
+ }
+ }
+ }
+ line.stateAfter = null;
+ }
+
+ // Utility for applying a change to a line by handle or number,
+ // returning the number and optionally registering the line as
+ // changed. 'op' returns truthy to request a redraw of the line.
+ function changeLine(doc, handle, changeType, op) {
+ var no = handle, line = handle;
+ if (typeof handle == "number") line = getLine(doc, clipLine(doc, handle));
+ else no = lineNo(handle);
+ if (no == null) return null;
+ if (op(line, no) && doc.cm) regLineChange(doc.cm, no, changeType);
+ return line;
+ }
+
+ // Helper for deleting text near the selection(s), used to implement
+ // backspace, delete, and similar functionality.
+ // 'compute' maps each selection range to a {from, to} span to delete.
+ function deleteNearSelection(cm, compute) {
+ var ranges = cm.doc.sel.ranges, kill = [];
+ // Build up a set of ranges to kill first, merging overlapping
+ // ranges.
+ for (var i = 0; i < ranges.length; i++) {
+ var toKill = compute(ranges[i]);
+ while (kill.length && cmp(toKill.from, lst(kill).to) <= 0) {
+ var replaced = kill.pop();
+ if (cmp(replaced.from, toKill.from) < 0) {
+ toKill.from = replaced.from;
+ break;
+ }
+ }
+ kill.push(toKill);
+ }
+ // Next, remove those actual ranges.
+ runInOp(cm, function() {
+ for (var i = kill.length - 1; i >= 0; i--)
+ replaceRange(cm.doc, "", kill[i].from, kill[i].to, "+delete");
+ ensureCursorVisible(cm);
+ });
+ }
+
+ // Used for horizontal relative motion. Dir is -1 or 1 (left or
+ // right), unit can be "char", "column" (like char, but doesn't
+ // cross line boundaries), "word" (across next word), or "group" (to
+ // the start of next group of word or non-word-non-whitespace
+ // chars). The visually param controls whether, in right-to-left
+ // text, direction 1 means to move towards the next index in the
+ // string, or towards the character to the right of the current
+ // position. The resulting position will have a hitSide=true
+ // property if it reached the end of the document.
+ function findPosH(doc, pos, dir, unit, visually) {
+ var line = pos.line, ch = pos.ch, origDir = dir;
+ var lineObj = getLine(doc, line);
+ var possible = true;
+ // Advance 'line'/'lineObj' one line in the direction of movement;
+ // returns falsy (and clears 'possible') at the document edge.
+ function findNextLine() {
+ var l = line + dir;
+ if (l < doc.first || l >= doc.first + doc.size) return (possible = false);
+ line = l;
+ return lineObj = getLine(doc, l);
+ }
+ // Move 'ch' one step; crosses line boundaries unless boundToLine.
+ function moveOnce(boundToLine) {
+ var next = (visually ? moveVisually : moveLogically)(lineObj, ch, dir, true);
+ if (next == null) {
+ if (!boundToLine && findNextLine()) {
+ if (visually) ch = (dir < 0 ? lineRight : lineLeft)(lineObj);
+ else ch = dir < 0 ? lineObj.text.length : 0;
+ } else return (possible = false);
+ } else ch = next;
+ return true;
+ }
+
+ if (unit == "char") moveOnce();
+ else if (unit == "column") moveOnce(true);
+ else if (unit == "word" || unit == "group") {
+ var sawType = null, group = unit == "group";
+ var helper = doc.cm && doc.cm.getHelper(pos, "wordChars");
+ // Scan characters, classifying each as word ("w"), newline ("n"),
+ // punctuation ("p"), separator ("s"), or null, until the class changes.
+ for (var first = true;; first = false) {
+ if (dir < 0 && !moveOnce(!first)) break;
+ var cur = lineObj.text.charAt(ch) || "\n";
+ var type = isWordChar(cur, helper) ? "w"
+ : group && cur == "\n" ? "n"
+ : !group || /\s/.test(cur) ? null
+ : "p";
+ if (group && !first && !type) type = "s";
+ if (sawType && sawType != type) {
+ if (dir < 0) {dir = 1; moveOnce();}
+ break;
+ }
+
+ if (type) sawType = type;
+ if (dir > 0 && !moveOnce(!first)) break;
+ }
+ }
+ var result = skipAtomic(doc, Pos(line, ch), origDir, true);
+ if (!possible) result.hitSide = true;
+ return result;
+ }
+
+ // For relative vertical movement. Dir may be -1 or 1. Unit can be
+ // "page" or "line". The resulting position will have a hitSide=true
+ // property if it reached the end of the document.
+ // Probes coordsChar repeatedly, nudging y by 5px until a position
+ // inside the text (not 'outside') is found or the document edge is hit.
+ function findPosV(cm, pos, dir, unit) {
+ var doc = cm.doc, x = pos.left, y;
+ if (unit == "page") {
+ var pageSize = Math.min(cm.display.wrapper.clientHeight, window.innerHeight || document.documentElement.clientHeight);
+ y = pos.top + dir * (pageSize - (dir < 0 ? 1.5 : .5) * textHeight(cm.display));
+ } else if (unit == "line") {
+ y = dir > 0 ? pos.bottom + 3 : pos.top - 3;
+ }
+ for (;;) {
+ var target = coordsChar(cm, x, y);
+ if (!target.outside) break;
+ if (dir < 0 ? y <= 0 : y >= doc.height) { target.hitSide = true; break; }
+ y += dir * 5;
+ }
+ return target;
+ }
+
+ // EDITOR METHODS
+
+ // The publicly visible API. Note that methodOp(f) means
+ // 'wrap f in an operation, performed on its `this` parameter'.
+
+ // This is not the complete set of editor methods. Most of the
+ // methods defined on the Doc type are also injected into
+ // CodeMirror.prototype, for backwards compatibility and
+ // convenience.
+
+ CodeMirror.prototype = {
+ constructor: CodeMirror,
+ focus: function(){window.focus(); focusInput(this); fastPoll(this);},
+
+ setOption: function(option, value) {
+ var options = this.options, old = options[option];
+ if (options[option] == value && option != "mode") return;
+ options[option] = value;
+ if (optionHandlers.hasOwnProperty(option))
+ operation(this, optionHandlers[option])(this, value, old);
+ },
+
+ getOption: function(option) {return this.options[option];},
+ getDoc: function() {return this.doc;},
+
+ addKeyMap: function(map, bottom) {
+ this.state.keyMaps[bottom ? "push" : "unshift"](getKeyMap(map));
+ },
+ removeKeyMap: function(map) {
+ var maps = this.state.keyMaps;
+ for (var i = 0; i < maps.length; ++i)
+ if (maps[i] == map || maps[i].name == map) {
+ maps.splice(i, 1);
+ return true;
+ }
+ },
+
+ addOverlay: methodOp(function(spec, options) {
+ var mode = spec.token ? spec : CodeMirror.getMode(this.options, spec);
+ if (mode.startState) throw new Error("Overlays may not be stateful.");
+ this.state.overlays.push({mode: mode, modeSpec: spec, opaque: options && options.opaque});
+ this.state.modeGen++;
+ regChange(this);
+ }),
+ removeOverlay: methodOp(function(spec) {
+ var overlays = this.state.overlays;
+ for (var i = 0; i < overlays.length; ++i) {
+ var cur = overlays[i].modeSpec;
+ if (cur == spec || typeof spec == "string" && cur.name == spec) {
+ overlays.splice(i, 1);
+ this.state.modeGen++;
+ regChange(this);
+ return;
+ }
+ }
+ }),
+
+ indentLine: methodOp(function(n, dir, aggressive) {
+ if (typeof dir != "string" && typeof dir != "number") {
+ if (dir == null) dir = this.options.smartIndent ? "smart" : "prev";
+ else dir = dir ? "add" : "subtract";
+ }
+ if (isLine(this.doc, n)) indentLine(this, n, dir, aggressive);
+ }),
+ indentSelection: methodOp(function(how) {
+ var ranges = this.doc.sel.ranges, end = -1;
+ for (var i = 0; i < ranges.length; i++) {
+ var range = ranges[i];
+ if (!range.empty()) {
+ var from = range.from(), to = range.to();
+ var start = Math.max(end, from.line);
+ end = Math.min(this.lastLine(), to.line - (to.ch ? 0 : 1)) + 1;
+ for (var j = start; j < end; ++j)
+ indentLine(this, j, how);
+ var newRanges = this.doc.sel.ranges;
+ if (from.ch == 0 && ranges.length == newRanges.length && newRanges[i].from().ch > 0)
+ replaceOneSelection(this.doc, i, new Range(from, newRanges[i].to()), sel_dontScroll);
+ } else if (range.head.line > end) {
+ indentLine(this, range.head.line, how, true);
+ end = range.head.line;
+ if (i == this.doc.sel.primIndex) ensureCursorVisible(this);
+ }
+ }
+ }),
+
+ // Fetch the parser token for a given character. Useful for hacks
+ // that want to inspect the mode state (say, for completion).
+ getTokenAt: function(pos, precise) {
+ return takeToken(this, pos, precise);
+ },
+
+ getLineTokens: function(line, precise) {
+ return takeToken(this, Pos(line), precise, true);
+ },
+
+ getTokenTypeAt: function(pos) {
+ pos = clipPos(this.doc, pos);
+ var styles = getLineStyles(this, getLine(this.doc, pos.line));
+ var before = 0, after = (styles.length - 1) / 2, ch = pos.ch;
+ var type;
+ if (ch == 0) type = styles[2];
+ else for (;;) {
+ var mid = (before + after) >> 1;
+ if ((mid ? styles[mid * 2 - 1] : 0) >= ch) after = mid;
+ else if (styles[mid * 2 + 1] < ch) before = mid + 1;
+ else { type = styles[mid * 2 + 2]; break; }
+ }
+ var cut = type ? type.indexOf("cm-overlay ") : -1;
+ return cut < 0 ? type : cut == 0 ? null : type.slice(0, cut - 1);
+ },
+
+ getModeAt: function(pos) {
+ var mode = this.doc.mode;
+ if (!mode.innerMode) return mode;
+ return CodeMirror.innerMode(mode, this.getTokenAt(pos).state).mode;
+ },
+
+ getHelper: function(pos, type) {
+ return this.getHelpers(pos, type)[0];
+ },
+
+ getHelpers: function(pos, type) {
+ var found = [];
+ if (!helpers.hasOwnProperty(type)) return helpers;
+ var help = helpers[type], mode = this.getModeAt(pos);
+ if (typeof mode[type] == "string") {
+ if (help[mode[type]]) found.push(help[mode[type]]);
+ } else if (mode[type]) {
+ for (var i = 0; i < mode[type].length; i++) {
+ var val = help[mode[type][i]];
+ if (val) found.push(val);
+ }
+ } else if (mode.helperType && help[mode.helperType]) {
+ found.push(help[mode.helperType]);
+ } else if (help[mode.name]) {
+ found.push(help[mode.name]);
+ }
+ for (var i = 0; i < help._global.length; i++) {
+ var cur = help._global[i];
+ if (cur.pred(mode, this) && indexOf(found, cur.val) == -1)
+ found.push(cur.val);
+ }
+ return found;
+ },
+
+ getStateAfter: function(line, precise) {
+ var doc = this.doc;
+ line = clipLine(doc, line == null ? doc.first + doc.size - 1: line);
+ return getStateBefore(this, line + 1, precise);
+ },
+
+ cursorCoords: function(start, mode) {
+ var pos, range = this.doc.sel.primary();
+ if (start == null) pos = range.head;
+ else if (typeof start == "object") pos = clipPos(this.doc, start);
+ else pos = start ? range.from() : range.to();
+ return cursorCoords(this, pos, mode || "page");
+ },
+
+ charCoords: function(pos, mode) {
+ return charCoords(this, clipPos(this.doc, pos), mode || "page");
+ },
+
+ coordsChar: function(coords, mode) {
+ coords = fromCoordSystem(this, coords, mode || "page");
+ return coordsChar(this, coords.left, coords.top);
+ },
+
+ lineAtHeight: function(height, mode) {
+ height = fromCoordSystem(this, {top: height, left: 0}, mode || "page").top;
+ return lineAtHeight(this.doc, height + this.display.viewOffset);
+ },
+ heightAtLine: function(line, mode) {
+ var end = false, last = this.doc.first + this.doc.size - 1;
+ if (line < this.doc.first) line = this.doc.first;
+ else if (line > last) { line = last; end = true; }
+ var lineObj = getLine(this.doc, line);
+ return intoCoordSystem(this, lineObj, {top: 0, left: 0}, mode || "page").top +
+ (end ? this.doc.height - heightAtLine(lineObj) : 0);
+ },
+
+ defaultTextHeight: function() { return textHeight(this.display); },
+ defaultCharWidth: function() { return charWidth(this.display); },
+
+ setGutterMarker: methodOp(function(line, gutterID, value) {
+ return changeLine(this.doc, line, "gutter", function(line) {
+ var markers = line.gutterMarkers || (line.gutterMarkers = {});
+ markers[gutterID] = value;
+ if (!value && isEmpty(markers)) line.gutterMarkers = null;
+ return true;
+ });
+ }),
+
+ clearGutter: methodOp(function(gutterID) {
+ var cm = this, doc = cm.doc, i = doc.first;
+ doc.iter(function(line) {
+ if (line.gutterMarkers && line.gutterMarkers[gutterID]) {
+ line.gutterMarkers[gutterID] = null;
+ regLineChange(cm, i, "gutter");
+ if (isEmpty(line.gutterMarkers)) line.gutterMarkers = null;
+ }
+ ++i;
+ });
+ }),
+
+ addLineWidget: methodOp(function(handle, node, options) {
+ return addLineWidget(this, handle, node, options);
+ }),
+
+ removeLineWidget: function(widget) { widget.clear(); },
+
+ lineInfo: function(line) {
+ if (typeof line == "number") {
+ if (!isLine(this.doc, line)) return null;
+ var n = line;
+ line = getLine(this.doc, line);
+ if (!line) return null;
+ } else {
+ var n = lineNo(line);
+ if (n == null) return null;
+ }
+ return {line: n, handle: line, text: line.text, gutterMarkers: line.gutterMarkers,
+ textClass: line.textClass, bgClass: line.bgClass, wrapClass: line.wrapClass,
+ widgets: line.widgets};
+ },
+
+ getViewport: function() { return {from: this.display.viewFrom, to: this.display.viewTo};},
+
+ addWidget: function(pos, node, scroll, vert, horiz) {
+ var display = this.display;
+ pos = cursorCoords(this, clipPos(this.doc, pos));
+ var top = pos.bottom, left = pos.left;
+ node.style.position = "absolute";
+ display.sizer.appendChild(node);
+ if (vert == "over") {
+ top = pos.top;
+ } else if (vert == "above" || vert == "near") {
+ var vspace = Math.max(display.wrapper.clientHeight, this.doc.height),
+ hspace = Math.max(display.sizer.clientWidth, display.lineSpace.clientWidth);
+ // Default to positioning above (if specified and possible); otherwise default to positioning below
+ if ((vert == 'above' || pos.bottom + node.offsetHeight > vspace) && pos.top > node.offsetHeight)
+ top = pos.top - node.offsetHeight;
+ else if (pos.bottom + node.offsetHeight <= vspace)
+ top = pos.bottom;
+ if (left + node.offsetWidth > hspace)
+ left = hspace - node.offsetWidth;
+ }
+ node.style.top = top + "px";
+ node.style.left = node.style.right = "";
+ if (horiz == "right") {
+ left = display.sizer.clientWidth - node.offsetWidth;
+ node.style.right = "0px";
+ } else {
+ if (horiz == "left") left = 0;
+ else if (horiz == "middle") left = (display.sizer.clientWidth - node.offsetWidth) / 2;
+ node.style.left = left + "px";
+ }
+ if (scroll)
+ scrollIntoView(this, left, top, left + node.offsetWidth, top + node.offsetHeight);
+ },
+
+ triggerOnKeyDown: methodOp(onKeyDown),
+ triggerOnKeyPress: methodOp(onKeyPress),
+ triggerOnKeyUp: onKeyUp,
+
+ execCommand: function(cmd) {
+ if (commands.hasOwnProperty(cmd))
+ return commands[cmd](this);
+ },
+
+ findPosH: function(from, amount, unit, visually) {
+ var dir = 1;
+ if (amount < 0) { dir = -1; amount = -amount; }
+ for (var i = 0, cur = clipPos(this.doc, from); i < amount; ++i) {
+ cur = findPosH(this.doc, cur, dir, unit, visually);
+ if (cur.hitSide) break;
+ }
+ return cur;
+ },
+
+ moveH: methodOp(function(dir, unit) {
+ var cm = this;
+ cm.extendSelectionsBy(function(range) {
+ if (cm.display.shift || cm.doc.extend || range.empty())
+ return findPosH(cm.doc, range.head, dir, unit, cm.options.rtlMoveVisually);
+ else
+ return dir < 0 ? range.from() : range.to();
+ }, sel_move);
+ }),
+
+ deleteH: methodOp(function(dir, unit) {
+ var sel = this.doc.sel, doc = this.doc;
+ if (sel.somethingSelected())
+ doc.replaceSelection("", null, "+delete");
+ else
+ deleteNearSelection(this, function(range) {
+ var other = findPosH(doc, range.head, dir, unit, false);
+ return dir < 0 ? {from: other, to: range.head} : {from: range.head, to: other};
+ });
+ }),
+
+ findPosV: function(from, amount, unit, goalColumn) {
+ var dir = 1, x = goalColumn;
+ if (amount < 0) { dir = -1; amount = -amount; }
+ for (var i = 0, cur = clipPos(this.doc, from); i < amount; ++i) {
+ var coords = cursorCoords(this, cur, "div");
+ if (x == null) x = coords.left;
+ else coords.left = x;
+ cur = findPosV(this, coords, dir, unit);
+ if (cur.hitSide) break;
+ }
+ return cur;
+ },
+
+ moveV: methodOp(function(dir, unit) {
+ var cm = this, doc = this.doc, goals = [];
+ var collapse = !cm.display.shift && !doc.extend && doc.sel.somethingSelected();
+ doc.extendSelectionsBy(function(range) {
+ if (collapse)
+ return dir < 0 ? range.from() : range.to();
+ var headPos = cursorCoords(cm, range.head, "div");
+ if (range.goalColumn != null) headPos.left = range.goalColumn;
+ goals.push(headPos.left);
+ var pos = findPosV(cm, headPos, dir, unit);
+ if (unit == "page" && range == doc.sel.primary())
+ addToScrollPos(cm, null, charCoords(cm, pos, "div").top - headPos.top);
+ return pos;
+ }, sel_move);
+ if (goals.length) for (var i = 0; i < doc.sel.ranges.length; i++)
+ doc.sel.ranges[i].goalColumn = goals[i];
+ }),
+
+ // Find the word at the given position (as returned by coordsChar).
+ findWordAt: function(pos) {
+ var doc = this.doc, line = getLine(doc, pos.line).text;
+ var start = pos.ch, end = pos.ch;
+ if (line) {
+ var helper = this.getHelper(pos, "wordChars");
+ if ((pos.xRel < 0 || end == line.length) && start) --start; else ++end;
+ var startChar = line.charAt(start);
+ var check = isWordChar(startChar, helper)
+ ? function(ch) { return isWordChar(ch, helper); }
+ : /\s/.test(startChar) ? function(ch) {return /\s/.test(ch);}
+ : function(ch) {return !/\s/.test(ch) && !isWordChar(ch);};
+ while (start > 0 && check(line.charAt(start - 1))) --start;
+ while (end < line.length && check(line.charAt(end))) ++end;
+ }
+ return new Range(Pos(pos.line, start), Pos(pos.line, end));
+ },
+
+ toggleOverwrite: function(value) {
+ if (value != null && value == this.state.overwrite) return;
+ if (this.state.overwrite = !this.state.overwrite)
+ addClass(this.display.cursorDiv, "CodeMirror-overwrite");
+ else
+ rmClass(this.display.cursorDiv, "CodeMirror-overwrite");
+
+ signal(this, "overwriteToggle", this, this.state.overwrite);
+ },
+ hasFocus: function() { return activeElt() == this.display.input; },
+
+ scrollTo: methodOp(function(x, y) {
+ if (x != null || y != null) resolveScrollToPos(this);
+ if (x != null) this.curOp.scrollLeft = x;
+ if (y != null) this.curOp.scrollTop = y;
+ }),
+ getScrollInfo: function() {
+ var scroller = this.display.scroller, co = scrollerCutOff;
+ return {left: scroller.scrollLeft, top: scroller.scrollTop,
+ height: scroller.scrollHeight - co, width: scroller.scrollWidth - co,
+ clientHeight: scroller.clientHeight - co, clientWidth: scroller.clientWidth - co};
+ },
+
+ scrollIntoView: methodOp(function(range, margin) {
+ if (range == null) {
+ range = {from: this.doc.sel.primary().head, to: null};
+ if (margin == null) margin = this.options.cursorScrollMargin;
+ } else if (typeof range == "number") {
+ range = {from: Pos(range, 0), to: null};
+ } else if (range.from == null) {
+ range = {from: range, to: null};
+ }
+ if (!range.to) range.to = range.from;
+ range.margin = margin || 0;
+
+ if (range.from.line != null) {
+ resolveScrollToPos(this);
+ this.curOp.scrollToPos = range;
+ } else {
+ var sPos = calculateScrollPos(this, Math.min(range.from.left, range.to.left),
+ Math.min(range.from.top, range.to.top) - range.margin,
+ Math.max(range.from.right, range.to.right),
+ Math.max(range.from.bottom, range.to.bottom) + range.margin);
+ this.scrollTo(sPos.scrollLeft, sPos.scrollTop);
+ }
+ }),
+
+ setSize: methodOp(function(width, height) {
+ var cm = this;
+ function interpret(val) {
+ return typeof val == "number" || /^\d+$/.test(String(val)) ? val + "px" : val;
+ }
+ if (width != null) cm.display.wrapper.style.width = interpret(width);
+ if (height != null) cm.display.wrapper.style.height = interpret(height);
+ if (cm.options.lineWrapping) clearLineMeasurementCache(this);
+ var lineNo = cm.display.viewFrom;
+ cm.doc.iter(lineNo, cm.display.viewTo, function(line) {
+ if (line.widgets) for (var i = 0; i < line.widgets.length; i++)
+ if (line.widgets[i].noHScroll) { regLineChange(cm, lineNo, "widget"); break; }
+ ++lineNo;
+ });
+ cm.curOp.forceUpdate = true;
+ signal(cm, "refresh", this);
+ }),
+
+ operation: function(f){return runInOp(this, f);},
+
+ refresh: methodOp(function() {
+ var oldHeight = this.display.cachedTextHeight;
+ regChange(this);
+ this.curOp.forceUpdate = true;
+ clearCaches(this);
+ this.scrollTo(this.doc.scrollLeft, this.doc.scrollTop);
+ updateGutterSpace(this);
+ if (oldHeight == null || Math.abs(oldHeight - textHeight(this.display)) > .5)
+ estimateLineHeights(this);
+ signal(this, "refresh", this);
+ }),
+
+ swapDoc: methodOp(function(doc) {
+ var old = this.doc;
+ old.cm = null;
+ attachDoc(this, doc);
+ clearCaches(this);
+ resetInput(this);
+ this.scrollTo(doc.scrollLeft, doc.scrollTop);
+ this.curOp.forceScroll = true;
+ signalLater(this, "swapDoc", this, old);
+ return old;
+ }),
+
+ getInputField: function(){return this.display.input;},
+ getWrapperElement: function(){return this.display.wrapper;},
+ getScrollerElement: function(){return this.display.scroller;},
+ getGutterElement: function(){return this.display.gutters;}
+ };
+ eventMixin(CodeMirror);
+
+ // OPTION DEFAULTS
+
+ // The default configuration options.
+ var defaults = CodeMirror.defaults = {};
+ // Functions to run when options are changed.
+ var optionHandlers = CodeMirror.optionHandlers = {};
+
+ function option(name, deflt, handle, notOnInit) {
+ CodeMirror.defaults[name] = deflt;
+ if (handle) optionHandlers[name] =
+ notOnInit ? function(cm, val, old) {if (old != Init) handle(cm, val, old);} : handle;
+ }
+
+ // Passed to option handlers when there is no old value.
+ var Init = CodeMirror.Init = {toString: function(){return "CodeMirror.Init";}};
+
+ // These two are, on init, called from the constructor because they
+ // have to be initialized before the editor can start at all.
+ option("value", "", function(cm, val) {
+ cm.setValue(val);
+ }, true);
+ option("mode", null, function(cm, val) {
+ cm.doc.modeOption = val;
+ loadMode(cm);
+ }, true);
+
+ option("indentUnit", 2, loadMode, true);
+ option("indentWithTabs", false);
+ option("smartIndent", true);
+ option("tabSize", 4, function(cm) {
+ resetModeState(cm);
+ clearCaches(cm);
+ regChange(cm);
+ }, true);
+ option("specialChars", /[\t\u0000-\u0019\u00ad\u200b-\u200f\u2028\u2029\ufeff]/g, function(cm, val) {
+ cm.options.specialChars = new RegExp(val.source + (val.test("\t") ? "" : "|\t"), "g");
+ cm.refresh();
+ }, true);
+ option("specialCharPlaceholder", defaultSpecialCharPlaceholder, function(cm) {cm.refresh();}, true);
+ option("electricChars", true);
+ option("rtlMoveVisually", !windows);
+ option("wholeLineUpdateBefore", true);
+
+ option("theme", "default", function(cm) {
+ themeChanged(cm);
+ guttersChanged(cm);
+ }, true);
+ option("keyMap", "default", function(cm, val, old) {
+ var next = getKeyMap(val);
+ var prev = old != CodeMirror.Init && getKeyMap(old);
+ if (prev && prev.detach) prev.detach(cm, next);
+ if (next.attach) next.attach(cm, prev || null);
+ });
+ option("extraKeys", null);
+
+ option("lineWrapping", false, wrappingChanged, true);
+ option("gutters", [], function(cm) {
+ setGuttersForLineNumbers(cm.options);
+ guttersChanged(cm);
+ }, true);
+ option("fixedGutter", true, function(cm, val) {
+ cm.display.gutters.style.left = val ? compensateForHScroll(cm.display) + "px" : "0";
+ cm.refresh();
+ }, true);
+ option("coverGutterNextToScrollbar", false, updateScrollbars, true);
+ option("lineNumbers", false, function(cm) {
+ setGuttersForLineNumbers(cm.options);
+ guttersChanged(cm);
+ }, true);
+ option("firstLineNumber", 1, guttersChanged, true);
+ option("lineNumberFormatter", function(integer) {return integer;}, guttersChanged, true);
+ option("showCursorWhenSelecting", false, updateSelection, true);
+
+ option("resetSelectionOnContextMenu", true);
+
+ option("readOnly", false, function(cm, val) {
+ if (val == "nocursor") {
+ onBlur(cm);
+ cm.display.input.blur();
+ cm.display.disabled = true;
+ } else {
+ cm.display.disabled = false;
+ if (!val) resetInput(cm);
+ }
+ });
+ option("disableInput", false, function(cm, val) {if (!val) resetInput(cm);}, true);
+ option("dragDrop", true);
+
+ option("cursorBlinkRate", 530);
+ option("cursorScrollMargin", 0);
+ option("cursorHeight", 1, updateSelection, true);
+ option("singleCursorHeightPerLine", true, updateSelection, true);
+ option("workTime", 100);
+ option("workDelay", 100);
+ option("flattenSpans", true, resetModeState, true);
+ option("addModeClass", false, resetModeState, true);
+ option("pollInterval", 100);
+ option("undoDepth", 200, function(cm, val){cm.doc.history.undoDepth = val;});
+ option("historyEventDelay", 1250);
+ option("viewportMargin", 10, function(cm){cm.refresh();}, true);
+ option("maxHighlightLength", 10000, resetModeState, true);
+ option("moveInputWithCursor", true, function(cm, val) {
+ if (!val) cm.display.inputDiv.style.top = cm.display.inputDiv.style.left = 0;
+ });
+
+ option("tabindex", null, function(cm, val) {
+ cm.display.input.tabIndex = val || "";
+ });
+ option("autofocus", null);
+
+ // MODE DEFINITION AND QUERYING
+
+ // Known modes, by name and by MIME
+ var modes = CodeMirror.modes = {}, mimeModes = CodeMirror.mimeModes = {};
+
+ // Extra arguments are stored as the mode's dependencies, which is
+ // used by (legacy) mechanisms like loadmode.js to automatically
+ // load a mode. (Preferred mechanism is the require/define calls.)
+ CodeMirror.defineMode = function(name, mode) {
+ if (!CodeMirror.defaults.mode && name != "null") CodeMirror.defaults.mode = name;
+ if (arguments.length > 2)
+ mode.dependencies = Array.prototype.slice.call(arguments, 2);
+ modes[name] = mode;
+ };
+
+ CodeMirror.defineMIME = function(mime, spec) {
+ mimeModes[mime] = spec;
+ };
+
+ // Given a MIME type, a {name, ...options} config object, or a name
+ // string, return a mode config object.
+ CodeMirror.resolveMode = function(spec) {
+ if (typeof spec == "string" && mimeModes.hasOwnProperty(spec)) {
+ spec = mimeModes[spec];
+ } else if (spec && typeof spec.name == "string" && mimeModes.hasOwnProperty(spec.name)) {
+ var found = mimeModes[spec.name];
+ if (typeof found == "string") found = {name: found};
+ spec = createObj(found, spec);
+ spec.name = found.name;
+ } else if (typeof spec == "string" && /^[\w\-]+\/[\w\-]+\+xml$/.test(spec)) {
+ return CodeMirror.resolveMode("application/xml");
+ }
+ if (typeof spec == "string") return {name: spec};
+ else return spec || {name: "null"};
+ };
+
+ // Given a mode spec (anything that resolveMode accepts), find and
+ // initialize an actual mode object.
+ CodeMirror.getMode = function(options, spec) {
+ var spec = CodeMirror.resolveMode(spec);
+ var mfactory = modes[spec.name];
+ if (!mfactory) return CodeMirror.getMode(options, "text/plain");
+ var modeObj = mfactory(options, spec);
+ if (modeExtensions.hasOwnProperty(spec.name)) {
+ var exts = modeExtensions[spec.name];
+ for (var prop in exts) {
+ if (!exts.hasOwnProperty(prop)) continue;
+ if (modeObj.hasOwnProperty(prop)) modeObj["_" + prop] = modeObj[prop];
+ modeObj[prop] = exts[prop];
+ }
+ }
+ modeObj.name = spec.name;
+ if (spec.helperType) modeObj.helperType = spec.helperType;
+ if (spec.modeProps) for (var prop in spec.modeProps)
+ modeObj[prop] = spec.modeProps[prop];
+
+ return modeObj;
+ };
+
+ // Minimal default mode.
+ CodeMirror.defineMode("null", function() {
+ return {token: function(stream) {stream.skipToEnd();}};
+ });
+ CodeMirror.defineMIME("text/plain", "null");
+
+ // This can be used to attach properties to mode objects from
+ // outside the actual mode definition.
+ var modeExtensions = CodeMirror.modeExtensions = {};
+ CodeMirror.extendMode = function(mode, properties) {
+ var exts = modeExtensions.hasOwnProperty(mode) ? modeExtensions[mode] : (modeExtensions[mode] = {});
+ copyObj(properties, exts);
+ };
+
+ // EXTENSIONS
+
+ CodeMirror.defineExtension = function(name, func) {
+ CodeMirror.prototype[name] = func;
+ };
+ CodeMirror.defineDocExtension = function(name, func) {
+ Doc.prototype[name] = func;
+ };
+ CodeMirror.defineOption = option;
+
+ var initHooks = [];
+ CodeMirror.defineInitHook = function(f) {initHooks.push(f);};
+
+ var helpers = CodeMirror.helpers = {};
+ CodeMirror.registerHelper = function(type, name, value) {
+ if (!helpers.hasOwnProperty(type)) helpers[type] = CodeMirror[type] = {_global: []};
+ helpers[type][name] = value;
+ };
+ CodeMirror.registerGlobalHelper = function(type, name, predicate, value) {
+ CodeMirror.registerHelper(type, name, value);
+ helpers[type]._global.push({pred: predicate, val: value});
+ };
+
+ // MODE STATE HANDLING
+
+ // Utility functions for working with state. Exported because nested
+ // modes need to do this for their inner modes.
+
+ var copyState = CodeMirror.copyState = function(mode, state) {
+ if (state === true) return state;
+ if (mode.copyState) return mode.copyState(state);
+ var nstate = {};
+ for (var n in state) {
+ var val = state[n];
+ if (val instanceof Array) val = val.concat([]);
+ nstate[n] = val;
+ }
+ return nstate;
+ };
+
+ var startState = CodeMirror.startState = function(mode, a1, a2) {
+ return mode.startState ? mode.startState(a1, a2) : true;
+ };
+
+ // Given a mode and a state (for that mode), find the inner mode and
+ // state at the position that the state refers to.
+ CodeMirror.innerMode = function(mode, state) {
+ while (mode.innerMode) {
+ var info = mode.innerMode(state);
+ if (!info || info.mode == mode) break;
+ state = info.state;
+ mode = info.mode;
+ }
+ return info || {mode: mode, state: state};
+ };
+
+ // STANDARD COMMANDS
+
+ // Commands are parameter-less actions that can be performed on an
+ // editor, mostly used for keybindings.
+ var commands = CodeMirror.commands = {
+ selectAll: function(cm) {cm.setSelection(Pos(cm.firstLine(), 0), Pos(cm.lastLine()), sel_dontScroll);},
+ singleSelection: function(cm) {
+ cm.setSelection(cm.getCursor("anchor"), cm.getCursor("head"), sel_dontScroll);
+ },
+ killLine: function(cm) {
+ deleteNearSelection(cm, function(range) {
+ if (range.empty()) {
+ var len = getLine(cm.doc, range.head.line).text.length;
+ if (range.head.ch == len && range.head.line < cm.lastLine())
+ return {from: range.head, to: Pos(range.head.line + 1, 0)};
+ else
+ return {from: range.head, to: Pos(range.head.line, len)};
+ } else {
+ return {from: range.from(), to: range.to()};
+ }
+ });
+ },
+ deleteLine: function(cm) {
+ deleteNearSelection(cm, function(range) {
+ return {from: Pos(range.from().line, 0),
+ to: clipPos(cm.doc, Pos(range.to().line + 1, 0))};
+ });
+ },
+ delLineLeft: function(cm) {
+ deleteNearSelection(cm, function(range) {
+ return {from: Pos(range.from().line, 0), to: range.from()};
+ });
+ },
+ delWrappedLineLeft: function(cm) {
+ deleteNearSelection(cm, function(range) {
+ var top = cm.charCoords(range.head, "div").top + 5;
+ var leftPos = cm.coordsChar({left: 0, top: top}, "div");
+ return {from: leftPos, to: range.from()};
+ });
+ },
+ delWrappedLineRight: function(cm) {
+ deleteNearSelection(cm, function(range) {
+ var top = cm.charCoords(range.head, "div").top + 5;
+ var rightPos = cm.coordsChar({left: cm.display.lineDiv.offsetWidth + 100, top: top}, "div");
+ return {from: range.from(), to: rightPos };
+ });
+ },
+ undo: function(cm) {cm.undo();},
+ redo: function(cm) {cm.redo();},
+ undoSelection: function(cm) {cm.undoSelection();},
+ redoSelection: function(cm) {cm.redoSelection();},
+ goDocStart: function(cm) {cm.extendSelection(Pos(cm.firstLine(), 0));},
+ goDocEnd: function(cm) {cm.extendSelection(Pos(cm.lastLine()));},
+ goLineStart: function(cm) {
+ cm.extendSelectionsBy(function(range) { return lineStart(cm, range.head.line); },
+ {origin: "+move", bias: 1});
+ },
+ goLineStartSmart: function(cm) {
+ cm.extendSelectionsBy(function(range) {
+ return lineStartSmart(cm, range.head);
+ }, {origin: "+move", bias: 1});
+ },
+ goLineEnd: function(cm) {
+ cm.extendSelectionsBy(function(range) { return lineEnd(cm, range.head.line); },
+ {origin: "+move", bias: -1});
+ },
+ goLineRight: function(cm) {
+ cm.extendSelectionsBy(function(range) {
+ var top = cm.charCoords(range.head, "div").top + 5;
+ return cm.coordsChar({left: cm.display.lineDiv.offsetWidth + 100, top: top}, "div");
+ }, sel_move);
+ },
+ goLineLeft: function(cm) {
+ cm.extendSelectionsBy(function(range) {
+ var top = cm.charCoords(range.head, "div").top + 5;
+ return cm.coordsChar({left: 0, top: top}, "div");
+ }, sel_move);
+ },
+ goLineLeftSmart: function(cm) {
+ cm.extendSelectionsBy(function(range) {
+ var top = cm.charCoords(range.head, "div").top + 5;
+ var pos = cm.coordsChar({left: 0, top: top}, "div");
+ if (pos.ch < cm.getLine(pos.line).search(/\S/)) return lineStartSmart(cm, range.head);
+ return pos;
+ }, sel_move);
+ },
+ goLineUp: function(cm) {cm.moveV(-1, "line");},
+ goLineDown: function(cm) {cm.moveV(1, "line");},
+ goPageUp: function(cm) {cm.moveV(-1, "page");},
+ goPageDown: function(cm) {cm.moveV(1, "page");},
+ goCharLeft: function(cm) {cm.moveH(-1, "char");},
+ goCharRight: function(cm) {cm.moveH(1, "char");},
+ goColumnLeft: function(cm) {cm.moveH(-1, "column");},
+ goColumnRight: function(cm) {cm.moveH(1, "column");},
+ goWordLeft: function(cm) {cm.moveH(-1, "word");},
+ goGroupRight: function(cm) {cm.moveH(1, "group");},
+ goGroupLeft: function(cm) {cm.moveH(-1, "group");},
+ goWordRight: function(cm) {cm.moveH(1, "word");},
+ delCharBefore: function(cm) {cm.deleteH(-1, "char");},
+ delCharAfter: function(cm) {cm.deleteH(1, "char");},
+ delWordBefore: function(cm) {cm.deleteH(-1, "word");},
+ delWordAfter: function(cm) {cm.deleteH(1, "word");},
+ delGroupBefore: function(cm) {cm.deleteH(-1, "group");},
+ delGroupAfter: function(cm) {cm.deleteH(1, "group");},
+ indentAuto: function(cm) {cm.indentSelection("smart");},
+ indentMore: function(cm) {cm.indentSelection("add");},
+ indentLess: function(cm) {cm.indentSelection("subtract");},
+ insertTab: function(cm) {cm.replaceSelection("\t");},
+ insertSoftTab: function(cm) {
+ var spaces = [], ranges = cm.listSelections(), tabSize = cm.options.tabSize;
+ for (var i = 0; i < ranges.length; i++) {
+ var pos = ranges[i].from();
+ var col = countColumn(cm.getLine(pos.line), pos.ch, tabSize);
+ spaces.push(new Array(tabSize - col % tabSize + 1).join(" "));
+ }
+ cm.replaceSelections(spaces);
+ },
+ defaultTab: function(cm) {
+ if (cm.somethingSelected()) cm.indentSelection("add");
+ else cm.execCommand("insertTab");
+ },
+ transposeChars: function(cm) {
+ runInOp(cm, function() {
+ var ranges = cm.listSelections(), newSel = [];
+ for (var i = 0; i < ranges.length; i++) {
+ var cur = ranges[i].head, line = getLine(cm.doc, cur.line).text;
+ if (line) {
+ if (cur.ch == line.length) cur = new Pos(cur.line, cur.ch - 1);
+ if (cur.ch > 0) {
+ cur = new Pos(cur.line, cur.ch + 1);
+ cm.replaceRange(line.charAt(cur.ch - 1) + line.charAt(cur.ch - 2),
+ Pos(cur.line, cur.ch - 2), cur, "+transpose");
+ } else if (cur.line > cm.doc.first) {
+ var prev = getLine(cm.doc, cur.line - 1).text;
+ if (prev)
+ cm.replaceRange(line.charAt(0) + "\n" + prev.charAt(prev.length - 1),
+ Pos(cur.line - 1, prev.length - 1), Pos(cur.line, 1), "+transpose");
+ }
+ }
+ newSel.push(new Range(cur, cur));
+ }
+ cm.setSelections(newSel);
+ });
+ },
+ newlineAndIndent: function(cm) {
+ runInOp(cm, function() {
+ var len = cm.listSelections().length;
+ for (var i = 0; i < len; i++) {
+ var range = cm.listSelections()[i];
+ cm.replaceRange("\n", range.anchor, range.head, "+input");
+ cm.indentLine(range.from().line + 1, null, true);
+ ensureCursorVisible(cm);
+ }
+ });
+ },
+ toggleOverwrite: function(cm) {cm.toggleOverwrite();}
+ };
+
+
+ // STANDARD KEYMAPS
+
+ var keyMap = CodeMirror.keyMap = {};
+
+ keyMap.basic = {
+ "Left": "goCharLeft", "Right": "goCharRight", "Up": "goLineUp", "Down": "goLineDown",
+ "End": "goLineEnd", "Home": "goLineStartSmart", "PageUp": "goPageUp", "PageDown": "goPageDown",
+ "Delete": "delCharAfter", "Backspace": "delCharBefore", "Shift-Backspace": "delCharBefore",
+ "Tab": "defaultTab", "Shift-Tab": "indentAuto",
+ "Enter": "newlineAndIndent", "Insert": "toggleOverwrite",
+ "Esc": "singleSelection"
+ };
+ // Note that the save and find-related commands aren't defined by
+ // default. User code or addons can define them. Unknown commands
+ // are simply ignored.
+ keyMap.pcDefault = {
+ "Ctrl-A": "selectAll", "Ctrl-D": "deleteLine", "Ctrl-Z": "undo", "Shift-Ctrl-Z": "redo", "Ctrl-Y": "redo",
+ "Ctrl-Home": "goDocStart", "Ctrl-End": "goDocEnd", "Ctrl-Up": "goLineUp", "Ctrl-Down": "goLineDown",
+ "Ctrl-Left": "goGroupLeft", "Ctrl-Right": "goGroupRight", "Alt-Left": "goLineStart", "Alt-Right": "goLineEnd",
+ "Ctrl-Backspace": "delGroupBefore", "Ctrl-Delete": "delGroupAfter", "Ctrl-S": "save", "Ctrl-F": "find",
+ "Ctrl-G": "findNext", "Shift-Ctrl-G": "findPrev", "Shift-Ctrl-F": "replace", "Shift-Ctrl-R": "replaceAll",
+ "Ctrl-[": "indentLess", "Ctrl-]": "indentMore",
+ "Ctrl-U": "undoSelection", "Shift-Ctrl-U": "redoSelection", "Alt-U": "redoSelection",
+ fallthrough: "basic"
+ };
+ // Very basic readline/emacs-style bindings, which are standard on Mac.
+ keyMap.emacsy = {
+ "Ctrl-F": "goCharRight", "Ctrl-B": "goCharLeft", "Ctrl-P": "goLineUp", "Ctrl-N": "goLineDown",
+ "Alt-F": "goWordRight", "Alt-B": "goWordLeft", "Ctrl-A": "goLineStart", "Ctrl-E": "goLineEnd",
+ "Ctrl-V": "goPageDown", "Shift-Ctrl-V": "goPageUp", "Ctrl-D": "delCharAfter", "Ctrl-H": "delCharBefore",
+ "Alt-D": "delWordAfter", "Alt-Backspace": "delWordBefore", "Ctrl-K": "killLine", "Ctrl-T": "transposeChars"
+ };
+ keyMap.macDefault = {
+ "Cmd-A": "selectAll", "Cmd-D": "deleteLine", "Cmd-Z": "undo", "Shift-Cmd-Z": "redo", "Cmd-Y": "redo",
+ "Cmd-Home": "goDocStart", "Cmd-Up": "goDocStart", "Cmd-End": "goDocEnd", "Cmd-Down": "goDocEnd", "Alt-Left": "goGroupLeft",
+ "Alt-Right": "goGroupRight", "Cmd-Left": "goLineLeft", "Cmd-Right": "goLineRight", "Alt-Backspace": "delGroupBefore",
+ "Ctrl-Alt-Backspace": "delGroupAfter", "Alt-Delete": "delGroupAfter", "Cmd-S": "save", "Cmd-F": "find",
+ "Cmd-G": "findNext", "Shift-Cmd-G": "findPrev", "Cmd-Alt-F": "replace", "Shift-Cmd-Alt-F": "replaceAll",
+ "Cmd-[": "indentLess", "Cmd-]": "indentMore", "Cmd-Backspace": "delWrappedLineLeft", "Cmd-Delete": "delWrappedLineRight",
+ "Cmd-U": "undoSelection", "Shift-Cmd-U": "redoSelection", "Ctrl-Up": "goDocStart", "Ctrl-Down": "goDocEnd",
+ fallthrough: ["basic", "emacsy"]
+ };
+ keyMap["default"] = mac ? keyMap.macDefault : keyMap.pcDefault;
+
+ // KEYMAP DISPATCH
+
+ function normalizeKeyName(name) {
+ var parts = name.split(/-(?!$)/), name = parts[parts.length - 1];
+ var alt, ctrl, shift, cmd;
+ for (var i = 0; i < parts.length - 1; i++) {
+ var mod = parts[i];
+ if (/^(cmd|meta|m)$/i.test(mod)) cmd = true;
+ else if (/^a(lt)?$/i.test(mod)) alt = true;
+ else if (/^(c|ctrl|control)$/i.test(mod)) ctrl = true;
+ else if (/^s(hift)$/i.test(mod)) shift = true;
+ else throw new Error("Unrecognized modifier name: " + mod);
+ }
+ if (alt) name = "Alt-" + name;
+ if (ctrl) name = "Ctrl-" + name;
+ if (cmd) name = "Cmd-" + name;
+ if (shift) name = "Shift-" + name;
+ return name;
+ }
+
+ // This is a kludge to keep keymaps mostly working as raw objects
+ // (backwards compatibility) while at the same time support features
+ // like normalization and multi-stroke key bindings. It compiles a
+ // new normalized keymap, and then updates the old object to reflect
+ // this.
+ CodeMirror.normalizeKeyMap = function(keymap) {
+ var copy = {};
+ for (var keyname in keymap) if (keymap.hasOwnProperty(keyname)) {
+ var value = keymap[keyname];
+ if (/^(name|fallthrough|(de|at)tach)$/.test(keyname)) continue;
+ if (value == "...") { delete keymap[keyname]; continue; }
+
+ var keys = map(keyname.split(" "), normalizeKeyName);
+ for (var i = 0; i < keys.length; i++) {
+ var val, name;
+ if (i == keys.length - 1) {
+ name = keyname;
+ val = value;
+ } else {
+ name = keys.slice(0, i + 1).join(" ");
+ val = "...";
+ }
+ var prev = copy[name];
+ if (!prev) copy[name] = val;
+ else if (prev != val) throw new Error("Inconsistent bindings for " + name);
+ }
+ delete keymap[keyname];
+ }
+ for (var prop in copy) keymap[prop] = copy[prop];
+ return keymap;
+ };
+
+ var lookupKey = CodeMirror.lookupKey = function(key, map, handle) {
+ map = getKeyMap(map);
+ var found = map.call ? map.call(key) : map[key];
+ if (found === false) return "nothing";
+ if (found === "...") return "multi";
+ if (found != null && handle(found)) return "handled";
+
+ if (map.fallthrough) {
+ if (Object.prototype.toString.call(map.fallthrough) != "[object Array]")
+ return lookupKey(key, map.fallthrough, handle);
+ for (var i = 0; i < map.fallthrough.length; i++) {
+ var result = lookupKey(key, map.fallthrough[i], handle);
+ if (result) return result;
+ }
+ }
+ };
+
+ // Modifier key presses don't count as 'real' key presses for the
+ // purpose of keymap fallthrough.
+ var isModifierKey = CodeMirror.isModifierKey = function(value) {
+ var name = typeof value == "string" ? value : keyNames[value.keyCode];
+ return name == "Ctrl" || name == "Alt" || name == "Shift" || name == "Mod";
+ };
+
+ // Look up the name of a key as indicated by an event object.
+ var keyName = CodeMirror.keyName = function(event, noShift) {
+ if (presto && event.keyCode == 34 && event["char"]) return false;
+ var base = keyNames[event.keyCode], name = base;
+ if (name == null || event.altGraphKey) return false;
+ if (event.altKey && base != "Alt") name = "Alt-" + name;
+ if ((flipCtrlCmd ? event.metaKey : event.ctrlKey) && base != "Ctrl") name = "Ctrl-" + name;
+ if ((flipCtrlCmd ? event.ctrlKey : event.metaKey) && base != "Cmd") name = "Cmd-" + name;
+ if (!noShift && event.shiftKey && base != "Shift") name = "Shift-" + name;
+ return name;
+ };
+
+ function getKeyMap(val) {
+ return typeof val == "string" ? keyMap[val] : val;
+ }
+
+ // FROMTEXTAREA
+
+ CodeMirror.fromTextArea = function(textarea, options) {
+ if (!options) options = {};
+ options.value = textarea.value;
+ if (!options.tabindex && textarea.tabindex)
+ options.tabindex = textarea.tabindex;
+ if (!options.placeholder && textarea.placeholder)
+ options.placeholder = textarea.placeholder;
+ // Set autofocus to true if this textarea is focused, or if it has
+ // autofocus and no other element is focused.
+ if (options.autofocus == null) {
+ var hasFocus = activeElt();
+ options.autofocus = hasFocus == textarea ||
+ textarea.getAttribute("autofocus") != null && hasFocus == document.body;
+ }
+
+ function save() {textarea.value = cm.getValue();}
+ if (textarea.form) {
+ on(textarea.form, "submit", save);
+ // Deplorable hack to make the submit method do the right thing.
+ if (!options.leaveSubmitMethodAlone) {
+ var form = textarea.form, realSubmit = form.submit;
+ try {
+ var wrappedSubmit = form.submit = function() {
+ save();
+ form.submit = realSubmit;
+ form.submit();
+ form.submit = wrappedSubmit;
+ };
+ } catch(e) {}
+ }
+ }
+
+ textarea.style.display = "none";
+ var cm = CodeMirror(function(node) {
+ textarea.parentNode.insertBefore(node, textarea.nextSibling);
+ }, options);
+ cm.save = save;
+ cm.getTextArea = function() { return textarea; };
+ cm.toTextArea = function() {
+ cm.toTextArea = isNaN; // Prevent this from being ran twice
+ save();
+ textarea.parentNode.removeChild(cm.getWrapperElement());
+ textarea.style.display = "";
+ if (textarea.form) {
+ off(textarea.form, "submit", save);
+ if (typeof textarea.form.submit == "function")
+ textarea.form.submit = realSubmit;
+ }
+ };
+ return cm;
+ };
+
  // STRING STREAM

  // Fed to the mode parsers, provides helper functions to make
  // parsers more succinct.

  // Wraps a single line of text. pos is the tokenizer's current
  // position, start the beginning of the token being read. lineStart
  // is non-zero only while hideFirstChars is in effect.
  var StringStream = CodeMirror.StringStream = function(string, tabSize) {
    this.pos = this.start = 0;
    this.string = string;
    this.tabSize = tabSize || 8;
    // Cache for column(): last position/value handed to countColumn.
    this.lastColumnPos = this.lastColumnValue = 0;
    this.lineStart = 0;
  };

  StringStream.prototype = {
    // True when the position is at the end of the line.
    eol: function() {return this.pos >= this.string.length;},
    // True when the position is at the (visible) start of the line.
    sol: function() {return this.pos == this.lineStart;},
    // The next character, without consuming it (undefined at eol).
    peek: function() {return this.string.charAt(this.pos) || undefined;},
    // Consume and return the next character (undefined at eol).
    next: function() {
      if (this.pos < this.string.length)
        return this.string.charAt(this.pos++);
    },
    // Consume the next character only when it matches the given
    // character, regexp, or predicate. Returns it when consumed.
    eat: function(match) {
      var ch = this.string.charAt(this.pos);
      if (typeof match == "string") var ok = ch == match;
      else var ok = ch && (match.test ? match.test(ch) : match(ch));
      if (ok) {++this.pos; return ch;}
    },
    // Repeatedly eat() matching characters; true if any were consumed.
    eatWhile: function(match) {
      var start = this.pos;
      while (this.eat(match)){}
      return this.pos > start;
    },
    // Consume whitespace (incl. non-breaking space); true if any.
    eatSpace: function() {
      var start = this.pos;
      while (/[\s\u00a0]/.test(this.string.charAt(this.pos))) ++this.pos;
      return this.pos > start;
    },
    // Move the position to the end of the line.
    skipToEnd: function() {this.pos = this.string.length;},
    // Advance to the next occurrence of ch; true when found.
    skipTo: function(ch) {
      var found = this.string.indexOf(ch, this.pos);
      if (found > -1) {this.pos = found; return true;}
    },
    // Move the position back by n characters.
    backUp: function(n) {this.pos -= n;},
    // Tab-expanded column of the token start, relative to lineStart.
    // Uses the lastColumnPos/lastColumnValue cache to avoid rescanning
    // the line from the beginning on every call.
    column: function() {
      if (this.lastColumnPos < this.start) {
        this.lastColumnValue = countColumn(this.string, this.start, this.tabSize, this.lastColumnPos, this.lastColumnValue);
        this.lastColumnPos = this.start;
      }
      return this.lastColumnValue - (this.lineStart ? countColumn(this.string, this.lineStart, this.tabSize) : 0);
    },
    // Tab-expanded indentation of the line, relative to lineStart.
    indentation: function() {
      return countColumn(this.string, null, this.tabSize) -
        (this.lineStart ? countColumn(this.string, this.lineStart, this.tabSize) : 0);
    },
    // Match a string (optionally case-insensitively) or a regexp
    // anchored at the current position; consumes the match unless
    // consume === false. Returns a truthy match result on success.
    match: function(pattern, consume, caseInsensitive) {
      if (typeof pattern == "string") {
        var cased = function(str) {return caseInsensitive ? str.toLowerCase() : str;};
        var substr = this.string.substr(this.pos, pattern.length);
        if (cased(substr) == cased(pattern)) {
          if (consume !== false) this.pos += pattern.length;
          return true;
        }
      } else {
        var match = this.string.slice(this.pos).match(pattern);
        // A regexp match that doesn't start at the position is ignored.
        if (match && match.index > 0) return null;
        if (match && consume !== false) this.pos += match[0].length;
        return match;
      }
    },
    // The token consumed so far (from start up to pos).
    current: function(){return this.string.slice(this.start, this.pos);},
    // Run inner() while pretending the first n characters don't exist.
    hideFirstChars: function(n, inner) {
      this.lineStart += n;
      try { return inner(); }
      finally { this.lineStart -= n; }
    }
  };
+
  // TEXTMARKERS

  // Created with markText and setBookmark methods. A TextMarker is a
  // handle that can be used to clear or find a marked position in the
  // document. Line objects hold arrays (markedSpans) containing
  // {from, to, marker} object pointing to such marker objects, and
  // indicating that such a marker is present on that line. Multiple
  // lines may point to the same marker when it spans across lines.
  // The spans will have null for their from/to properties when the
  // marker continues beyond the start/end of the line. Markers have
  // links back to the lines they currently touch.

  var TextMarker = CodeMirror.TextMarker = function(doc, type) {
    this.lines = [];
    this.type = type;
    this.doc = doc;
  };
  eventMixin(TextMarker);

  // Clear the marker.
  TextMarker.prototype.clear = function() {
    if (this.explicitlyCleared) return;
    // Wrap the work in an operation unless we're already inside one.
    var cm = this.doc.cm, withOp = cm && !cm.curOp;
    if (withOp) startOperation(cm);
    // Only compute the position for the "clear" event when someone is
    // actually listening.
    if (hasHandler(this, "clear")) {
      var found = this.find();
      if (found) signalLater(this, "clear", found.from, found.to);
    }
    // min/max track the line range (for collapsed markers) that needs
    // a full redraw.
    var min = null, max = null;
    for (var i = 0; i < this.lines.length; ++i) {
      var line = this.lines[i];
      var span = getMarkedSpanFor(line.markedSpans, this);
      if (cm && !this.collapsed) regLineChange(cm, lineNo(line), "text");
      else if (cm) {
        if (span.to != null) max = lineNo(line);
        if (span.from != null) min = lineNo(line);
      }
      line.markedSpans = removeMarkedSpan(line.markedSpans, span);
      // A line that is no longer hidden must get its height back.
      if (span.from == null && this.collapsed && !lineIsHidden(this.doc, line) && cm)
        updateLineHeight(line, textHeight(cm.display));
    }
    // Un-collapsing lines may reveal a line wide enough to become the
    // new widest line; recheck the candidates.
    if (cm && this.collapsed && !cm.options.lineWrapping) for (var i = 0; i < this.lines.length; ++i) {
      var visual = visualLine(this.lines[i]), len = lineLength(visual);
      if (len > cm.display.maxLineLength) {
        cm.display.maxLine = visual;
        cm.display.maxLineLength = len;
        cm.display.maxLineChanged = true;
      }
    }

    if (min != null && cm && this.collapsed) regChange(cm, min, max + 1);
    this.lines.length = 0;
    this.explicitlyCleared = true;
    // An atomic marker may have been what made the document read-only.
    if (this.atomic && this.doc.cantEdit) {
      this.doc.cantEdit = false;
      if (cm) reCheckSelection(cm.doc);
    }
    if (cm) signalLater(cm, "markerCleared", cm, this);
    if (withOp) endOperation(cm);
    // Clearing one member of a shared marker clears the whole group.
    if (this.parent) this.parent.clear();
  };

  // Find the position of the marker in the document. Returns a {from,
  // to} object by default. Side can be passed to get a specific side
  // -- 0 (both), -1 (left), or 1 (right). When lineObj is true, the
  // Pos objects returned contain a line object, rather than a line
  // number (used to prevent looking up the same line twice).
  TextMarker.prototype.find = function(side, lineObj) {
    // Bookmarks occupy a single position; default to their right side.
    if (side == null && this.type == "bookmark") side = 1;
    var from, to;
    for (var i = 0; i < this.lines.length; ++i) {
      var line = this.lines[i];
      var span = getMarkedSpanFor(line.markedSpans, this);
      if (span.from != null) {
        from = Pos(lineObj ? line : lineNo(line), span.from);
        if (side == -1) return from;
      }
      if (span.to != null) {
        to = Pos(lineObj ? line : lineNo(line), span.to);
        if (side == 1) return to;
      }
    }
    return from && {from: from, to: to};
  };

  // Signals that the marker's widget changed, and surrounding layout
  // should be recomputed.
  TextMarker.prototype.changed = function() {
    var pos = this.find(-1, true), widget = this, cm = this.doc.cm;
    if (!pos || !cm) return;
    runInOp(cm, function() {
      var line = pos.line, lineN = lineNo(pos.line);
      // Drop cached measurements for the view showing this line.
      var view = findViewForLine(cm, lineN);
      if (view) {
        clearLineMeasurementCacheFor(view);
        cm.curOp.selectionChanged = cm.curOp.forceUpdate = true;
      }
      cm.curOp.updateMaxLine = true;
      // Re-measure the widget and adjust line height by the delta.
      if (!lineIsHidden(widget.doc, line) && widget.height != null) {
        var oldHeight = widget.height;
        widget.height = null;
        var dHeight = widgetHeight(widget) - oldHeight;
        if (dHeight)
          updateLineHeight(line, line.height + dHeight);
      }
    });
  };

  // Register the marker on a line. A collapsed marker gaining its
  // first line may un-hide content, so note it on the operation.
  TextMarker.prototype.attachLine = function(line) {
    if (!this.lines.length && this.doc.cm) {
      var op = this.doc.cm.curOp;
      if (!op.maybeHiddenMarkers || indexOf(op.maybeHiddenMarkers, this) == -1)
        (op.maybeUnhiddenMarkers || (op.maybeUnhiddenMarkers = [])).push(this);
    }
    this.lines.push(line);
  };
  // Unregister the marker from a line. A marker left with no lines
  // may now be hidden, so note it on the operation.
  TextMarker.prototype.detachLine = function(line) {
    this.lines.splice(indexOf(this.lines, line), 1);
    if (!this.lines.length && this.doc.cm) {
      var op = this.doc.cm.curOp;
      (op.maybeHiddenMarkers || (op.maybeHiddenMarkers = [])).push(this);
    }
  };
+
  // Collapsed markers have unique ids, in order to be able to order
  // them, which is needed for uniquely determining an outer marker
  // when they overlap (they may nest, but not partially overlap).
  var nextMarkerId = 0;

  // Create a marker, wire it up to the right lines, and tell the
  // editor which parts of the display need to be refreshed.
  function markText(doc, from, to, options, type) {
    // Shared markers (across linked documents) are handled separately
    // (markTextShared will call out to this again, once per
    // document).
    if (options && options.shared) return markTextShared(doc, from, to, options, type);
    // Ensure we are in an operation.
    if (doc.cm && !doc.cm.curOp) return operation(doc.cm, markText)(doc, from, to, options, type);

    var marker = new TextMarker(doc, type), diff = cmp(from, to);
    if (options) copyObj(options, marker, false);
    // Don't connect empty markers unless clearWhenEmpty is false
    if (diff > 0 || diff == 0 && marker.clearWhenEmpty !== false)
      return marker;
    if (marker.replacedWith) {
      // Showing up as a widget implies collapsed (widget replaces text)
      marker.collapsed = true;
      marker.widgetNode = elt("span", [marker.replacedWith], "CodeMirror-widget");
      if (!options.handleMouseEvents) marker.widgetNode.ignoreEvents = true;
      if (options.insertLeft) marker.widgetNode.insertLeft = true;
    }
    if (marker.collapsed) {
      // Partial overlap with an existing collapsed span is forbidden.
      if (conflictingCollapsedRange(doc, from.line, from, to, marker) ||
          from.line != to.line && conflictingCollapsedRange(doc, to.line, from, to, marker))
        throw new Error("Inserting collapsed marker partially overlapping an existing one");
      sawCollapsedSpans = true;
    }

    if (marker.addToHistory)
      addChangeToHistory(doc, {from: from, to: to, origin: "markText"}, doc.sel, NaN);

    // Attach a span for this marker to every line it touches.
    var curLine = from.line, cm = doc.cm, updateMaxLine;
    doc.iter(curLine, to.line + 1, function(line) {
      // Collapsing (part of) the widest line invalidates the cached
      // widest-line information.
      if (cm && marker.collapsed && !cm.options.lineWrapping && visualLine(line) == cm.display.maxLine)
        updateMaxLine = true;
      if (marker.collapsed && curLine != from.line) updateLineHeight(line, 0);
      addMarkedSpan(line, new MarkedSpan(marker,
                                         curLine == from.line ? from.ch : null,
                                         curLine == to.line ? to.ch : null));
      ++curLine;
    });
    // lineIsHidden depends on the presence of the spans, so needs a second pass
    if (marker.collapsed) doc.iter(from.line, to.line + 1, function(line) {
      if (lineIsHidden(doc, line)) updateLineHeight(line, 0);
    });

    if (marker.clearOnEnter) on(marker, "beforeCursorEnter", function() { marker.clear(); });

    if (marker.readOnly) {
      sawReadOnlySpans = true;
      // Existing history can't be trusted to respect the read-only
      // range, so discard it.
      if (doc.history.done.length || doc.history.undone.length)
        doc.clearHistory();
    }
    if (marker.collapsed) {
      marker.id = ++nextMarkerId;
      marker.atomic = true;
    }
    if (cm) {
      // Sync editor state
      if (updateMaxLine) cm.curOp.updateMaxLine = true;
      if (marker.collapsed)
        regChange(cm, from.line, to.line + 1);
      else if (marker.className || marker.title || marker.startStyle || marker.endStyle)
        for (var i = from.line; i <= to.line; i++) regLineChange(cm, i, "text");
      if (marker.atomic) reCheckSelection(cm.doc);
      signalLater(cm, "markerAdded", cm, marker);
    }
    return marker;
  }
+
+ // SHARED TEXTMARKERS
+
+ // A shared marker spans multiple linked documents. It is
+ // implemented as a meta-marker-object controlling multiple normal
+ // markers.
+ var SharedTextMarker = CodeMirror.SharedTextMarker = function(markers, primary) {
+ this.markers = markers;
+ this.primary = primary;
+ for (var i = 0; i < markers.length; ++i)
+ markers[i].parent = this;
+ };
+ eventMixin(SharedTextMarker);
+
+ SharedTextMarker.prototype.clear = function() {
+ if (this.explicitlyCleared) return;
+ this.explicitlyCleared = true;
+ for (var i = 0; i < this.markers.length; ++i)
+ this.markers[i].clear();
+ signalLater(this, "clear");
+ };
+ SharedTextMarker.prototype.find = function(side, lineObj) {
+ return this.primary.find(side, lineObj);
+ };
+
  // Implements markText for options.shared: mark the given document,
  // then every linked document, and pick as primary the marker in the
  // topmost (parent) document.
  function markTextShared(doc, from, to, options, type) {
    options = copyObj(options);
    options.shared = false;
    var markers = [markText(doc, from, to, options, type)], primary = markers[0];
    var widget = options.widgetNode;
    linkedDocs(doc, function(doc) {
      // Each document needs its own copy of the widget DOM node.
      if (widget) options.widgetNode = widget.cloneNode(true);
      markers.push(markText(doc, clipPos(doc, from), clipPos(doc, to), options, type));
      // A document that has a parent link is not the top of the tree;
      // only the topmost document's marker becomes the primary.
      for (var i = 0; i < doc.linked.length; ++i)
        if (doc.linked[i].isParent) return;
      primary = lst(markers);
    });
    return new SharedTextMarker(markers, primary);
  }
+
+ function findSharedMarkers(doc) {
+ return doc.findMarks(Pos(doc.first, 0), doc.clipPos(Pos(doc.lastLine())),
+ function(m) { return m.parent; });
+ }
+
  // Give a (newly linked) document its own sub-markers for each of the
  // given shared markers, cloning the primary marker's options.
  function copySharedMarkers(doc, markers) {
    for (var i = 0; i < markers.length; i++) {
      var marker = markers[i], pos = marker.find();
      var mFrom = doc.clipPos(pos.from), mTo = doc.clipPos(pos.to);
      // Only mark a non-empty range (clipping may have collapsed it).
      if (cmp(mFrom, mTo)) {
        var subMark = markText(doc, mFrom, mTo, marker.primary, marker.primary.type);
        marker.markers.push(subMark);
        subMark.parent = marker;
      }
    }
  }
+
+ function detachSharedMarkers(markers) {
+ for (var i = 0; i < markers.length; i++) {
+ var marker = markers[i], linked = [marker.primary.doc];;
+ linkedDocs(marker.primary.doc, function(d) { linked.push(d); });
+ for (var j = 0; j < marker.markers.length; j++) {
+ var subMarker = marker.markers[j];
+ if (indexOf(linked, subMarker.doc) == -1) {
+ subMarker.parent = null;
+ marker.markers.splice(j--, 1);
+ }
+ }
+ }
+ }
+
+ // TEXTMARKER SPANS
+
+ function MarkedSpan(marker, from, to) {
+ this.marker = marker;
+ this.from = from; this.to = to;
+ }
+
+ // Search an array of spans for a span matching the given marker.
+ function getMarkedSpanFor(spans, marker) {
+ if (spans) for (var i = 0; i < spans.length; ++i) {
+ var span = spans[i];
+ if (span.marker == marker) return span;
+ }
+ }
+ // Remove a span from an array, returning undefined if no spans are
+ // left (we don't store arrays for lines without spans).
+ function removeMarkedSpan(spans, span) {
+ for (var r, i = 0; i < spans.length; ++i)
+ if (spans[i] != span) (r || (r = [])).push(spans[i]);
+ return r;
+ }
+ // Add a span to a line.
+ function addMarkedSpan(line, span) {
+ line.markedSpans = line.markedSpans ? line.markedSpans.concat([span]) : [span];
+ span.marker.attachLine(line);
+ }
+
  // Used for the algorithm that adjusts markers for a change in the
  // document. These functions cut an array of spans at a given
  // character position, returning an array of remaining chunks (or
  // undefined if nothing remains).

  // The pieces of `old` that lie (at least partly) before startCh.
  // A span that continues past the cut gets a null .to.
  function markedSpansBefore(old, startCh, isInsert) {
    if (old) for (var i = 0, nw; i < old.length; ++i) {
      var span = old[i], marker = span.marker;
      var startsBefore = span.from == null || (marker.inclusiveLeft ? span.from <= startCh : span.from < startCh);
      // A bookmark exactly at the cut stays on this side unless the
      // change is an insertion and the bookmark prefers the left of
      // the inserted text (insertLeft).
      if (startsBefore || span.from == startCh && marker.type == "bookmark" && (!isInsert || !span.marker.insertLeft)) {
        var endsAfter = span.to == null || (marker.inclusiveRight ? span.to >= startCh : span.to > startCh);
        (nw || (nw = [])).push(new MarkedSpan(marker, span.from, endsAfter ? null : span.to));
      }
    }
    return nw;
  }
  // The pieces of `old` that lie (at least partly) after endCh, with
  // offsets rebased so that endCh becomes position 0. A span that
  // started before the cut gets a null .from.
  function markedSpansAfter(old, endCh, isInsert) {
    if (old) for (var i = 0, nw; i < old.length; ++i) {
      var span = old[i], marker = span.marker;
      var endsAfter = span.to == null || (marker.inclusiveRight ? span.to >= endCh : span.to > endCh);
      // Mirror of the bookmark rule above, for insertLeft bookmarks.
      if (endsAfter || span.from == endCh && marker.type == "bookmark" && (!isInsert || span.marker.insertLeft)) {
        var startsBefore = span.from == null || (marker.inclusiveLeft ? span.from <= endCh : span.from < endCh);
        (nw || (nw = [])).push(new MarkedSpan(marker, startsBefore ? null : span.from - endCh,
                                              span.to == null ? null : span.to - endCh));
      }
    }
    return nw;
  }
+
  // Given a change object, compute the new set of marker spans that
  // cover the line in which the change took place. Removes spans
  // entirely within the change, reconnects spans belonging to the
  // same marker that appear on both sides of the change, and cuts off
  // spans partially within the change. Returns an array of span
  // arrays with one element for each line in (after) the change.
  function stretchSpansOverChange(doc, change) {
    var oldFirst = isLine(doc, change.from.line) && getLine(doc, change.from.line).markedSpans;
    var oldLast = isLine(doc, change.to.line) && getLine(doc, change.to.line).markedSpans;
    if (!oldFirst && !oldLast) return null;

    var startCh = change.from.ch, endCh = change.to.ch, isInsert = cmp(change.from, change.to) == 0;
    // Get the spans that 'stick out' on both sides
    var first = markedSpansBefore(oldFirst, startCh, isInsert);
    var last = markedSpansAfter(oldLast, endCh, isInsert);

    // Next, merge those two ends
    // offset is where end-side positions land on the (new) last line.
    var sameLine = change.text.length == 1, offset = lst(change.text).length + (sameLine ? startCh : 0);
    if (first) {
      // Fix up .to properties of first
      for (var i = 0; i < first.length; ++i) {
        var span = first[i];
        if (span.to == null) {
          var found = getMarkedSpanFor(last, span.marker);
          // A span that doesn't survive on the far side now ends at
          // the cut; one that does is reconnected (when on one line).
          if (!found) span.to = startCh;
          else if (sameLine) span.to = found.to == null ? null : found.to + offset;
        }
      }
    }
    if (last) {
      // Fix up .from in last (or move them into first in case of sameLine)
      for (var i = 0; i < last.length; ++i) {
        var span = last[i];
        if (span.to != null) span.to += offset;
        if (span.from == null) {
          var found = getMarkedSpanFor(first, span.marker);
          if (!found) {
            span.from = offset;
            if (sameLine) (first || (first = [])).push(span);
          }
        } else {
          span.from += offset;
          if (sameLine) (first || (first = [])).push(span);
        }
      }
    }
    // Make sure we didn't create any zero-length spans
    if (first) first = clearEmptySpans(first);
    if (last && last != first) last = clearEmptySpans(last);

    var newMarkers = [first];
    if (!sameLine) {
      // Fill gap with whole-line-spans
      var gap = change.text.length - 2, gapMarkers;
      if (gap > 0 && first)
        for (var i = 0; i < first.length; ++i)
          if (first[i].to == null)
            (gapMarkers || (gapMarkers = [])).push(new MarkedSpan(first[i].marker, null, null));
      for (var i = 0; i < gap; ++i)
        newMarkers.push(gapMarkers);
      newMarkers.push(last);
    }
    return newMarkers;
  }
+
+ // Remove spans that are empty and don't have a clearWhenEmpty
+ // option of false.
+ function clearEmptySpans(spans) {
+ for (var i = 0; i < spans.length; ++i) {
+ var span = spans[i];
+ if (span.from != null && span.from == span.to && span.marker.clearWhenEmpty !== false)
+ spans.splice(i--, 1);
+ }
+ if (!spans.length) return null;
+ return spans;
+ }
+
  // Used for un/re-doing changes from the history. Combines the
  // result of computing the existing spans with the set of spans that
  // existed in the history (so that deleting around a span and then
  // undoing brings back the span).
  function mergeOldSpans(doc, change) {
    var old = getOldSpans(doc, change);
    var stretched = stretchSpansOverChange(doc, change);
    if (!old) return stretched;
    if (!stretched) return old;

    // Merge line by line, never adding two spans for the same marker.
    for (var i = 0; i < old.length; ++i) {
      var oldCur = old[i], stretchCur = stretched[i];
      if (oldCur && stretchCur) {
        spans: for (var j = 0; j < stretchCur.length; ++j) {
          var span = stretchCur[j];
          // Skip stretched spans whose marker is already represented.
          for (var k = 0; k < oldCur.length; ++k)
            if (oldCur[k].marker == span.marker) continue spans;
          oldCur.push(span);
        }
      } else if (stretchCur) {
        old[i] = stretchCur;
      }
    }
    return old;
  }
+
+ // Used to 'clip' out readOnly ranges when making a change.
+ function removeReadOnlyRanges(doc, from, to) {
+ var markers = null;
+ doc.iter(from.line, to.line + 1, function(line) {
+ if (line.markedSpans) for (var i = 0; i < line.markedSpans.length; ++i) {
+ var mark = line.markedSpans[i].marker;
+ if (mark.readOnly && (!markers || indexOf(markers, mark) == -1))
+ (markers || (markers = [])).push(mark);
+ }
+ });
+ if (!markers) return null;
+ var parts = [{from: from, to: to}];
+ for (var i = 0; i < markers.length; ++i) {
+ var mk = markers[i], m = mk.find(0);
+ for (var j = 0; j < parts.length; ++j) {
+ var p = parts[j];
+ if (cmp(p.to, m.from) < 0 || cmp(p.from, m.to) > 0) continue;
+ var newParts = [j, 1], dfrom = cmp(p.from, m.from), dto = cmp(p.to, m.to);
+ if (dfrom < 0 || !mk.inclusiveLeft && !dfrom)
+ newParts.push({from: p.from, to: m.from});
+ if (dto > 0 || !mk.inclusiveRight && !dto)
+ newParts.push({from: m.to, to: p.to});
+ parts.splice.apply(parts, newParts);
+ j += newParts.length - 1;
+ }
+ }
+ return parts;
+ }
+
+ // Connect or disconnect spans from a line.
+ function detachMarkedSpans(line) {
+ var spans = line.markedSpans;
+ if (!spans) return;
+ for (var i = 0; i < spans.length; ++i)
+ spans[i].marker.detachLine(line);
+ line.markedSpans = null;
+ }
+ function attachMarkedSpans(line, spans) {
+ if (!spans) return;
+ for (var i = 0; i < spans.length; ++i)
+ spans[i].marker.attachLine(line);
+ line.markedSpans = spans;
+ }
+
+ // Helpers used when computing which overlapping collapsed span
+ // counts as the larger one.
+ function extraLeft(marker) { return marker.inclusiveLeft ? -1 : 0; }
+ function extraRight(marker) { return marker.inclusiveRight ? 1 : 0; }
+
  // Returns a number indicating which of two overlapping collapsed
  // spans is larger (and thus includes the other). Falls back to
  // comparing ids when the spans cover exactly the same range.
  function compareCollapsedMarkers(a, b) {
    // Covering more lines wins outright.
    var lenDiff = a.lines.length - b.lines.length;
    if (lenDiff != 0) return lenDiff;
    var aPos = a.find(), bPos = b.find();
    // An earlier start (or more inclusive left edge) wins...
    var fromCmp = cmp(aPos.from, bPos.from) || extraLeft(a) - extraLeft(b);
    if (fromCmp) return -fromCmp;
    // ...as does a later end (or more inclusive right edge).
    var toCmp = cmp(aPos.to, bPos.to) || extraRight(a) - extraRight(b);
    if (toCmp) return toCmp;
    // Identical ranges: the older marker (lower id) counts as larger.
    return b.id - a.id;
  }
+
  // Find out whether a line ends or starts in a collapsed span. If
  // so, return the marker for that span.
  function collapsedSpanAtSide(line, start) {
    var sps = sawCollapsedSpans && line.markedSpans, found;
    if (sps) for (var sp, i = 0; i < sps.length; ++i) {
      sp = sps[i];
      // A null from/to means the span extends past that edge of the
      // line; when several do, keep the largest (outermost) marker.
      if (sp.marker.collapsed && (start ? sp.from : sp.to) == null &&
          (!found || compareCollapsedMarkers(found, sp.marker) < 0))
        found = sp.marker;
    }
    return found;
  }
  function collapsedSpanAtStart(line) { return collapsedSpanAtSide(line, true); }
  function collapsedSpanAtEnd(line) { return collapsedSpanAtSide(line, false); }
+
  // Test whether there exists a collapsed span that partially
  // overlaps (covers the start or end, but not both) of a new span.
  // Such overlap is not allowed.
  function conflictingCollapsedRange(doc, lineNo, from, to, marker) {
    var line = getLine(doc, lineNo);
    var sps = sawCollapsedSpans && line.markedSpans;
    if (sps) for (var i = 0; i < sps.length; ++i) {
      var sp = sps[i];
      if (!sp.marker.collapsed) continue;
      var found = sp.marker.find(0);
      // Compare the edges, using inclusiveness to break exact ties.
      var fromCmp = cmp(found.from, from) || extraLeft(sp.marker) - extraLeft(marker);
      var toCmp = cmp(found.to, to) || extraRight(sp.marker) - extraRight(marker);
      // Proper nesting (either range containing the other) is fine.
      if (fromCmp >= 0 && toCmp <= 0 || fromCmp <= 0 && toCmp >= 0) continue;
      // The ranges cross; it's a conflict if they actually touch.
      if (fromCmp <= 0 && (cmp(found.to, from) > 0 || (sp.marker.inclusiveRight && marker.inclusiveLeft)) ||
          fromCmp >= 0 && (cmp(found.from, to) < 0 || (sp.marker.inclusiveLeft && marker.inclusiveRight)))
        return true;
    }
  }
+
+ // A visual line is a line as drawn on the screen. Folding, for
+ // example, can cause multiple logical lines to appear on the same
+ // visual line. This finds the start of the visual line that the
+ // given line is part of (usually that is the line itself).
+ function visualLine(line) {
+ var merged;
+ while (merged = collapsedSpanAtStart(line))
+ line = merged.find(-1, true).line;
+ return line;
+ }
+
+ // Returns an array of logical lines that continue the visual line
+ // started by the argument, or undefined if there are no such lines.
+ function visualLineContinued(line) {
+ var merged, lines;
+ while (merged = collapsedSpanAtEnd(line)) {
+ line = merged.find(1, true).line;
+ (lines || (lines = [])).push(line);
+ }
+ return lines;
+ }
+
+ // Get the line number of the start of the visual line that the
+ // given line number is part of.
+ function visualLineNo(doc, lineN) {
+ var line = getLine(doc, lineN), vis = visualLine(line);
+ if (line == vis) return lineN;
+ return lineNo(vis);
+ }
+ // Get the line number of the start of the next visual line after
+ // the given line.
+ function visualLineEndNo(doc, lineN) {
+ if (lineN > doc.lastLine()) return lineN;
+ var line = getLine(doc, lineN), merged;
+ if (!lineIsHidden(doc, line)) return lineN;
+ while (merged = collapsedSpanAtEnd(line))
+ line = merged.find(1, true).line;
+ return lineNo(line) + 1;
+ }
+
  // Compute whether a line is hidden. Lines count as hidden when they
  // are part of a visual line that starts with another line, or when
  // they are entirely covered by collapsed, non-widget span.
  function lineIsHidden(doc, line) {
    var sps = sawCollapsedSpans && line.markedSpans;
    if (sps) for (var sp, i = 0; i < sps.length; ++i) {
      sp = sps[i];
      if (!sp.marker.collapsed) continue;
      // A span continuing from a previous line hides the line outright.
      if (sp.from == null) return true;
      if (sp.marker.widgetNode) continue;
      // A span starting at character 0 hides the line if it (or a
      // chain of adjacent collapsed spans) covers it to the end.
      if (sp.from == 0 && sp.marker.inclusiveLeft && lineIsHiddenInner(doc, line, sp))
        return true;
    }
  }
  // Check whether span (starting on this line) covers the rest of the
  // line, possibly by chaining into further collapsed spans.
  function lineIsHiddenInner(doc, line, span) {
    if (span.to == null) {
      // Span continues onto a later line; recurse where it ends.
      var end = span.marker.find(1, true);
      return lineIsHiddenInner(doc, end.line, getMarkedSpanFor(end.line.markedSpans, span.marker));
    }
    if (span.marker.inclusiveRight && span.to == line.text.length)
      return true;
    // Look for another collapsed span starting exactly where this one
    // ends that keeps the remainder of the line hidden.
    for (var sp, i = 0; i < line.markedSpans.length; ++i) {
      sp = line.markedSpans[i];
      if (sp.marker.collapsed && !sp.marker.widgetNode && sp.from == span.to &&
          (sp.to == null || sp.to != span.from) &&
          (sp.marker.inclusiveLeft || span.marker.inclusiveRight) &&
          lineIsHiddenInner(doc, line, sp)) return true;
    }
  }
+
  // LINE WIDGETS

  // Line widgets are block elements displayed above or below a line.

  // Constructed via addLineWidget; option properties (coverGutter,
  // noHScroll, insertAt, ...) are copied straight onto the widget.
  var LineWidget = CodeMirror.LineWidget = function(cm, node, options) {
    if (options) for (var opt in options) if (options.hasOwnProperty(opt))
      this[opt] = options[opt];
    this.cm = cm;
    this.node = node;
  };
  eventMixin(LineWidget);

  // Compensate the scroll position when widget height changes above
  // the currently visible area, so the view doesn't appear to jump.
  function adjustScrollWhenAboveVisible(cm, line, diff) {
    if (heightAtLine(line) < ((cm.curOp && cm.curOp.scrollTop) || cm.doc.scrollTop))
      addToScrollPos(cm, null, diff);
  }

  // Remove the widget from its line and give the line its height back.
  LineWidget.prototype.clear = function() {
    var cm = this.cm, ws = this.line.widgets, line = this.line, no = lineNo(line);
    // Line already detached from the document, or no widgets present.
    if (no == null || !ws) return;
    for (var i = 0; i < ws.length; ++i) if (ws[i] == this) ws.splice(i--, 1);
    if (!ws.length) line.widgets = null;
    var height = widgetHeight(this);
    runInOp(cm, function() {
      adjustScrollWhenAboveVisible(cm, line, -height);
      regLineChange(cm, no, "widget");
      updateLineHeight(line, Math.max(0, line.height - height));
    });
  };
  // Notify the editor that the widget's DOM node changed size.
  LineWidget.prototype.changed = function() {
    var oldH = this.height, cm = this.cm, line = this.line;
    this.height = null;
    var diff = widgetHeight(this) - oldH;
    if (!diff) return;
    runInOp(cm, function() {
      cm.curOp.forceUpdate = true;
      adjustScrollWhenAboveVisible(cm, line, diff);
      updateLineHeight(line, line.height + diff);
    });
  };

  // Measure (and cache) the widget's height, temporarily adding the
  // node to the editor's hidden measure element when it isn't in the
  // document yet.
  function widgetHeight(widget) {
    if (widget.height != null) return widget.height;
    if (!contains(document.body, widget.node)) {
      var parentStyle = "position: relative;";
      if (widget.coverGutter)
        parentStyle += "margin-left: -" + widget.cm.getGutterElement().offsetWidth + "px;";
      removeChildrenAndAdd(widget.cm.display.measure, elt("div", [widget.node], null, parentStyle));
    }
    return widget.height = widget.node.offsetHeight;
  }

  // Create a widget for the given line (number or handle), register
  // it, and adjust the line's height and the scroll position.
  function addLineWidget(cm, handle, node, options) {
    var widget = new LineWidget(cm, node, options);
    if (widget.noHScroll) cm.display.alignWidgets = true;
    changeLine(cm.doc, handle, "widget", function(line) {
      var widgets = line.widgets || (line.widgets = []);
      // insertAt is clamped into the existing widget list.
      if (widget.insertAt == null) widgets.push(widget);
      else widgets.splice(Math.min(widgets.length - 1, Math.max(0, widget.insertAt)), 0, widget);
      widget.line = line;
      if (!lineIsHidden(cm.doc, line)) {
        var aboveVisible = heightAtLine(line) < cm.doc.scrollTop;
        updateLineHeight(line, line.height + widgetHeight(widget));
        if (aboveVisible) addToScrollPos(cm, null, widget.height);
        cm.curOp.forceUpdate = true;
      }
      return true;
    });
    return widget;
  }
+
  // LINE DATA STRUCTURE

  // Line objects. These hold state related to a line, including
  // highlighting info (the styles array).
  var Line = CodeMirror.Line = function(text, markedSpans, estimateHeight) {
    this.text = text;
    attachMarkedSpans(this, markedSpans);
    // Height starts out as an estimate; real measurement happens when
    // the line is displayed.
    this.height = estimateHeight ? estimateHeight(this) : 1;
  };
  eventMixin(Line);
  Line.prototype.lineNo = function() { return lineNo(this); };
+
  // Change the content (text, markers) of a line. Automatically
  // invalidates cached information and tries to re-estimate the
  // line's height.
  function updateLine(line, text, markedSpans, estimateHeight) {
    line.text = text;
    // Drop cached parser state, style array, and bidi ordering.
    if (line.stateAfter) line.stateAfter = null;
    if (line.styles) line.styles = null;
    if (line.order != null) line.order = null;
    detachMarkedSpans(line);
    attachMarkedSpans(line, markedSpans);
    var estHeight = estimateHeight ? estimateHeight(line) : 1;
    if (estHeight != line.height) updateLineHeight(line, estHeight);
  }

  // Detach a line from the document tree and its markers.
  // NOTE(review): appears to be invoked when lines are removed from
  // the document -- confirm at the call sites outside this chunk.
  function cleanUpLine(line) {
    line.parent = null;
    detachMarkedSpans(line);
  }
+
+ function extractLineClasses(type, output) {
+ if (type) for (;;) {
+ var lineClass = type.match(/(?:^|\s+)line-(background-)?(\S+)/);
+ if (!lineClass) break;
+ type = type.slice(0, lineClass.index) + type.slice(lineClass.index + lineClass[0].length);
+ var prop = lineClass[1] ? "bgClass" : "textClass";
+ if (output[prop] == null)
+ output[prop] = lineClass[2];
+ else if (!(new RegExp("(?:^|\s)" + lineClass[2] + "(?:$|\s)")).test(output[prop]))
+ output[prop] += " " + lineClass[2];
+ }
+ return type;
+ }
+
+ function callBlankLine(mode, state) {
+ if (mode.blankLine) return mode.blankLine(state);
+ if (!mode.innerMode) return;
+ var inner = CodeMirror.innerMode(mode, state);
+ if (inner.mode.blankLine) return inner.mode.blankLine(inner.state);
+ }
+
+ function readToken(mode, stream, state, inner) {
+ for (var i = 0; i < 10; i++) {
+ if (inner) inner[0] = CodeMirror.innerMode(mode, state).mode;
+ var style = mode.token(stream, state);
+ if (stream.pos > stream.start) return style;
+ }
+ throw new Error("Mode " + mode.name + " failed to advance stream.");
+ }
+
  // Utility for getTokenAt and getLineTokens
  // Tokenizes the line containing pos and returns either the single
  // token covering pos or (with asArray) the array of all tokens on
  // the line. precise is forwarded to getStateBefore.
  function takeToken(cm, pos, precise, asArray) {
    // Package the current token; copy the mode state when the caller
    // may keep tokenizing afterwards.
    function getObj(copy) {
      return {start: stream.start, end: stream.pos,
              string: stream.current(),
              type: style || null,
              state: copy ? copyState(doc.mode, state) : state};
    }

    var doc = cm.doc, mode = doc.mode, style;
    pos = clipPos(doc, pos);
    var line = getLine(doc, pos.line), state = getStateBefore(cm, pos.line, precise);
    var stream = new StringStream(line.text, cm.options.tabSize), tokens;
    if (asArray) tokens = [];
    // Advance token by token until pos is covered (or, for asArray,
    // until the end of the line).
    while ((asArray || stream.pos < pos.ch) && !stream.eol()) {
      stream.start = stream.pos;
      style = readToken(mode, stream, state);
      if (asArray) tokens.push(getObj(true));
    }
    return asArray ? tokens : getObj();
  }
+
+ // Run the given mode's parser over a line, calling f for each token.
+ // f receives (endPosition, styleString); lineClasses collects line-wide
+ // classes emitted by the mode via "line-..." style markers.
+ function runMode(cm, text, mode, state, f, lineClasses, forceToEnd) {
+ var flattenSpans = mode.flattenSpans;
+ if (flattenSpans == null) flattenSpans = cm.options.flattenSpans;
+ var curStart = 0, curStyle = null;
+ var stream = new StringStream(text, cm.options.tabSize), style;
+ // Only allocated when mode classes are requested; readToken stores the
+ // active inner mode in inner[0].
+ var inner = cm.options.addModeClass && [null];
+ if (text == "") extractLineClasses(callBlankLine(mode, state), lineClasses);
+ while (!stream.eol()) {
+ if (stream.pos > cm.options.maxHighlightLength) {
+ // Past the highlight limit: stop styling; optionally still advance
+ // the mode state to the end of the line via processLine.
+ flattenSpans = false;
+ if (forceToEnd) processLine(cm, text, state, stream.pos);
+ stream.pos = text.length;
+ style = null;
+ } else {
+ style = extractLineClasses(readToken(mode, stream, state, inner), lineClasses);
+ }
+ if (inner) {
+ var mName = inner[0].name;
+ if (mName) style = "m-" + (style ? mName + " " + style : mName);
+ }
+ // Emit a token whenever the style changes (or per-token when not flattening).
+ if (!flattenSpans || curStyle != style) {
+ if (curStart < stream.start) f(stream.start, curStyle);
+ curStart = stream.start; curStyle = style;
+ }
+ stream.start = stream.pos;
+ }
+ // Flush the trailing run, split into chunks of at most 50000 characters.
+ while (curStart < stream.pos) {
+ // Webkit seems to refuse to render text nodes longer than 57444 characters
+ var pos = Math.min(stream.pos, curStart + 50000);
+ f(pos, curStyle);
+ curStart = pos;
+ }
+ }
+
+ // Compute a style array (an array starting with a mode generation
+ // -- for invalidation -- followed by pairs of end positions and
+ // style strings), which is used to highlight the tokens on the
+ // line.
+ function highlightLine(cm, line, state, forceToEnd) {
+ // A styles array always starts with a number identifying the
+ // mode/overlays that it is based on (for easy invalidation).
+ var st = [cm.state.modeGen], lineClasses = {};
+ // Compute the base array of styles
+ runMode(cm, line.text, cm.doc.mode, state, function(end, style) {
+ st.push(end, style);
+ }, lineClasses, forceToEnd);
+
+ // Run overlays, adjust style array.
+ for (var o = 0; o < cm.state.overlays.length; ++o) {
+ // Overlay modes are stateless here: 'true' is passed as the state.
+ // i walks the base style array (pairs start at index 1); at tracks the
+ // character position covered so far.
+ var overlay = cm.state.overlays[o], i = 1, at = 0;
+ runMode(cm, line.text, overlay.mode, true, function(end, style) {
+ var start = i;
+ // Ensure there's a token end at the current position, and that i points at it
+ while (at < end) {
+ var i_end = st[i];
+ if (i_end > end)
+ st.splice(i, 1, end, st[i+1], i_end);
+ i += 2;
+ at = Math.min(end, i_end);
+ }
+ if (!style) return;
+ if (overlay.opaque) {
+ // Opaque overlays replace the underlying styles entirely.
+ st.splice(start, i - start, end, "cm-overlay " + style);
+ i = start + 2;
+ } else {
+ // Transparent overlays are appended to the existing token styles.
+ for (; start < i; start += 2) {
+ var cur = st[start+1];
+ st[start+1] = (cur ? cur + " " : "") + "cm-overlay " + style;
+ }
+ }
+ }, lineClasses);
+ }
+
+ return {styles: st, classes: lineClasses.bgClass || lineClasses.textClass ? lineClasses : null};
+ }
+
+ // Return the cached style array for a line, re-highlighting when the mode
+ // generation changed. Advances doc.frontier when this line was the frontier.
+ function getLineStyles(cm, line, updateFrontier) {
+ if (!line.styles || line.styles[0] != cm.state.modeGen) {
+ var result = highlightLine(cm, line, line.stateAfter = getStateBefore(cm, lineNo(line)));
+ line.styles = result.styles;
+ if (result.classes) line.styleClasses = result.classes;
+ else if (line.styleClasses) line.styleClasses = null;
+ if (updateFrontier === cm.doc.frontier) cm.doc.frontier++;
+ }
+ return line.styles;
+ }
+
+ // Lightweight form of highlight -- proceed over this line and
+ // update state, but don't save a style array. Used for lines that
+ // aren't currently visible.
+ function processLine(cm, text, state, startAt) {
+ var mode = cm.doc.mode;
+ var stream = new StringStream(text, cm.options.tabSize);
+ stream.start = stream.pos = startAt || 0;
+ if (text == "") callBlankLine(mode, state);
+ while (!stream.eol() && stream.pos <= cm.options.maxHighlightLength) {
+ readToken(mode, stream, state);
+ stream.start = stream.pos;
+ }
+ }
+
+ // Convert a style as returned by a mode (either null, or a string
+ // containing one or more styles) to a CSS style. This is cached,
+ // and also looks for line-wide styles.
+ var styleToClassCache = {}, styleToClassCacheWithMode = {};
+ function interpretTokenStyle(style, options) {
+ if (!style || /^\s*$/.test(style)) return null;
+ var cache = options.addModeClass ? styleToClassCacheWithMode : styleToClassCache;
+ return cache[style] ||
+ (cache[style] = style.replace(/\S+/g, "cm-$&"));
+ }
+
+ // Render the DOM representation of the text of a line. Also builds
+ // up a 'line map', which points at the DOM nodes that represent
+ // specific stretches of text, and is used by the measuring code.
+ // The returned object contains the DOM node, this map, and
+ // information about line-wide styles that were set by the mode.
+ function buildLineContent(cm, lineView) {
+ // The padding-right forces the element to have a 'border', which
+ // is needed on Webkit to be able to get line-level bounding
+ // rectangles for it (in measureChar).
+ var content = elt("span", null, null, webkit ? "padding-right: .1px" : null);
+ var builder = {pre: elt("pre", [content]), content: content, col: 0, pos: 0, cm: cm};
+ lineView.measure = {};
+
+ // Iterate over the logical lines that make up this visual line.
+ // i == 0 is lineView.line itself; further indices come from lineView.rest.
+ for (var i = 0; i <= (lineView.rest ? lineView.rest.length : 0); i++) {
+ var line = i ? lineView.rest[i - 1] : lineView.line, order;
+ builder.pos = 0;
+ builder.addToken = buildToken;
+ // Optionally wire in some hacks into the token-rendering
+ // algorithm, to deal with browser quirks.
+ if ((ie || webkit) && cm.getOption("lineWrapping"))
+ builder.addToken = buildTokenSplitSpaces(builder.addToken);
+ if (hasBadBidiRects(cm.display.measure) && (order = getOrder(line)))
+ builder.addToken = buildTokenBadBidi(builder.addToken, order);
+ builder.map = [];
+ var allowFrontierUpdate = lineView != cm.display.externalMeasured && lineNo(line);
+ insertLineContent(line, builder, getLineStyles(cm, line, allowFrontierUpdate));
+ // Propagate line-wide classes set by the mode onto the builder.
+ if (line.styleClasses) {
+ if (line.styleClasses.bgClass)
+ builder.bgClass = joinClasses(line.styleClasses.bgClass, builder.bgClass || "");
+ if (line.styleClasses.textClass)
+ builder.textClass = joinClasses(line.styleClasses.textClass, builder.textClass || "");
+ }
+
+ // Ensure at least a single node is present, for measuring.
+ if (builder.map.length == 0)
+ builder.map.push(0, 0, builder.content.appendChild(zeroWidthElement(cm.display.measure)));
+
+ // Store the map and a cache object for the current logical line
+ if (i == 0) {
+ lineView.measure.map = builder.map;
+ lineView.measure.cache = {};
+ } else {
+ (lineView.measure.maps || (lineView.measure.maps = [])).push(builder.map);
+ (lineView.measure.caches || (lineView.measure.caches = [])).push({});
+ }
+ }
+
+ // See issue #2901
+ if (webkit && /\bcm-tab\b/.test(builder.content.lastChild.className))
+ builder.content.className = "cm-tab-wrap-hack";
+
+ signal(cm, "renderLine", cm, lineView.line, builder.pre);
+ if (builder.pre.className)
+ builder.textClass = joinClasses(builder.pre.className, builder.textClass || "");
+
+ return builder;
+ }
+
+ function defaultSpecialCharPlaceholder(ch) {
+ var token = elt("span", "\u2022", "cm-invalidchar");
+ token.title = "\\u" + ch.charCodeAt(0).toString(16);
+ return token;
+ }
+
+ // Build up the DOM representation for a single token, and add it to
+ // the line map. Takes care to render special characters separately.
+ function buildToken(builder, text, style, startStyle, endStyle, title) {
+ if (!text) return;
+ var special = builder.cm.options.specialChars, mustWrap = false;
+ if (!special.test(text)) {
+ // Fast path: no special characters, emit one text node.
+ builder.col += text.length;
+ var content = document.createTextNode(text);
+ builder.map.push(builder.pos, builder.pos + text.length, content);
+ if (ie && ie_version < 9) mustWrap = true;
+ builder.pos += text.length;
+ } else {
+ // Slow path: interleave plain stretches with placeholders for each
+ // special character found by the (global) specialChars regexp.
+ var content = document.createDocumentFragment(), pos = 0;
+ while (true) {
+ special.lastIndex = pos;
+ var m = special.exec(text);
+ var skipped = m ? m.index - pos : text.length - pos;
+ if (skipped) {
+ var txt = document.createTextNode(text.slice(pos, pos + skipped));
+ // Old IE needs text wrapped in a span to be measurable.
+ if (ie && ie_version < 9) content.appendChild(elt("span", [txt]));
+ else content.appendChild(txt);
+ builder.map.push(builder.pos, builder.pos + skipped, txt);
+ builder.col += skipped;
+ builder.pos += skipped;
+ }
+ if (!m) break;
+ pos += skipped + 1;
+ if (m[0] == "\t") {
+ // Render tabs as the right number of spaces for the current column.
+ var tabSize = builder.cm.options.tabSize, tabWidth = tabSize - builder.col % tabSize;
+ var txt = content.appendChild(elt("span", spaceStr(tabWidth), "cm-tab"));
+ builder.col += tabWidth;
+ } else {
+ var txt = builder.cm.options.specialCharPlaceholder(m[0]);
+ if (ie && ie_version < 9) content.appendChild(elt("span", [txt]));
+ else content.appendChild(txt);
+ builder.col += 1;
+ }
+ builder.map.push(builder.pos, builder.pos + 1, txt);
+ builder.pos++;
+ }
+ }
+ // Wrap in a styled span when any style applies (or when old IE needs it).
+ if (style || startStyle || endStyle || mustWrap) {
+ var fullStyle = style || "";
+ if (startStyle) fullStyle += startStyle;
+ if (endStyle) fullStyle += endStyle;
+ var token = elt("span", [content], fullStyle);
+ if (title) token.title = title;
+ return builder.content.appendChild(token);
+ }
+ builder.content.appendChild(content);
+ }
+
+ function buildTokenSplitSpaces(inner) {
+ function split(old) {
+ var out = " ";
+ for (var i = 0; i < old.length - 2; ++i) out += i % 2 ? " " : "\u00a0";
+ out += " ";
+ return out;
+ }
+ return function(builder, text, style, startStyle, endStyle, title) {
+ inner(builder, text.replace(/ {3,}/g, split), style, startStyle, endStyle, title);
+ };
+ }
+
+ // Work around nonsense dimensions being reported for stretches of
+ // right-to-left text.
+ function buildTokenBadBidi(inner, order) {
+ return function(builder, text, style, startStyle, endStyle, title) {
+ style = style ? style + " cm-force-border" : "cm-force-border";
+ var start = builder.pos, end = start + text.length;
+ // Emit the text piecewise so no emitted span crosses a bidi-part
+ // boundary from 'order'.
+ for (;;) {
+ // Find the part that overlaps with the start of this text
+ for (var i = 0; i < order.length; i++) {
+ var part = order[i];
+ if (part.to > start && part.from <= start) break;
+ }
+ if (part.to >= end) return inner(builder, text, style, startStyle, endStyle, title);
+ inner(builder, text.slice(0, part.to - start), style, startStyle, null, title);
+ // Only the first piece carries the start style.
+ startStyle = null;
+ text = text.slice(part.to - start);
+ start = part.to;
+ }
+ };
+ }
+
+ function buildCollapsedSpan(builder, size, marker, ignoreWidget) {
+ var widget = !ignoreWidget && marker.widgetNode;
+ if (widget) {
+ builder.map.push(builder.pos, builder.pos + size, widget);
+ builder.content.appendChild(widget);
+ }
+ builder.pos += size;
+ }
+
+ // Outputs a number of spans to make up a line, taking highlighting
+ // and marked text into account.
+ function insertLineContent(line, builder, styles) {
+ var spans = line.markedSpans, allText = line.text, at = 0;
+ if (!spans) {
+ // Fast path: no marked spans, emit one token per style-array pair.
+ for (var i = 1; i < styles.length; i+=2)
+ builder.addToken(builder, allText.slice(at, at = styles[i]), interpretTokenStyle(styles[i+1], builder.cm.options));
+ return;
+ }
+
+ // Walk the line, recomputing the set of active markers at every point
+ // ('nextChange') where that set may change.
+ var len = allText.length, pos = 0, i = 1, text = "", style;
+ var nextChange = 0, spanStyle, spanEndStyle, spanStartStyle, title, collapsed;
+ for (;;) {
+ if (nextChange == pos) { // Update current marker set
+ spanStyle = spanEndStyle = spanStartStyle = title = "";
+ collapsed = null; nextChange = Infinity;
+ var foundBookmarks = [];
+ for (var j = 0; j < spans.length; ++j) {
+ var sp = spans[j], m = sp.marker;
+ if (sp.from <= pos && (sp.to == null || sp.to > pos)) {
+ // Span is active at this position; accumulate its styles and
+ // shrink nextChange to its end.
+ if (sp.to != null && nextChange > sp.to) { nextChange = sp.to; spanEndStyle = ""; }
+ if (m.className) spanStyle += " " + m.className;
+ if (m.startStyle && sp.from == pos) spanStartStyle += " " + m.startStyle;
+ if (m.endStyle && sp.to == nextChange) spanEndStyle += " " + m.endStyle;
+ if (m.title && !title) title = m.title;
+ if (m.collapsed && (!collapsed || compareCollapsedMarkers(collapsed.marker, m) < 0))
+ collapsed = sp;
+ } else if (sp.from > pos && nextChange > sp.from) {
+ nextChange = sp.from;
+ }
+ if (m.type == "bookmark" && sp.from == pos && m.widgetNode) foundBookmarks.push(m);
+ }
+ if (collapsed && (collapsed.from || 0) == pos) {
+ buildCollapsedSpan(builder, (collapsed.to == null ? len + 1 : collapsed.to) - pos,
+ collapsed.marker, collapsed.from == null);
+ // A collapsed span with no end swallows the rest of the line.
+ if (collapsed.to == null) return;
+ }
+ if (!collapsed && foundBookmarks.length) for (var j = 0; j < foundBookmarks.length; ++j)
+ buildCollapsedSpan(builder, 0, foundBookmarks[j]);
+ }
+ if (pos >= len) break;
+
+ // Emit tokens (from the style array) until the next marker change point.
+ var upto = Math.min(len, nextChange);
+ while (true) {
+ if (text) {
+ var end = pos + text.length;
+ if (!collapsed) {
+ var tokenText = end > upto ? text.slice(0, upto - pos) : text;
+ builder.addToken(builder, tokenText, style ? style + spanStyle : spanStyle,
+ spanStartStyle, pos + tokenText.length == nextChange ? spanEndStyle : "", title);
+ }
+ if (end >= upto) {text = text.slice(upto - pos); pos = upto; break;}
+ pos = end;
+ spanStartStyle = "";
+ }
+ text = allText.slice(at, at = styles[i++]);
+ style = interpretTokenStyle(styles[i++], builder.cm.options);
+ }
+ }
+ }
+
+ // DOCUMENT DATA STRUCTURE
+
+ // By default, updates that start and end at the beginning of a line
+ // are treated specially, in order to make the association of line
+ // widgets and marker elements with the text behave more intuitive.
+ function isWholeLineUpdate(doc, change) {
+ return change.from.ch == 0 && change.to.ch == 0 && lst(change.text) == "" &&
+ (!doc.cm || doc.cm.options.wholeLineUpdateBefore);
+ }
+
+ // Perform a change on the document data structure.
+ // 'change' has {from, to, text}; markedSpans, when given, holds per-line
+ // span arrays for the inserted text; estimateHeight sizes new lines.
+ function updateDoc(doc, change, markedSpans, estimateHeight) {
+ function spansFor(n) {return markedSpans ? markedSpans[n] : null;}
+ function update(line, text, spans) {
+ updateLine(line, text, spans, estimateHeight);
+ signalLater(line, "change", line, change);
+ }
+
+ var from = change.from, to = change.to, text = change.text;
+ var firstLine = getLine(doc, from.line), lastLine = getLine(doc, to.line);
+ var lastText = lst(text), lastSpans = spansFor(text.length - 1), nlines = to.line - from.line;
+
+ // Adjust the line structure
+ if (isWholeLineUpdate(doc, change)) {
+ // This is a whole-line replace. Treated specially to make
+ // sure line objects move the way they are supposed to.
+ for (var i = 0, added = []; i < text.length - 1; ++i)
+ added.push(new Line(text[i], spansFor(i), estimateHeight));
+ update(lastLine, lastLine.text, lastSpans);
+ if (nlines) doc.remove(from.line, nlines);
+ if (added.length) doc.insert(from.line, added);
+ } else if (firstLine == lastLine) {
+ // Change confined to a single line.
+ if (text.length == 1) {
+ update(firstLine, firstLine.text.slice(0, from.ch) + lastText + firstLine.text.slice(to.ch), lastSpans);
+ } else {
+ // Single line split into several: insert the new middle/last lines.
+ for (var added = [], i = 1; i < text.length - 1; ++i)
+ added.push(new Line(text[i], spansFor(i), estimateHeight));
+ added.push(new Line(lastText + firstLine.text.slice(to.ch), lastSpans, estimateHeight));
+ update(firstLine, firstLine.text.slice(0, from.ch) + text[0], spansFor(0));
+ doc.insert(from.line + 1, added);
+ }
+ } else if (text.length == 1) {
+ // Multiple lines collapsed into one.
+ update(firstLine, firstLine.text.slice(0, from.ch) + text[0] + lastLine.text.slice(to.ch), spansFor(0));
+ doc.remove(from.line + 1, nlines);
+ } else {
+ // General case: rewrite first and last lines, replace the middle.
+ update(firstLine, firstLine.text.slice(0, from.ch) + text[0], spansFor(0));
+ update(lastLine, lastText + lastLine.text.slice(to.ch), lastSpans);
+ for (var i = 1, added = []; i < text.length - 1; ++i)
+ added.push(new Line(text[i], spansFor(i), estimateHeight));
+ if (nlines > 1) doc.remove(from.line + 1, nlines - 1);
+ doc.insert(from.line + 1, added);
+ }
+
+ signalLater(doc, "change", doc, change);
+ }
+
+ // The document is represented as a BTree consisting of leaves, with
+ // chunk of lines in them, and branches, with up to ten leaves or
+ // other branch nodes below them. The top node is always a branch
+ // node, and is the document object itself (meaning it has
+ // additional methods and properties).
+ //
+ // All nodes have parent links. The tree is used both to go from
+ // line numbers to line objects, and to go from objects to numbers.
+ // It also indexes by height, and is used to convert between height
+ // and line object, and to find the total height of the document.
+ //
+ // See also http://marijnhaverbeke.nl/blog/codemirror-line-tree.html
+
+ function LeafChunk(lines) {
+ this.lines = lines;
+ this.parent = null;
+ for (var i = 0, height = 0; i < lines.length; ++i) {
+ lines[i].parent = this;
+ height += lines[i].height;
+ }
+ this.height = height;
+ }
+
+ LeafChunk.prototype = {
+ // Number of lines held by this chunk.
+ chunkSize: function() { return this.lines.length; },
+ // Remove the n lines at offset 'at'.
+ removeInner: function(at, n) {
+ for (var i = at, e = at + n; i < e; ++i) {
+ var line = this.lines[i];
+ this.height -= line.height;
+ cleanUpLine(line);
+ signalLater(line, "delete");
+ }
+ this.lines.splice(at, n);
+ },
+ // Helper used to collapse a small branch into a single leaf.
+ collapse: function(lines) {
+ lines.push.apply(lines, this.lines);
+ },
+ // Insert the given array of lines at offset 'at', count them as
+ // having the given height.
+ insertInner: function(at, lines, height) {
+ this.height += height;
+ this.lines = this.lines.slice(0, at).concat(lines).concat(this.lines.slice(at));
+ for (var i = 0; i < lines.length; ++i) lines[i].parent = this;
+ },
+ // Used to iterate over a part of the tree. op may return true to
+ // abort the iteration early; that result is propagated upward.
+ iterN: function(at, n, op) {
+ for (var e = at + n; at < e; ++at)
+ if (op(this.lines[at])) return true;
+ }
+ };
+
+ function BranchChunk(children) {
+ this.children = children;
+ var size = 0, height = 0;
+ for (var i = 0; i < children.length; ++i) {
+ var ch = children[i];
+ size += ch.chunkSize(); height += ch.height;
+ ch.parent = this;
+ }
+ this.size = size;
+ this.height = height;
+ this.parent = null;
+ }
+
+ BranchChunk.prototype = {
+ // Number of lines below this node (cached).
+ chunkSize: function() { return this.size; },
+ // Remove n lines starting at offset 'at', recursing into children.
+ removeInner: function(at, n) {
+ this.size -= n;
+ for (var i = 0; i < this.children.length; ++i) {
+ var child = this.children[i], sz = child.chunkSize();
+ if (at < sz) {
+ var rm = Math.min(n, sz - at), oldHeight = child.height;
+ child.removeInner(at, rm);
+ this.height -= oldHeight - child.height;
+ // Drop children that became empty.
+ if (sz == rm) { this.children.splice(i--, 1); child.parent = null; }
+ if ((n -= rm) == 0) break;
+ at = 0;
+ } else at -= sz;
+ }
+ // If the result is smaller than 25 lines, ensure that it is a
+ // single leaf node.
+ if (this.size - n < 25 &&
+ (this.children.length > 1 || !(this.children[0] instanceof LeafChunk))) {
+ var lines = [];
+ this.collapse(lines);
+ this.children = [new LeafChunk(lines)];
+ this.children[0].parent = this;
+ }
+ },
+ // Gather all lines below this node into 'lines'.
+ collapse: function(lines) {
+ for (var i = 0; i < this.children.length; ++i) this.children[i].collapse(lines);
+ },
+ // Insert lines (with known combined height) at offset 'at', splitting
+ // oversized leaves and spilling this node when it grows too wide.
+ insertInner: function(at, lines, height) {
+ this.size += lines.length;
+ this.height += height;
+ for (var i = 0; i < this.children.length; ++i) {
+ var child = this.children[i], sz = child.chunkSize();
+ if (at <= sz) {
+ child.insertInner(at, lines, height);
+ // Split leaves that grew beyond 50 lines into 25-line chunks.
+ if (child.lines && child.lines.length > 50) {
+ while (child.lines.length > 50) {
+ var spilled = child.lines.splice(child.lines.length - 25, 25);
+ var newleaf = new LeafChunk(spilled);
+ child.height -= newleaf.height;
+ this.children.splice(i + 1, 0, newleaf);
+ newleaf.parent = this;
+ }
+ this.maybeSpill();
+ }
+ break;
+ }
+ at -= sz;
+ }
+ },
+ // When a node has grown, check whether it should be split.
+ maybeSpill: function() {
+ if (this.children.length <= 10) return;
+ var me = this;
+ do {
+ var spilled = me.children.splice(me.children.length - 5, 5);
+ var sibling = new BranchChunk(spilled);
+ if (!me.parent) { // Become the parent node
+ var copy = new BranchChunk(me.children);
+ copy.parent = me;
+ me.children = [copy, sibling];
+ me = copy;
+ } else {
+ me.size -= sibling.size;
+ me.height -= sibling.height;
+ var myIndex = indexOf(me.parent.children, me);
+ me.parent.children.splice(myIndex + 1, 0, sibling);
+ }
+ sibling.parent = me.parent;
+ } while (me.children.length > 10);
+ // After the loop 'me' always has a parent: the root case above
+ // replaced 'me' with a copy whose parent is the original root.
+ me.parent.maybeSpill();
+ },
+ // Iterate n lines starting at 'at'; op returning true aborts early.
+ iterN: function(at, n, op) {
+ for (var i = 0; i < this.children.length; ++i) {
+ var child = this.children[i], sz = child.chunkSize();
+ if (at < sz) {
+ var used = Math.min(n, sz - at);
+ if (child.iterN(at, used, op)) return true;
+ if ((n -= used) == 0) break;
+ at = 0;
+ } else at -= sz;
+ }
+ }
+ };
+
+ var nextDocId = 0;
+ // Document constructor. 'text' is a string or array of line strings;
+ // 'mode' the mode spec; 'firstLine' the number of the first line.
+ // Callable with or without 'new'.
+ var Doc = CodeMirror.Doc = function(text, mode, firstLine) {
+ if (!(this instanceof Doc)) return new Doc(text, mode, firstLine);
+ if (firstLine == null) firstLine = 0;
+
+ // A Doc is the root BranchChunk of the line tree, seeded with one
+ // empty line which the updateDoc call below replaces.
+ BranchChunk.call(this, [new LeafChunk([new Line("", null)])]);
+ this.first = firstLine;
+ this.scrollTop = this.scrollLeft = 0;
+ this.cantEdit = false;
+ this.cleanGeneration = 1;
+ this.frontier = firstLine;
+ var start = Pos(firstLine, 0);
+ this.sel = simpleSelection(start);
+ this.history = new History(null);
+ this.id = ++nextDocId;
+ this.modeOption = mode;
+
+ if (typeof text == "string") text = splitLines(text);
+ updateDoc(this, {from: start, to: start, text: text});
+ setSelection(this, simpleSelection(start), sel_dontScroll);
+ };
+
+ Doc.prototype = createObj(BranchChunk.prototype, {
+ constructor: Doc,
+ // Iterate over the document. Supports two forms -- with only one
+ // argument, it calls that for each line in the document. With
+ // three, it iterates over the range given by the first two (with
+ // the second being non-inclusive).
+ iter: function(from, to, op) {
+ if (op) this.iterN(from - this.first, to - from, op);
+ else this.iterN(this.first, this.first + this.size, from);
+ },
+
+ // Non-public interface for adding and removing lines.
+ insert: function(at, lines) {
+ var height = 0;
+ for (var i = 0; i < lines.length; ++i) height += lines[i].height;
+ this.insertInner(at - this.first, lines, height);
+ },
+ remove: function(at, n) { this.removeInner(at - this.first, n); },
+
+ // From here, the methods are part of the public interface. Most
+ // are also available from CodeMirror (editor) instances.
+
+ getValue: function(lineSep) {
+ var lines = getLines(this, this.first, this.first + this.size);
+ if (lineSep === false) return lines;
+ return lines.join(lineSep || "\n");
+ },
+ setValue: docMethodOp(function(code) {
+ var top = Pos(this.first, 0), last = this.first + this.size - 1;
+ makeChange(this, {from: top, to: Pos(last, getLine(this, last).text.length),
+ text: splitLines(code), origin: "setValue"}, true);
+ setSelection(this, simpleSelection(top));
+ }),
+ replaceRange: function(code, from, to, origin) {
+ from = clipPos(this, from);
+ to = to ? clipPos(this, to) : from;
+ replaceRange(this, code, from, to, origin);
+ },
+ getRange: function(from, to, lineSep) {
+ var lines = getBetween(this, clipPos(this, from), clipPos(this, to));
+ if (lineSep === false) return lines;
+ return lines.join(lineSep || "\n");
+ },
+
+ getLine: function(line) {var l = this.getLineHandle(line); return l && l.text;},
+
+ getLineHandle: function(line) {if (isLine(this, line)) return getLine(this, line);},
+ getLineNumber: function(line) {return lineNo(line);},
+
+ getLineHandleVisualStart: function(line) {
+ if (typeof line == "number") line = getLine(this, line);
+ return visualLine(line);
+ },
+
+ lineCount: function() {return this.size;},
+ firstLine: function() {return this.first;},
+ lastLine: function() {return this.first + this.size - 1;},
+
+ clipPos: function(pos) {return clipPos(this, pos);},
+
+ getCursor: function(start) {
+ var range = this.sel.primary(), pos;
+ if (start == null || start == "head") pos = range.head;
+ else if (start == "anchor") pos = range.anchor;
+ else if (start == "end" || start == "to" || start === false) pos = range.to();
+ else pos = range.from();
+ return pos;
+ },
+ listSelections: function() { return this.sel.ranges; },
+ somethingSelected: function() {return this.sel.somethingSelected();},
+
+ setCursor: docMethodOp(function(line, ch, options) {
+ setSimpleSelection(this, clipPos(this, typeof line == "number" ? Pos(line, ch || 0) : line), null, options);
+ }),
+ setSelection: docMethodOp(function(anchor, head, options) {
+ setSimpleSelection(this, clipPos(this, anchor), clipPos(this, head || anchor), options);
+ }),
+ extendSelection: docMethodOp(function(head, other, options) {
+ extendSelection(this, clipPos(this, head), other && clipPos(this, other), options);
+ }),
+ extendSelections: docMethodOp(function(heads, options) {
+ extendSelections(this, clipPosArray(this, heads, options));
+ }),
+ extendSelectionsBy: docMethodOp(function(f, options) {
+ extendSelections(this, map(this.sel.ranges, f), options);
+ }),
+ setSelections: docMethodOp(function(ranges, primary, options) {
+ if (!ranges.length) return;
+ for (var i = 0, out = []; i < ranges.length; i++)
+ out[i] = new Range(clipPos(this, ranges[i].anchor),
+ clipPos(this, ranges[i].head));
+ if (primary == null) primary = Math.min(ranges.length - 1, this.sel.primIndex);
+ setSelection(this, normalizeSelection(out, primary), options);
+ }),
+ addSelection: docMethodOp(function(anchor, head, options) {
+ var ranges = this.sel.ranges.slice(0);
+ ranges.push(new Range(clipPos(this, anchor), clipPos(this, head || anchor)));
+ setSelection(this, normalizeSelection(ranges, ranges.length - 1), options);
+ }),
+
+ getSelection: function(lineSep) {
+ var ranges = this.sel.ranges, lines;
+ for (var i = 0; i < ranges.length; i++) {
+ var sel = getBetween(this, ranges[i].from(), ranges[i].to());
+ lines = lines ? lines.concat(sel) : sel;
+ }
+ if (lineSep === false) return lines;
+ else return lines.join(lineSep || "\n");
+ },
+ getSelections: function(lineSep) {
+ var parts = [], ranges = this.sel.ranges;
+ for (var i = 0; i < ranges.length; i++) {
+ var sel = getBetween(this, ranges[i].from(), ranges[i].to());
+ if (lineSep !== false) sel = sel.join(lineSep || "\n");
+ parts[i] = sel;
+ }
+ return parts;
+ },
+ replaceSelection: function(code, collapse, origin) {
+ var dup = [];
+ for (var i = 0; i < this.sel.ranges.length; i++)
+ dup[i] = code;
+ this.replaceSelections(dup, collapse, origin || "+input");
+ },
+ replaceSelections: docMethodOp(function(code, collapse, origin) {
+ var changes = [], sel = this.sel;
+ for (var i = 0; i < sel.ranges.length; i++) {
+ var range = sel.ranges[i];
+ changes[i] = {from: range.from(), to: range.to(), text: splitLines(code[i]), origin: origin};
+ }
+ var newSel = collapse && collapse != "end" && computeReplacedSel(this, changes, collapse);
+ for (var i = changes.length - 1; i >= 0; i--)
+ makeChange(this, changes[i]);
+ if (newSel) setSelectionReplaceHistory(this, newSel);
+ else if (this.cm) ensureCursorVisible(this.cm);
+ }),
+ undo: docMethodOp(function() {makeChangeFromHistory(this, "undo");}),
+ redo: docMethodOp(function() {makeChangeFromHistory(this, "redo");}),
+ undoSelection: docMethodOp(function() {makeChangeFromHistory(this, "undo", true);}),
+ redoSelection: docMethodOp(function() {makeChangeFromHistory(this, "redo", true);}),
+
+ setExtending: function(val) {this.extend = val;},
+ getExtending: function() {return this.extend;},
+
+ historySize: function() {
+ var hist = this.history, done = 0, undone = 0;
+ for (var i = 0; i < hist.done.length; i++) if (!hist.done[i].ranges) ++done;
+ for (var i = 0; i < hist.undone.length; i++) if (!hist.undone[i].ranges) ++undone;
+ return {undo: done, redo: undone};
+ },
+ clearHistory: function() {this.history = new History(this.history.maxGeneration);},
+
+ markClean: function() {
+ this.cleanGeneration = this.changeGeneration(true);
+ },
+ changeGeneration: function(forceSplit) {
+ if (forceSplit)
+ this.history.lastOp = this.history.lastSelOp = this.history.lastOrigin = null;
+ return this.history.generation;
+ },
+ isClean: function (gen) {
+ return this.history.generation == (gen || this.cleanGeneration);
+ },
+
+ getHistory: function() {
+ return {done: copyHistoryArray(this.history.done),
+ undone: copyHistoryArray(this.history.undone)};
+ },
+ setHistory: function(histData) {
+ var hist = this.history = new History(this.history.maxGeneration);
+ hist.done = copyHistoryArray(histData.done.slice(0), null, true);
+ hist.undone = copyHistoryArray(histData.undone.slice(0), null, true);
+ },
+
+ addLineClass: docMethodOp(function(handle, where, cls) {
+ return changeLine(this, handle, where == "gutter" ? "gutter" : "class", function(line) {
+ var prop = where == "text" ? "textClass"
+ : where == "background" ? "bgClass"
+ : where == "gutter" ? "gutterClass" : "wrapClass";
+ if (!line[prop]) line[prop] = cls;
+ else if (classTest(cls).test(line[prop])) return false;
+ else line[prop] += " " + cls;
+ return true;
+ });
+ }),
+ removeLineClass: docMethodOp(function(handle, where, cls) {
+ return changeLine(this, handle, "class", function(line) {
+ var prop = where == "text" ? "textClass"
+ : where == "background" ? "bgClass"
+ : where == "gutter" ? "gutterClass" : "wrapClass";
+ var cur = line[prop];
+ if (!cur) return false;
+ else if (cls == null) line[prop] = null;
+ else {
+ var found = cur.match(classTest(cls));
+ if (!found) return false;
+ var end = found.index + found[0].length;
+ line[prop] = cur.slice(0, found.index) + (!found.index || end == cur.length ? "" : " ") + cur.slice(end) || null;
+ }
+ return true;
+ });
+ }),
+
+ markText: function(from, to, options) {
+ return markText(this, clipPos(this, from), clipPos(this, to), options, "range");
+ },
+ setBookmark: function(pos, options) {
+ var realOpts = {replacedWith: options && (options.nodeType == null ? options.widget : options),
+ insertLeft: options && options.insertLeft,
+ clearWhenEmpty: false, shared: options && options.shared};
+ pos = clipPos(this, pos);
+ return markText(this, pos, pos, realOpts, "bookmark");
+ },
+ findMarksAt: function(pos) {
+ pos = clipPos(this, pos);
+ var markers = [], spans = getLine(this, pos.line).markedSpans;
+ if (spans) for (var i = 0; i < spans.length; ++i) {
+ var span = spans[i];
+ if ((span.from == null || span.from <= pos.ch) &&
+ (span.to == null || span.to >= pos.ch))
+ markers.push(span.marker.parent || span.marker);
+ }
+ return markers;
+ },
+ findMarks: function(from, to, filter) {
+ from = clipPos(this, from); to = clipPos(this, to);
+ var found = [], lineNo = from.line;
+ this.iter(from.line, to.line + 1, function(line) {
+ var spans = line.markedSpans;
+ if (spans) for (var i = 0; i < spans.length; i++) {
+ var span = spans[i];
+ if (!(lineNo == from.line && from.ch > span.to ||
+ span.from == null && lineNo != from.line||
+ lineNo == to.line && span.from > to.ch) &&
+ (!filter || filter(span.marker)))
+ found.push(span.marker.parent || span.marker);
+ }
+ ++lineNo;
+ });
+ return found;
+ },
+ getAllMarks: function() {
+ var markers = [];
+ this.iter(function(line) {
+ var sps = line.markedSpans;
+ if (sps) for (var i = 0; i < sps.length; ++i)
+ if (sps[i].from != null) markers.push(sps[i].marker);
+ });
+ return markers;
+ },
+
+ posFromIndex: function(off) {
+ var ch, lineNo = this.first;
+ this.iter(function(line) {
+ var sz = line.text.length + 1;
+ if (sz > off) { ch = off; return true; }
+ off -= sz;
+ ++lineNo;
+ });
+ return clipPos(this, Pos(lineNo, ch));
+ },
+ indexFromPos: function (coords) {
+ coords = clipPos(this, coords);
+ var index = coords.ch;
+ if (coords.line < this.first || coords.ch < 0) return 0;
+ this.iter(this.first, coords.line, function (line) {
+ index += line.text.length + 1;
+ });
+ return index;
+ },
+
+ copy: function(copyHistory) {
+ var doc = new Doc(getLines(this, this.first, this.first + this.size), this.modeOption, this.first);
+ doc.scrollTop = this.scrollTop; doc.scrollLeft = this.scrollLeft;
+ doc.sel = this.sel;
+ doc.extend = false;
+ if (copyHistory) {
+ doc.history.undoDepth = this.history.undoDepth;
+ doc.setHistory(this.getHistory());
+ }
+ return doc;
+ },
+
+ // Create a new document linked to this one, covering an optional
+ // sub-range and optionally sharing the undo history. Both documents
+ // record the link, and shared markers are copied over.
+ linkedDoc: function(options) {
+ if (!options) options = {};
+ var from = this.first, to = this.first + this.size;
+ if (options.from != null && options.from > from) from = options.from;
+ if (options.to != null && options.to < to) to = options.to;
+ var copy = new Doc(getLines(this, from, to), options.mode || this.modeOption, from);
+ if (options.sharedHist) copy.history = this.history;
+ (this.linked || (this.linked = [])).push({doc: copy, sharedHist: options.sharedHist});
+ copy.linked = [{doc: this, isParent: true, sharedHist: options.sharedHist}];
+ copySharedMarkers(copy, findSharedMarkers(this));
+ return copy;
+ },
+ // Break the link between this document and `other` (an editor or a
+ // document): remove the link entries on both sides, detach shared
+ // markers, and give `other` its own copy of a shared history.
+ unlinkDoc: function(other) {
+ if (other instanceof CodeMirror) other = other.doc;
+ if (this.linked) for (var i = 0; i < this.linked.length; ++i) {
+ var link = this.linked[i];
+ if (link.doc != other) continue;
+ this.linked.splice(i, 1);
+ other.unlinkDoc(this);
+ detachSharedMarkers(findSharedMarkers(this));
+ break;
+ }
+ // If the histories were shared, split them again
+ if (other.history == this.history) {
+ var splitIds = [other.id];
+ linkedDocs(other, function(doc) {splitIds.push(doc.id);}, true);
+ other.history = new History(null);
+ other.history.done = copyHistoryArray(this.history.done, splitIds);
+ other.history.undone = copyHistoryArray(this.history.undone, splitIds);
+ }
+ },
+ iterLinkedDocs: function(f) {linkedDocs(this, f);},
+
+ getMode: function() {return this.mode;},
+ getEditor: function() {return this.cm;}
+ });
+
+ // Public alias.
+ Doc.prototype.eachLine = Doc.prototype.iter;
+
+ // Set up methods on CodeMirror's prototype to redirect to the editor's document.
+ var dontDelegate = "iter insert remove copy getEditor".split(" ");
+ for (var prop in Doc.prototype) if (Doc.prototype.hasOwnProperty(prop) && indexOf(dontDelegate, prop) < 0)
+ CodeMirror.prototype[prop] = (function(method) {
+ return function() {return method.apply(this.doc, arguments);};
+ })(Doc.prototype[prop]);
+
+ eventMixin(Doc);
+
+ // Call f for all documents linked (directly or transitively) to
+ // doc, skipping doc itself. When sharedHistOnly is true, only
+ // documents reachable through an unbroken chain of shared-history
+ // links are visited. f receives (linkedDoc, sharesHistory).
+ function linkedDocs(doc, f, sharedHistOnly) {
+ function propagate(doc, skip, sharedHist) {
+ if (doc.linked) for (var i = 0; i < doc.linked.length; ++i) {
+ var rel = doc.linked[i];
+ // `skip` is the document we arrived from; avoids bouncing back.
+ if (rel.doc == skip) continue;
+ var shared = sharedHist && rel.sharedHist;
+ if (sharedHistOnly && !shared) continue;
+ f(rel.doc, shared);
+ propagate(rel.doc, doc, shared);
+ }
+ }
+ propagate(doc, null, true);
+ }
+
+ // Attach a document to an editor. A document may belong to at most
+ // one editor at a time; wires both references, refreshes line-height
+ // estimates and mode, and schedules a full redraw.
+ function attachDoc(cm, doc) {
+ if (doc.cm) throw new Error("This document is already in use.");
+ cm.doc = doc;
+ doc.cm = cm;
+ estimateLineHeights(cm);
+ loadMode(cm);
+ // With line wrapping off, the widest line must be tracked explicitly.
+ if (!cm.options.lineWrapping) findMaxLine(cm);
+ cm.options.mode = doc.modeOption;
+ regChange(cm);
+ }
+
+ // LINE UTILITIES
+
// Find the line object for document line number n by descending the
// chunk tree until a leaf chunk (one that holds line objects) is
// reached. Throws for out-of-range line numbers.
function getLine(doc, n) {
  n -= doc.first;
  if (n < 0 || n >= doc.size)
    throw new Error("There is no line " + (n + doc.first) + " in the document.");
  var chunk = doc;
  while (!chunk.lines) {
    var idx = 0;
    for (;;) {
      var child = chunk.children[idx], sz = child.chunkSize();
      if (n < sz) { chunk = child; break; }
      n -= sz;
      idx++;
    }
  }
  return chunk.lines[n];
}
+
// Get the part of the document between two positions, as an array of
// strings (one entry per line; the first and last entries are trimmed
// to the positions' character offsets).
function getBetween(doc, start, end) {
  var pieces = [];
  var lineNum = start.line;
  doc.iter(start.line, end.line + 1, function(line) {
    var text = line.text;
    // Trim the end first so a single-line range slices correctly.
    if (lineNum == end.line) text = text.slice(0, end.ch);
    if (lineNum == start.line) text = text.slice(start.ch);
    pieces.push(text);
    lineNum++;
  });
  return pieces;
}
// Collect the text of lines [from, to) as an array of strings.
function getLines(doc, from, to) {
  var collected = [];
  doc.iter(from, to, function(line) {
    collected.push(line.text);
  });
  return collected;
}
+
// Set a line's height, propagating the difference upwards through
// every parent chunk so that aggregate heights stay consistent.
function updateLineHeight(line, height) {
  var delta = height - line.height;
  if (!delta) return;
  for (var node = line; node; node = node.parent)
    node.height += delta;
}
+
+ // Given a line object, find its line number by walking up through
+ // its parent links.
+ function lineNo(line) {
+ // A detached line (no parent chunk) has no number.
+ if (line.parent == null) return null;
+ var cur = line.parent, no = indexOf(cur.lines, line);
+ // Ascend the chunk tree, adding the sizes of all earlier siblings
+ // at each level.
+ for (var chunk = cur.parent; chunk; cur = chunk, chunk = chunk.parent) {
+ for (var i = 0;; ++i) {
+ if (chunk.children[i] == cur) break;
+ no += chunk.children[i].chunkSize();
+ }
+ }
+ // The root chunk carries the document's first line number.
+ return no + cur.first;
+ }
+
+ // Find the line at the given vertical position, using the height
+ // information in the document tree.
+ function lineAtHeight(chunk, h) {
+ var n = chunk.first;
+ // Descend into the child whose height range contains h, subtracting
+ // the heights of the children passed over and accumulating their
+ // line counts in n.
+ outer: do {
+ for (var i = 0; i < chunk.children.length; ++i) {
+ var child = chunk.children[i], ch = child.height;
+ if (h < ch) { chunk = child; continue outer; }
+ h -= ch;
+ n += child.chunkSize();
+ }
+ // h lies below the last child: clamp to the line count so far.
+ return n;
+ } while (!chunk.lines);
+ // In the leaf chunk, scan individual line heights.
+ for (var i = 0; i < chunk.lines.length; ++i) {
+ var line = chunk.lines[i], lh = line.height;
+ if (h < lh) break;
+ h -= lh;
+ }
+ return n + i;
+ }
+
+
+ // Find the height above the given line.
+ function heightAtLine(lineObj) {
+ // Collapsed ranges are measured from the start of their visual line.
+ lineObj = visualLine(lineObj);
+
+ // Sum the heights of earlier siblings in the leaf chunk…
+ var h = 0, chunk = lineObj.parent;
+ for (var i = 0; i < chunk.lines.length; ++i) {
+ var line = chunk.lines[i];
+ if (line == lineObj) break;
+ else h += line.height;
+ }
+ // …then, walking up the tree, the heights of earlier sibling chunks
+ // at every level.
+ for (var p = chunk.parent; p; chunk = p, p = chunk.parent) {
+ for (var i = 0; i < p.children.length; ++i) {
+ var cur = p.children[i];
+ if (cur == chunk) break;
+ else h += cur.height;
+ }
+ }
+ return h;
+ }
+
// Get the bidi ordering for the given line, computing and caching it
// on first use. The cached value is false for fully left-to-right
// lines, and an array of BidiSpan objects otherwise.
function getOrder(line) {
  if (line.order != null) return line.order;
  line.order = bidiOrdering(line.text);
  return line.order;
}
+
+ // HISTORY
+
// Undo/redo state for a document. Doing something pushes an event
// onto `done` and clears `undone`; undo moves events from done to
// undone, redo moves them back.
function History(startGen) {
  this.done = [];
  this.undone = [];
  this.undoDepth = Infinity;
  // Merge-tracking state, used to decide whether a new change or
  // selection can be folded into the previous undo event.
  this.lastModTime = 0;
  this.lastSelTime = 0;
  this.lastOp = null;
  this.lastSelOp = null;
  this.lastOrigin = null;
  this.lastSelOrigin = null;
  // Generation counters backing the isClean() check.
  this.generation = this.maxGeneration = startGen || 1;
}
+
+ // Create a history change event from an updateDoc-style change
+ // object.
+ function historyChangeFromChange(doc, change) {
+ var histChange = {from: copyPos(change.from), to: changeEnd(change), text: getBetween(doc, change.from, change.to)};
+ attachLocalSpans(doc, histChange, change.from.line, change.to.line + 1);
+ linkedDocs(doc, function(doc) {attachLocalSpans(doc, histChange, change.from.line, change.to.line + 1);}, true);
+ return histChange;
+ }
+
+ // Pop all selection events off the end of a history array. Stop at
+ // a change event.
+ function clearSelectionEvents(array) {
+ while (array.length) {
+ var last = lst(array);
+ if (last.ranges) array.pop();
+ else break;
+ }
+ }
+
+ // Find the top change event in the history. Pop off selection
+ // events that are in the way.
+ function lastChangeEvent(hist, force) {
+ if (force) {
+ // Discard trailing selection-only events and return the change
+ // event underneath (if any).
+ clearSelectionEvents(hist.done);
+ return lst(hist.done);
+ } else if (hist.done.length && !lst(hist.done).ranges) {
+ // Top of the stack is already a change event.
+ return lst(hist.done);
+ } else if (hist.done.length > 1 && !hist.done[hist.done.length - 2].ranges) {
+ // A single selection event sits on top of a change event: drop it.
+ hist.done.pop();
+ return lst(hist.done);
+ }
+ // Implicitly returns undefined when no mergeable change event exists.
+ }
+
+ // Register a change in the history. Merges changes that are within
+ // a single operation, or are close together with an origin that
+ // allows merging (starting with "+") into a single event.
+ // Register a change in the history, merging it into the previous
+ // event when the operation id matches or the origins allow merging.
+ function addChangeToHistory(doc, change, selAfter, opId) {
+ var hist = doc.history;
+ // Any new change invalidates the redo stack.
+ hist.undone.length = 0;
+ var time = +new Date, cur;
+
+ // Merge when: same operation, or same "+"-origin within the
+ // configured delay, or a "*"-origin (always mergeable).
+ if ((hist.lastOp == opId ||
+ hist.lastOrigin == change.origin && change.origin &&
+ ((change.origin.charAt(0) == "+" && doc.cm && hist.lastModTime > time - doc.cm.options.historyEventDelay) ||
+ change.origin.charAt(0) == "*")) &&
+ (cur = lastChangeEvent(hist, hist.lastOp == opId))) {
+ // Merge this change into the last event
+ var last = lst(cur.changes);
+ if (cmp(change.from, change.to) == 0 && cmp(change.from, last.to) == 0) {
+ // Optimized case for simple insertion -- don't want to add
+ // new changesets for every character typed
+ last.to = changeEnd(change);
+ } else {
+ // Add new sub-event
+ cur.changes.push(historyChangeFromChange(doc, change));
+ }
+ } else {
+ // Can not be merged, start a new event.
+ var before = lst(hist.done);
+ if (!before || !before.ranges)
+ pushSelectionToHistory(doc.sel, hist.done);
+ cur = {changes: [historyChangeFromChange(doc, change)],
+ generation: hist.generation};
+ hist.done.push(cur);
+ // Enforce undoDepth; drop the selection event paired with the
+ // evicted change event as well.
+ while (hist.done.length > hist.undoDepth) {
+ hist.done.shift();
+ if (!hist.done[0].ranges) hist.done.shift();
+ }
+ }
+ hist.done.push(selAfter);
+ hist.generation = ++hist.maxGeneration;
+ hist.lastModTime = hist.lastSelTime = time;
+ hist.lastOp = hist.lastSelOp = opId;
+ hist.lastOrigin = hist.lastSelOrigin = change.origin;
+
+ // `last` is only assigned on the simple-insertion merge path, so
+ // "historyAdded" fires whenever anything other than a plain
+ // insertion extension happened.
+ if (!last) signal(doc, "historyAdded");
+ }
+
// Decide whether a new selection (with the given origin) may replace
// the previous pending selection event instead of starting a new one:
// "*"-origins always merge; "+"-origins merge when the selections have
// the same shape and arrived within the history event delay.
function selectionEventCanBeMerged(doc, origin, prev, sel) {
  var kind = origin.charAt(0);
  if (kind == "*") return true;
  return kind == "+" &&
    prev.ranges.length == sel.ranges.length &&
    prev.somethingSelected() == sel.somethingSelected() &&
    new Date - doc.history.lastSelTime <= (doc.cm ? doc.cm.options.historyEventDelay : 500);
}
+
+ // Called whenever the selection changes, sets the new selection as
+ // the pending selection in the history, and pushes the old pending
+ // selection into the 'done' array when it was significantly
+ // different (in number of selected ranges, emptiness, or time).
+ function addSelectionToHistory(doc, sel, opId, options) {
+ var hist = doc.history, origin = options && options.origin;
+
+ // A new event is started when the previous origin does not match
+ // the current, or the origins don't allow matching. Origins
+ // starting with * are always merged, those starting with + are
+ // merged when similar and close together in time.
+ if (opId == hist.lastSelOp ||
+ (origin && hist.lastSelOrigin == origin &&
+ (hist.lastModTime == hist.lastSelTime && hist.lastOrigin == origin ||
+ selectionEventCanBeMerged(doc, origin, lst(hist.done), sel))))
+ hist.done[hist.done.length - 1] = sel;
+ else
+ pushSelectionToHistory(sel, hist.done);
+
+ // Record merge-tracking state for the next call.
+ hist.lastSelTime = +new Date;
+ hist.lastSelOrigin = origin;
+ hist.lastSelOp = opId;
+ // Selecting normally discards the redo stack unless the caller
+ // opts out via clearRedo: false.
+ if (options && options.clearRedo !== false)
+ clearSelectionEvents(hist.undone);
+ }
+
+ function pushSelectionToHistory(sel, dest) {
+ var top = lst(dest);
+ if (!(top && top.ranges && top.equals(sel)))
+ dest.push(sel);
+ }
+
// Store the marked spans of the lines in [from, to) on a history
// change object, keyed by document id ("spans_<id>"), so they can be
// restored on undo/redo. Only lines that actually carry spans are
// recorded, indexed by their offset within the range.
function attachLocalSpans(doc, change, from, to) {
  var key = "spans_" + doc.id;
  var stored = change[key];
  var n = 0;
  var start = Math.max(doc.first, from);
  var end = Math.min(doc.first + doc.size, to);
  doc.iter(start, end, function(line) {
    if (line.markedSpans) {
      if (!stored) stored = change[key] = {};
      stored[n] = line.markedSpans;
    }
    ++n;
  });
}
+
// When un/re-doing restores text containing marked spans, those whose
// markers were explicitly cleared must not be restored. Returns the
// input array untouched when nothing was cleared, a filtered copy when
// some spans survive, or null when none do (copy-on-write).
function removeClearedSpans(spans) {
  if (!spans) return null;
  var kept = null;
  for (var i = 0; i < spans.length; ++i) {
    if (spans[i].marker.explicitlyCleared) {
      if (!kept) kept = spans.slice(0, i);
    } else if (kept) {
      kept.push(spans[i]);
    }
  }
  if (!kept) return spans;
  return kept.length ? kept : null;
}
+
+ // Retrieve and filter the old marked spans stored in a change event.
+ function getOldSpans(doc, change) {
+ var found = change["spans_" + doc.id];
+ if (!found) return null;
+ for (var i = 0, nw = []; i < change.text.length; ++i)
+ nw.push(removeClearedSpans(found[i]));
+ return nw;
+ }
+
+ // Used both to provide a JSON-safe object in .getHistory, and, when
+ // detaching a document, to split the history in two
+ function copyHistoryArray(events, newGroup, instantiateSel) {
+ for (var i = 0, copy = []; i < events.length; ++i) {
+ var event = events[i];
+ // Selection events are copied wholesale (optionally re-instantiated
+ // as Selection objects).
+ if (event.ranges) {
+ copy.push(instantiateSel ? Selection.prototype.deepCopy.call(event) : event);
+ continue;
+ }
+ var changes = event.changes, newChanges = [];
+ copy.push({changes: newChanges});
+ for (var j = 0; j < changes.length; ++j) {
+ var change = changes[j], m;
+ newChanges.push({from: change.from, to: change.to, text: change.text});
+ // When splitting, move per-document span data ("spans_<id>") for
+ // the documents in newGroup onto the copy and off the original.
+ if (newGroup) for (var prop in change) if (m = prop.match(/^spans_(\d+)$/)) {
+ if (indexOf(newGroup, Number(m[1])) > -1) {
+ lst(newChanges)[prop] = change[prop];
+ delete change[prop];
+ }
+ }
+ }
+ }
+ return copy;
+ }
+
+ // Rebasing/resetting history to deal with externally-sourced changes
+
// Adjust one stored history position for an external change that
// touched lines [from, to] and altered the line count by diff:
// positions below the change shift by diff, positions inside it are
// collapsed to the start of the changed region.
function rebaseHistSelSingle(pos, from, to, diff) {
  if (to < pos.line) {
    pos.line += diff;
    return;
  }
  if (from < pos.line) {
    pos.line = from;
    pos.ch = 0;
  }
}
+
+ // Tries to rebase an array of history events given a change in the
+ // document. If the change touches the same lines as the event, the
+ // event, and everything 'behind' it, is discarded. If the change is
+ // before the event, the event's positions are updated. Uses a
+ // copy-on-write scheme for the positions, to avoid having to
+ // reallocate them all on every rebase, but also avoid problems with
+ // shared position objects being unsafely updated.
+ function rebaseHistArray(array, from, to, diff) {
+ for (var i = 0; i < array.length; ++i) {
+ var sub = array[i], ok = true;
+ // Selection events: deep-copy once, then adjust every range endpoint.
+ if (sub.ranges) {
+ if (!sub.copied) { sub = array[i] = sub.deepCopy(); sub.copied = true; }
+ for (var j = 0; j < sub.ranges.length; j++) {
+ rebaseHistSelSingle(sub.ranges[j].anchor, from, to, diff);
+ rebaseHistSelSingle(sub.ranges[j].head, from, to, diff);
+ }
+ continue;
+ }
+ // Change events: shift positions below the change; any overlap
+ // invalidates this event and everything before it.
+ for (var j = 0; j < sub.changes.length; ++j) {
+ var cur = sub.changes[j];
+ if (to < cur.from.line) {
+ cur.from = Pos(cur.from.line + diff, cur.from.ch);
+ cur.to = Pos(cur.to.line + diff, cur.to.ch);
+ } else if (from <= cur.to.line) {
+ ok = false;
+ break;
+ }
+ }
+ if (!ok) {
+ // Drop events [0, i] and restart the scan from the new front.
+ array.splice(0, i + 1);
+ i = 0;
+ }
+ }
+ }
+
+ function rebaseHist(hist, change) {
+ var from = change.from.line, to = change.to.line, diff = change.text.length - (to - from) - 1;
+ rebaseHistArray(hist.done, from, to, diff);
+ rebaseHistArray(hist.undone, from, to, diff);
+ }
+
+ // EVENT UTILITIES
+
+ // Due to the fact that we still support jurassic IE versions, some
+ // compatibility wrappers are needed.
+
+ var e_preventDefault = CodeMirror.e_preventDefault = function(e) {
+ if (e.preventDefault) e.preventDefault();
+ else e.returnValue = false;
+ };
+ var e_stopPropagation = CodeMirror.e_stopPropagation = function(e) {
+ if (e.stopPropagation) e.stopPropagation();
+ else e.cancelBubble = true;
+ };
+ function e_defaultPrevented(e) {
+ return e.defaultPrevented != null ? e.defaultPrevented : e.returnValue == false;
+ }
+ var e_stop = CodeMirror.e_stop = function(e) {e_preventDefault(e); e_stopPropagation(e);};
+
+ function e_target(e) {return e.target || e.srcElement;}
+ function e_button(e) {
+ var b = e.which;
+ if (b == null) {
+ if (e.button & 1) b = 1;
+ else if (e.button & 2) b = 3;
+ else if (e.button & 4) b = 2;
+ }
+ if (mac && e.ctrlKey && b == 1) b = 3;
+ return b;
+ }
+
+ // EVENT HANDLING
+
+ // Lightweight event framework. on/off also work on DOM nodes,
+ // registering native DOM handlers.
+
+ var on = CodeMirror.on = function(emitter, type, f) {
+ if (emitter.addEventListener)
+ emitter.addEventListener(type, f, false);
+ else if (emitter.attachEvent)
+ emitter.attachEvent("on" + type, f);
+ else {
+ var map = emitter._handlers || (emitter._handlers = {});
+ var arr = map[type] || (map[type] = []);
+ arr.push(f);
+ }
+ };
+
+ var off = CodeMirror.off = function(emitter, type, f) {
+ if (emitter.removeEventListener)
+ emitter.removeEventListener(type, f, false);
+ else if (emitter.detachEvent)
+ emitter.detachEvent("on" + type, f);
+ else {
+ var arr = emitter._handlers && emitter._handlers[type];
+ if (!arr) return;
+ for (var i = 0; i < arr.length; ++i)
+ if (arr[i] == f) { arr.splice(i, 1); break; }
+ }
+ };
+
+ var signal = CodeMirror.signal = function(emitter, type /*, values...*/) {
+ var arr = emitter._handlers && emitter._handlers[type];
+ if (!arr) return;
+ var args = Array.prototype.slice.call(arguments, 2);
+ for (var i = 0; i < arr.length; ++i) arr[i].apply(null, args);
+ };
+
+ var orphanDelayedCallbacks = null;
+
+ // Often, we want to signal events at a point where we are in the
+ // middle of some work, but don't want the handler to start calling
+ // other methods on the editor, which might be in an inconsistent
+ // state or simply not expect any other events to happen.
+ // signalLater looks whether there are any handlers, and schedules
+ // them to be executed when the last operation ends, or, if no
+ // operation is active, when a timeout fires.
+ function signalLater(emitter, type /*, values...*/) {
+ var arr = emitter._handlers && emitter._handlers[type];
+ if (!arr) return;
+ var args = Array.prototype.slice.call(arguments, 2), list;
+ if (operationGroup) {
+ list = operationGroup.delayedCallbacks;
+ } else if (orphanDelayedCallbacks) {
+ list = orphanDelayedCallbacks;
+ } else {
+ list = orphanDelayedCallbacks = [];
+ setTimeout(fireOrphanDelayed, 0);
+ }
+ function bnd(f) {return function(){f.apply(null, args);};};
+ for (var i = 0; i < arr.length; ++i)
+ list.push(bnd(arr[i]));
+ }
+
+ function fireOrphanDelayed() {
+ var delayed = orphanDelayedCallbacks;
+ orphanDelayedCallbacks = null;
+ for (var i = 0; i < delayed.length; ++i) delayed[i]();
+ }
+
+ // The DOM events that CodeMirror handles can be overridden by
+ // registering a (non-DOM) handler on the editor for the event name,
+ // and preventDefault-ing the event in that handler.
+ function signalDOMEvent(cm, e, override) {
+ if (typeof e == "string")
+ e = {type: e, preventDefault: function() { this.defaultPrevented = true; }};
+ signal(cm, override || e.type, cm, e);
+ return e_defaultPrevented(e) || e.codemirrorIgnore;
+ }
+
+ function signalCursorActivity(cm) {
+ var arr = cm._handlers && cm._handlers.cursorActivity;
+ if (!arr) return;
+ var set = cm.curOp.cursorActivityHandlers || (cm.curOp.cursorActivityHandlers = []);
+ for (var i = 0; i < arr.length; ++i) if (indexOf(set, arr[i]) == -1)
+ set.push(arr[i]);
+ }
+
// True when at least one handler is registered on the emitter for the
// given event type.
function hasHandler(emitter, type) {
  var registry = emitter._handlers;
  var handlers = registry && registry[type];
  return handlers && handlers.length > 0;
}
+
+ // Add on and off methods to a constructor's prototype, to make
+ // registering events on such objects more convenient.
+ function eventMixin(ctor) {
+ ctor.prototype.on = function(type, f) {on(this, type, f);};
+ ctor.prototype.off = function(type, f) {off(this, type, f);};
+ }
+
+ // MISC UTILITIES
+
+ // Number of pixels added to scroller and sizer to hide scrollbar
+ var scrollerCutOff = 30;
+
+ // Returned or thrown by various protocols to signal 'I'm not
+ // handling this'.
+ var Pass = CodeMirror.Pass = {toString: function(){return "CodeMirror.Pass";}};
+
+ // Reused option objects for setSelection & friends
+ var sel_dontScroll = {scroll: false}, sel_mouse = {origin: "*mouse"}, sel_move = {origin: "+move"};
+
+ function Delayed() {this.id = null;}
+ Delayed.prototype.set = function(ms, f) {
+ clearTimeout(this.id);
+ this.id = setTimeout(f, ms);
+ };
+
+ // Counts the column offset in a string, taking tabs into account.
+ // Used mostly to find indentation.
+ var countColumn = CodeMirror.countColumn = function(string, end, tabSize, startIndex, startValue) {
+ if (end == null) {
+ end = string.search(/[^\s\u00a0]/);
+ if (end == -1) end = string.length;
+ }
+ for (var i = startIndex || 0, n = startValue || 0;;) {
+ var nextTab = string.indexOf("\t", i);
+ if (nextTab < 0 || nextTab >= end)
+ return n + (end - i);
+ n += nextTab - i;
+ n += tabSize - (n % tabSize);
+ i = nextTab + 1;
+ }
+ };
+
// The inverse of countColumn: find the string offset that corresponds
// to the given visual column, expanding tabs to the next multiple of
// tabSize.
function findColumn(string, goal, tabSize) {
  var pos = 0, col = 0;
  for (;;) {
    var nextTab = string.indexOf("\t", pos);
    if (nextTab == -1) nextTab = string.length;
    var span = nextTab - pos;
    // The goal falls inside this run of plain characters (or the
    // string ends): offset advances one column per character.
    if (nextTab == string.length || col + span >= goal)
      return pos + Math.min(span, goal - col);
    col += span;
    col += tabSize - (col % tabSize);
    pos = nextTab + 1;
    if (col >= goal) return pos;
  }
}
+
// Cache of space-only strings, indexed by length.
var spaceStrs = [""];

// Return a string of n spaces, growing the cache as needed.
function spaceStr(n) {
  if (spaceStrs.length <= n) {
    var longest = spaceStrs[spaceStrs.length - 1];
    while (spaceStrs.length <= n) {
      longest += " ";
      spaceStrs.push(longest);
    }
  }
  return spaceStrs[n];
}

// Last element of an array.
function lst(arr) { return arr[arr.length - 1]; }
+
+ var selectInput = function(node) { node.select(); };
+ if (ios) // Mobile Safari apparently has a bug where select() is broken.
+ selectInput = function(node) { node.selectionStart = 0; node.selectionEnd = node.value.length; };
+ else if (ie) // Suppress mysterious IE10 errors
+ selectInput = function(node) { try { node.select(); } catch(_e) {} };
+
// Position of elt in array, or -1. Replaced by the native
// Array.prototype.indexOf where available.
function indexOf(array, elt) {
  for (var i = 0, len = array.length; i < len; i++)
    if (array[i] == elt) return i;
  return -1;
}
if ([].indexOf) indexOf = function(array, elt) { return array.indexOf(elt); };
// Apply f(value, index) to each element, collecting the results.
// Replaced by the native Array.prototype.map where available.
function map(array, f) {
  var mapped = [];
  for (var i = 0, len = array.length; i < len; ++i)
    mapped[i] = f(array[i], i);
  return mapped;
}
if ([].map) map = function(array, f) { return array.map(f); };
+
// Create an object with the given prototype, using Object.create when
// present and a throwaway constructor otherwise, optionally copying
// own properties from props onto the new instance.
function createObj(base, props) {
  var inst;
  if (Object.create) {
    inst = Object.create(base);
  } else {
    var Ctor = function() {};
    Ctor.prototype = base;
    inst = new Ctor();
  }
  if (props) copyObj(props, inst);
  return inst;
}
+
// Copy the own properties of obj onto target (a fresh object when
// target is falsy). When overwrite is exactly false, properties the
// target already owns are left alone.
function copyObj(obj, target, overwrite) {
  if (!target) target = {};
  for (var prop in obj) {
    if (!obj.hasOwnProperty(prop)) continue;
    if (overwrite === false && target.hasOwnProperty(prop)) continue;
    target[prop] = obj[prop];
  }
  return target;
}
+
// Partially apply f to the trailing arguments, returning a nullary
// function that invokes it with a null receiver.
function bind(f) {
  var boundArgs = Array.prototype.slice.call(arguments, 1);
  return function() { return f.apply(null, boundArgs); };
}
+
+ var nonASCIISingleCaseWordChar = /[\u00df\u0590-\u05f4\u0600-\u06ff\u3040-\u309f\u30a0-\u30ff\u3400-\u4db5\u4e00-\u9fcc\uac00-\ud7af]/;
+ var isWordCharBasic = CodeMirror.isWordChar = function(ch) {
+ return /\w/.test(ch) || ch > "\x80" &&
+ (ch.toUpperCase() != ch.toLowerCase() || nonASCIISingleCaseWordChar.test(ch));
+ };
// Word-character test, optionally refined by a helper regexp (e.g. a
// mode-provided one). A helper mentioning \w extends — rather than
// replaces — the basic test.
function isWordChar(ch, helper) {
  if (!helper) return isWordCharBasic(ch);
  var extendsBasic = helper.source.indexOf("\\w") > -1;
  if (extendsBasic && isWordCharBasic(ch)) return true;
  return helper.test(ch);
}
+
// True when the object has no own property with a truthy value.
function isEmpty(obj) {
  for (var key in obj)
    if (obj.hasOwnProperty(key) && obj[key]) return false;
  return true;
}
+
+ // Extending unicode characters. A series of a non-extending char +
+ // any number of extending chars is treated as a single unit as far
+ // as editing and measuring is concerned. This is not fully correct,
+ // since some scripts/fonts/browsers also treat other configurations
+ // of code points as a group.
+ var extendingChars = /[\u0300-\u036f\u0483-\u0489\u0591-\u05bd\u05bf\u05c1\u05c2\u05c4\u05c5\u05c7\u0610-\u061a\u064b-\u065e\u0670\u06d6-\u06dc\u06de-\u06e4\u06e7\u06e8\u06ea-\u06ed\u0711\u0730-\u074a\u07a6-\u07b0\u07eb-\u07f3\u0816-\u0819\u081b-\u0823\u0825-\u0827\u0829-\u082d\u0900-\u0902\u093c\u0941-\u0948\u094d\u0951-\u0955\u0962\u0963\u0981\u09bc\u09be\u09c1-\u09c4\u09cd\u09d7\u09e2\u09e3\u0a01\u0a02\u0a3c\u0a41\u0a42\u0a47\u0a48\u0a4b-\u0a4d\u0a51\u0a70\u0a71\u0a75\u0a81\u0a82\u0abc\u0ac1-\u0ac5\u0ac7\u0ac8\u0acd\u0ae2\u0ae3\u0b01\u0b3c\u0b3e\u0b3f\u0b41-\u0b44\u0b4d\u0b56\u0b57\u0b62\u0b63\u0b82\u0bbe\u0bc0\u0bcd\u0bd7\u0c3e-\u0c40\u0c46-\u0c48\u0c4a-\u0c4d\u0c55\u0c56\u0c62\u0c63\u0cbc\u0cbf\u0cc2\u0cc6\u0ccc\u0ccd\u0cd5\u0cd6\u0ce2\u0ce3\u0d3e\u0d41-\u0d44\u0d4d\u0d57\u0d62\u0d63\u0dca\u0dcf\u0dd2-\u0dd4\u0dd6\u0ddf\u0e31\u0e34-\u0e3a\u0e47-\u0e4e\u0eb1\u0eb4-\u0eb9\u0ebb\u0ebc\u0ec8-\u0ecd\u0f18\u0f19\u0f35\u0f37\u0f39\u0f71-\u0f7e\u0f80-\u0f84\u0f86\u0f87\u0f90-\u0f97\u0f99-\u0fbc\u0fc6\u102d-\u1030\u1032-\u1037\u1039\u103a\u103d\u103e\u1058\u1059\u105e-\u1060\u1071-\u1074\u1082\u1085\u1086\u108d\u109d\u135f\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17b7-\u17bd\u17c6\u17c9-\u17d3\u17dd\u180b-\u180d\u18a9\u1920-\u1922\u1927\u1928\u1932\u1939-\u193b\u1a17\u1a18\u1a56\u1a58-\u1a5e\u1a60\u1a62\u1a65-\u1a6c\u1a73-\u1a7c\u1a7f\u1b00-\u1b03\u1b34\u1b36-\u1b3a\u1b3c\u1b42\u1b6b-\u1b73\u1b80\u1b81\u1ba2-\u1ba5\u1ba8\u1ba9\u1c2c-\u1c33\u1c36\u1c37\u1cd0-\u1cd2\u1cd4-\u1ce0\u1ce2-\u1ce8\u1ced\u1dc0-\u1de6\u1dfd-\u1dff\u200c\u200d\u20d0-\u20f0\u2cef-\u2cf1\u2de0-\u2dff\u302a-\u302f\u3099\u309a\ua66f-\ua672\ua67c\ua67d\ua6f0\ua6f1\ua802\ua806\ua80b\ua825\ua826\ua8c4\ua8e0-\ua8f1\ua926-\ua92d\ua947-\ua951\ua980-\ua982\ua9b3\ua9b6-\ua9b9\ua9bc\uaa29-\uaa2e\uaa31\uaa32\uaa35\uaa36\uaa43\uaa4c\uaab0\uaab2-\uaab4\uaab7\uaab8\uaabe\uaabf\uaac1\uabe5\uabe8\uabed\udc00-\udfff\ufb1e\ufe00-\ufe0f\ufe20-\ufe26\uff9e\uff9f]/;
+ function isExtendingChar(ch) { return ch.charCodeAt(0) >= 768 && extendingChars.test(ch); }
+
+ // DOM UTILITIES
+
+ function elt(tag, content, className, style) {
+ var e = document.createElement(tag);
+ if (className) e.className = className;
+ if (style) e.style.cssText = style;
+ if (typeof content == "string") e.appendChild(document.createTextNode(content));
+ else if (content) for (var i = 0; i < content.length; ++i) e.appendChild(content[i]);
+ return e;
+ }
+
+ var range;
+ if (document.createRange) range = function(node, start, end) {
+ var r = document.createRange();
+ r.setEnd(node, end);
+ r.setStart(node, start);
+ return r;
+ };
+ else range = function(node, start, end) {
+ var r = document.body.createTextRange();
+ try { r.moveToElementText(node.parentNode); }
+ catch(e) { return r; }
+ r.collapse(true);
+ r.moveEnd("character", end);
+ r.moveStart("character", start);
+ return r;
+ };
+
+ function removeChildren(e) {
+ for (var count = e.childNodes.length; count > 0; --count)
+ e.removeChild(e.firstChild);
+ return e;
+ }
+
+ function removeChildrenAndAdd(parent, e) {
+ return removeChildren(parent).appendChild(e);
+ }
+
// Whether child is a descendant of parent. Uses the node's native
// contains() when available, otherwise walks parentNode links.
function contains(parent, child) {
  if (parent.contains) return parent.contains(child);
  do {
    child = child.parentNode;
    if (child == parent) return true;
  } while (child);
}
+
+ function activeElt() { return document.activeElement; }
+ // Older versions of IE throw an unspecified error when touching
+ // document.activeElement in some cases (during loading, in iframe)
+ if (ie && ie_version < 11) activeElt = function() {
+ try { return document.activeElement; }
+ catch(e) { return document.body; }
+ };
+
// Build a regexp matching cls as a whole, whitespace-delimited CSS
// class token (plus any trailing whitespace, so rmClass can splice
// the match out cleanly).
function classTest(cls) {
  return new RegExp("(^|\\s)" + cls + "(?:$|\\s)\\s*");
}
+ var rmClass = CodeMirror.rmClass = function(node, cls) {
+ var current = node.className;
+ var match = classTest(cls).exec(current);
+ if (match) {
+ var after = current.slice(match.index + match[0].length);
+ node.className = current.slice(0, match.index) + (after ? match[1] + after : "");
+ }
+ };
+ var addClass = CodeMirror.addClass = function(node, cls) {
+ var current = node.className;
+ if (!classTest(cls).test(current)) node.className += (current ? " " : "") + cls;
+ };
+ function joinClasses(a, b) {
+ var as = a.split(" ");
+ for (var i = 0; i < as.length; i++)
+ if (as[i] && !classTest(as[i]).test(b)) b += " " + as[i];
+ return b;
+ }
+
+ // WINDOW-WIDE EVENTS
+
+ // These must be handled carefully, because naively registering a
+ // handler for each editor will cause the editors to never be
+ // garbage collected.
+
+ function forEachCodeMirror(f) {
+ if (!document.body.getElementsByClassName) return;
+ var byClass = document.body.getElementsByClassName("CodeMirror");
+ for (var i = 0; i < byClass.length; i++) {
+ var cm = byClass[i].CodeMirror;
+ if (cm) f(cm);
+ }
+ }
+
+ var globalsRegistered = false;
+ function ensureGlobalHandlers() {
+ if (globalsRegistered) return;
+ registerGlobalHandlers();
+ globalsRegistered = true;
+ }
+ function registerGlobalHandlers() {
+ // When the window resizes, we need to refresh active editors.
+ var resizeTimer;
+ on(window, "resize", function() {
+ if (resizeTimer == null) resizeTimer = setTimeout(function() {
+ resizeTimer = null;
+ knownScrollbarWidth = null;
+ forEachCodeMirror(onResize);
+ }, 100);
+ });
+ // When the window loses focus, we want to show the editor as blurred
+ on(window, "blur", function() {
+ forEachCodeMirror(onBlur);
+ });
+ }
+
+ // FEATURE DETECTION
+
+ // Detect drag-and-drop
+ var dragAndDrop = function() {
+ // There is *some* kind of drag-and-drop support in IE6-8, but I
+ // couldn't get it to work yet.
+ if (ie && ie_version < 9) return false;
+ var div = elt('div');
+ return "draggable" in div || "dragDrop" in div;
+ }();
+
+ var knownScrollbarWidth;
+ function scrollbarWidth(measure) {
+ if (knownScrollbarWidth != null) return knownScrollbarWidth;
+ var test = elt("div", null, null, "width: 50px; height: 50px; overflow-x: scroll");
+ removeChildrenAndAdd(measure, test);
+ if (test.offsetWidth)
+ knownScrollbarWidth = test.offsetHeight - test.clientHeight;
+ return knownScrollbarWidth || 0;
+ }
+
+ var zwspSupported;
+ function zeroWidthElement(measure) {
+ if (zwspSupported == null) {
+ var test = elt("span", "\u200b");
+ removeChildrenAndAdd(measure, elt("span", [test, document.createTextNode("x")]));
+ if (measure.firstChild.offsetHeight != 0)
+ zwspSupported = test.offsetWidth <= 1 && test.offsetHeight > 2 && !(ie && ie_version < 8);
+ }
+ if (zwspSupported) return elt("span", "\u200b");
+ else return elt("span", "\u00a0", null, "display: inline-block; width: 1px; margin-right: -1px");
+ }
+
+ // Feature-detect IE's crummy client rect reporting for bidi text
+ var badBidiRects;
+ function hasBadBidiRects(measure) {
+ if (badBidiRects != null) return badBidiRects;
+ var txt = removeChildrenAndAdd(measure, document.createTextNode("A\u062eA"));
+ var r0 = range(txt, 0, 1).getBoundingClientRect();
+ if (!r0 || r0.left == r0.right) return false; // Safari returns null in some cases (#2780)
+ var r1 = range(txt, 1, 2).getBoundingClientRect();
+ return badBidiRects = (r1.right - r0.right < 3);
+ }
+
+ // See if "".split is the broken IE version, if so, provide an
+ // alternative way to split lines.
+ var splitLines = CodeMirror.splitLines = "\n\nb".split(/\n/).length != 3 ? function(string) {
+ var pos = 0, result = [], l = string.length;
+ while (pos <= l) {
+ var nl = string.indexOf("\n", pos);
+ if (nl == -1) nl = string.length;
+ var line = string.slice(pos, string.charAt(nl - 1) == "\r" ? nl - 1 : nl);
+ var rt = line.indexOf("\r");
+ if (rt != -1) {
+ result.push(line.slice(0, rt));
+ pos += rt + 1;
+ } else {
+ result.push(line);
+ pos = nl + 1;
+ }
+ }
+ return result;
+ } : function(string){return string.split(/\r\n?|\n/);};
+
+ var hasSelection = window.getSelection ? function(te) {
+ try { return te.selectionStart != te.selectionEnd; }
+ catch(e) { return false; }
+ } : function(te) {
+ try {var range = te.ownerDocument.selection.createRange();}
+ catch(e) {}
+ if (!range || range.parentElement() != te) return false;
+ return range.compareEndPoints("StartToEnd", range) != 0;
+ };
+
+ var hasCopyEvent = (function() {
+ var e = elt("div");
+ if ("oncopy" in e) return true;
+ e.setAttribute("oncopy", "return;");
+ return typeof e.oncopy == "function";
+ })();
+
+ var badZoomedRects = null;
+ function hasBadZoomedRects(measure) {
+ if (badZoomedRects != null) return badZoomedRects;
+ var node = removeChildrenAndAdd(measure, elt("span", "x"));
+ var normal = node.getBoundingClientRect();
+ var fromRange = range(node, 0, 1).getBoundingClientRect();
+ return badZoomedRects = Math.abs(normal.left - fromRange.left) > 1;
+ }
+
+ // KEY NAMES
+
  // Map from keyCode to key name. Filled in further by the IIFE below
  // for digit, letter, and function keys. "Mod" (91-93) is the
  // platform command/meta key; codes 63232+ presumably come from legacy
  // mac/WebKit keyboards — verify before relying on them.
  var keyNames = {3: "Enter", 8: "Backspace", 9: "Tab", 13: "Enter", 16: "Shift", 17: "Ctrl", 18: "Alt",
                  19: "Pause", 20: "CapsLock", 27: "Esc", 32: "Space", 33: "PageUp", 34: "PageDown", 35: "End",
                  36: "Home", 37: "Left", 38: "Up", 39: "Right", 40: "Down", 44: "PrintScrn", 45: "Insert",
                  46: "Delete", 59: ";", 61: "=", 91: "Mod", 92: "Mod", 93: "Mod", 107: "=", 109: "-", 127: "Delete",
                  173: "-", 186: ";", 187: "=", 188: ",", 189: "-", 190: ".", 191: "/", 192: "`", 219: "[", 220: "\\",
                  221: "]", 222: "'", 63232: "Up", 63233: "Down", 63234: "Left", 63235: "Right", 63272: "Delete",
                  63273: "Home", 63275: "End", 63276: "PageUp", 63277: "PageDown", 63302: "Insert"};
  CodeMirror.keyNames = keyNames;
  (function() {
    // Number keys
    for (var i = 0; i < 10; i++) keyNames[i + 48] = keyNames[i + 96] = String(i);
    // Alphabetic keys
    for (var i = 65; i <= 90; i++) keyNames[i] = String.fromCharCode(i);
    // Function keys
    for (var i = 1; i <= 12; i++) keyNames[i + 111] = keyNames[i + 63235] = "F" + i;
  })();
+
+ // BIDI HELPERS
+
+ function iterateBidiSections(order, from, to, f) {
+ if (!order) return f(from, to, "ltr");
+ var found = false;
+ for (var i = 0; i < order.length; ++i) {
+ var part = order[i];
+ if (part.from < to && part.to > from || from == to && part.to == from) {
+ f(Math.max(part.from, from), Math.min(part.to, to), part.level == 1 ? "rtl" : "ltr");
+ found = true;
+ }
+ }
+ if (!found) f(from, to, "ltr");
+ }
+
+ function bidiLeft(part) { return part.level % 2 ? part.to : part.from; }
+ function bidiRight(part) { return part.level % 2 ? part.from : part.to; }
+
+ function lineLeft(line) { var order = getOrder(line); return order ? bidiLeft(order[0]) : 0; }
+ function lineRight(line) {
+ var order = getOrder(line);
+ if (!order) return line.text.length;
+ return bidiRight(lst(order));
+ }
+
  // Position of the visual start of line number `lineN`, jumping to the
  // start of the visual line when the line is part of a collapsed range.
  function lineStart(cm, lineN) {
    var line = getLine(cm.doc, lineN);
    var visual = visualLine(line);
    if (visual != line) lineN = lineNo(visual);
    var order = getOrder(visual);
    // On an RTL-dominant line the visual start is the logical right end.
    var ch = !order ? 0 : order[0].level % 2 ? lineRight(visual) : lineLeft(visual);
    return Pos(lineN, ch);
  }
  // Position of the visual end of line number `lineN`, following
  // collapsed spans through to the last line they merge in.
  function lineEnd(cm, lineN) {
    var merged, line = getLine(cm.doc, lineN);
    while (merged = collapsedSpanAtEnd(line)) {
      line = merged.find(1, true).line;
      lineN = null;  // number must be recomputed after following spans
    }
    var order = getOrder(line);
    // On an RTL-dominant line the visual end is the logical left end.
    var ch = !order ? line.text.length : order[0].level % 2 ? lineLeft(line) : lineRight(line);
    return Pos(lineN == null ? lineNo(line) : lineN, ch);
  }
  // "Smart home": on LTR lines, toggle between the first non-whitespace
  // character and column 0; on RTL-dominant lines, plain visual start.
  function lineStartSmart(cm, pos) {
    var start = lineStart(cm, pos.line);
    var line = getLine(cm.doc, start.line);
    var order = getOrder(line);
    if (!order || order[0].level == 0) {
      var firstNonWS = Math.max(0, line.text.search(/\S/));
      // Cursor already at or inside the leading whitespace (but not at
      // column 0)? Then the next press goes to column 0.
      var inWS = pos.line == start.line && pos.ch <= firstNonWS && pos.ch;
      return Pos(start.line, inWS ? 0 : firstNonWS);
    }
    return start;
  }
+
+ function compareBidiLevel(order, a, b) {
+ var linedir = order[0].level;
+ if (a == linedir) return true;
+ if (b == linedir) return false;
+ return a < b;
+ }
  // Secondary output of getBidiPartAt: when `pos` sits on the boundary
  // between two parts, this holds the index of the part that was *not*
  // returned (null otherwise).
  var bidiOther;
  // Index of the bidi part in `order` that owns position `pos`.
  function getBidiPartAt(order, pos) {
    bidiOther = null;
    for (var i = 0, found; i < order.length; ++i) {
      var cur = order[i];
      // Strictly inside a part: unambiguous.
      if (cur.from < pos && cur.to > pos) return i;
      if ((cur.from == pos || cur.to == pos)) {
        if (found == null) {
          found = i;
        } else if (compareBidiLevel(order, cur.level, order[found].level)) {
          // Two parts touch at pos: prefer the one whose level wins and
          // remember the other (when non-empty) as the secondary part.
          if (cur.from != cur.to) bidiOther = found;
          return i;
        } else {
          if (cur.from != cur.to) bidiOther = i;
          return found;
        }
      }
    }
    return found;
  }
+
+ function moveInLine(line, pos, dir, byUnit) {
+ if (!byUnit) return pos + dir;
+ do pos += dir;
+ while (pos > 0 && isExtendingChar(line.text.charAt(pos)));
+ return pos;
+ }
+
  // This is needed in order to move 'visually' through bi-directional
  // text -- i.e., pressing left should make the cursor go left, even
  // when in RTL text. The tricky part is the 'jumps', where RTL and
  // LTR text touch each other. This often requires the cursor offset
  // to move more than one unit, in order to visually move one unit.
  function moveVisually(line, start, dir, byUnit) {
    var bidi = getOrder(line);
    if (!bidi) return moveLogically(line, start, dir, byUnit);
    var pos = getBidiPartAt(bidi, start), part = bidi[pos];
    // Inside an RTL part, a visual step maps to the opposite logical step.
    var target = moveInLine(line, start, part.level % 2 ? -dir : dir, byUnit);

    for (;;) {
      if (target > part.from && target < part.to) return target;
      if (target == part.from || target == part.to) {
        // Landed exactly on a part boundary: stay if this part still
        // claims the position, otherwise jump into the adjacent part.
        if (getBidiPartAt(bidi, target) == pos) return target;
        part = bidi[pos += dir];
        return (dir > 0) == part.level % 2 ? part.to : part.from;
      } else {
        // Overshot past the current part: enter the neighboring part at
        // whichever edge is visually adjacent (depends on its direction).
        part = bidi[pos += dir];
        if (!part) return null;  // moved past the first/last part
        if ((dir > 0) == part.level % 2)
          target = moveInLine(line, part.to, -1, byUnit);
        else
          target = moveInLine(line, part.from, 1, byUnit);
      }
    }
  }
+
+ function moveLogically(line, start, dir, byUnit) {
+ var target = start + dir;
+ if (byUnit) while (target > 0 && isExtendingChar(line.text.charAt(target))) target += dir;
+ return target < 0 || target > line.text.length ? null : target;
+ }
+
+ // Bidirectional ordering algorithm
+ // See http://unicode.org/reports/tr9/tr9-13.html for the algorithm
+ // that this (partially) implements.
+
+ // One-char codes used for character types:
+ // L (L): Left-to-Right
+ // R (R): Right-to-Left
+ // r (AL): Right-to-Left Arabic
+ // 1 (EN): European Number
+ // + (ES): European Number Separator
+ // % (ET): European Number Terminator
+ // n (AN): Arabic Number
+ // , (CS): Common Number Separator
+ // m (NSM): Non-Spacing Mark
+ // b (BN): Boundary Neutral
+ // s (B): Paragraph Separator
+ // t (S): Segment Separator
+ // w (WS): Whitespace
+ // N (ON): Other Neutrals
+
  // Returns false if characters are ordered as they appear
  // (left-to-right), or an array of sections ({from, to, level}
  // objects) in the order in which they occur visually.
  var bidiOrdering = (function() {
    // Character types for codepoints 0 to 0xff
    var lowTypes = "bbbbbbbbbtstwsbbbbbbbbbbbbbbssstwNN%%%NNNNNN,N,N1111111111NNNNNNNLLLLLLLLLLLLLLLLLLLLLLLLLLNNNNNNLLLLLLLLLLLLLLLLLLLLLLLLLLNNNNbbbbbbsbbbbbbbbbbbbbbbbbbbbbbbbbb,N%%%%NNNNLNNNNN%%11NLNNN1LNNNNNLLLLLLLLLLLLLLLLLLLLLLLNLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLN";
    // Character types for codepoints 0x600 to 0x6ff
    var arabicTypes = "rrrrrrrrrrrr,rNNmmmmmmrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrmmmmmmmmmmmmmmrrrrrrrnnnnnnnnnn%nnrrrmrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrmmmmmmmmmmmmmmmmmmmNmmmm";
    // Map a codepoint to its one-char bidi category (see legend above);
    // codepoints outside the tables get hardcoded classes per range.
    function charType(code) {
      if (code <= 0xf7) return lowTypes.charAt(code);
      else if (0x590 <= code && code <= 0x5f4) return "R";
      else if (0x600 <= code && code <= 0x6ed) return arabicTypes.charAt(code - 0x600);
      else if (0x6ee <= code && code <= 0x8ac) return "r";
      else if (0x2000 <= code && code <= 0x200b) return "w";
      else if (code == 0x200c) return "b";
      else return "L";
    }

    // Quick test for any character that could trigger RTL ordering.
    var bidiRE = /[\u0590-\u05f4\u0600-\u06ff\u0700-\u08ac]/;
    var isNeutral = /[stwN]/, isStrong = /[LRr]/, countsAsLeft = /[Lb1n]/, countsAsNum = /[1n]/;
    // Browsers seem to always treat the boundaries of block elements as being L.
    var outerType = "L";

    // A run of characters at a single bidi level, covering [from, to).
    function BidiSpan(level, from, to) {
      this.level = level;
      this.from = from; this.to = to;
    }

    // NOTE(review): returns false (not null) for strings with no
    // RTL-capable characters.
    return function(str) {
      if (!bidiRE.test(str)) return false;
      var len = str.length, types = [];
      for (var i = 0, type; i < len; ++i)
        types.push(type = charType(str.charCodeAt(i)));

      // W1. Examine each non-spacing mark (NSM) in the level run, and
      // change the type of the NSM to the type of the previous
      // character. If the NSM is at the start of the level run, it will
      // get the type of sor.
      for (var i = 0, prev = outerType; i < len; ++i) {
        var type = types[i];
        if (type == "m") types[i] = prev;
        else prev = type;
      }

      // W2. Search backwards from each instance of a European number
      // until the first strong type (R, L, AL, or sor) is found. If an
      // AL is found, change the type of the European number to Arabic
      // number.
      // W3. Change all ALs to R.
      for (var i = 0, cur = outerType; i < len; ++i) {
        var type = types[i];
        if (type == "1" && cur == "r") types[i] = "n";
        else if (isStrong.test(type)) { cur = type; if (type == "r") types[i] = "R"; }
      }

      // W4. A single European separator between two European numbers
      // changes to a European number. A single common separator between
      // two numbers of the same type changes to that type.
      for (var i = 1, prev = types[0]; i < len - 1; ++i) {
        var type = types[i];
        if (type == "+" && prev == "1" && types[i+1] == "1") types[i] = "1";
        else if (type == "," && prev == types[i+1] &&
                 (prev == "1" || prev == "n")) types[i] = prev;
        prev = type;
      }

      // W5. A sequence of European terminators adjacent to European
      // numbers changes to all European numbers.
      // W6. Otherwise, separators and terminators change to Other
      // Neutral.
      for (var i = 0; i < len; ++i) {
        var type = types[i];
        if (type == ",") types[i] = "N";
        else if (type == "%") {
          for (var end = i + 1; end < len && types[end] == "%"; ++end) {}
          // NOTE(review): charType never produces "!", so the first
          // clause below can never be true; upstream CodeMirror has the
          // same token — possibly intended to be "1". Left as-is.
          var replace = (i && types[i-1] == "!") || (end < len && types[end] == "1") ? "1" : "N";
          for (var j = i; j < end; ++j) types[j] = replace;
          i = end - 1;
        }
      }

      // W7. Search backwards from each instance of a European number
      // until the first strong type (R, L, or sor) is found. If an L is
      // found, then change the type of the European number to L.
      for (var i = 0, cur = outerType; i < len; ++i) {
        var type = types[i];
        if (cur == "L" && type == "1") types[i] = "L";
        else if (isStrong.test(type)) cur = type;
      }

      // N1. A sequence of neutrals takes the direction of the
      // surrounding strong text if the text on both sides has the same
      // direction. European and Arabic numbers act as if they were R in
      // terms of their influence on neutrals. Start-of-level-run (sor)
      // and end-of-level-run (eor) are used at level run boundaries.
      // N2. Any remaining neutrals take the embedding direction.
      for (var i = 0; i < len; ++i) {
        if (isNeutral.test(types[i])) {
          for (var end = i + 1; end < len && isNeutral.test(types[end]); ++end) {}
          var before = (i ? types[i-1] : outerType) == "L";
          var after = (end < len ? types[end] : outerType) == "L";
          var replace = before || after ? "L" : "R";
          for (var j = i; j < end; ++j) types[j] = replace;
          i = end - 1;
        }
      }

      // Here we depart from the documented algorithm, in order to avoid
      // building up an actual levels array. Since there are only three
      // levels (0, 1, 2) in an implementation that doesn't take
      // explicit embedding into account, we can build up the order on
      // the fly, without following the level-based algorithm.
      var order = [], m;
      for (var i = 0; i < len;) {
        if (countsAsLeft.test(types[i])) {
          var start = i;
          for (++i; i < len && countsAsLeft.test(types[i]); ++i) {}
          order.push(new BidiSpan(0, start, i));
        } else {
          // RTL stretch: runs of numbers inside it become level-2 spans
          // spliced in before the enclosing level-1 span(s).
          var pos = i, at = order.length;
          for (++i; i < len && types[i] != "L"; ++i) {}
          for (var j = pos; j < i;) {
            if (countsAsNum.test(types[j])) {
              if (pos < j) order.splice(at, 0, new BidiSpan(1, pos, j));
              var nstart = j;
              for (++j; j < i && countsAsNum.test(types[j]); ++j) {}
              order.splice(at, 0, new BidiSpan(2, nstart, j));
              pos = j;
            } else ++j;
          }
          if (pos < i) order.splice(at, 0, new BidiSpan(1, pos, i));
        }
      }
      // Leading/trailing whitespace on an RTL-leading line stays LTR.
      if (order[0].level == 1 && (m = str.match(/^\s+/))) {
        order[0].from = m[0].length;
        order.unshift(new BidiSpan(0, 0, m[0].length));
      }
      if (lst(order).level == 1 && (m = str.match(/\s+$/))) {
        lst(order).to -= m[0].length;
        order.push(new BidiSpan(0, len - m[0].length, len));
      }
      // Keep the first and last visual span at the same level so the
      // cursor has a home at both visual ends of the line.
      if (order[0].level != lst(order).level)
        order.push(new BidiSpan(order[0].level, len, len));

      return order;
    };
  })();
+
+ // THE END
+
+ CodeMirror.version = "4.8.0";
+
+ return CodeMirror;
+});
diff --git a/chromium/tools/win/sizeviewer/favicon.png b/chromium/tools/win/sizeviewer/favicon.png
new file mode 100644
index 00000000000..fe00fa050a8
--- /dev/null
+++ b/chromium/tools/win/sizeviewer/favicon.png
Binary files differ
diff --git a/chromium/tools/win/sizeviewer/main.js b/chromium/tools/win/sizeviewer/main.js
new file mode 100644
index 00000000000..b716319a734
--- /dev/null
+++ b/chromium/tools/win/sizeviewer/main.js
@@ -0,0 +1,120 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
// Kick off the Google Charts loader and render once the API is ready.
google.load("visualization", "1", {packages:["treemap"]});
google.setOnLoadCallback(drawChart);
// Build the size treemap from the generated g_* globals (injected into
// the report by sizeviewer.py) and wire node selection up to a
// CodeMirror source view annotated with per-line byte sizes.
// NOTE(review): tree/editor/symlist are assigned without `var`, so they
// become globals (the template's "Go up" button relies on `tree`).
function drawChart() {
  var data = google.visualization.arrayToDataTable(g_raw_data);

  tree = new google.visualization.TreeMap(
      document.getElementById('chart_div'));

  tree.draw(data, {
    minColor: '#faa',
    midColor: '#f77',
    maxColor: '#f44',
    headerHeight: 20,
    fontColor: 'black',
    showScale: true,
    minColorValue: 0,
    maxColorValue: g_maxval,
    generateTooltip: tooltip
  });

  // Update from 'Loading'.
  document.getElementById('title').innerText = g_dllname;

  // Set favicon.
  var doc_head = document.getElementsByTagName('head')[0];
  var new_link = document.createElement('link');
  new_link.rel = 'shortcut icon';
  new_link.href = 'data:image/png;base64,'+g_favicon;
  doc_head.appendChild(new_link);

  // Per-line data ({line: [size, [symbol indices]]}) for the file
  // currently shown in the editor; read by weightGetter below.
  var cur_line_sizes = null;
  // On selecting a leaf (file) node, load its source into the editor
  // and shade/annotate the heaviest lines.
  function nodeSelect() {
    symlist.setValue('');
    var selected = tree.getSelection();
    if (selected.length > 0) {
      var filename = data.getValue(selected[0].row, 0);
      var size = data.getValue(selected[0].row, 2);
      if (size >= 0) {
        // Is a leaf. (Interior nodes were flattened with size -1.)
        cur_line_sizes = g_line_data[filename];
        var body = g_file_contents[filename];
        editor.setValue(body);
        var maximum_size = 0;
        for (var line in cur_line_sizes) {
          maximum_size = Math.max(maximum_size, cur_line_sizes[line][0]);
        }
        for (var line in cur_line_sizes) {
          var symbol_indices = cur_line_sizes[line][1];
          var symbols = [];
          for (var i = 0; i < symbol_indices.length; ++i) {
            symbols.push(g_symbol_list[symbol_indices[i]]);
          }
          var size = cur_line_sizes[line][0];
          // Zero based lines.
          var line_num = parseInt(line, 10) - 1;
          // Shade the gutter by how close this line is to the file max.
          if (size >= maximum_size * 0.9)
            editor.addLineClass(line_num, 'gutter', 'linebg-top10');
          else if (size >= maximum_size * 0.75)
            editor.addLineClass(line_num, 'gutter', 'linebg-top25');
          else if (size >= maximum_size * 0.5)
            editor.addLineClass(line_num, 'gutter', 'linebg-top50');
          // Called immediately each iteration so the event handler
          // captures this iteration's line/symbols, not the loop's
          // final values.
          function addTag() {
            var line_num = parseInt(line, 10);
            var symbols_tooltip = symbols.join('\n');
            var num_syms = symbols.length;
            // markText wants 0-based lines.
            var mark = editor.markText({line: line_num - 1, ch: 0},
                                       {line: line_num, ch: 0},
                                       { className: 'symbol-tag' });
            CodeMirror.on(mark, 'beforeCursorEnter', function(e) {
              symlist.setValue(num_syms +
                               ' symbol(s) contributing to line ' +
                               line_num + ':\n' +
                               symbols_tooltip);
            });
          }
          addTag();
        }
      }
    }
  }
  google.visualization.events.addListener(tree, 'select', nodeSelect);

  // Read-only source pane; the gutter shows byte weights per line.
  editor = CodeMirror.fromTextArea(
      document.getElementById('source_view'), {
        readOnly: "nocursor",
        mode: { name: 'text/x-c++src' },
        lineNumbers: true,
        lineNumberFormatter: weightGetter
      });
  editor.setSize(850, 600);

  // Read-only pane listing the symbols contributing to a clicked line.
  symlist = CodeMirror.fromTextArea(
      document.getElementById('symlist_view'), {
        readOnly: "nocursor",
        mode: { name: 'none' },
        lineNumbers: false
      });
  symlist.setSize(850, 150);

  // HTML for the hover tooltip over a treemap cell.
  // NOTE(review): the opening <div> is never closed — appears to rely
  // on the charts tooltip layer tolerating this; verify.
  function tooltip(row, size, value) {
    return '<div style="background:#fd9;' +
        ' padding:10px; border-style:solid"><b>' +
        data.getValue(row, 0) + '</b><br>' +
        data.getColumnLabel(2) +
        ' (total value of this cell and its children): ' + size +
        '<br>';
  }

  // Gutter formatter: prefix the line number with its byte count when
  // the current file has size data for that line.
  function weightGetter(line) {
    if (cur_line_sizes && cur_line_sizes.hasOwnProperty('' + line)) {
      return cur_line_sizes['' + line][0] + ' bytes ' + line;
    }
    return line;
  }
}
diff --git a/chromium/tools/win/sizeviewer/sizeviewer.py b/chromium/tools/win/sizeviewer/sizeviewer.py
new file mode 100644
index 00000000000..9fc4c8b69eb
--- /dev/null
+++ b/chromium/tools/win/sizeviewer/sizeviewer.py
@@ -0,0 +1,199 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import base64
+import codecs
+import json
+import os
+import string
+import subprocess
+import sys
+
+
# Directory containing this script; assets (template.html, *.js,
# favicon.png) are loaded relative to it.
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
+
+
def Run(*args):
  """Run a command, capturing its combined stdout/stderr.

  Args:
    *args: the command and its arguments, one string each.

  Returns:
    The captured output (previously discarded; now returned so callers
    can inspect it).

  Raises:
    SystemExit: with the captured output if the command fails.
  """
  p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
  # stderr is redirected into stdout above, so the second element of the
  # communicate() tuple is always None — don't pretend to capture it.
  out, _ = p.communicate()
  if p.returncode != 0:
    raise SystemExit(out)
  return out
+
+
def FindNode(node, component):
  """Return the child of |node| whose name is |component|, or None."""
  matches = (c for c in node['children'] if c['name'] == component)
  return next(matches, None)
+
+
def InsertIntoTree(tree, source_name, size):
  """Insert |source_name| (a 'c:\\dir\\file.cc' style path) into the tree.

  The first 3 characters (drive prefix, e.g. 'c:\\') are stripped and the
  remaining backslash-separated components become nested nodes under
  |tree|. The leaf node gets a 'size' entry; interior nodes get a
  'children' list. Existing nodes are reused.

  Args:
    tree: root node, a dict with a 'children' list.
    source_name: full source path as reported by code_tally.
    size: total byte size attributed to this file.
  """
  components = source_name[3:].split('\\')
  node = tree
  for index, component in enumerate(components):
    data = FindNode(node, component)
    if not data:
      # Fix: the original wrote {'name': source_name, 'name': component},
      # a duplicate key whose first entry was silently discarded.
      data = {'name': component}
      if index == len(components) - 1:
        data['size'] = size
      else:
        data['children'] = []
      node['children'].append(data)
    node = data
+
+
def FlattenTree(tree):
  """Flatten the nested size tree into treemap rows.

  Returns a list of [path, parent, size, value] rows preceded by a
  header row. Interior nodes get -1 for both numeric columns; leaves
  carry their byte size in both.
  """
  rows = [['Path', 'Parent', 'Size', 'Value']]
  def Walk(node, parent_path):
    label = node['name']
    # Build a slash-joined path, but don't prefix children of the root.
    if parent_path and parent_path != '/':
      label = parent_path + '/' + label
    if 'children' not in node:
      rows.append([label, parent_path, node['size'], node['size']])
      return
    rows.append([label, parent_path, -1, -1])
    for child in node['children']:
      Walk(child, label)
  Walk(tree, '')
  return rows
+
+
def GetAsset(filename):
  """Return the raw bytes of |filename| located next to this script."""
  path = os.path.join(BASE_DIR, filename)
  with open(path, 'rb') as asset_file:
    return asset_file.read()
+
+
def AppendAsScriptBlock(f, value, var=None):
  """Write |value| to |f| wrapped in a <script> tag.

  When |var| is given (and truthy), the value is emitted as an
  assignment to that global JS variable; otherwise it is written
  verbatim inside the tag.
  """
  f.write('<script type="text/javascript">\n')
  if var:
    f.write('var %s = %s;\n' % (var, value))
  else:
    f.write(value)
  f.write('</script>\n')
+
+
+def main():
+ jsons = []
+ if len(sys.argv) > 1:
+ dlls = sys.argv[1:]
+ else:
+ out_dir = os.path.join(BASE_DIR, '..', '..', '..', 'out', 'Release')
+ dlls = [os.path.normpath(os.path.join(out_dir, dll))
+ for dll in ('chrome.dll', 'chrome_child.dll')]
+ for dll_path in dlls:
+ if os.path.exists(dll_path):
+ print 'Tallying %s...' % dll_path
+ json_path = dll_path + '.json'
+ Run(os.path.join(BASE_DIR, '..', '..', '..', 'third_party', 'syzygy',
+ 'binaries', 'exe', 'experimental', 'code_tally.exe'),
+ '--input-image=' + dll_path,
+ '--input-pdb=' + dll_path + '.pdb',
+ '--output-file=' + json_path)
+ jsons.append(json_path)
+ if not jsons:
+ print 'Couldn\'t find dlls.'
+ print 'Pass fully qualified dll name(s) if you want to use something other '
+ print 'than out\\Release\\chrome.dll and chrome_child.dll.'
+ return 1
+
+ # Munge the code_tally json format into an easier-to-view format.
+ for json_name in jsons:
+ with open(json_name, 'r') as jsonf:
+ all_data = json.load(jsonf)
+ html_path = os.path.splitext(json_name)[0] + '.html'
+ print 'Generating %s... (standlone)' % html_path
+ by_source = {}
+ symbols_index = {}
+ symbols = []
+ for obj_name, obj_data in all_data['objects'].iteritems():
+ for symbol, symbol_data in obj_data.iteritems():
+ size = int(symbol_data['size'])
+ # Sometimes there's symbols with no source file, we just ignore those.
+ if 'contribs' in symbol_data:
+ i = 0
+ while i < len(symbol_data['contribs']):
+ src_index = symbol_data['contribs'][i]
+ i += 1
+ per_line = symbol_data['contribs'][i]
+ i += 1
+ source = all_data['sources'][int(src_index)]
+ if source not in by_source:
+ by_source[source] = {'lines': {}, 'total_size': 0}
+ size = 0
+ # per_line is [line, size, line, size, line, size, ...]
+ for j in range(0, len(per_line), 2):
+ line_number = per_line[j]
+ size += per_line[j + 1]
+ # Save some time/space in JS by using an array here. 0 == size,
+ # 1 == symbol list.
+ by_source[source]['lines'].setdefault(line_number, [0, []])
+ by_source[source]['lines'][line_number][0] += per_line[j + 1]
+ if symbol in symbols_index:
+ symindex = symbols_index[symbol]
+ else:
+ symbols.append(symbol)
+ symbols_index[symbol] = symindex = len(symbols) - 1
+ by_source[source]['lines'][line_number][1].append(
+ symindex)
+ by_source[source]['total_size'] += size
+ binary_name = all_data['executable']['name']
+ data = {}
+ data['name'] = '/'
+ data['children'] = []
+ file_contents = {}
+ line_data = {}
+ for source, file_data in by_source.iteritems():
+ InsertIntoTree(data, source, file_data['total_size'])
+
+ store_as = source[3:].replace('\\', '/')
+ try:
+ with codecs.open(source, 'rb', encoding='latin1') as f:
+ file_contents[store_as] = f.read()
+ except IOError:
+ file_contents[store_as] = '// Unable to load source.'
+
+ line_data[store_as] = file_data['lines']
+ # code_tally attempts to assign fractional bytes when code is shared
+ # across multiple symbols. Round off here for display after summing above.
+ for per_line in line_data[store_as].values():
+ per_line[0] = round(per_line[0])
+
+ flattened = FlattenTree(data)
+ maxval = 0
+ for i in flattened[1:]:
+ maxval = max(i[2], maxval)
+ flattened_str = json.dumps(flattened)
+
+ to_write = GetAsset('template.html')
+ # Save all data and what would normally be external resources into the
+ # one html so that it's a standalone report.
+ with open(html_path, 'w') as f:
+ f.write(to_write)
+ # These aren't subbed in as a silly workaround for 32-bit python.
+ # The end result is only ~100M, but while substituting these into a
+ # template, it otherwise raises a MemoryError, I guess due to
+ # fragmentation. So instead, we just append them as variables to the file
+ # and then refer to the variables in the main script.
+ filedata_str = json.dumps(file_contents).replace(
+ '</script>', '</scr"+"ipt>')
+ AppendAsScriptBlock(f, filedata_str, var='g_file_contents')
+ AppendAsScriptBlock(f, json.dumps(line_data), var='g_line_data')
+ AppendAsScriptBlock(f, json.dumps(symbols), var='g_symbol_list')
+ favicon_str = json.dumps(base64.b64encode(GetAsset('favicon.png')))
+ AppendAsScriptBlock(f, favicon_str, var='g_favicon')
+ AppendAsScriptBlock(f, flattened_str, var='g_raw_data')
+ AppendAsScriptBlock(f, str(maxval), var='g_maxval')
+ dllname_str = binary_name + ' ' + all_data['executable']['version']
+ AppendAsScriptBlock(f, json.dumps(dllname_str), var='g_dllname')
+ AppendAsScriptBlock(f, GetAsset('codemirror.js'))
+ AppendAsScriptBlock(f, GetAsset('clike.js'))
+ AppendAsScriptBlock(f, GetAsset('main.js'))
+ f.write('</html>')
+
+ return 0
+
+
# Propagate main()'s return value as the process exit code.
if __name__ == '__main__':
  sys.exit(main())
diff --git a/chromium/tools/win/sizeviewer/template.html b/chromium/tools/win/sizeviewer/template.html
new file mode 100644
index 00000000000..a5fe1332e8a
--- /dev/null
+++ b/chromium/tools/win/sizeviewer/template.html
@@ -0,0 +1,380 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <script type="text/javascript" src="https://www.google.com/jsapi"></script>
+ <style type="text/css">
+/* ----------------------------------------- Slightly modified codemirror.css */
+
+/* BASICS */
+
+.CodeMirror {
+ /* Set height, width, borders, and global font properties here */
+ font-family: 'Consolas', 'Monaco', 'Courier';
+ font-size: 12px;
+ height: 300px;
+}
+.CodeMirror-scroll {
+ /* Set scrolling behaviour here */
+ overflow: auto;
+}
+
+/* PADDING */
+
+.CodeMirror-lines {
+ padding: 4px 0; /* Vertical padding around content */
+}
+.CodeMirror pre {
+ padding: 0 4px; /* Horizontal padding of content */
+}
+
+.CodeMirror-scrollbar-filler, .CodeMirror-gutter-filler {
+ background-color: white; /* The little square between H and V scrollbars */
+}
+
+/* GUTTER */
+
+.CodeMirror-gutters {
+ border-right: 1px solid #ddd;
+ background-color: #f7f7f7;
+ white-space: nowrap;
+}
+.CodeMirror-linenumbers {}
+.CodeMirror-linenumber {
+ padding: 0 3px 0 5px;
+ min-width: 20px;
+ text-align: right;
+ color: #999;
+ -moz-box-sizing: content-box;
+ box-sizing: content-box;
+}
+
+.CodeMirror-guttermarker { color: black; }
+.CodeMirror-guttermarker-subtle { color: #999; }
+
+/* CURSOR */
+
+.CodeMirror div.CodeMirror-cursor {
+ border-left: 1px solid black;
+}
+/* Shown when moving in bi-directional text */
+.CodeMirror div.CodeMirror-secondarycursor {
+ border-left: 1px solid silver;
+}
+.CodeMirror.cm-fat-cursor div.CodeMirror-cursor {
+ width: auto;
+ border: 0;
+ background: #7e7;
+}
+.CodeMirror.cm-fat-cursor div.CodeMirror-cursors {
+ z-index: 1;
+}
+
+.cm-animate-fat-cursor {
+ width: auto;
+ border: 0;
+ -webkit-animation: blink 1.06s steps(1) infinite;
+ -moz-animation: blink 1.06s steps(1) infinite;
+ animation: blink 1.06s steps(1) infinite;
+}
+@-moz-keyframes blink {
+ 0% { background: #7e7; }
+ 50% { background: none; }
+ 100% { background: #7e7; }
+}
+@-webkit-keyframes blink {
+ 0% { background: #7e7; }
+ 50% { background: none; }
+ 100% { background: #7e7; }
+}
+@keyframes blink {
+ 0% { background: #7e7; }
+ 50% { background: none; }
+ 100% { background: #7e7; }
+}
+
+/* Can style cursor different in overwrite (non-insert) mode */
+div.CodeMirror-overwrite div.CodeMirror-cursor {}
+
+.cm-tab { display: inline-block; text-decoration: inherit; }
+
+.CodeMirror-ruler {
+ border-left: 1px solid #ccc;
+ position: absolute;
+}
+
+/* DEFAULT THEME */
+
+.cm-s-default .cm-keyword {color: #708;}
+.cm-s-default .cm-atom {color: #219;}
+.cm-s-default .cm-number {color: #164;}
+.cm-s-default .cm-def {color: #00f;}
+.cm-s-default .cm-variable,
+.cm-s-default .cm-punctuation,
+.cm-s-default .cm-property,
+.cm-s-default .cm-operator {}
+.cm-s-default .cm-variable-2 {color: #05a;}
+.cm-s-default .cm-variable-3 {color: #085;}
+.cm-s-default .cm-comment {color: #a50;}
+.cm-s-default .cm-string {color: #a11;}
+.cm-s-default .cm-string-2 {color: #f50;}
+.cm-s-default .cm-meta {color: #555;}
+.cm-s-default .cm-qualifier {color: #555;}
+.cm-s-default .cm-builtin {color: #30a;}
+.cm-s-default .cm-bracket {color: #997;}
+.cm-s-default .cm-tag {color: #170;}
+.cm-s-default .cm-attribute {color: #00c;}
+.cm-s-default .cm-header {color: blue;}
+.cm-s-default .cm-quote {color: #090;}
+.cm-s-default .cm-hr {color: #999;}
+.cm-s-default .cm-link {color: #00c;}
+
+.cm-negative {color: #d44;}
+.cm-positive {color: #292;}
+.cm-header, .cm-strong {font-weight: bold;}
+.cm-em {font-style: italic;}
+.cm-link {text-decoration: underline;}
+.cm-strikethrough {text-decoration: line-through;}
+
+.cm-s-default .cm-error {color: #f00;}
+.cm-invalidchar {color: #f00;}
+
+/* Default styles for common addons */
+
+div.CodeMirror span.CodeMirror-matchingbracket {color: #0f0;}
+div.CodeMirror span.CodeMirror-nonmatchingbracket {color: #f22;}
+.CodeMirror-matchingtag { background: rgba(255, 150, 0, .3); }
+.CodeMirror-activeline-background {background: #e8f2ff;}
+
+/* STOP */
+
+/* The rest of this file contains styles related to the mechanics of
+ the editor. You probably shouldn't touch them. */
+
+.CodeMirror {
+ line-height: 1;
+ position: relative;
+ overflow: hidden;
+ background: white;
+ color: black;
+}
+
+.CodeMirror-scroll {
+ /* 30px is the magic margin used to hide the element's real scrollbars */
+ /* See overflow: hidden in .CodeMirror */
+ margin-bottom: -30px; margin-right: -30px;
+ padding-bottom: 30px;
+ height: 100%;
+ outline: none; /* Prevent dragging from highlighting the element */
+ position: relative;
+ -moz-box-sizing: content-box;
+ box-sizing: content-box;
+}
+.CodeMirror-sizer {
+ position: relative;
+ border-right: 30px solid transparent;
+ -moz-box-sizing: content-box;
+ box-sizing: content-box;
+}
+
+/* The fake, visible scrollbars. Used to force redraw during scrolling
   before actual scrolling happens, thus preventing shaking and
+ flickering artifacts. */
+.CodeMirror-vscrollbar, .CodeMirror-hscrollbar, .CodeMirror-scrollbar-filler, .CodeMirror-gutter-filler {
+ position: absolute;
+ z-index: 6;
+ display: none;
+}
+.CodeMirror-vscrollbar {
+ right: 0; top: 0;
+ overflow-x: hidden;
+ overflow-y: scroll;
+}
+.CodeMirror-hscrollbar {
+ bottom: 0; left: 0;
+ overflow-y: hidden;
+ overflow-x: scroll;
+}
+.CodeMirror-scrollbar-filler {
+ right: 0; bottom: 0;
+}
+.CodeMirror-gutter-filler {
+ left: 0; bottom: 0;
+}
+
+.CodeMirror-gutters {
+ position: absolute; left: 0; top: 0;
+ padding-bottom: 30px;
+ z-index: 3;
+}
+.CodeMirror-gutter {
+ white-space: normal;
+ height: 100%;
+ -moz-box-sizing: content-box;
+ box-sizing: content-box;
+ padding-bottom: 30px;
+ margin-bottom: -32px;
+ display: inline-block;
+ /* Hack to make IE7 behave */
+ *zoom:1;
+ *display:inline;
+}
+.CodeMirror-gutter-wrapper {
+ position: absolute;
+ z-index: 4;
+ height: 100%;
+}
+.CodeMirror-gutter-elt {
+ position: absolute;
+ cursor: default;
+ z-index: 4;
+}
+
+.CodeMirror-lines {
+ cursor: text;
+ min-height: 1px; /* prevents collapsing before first draw */
+}
+.CodeMirror pre {
+ /* Reset some styles that the rest of the page might have set */
+ -moz-border-radius: 0; -webkit-border-radius: 0; border-radius: 0;
+ border-width: 0;
+ background: transparent;
+ font-family: inherit;
+ font-size: inherit;
+ margin: 0;
+ white-space: pre;
+ word-wrap: normal;
+ line-height: inherit;
+ color: inherit;
+ z-index: 2;
+ position: relative;
+ overflow: visible;
+}
+.CodeMirror-wrap pre {
+ word-wrap: break-word;
+ white-space: pre-wrap;
+ word-break: normal;
+}
+
+.CodeMirror-linebackground {
+ position: absolute;
+ left: 0; right: 0; top: 0; bottom: 0;
+ z-index: 0;
+}
+
+.CodeMirror-linewidget {
+ position: relative;
+ z-index: 2;
+ overflow: auto;
+}
+
+.CodeMirror-widget {}
+
+.CodeMirror-wrap .CodeMirror-scroll {
+ overflow-x: hidden;
+}
+
+.CodeMirror-measure {
+ position: absolute;
+ width: 100%;
+ height: 0;
+ overflow: hidden;
+ visibility: hidden;
+}
+.CodeMirror-measure pre { position: static; }
+
+.CodeMirror div.CodeMirror-cursor {
+ position: absolute;
+ border-right: none;
+ width: 0;
+}
+
+div.CodeMirror-cursors {
+ visibility: hidden;
+ position: relative;
+ z-index: 3;
+}
+.CodeMirror-focused div.CodeMirror-cursors {
+ visibility: visible;
+}
+
+.CodeMirror-selected { background: #d9d9d9; }
+.CodeMirror-focused .CodeMirror-selected { background: #d7d4f0; }
+.CodeMirror-crosshair { cursor: crosshair; }
+
+.cm-searching {
+ background: #ffa;
+ background: rgba(255, 255, 0, .4);
+}
+
+/* IE7 hack to prevent it from returning funny offsetTops on the spans */
+.CodeMirror span { *vertical-align: text-bottom; }
+
+/* Used to force a border model for a node */
+.cm-force-border { padding-right: .1px; }
+
+@media print {
+ /* Hide the cursor when printing */
+ .CodeMirror div.CodeMirror-cursors {
+ visibility: hidden;
+ }
+}
+
+/* See issue #2901 */
+.cm-tab-wrap-hack:after { content: ''; }
+
+/* Help users use markselection to safely style text background */
+span.CodeMirror-selectedtext { background: none; }
+
+/* ---------------------------------- End of slightly modified codemirror.css */
+
+body {
+ font-family: "Helvetica Neue", Helvetica, Arial;
+}
+
+#outer {
+ width: 1500px;
+ height: 800px;
+ display: flex;
+}
+.CodeMirror {
+ border: 1px solid black;
+ margin-top: 3px;
+}
+.CodeMirror-linenumbers {
+ width: 150px;
+}
+.linebg-top10 { background: #f88; }
+.linebg-top25 { background: #fbb; }
+.linebg-top50 { background: #fee; }
+.symbol-tag { background: #eee; }
+.charts-tooltip { z-index: 1000; }
+ </style>
+ </head>
+ <body>
+ <h1 id='title'>Loading...</h1>
+ <div id="outer">
      <div id="tree">
+ <button onclick="tree.goUpAndDraw()">Go up</button>
+ <div id="chart_div" style="width: 600px; height: 750px;">
+ </div>
+ </div>
      <div id="source">
+ <textarea id="source_view" style="width: 850px; height: 600px;">
+/*
+
+Drill down on the left to an interesting file to see a line-by-line breakdown.
+
+Largest lines are annotated here in darkest red.
+
+Click on a line with bytes assigned to see contributing symbols below
+
+*/
+ </textarea>
+ <textarea id="symlist_view" style="width: 850px; height: 100px;">
+ </textarea>
      </div>
    </div>
+ </body>
+<!--
+Various things appended: codemirror.js, clike.js, various g_xxx variables ending
+with main.js, followed by the closing html tag.
+-->
diff --git a/chromium/tools/win/static_initializers/static_initializers.cc b/chromium/tools/win/static_initializers/static_initializers.cc
new file mode 100644
index 00000000000..440bb9de833
--- /dev/null
+++ b/chromium/tools/win/static_initializers/static_initializers.cc
@@ -0,0 +1,178 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include <dia2.h>
+#include <stdio.h>
+
+#include <string>
+
+// Create an IDiaData source and open a PDB file.
+static bool LoadDataFromPdb(const wchar_t* filename,
+ IDiaDataSource** source,
+ IDiaSession** session,
+ IDiaSymbol** global,
+ DWORD* machine_type) {
+ // Alternate path to search for debug data.
+ const wchar_t search_path[] = L"SRV**\\\\symbols\\symbols";
+ DWORD mach_type = 0;
+ HRESULT hr = CoInitialize(NULL);
+
+ // Obtain access to the provider.
+ hr = CoCreateInstance(__uuidof(DiaSource),
+ NULL,
+ CLSCTX_INPROC_SERVER,
+ __uuidof(IDiaDataSource),
+ (void**)source);
+
+ if (FAILED(hr)) {
+ printf("CoCreateInstance failed - HRESULT = %08lX\n", hr);
+ return false;
+ }
+
+ wchar_t ext[MAX_PATH];
+ _wsplitpath_s(filename, NULL, 0, NULL, 0, NULL, 0, ext, MAX_PATH);
+
+ // Open and prepare the debug data associated with the executable.
+ hr = (*source)->loadDataForExe(filename, search_path, NULL);
+ if (FAILED(hr)) {
+ printf("loadDataForExe failed - HRESULT = %08lX\n", hr);
+ return false;
+ }
+
+ // Open a session for querying symbols.
+ hr = (*source)->openSession(session);
+
+ if (FAILED(hr)) {
+ printf("openSession failed - HRESULT = %08lX\n", hr);
+ return false;
+ }
+
+ // Retrieve a reference to the global scope.
+ hr = (*session)->get_globalScope(global);
+
+ if (FAILED(hr)) {
+ printf("get_globalScope failed\n");
+ return false;
+ }
+
+ // Set machine type for getting correct register names.
+ if (SUCCEEDED((*global)->get_machineType(&mach_type))) {
+ switch (mach_type) {
+ case IMAGE_FILE_MACHINE_I386:
+ *machine_type = CV_CFL_80386;
+ break;
+ case IMAGE_FILE_MACHINE_IA64:
+ *machine_type = CV_CFL_IA64;
+ break;
+ case IMAGE_FILE_MACHINE_AMD64:
+ *machine_type = CV_CFL_AMD64;
+ break;
+ default:
+ printf("unexpected machine type\n");
+ return false;
+ }
+ }
+
+ return true;
+}
+
+// Release DIA objects and CoUninitialize.
+static void Cleanup(IDiaSymbol* global_symbol, IDiaSession* dia_session) {
+ if (global_symbol)
+ global_symbol->Release();
+ if (dia_session)
+ dia_session->Release();
+ CoUninitialize();
+}
+
+// Prints "<module>: <symbol name>" when |symbol| is a compiler-generated
+// dynamic initializer function (or block). Non-matching symbols are ignored.
+static void PrintIfDynamicInitializer(const std::wstring& module,
+                                      IDiaSymbol* symbol) {
+  DWORD symtag;
+
+  if (FAILED(symbol->get_symTag(&symtag)))
+    return;
+
+  if (symtag != SymTagFunction && symtag != SymTagBlock)
+    return;
+
+  BSTR bstr_name;
+  if (SUCCEEDED(symbol->get_name(&bstr_name))) {
+    if (wcsstr(bstr_name, L"`dynamic initializer for '"))
+      wprintf(L"%s: %s\n", module.c_str(), bstr_name);
+    SysFreeString(bstr_name);  // Free the BSTR even when the name did not match.
+  }
+}
+
+static bool DumpStaticInitializers(IDiaSymbol* global_symbol) {
+ // Retrieve the compilands first.
+ IDiaEnumSymbols* enum_symbols;
+ if (FAILED(global_symbol->findChildren(
+ SymTagCompiland, NULL, nsNone, &enum_symbols))) {
+ return false;
+ }
+
+ IDiaSymbol* compiland;
+ ULONG element_count = 0;
+
+ std::wstring current_module;
+ while (SUCCEEDED(enum_symbols->Next(1, &compiland, &element_count)) &&
+ (element_count == 1)) {
+ BSTR bstr_name;
+ if (FAILED(compiland->get_name(&bstr_name))) {
+ current_module = L"<unknown>";
+ } else {
+ current_module = bstr_name;
+ SysFreeString(bstr_name);
+ }
+
+ // Find all the symbols defined in this compiland, and print them if they
+ // have the name corresponding to an initializer.
+ IDiaEnumSymbols* enum_children;
+ if (SUCCEEDED(compiland->findChildren(
+ SymTagNull, NULL, nsNone, &enum_children))) {
+ IDiaSymbol* symbol;
+ ULONG children = 0;
+ while (SUCCEEDED(enum_children->Next(1, &symbol, &children)) &&
+ children == 1) { // Enumerate until we don't get any more symbols.
+ PrintIfDynamicInitializer(current_module, symbol);
+ symbol->Release();
+ }
+ enum_children->Release();
+ }
+ compiland->Release();
+ }
+
+ enum_symbols->Release();
+ return true;
+}
+
+int wmain(int argc, wchar_t* argv[]) {
+ if (argc != 2) {
+ wprintf(L"usage: %ls binary_name\n", argv[0]);
+ return 1;
+ }
+
+ IDiaDataSource* dia_data_source;
+ IDiaSession* dia_session;
+ IDiaSymbol* global_symbol;
+ DWORD machine_type = CV_CFL_80386;
+ if (!LoadDataFromPdb(argv[1],
+ &dia_data_source,
+ &dia_session,
+ &global_symbol,
+ &machine_type)) {
+ wprintf(L"Couldn't load data from pdb.\n");
+ return 1;
+ }
+
+ wprintf(L"Static initializers in %s:\n", argv[1]);
+
+ if (!DumpStaticInitializers(global_symbol))
+ return 1;
+
+ Cleanup(global_symbol, dia_session);
+
+ return 0;
+}
diff --git a/chromium/tools/xdisplaycheck/BUILD.gn b/chromium/tools/xdisplaycheck/BUILD.gn
new file mode 100644
index 00000000000..779e1636519
--- /dev/null
+++ b/chromium/tools/xdisplaycheck/BUILD.gn
@@ -0,0 +1,15 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+executable("xdisplaycheck") {
+ sources = [
+ "xdisplaycheck.cc",
+ ]
+
+ configs += [ "//build/config/linux:x11" ]
+
+ deps = [
+ "//build/config/sanitizers:deps",
+ ]
+}
diff --git a/chromium/tools/xdisplaycheck/xdisplaycheck.cc b/chromium/tools/xdisplaycheck/xdisplaycheck.cc
new file mode 100644
index 00000000000..6623153a846
--- /dev/null
+++ b/chromium/tools/xdisplaycheck/xdisplaycheck.cc
@@ -0,0 +1,119 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This is a small program that tries to connect to the X server. It
+// continually retries until it connects or 30 seconds pass. If it fails
+// to connect to the X server or fails to find needed functions, it returns
+// an error code of -1.
+//
+// This is to help verify that a useful X server is available before we
+// start running tests on the build bots.
+
+#include <errno.h>
+#include <stdio.h>
+#include <string.h>
+#include <time.h>
+#include <X11/Xlib.h>
+
+#if defined(USE_AURA)
+#include <X11/extensions/XInput2.h>
+#endif
+
+void Sleep(int duration_ms) {
+ struct timespec sleep_time, remaining;
+
+ // Contains the portion of duration_ms >= 1 sec.
+ sleep_time.tv_sec = duration_ms / 1000;
+ duration_ms -= sleep_time.tv_sec * 1000;
+
+ // Contains the portion of duration_ms < 1 sec.
+ sleep_time.tv_nsec = duration_ms * 1000 * 1000; // nanoseconds.
+
+ while (nanosleep(&sleep_time, &remaining) == -1 && errno == EINTR)
+ sleep_time = remaining;
+}
+
+class XScopedDisplay {
+ public:
+ XScopedDisplay() : display_(NULL) {}
+ ~XScopedDisplay() {
+ if (display_) XCloseDisplay(display_);
+ }
+
+ void set(Display* display) { display_ = display; }
+ Display* display() { return display_; }
+
+ private:
+ Display* display_;
+};
+
+int main(int argc, char* argv[]) {
+ XScopedDisplay scoped_display;
+ if (argv[1] && strcmp(argv[1], "--noserver") == 0) {
+ scoped_display.set(XOpenDisplay(NULL));
+ if (scoped_display.display()) {
+ fprintf(stderr, "Found unexpected connectable display %s\n",
+ XDisplayName(NULL));
+ }
+ // Return success when we got an unexpected display so that the code
+ // without the --noserver is the same, but slow, rather than inverted.
+ return !scoped_display.display();
+ }
+
+ int kNumTries = 78; // 78*77/2 * 10 = 30s of waiting
+ int tries;
+ for (tries = 0; tries < kNumTries; ++tries) {
+ scoped_display.set(XOpenDisplay(NULL));
+ if (scoped_display.display())
+ break;
+ Sleep(10 * tries);
+ }
+
+ if (!scoped_display.display()) {
+ fprintf(stderr, "Failed to connect to %s\n", XDisplayName(NULL));
+ return -1;
+ }
+
+ fprintf(stderr, "Connected after %d retries\n", tries);
+
+#if defined(USE_AURA)
+ // Check for XInput2
+ int opcode, event, err;
+ if (!XQueryExtension(scoped_display.display(), "XInputExtension", &opcode,
+ &event, &err)) {
+ fprintf(stderr,
+ "Failed to get XInputExtension on %s.\n", XDisplayName(NULL));
+ return -2;
+ }
+
+ int major = 2, minor = 0;
+ if (XIQueryVersion(scoped_display.display(), &major, &minor) == BadRequest) {
+ fprintf(stderr,
+ "Server does not have XInput2 on %s.\n", XDisplayName(NULL));
+ return -3;
+ }
+
+ // Ask for the list of devices. This can cause some Xvfb to crash.
+ int count = 0;
+ XIDeviceInfo* devices =
+ XIQueryDevice(scoped_display.display(), XIAllDevices, &count);
+ if (devices)
+ XIFreeDeviceInfo(devices);
+
+ fprintf(stderr,
+ "XInput2 verified initially sane on %s.\n", XDisplayName(NULL));
+#endif
+ return 0;
+}
+
+#if defined(LEAK_SANITIZER)
+// XOpenDisplay leaks memory if it takes more than one try to connect. This
+// causes LSan bots to fail. We don't care about memory leaks in xdisplaycheck
+// anyway, so just disable LSan completely.
+// This function isn't referenced from the executable itself. Make sure it isn't
+// stripped by the linker.
+__attribute__((used))
+__attribute__((visibility("default")))
+extern "C" int __lsan_is_turned_off() { return 1; }
+#endif
diff --git a/chromium/tools/yes_no.py b/chromium/tools/yes_no.py
new file mode 100644
index 00000000000..8682ec8de5e
--- /dev/null
+++ b/chromium/tools/yes_no.py
@@ -0,0 +1,28 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+
+def YesNo(prompt):
+ """Prompts with a yes/no question, returns True if yes."""
+ print prompt,
+ sys.stdout.flush()
+ # http://code.activestate.com/recipes/134892/
+ if sys.platform == 'win32':
+ import msvcrt
+ ch = msvcrt.getch()
+ else:
+ import termios
+ import tty
+ fd = sys.stdin.fileno()
+ old_settings = termios.tcgetattr(fd)
+ ch = 'n'
+ try:
+ tty.setraw(sys.stdin.fileno())
+ ch = sys.stdin.read(1)
+ finally:
+ termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
+ print ch
+ return ch in ('Y', 'y')